@danielblomma/cortex-mcp 1.7.0 → 1.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/cortex.mjs CHANGED
@@ -149,6 +149,80 @@ function ensureScaffoldExists() {
149
149
  // Files that should never be overwritten if they already exist in the target.
150
150
  // These contain user-specific configuration that would be lost on re-init.
151
151
  const PRESERVE_FILES = new Set(["config.yaml", "enterprise.yml", "enterprise.yaml", "CLAUDE.md"]);
152
// Fallback ingestion roots used when the target repo yields nothing usable.
// Order matters: this is written verbatim into the generated config.yaml.
const DEFAULT_SOURCE_PATHS = [
  "src",
  "docs",
  "design",
  ".context/notes",
  ".context/decisions",
  "README.md"
];

// Directories never scanned during init detection: VCS metadata, editor
// settings, build output, and cortex's own scaffolding.
const INIT_SKIP_DIRECTORIES = new Set([
  ".git", ".idea", ".vscode", "node_modules",
  "dist", "build", "coverage", ".next", ".cache",
  ".context", "scripts", "mcp", ".githooks", "bin", "obj"
]);

// File extensions considered "interesting" when probing a repo for content.
const INIT_SOURCE_EXTENSIONS = new Set([
  // docs & structured data
  ".md", ".mdx", ".txt", ".adoc", ".rst",
  ".yaml", ".yml", ".json", ".toml", ".csv",
  // JavaScript / TypeScript
  ".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs",
  // general-purpose languages
  ".py", ".go", ".java",
  // .NET ecosystem (sources + project/build files)
  ".cs", ".vb", ".sln", ".vbproj", ".csproj", ".fsproj",
  ".props", ".targets", ".config", ".resx", ".settings",
  // other languages, scripts, and SQL
  ".rb", ".rs", ".php", ".swift", ".kt", ".sql",
  ".sh", ".bash", ".zsh", ".ps1",
  // C / C++
  ".c", ".h", ".cpp", ".hpp", ".cc", ".hh"
]);

// Top-level directories that count as documentation rather than code.
const ROOT_DOC_PATHS = new Set(["docs", "design"]);
152
226
 
153
227
  function copyDirectory(sourceDir, targetDir) {
154
228
  fs.mkdirSync(targetDir, { recursive: true });
@@ -175,6 +249,147 @@ function copyDirectory(sourceDir, targetDir) {
175
249
  }
176
250
  }
177
251
 
252
// Convert an OS-specific path into forward-slash (POSIX) form.
function toPosixPath(value) {
  return value.replaceAll(path.sep, "/");
}
255
+
256
// Render a string as a YAML scalar, quoting whenever plain style would be
// unsafe or ambiguous. Besides strings containing characters outside the
// safe set, this also quotes values the YAML loader would re-type — booleans
// (true/false/yes/no/on/off), null, and numbers — so that a repo or path
// named e.g. "true" or "123" still round-trips as a string.
function yamlScalar(value) {
  const plainSafe = /^[A-Za-z0-9._/-]+$/.test(value);
  const looksTyped =
    /^(?:true|false|yes|no|on|off|null)$/i.test(value) ||
    /^[-+]?(?:\d+\.?\d*|\.\d+)(?:[eE][-+]?\d+)?$/.test(value);
  return plainSafe && !looksTyped ? value : JSON.stringify(value);
}
259
+
260
// Normalize an arbitrary directory name into a kebab-case repo identifier.
// Falls back to "cortex" when nothing usable remains after normalization.
function slugifyRepoId(value) {
  let slug = String(value || "").trim();
  // Split ACRONYMCase and camelCase boundaries before collapsing separators.
  slug = slug.replace(/([A-Z]+)([A-Z][a-z])/g, "$1-$2");
  slug = slug.replace(/([a-z0-9])([A-Z])/g, "$1-$2");
  // Collapse every non-alphanumeric run into a single dash, strip edge dashes.
  slug = slug.replace(/[^A-Za-z0-9]+/g, "-").replace(/^-+|-+$/g, "");
  slug = slug.toLowerCase();
  return slug.length > 0 ? slug : "cortex";
}
270
+
271
// A file is "interesting" if its extension is on the allow-list, or if it is
// a README in any form (extensionless or with any suffix, case-insensitive).
function isInterestingSourceFile(fileName) {
  const lowered = fileName.toLowerCase();
  if (lowered === "readme" || lowered.startsWith("readme.")) {
    return true;
  }
  return INIT_SOURCE_EXTENSIONS.has(path.extname(fileName).toLowerCase());
}
276
+
277
// Depth-first probe: returns true as soon as any file below directoryPath
// (skipping INIT_SKIP_DIRECTORIES subtrees) passes isInterestingSourceFile.
// Unreadable directories are silently treated as empty (best effort).
function directoryContainsInterestingFiles(directoryPath) {
  const pending = [directoryPath];

  while (pending.length > 0) {
    const dir = pending.pop();

    let dirEntries;
    try {
      dirEntries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      continue; // permission errors, missing dirs, etc.
    }

    for (const dirent of dirEntries) {
      if (dirent.isDirectory()) {
        if (!INIT_SKIP_DIRECTORIES.has(dirent.name)) {
          pending.push(path.join(dir, dirent.name));
        }
      } else if (dirent.isFile() && isInterestingSourceFile(dirent.name)) {
        return true;
      }
    }
  }

  return false;
}
306
+
307
// Inspect the target repo's top level and build an ordered source_paths list:
// code directories first, then loose root files, then doc directories, the
// .context folders, and finally any README variants. Falls back to a copy of
// DEFAULT_SOURCE_PATHS when the directory is unreadable, missing, or holds
// no concrete repo content.
function detectInitialSourcePaths(targetDir) {
  if (!fs.existsSync(targetDir)) {
    return [...DEFAULT_SOURCE_PATHS];
  }

  let topLevel;
  try {
    topLevel = fs.readdirSync(targetDir, { withFileTypes: true });
  } catch {
    return [...DEFAULT_SOURCE_PATHS];
  }

  const codeDirs = [];
  const docDirs = [];
  const rootFiles = [];

  topLevel.sort((left, right) => left.name.localeCompare(right.name));
  for (const dirent of topLevel) {
    if (dirent.isDirectory()) {
      if (INIT_SKIP_DIRECTORIES.has(dirent.name)) {
        continue;
      }
      if (!directoryContainsInterestingFiles(path.join(targetDir, dirent.name))) {
        continue;
      }
      const bucket = ROOT_DOC_PATHS.has(dirent.name) ? docDirs : codeDirs;
      bucket.push(toPosixPath(dirent.name));
    } else if (dirent.isFile() && isInterestingSourceFile(dirent.name)) {
      rootFiles.push(toPosixPath(dirent.name));
    }
  }

  // READMEs sort to the end of the list; everything else keeps bucket order.
  const isReadme = (filePath) => /^readme(\.|$)/i.test(path.basename(filePath));
  const readmeFiles = rootFiles.filter(isReadme);
  const otherRootFiles = rootFiles.filter((filePath) => !isReadme(filePath));

  const ordered = [...new Set([
    ...codeDirs,
    ...otherRootFiles,
    ...docDirs,
    ".context/notes",
    ".context/decisions",
    ...readmeFiles
  ])];

  const hasRepoContent = ordered.some((entry) => !entry.startsWith(".context/"));
  return hasRepoContent ? ordered : [...DEFAULT_SOURCE_PATHS];
}
358
+
359
// Generate the initial .context/config.yaml text for targetDir: a repo id
// derived from the directory name, the detected source paths, and the fixed
// truth-order / ranking / runtime defaults. Always ends with a newline.
function buildInitialConfig(targetDir) {
  const repoId = slugifyRepoId(path.basename(path.resolve(targetDir)));

  const lines = [`repo_id: ${yamlScalar(repoId)}`, "source_paths:"];
  for (const sourcePath of detectInitialSourcePaths(targetDir)) {
    lines.push(` - ${yamlScalar(sourcePath)}`);
  }
  lines.push(
    "truth_order:",
    " - ADR",
    " - RULE",
    " - CODE",
    " - WIKI",
    "ranking:",
    " semantic: 0.40",
    " graph: 0.25",
    " trust: 0.20",
    " recency: 0.15",
    "runtime:",
    " top_k: 5",
    " include_uncertainties: true",
    ""
  );
  return lines.join("\n");
}
382
+
383
// Install the scaffold into targetDir, generating a fresh config.yaml only
// when the repo does not already have one — user configuration is never
// clobbered. The config text is built before installScaffold runs
// (presumably so source-path detection sees the repo before scaffold files
// are added — confirm against installScaffold's behavior).
function initializeScaffold(targetDir, force) {
  const configPath = path.join(targetDir, ".context", "config.yaml");
  const hadConfig = fs.existsSync(configPath);
  const freshConfig = hadConfig ? null : buildInitialConfig(targetDir);

  installScaffold(targetDir, force);

  if (!hadConfig && freshConfig) {
    writeTextFile(configPath, freshConfig);
  }
}
392
+
178
393
  function ensurePathWritable(targetPath, force) {
179
394
  if (!fs.existsSync(targetPath)) {
180
395
  return;
@@ -555,7 +770,7 @@ async function maybeMigrateScaffold(targetDir, command) {
555
770
 
556
771
  console.error(`[cortex] migrating scaffold in ${targetDir}`);
557
772
  ensureScaffoldExists();
558
- installScaffold(targetDir, true);
773
+ initializeScaffold(targetDir, true);
559
774
  installAssistantHelpers(targetDir);
560
775
  await maybeInstallGitHooks(targetDir);
561
776
  await runContextCommand(targetDir, ["bootstrap"]);
@@ -590,7 +805,7 @@ async function ensureProjectInitializedForMcp(targetDir) {
590
805
  }
591
806
  ensureScaffoldExists();
592
807
  fs.mkdirSync(targetDir, { recursive: true });
593
- installScaffold(targetDir, false);
808
+ initializeScaffold(targetDir, false);
594
809
  installAssistantHelpers(targetDir);
595
810
  await maybeInstallGitHooks(targetDir);
596
811
  console.log(`[cortex] auto-init completed in ${targetDir}`);
@@ -632,7 +847,7 @@ async function run() {
632
847
  const { target, force, bootstrap, connect, watch } = parseInitArgs(rest);
633
848
  printBanner("Cortex initializes repo-scoped context for AI coding agents.");
634
849
  fs.mkdirSync(target, { recursive: true });
635
- installScaffold(target, force);
850
+ initializeScaffold(target, force);
636
851
  const helpers = installAssistantHelpers(target);
637
852
  await maybeInstallGitHooks(target);
638
853
 
@@ -753,4 +968,4 @@ if (invokedAsScript) {
753
968
  });
754
969
  }
755
970
 
756
- export { isScaffoldOutOfDate };
971
+ export { buildInitialConfig, detectInitialSourcePaths, isScaffoldOutOfDate, slugifyRepoId };
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@danielblomma/cortex-mcp",
3
3
  "mcpName": "io.github.DanielBlomma/cortex",
4
- "version": "1.7.0",
4
+ "version": "1.7.1",
5
5
  "description": "Local, repo-scoped context platform for coding assistants. Semantic search, graph relationships, and architectural rule context.",
6
6
  "type": "module",
7
7
  "author": "Daniel Blomma",
@@ -46,7 +46,7 @@
46
46
  "docs/MCP_MARKETPLACE.md"
47
47
  ],
48
48
  "scripts": {
49
- "test": "node tests/context-regressions.test.mjs && node --test tests/ingest-units.test.mjs tests/javascript-parser.test.mjs tests/sql-parser.test.mjs tests/config-parser.test.mjs tests/resources-parser.test.mjs tests/vbnet-parser.test.mjs tests/cpp-parser.test.mjs tests/multi-level.test.mjs tests/no-legacy-paths.test.mjs tests/tree-sitter-error-reporting.test.mjs tests/tree-sitter-body-cap.test.mjs tests/tree-sitter-exported.test.mjs tests/tree-sitter-robustness.test.mjs",
49
+ "test": "node tests/context-regressions.test.mjs && node --test tests/ingest-units.test.mjs tests/javascript-parser.test.mjs tests/sql-parser.test.mjs tests/config-parser.test.mjs tests/resources-parser.test.mjs tests/vbnet-parser.test.mjs tests/cpp-parser.test.mjs tests/dashboard.test.mjs tests/init-config.test.mjs tests/multi-level.test.mjs tests/no-legacy-paths.test.mjs tests/tree-sitter-error-reporting.test.mjs tests/tree-sitter-body-cap.test.mjs tests/tree-sitter-exported.test.mjs tests/tree-sitter-robustness.test.mjs",
50
50
  "release:sync-version": "node scripts/sync-release-version.mjs",
51
51
  "release:check-version-sync": "node scripts/sync-release-version.mjs --check",
52
52
  "prepublishOnly": "echo 'Ready to publish to npm'"
@@ -24,11 +24,12 @@ const SUPPORTED_TEXT_EXTENSIONS = new Set([
24
24
  // Same skip dirs as ingest.mjs
25
25
  const SKIP_DIRECTORIES = new Set([
26
26
  ".git", ".idea", ".vscode", "node_modules",
27
- "dist", "build", "coverage", ".next", ".cache", ".context"
27
+ "bin", "obj", "dist", "build", "coverage", ".next", ".cache", ".context"
28
28
  ]);
29
29
 
30
30
  const MAX_FILE_BYTES = 1024 * 1024;
31
31
  const VERSION_CHECK_TTL_MS = 10 * 60 * 1000;
32
+ const VERSION_LOOKUP_TIMEOUT_MS = 8000;
32
33
  const VERSION_INSTALL_HINT = "npm i -g github:DanielBlomma/cortex";
33
34
 
34
35
  // ── ANSI helpers ──────────────────────────────────────────────
@@ -93,7 +94,11 @@ function walkDirectory(dirPath, files) {
93
94
  if (entry.isDirectory()) {
94
95
  walkDirectory(abs, files);
95
96
  } else if (entry.isFile()) {
96
- files.push(abs);
97
+ if (typeof files.add === "function") {
98
+ files.add(abs);
99
+ } else {
100
+ files.push(abs);
101
+ }
97
102
  }
98
103
  }
99
104
  }
@@ -111,20 +116,20 @@ function hasSourcePrefix(relPath, sourcePaths) {
111
116
  }
112
117
 
113
118
  // ── Data: baseline scan ──────────────────────────────────────
114
- function scanBaseline() {
115
- if (!fs.existsSync(CONFIG_PATH)) return { files: 0, lines: 0, chars: 0, tokens: 0 };
119
+ function scanBaseline(repoRoot = REPO_ROOT, configPath = CONFIG_PATH) {
120
+ if (!fs.existsSync(configPath)) return { files: 0, lines: 0, chars: 0, tokens: 0 };
116
121
 
117
- const configText = fs.readFileSync(CONFIG_PATH, "utf8");
118
- const sourcePaths = parseSourcePaths(configText);
122
+ const configText = fs.readFileSync(configPath, "utf8");
123
+ const sourcePaths = [...new Set(parseSourcePaths(configText))];
119
124
  if (sourcePaths.length === 0) return { files: 0, lines: 0, chars: 0, tokens: 0 };
120
125
 
121
- const allFiles = [];
126
+ const allFiles = new Set();
122
127
  for (const sp of sourcePaths) {
123
- const abs = path.resolve(REPO_ROOT, sp);
128
+ const abs = path.resolve(repoRoot, sp);
124
129
  if (!fs.existsSync(abs)) continue;
125
130
  const stat = fs.statSync(abs);
126
131
  if (stat.isFile()) {
127
- allFiles.push(abs);
132
+ allFiles.add(abs);
128
133
  } else if (stat.isDirectory()) {
129
134
  walkDirectory(abs, allFiles);
130
135
  }
@@ -265,6 +270,9 @@ function shorten(text, max = 40) {
265
270
  }
266
271
 
267
272
  function summarizeError(error) {
273
+ if (error && typeof error === "object" && error.code === "ETIMEDOUT") {
274
+ return `version check timed out after ${Math.round(VERSION_LOOKUP_TIMEOUT_MS / 1000)}s`;
275
+ }
268
276
  const raw = error instanceof Error ? error.message : String(error);
269
277
  return shorten(raw.split(/\r?\n/)[0].trim());
270
278
  }
@@ -325,7 +333,7 @@ function getVersionStatus() {
325
333
  cwd: REPO_ROOT,
326
334
  stdio: ["ignore", "pipe", "pipe"],
327
335
  encoding: "utf8",
328
- timeout: 2500,
336
+ timeout: VERSION_LOOKUP_TIMEOUT_MS,
329
337
  env: { ...process.env, NPM_CONFIG_CACHE: npmCache },
330
338
  shell: true,
331
339
  }).trim();
@@ -491,6 +499,7 @@ function gatherData(baselineCache) {
491
499
  },
492
500
  tokens: tokenEstimate,
493
501
  embeddings: embedModel ? { model: embedModel, count: embedCount, dimensions: embedDim } : null,
502
+ parserHealth: manifests.ingest?.parser_health || {},
494
503
  freshness,
495
504
  version,
496
505
  topConnected,
@@ -696,6 +705,14 @@ function render(data, isTTY) {
696
705
  } else {
697
706
  lines.push(sideBorder(`Embeddings: ${col("not generated", C.yellow)} ${dim("Run: cortex embed")}`, w));
698
707
  }
708
+ if (data.parserHealth.csharp && Number(data.parserHealth.csharp.files || 0) > 0) {
709
+ const csharp = data.parserHealth.csharp;
710
+ if (!csharp.available) {
711
+ lines.push(sideBorder(`Parser warning (C#): ${col("unavailable", C.red)} ${dim(csharp.reason || "install .NET SDK")}`, w));
712
+ } else if (csharp.chunks === 0) {
713
+ lines.push(sideBorder(`Parser warning (C#): ${col("0 chunks", C.yellow)} ${dim(`${csharp.files} files indexed`)}`, w));
714
+ }
715
+ }
699
716
  lines.push(emptyLine(w));
700
717
 
701
718
  // ── TOP CONNECTED ──
@@ -791,4 +808,14 @@ function main() {
791
808
  });
792
809
  }
793
810
 
794
- main();
811
+ const isMainModule = process.argv[1] && path.resolve(process.argv[1]) === path.resolve(__filename);
812
+ if (isMainModule) {
813
+ main();
814
+ }
815
+
816
+ export {
817
+ parseSourcePaths,
818
+ render,
819
+ scanBaseline,
820
+ walkDirectory
821
+ };
@@ -24,6 +24,7 @@ let parseVb6Code = null;
24
24
  let isVbNetParserAvailable = () => false;
25
25
  let isCSharpParserAvailable = () => false;
26
26
  let isCppParserAvailable = () => false;
27
+ let getCSharpParserRuntime = () => ({ available: false, reason: "parser module not loaded" });
27
28
 
28
29
  async function loadOptionalParsers() {
29
30
  const loaders = [
@@ -37,6 +38,10 @@ async function loadOptionalParsers() {
37
38
  import("./parsers/csharp.mjs").then((module) => {
38
39
  parseCSharpCode = module.parseCode;
39
40
  parseCSharpProject = module.parseProject ?? null;
41
+ getCSharpParserRuntime =
42
+ typeof module.getCSharpParserRuntime === "function"
43
+ ? module.getCSharpParserRuntime
44
+ : () => ({ available: typeof module.parseCode === "function", reason: "runtime details unavailable" });
40
45
  isCSharpParserAvailable =
41
46
  typeof module.isCSharpParserAvailable === "function"
42
47
  ? module.isCSharpParserAvailable
@@ -472,6 +477,8 @@ const SKIP_DIRECTORIES = new Set([
472
477
  ".idea",
473
478
  ".vscode",
474
479
  "node_modules",
480
+ "bin",
481
+ "obj",
475
482
  "dist",
476
483
  "build",
477
484
  "coverage",
@@ -2675,6 +2682,8 @@ async function main() {
2675
2682
 
2676
2683
  const fileRecords = [...fileRecordMap.values()].sort((a, b) => a.path.localeCompare(b.path));
2677
2684
  const adrRecords = [...adrRecordMap.values()].sort((a, b) => a.path.localeCompare(b.path));
2685
+ const csharpFileCount = fileRecords.filter((record) => path.extname(record.path).toLowerCase() === ".cs").length;
2686
+ const csharpRuntime = csharpFileCount > 0 ? getCSharpParserRuntime() : null;
2678
2687
  const indexedFileIds = new Set(fileRecords.map((record) => record.id));
2679
2688
  const changedFileIds = new Set(
2680
2689
  [...candidates].map((absolutePath) => `file:${toPosixPath(path.relative(REPO_ROOT, absolutePath))}`)
@@ -3075,6 +3084,23 @@ async function main() {
3075
3084
  }
3076
3085
  }
3077
3086
 
3087
+ const csharpChunkCount = chunkRecords.filter((record) => record.language === "csharp").length;
3088
+ const parserHealth = {};
3089
+ if (csharpFileCount > 0) {
3090
+ parserHealth.csharp = {
3091
+ files: csharpFileCount,
3092
+ available: Boolean(csharpRuntime?.available),
3093
+ reason: csharpRuntime?.available ? null : (csharpRuntime?.reason ?? "C# parser unavailable"),
3094
+ chunks: csharpChunkCount,
3095
+ };
3096
+
3097
+ if (!csharpRuntime?.available) {
3098
+ console.log(`[ingest] warning csharp parser unavailable: ${parserHealth.csharp.reason}`);
3099
+ } else if (csharpChunkCount === 0) {
3100
+ console.log("[ingest] warning csharp parser produced 0 chunks across C# files");
3101
+ }
3102
+ }
3103
+
3078
3104
  // Generate Module entities and relations
3079
3105
  const moduleResult = generateModules(fileRecords, chunkRecords);
3080
3106
  const moduleRecords = moduleResult.modules;
@@ -3512,6 +3538,7 @@ async function main() {
3512
3538
  relations_transforms_config: configTransformRelations.length
3513
3539
  },
3514
3540
  skipped,
3541
+ parser_health: parserHealth,
3515
3542
  incremental_mode: incrementalMode,
3516
3543
  changed_candidates: candidates.size,
3517
3544
  deleted_paths: deletedRelPaths.length
@@ -25,6 +25,20 @@ const DEFAULT_TARGET_FRAMEWORK = "net8.0";
25
25
  let runtimeCache = null;
26
26
  let publishCache = null;
27
27
 
28
// Walk from startDir toward the filesystem root, reporting whether any
// ancestor (or startDir itself) contains a .git entry. Used to distinguish
// a development checkout from a packaged install.
function hasGitCheckout(startDir) {
  for (let dir = startDir; ; ) {
    if (fs.existsSync(path.join(dir, ".git"))) {
      return true;
    }
    const up = path.dirname(dir);
    if (up === dir) {
      return false; // reached the filesystem root without finding .git
    }
    dir = up;
  }
}
+ }
41
+
28
42
  function getDotnetCommand() {
29
43
  const override = process.env.CORTEX_DOTNET_CMD;
30
44
  return override && override.trim().length > 0 ? override.trim() : DEFAULT_DOTNET_COMMAND;
@@ -74,6 +88,19 @@ function needsPublish() {
74
88
  } catch {
75
89
  return true;
76
90
  }
91
+
92
+ if (process.env.CORTEX_CSHARP_FORCE_PUBLISH === "1") {
93
+ return true;
94
+ }
95
+
96
+ // In packaged installs there is no writable git checkout, but the
97
+ // published DLL is already bundled. Trust it instead of forcing an
98
+ // unnecessary `dotnet publish`, which can fail offline and leave C#
99
+ // repos with 0 chunks.
100
+ if (!hasGitCheckout(__dirname)) {
101
+ return false;
102
+ }
103
+
77
104
  return getMaxSourceMtime() > dllMtime;
78
105
  }
79
106
 
@@ -24,6 +24,13 @@ console.log(`[status] files=${c.files ?? 0} adrs=${c.adrs ?? 0} rules=${c.rules
24
24
  console.log(`[status] rels constrains=${c.relations_constrains ?? 0} implements=${c.relations_implements ?? 0} supersedes=${c.relations_supersedes ?? 0}`);
25
25
  const s = data.skipped || {};
26
26
  console.log(`[status] skipped unsupported=${s.unsupported ?? 0} too_large=${s.too_large ?? s.tooLarge ?? 0} binary=${s.binary ?? 0}`);
27
+ const parserHealth = data.parser_health || {};
28
+ if (parserHealth.csharp) {
29
+ console.log(`[status] csharp_parser available=${parserHealth.csharp.available} files=${parserHealth.csharp.files ?? 0} chunks=${parserHealth.csharp.chunks ?? 0}`);
30
+ if (parserHealth.csharp.reason) {
31
+ console.log(`[status] csharp_parser_reason=${parserHealth.csharp.reason}`);
32
+ }
33
+ }
27
34
  if (typeof data.incremental_mode === "boolean") {
28
35
  console.log(`[status] incremental_mode=${data.incremental_mode} changed_candidates=${data.changed_candidates ?? 0} deleted_paths=${data.deleted_paths ?? 0}`);
29
36
  }