@danielblomma/cortex-mcp 2.0.8 → 2.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@danielblomma/cortex-mcp",
   "mcpName": "io.github.DanielBlomma/cortex",
-  "version": "2.0.8",
+  "version": "2.0.12",
   "description": "Local, repo-scoped context platform for coding assistants. Semantic search, graph relationships, and architectural rule context.",
   "type": "module",
   "author": "Daniel Blomma",
@@ -9,7 +9,8 @@
     "graph:load": "npm run build --silent && node dist/loadGraph.js",
     "dev": "node --loader ts-node/esm src/server.ts",
     "start": "node dist/server.js",
-    "test": "npm run build --silent && node --test tests/*.test.mjs"
+    "test": "npm run build --silent && node --test tests/*.test.mjs",
+    "test:ci": "npm run build --silent && node --test --test-reporter=spec tests/*.test.mjs"
   },
   "dependencies": {
     "@huggingface/transformers": "^4.1.0",
@@ -4,6 +4,7 @@ import { readRunState } from "./artifact-io.js";
 import { DEFAULT_CAPABILITIES, type CapabilityDefinition } from "./capabilities.js";
 import { workflowDefinitionSchema, type WorkflowDefinition } from "./schemas.js";
 import { DEFAULT_WORKFLOWS } from "./default-workflows.js";
+import { loadSyncedCapabilities } from "./synced-capability-registry.js";

 /**
  * Pre-tool-use enforcement for the harness. Pure function: takes the tool
@@ -83,7 +84,12 @@ export function evaluateToolCall(options: EvaluateOptions): EnforcementResult {
     return { allowed: true, reason: "stage has no capability declared" };
   }

-  const capabilities = options.capabilities ?? DEFAULT_CAPABILITIES;
+  // When the caller passes an explicit registry, use it as-is (tests).
+  // Otherwise merge bundled defaults with the daemon-synced org-authored
+  // capabilities, with synced ones taking precedence on name collisions
+  // so org overrides actually override.
+  const capabilities =
+    options.capabilities ?? { ...DEFAULT_CAPABILITIES, ...loadSyncedCapabilities() };
   const capability = capabilities[stage.capability];
   if (!capability) {
     return {
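The precedence in that merged registry falls out of object-spread ordering: later spreads overwrite earlier keys. Not part of the diff — a minimal sketch with simplified capability shapes, just to make the collision behavior concrete:

    // Illustration only: a synced capability with the same name replaces
    // the bundled default because it is spread last.
    const defaults = { builder: { name: "builder", write_globs: ["src/**", "tests/**"] } };
    const synced = { builder: { name: "builder", write_globs: ["tests/**"] } };
    const merged = { ...defaults, ...synced };
    console.log(merged.builder.write_globs); // ["tests/**"] — the synced entry wins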
@@ -7,3 +7,4 @@ export * from "./mcp-tools.js";
 export * from "./capabilities.js";
 export * from "./enforcement.js";
 export * from "./synced-registry.js";
+export * from "./synced-capability-registry.js";
@@ -0,0 +1,66 @@
+import { existsSync, readFileSync } from "node:fs";
+import { homedir } from "node:os";
+import { join } from "node:path";
+import {
+  capabilityDefinitionSchema,
+  type CapabilityDefinition,
+} from "./capabilities.js";
+
+/**
+ * Read side of the org-capability sync cache. The daemon's
+ * capability-sync-checker writes ~/.cortex/capabilities.local.json;
+ * this module reads it. Kept in core/workflow/ rather than daemon/ so
+ * enforcement.ts can consult the cache without depending on daemon code.
+ *
+ * Each entry is validated against capabilityDefinitionSchema before being
+ * surfaced — if the cache file is corrupt or contains stale shapes from
+ * an older daemon, those entries are silently dropped rather than
+ * crashing the read.
+ */
+
+export const SYNCED_CAPABILITIES_FILENAME = "capabilities.local.json";
+
+type LocalCapabilityRecord = {
+  capability_name: string;
+  updated_at: string;
+  definition: unknown;
+};
+
+type LocalCapabilitiesState = {
+  capabilities?: Record<string, LocalCapabilityRecord>;
+};
+
+export function syncedCapabilitiesCachePath(dir?: string): string {
+  return join(dir ?? join(homedir(), ".cortex"), SYNCED_CAPABILITIES_FILENAME);
+}
+
+/**
+ * Returns the synced org-authored capabilities keyed by capability name.
+ * Empty object when the cache is missing, unreadable, malformed, or
+ * contains no valid entries. The optional `dir` argument is for tests;
+ * production callers leave it unset.
+ */
+export function loadSyncedCapabilities(
+  dir?: string,
+): Record<string, CapabilityDefinition> {
+  const path = syncedCapabilitiesCachePath(dir);
+  if (!existsSync(path)) return {};
+
+  let parsed: LocalCapabilitiesState;
+  try {
+    parsed = JSON.parse(readFileSync(path, "utf8")) as LocalCapabilitiesState;
+  } catch {
+    return {};
+  }
+  const records = parsed.capabilities;
+  if (!records || typeof records !== "object") return {};
+
+  const out: Record<string, CapabilityDefinition> = {};
+  for (const [name, record] of Object.entries(records)) {
+    if (!record || typeof record !== "object") continue;
+    const result = capabilityDefinitionSchema.safeParse(record.definition);
+    if (!result.success) continue;
+    out[name] = result.data;
+  }
+  return out;
+}
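For reference, a cache payload this loader accepts looks like the following. This is a hypothetical example, not part of the diff; the `frontend-builder` definition mirrors the shape the package's own tests (further down) write into the cache:

    // Hypothetical contents of ~/.cortex/capabilities.local.json. The shape
    // follows LocalCapabilitiesState above; each `definition` must satisfy
    // capabilityDefinitionSchema or the entry is dropped on read.
    const examplePayload = {
      capabilities: {
        "frontend-builder": {
          capability_name: "frontend-builder",
          updated_at: "2026-05-07T12:00:00.000Z",
          definition: {
            name: "frontend-builder",
            description: "Frontend-only profile",
            read_globs: ["**"],
            write_globs: ["src/components/**"],
            tools_allowed: [],
          },
        },
      },
      last_synced_at: "2026-05-07T12:00:05.000Z",
    };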
@@ -0,0 +1,295 @@
+import {
+  existsSync,
+  readFileSync,
+  writeFileSync,
+} from "node:fs";
+import { hostname } from "node:os";
+import { join } from "node:path";
+import { loadEnterpriseConfig } from "../core/config.js";
+import {
+  capabilityDefinitionSchema,
+  type CapabilityDefinition,
+} from "../core/workflow/capabilities.js";
+import { writeHostAuditEvent } from "./ungoverned-scanner.js";
+import { daemonDir } from "./paths.js";
+
+/**
+ * Org-capability sync flow — daemon side.
+ *
+ * The daemon polls cortex-web /api/v1/govern/capabilities/manifest each
+ * tick to learn what capabilities the org has authored. It diffs against
+ * a local state file, fetches changed full definitions, and caches them
+ * locally. The pre-tool-use hook's evaluateToolCall consults the merged
+ * registry via loadSyncedCapabilities() with synced taking precedence
+ * over bundled DEFAULT_CAPABILITIES on name collisions.
+ *
+ * Three audit outcomes per tick:
+ *   - capabilities_unchanged — manifest matches local state
+ *   - capabilities_synced — at least one capability was added /
+ *     changed / removed (metadata: counts)
+ *   - capabilities_sync_failed — network / auth / parse error
+ */
+
+const STATE_FILENAME = "capabilities.local.json";
+
+type ManifestEntry = {
+  capability_name: string;
+  updated_at: string;
+};
+
+type FetchedCapability = {
+  capability_name: string;
+  description: string;
+  definition: CapabilityDefinition;
+  updated_at: string;
+};
+
+type LocalCapabilityRecord = {
+  capability_name: string;
+  updated_at: string;
+  definition: CapabilityDefinition;
+};
+
+type LocalCapabilitiesState = {
+  capabilities: Record<string, LocalCapabilityRecord>;
+  last_synced_at?: string;
+};
+
+export type CapabilitySyncOutcome =
+  | { kind: "unchanged"; count: number }
+  | {
+      kind: "synced";
+      added: string[];
+      changed: string[];
+      removed: string[];
+    }
+  | { kind: "failed"; error: string };
+
+function stateFilePath(): string {
+  return join(daemonDir(), STATE_FILENAME);
+}
+
+function readSyncedCapabilitiesState(): LocalCapabilitiesState {
+  const path = stateFilePath();
+  if (!existsSync(path)) return { capabilities: {} };
+  try {
+    const parsed = JSON.parse(readFileSync(path, "utf8")) as LocalCapabilitiesState;
+    return {
+      capabilities: parsed.capabilities ?? {},
+      last_synced_at: parsed.last_synced_at,
+    };
+  } catch {
+    return { capabilities: {} };
+  }
+}
+
+function writeSyncedCapabilitiesState(state: LocalCapabilitiesState): void {
+  writeFileSync(
+    stateFilePath(),
+    JSON.stringify(state, null, 2) + "\n",
+    "utf8",
+  );
+}
+
+async function fetchManifest(
+  baseUrl: string,
+  apiKey: string,
+): Promise<ManifestEntry[]> {
+  const url = new URL(
+    baseUrl.replace(/\/$/, "") + "/api/v1/govern/capabilities/manifest",
+  );
+  const res = await fetch(url, {
+    headers: { Authorization: `Bearer ${apiKey}` },
+  });
+  if (!res.ok) {
+    throw new Error(`HTTP ${res.status} ${res.statusText}`);
+  }
+  const body = (await res.json()) as { capabilities?: ManifestEntry[] };
+  return body.capabilities ?? [];
+}
+
+async function fetchCapability(
+  baseUrl: string,
+  apiKey: string,
+  capabilityName: string,
+): Promise<FetchedCapability> {
+  const url = new URL(
+    baseUrl.replace(/\/$/, "") +
+      "/api/v1/govern/capabilities/" +
+      encodeURIComponent(capabilityName),
+  );
+  const res = await fetch(url, {
+    headers: { Authorization: `Bearer ${apiKey}` },
+  });
+  if (!res.ok) {
+    throw new Error(`HTTP ${res.status} ${res.statusText}`);
+  }
+  const body = (await res.json()) as { capability?: FetchedCapability };
+  if (!body.capability) {
+    throw new Error(`Response for ${capabilityName} missing 'capability' field`);
+  }
+  return body.capability;
+}
+
+export async function runCapabilitySyncOnce(
+  cwd: string,
+): Promise<CapabilitySyncOutcome> {
+  const config = loadEnterpriseConfig(join(cwd, ".context"));
+  const apiKey = config.enterprise.api_key.trim();
+  const baseUrl = (config.enterprise.base_url || config.enterprise.endpoint).trim();
+  if (!apiKey || !baseUrl) {
+    const outcome: CapabilitySyncOutcome = {
+      kind: "failed",
+      error: "enterprise not configured",
+    };
+    await writeAudit(cwd, outcome);
+    return outcome;
+  }
+
+  let manifest: ManifestEntry[];
+  try {
+    manifest = await fetchManifest(baseUrl, apiKey);
+  } catch (err) {
+    const outcome: CapabilitySyncOutcome = {
+      kind: "failed",
+      error: err instanceof Error ? err.message : String(err),
+    };
+    await writeAudit(cwd, outcome);
+    return outcome;
+  }
+
+  const state = readSyncedCapabilitiesState();
+  const remoteByName = new Map(manifest.map((e) => [e.capability_name, e]));
+
+  const added: string[] = [];
+  const changed: string[] = [];
+  const removed: string[] = [];
+
+  for (const entry of manifest) {
+    const local = state.capabilities[entry.capability_name];
+    const isNew = !local;
+    const isChanged =
+      Boolean(local) && local.updated_at !== entry.updated_at;
+    if (!isNew && !isChanged) continue;
+
+    let fetched: FetchedCapability;
+    try {
+      fetched = await fetchCapability(baseUrl, apiKey, entry.capability_name);
+    } catch (err) {
+      const outcome: CapabilitySyncOutcome = {
+        kind: "failed",
+        error:
+          err instanceof Error
+            ? `fetch ${entry.capability_name}: ${err.message}`
+            : `fetch ${entry.capability_name}: ${String(err)}`,
+      };
+      await writeAudit(cwd, outcome);
+      return outcome;
+    }
+
+    let validated: CapabilityDefinition;
+    try {
+      validated = capabilityDefinitionSchema.parse(fetched.definition);
+    } catch (err) {
+      const outcome: CapabilitySyncOutcome = {
+        kind: "failed",
+        error:
+          err instanceof Error
+            ? `validate ${entry.capability_name}: ${err.message}`
+            : `validate ${entry.capability_name}: ${String(err)}`,
+      };
+      await writeAudit(cwd, outcome);
+      return outcome;
+    }
+
+    state.capabilities[entry.capability_name] = {
+      capability_name: entry.capability_name,
+      updated_at: fetched.updated_at,
+      definition: validated,
+    };
+    (isNew ? added : changed).push(entry.capability_name);
+  }
+
+  for (const name of Object.keys(state.capabilities)) {
+    if (remoteByName.has(name)) continue;
+    delete state.capabilities[name];
+    removed.push(name);
+  }
+
+  const totalChanged = added.length + changed.length + removed.length;
+  if (totalChanged === 0) {
+    const outcome: CapabilitySyncOutcome = {
+      kind: "unchanged",
+      count: manifest.length,
+    };
+    await writeAudit(cwd, outcome);
+    return outcome;
+  }
+
+  state.last_synced_at = new Date().toISOString();
+  writeSyncedCapabilitiesState(state);
+  const outcome: CapabilitySyncOutcome = {
+    kind: "synced",
+    added,
+    changed,
+    removed,
+  };
+  await writeAudit(cwd, outcome);
+  return outcome;
+}
+
+async function writeAudit(cwd: string, outcome: CapabilitySyncOutcome): Promise<void> {
+  const eventBase = {
+    timestamp: new Date().toISOString(),
+    host_id: hostname(),
+  };
+  if (outcome.kind === "unchanged") {
+    await writeHostAuditEvent(cwd, {
+      ...eventBase,
+      event_type: "capabilities_unchanged",
+      count: outcome.count,
+    }).catch(() => undefined);
+  } else if (outcome.kind === "synced") {
+    await writeHostAuditEvent(cwd, {
+      ...eventBase,
+      event_type: "capabilities_synced",
+      added: outcome.added,
+      changed: outcome.changed,
+      removed: outcome.removed,
+    }).catch(() => undefined);
+  } else {
+    await writeHostAuditEvent(cwd, {
+      ...eventBase,
+      event_type: "capabilities_sync_failed",
+      error: outcome.error,
+    }).catch(() => undefined);
+  }
+}
+
+export type CapabilitySyncTimerHandle = {
+  stop(): void;
+};
+
+export function startCapabilitySyncTimer(
+  cwd: string,
+  intervalMs: number,
+): CapabilitySyncTimerHandle {
+  const tick = () => {
+    void runCapabilitySyncOnce(cwd).catch((err) => {
+      process.stderr.write(
+        `[cortex-daemon] capability sync failed: ${
+          err instanceof Error ? err.message : String(err)
+        }\n`,
+      );
+    });
+  };
+
+  void Promise.resolve().then(tick);
+  const handle = setInterval(tick, intervalMs);
+  if (typeof handle.unref === "function") handle.unref();
+  return {
+    stop() {
+      clearInterval(handle);
+    },
+  };
+}
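The file exports two entry points: a one-shot sync and a polling timer. A minimal usage sketch (not part of the diff; the interval value is illustrative, while the module specifier matches the daemon's own import in the next hunk):

    import {
      runCapabilitySyncOnce,
      startCapabilitySyncTimer,
    } from "./capability-sync-checker.js";

    // One-shot sync, e.g. from a CLI command (ESM top-level await assumed):
    const outcome = await runCapabilitySyncOnce(process.cwd());
    if (outcome.kind === "failed") {
      console.error(`capability sync failed: ${outcome.error}`);
    }

    // Or poll on a timer; the interval handle is unref'd above, so the
    // timer alone will not keep the process alive.
    const timer = startCapabilitySyncTimer(process.cwd(), 5 * 60_000);
    // ...on shutdown:
    timer.stop();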
@@ -29,6 +29,7 @@ import {
 import { startSyncTimer } from "./sync-checker.js";
 import { startSkillSyncTimer } from "./skill-sync-checker.js";
 import { startWorkflowSyncTimer } from "./workflow-sync-checker.js";
+import { startCapabilitySyncTimer } from "./capability-sync-checker.js";
 import { startHostEventsPusher } from "./host-events-pusher.js";
 import { startEgressProxy } from "./egress-proxy.js";
 import { startHeartbeatPusher } from "./heartbeat-pusher.js";
@@ -372,6 +373,20 @@ async function main(): Promise<void> {
     startWorkflowSyncTimer(process.cwd(), workflowSyncMs);
   }

+  // Harness Phase 2: poll cortex-web for org-authored capabilities and
+  // cache definitions locally so evaluateToolCall can merge them over
+  // bundled DEFAULT_CAPABILITIES on the pre-tool-use path. Same cadence
+  // as the workflow sync by default; independently configurable via
+  // CORTEX_CAPABILITY_SYNC_MS / CORTEX_DISABLE_CAPABILITY_SYNC.
+  const capabilitySyncRaw = parseInt(process.env.CORTEX_CAPABILITY_SYNC_MS ?? "", 10);
+  const capabilitySyncMs =
+    Number.isFinite(capabilitySyncRaw) && capabilitySyncRaw > 0
+      ? capabilitySyncRaw
+      : workflowSyncMs;
+  if (process.env.CORTEX_DISABLE_CAPABILITY_SYNC !== "1") {
+    startCapabilitySyncTimer(process.cwd(), capabilitySyncMs);
+  }
+
   // Govern host heartbeat — fills host_enrollment on cortex-web so the
   // dashboard at /dashboard/govern actually shows this host.
   const heartbeatRaw = parseInt(process.env.CORTEX_HEARTBEAT_PUSH_MS ?? "", 10);
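The cadence logic above reduces to a small pure function. A sketch restating it, not part of the diff (the helper name is hypothetical):

    // Returns the polling interval in ms, or null when the timer is disabled.
    function capabilitySyncInterval(
      env: NodeJS.ProcessEnv,
      workflowSyncMs: number,
    ): number | null {
      if (env.CORTEX_DISABLE_CAPABILITY_SYNC === "1") return null;
      const raw = parseInt(env.CORTEX_CAPABILITY_SYNC_MS ?? "", 10);
      return Number.isFinite(raw) && raw > 0 ? raw : workflowSyncMs;
    }

    // capabilitySyncInterval({ CORTEX_CAPABILITY_SYNC_MS: "60000" }, 300000)  === 60000
    // capabilitySyncInterval({}, 300000)                                      === 300000
    // capabilitySyncInterval({ CORTEX_DISABLE_CAPABILITY_SYNC: "1" }, 300000) === null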
@@ -1,20 +1,15 @@
 import crypto from "node:crypto";
 import fs from "node:fs";
 import path from "node:path";
-import { fileURLToPath } from "node:url";
 import { env, pipeline } from "@huggingface/transformers";
 import { readJsonl, asString, asNumber, asBoolean } from "./jsonl.js";
+import { CACHE_DIR, PATHS } from "./paths.js";
 import type { JsonObject, JsonValue } from "./types.js";

-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-const REPO_ROOT = path.resolve(__dirname, "../..");
-const CONTEXT_DIR = path.join(REPO_ROOT, ".context");
-const CACHE_DIR = path.join(CONTEXT_DIR, "cache");
-const EMBEDDINGS_DIR = path.join(CONTEXT_DIR, "embeddings");
-const EMBEDDINGS_PATH = path.join(EMBEDDINGS_DIR, "entities.jsonl");
-const EMBEDDINGS_MANIFEST_PATH = path.join(EMBEDDINGS_DIR, "manifest.json");
-const MODEL_CACHE_DIR = path.join(EMBEDDINGS_DIR, "models");
+const EMBEDDINGS_PATH = PATHS.embeddingsEntities;
+const EMBEDDINGS_MANIFEST_PATH = PATHS.embeddingsManifest;
+const MODEL_CACHE_DIR = PATHS.embeddingsModelCache;
+const EMBEDDINGS_DIR = path.dirname(EMBEDDINGS_PATH);

 const DEFAULT_MODEL_ID = "Xenova/all-MiniLM-L6-v2";
 const DEFAULT_MAX_TEXT_CHARS = 7000;
@@ -1,16 +1,10 @@
 import fs from "node:fs";
 import path from "node:path";
-import { fileURLToPath } from "node:url";
 import ryugraph, { type Connection, type PreparedStatement, type QueryResult, type RyuValue } from "ryugraph";
 import { readJsonl, asString, asNumber, asBoolean } from "./jsonl.js";
+import { CACHE_DIR, CONTEXT_DIR, DB_PATH } from "./paths.js";
 import type { JsonObject } from "./types.js";

-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-const REPO_ROOT = path.resolve(__dirname, "../..");
-const CONTEXT_DIR = path.join(REPO_ROOT, ".context");
-const CACHE_DIR = path.join(CONTEXT_DIR, "cache");
-const DB_PATH = path.join(CONTEXT_DIR, "db", "graph.ryu");
 const ONTOLOGY_PATH = path.join(CONTEXT_DIR, "ontology.cypher");
 const BATCH_SIZE = 50;

@@ -21,9 +21,45 @@ function normalizeForWsl(rawPath: string): string {
 }

 const PROJECT_ROOT_OVERRIDE = process.env.CORTEX_PROJECT_ROOT?.trim();
-export const REPO_ROOT = PROJECT_ROOT_OVERRIDE
-  ? path.resolve(normalizeForWsl(PROJECT_ROOT_OVERRIDE))
-  : path.resolve(__dirname, "../..");
+
+function hasContextConfig(candidate: string): boolean {
+  return fs.existsSync(path.join(candidate, ".context", "config.yaml"));
+}
+
+function resolveFrom(startDir: string): string | null {
+  let current = path.resolve(startDir);
+  while (true) {
+    if (hasContextConfig(current)) {
+      return current;
+    }
+
+    const parent = path.dirname(current);
+    if (parent === current) {
+      return null;
+    }
+    current = parent;
+  }
+}
+
+function resolveRepoRoot(): string {
+  const candidates = [
+    PROJECT_ROOT_OVERRIDE ? path.resolve(normalizeForWsl(PROJECT_ROOT_OVERRIDE)) : null,
+    process.cwd(),
+    __dirname,
+    process.env.INIT_CWD?.trim() ? path.resolve(normalizeForWsl(process.env.INIT_CWD.trim())) : null
+  ].filter((value): value is string => Boolean(value));
+
+  for (const candidate of candidates) {
+    const resolved = resolveFrom(candidate);
+    if (resolved) {
+      return resolved;
+    }
+  }
+
+  return path.resolve(__dirname, "../../..");
+}
+
+export const REPO_ROOT = resolveRepoRoot();
 export const CONTEXT_DIR = path.join(REPO_ROOT, ".context");
 export const CACHE_DIR = path.join(CONTEXT_DIR, "cache");
 export const DB_PATH = path.join(CONTEXT_DIR, "db", "graph.ryu");
@@ -0,0 +1,38 @@
+import test from "node:test";
+import assert from "node:assert/strict";
+import { mkdtempSync, mkdirSync, realpathSync, writeFileSync } from "node:fs";
+import { tmpdir } from "node:os";
+import path from "node:path";
+import { spawnSync } from "node:child_process";
+import { pathToFileURL } from "node:url";
+
+test("paths resolve the project root from cwd without duplicating .context", () => {
+  const projectRoot = mkdtempSync(path.join(tmpdir(), "cortex-paths-"));
+  const contextDir = path.join(projectRoot, ".context");
+  const mcpDir = path.join(contextDir, "mcp");
+  const pathsModuleUrl = pathToFileURL(path.resolve("dist/paths.js")).href;
+
+  mkdirSync(mcpDir, { recursive: true });
+  writeFileSync(path.join(contextDir, "config.yaml"), "source_paths:\n - src\n");
+
+  const result = spawnSync(
+    process.execPath,
+    [
+      "--input-type=module",
+      "-e",
+      `import { REPO_ROOT, CONTEXT_DIR, CACHE_DIR } from ${JSON.stringify(pathsModuleUrl)}; console.log(JSON.stringify({ REPO_ROOT, CONTEXT_DIR, CACHE_DIR }));`
+    ],
+    {
+      cwd: mcpDir,
+      encoding: "utf8"
+    }
+  );
+
+  assert.equal(result.status, 0, result.stderr);
+
+  const parsed = JSON.parse(result.stdout.trim());
+  const resolvedProjectRoot = realpathSync(projectRoot);
+  assert.equal(parsed.REPO_ROOT, resolvedProjectRoot);
+  assert.equal(parsed.CONTEXT_DIR, path.join(resolvedProjectRoot, ".context"));
+  assert.equal(parsed.CACHE_DIR, path.join(resolvedProjectRoot, ".context", "cache"));
+});
@@ -0,0 +1,226 @@
+import test from "node:test";
+import assert from "node:assert/strict";
+import fs from "node:fs";
+import os from "node:os";
+import path from "node:path";
+
+import {
+  loadSyncedCapabilities,
+  syncedCapabilitiesCachePath,
+} from "../dist/core/workflow/synced-capability-registry.js";
+import { evaluateToolCall } from "../dist/core/workflow/enforcement.js";
+import { createRun } from "../dist/core/workflow/run-lifecycle.js";
+
+function makeWorkspace() {
+  return fs.mkdtempSync(path.join(os.tmpdir(), "cortex-synced-caps-"));
+}
+
+const FRONTEND_BUILDER = {
+  name: "frontend-builder",
+  description: "Frontend-only profile",
+  read_globs: ["**"],
+  write_globs: ["src/components/**"],
+  tools_allowed: [],
+};
+
+const WORKFLOW = {
+  id: "fe",
+  description: "Frontend-only build",
+  version: 1,
+  stages: [
+    {
+      name: "build",
+      artifact: "changes.md",
+      reads: [],
+      required_fields: [],
+      validators: [],
+      capability: "frontend-builder",
+      description: "Build the frontend",
+    },
+  ],
+};
+
+function writeCache(dir, payload) {
+  fs.mkdirSync(dir, { recursive: true });
+  fs.writeFileSync(
+    syncedCapabilitiesCachePath(dir),
+    JSON.stringify(payload, null, 2),
+    "utf8",
+  );
+}
+
+test("syncedCapabilitiesCachePath: defaults to ~/.cortex/capabilities.local.json", () => {
+  const expected = path.join(os.homedir(), ".cortex", "capabilities.local.json");
+  assert.equal(syncedCapabilitiesCachePath(), expected);
+});
+
+test("loadSyncedCapabilities: returns {} when cache is missing", () => {
+  const dir = makeWorkspace();
+  assert.deepEqual(loadSyncedCapabilities(dir), {});
+});
+
+test("loadSyncedCapabilities: returns {} when cache is unreadable JSON", () => {
+  const dir = makeWorkspace();
+  fs.writeFileSync(syncedCapabilitiesCachePath(dir), "not json", "utf8");
+  assert.deepEqual(loadSyncedCapabilities(dir), {});
+});
+
+test("loadSyncedCapabilities: returns {} when 'capabilities' key is missing", () => {
+  const dir = makeWorkspace();
+  writeCache(dir, { last_synced_at: "x" });
+  assert.deepEqual(loadSyncedCapabilities(dir), {});
+});
+
+test("loadSyncedCapabilities: drops entries whose definition fails schema", () => {
+  const dir = makeWorkspace();
+  writeCache(dir, {
+    capabilities: {
+      "valid-one": {
+        capability_name: "valid-one",
+        updated_at: "2026-05-07T12:00:00.000Z",
+        definition: FRONTEND_BUILDER,
+      },
+      "broken-one": {
+        capability_name: "broken-one",
+        updated_at: "2026-05-07T12:00:00.000Z",
+        definition: { name: "broken-one" /* missing description */ },
+      },
+    },
+  });
+  const loaded = loadSyncedCapabilities(dir);
+  assert.deepEqual(Object.keys(loaded).sort(), ["valid-one"]);
+});
+
+test("loadSyncedCapabilities: returns valid capability definitions", () => {
+  const dir = makeWorkspace();
+  writeCache(dir, {
+    capabilities: {
+      "frontend-builder": {
+        capability_name: "frontend-builder",
+        updated_at: "2026-05-07T12:00:00.000Z",
+        definition: FRONTEND_BUILDER,
+      },
+    },
+  });
+  const loaded = loadSyncedCapabilities(dir);
+  assert.equal(loaded["frontend-builder"].name, "frontend-builder");
+  assert.deepEqual(loaded["frontend-builder"].write_globs, ["src/components/**"]);
+});
+
+test("evaluateToolCall integration: synced capability is consulted via merged registry", () => {
+  const cwd = makeWorkspace();
+  // Sandbox the home-dir-based loader.
+  const fakeHome = makeWorkspace();
+  process.env.HOME = fakeHome;
+  try {
+    fs.mkdirSync(path.join(fakeHome, ".cortex"), { recursive: true });
+    fs.writeFileSync(
+      path.join(fakeHome, ".cortex", "capabilities.local.json"),
+      JSON.stringify({
+        capabilities: {
+          "frontend-builder": {
+            capability_name: "frontend-builder",
+            updated_at: "2026-05-07T12:00:00.000Z",
+            definition: FRONTEND_BUILDER,
+          },
+        },
+      }),
+      "utf8",
+    );
+
+    createRun({
+      cwd,
+      taskId: "task-1",
+      workflow: WORKFLOW,
+      taskDescription: "Build frontend",
+    });
+
+    // Allowed: src/components/**
+    const allowed = evaluateToolCall({
+      cwd,
+      taskId: "task-1",
+      call: { toolName: "Edit", toolInput: { file_path: "src/components/Foo.tsx" } },
+      workflows: { fe: WORKFLOW },
+    });
+    assert.equal(allowed.allowed, true);
+
+    // Blocked: outside write_globs
+    const blocked = evaluateToolCall({
+      cwd,
+      taskId: "task-1",
+      call: { toolName: "Edit", toolInput: { file_path: "src/server/api.ts" } },
+      workflows: { fe: WORKFLOW },
+    });
+    assert.equal(blocked.allowed, false);
+    assert.match(blocked.reason, /frontend-builder/);
+  } finally {
+    delete process.env.HOME;
+  }
+});
+
+test("evaluateToolCall integration: synced capability with same name as bundled overrides bundled", () => {
+  const cwd = makeWorkspace();
+  const fakeHome = makeWorkspace();
+  process.env.HOME = fakeHome;
+  try {
+    // Override the bundled "builder" capability with a much stricter version
+    // — only test files writable.
+    const stricterBuilder = {
+      name: "builder",
+      description: "Org-overridden strict builder",
+      read_globs: ["**"],
+      write_globs: ["tests/**"],
+      tools_allowed: [],
+    };
+    fs.mkdirSync(path.join(fakeHome, ".cortex"), { recursive: true });
+    fs.writeFileSync(
+      path.join(fakeHome, ".cortex", "capabilities.local.json"),
+      JSON.stringify({
+        capabilities: {
+          builder: {
+            capability_name: "builder",
+            updated_at: "2026-05-07T12:00:00.000Z",
+            definition: stricterBuilder,
+          },
+        },
+      }),
+      "utf8",
+    );
+
+    const builderWorkflow = {
+      id: "build-only",
+      description: "Build-only workflow using bundled name",
+      version: 1,
+      stages: [
+        {
+          name: "build",
+          artifact: "changes.md",
+          reads: [],
+          required_fields: [],
+          validators: [],
+          capability: "builder",
+          description: "Build",
+        },
+      ],
+    };
+    createRun({
+      cwd,
+      taskId: "task-2",
+      workflow: builderWorkflow,
+      taskDescription: "Build",
+    });
+
+    // Bundled builder allows src/** + tests/**, but the org override only
+    // allows tests/**. src/** must be blocked under the override.
+    const result = evaluateToolCall({
+      cwd,
+      taskId: "task-2",
+      call: { toolName: "Edit", toolInput: { file_path: "src/main.ts" } },
+      workflows: { "build-only": builderWorkflow },
+    });
+    assert.equal(result.allowed, false);
+    assert.match(result.reason, /Org-overridden|tests/i);
+  } finally {
+    delete process.env.HOME;
+  }
+});
@@ -12,4 +12,4 @@ fi
 mkdir -p "$MCP_DIR/.npm-cache"

 echo "[embed] generating embeddings via .context/mcp/embed"
-NPM_CONFIG_CACHE="$MCP_DIR/.npm-cache" npm --prefix "$MCP_DIR" run embed --silent -- "$@"
+CORTEX_PROJECT_ROOT="$REPO_ROOT" NPM_CONFIG_CACHE="$MCP_DIR/.npm-cache" npm --prefix "$MCP_DIR" run embed --silent -- "$@"
@@ -15,4 +15,4 @@ if [[ ! -d "$MCP_DIR/node_modules" ]]; then
   exit 1
 fi

-NPM_CONFIG_CACHE="$MCP_DIR/.npm-cache" npm --prefix "$MCP_DIR" run graph:load -- "$@"
+CORTEX_PROJECT_ROOT="$REPO_ROOT" NPM_CONFIG_CACHE="$MCP_DIR/.npm-cache" npm --prefix "$MCP_DIR" run graph:load -- "$@"