@papyruslabsai/seshat-mcp 0.4.2 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -13,12 +13,13 @@
13
13
  * Single-project mode (default):
14
14
  * When SESHAT_PROJECTS is not set, loads from CWD. No `project` param needed.
15
15
  *
16
- * 18 tools across 3 categories:
16
+ * 23 tools across 5 categories:
17
17
  * Discovery: list_projects, query_entities, get_entity, list_modules, get_topology
18
18
  * Graph: get_dependencies, get_data_flow, find_by_constraint, get_blast_radius
19
19
  * Analysis: find_dead_code, find_layer_violations, get_coupling_metrics,
20
20
  * get_auth_matrix, find_error_gaps, get_test_coverage,
21
21
  * get_optimal_context, estimate_task_cost, report_actual_burn
22
+ * Semantic: find_runtime_violations, find_ownership_violations, query_traits
+ * Diff: diff_bundle, conflict_matrix
22
23
  *
23
24
  * Usage:
24
25
  * npx @papyruslabsai/seshat-mcp # single project (CWD)
package/dist/index.js CHANGED
@@ -13,12 +13,13 @@
13
13
  * Single-project mode (default):
14
14
  * When SESHAT_PROJECTS is not set, loads from CWD. No `project` param needed.
15
15
  *
16
- * 18 tools across 3 categories:
16
+ * 23 tools across 5 categories:
17
17
  * Discovery: list_projects, query_entities, get_entity, list_modules, get_topology
18
18
  * Graph: get_dependencies, get_data_flow, find_by_constraint, get_blast_radius
19
19
  * Analysis: find_dead_code, find_layer_violations, get_coupling_metrics,
20
20
  * get_auth_matrix, find_error_gaps, get_test_coverage,
21
21
  * get_optimal_context, estimate_task_cost, report_actual_burn
22
+ * Semantic: find_runtime_violations, find_ownership_violations, query_traits
+ * Diff: diff_bundle, conflict_matrix
22
23
  *
23
24
  * Usage:
24
25
  * npx @papyruslabsai/seshat-mcp # single project (CWD)
@@ -33,7 +34,8 @@ import { CallToolRequestSchema, ListToolsRequestSchema, } from '@modelcontextpro
33
34
  import { MultiLoader } from './loader.js';
34
35
  import { bootstrap } from './bootstrap.js';
35
36
  import { initTools, queryEntities, getEntity, getDependencies, getDataFlow, findByConstraint, getBlastRadius, listModules, getTopology, } from './tools/index.js';
36
- import { findDeadCode, findLayerViolations, getCouplingMetrics, getAuthMatrix, findErrorGaps, getTestCoverage, getOptimalContext, estimateTaskCost, reportActualBurn, } from './tools/functors.js';
37
+ import { findDeadCode, findLayerViolations, getCouplingMetrics, getAuthMatrix, findErrorGaps, getTestCoverage, getOptimalContext, estimateTaskCost, reportActualBurn, find_runtime_violations, find_ownership_violations, query_traits, } from './tools/functors.js';
38
+ import { diffBundle, conflictMatrix, } from './tools/diff.js';
37
39
  // ─── Project Discovery ───────────────────────────────────────────
38
40
  /**
39
41
  * Discover project directories from SESHAT_PROJECTS env var.
@@ -386,6 +388,101 @@ const TOOLS = [
386
388
  },
387
389
  },
388
390
  },
391
+ // ─── Semantic (9D) JSTF-T Tools ─────────────────────────────────
392
+ {
393
+ name: 'find_runtime_violations',
394
+ description: 'Analyze the call graph across the ρ (Runtime) dimension. Finds architectural leaks where framework-agnostic code improperly imports framework-specific code (e.g. pure logic calling React hooks) or where incompatible frameworks mix directly.',
395
+ inputSchema: {
396
+ type: 'object',
397
+ properties: {
398
+ project: projectParam,
399
+ },
400
+ },
401
+ },
402
+ {
403
+ name: 'find_ownership_violations',
404
+ description: 'Analyze the codebase across the λ (Ownership/Lifetimes) dimension. Flags entities with complex memory management constraints, unsafe blocks, escaping boundaries, or illegal mutability patterns on borrowed references.',
405
+ inputSchema: {
406
+ type: 'object',
407
+ properties: {
408
+ project: projectParam,
409
+ },
410
+ },
411
+ },
412
+ {
413
+ name: 'query_traits',
414
+ description: 'Search the codebase across the τ (Traits/Capabilities) dimension. Allows you to find entities by their abstract capabilities (e.g., "fallible", "asyncContext", "generator") regardless of their structural syntax.',
415
+ inputSchema: {
416
+ type: 'object',
417
+ properties: {
418
+ project: projectParam,
419
+ trait: {
420
+ type: 'string',
421
+ description: 'The trait or capability to search for (e.g., "fallible", "asyncContext")',
422
+ },
423
+ },
424
+ required: ['trait'],
425
+ },
426
+ },
427
+ // ─── Diff Tools ─────────────────────────────────────────────────
428
+ {
429
+ name: 'diff_bundle',
430
+ description: 'Compare entities between a worktree and the loaded project. Shows which entities were added, removed, or modified at the symbol level — not a line diff, but a structural diff showing changed signatures, call graphs, constraints, and logic. Extracts the worktree automatically if no bundle exists.',
431
+ inputSchema: {
432
+ type: 'object',
433
+ properties: {
434
+ project: projectParam,
435
+ worktree_path: {
436
+ type: 'string',
437
+ description: 'Absolute path to the worktree or branch checkout to compare against the loaded project',
438
+ },
439
+ include_unchanged: {
440
+ type: 'boolean',
441
+ description: 'Include unchanged entities in the output (default: false)',
442
+ },
443
+ },
444
+ required: ['worktree_path'],
445
+ },
446
+ },
447
+ {
448
+ name: 'conflict_matrix',
449
+ description: 'Given multiple tasks, classify every task pair into conflict tiers bridging JSTF-T theory and Git reality. Tier 1 (different files, safe), Tier 2 (same file, different entities, safe), Tier 3 (same entity, orthogonal Spatial Zones like imports vs logic, risky but parallelizable), Tier 4 (same entity, same Spatial Zone, MUST sequence). Passing "dimensions" per task enables Tier 3 downgrades.',
450
+ inputSchema: {
451
+ type: 'object',
452
+ properties: {
453
+ project: projectParam,
454
+ tasks: {
455
+ type: 'array',
456
+ items: {
457
+ type: 'object',
458
+ properties: {
459
+ id: {
460
+ type: 'string',
461
+ description: 'Unique task identifier (e.g. "add-dark-mode")',
462
+ },
463
+ entity_ids: {
464
+ type: 'array',
465
+ items: { type: 'string' },
466
+ description: 'Entity IDs or names that this task will modify',
467
+ },
468
+ dimensions: {
469
+ type: 'array',
470
+ items: { type: 'string' },
471
+ description: 'Optional: JSTF-T dimensions this task will modify (e.g., "edges", "struct", "semantics", "constraints"). Used to downgrade conflicts via Spatial Zones.',
472
+ },
473
+ expand_blast_radius: {
474
+ type: 'boolean',
475
+ description: 'Include transitively affected entities in the conflict check (default: false)',
476
+ },
477
+ },
478
+ required: ['id', 'entity_ids'],
479
+ },
480
+ description: 'Array of tasks to check for conflicts. Each task specifies which entities it will modify.',
481
+ },
482
+ },
483
+ required: ['tasks'],
484
+ },
485
+ },
389
486
  ];
390
487
  // ─── Server Setup ─────────────────────────────────────────────────
391
488
  async function main() {
@@ -441,7 +538,7 @@ async function main() {
441
538
  }
442
539
  const server = new Server({
443
540
  name: serverLabel,
444
- version: '0.4.2',
541
+ version: '0.6.0',
445
542
  }, {
446
543
  capabilities: {
447
544
  tools: {},
@@ -531,6 +628,23 @@ async function main() {
531
628
  case 'report_actual_burn':
532
629
  result = await reportActualBurn(args);
533
630
  break;
631
+ // Semantic (9D) JSTF-T Tools
632
+ case 'find_runtime_violations':
633
+ result = find_runtime_violations(args);
634
+ break;
635
+ case 'find_ownership_violations':
636
+ result = find_ownership_violations(args);
637
+ break;
638
+ case 'query_traits':
639
+ result = query_traits(args);
640
+ break;
641
+ // Diff Tools
642
+ case 'diff_bundle':
643
+ result = await diffBundle(args);
644
+ break;
645
+ case 'conflict_matrix':
646
+ result = conflictMatrix(args);
647
+ break;
534
648
  default:
535
649
  result = { error: `Unknown tool: ${name}` };
536
650
  }
@@ -0,0 +1,23 @@
1
+ /**
2
+ * Cross-Bundle Analysis Tools: diff_bundle + conflict_matrix
3
+ *
4
+ * diff_bundle: Compare entities between a worktree and the loaded project.
5
+ * conflict_matrix: Classify conflict tiers for parallel task scheduling.
6
+ *
7
+ * These tools compare TWO entity sets (base vs branch, or task vs task),
8
+ * unlike the single-bundle queries in index.ts and functors.ts.
9
+ */
10
+ export declare function diffBundle(args: {
11
+ worktree_path: string;
12
+ project?: string;
13
+ include_unchanged?: boolean;
14
+ }): Promise<unknown>;
15
+ export declare function conflictMatrix(args: {
16
+ tasks: Array<{
17
+ id: string;
18
+ entity_ids: string[];
19
+ dimensions?: string[];
20
+ expand_blast_radius?: boolean;
21
+ }>;
22
+ project?: string;
23
+ }): unknown;
@@ -0,0 +1,491 @@
1
+ /**
2
+ * Cross-Bundle Analysis Tools: diff_bundle + conflict_matrix
3
+ *
4
+ * diff_bundle: Compare entities between a worktree and the loaded project.
5
+ * conflict_matrix: Classify conflict tiers for parallel task scheduling.
6
+ *
7
+ * These tools compare TWO entity sets (base vs branch, or task vs task),
8
+ * unlike the single-bundle queries in index.ts and functors.ts.
9
+ */
10
+ import fs from 'fs';
11
+ import path from 'path';
12
+ import { bootstrap } from '../bootstrap.js';
13
+ import { computeBlastRadius } from '../graph.js';
14
+ import { getLoader, getGraph, validateProject, entityName, entityLayer, } from './index.js';
15
+ // ─── Entity Identity ─────────────────────────────────────────────
16
+ // Ported from api-v2/translator/seshat-pipeline/src/incremental/diff-engine.mjs
17
+ /**
18
+ * Generate a unique key for an entity: <relative_path>::<id>
19
+ *
20
+ * Uses _sourceFile (already relative in bundles) or derives from
21
+ * sourceFile (absolute path) by stripping the repo root.
22
+ */
23
+ function entityKey(entity, repoRoot) {
24
+ let relativePath = entity._sourceFile || null;
25
+ // If _sourceFile not set, try deriving from sourceFile (absolute)
26
+ if (!relativePath) {
27
+ const raw = entity;
28
+ const sourceFile = (raw.sourceFile || '').replace(/\\/g, '/');
29
+ if (repoRoot && sourceFile) {
30
+ const normalizedRoot = repoRoot.replace(/\\/g, '/').replace(/\/+$/, '') + '/';
31
+ if (sourceFile.startsWith(normalizedRoot)) {
32
+ relativePath = sourceFile.substring(normalizedRoot.length);
33
+ }
34
+ else {
35
+ // Case-insensitive match (Windows paths)
36
+ const lowerFile = sourceFile.toLowerCase();
37
+ const lowerRoot = normalizedRoot.toLowerCase();
38
+ if (lowerFile.startsWith(lowerRoot)) {
39
+ relativePath = sourceFile.substring(normalizedRoot.length);
40
+ }
41
+ }
42
+ }
43
+ if (!relativePath) {
44
+ relativePath = sourceFile || 'unknown';
45
+ }
46
+ }
47
+ // Normalize path separators
48
+ relativePath = (relativePath || 'unknown').replace(/\\/g, '/');
49
+ const id = entity.id || (typeof entity.struct === 'string' ? entity.struct : entity.struct?.name) || 'anonymous';
50
+ return `${relativePath}::${id}`;
51
+ }
52
+ // ─── Bundle Loading ───────────────────────────────────────────────
53
+ /**
54
+ * Load a branch bundle from a worktree path.
55
+ * Does NOT add to the global MultiLoader — this is a transient comparison target.
56
+ *
57
+ * 1. Check <worktree_path>/.seshat/_bundle.json — parse if exists
58
+ * 2. If not, run bootstrap() to extract on-the-fly
59
+ * 3. Apply the same field remapping as BundleLoader (sourceFile → _sourceFile, etc.)
60
+ */
61
+ async function loadBranchBundle(worktreePath) {
62
+ const absPath = path.resolve(worktreePath);
63
+ const bundlePath = path.join(absPath, '.seshat', '_bundle.json');
64
+ let bundle;
65
+ if (fs.existsSync(bundlePath)) {
66
+ const raw = fs.readFileSync(bundlePath, 'utf-8');
67
+ bundle = JSON.parse(raw);
68
+ }
69
+ else {
70
+ // Auto-extract via bootstrap
71
+ const result = await bootstrap(absPath);
72
+ if (!result.success) {
73
+ throw new Error(`Failed to extract bundle from ${absPath}: ${result.error}`);
74
+ }
75
+ // Read the freshly generated bundle
76
+ if (!fs.existsSync(bundlePath)) {
77
+ throw new Error(`Bootstrap succeeded but no bundle found at ${bundlePath}`);
78
+ }
79
+ const raw = fs.readFileSync(bundlePath, 'utf-8');
80
+ bundle = JSON.parse(raw);
81
+ }
82
+ const entities = bundle.entities || [];
83
+ // Apply the same field remapping as BundleLoader.load()
84
+ for (const e of entities) {
85
+ const raw = e;
86
+ if (raw.sourceFile && !e._sourceFile) {
87
+ e._sourceFile = raw.sourceFile;
88
+ }
89
+ if (raw.sourceLanguage && !e._sourceLanguage) {
90
+ e._sourceLanguage = raw.sourceLanguage;
91
+ }
92
+ }
93
+ // Try to get commit SHA from manifest
94
+ let commitSha = '';
95
+ const manifestPath = path.join(absPath, '.seshat', 'manifest.json');
96
+ if (fs.existsSync(manifestPath)) {
97
+ try {
98
+ const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));
99
+ commitSha = manifest.commitSha || '';
100
+ }
101
+ catch { /* ignore */ }
102
+ }
103
+ return {
104
+ entities,
105
+ source: bundle.source || absPath,
106
+ commitSha,
107
+ entityCount: entities.length,
108
+ };
109
+ }
110
+ // ─── Field Comparison ─────────────────────────────────────────────
111
+ /** The entity fields to compare, mapped to user-facing names (v0.4.2 convention) */
112
+ const FIELD_MAP = [
113
+ ['struct', 'structure'],
114
+ ['edges', 'call_graph'],
115
+ ['data', 'data_flow'],
116
+ ['constraints', 'constraints'],
117
+ ['context', 'context'],
118
+ ['ownership', 'ownership'],
119
+ ['traits', 'type_info'],
120
+ ['runtime', 'runtime'],
121
+ ['semantics', 'logic'],
122
+ ];
123
+ /**
124
+ * Compare two entities field-by-field. Returns which fields changed
125
+ * using user-facing names.
126
+ */
127
+ function compareEntityFields(baseEntity, branchEntity) {
128
+ const changed = [];
129
+ for (const [field, displayName] of FIELD_MAP) {
130
+ const baseVal = baseEntity[field];
131
+ const branchVal = branchEntity[field];
132
+ // Both undefined/null → no change
133
+ if (baseVal == null && branchVal == null)
134
+ continue;
135
+ // One null, other not → changed
136
+ if (baseVal == null || branchVal == null) {
137
+ changed.push(displayName);
138
+ continue;
139
+ }
140
+ // Deep compare via JSON
141
+ if (JSON.stringify(baseVal) !== JSON.stringify(branchVal)) {
142
+ changed.push(displayName);
143
+ }
144
+ }
145
+ return changed;
146
+ }
147
+ // ─── Tool: diff_bundle ───────────────────────────────────────────
148
+ export async function diffBundle(args) {
149
+ const projErr = validateProject(args.project);
150
+ if (projErr)
151
+ return { error: projErr };
152
+ const loader = getLoader();
153
+ const baseEntities = loader.getEntities(args.project);
154
+ const baseManifest = loader.getManifest(args.project);
155
+ if (baseEntities.length === 0) {
156
+ return { error: 'No base entities loaded. Ensure the project has been extracted.' };
157
+ }
158
+ // Load branch bundle
159
+ let branchData;
160
+ try {
161
+ branchData = await loadBranchBundle(args.worktree_path);
162
+ }
163
+ catch (err) {
164
+ return { error: `Failed to load branch bundle: ${err.message}` };
165
+ }
166
+ const branchEntities = branchData.entities;
167
+ // Build entity maps by key
168
+ const baseSource = baseManifest?.commitSha ? '' : ''; // base entities have relative paths already
169
+ const baseMap = new Map();
170
+ for (const entity of baseEntities) {
171
+ const key = entityKey(entity);
172
+ baseMap.set(key, entity);
173
+ }
174
+ const branchMap = new Map();
175
+ for (const entity of branchEntities) {
176
+ const key = entityKey(entity);
177
+ branchMap.set(key, entity);
178
+ }
179
+ // Classify entities
180
+ const added = [];
181
+ const removed = [];
182
+ const modified = [];
183
+ const unchanged = [];
184
+ // Added: in branch but not base
185
+ for (const [key, entity] of branchMap) {
186
+ if (!baseMap.has(key)) {
187
+ added.push({
188
+ id: entity.id,
189
+ name: entityName(entity),
190
+ sourceFile: entity._sourceFile || null,
191
+ layer: entityLayer(entity),
192
+ });
193
+ }
194
+ }
195
+ // Removed: in base but not branch
196
+ for (const [key, entity] of baseMap) {
197
+ if (!branchMap.has(key)) {
198
+ removed.push({
199
+ id: entity.id,
200
+ name: entityName(entity),
201
+ sourceFile: entity._sourceFile || null,
202
+ layer: entityLayer(entity),
203
+ });
204
+ }
205
+ }
206
+ // Modified / Unchanged: in both
207
+ for (const [key, branchEntity] of branchMap) {
208
+ const baseEntity = baseMap.get(key);
209
+ if (!baseEntity)
210
+ continue; // already in added
211
+ const changedFields = compareEntityFields(baseEntity, branchEntity);
212
+ if (changedFields.length > 0) {
213
+ modified.push({
214
+ id: branchEntity.id,
215
+ name: entityName(branchEntity),
216
+ sourceFile: branchEntity._sourceFile || null,
217
+ layer: entityLayer(branchEntity),
218
+ changedFields,
219
+ });
220
+ }
221
+ else if (args.include_unchanged) {
222
+ unchanged.push({
223
+ id: branchEntity.id,
224
+ name: entityName(branchEntity),
225
+ sourceFile: branchEntity._sourceFile || null,
226
+ layer: entityLayer(branchEntity),
227
+ });
228
+ }
229
+ }
230
+ // Count unique files touched
231
+ const touchedFiles = new Set();
232
+ for (const e of [...added, ...removed, ...modified]) {
233
+ if (e.sourceFile)
234
+ touchedFiles.add(e.sourceFile);
235
+ }
236
+ const totalChanges = added.length + removed.length + modified.length;
237
+ const result = {
238
+ base: {
239
+ project: baseManifest?.projectName || args.project || 'default',
240
+ commitSha: baseManifest?.commitSha || '',
241
+ entityCount: baseEntities.length,
242
+ },
243
+ branch: {
244
+ path: args.worktree_path,
245
+ commitSha: branchData.commitSha,
246
+ entityCount: branchData.entityCount,
247
+ },
248
+ summary: {
249
+ added: added.length,
250
+ removed: removed.length,
251
+ modified: modified.length,
252
+ unchanged: baseEntities.length - removed.length - modified.length,
253
+ },
254
+ added,
255
+ removed,
256
+ modified,
257
+ _summary: `Branch modifies ${totalChanges} entities (${added.length} added, ${removed.length} removed, ${modified.length} modified) across ${touchedFiles.size} files`,
258
+ };
259
+ if (args.include_unchanged) {
260
+ result.unchanged = unchanged;
261
+ }
262
+ return result;
263
+ }
264
+ // ─── Conflict Tier Classification ─────────────────────────────────
265
+ const ZONE_1 = new Set(['edges', 'imports', 'ε']); // Header / Far from body
266
+ const ZONE_2 = new Set(['struct', 'σ', 'constraints', 'κ', 'traits', 'τ', 'ownership', 'λ']); // Signature / Decorators (High collision risk)
267
+ const ZONE_3 = new Set(['semantics', 'Σ', 'data', 'δ', 'runtime', 'ρ']); // Body / Implementation
268
+ function getZones(dimensions) {
269
+ const zones = new Set();
270
+ for (const d of dimensions) {
271
+ const lower = d.toLowerCase();
272
+ if (ZONE_1.has(lower))
273
+ zones.add(1);
274
+ if (ZONE_2.has(lower))
275
+ zones.add(2);
276
+ if (ZONE_3.has(lower))
277
+ zones.add(3);
278
+ }
279
+ return zones;
280
+ }
281
+ function classifyConflictTier(taskA, taskB) {
282
+ // Check entity overlap
283
+ const sharedEntities = [];
284
+ for (const id of taskA.entityIds) {
285
+ if (taskB.entityIds.has(id)) {
286
+ sharedEntities.push(id);
287
+ }
288
+ }
289
+ if (sharedEntities.length > 0) {
290
+ // Both touch same entity. Check dimensions to see if we can downgrade from Tier 4 (Sequential) to Tier 3 (Parallelizable Orthogonal)
291
+ if (taskA.dimensions.size > 0 && taskB.dimensions.size > 0) {
292
+ const zonesA = getZones(taskA.dimensions);
293
+ const zonesB = getZones(taskB.dimensions);
294
+ let spatialCollision = false;
295
+ for (const z of zonesA) {
296
+ if (zonesB.has(z))
297
+ spatialCollision = true;
298
+ }
299
+ if (!spatialCollision) {
300
+ return {
301
+ tier: 3,
302
+ reason: `${sharedEntities.length} shared entities, but orthogonal Spatial Zones (Tier 3) — safe to parallelize`,
303
+ sharedFiles: [],
304
+ sharedEntities,
305
+ };
306
+ }
307
+ }
308
+ return {
309
+ tier: 4,
310
+ reason: `${sharedEntities.length} shared entities in same/unknown Spatial Zone (Tier 4) — MUST sequence to prevent git conflict`,
311
+ sharedFiles: [],
312
+ sharedEntities,
313
+ };
314
+ }
315
+ // Check file overlap → Tier 1 or 2
316
+ const sharedFiles = [];
317
+ for (const file of taskA.files) {
318
+ if (taskB.files.has(file)) {
319
+ sharedFiles.push(file);
320
+ }
321
+ }
322
+ if (sharedFiles.length === 0) {
323
+ return {
324
+ tier: 1,
325
+ reason: 'No shared files — safe to parallelize',
326
+ sharedFiles: [],
327
+ sharedEntities: [],
328
+ };
329
+ }
330
+ return {
331
+ tier: 2,
332
+ reason: `${sharedFiles.length} shared files but different entities — git auto-merge handles it`,
333
+ sharedFiles,
334
+ sharedEntities: [],
335
+ };
336
+ }
337
+ // ─── Execution Plan Builder ───────────────────────────────────────
338
+ /**
339
+ * Build execution plan from tier-4 conflict graph using connected components.
340
+ * Tasks with no tier-4 edges to each other run in parallel.
341
+ * Tasks in the same tier-4 component run sequentially.
342
+ */
343
+ function buildExecutionPlan(taskIds, tier4Edges) {
344
+ // Build adjacency list for tier-4 conflicts
345
+ const adj = new Map();
346
+ for (const id of taskIds) {
347
+ adj.set(id, new Set());
348
+ }
349
+ for (const [a, b] of tier4Edges) {
350
+ adj.get(a)?.add(b);
351
+ adj.get(b)?.add(a);
352
+ }
353
+ // Find connected components via BFS
354
+ const visited = new Set();
355
+ const components = [];
356
+ for (const id of taskIds) {
357
+ if (visited.has(id))
358
+ continue;
359
+ const component = [];
360
+ const queue = [id];
361
+ visited.add(id);
362
+ while (queue.length > 0) {
363
+ const current = queue.shift();
364
+ component.push(current);
365
+ const neighbors = adj.get(current);
366
+ if (neighbors) {
367
+ for (const neighbor of neighbors) {
368
+ if (!visited.has(neighbor)) {
369
+ visited.add(neighbor);
370
+ queue.push(neighbor);
371
+ }
372
+ }
373
+ }
374
+ }
375
+ components.push(component);
376
+ }
377
+ // Build groups
378
+ const groups = components.map(component => {
379
+ if (component.length === 1) {
380
+ return { tasks: component, sequential: false };
381
+ }
382
+ return { tasks: component, sequential: true, order: component };
383
+ });
384
+ // Count parallelizable groups
385
+ const parallelGroups = groups.length;
386
+ const sequentialCount = groups.filter(g => g.sequential).length;
387
+ const parallelCount = groups.filter(g => !g.sequential).length;
388
+ let summaryParts = [`${taskIds.length} tasks`];
389
+ if (parallelCount > 0) {
390
+ summaryParts.push(`${parallelCount === taskIds.length ? 'all' : parallelCount} can run in parallel`);
391
+ }
392
+ if (sequentialCount > 0) {
393
+ const seqTasks = groups.filter(g => g.sequential).reduce((sum, g) => sum + g.tasks.length, 0);
394
+ summaryParts.push(`${seqTasks} must be sequenced (${sequentialCount} sequential group${sequentialCount > 1 ? 's' : ''})`);
395
+ }
396
+ return {
397
+ parallelGroups,
398
+ groups,
399
+ _summary: summaryParts.join(': '),
400
+ };
401
+ }
402
+ // ─── Tool: conflict_matrix ────────────────────────────────────────
403
+ export function conflictMatrix(args) {
404
+ const projErr = validateProject(args.project);
405
+ if (projErr)
406
+ return { error: projErr };
407
+ const loader = getLoader();
408
+ const { tasks } = args;
409
+ if (!tasks || tasks.length < 2) {
410
+ return { error: 'At least 2 tasks are required to compute a conflict matrix.' };
411
+ }
412
+ const warnings = [];
413
+ // Resolve entity sets for each task
414
+ const resolvedTasks = [];
415
+ for (const task of tasks) {
416
+ const entityIds = new Set();
417
+ const files = new Set();
418
+ const entities = [];
419
+ const notFound = [];
420
+ const dimensions = new Set(task.dimensions || []);
421
+ for (const nameOrId of task.entity_ids) {
422
+ const entity = loader.getEntityById(nameOrId, args.project)
423
+ || loader.getEntityByName(nameOrId, args.project);
424
+ if (entity) {
425
+ entityIds.add(entity.id);
426
+ entities.push(entity);
427
+ if (entity._sourceFile)
428
+ files.add(entity._sourceFile);
429
+ }
430
+ else {
431
+ notFound.push(nameOrId);
432
+ }
433
+ }
434
+ if (notFound.length > 0) {
435
+ warnings.push(`Task "${task.id}": entities not found: ${notFound.join(', ')}`);
436
+ }
437
+ // Expand blast radius if requested
438
+ if (task.expand_blast_radius && entityIds.size > 0) {
439
+ const g = getGraph(args.project);
440
+ const blastResult = computeBlastRadius(g, entityIds);
441
+ for (const affectedId of blastResult.affected) {
442
+ if (!entityIds.has(affectedId)) {
443
+ entityIds.add(affectedId);
444
+ const affectedEntity = g.entityById.get(affectedId);
445
+ if (affectedEntity) {
446
+ entities.push(affectedEntity);
447
+ if (affectedEntity._sourceFile)
448
+ files.add(affectedEntity._sourceFile);
449
+ }
450
+ }
451
+ }
452
+ }
453
+ resolvedTasks.push({ id: task.id, entityIds, files, entities, dimensions });
454
+ }
455
+ // Pairwise comparison
456
+ const matrix = [];
457
+ const tier4Edges = [];
458
+ for (let i = 0; i < resolvedTasks.length; i++) {
459
+ for (let j = i + 1; j < resolvedTasks.length; j++) {
460
+ const taskA = resolvedTasks[i];
461
+ const taskB = resolvedTasks[j];
462
+ const result = classifyConflictTier(taskA, taskB);
463
+ const entry = {
464
+ taskA: taskA.id,
465
+ taskB: taskB.id,
466
+ tier: result.tier,
467
+ reason: result.reason,
468
+ };
469
+ if (result.sharedFiles.length > 0)
470
+ entry.sharedFiles = result.sharedFiles;
471
+ if (result.sharedEntities.length > 0)
472
+ entry.sharedEntities = result.sharedEntities;
473
+ matrix.push(entry);
474
+ if (result.tier === 4) {
475
+ tier4Edges.push([taskA.id, taskB.id]);
476
+ }
477
+ }
478
+ }
479
+ // Build execution plan
480
+ const taskIds = resolvedTasks.map(t => t.id);
481
+ const executionPlan = buildExecutionPlan(taskIds, tier4Edges);
482
+ const result = {
483
+ taskCount: tasks.length,
484
+ matrix,
485
+ executionPlan,
486
+ };
487
+ if (warnings.length > 0) {
488
+ result.warnings = warnings;
489
+ }
490
+ return result;
491
+ }
@@ -66,3 +66,13 @@ export declare function reportActualBurn(args: {
66
66
  project?: string;
67
67
  notes?: string;
68
68
  }): Promise<unknown>;
69
+ export declare function find_runtime_violations(args?: {
70
+ project?: string;
71
+ }): unknown;
72
+ export declare function find_ownership_violations(args?: {
73
+ project?: string;
74
+ }): unknown;
75
+ export declare function query_traits(args: {
76
+ trait: string;
77
+ project?: string;
78
+ }): unknown;
@@ -866,3 +866,129 @@ export async function reportActualBurn(args) {
866
866
  _summary: `Prediction ${updated.id.slice(0, 8)}… closed. Predicted ${updated.predicted_total} tokens, actual ${updated.actual_total_tokens} tokens. Drift: ${updated.drift_ratio != null ? `${(updated.drift_ratio * 100).toFixed(1)}%` : 'N/A'}.`,
867
867
  };
868
868
  }
869
+ // ─── Tool: find_runtime_violations (ρ Dimension) ─────────────────
870
+ export function find_runtime_violations(args) {
871
+ const projErr = validateProject(args?.project);
872
+ if (projErr)
873
+ return { error: projErr };
874
+ const g = getGraph(args?.project);
875
+ const violations = [];
876
+ for (const [callerId, calleeIds] of g.callees) {
877
+ const callerEntity = g.entityById.get(callerId);
878
+ if (!callerEntity)
879
+ continue;
880
+ const callerFramework = callerEntity.runtime?.framework || 'agnostic';
881
+ for (const calleeId of calleeIds) {
882
+ const calleeEntity = g.entityById.get(calleeId);
883
+ if (!calleeEntity)
884
+ continue;
885
+ const calleeFramework = calleeEntity.runtime?.framework;
886
+ // If caller is framework-agnostic but calls framework-specific code (e.g., pure logic calling React)
887
+ if (callerFramework === 'agnostic' && calleeFramework && calleeFramework !== 'agnostic') {
888
+ violations.push({
889
+ caller: { ...entitySummary(callerEntity), framework: callerFramework },
890
+ callee: { ...entitySummary(calleeEntity), framework: calleeFramework },
891
+ issue: `Framework leak: Agnostic caller depends on ${calleeFramework}-specific callee`,
892
+ });
893
+ }
894
+ // If mixing frameworks directly (e.g., Vue calling React)
895
+ if (callerFramework !== 'agnostic' && calleeFramework && calleeFramework !== 'agnostic' && callerFramework !== calleeFramework) {
896
+ violations.push({
897
+ caller: { ...entitySummary(callerEntity), framework: callerFramework },
898
+ callee: { ...entitySummary(calleeEntity), framework: calleeFramework },
899
+ issue: `Cross-framework boundary: ${callerFramework} calling ${calleeFramework}`,
900
+ });
901
+ }
902
+ }
903
+ }
904
+ return {
905
+ totalViolations: violations.length,
906
+ _summary: `Found ${violations.length} runtime/framework boundary violations (ρ dimension)`,
907
+ violations: violations.slice(0, 100),
908
+ };
909
+ }
910
+ // ─── Tool: find_ownership_violations (λ Dimension) ───────────────
911
+ export function find_ownership_violations(args) {
912
+ const projErr = validateProject(args?.project);
913
+ if (projErr)
914
+ return { error: projErr };
915
+ const loader = getLoader();
916
+ const entities = loader.getEntities(args?.project);
917
+ const violations = [];
918
+ for (const e of entities) {
919
+ if (!e.ownership || Object.keys(e.ownership).length === 0)
920
+ continue;
921
+ // We look for 'unsafe', 'escapes', 'mutates_borrowed', or complex lifetime constraints
922
+ const own = e.ownership;
923
+ if (own.unsafe) {
924
+ violations.push({
925
+ entity: entitySummary(e),
926
+ ownership_details: own,
927
+ issue: 'Unsafe memory access or pointer manipulation flagged',
928
+ });
929
+ }
930
+ else if (own.escapes) {
931
+ violations.push({
932
+ entity: entitySummary(e),
933
+ ownership_details: own,
934
+ issue: 'Value potentially escapes its lifetime boundary',
935
+ });
936
+ }
937
+ else if (own.mutates_borrowed) {
938
+ violations.push({
939
+ entity: entitySummary(e),
940
+ ownership_details: own,
941
+ issue: 'Attempts to mutate an immutably borrowed reference',
942
+ });
943
+ }
944
+ }
945
+ return {
946
+ totalViolations: violations.length,
947
+ _summary: `Found ${violations.length} entities with strict/violating ownership constraints (λ dimension)`,
948
+ violations: violations.slice(0, 100),
949
+ };
950
+ }
951
+ // ─── Tool: query_traits (τ Dimension) ────────────────────────────
952
+ export function query_traits(args) {
953
+ const projErr = validateProject(args.project);
954
+ if (projErr)
955
+ return { error: projErr };
956
+ const target = args.trait.toLowerCase();
957
+ const loader = getLoader();
958
+ const entities = loader.getEntities(args.project);
959
+ const results = entities.filter(e => {
960
+ if (!e.traits)
961
+ return false;
962
+ // Handle array of strings [ 'asyncContext', 'generator' ]
963
+ if (Array.isArray(e.traits)) {
964
+ return e.traits.some(t => t.toLowerCase().includes(target));
965
+ }
966
+ // Handle structured traits { self: { fallible: true } }
967
+ if (typeof e.traits === 'object') {
968
+ const tr = e.traits;
969
+ if (tr.self && typeof tr.self === 'object') {
970
+ const selfTraits = tr.self;
971
+ for (const [key, val] of Object.entries(selfTraits)) {
972
+ if (key.toLowerCase().includes(target) && val === true)
973
+ return true;
974
+ }
975
+ }
976
+ if (tr.params && typeof tr.params === 'object') {
977
+ const paramTraits = tr.params;
978
+ for (const p of Object.values(paramTraits)) {
979
+ if (p.bounds && Array.isArray(p.bounds) && p.bounds.some((b) => b.toLowerCase().includes(target)))
980
+ return true;
981
+ if (p.markers && Array.isArray(p.markers) && p.markers.some((m) => m.toLowerCase().includes(target)))
982
+ return true;
983
+ }
984
+ }
985
+ }
986
+ return false;
987
+ });
988
+ return {
989
+ trait: args.trait,
990
+ total: results.length,
991
+ _summary: `Found ${results.length} entities implementing the '${args.trait}' trait (τ dimension)`,
992
+ entities: results.slice(0, 100).map(entitySummary),
993
+ };
994
+ }
package/dist/types.d.ts CHANGED
@@ -11,6 +11,10 @@ export interface JstfEntity {
11
11
  _sourceFile?: string | null;
12
12
  _sourceLanguage?: string;
13
13
  _project?: string;
14
+ _sourceLocation?: {
15
+ startLine: number;
16
+ endLine: number;
17
+ };
14
18
  /** Structure: function shape, signature, modifiers */
15
19
  struct?: {
16
20
  name?: string;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@papyruslabsai/seshat-mcp",
3
- "version": "0.4.2",
3
+ "version": "0.6.0",
4
4
  "description": "Semantic MCP server — exposes a codebase's structure, dependencies, and constraints as queryable tools",
5
5
  "type": "module",
6
6
  "bin": {
@@ -30,6 +30,12 @@
30
30
  "url": "https://github.com/papyruslabs-ai/seshat.git",
31
31
  "directory": "packages/seshat-mcp"
32
32
  },
33
- "keywords": ["mcp", "semantic", "code-analysis", "seshat", "static-analysis"],
33
+ "keywords": [
34
+ "mcp",
35
+ "semantic",
36
+ "code-analysis",
37
+ "seshat",
38
+ "static-analysis"
39
+ ],
34
40
  "license": "MIT"
35
41
  }