code-graph-context 2.6.2 → 2.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -345,7 +345,6 @@ Explicit task management with dependencies:
345
345
 
346
346
  | Tool | Purpose |
347
347
  |------|---------|
348
- | `swarm_orchestrate` | Decompose a task and spawn worker agents |
349
348
  | `swarm_post_task` | Add a task to the queue |
350
349
  | `swarm_get_tasks` | Query tasks with filters |
351
350
  | `swarm_claim_task` | Claim/start/release a task |
@@ -357,28 +356,20 @@ Explicit task management with dependencies:
357
356
  ### Example: Parallel Refactoring
358
357
 
359
358
  ```typescript
360
- // Orchestrator decomposes and creates tasks
361
- swarm_orchestrate({
359
+ // Orchestrator decomposes the task and creates individual work items
360
+ swarm_post_task({
362
361
  projectId: "backend",
363
- task: "Rename getUserById to findUserById across the codebase",
364
- maxAgents: 3
362
+ swarmId: "swarm_rename_user",
363
+ title: "Update UserService.findUserById",
364
+ description: "Rename getUserById to findUserById in UserService",
365
+ type: "refactor",
366
+ createdBy: "orchestrator"
365
367
  })
366
368
 
367
- // Returns a plan:
368
- {
369
- swarmId: "swarm_abc123",
370
- plan: {
371
- totalTasks: 12,
372
- parallelizable: 8,
373
- sequential: 4, // These have dependencies
374
- tasks: [
375
- { id: "task_1", title: "Update UserService.findUserById", status: "available" },
376
- { id: "task_2", title: "Update UserController references", status: "blocked", depends: ["task_1"] },
377
- ...
378
- ]
379
- },
380
- workerInstructions: "..." // Copy-paste to spawn workers
381
- }
369
+ // Workers claim and execute tasks
370
+ swarm_claim_task({ projectId: "backend", swarmId: "swarm_rename_user", agentId: "worker_1" })
371
+ // ... do work ...
372
+ swarm_complete_task({ taskId: "task_1", agentId: "worker_1", action: "complete", summary: "Renamed method" })
382
373
  ```
383
374
 
384
375
  ### Install the Swarm Skill
@@ -428,7 +419,6 @@ See [`skills/swarm/SKILL.md`](skills/swarm/SKILL.md) for the full documentation.
428
419
  | `stop_watch_project` | Stop file watching |
429
420
  | `list_watchers` | List active file watchers |
430
421
  | **Swarm** | |
431
- | `swarm_orchestrate` | Plan and spawn parallel agents |
432
422
  | `swarm_post_task` | Add task to the queue |
433
423
  | `swarm_get_tasks` | Query tasks |
434
424
  | `swarm_claim_task` | Claim/start/release tasks |
@@ -458,7 +448,7 @@ detect_dead_code → detect_duplicate_code → prioritize cleanup
458
448
 
459
449
  **Pattern 4: Multi-Agent Work**
460
450
  ```
461
 - swarm_orchestrate → spawn workers → swarm_get_tasks(includeStats) → swarm_cleanup
451
 + swarm_post_task → swarm_claim_task → swarm_complete_task → swarm_get_tasks(includeStats) → swarm_cleanup
462
452
  ```
463
453
 
464
454
  ### Multi-Project Support
@@ -3,7 +3,7 @@
3
3
  * Shared utilities for creating/converting graph nodes and edges
4
4
  */
5
5
  import crypto from 'crypto';
6
- import { CoreEdgeType, CORE_TYPESCRIPT_SCHEMA } from '../config/schema.js';
6
+ import { CoreEdgeType, CORE_TYPESCRIPT_SCHEMA, } from '../config/schema.js';
7
7
  // ============================================
8
8
  // Node ID Generation
9
9
  // ============================================
@@ -38,7 +38,11 @@ export const TOOL_NAMES = {
38
38
  swarmClaimTask: 'swarm_claim_task',
39
39
  swarmCompleteTask: 'swarm_complete_task',
40
40
  swarmGetTasks: 'swarm_get_tasks',
41
- swarmOrchestrate: 'swarm_orchestrate',
41
+ saveSessionBookmark: 'save_session_bookmark',
42
+ restoreSessionBookmark: 'restore_session_bookmark',
43
+ saveSessionNote: 'save_session_note',
44
+ recallSessionNotes: 'recall_session_notes',
45
+ cleanupSession: 'cleanup_session',
42
46
  };
43
47
  // Tool Metadata
44
48
  export const TOOL_METADATA = {
@@ -205,9 +209,9 @@ Parameters:
205
209
  },
206
210
  [TOOL_NAMES.swarmPheromone]: {
207
211
  title: 'Swarm Pheromone',
208
- description: `Mark a code node with a pheromone for coordination. Types: exploring (2min), modifying (10min), claiming (1hr), completed (24hr), warning (permanent), blocked (5min), proposal (1hr), needs_review (30min).
212
+ description: `Mark a code node with a pheromone for coordination. Types: exploring (2min), modifying (10min), claiming (1hr), completed (24hr), warning (permanent), blocked (5min), proposal (1hr), needs_review (30min), session_context (8hr).
209
213
 
210
- Workflow states (exploring/claiming/modifying/completed/blocked) are mutually exclusive per agent+node. Use remove:true to delete. Pheromones decay automatically.`,
214
+ Workflow states (exploring/claiming/modifying/completed/blocked) are mutually exclusive per agent+node. Flag types (warning/proposal/needs_review/session_context) can coexist. Use remove:true to delete. Pheromones decay automatically.`,
211
215
  },
212
216
  [TOOL_NAMES.swarmSense]: {
213
217
  title: 'Swarm Sense',
@@ -255,11 +259,89 @@ Complete unblocks dependent tasks. Failed tasks can be retried if retryable=true
255
259
 
256
260
  Sort by: priority (default), created, updated. Add includeStats:true for aggregate counts.`,
257
261
  },
258
- [TOOL_NAMES.swarmOrchestrate]: {
259
- title: 'Swarm Orchestrate',
260
- description: `Coordinate multiple agents for complex multi-file tasks. Analyzes codebase, decomposes into atomic tasks, spawns workers, monitors progress.
262
+ [TOOL_NAMES.saveSessionBookmark]: {
263
+ title: 'Save Session Bookmark',
264
+ description: `Save current session context as a bookmark for cross-session continuity.
261
265
 
262
- Use dryRun:true to preview plan. maxAgents controls parallelism (default: 3). Failed tasks auto-retry via pheromone decay.`,
266
+ Records your working set (code node IDs), task context, findings, and next steps so a future session can resume exactly where you left off.
267
+
268
+ Parameters:
269
+ - projectId (required): Project ID, name, or path
270
+ - sessionId (required): Unique session/conversation ID for recovery
271
+ - agentId (required): Your agent identifier
272
+ - summary (required, min 10 chars): Brief description of current work state
273
+ - workingSetNodeIds (required): Code node IDs you are focused on
274
+ - taskContext (required): High-level task being worked on
275
+ - findings: Key discoveries or decisions made so far
276
+ - nextSteps: What to do next when resuming
277
+ - metadata: Additional structured data
278
+
279
+ Returns bookmarkId for use with restore_session_bookmark.`,
280
+ },
281
+ [TOOL_NAMES.restoreSessionBookmark]: {
282
+ title: 'Restore Session Bookmark',
283
+ description: `Restore a previously saved session bookmark to resume work.
284
+
285
+ Retrieves the bookmark, fetches working set code nodes (with source), and returns any session notes. Use after conversation compaction or when resuming a task in a new session.
286
+
287
+ Parameters:
288
+ - projectId (required): Project ID, name, or path
289
+ - sessionId: Specific session to restore (latest bookmark if omitted)
290
+ - agentId: Filter by agent ID (any agent if omitted)
291
+ - includeCode (default: true): Include source code for working set nodes
292
+ - snippetLength (default: 500): Max characters per code snippet
293
+
294
+ Returns: bookmark data, working set nodes, session notes, and staleNodeIds (nodes no longer in graph after re-parse).`,
295
+ },
296
+ [TOOL_NAMES.saveSessionNote]: {
297
+ title: 'Save Session Note',
298
+ description: `Save an observation, decision, insight, or risk as a durable session note linked to code nodes.
299
+
300
+ Notes survive session compaction and are recalled by restore_session_bookmark or recall_session_notes.
301
+
302
+ Parameters:
303
+ - projectId (required): Project ID, name, or path
304
+ - sessionId (required): Session/conversation identifier
305
+ - agentId (required): Your agent identifier
306
+ - topic (required, 3-100 chars): Short topic label
307
+ - content (required, min 10 chars): Full observation text
308
+ - category (required): architectural, bug, insight, decision, risk, or todo
309
+ - severity (default: info): info, warning, or critical
310
+ - aboutNodeIds: Code node IDs this note is about (creates [:ABOUT] links)
311
+ - expiresInHours: Auto-expire after N hours (omit for permanent)
312
+
313
+ Returns noteId, hasEmbedding (enables semantic recall), and expiresAt.`,
314
+ },
315
+ [TOOL_NAMES.recallSessionNotes]: {
316
+ title: 'Recall Session Notes',
317
+ description: `Search and retrieve saved session notes. Supports semantic vector search (when query provided) or filter-based search.
318
+
319
+ Parameters:
320
+ - projectId (required): Project ID, name, or path
321
+ - query: Natural language search — triggers semantic vector search when provided
322
+ - category: Filter by architectural, bug, insight, decision, risk, todo
323
+ - severity: Filter by info, warning, or critical
324
+ - sessionId: Filter by session ID
325
+ - agentId: Filter by agent ID
326
+ - limit (default: 10, max: 50): Maximum notes to return
327
+ - minSimilarity (default: 0.3): Minimum similarity for vector search
328
+
329
+ Returns notes with topic, content, category, severity, relevance score (vector mode), and linked aboutNodes.`,
330
+ },
331
+ [TOOL_NAMES.cleanupSession]: {
332
+ title: 'Cleanup Session',
333
+ description: `Clean up expired session notes and old session bookmarks.
334
+
335
+ Removes:
336
+ - Expired SessionNote nodes (past expiresAt) and their edges
337
+ - Old SessionBookmark nodes, keeping only the most recent N per session (default: 3)
338
+
339
+ Parameters:
340
+ - projectId (required): Project ID, name, or path
341
+ - keepBookmarks (default: 3): Number of most recent bookmarks to keep per session
342
+ - dryRun (default: false): Preview what would be deleted without deleting
343
+
344
+ Returns counts of deleted notes, bookmarks, and edges.`,
263
345
  },
264
346
  };
265
347
  // Default Values
@@ -74,6 +74,9 @@ export class GraphGeneratorHandler {
74
74
  await this.neo4jService.run(QUERIES.CREATE_PROJECT_ID_INDEX_EMBEDDED);
75
75
  await this.neo4jService.run(QUERIES.CREATE_PROJECT_ID_INDEX_SOURCEFILE);
76
76
  await this.neo4jService.run(QUERIES.CREATE_NORMALIZED_HASH_INDEX);
77
+ await this.neo4jService.run(QUERIES.CREATE_SESSION_BOOKMARK_INDEX);
78
+ await this.neo4jService.run(QUERIES.CREATE_SESSION_NOTE_INDEX);
79
+ await this.neo4jService.run(QUERIES.CREATE_SESSION_NOTE_CATEGORY_INDEX);
77
80
  await debugLog('Project indexes created');
78
81
  }
79
82
  async importNodes(nodes, batchSize) {
@@ -180,6 +183,7 @@ export class GraphGeneratorHandler {
180
183
  async createVectorIndexes() {
181
184
  console.error('Creating vector indexes...');
182
185
  await this.neo4jService.run(QUERIES.CREATE_EMBEDDED_VECTOR_INDEX);
186
+ await this.neo4jService.run(QUERIES.CREATE_SESSION_NOTES_VECTOR_INDEX);
183
187
  await debugLog('Vector indexes created');
184
188
  }
185
189
  flattenProperties(properties) {
@@ -24,8 +24,8 @@ import { registerAllTools } from './tools/index.js';
24
24
  import { debugLog } from './utils.js';
25
25
  // Track server state for debugging
26
26
  let serverStartTime;
27
- let toolCallCount = 0;
28
- let lastToolCall = null;
27
+ const toolCallCount = 0;
28
+ const lastToolCall = null;
29
29
  /**
30
30
  * Log memory usage and server stats
31
31
  */
@@ -4,7 +4,7 @@
4
4
  */
5
5
  import fs from 'fs/promises';
6
6
  import { join } from 'path';
7
- import { ensureNeo4jRunning, isDockerInstalled, isDockerRunning, } from '../cli/neo4j-docker.js';
7
+ import { ensureNeo4jRunning, isDockerInstalled, isDockerRunning } from '../cli/neo4j-docker.js';
8
8
  import { Neo4jService, QUERIES } from '../storage/neo4j/neo4j.service.js';
9
9
  import { FILE_PATHS, LOG_CONFIG } from './constants.js';
10
10
  import { initializeNaturalLanguageService } from './tools/natural-language-to-cypher.tool.js';
@@ -47,7 +47,7 @@ class WatchManager {
47
47
  // This is expected if the client doesn't support logging capability
48
48
  debugLog('sendNotification: MCP message failed (expected if client lacks logging)', {
49
49
  type: notification.type,
50
- error: String(error)
50
+ error: String(error),
51
51
  });
52
52
  });
53
53
  }
@@ -62,7 +62,8 @@ class WatchManager {
62
62
  }
63
63
  // Enforce maximum watcher limit
64
64
  if (this.watchers.size >= WATCH.maxWatchers) {
65
- throw new Error(`Maximum watcher limit (${WATCH.maxWatchers}) reached. ` + `Stop an existing watcher before starting a new one.`);
65
+ throw new Error(`Maximum watcher limit (${WATCH.maxWatchers}) reached. ` +
66
+ `Stop an existing watcher before starting a new one.`);
66
67
  }
67
68
  const fullConfig = {
68
69
  projectPath: config.projectPath,
@@ -138,7 +139,13 @@ class WatchManager {
138
139
  * Handle a file system event
139
140
  */
140
141
  handleFileEvent(state, type, filePath) {
141
- debugLog('handleFileEvent START', { type, filePath, projectId: state.projectId, status: state.status, isStopping: state.isStopping });
142
+ debugLog('handleFileEvent START', {
143
+ type,
144
+ filePath,
145
+ projectId: state.projectId,
146
+ status: state.status,
147
+ isStopping: state.isStopping,
148
+ });
142
149
  // Ignore events if watcher is stopping or not active
143
150
  if (state.isStopping || state.status !== 'active') {
144
151
  debugLog('Ignoring event - watcher not active or stopping', {
@@ -194,12 +201,12 @@ class WatchManager {
194
201
  projectId: state.projectId,
195
202
  isProcessing: state.isProcessing,
196
203
  pendingCount: state.pendingEvents.length,
197
- isStopping: state.isStopping
204
+ isStopping: state.isStopping,
198
205
  });
199
206
  // Don't process if already processing, no events, or watcher is stopping
200
207
  if (state.isProcessing || state.pendingEvents.length === 0 || state.isStopping) {
201
208
  await debugLog('processEvents: early return', {
202
- reason: state.isProcessing ? 'already processing' : state.pendingEvents.length === 0 ? 'no events' : 'stopping'
209
+ reason: state.isProcessing ? 'already processing' : state.pendingEvents.length === 0 ? 'no events' : 'stopping',
203
210
  });
204
211
  return;
205
212
  }
@@ -226,12 +233,12 @@ class WatchManager {
226
233
  }
227
234
  await debugLog('processEvents: calling incrementalParseHandler', {
228
235
  projectPath: state.projectPath,
229
- projectId: state.projectId
236
+ projectId: state.projectId,
230
237
  });
231
238
  const result = await this.incrementalParseHandler(state.projectPath, state.projectId, state.tsconfigPath);
232
239
  await debugLog('processEvents: incrementalParseHandler returned', {
233
240
  nodesUpdated: result.nodesUpdated,
234
- edgesUpdated: result.edgesUpdated
241
+ edgesUpdated: result.edgesUpdated,
235
242
  });
236
243
  state.lastUpdateTime = new Date();
237
244
  const elapsedMs = Date.now() - startTime;
@@ -285,7 +292,10 @@ class WatchManager {
285
292
  debugLog('handleWatcherError: cleanup succeeded', { projectId: state.projectId });
286
293
  })
287
294
  .catch((cleanupError) => {
288
- debugLog('handleWatcherError: cleanup failed', { projectId: state.projectId, cleanupError: String(cleanupError) });
295
+ debugLog('handleWatcherError: cleanup failed', {
296
+ projectId: state.projectId,
297
+ cleanupError: String(cleanupError),
298
+ });
289
299
  console.error(`[WatchManager] Failed to cleanup errored watcher ${state.projectId}:`, cleanupError);
290
300
  });
291
301
  }
@@ -306,7 +316,7 @@ class WatchManager {
306
316
  debugLog('syncMissedChanges: completed', {
307
317
  projectId: state.projectId,
308
318
  nodesUpdated: result.nodesUpdated,
309
- edgesUpdated: result.edgesUpdated
319
+ edgesUpdated: result.edgesUpdated,
310
320
  });
311
321
  if (result.nodesUpdated > 0 || result.edgesUpdated > 0) {
312
322
  console.error(`[WatchManager] Synced missed changes for ${state.projectId}: ` +
@@ -314,7 +324,11 @@ class WatchManager {
314
324
  }
315
325
  })
316
326
  .catch((error) => {
317
- debugLog('syncMissedChanges: error', { projectId: state.projectId, error: String(error), isStopping: state.isStopping });
327
+ debugLog('syncMissedChanges: error', {
328
+ projectId: state.projectId,
329
+ error: String(error),
330
+ isStopping: state.isStopping,
331
+ });
318
332
  // Only log if watcher hasn't been stopped
319
333
  if (!state.isStopping) {
320
334
  console.error(`[WatchManager] Failed to sync missed changes for ${state.projectId}:`, error);
@@ -12,13 +12,15 @@ import { createListWatchersTool } from './list-watchers.tool.js';
12
12
  import { createNaturalLanguageToCypherTool } from './natural-language-to-cypher.tool.js';
13
13
  import { createParseTypescriptProjectTool } from './parse-typescript-project.tool.js';
14
14
  import { createSearchCodebaseTool } from './search-codebase.tool.js';
15
+ import { createRestoreSessionBookmarkTool, createSaveSessionBookmarkTool } from './session-bookmark.tool.js';
16
+ import { createCleanupSessionTool } from './session-cleanup.tool.js';
17
+ import { createRecallSessionNotesTool, createSaveSessionNoteTool } from './session-note.tool.js';
15
18
  import { createStartWatchProjectTool } from './start-watch-project.tool.js';
16
19
  import { createStopWatchProjectTool } from './stop-watch-project.tool.js';
17
20
  import { createSwarmClaimTaskTool } from './swarm-claim-task.tool.js';
18
21
  import { createSwarmCleanupTool } from './swarm-cleanup.tool.js';
19
22
  import { createSwarmCompleteTaskTool } from './swarm-complete-task.tool.js';
20
23
  import { createSwarmGetTasksTool } from './swarm-get-tasks.tool.js';
21
- import { createSwarmOrchestrateTool } from './swarm-orchestrate.tool.js';
22
24
  import { createSwarmPheromoneTool } from './swarm-pheromone.tool.js';
23
25
  import { createSwarmPostTaskTool } from './swarm-post-task.tool.js';
24
26
  import { createSwarmSenseTool } from './swarm-sense.tool.js';
@@ -71,6 +73,12 @@ export const registerAllTools = (server) => {
71
73
  createSwarmClaimTaskTool(server);
72
74
  createSwarmCompleteTaskTool(server);
73
75
  createSwarmGetTasksTool(server);
74
- // Register swarm orchestration tool (meta-tool for coordinating multi-agent work)
75
- createSwarmOrchestrateTool(server);
76
+ // Register session bookmark tools (cross-session context continuity)
77
+ createSaveSessionBookmarkTool(server);
78
+ createRestoreSessionBookmarkTool(server);
79
+ // Register session note tools (durable observations and decisions)
80
+ createSaveSessionNoteTool(server);
81
+ createRecallSessionNotesTool(server);
82
+ // Register session cleanup tool
83
+ createCleanupSessionTool(server);
76
84
  };
@@ -90,8 +90,8 @@ export const createParseTypescriptProjectTool = (server) => {
90
90
  .optional()
91
91
  .default('auto')
92
92
  .describe('When to use streaming import: auto (>100 files), always, or never'),
93
- async: z
94
- .coerce.boolean()
93
+ async: z.coerce
94
+ .boolean()
95
95
  .optional()
96
96
  .default(true)
97
97
  .describe('Run parsing in background and return job ID immediately. Use check_parse_status to monitor.'),
@@ -224,7 +224,8 @@ export const createParseTypescriptProjectTool = (server) => {
224
224
  });
225
225
  const discoveredFiles = await parser.discoverSourceFiles();
226
226
  const totalFiles = discoveredFiles.length;
227
- const shouldUseStreaming = useStreaming === 'always' || (useStreaming === 'auto' && totalFiles > PARSING.streamingThreshold && chunkSize > 0);
227
+ const shouldUseStreaming = useStreaming === 'always' ||
228
+ (useStreaming === 'auto' && totalFiles > PARSING.streamingThreshold && chunkSize > 0);
228
229
  console.error(`📊 Project has ${totalFiles} files. Streaming: ${shouldUseStreaming ? 'enabled' : 'disabled'}`);
229
230
  if (shouldUseStreaming && clearExisting !== false) {
230
231
  // Use streaming import for large projects