memorygraphmcp-0.11.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. memorygraph/__init__.py +50 -0
  2. memorygraph/__main__.py +12 -0
  3. memorygraph/advanced_tools.py +509 -0
  4. memorygraph/analytics/__init__.py +46 -0
  5. memorygraph/analytics/advanced_queries.py +727 -0
  6. memorygraph/backends/__init__.py +21 -0
  7. memorygraph/backends/base.py +179 -0
  8. memorygraph/backends/cloud.py +75 -0
  9. memorygraph/backends/cloud_backend.py +858 -0
  10. memorygraph/backends/factory.py +577 -0
  11. memorygraph/backends/falkordb_backend.py +749 -0
  12. memorygraph/backends/falkordblite_backend.py +746 -0
  13. memorygraph/backends/ladybugdb_backend.py +242 -0
  14. memorygraph/backends/memgraph_backend.py +327 -0
  15. memorygraph/backends/neo4j_backend.py +298 -0
  16. memorygraph/backends/sqlite_fallback.py +463 -0
  17. memorygraph/backends/turso.py +448 -0
  18. memorygraph/cli.py +743 -0
  19. memorygraph/cloud_database.py +297 -0
  20. memorygraph/config.py +295 -0
  21. memorygraph/database.py +933 -0
  22. memorygraph/graph_analytics.py +631 -0
  23. memorygraph/integration/__init__.py +69 -0
  24. memorygraph/integration/context_capture.py +426 -0
  25. memorygraph/integration/project_analysis.py +583 -0
  26. memorygraph/integration/workflow_tracking.py +492 -0
  27. memorygraph/intelligence/__init__.py +59 -0
  28. memorygraph/intelligence/context_retrieval.py +447 -0
  29. memorygraph/intelligence/entity_extraction.py +386 -0
  30. memorygraph/intelligence/pattern_recognition.py +420 -0
  31. memorygraph/intelligence/temporal.py +374 -0
  32. memorygraph/migration/__init__.py +27 -0
  33. memorygraph/migration/manager.py +579 -0
  34. memorygraph/migration/models.py +142 -0
  35. memorygraph/migration/scripts/__init__.py +17 -0
  36. memorygraph/migration/scripts/bitemporal_migration.py +595 -0
  37. memorygraph/migration/scripts/multitenancy_migration.py +452 -0
  38. memorygraph/migration_tools_module.py +146 -0
  39. memorygraph/models.py +684 -0
  40. memorygraph/proactive/__init__.py +46 -0
  41. memorygraph/proactive/outcome_learning.py +444 -0
  42. memorygraph/proactive/predictive.py +410 -0
  43. memorygraph/proactive/session_briefing.py +399 -0
  44. memorygraph/relationships.py +668 -0
  45. memorygraph/server.py +883 -0
  46. memorygraph/sqlite_database.py +1876 -0
  47. memorygraph/tools/__init__.py +59 -0
  48. memorygraph/tools/activity_tools.py +262 -0
  49. memorygraph/tools/memory_tools.py +315 -0
  50. memorygraph/tools/migration_tools.py +181 -0
  51. memorygraph/tools/relationship_tools.py +147 -0
  52. memorygraph/tools/search_tools.py +406 -0
  53. memorygraph/tools/temporal_tools.py +339 -0
  54. memorygraph/utils/__init__.py +10 -0
  55. memorygraph/utils/context_extractor.py +429 -0
  56. memorygraph/utils/error_handling.py +151 -0
  57. memorygraph/utils/export_import.py +425 -0
  58. memorygraph/utils/graph_algorithms.py +200 -0
  59. memorygraph/utils/pagination.py +149 -0
  60. memorygraph/utils/project_detection.py +133 -0
  61. memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
  62. memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
  63. memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
  64. memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
  65. memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
memorygraph/graph_analytics.py
@@ -0,0 +1,631 @@
"""
Graph analytics and traversal algorithms for Claude Code Memory Server.

This module provides advanced graph operations including path finding,
cluster detection, bridge identification, and graph metrics analysis.

Phase 4 Implementation - Advanced Relationship System
"""

from typing import List, Dict, Set, Tuple, Optional, Any
from collections import defaultdict, deque
from dataclasses import dataclass
import logging

from .models import (
    Memory,
    Relationship,
    RelationshipType,
    MemoryGraph,
)
from .relationships import RelationshipCategory, relationship_manager

logger = logging.getLogger(__name__)


@dataclass
class GraphPath:
    """
    Represents a path through the memory graph.

    Attributes:
        memories: List of memories in the path (in order)
        relationships: List of relationships connecting the memories
        total_strength: Cumulative strength of all relationships
        length: Number of hops in the path
    """

    memories: List[Memory]
    relationships: List[Relationship]
    total_strength: float
    length: int

    @property
    def average_strength(self) -> float:
        """Calculate average relationship strength along the path."""
        return self.total_strength / self.length if self.length > 0 else 0.0

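Since GraphPath is a plain dataclass, its derived average_strength is easy to sanity-check in isolation. A minimal sketch, assuming the wheel is installed; the empty memories/relationships lists are stand-ins, as the dataclass does not validate them:

from memorygraph.graph_analytics import GraphPath

# A two-hop path whose relationships had strengths 0.9 and 0.5:
path = GraphPath(memories=[], relationships=[], total_strength=1.4, length=2)
print(path.average_strength)  # 1.4 / 2 = 0.7
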
@dataclass
class MemoryCluster:
    """
    Represents a cluster of related memories.

    Attributes:
        memories: List of memories in the cluster
        internal_relationships: Relationships within the cluster
        density: How densely connected the cluster is (0.0 to 1.0)
        strength: Average relationship strength within cluster
        categories: Relationship categories present in cluster
    """

    memories: List[Memory]
    internal_relationships: List[Relationship]
    density: float
    strength: float
    categories: Set[RelationshipCategory]


@dataclass
class BridgeNode:
    """
    Represents a memory that bridges different clusters.

    Attributes:
        memory: The bridge memory
        connected_clusters: Cluster IDs this memory connects
        bridge_strength: Importance of this bridge (0.0 to 1.0)
    """

    memory: Memory
    connected_clusters: List[int]
    bridge_strength: float


class GraphAnalyzer:
    """
    Provides graph analytics and traversal algorithms.

    This class implements algorithms for analyzing the structure and
    connectivity of the memory graph.
    """

    def __init__(self):
        """Initialize the graph analyzer."""
        self.rel_manager = relationship_manager

    def build_adjacency_lists(
        self,
        memories: List[Memory],
        relationships: List[Relationship]
    ) -> Tuple[Dict[str, List[str]], Dict[Tuple[str, str], Relationship]]:
        """
        Build adjacency list representation of the graph.

        Args:
            memories: List of memory nodes
            relationships: List of relationships (edges)

        Returns:
            Tuple of (adjacency_dict, relationship_map):
            - adjacency_dict: Maps memory_id -> list of connected memory_ids
            - relationship_map: Maps (from_id, to_id) -> Relationship
        """
        adjacency: Dict[str, List[str]] = defaultdict(list)
        rel_map: Dict[Tuple[str, str], Relationship] = {}

        # Build adjacency list
        for rel in relationships:
            # Add both directions for undirected traversal
            adjacency[rel.from_memory_id].append(rel.to_memory_id)
            adjacency[rel.to_memory_id].append(rel.from_memory_id)

            # Store relationship in both directions
            rel_map[(rel.from_memory_id, rel.to_memory_id)] = rel

            # For bidirectional relationships, store reverse too
            metadata = self.rel_manager.get_relationship_metadata(rel.type)
            if metadata.bidirectional:
                rel_map[(rel.to_memory_id, rel.from_memory_id)] = rel

        return adjacency, rel_map

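To make the shape of the two returned structures concrete, here is a standalone sketch of the same construction over bare ID pairs (hypothetical edges standing in for Relationship objects, with the bidirectional check omitted):

from collections import defaultdict

edges = [("a", "b"), ("b", "c")]
adjacency = defaultdict(list)
rel_map = {}
for frm, to in edges:
    adjacency[frm].append(to)
    adjacency[to].append(frm)  # both directions for undirected traversal
    rel_map[(frm, to)] = (frm, to)

print(dict(adjacency))  # {'a': ['b'], 'b': ['a', 'c'], 'c': ['b']}
print(rel_map)          # {('a', 'b'): ('a', 'b'), ('b', 'c'): ('b', 'c')}
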
    def find_shortest_path(
        self,
        from_memory_id: str,
        to_memory_id: str,
        memories: List[Memory],
        relationships: List[Relationship],
        max_depth: int = 5,
        relationship_types: Optional[List[RelationshipType]] = None
    ) -> Optional[GraphPath]:
        """
        Find the shortest path between two memories using BFS.

        Args:
            from_memory_id: Starting memory ID
            to_memory_id: Target memory ID
            memories: All memory nodes
            relationships: All relationships
            max_depth: Maximum path length to search
            relationship_types: Optional filter for relationship types

        Returns:
            GraphPath if path found, None otherwise
        """
        # Build memory lookup
        memory_map = {m.id: m for m in memories}

        if from_memory_id not in memory_map or to_memory_id not in memory_map:
            return None

        # Build adjacency list
        adjacency, rel_map = self.build_adjacency_lists(memories, relationships)

        # Filter relationships if types specified
        if relationship_types:
            allowed_types = set(relationship_types)
            rel_map = {
                k: v for k, v in rel_map.items()
                if v.type in allowed_types
            }

        # BFS to find shortest path
        queue = deque([(from_memory_id, [from_memory_id], [])])
        visited = {from_memory_id}

        while queue:
            current_id, path_ids, path_rels = queue.popleft()

            # Check depth limit
            if len(path_ids) > max_depth:
                continue

            # Found target
            if current_id == to_memory_id:
                path_memories = [memory_map[mid] for mid in path_ids]
                total_strength = sum(r.properties.strength for r in path_rels)

                return GraphPath(
                    memories=path_memories,
                    relationships=path_rels,
                    total_strength=total_strength,
                    length=len(path_rels)
                )

            # Explore neighbors
            for neighbor_id in adjacency.get(current_id, []):
                # Check if relationship exists (considering filters)
                rel_key = (current_id, neighbor_id)
                if rel_key not in rel_map:
                    continue

                if neighbor_id not in visited:
                    visited.add(neighbor_id)
                    new_path = path_ids + [neighbor_id]
                    new_rels = path_rels + [rel_map[rel_key]]
                    queue.append((neighbor_id, new_path, new_rels))

        return None

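A minimal call sketch against the guard clause above, assuming the wheel is installed; with no memories loaded, either endpoint is missing from memory_map and the search returns None before BFS starts (IDs hypothetical):

from memorygraph.graph_analytics import GraphAnalyzer

analyzer = GraphAnalyzer()
# Unknown endpoints short-circuit to None before any traversal:
print(analyzer.find_shortest_path("mem-a", "mem-b", [], []))  # None
# With real data, pass memories/relationships loaded from a backend and
# inspect path.length and path.average_strength on the returned GraphPath.
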
    def find_all_paths(
        self,
        from_memory_id: str,
        to_memory_id: str,
        memories: List[Memory],
        relationships: List[Relationship],
        max_depth: int = 4,
        max_paths: int = 10
    ) -> List[GraphPath]:
        """
        Find multiple paths between two memories.

        Args:
            from_memory_id: Starting memory ID
            to_memory_id: Target memory ID
            memories: All memory nodes
            relationships: All relationships
            max_depth: Maximum path length
            max_paths: Maximum number of paths to return

        Returns:
            List of GraphPath objects, sorted by strength
        """
        memory_map = {m.id: m for m in memories}

        if from_memory_id not in memory_map or to_memory_id not in memory_map:
            return []

        adjacency, rel_map = self.build_adjacency_lists(memories, relationships)

        paths_found: List[GraphPath] = []

        def dfs(current_id: str, path_ids: List[str], path_rels: List[Relationship],
                visited: Set[str]):
            """DFS helper to find all paths."""
            if len(paths_found) >= max_paths:
                return

            if len(path_ids) > max_depth:
                return

            if current_id == to_memory_id:
                # Found a path
                path_memories = [memory_map[mid] for mid in path_ids]
                total_strength = sum(r.properties.strength for r in path_rels)

                paths_found.append(GraphPath(
                    memories=path_memories,
                    relationships=path_rels,
                    total_strength=total_strength,
                    length=len(path_rels)
                ))
                return

            # Explore neighbors
            for neighbor_id in adjacency.get(current_id, []):
                rel_key = (current_id, neighbor_id)
                if rel_key not in rel_map:
                    continue

                if neighbor_id not in visited:
                    visited.add(neighbor_id)
                    dfs(
                        neighbor_id,
                        path_ids + [neighbor_id],
                        path_rels + [rel_map[rel_key]],
                        visited
                    )
                    visited.remove(neighbor_id)

        dfs(from_memory_id, [from_memory_id], [], {from_memory_id})

        # Sort by total strength descending
        paths_found.sort(key=lambda p: p.total_strength, reverse=True)

        return paths_found

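The visited.remove() call is what distinguishes this DFS from the BFS above: once a branch is fully explored, the node is released so alternative routes may reuse it. A standalone sketch of the same backtracking pattern on a plain dict graph (hypothetical IDs):

graph = {"a": ["b", "c"], "b": ["d"], "c": ["d"], "d": []}

def all_paths(node, target, visited, path, out):
    if node == target:
        out.append(path[:])
        return
    for nxt in graph[node]:
        if nxt not in visited:
            visited.add(nxt)
            all_paths(nxt, target, visited, path + [nxt], out)
            visited.remove(nxt)  # release the node for alternative routes

found = []
all_paths("a", "d", {"a"}, ["a"], found)
print(found)  # [['a', 'b', 'd'], ['a', 'c', 'd']]
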
    def get_neighbors(
        self,
        memory_id: str,
        memories: List[Memory],
        relationships: List[Relationship],
        depth: int = 1,
        min_strength: float = 0.0,
        relationship_types: Optional[List[RelationshipType]] = None,
        categories: Optional[List[RelationshipCategory]] = None
    ) -> Dict[int, List[Tuple[Memory, Relationship]]]:
        """
        Get neighbors at each depth level.

        Args:
            memory_id: Starting memory ID
            memories: All memory nodes
            relationships: All relationships
            depth: How many hops to traverse
            min_strength: Minimum relationship strength filter
            relationship_types: Optional relationship type filter
            categories: Optional relationship category filter

        Returns:
            Dictionary mapping depth -> list of (memory, relationship) tuples
        """
        memory_map = {m.id: m for m in memories}

        if memory_id not in memory_map:
            return {}

        # Build adjacency
        adjacency, rel_map = self.build_adjacency_lists(memories, relationships)

        # Apply filters
        if relationship_types:
            allowed_types = set(relationship_types)
            rel_map = {
                k: v for k, v in rel_map.items()
                if v.type in allowed_types
            }

        if categories:
            allowed_categories = set(categories)
            rel_map = {
                k: v for k, v in rel_map.items()
                if self.rel_manager.get_relationship_category(v.type) in allowed_categories
            }

        if min_strength > 0:
            rel_map = {
                k: v for k, v in rel_map.items()
                if v.properties.strength >= min_strength
            }

        # BFS to get neighbors at each depth
        neighbors_by_depth: Dict[int, List[Tuple[Memory, Relationship]]] = defaultdict(list)
        visited = {memory_id}
        current_level = [(memory_id, 0, None)]

        while current_level:
            next_level = []

            for current_id, current_depth, incoming_rel in current_level:
                if current_depth >= depth:
                    continue

                for neighbor_id in adjacency.get(current_id, []):
                    rel_key = (current_id, neighbor_id)
                    if rel_key not in rel_map:
                        continue

                    if neighbor_id not in visited:
                        visited.add(neighbor_id)
                        neighbor_mem = memory_map[neighbor_id]
                        neighbor_rel = rel_map[rel_key]

                        neighbors_by_depth[current_depth + 1].append(
                            (neighbor_mem, neighbor_rel)
                        )

                        next_level.append((neighbor_id, current_depth + 1, neighbor_rel))

            current_level = next_level

        return dict(neighbors_by_depth)

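The return value groups (memory, relationship) pairs by hop count, e.g. {1: [first-hop pairs], 2: [second-hop pairs]}. A minimal sketch using the module-level singleton defined at the end of this file (hypothetical ID, no data loaded, so the unknown-ID guard returns an empty dict):

from memorygraph.graph_analytics import graph_analyzer

print(graph_analyzer.get_neighbors("mem-123", [], [], depth=2))  # {}
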
    def detect_clusters(
        self,
        memories: List[Memory],
        relationships: List[Relationship],
        min_size: int = 3,
        min_density: float = 0.3
    ) -> List[MemoryCluster]:
        """
        Detect clusters of densely connected memories.

        Uses a simple connected components + density filtering approach.

        Args:
            memories: All memory nodes
            relationships: All relationships
            min_size: Minimum cluster size
            min_density: Minimum cluster density (0.0 to 1.0)

        Returns:
            List of MemoryCluster objects
        """
        memory_map = {m.id: m for m in memories}
        adjacency, rel_map = self.build_adjacency_lists(memories, relationships)

        # Find connected components
        visited = set()
        components: List[Set[str]] = []

        def dfs_component(start_id: str) -> Set[str]:
            """DFS to find connected component."""
            component = set()
            stack = [start_id]

            while stack:
                node_id = stack.pop()
                if node_id in visited:
                    continue

                visited.add(node_id)
                component.add(node_id)

                for neighbor_id in adjacency.get(node_id, []):
                    if neighbor_id not in visited:
                        stack.append(neighbor_id)

            return component

        # Find all components
        for memory_id in memory_map:
            if memory_id not in visited:
                component = dfs_component(memory_id)
                if len(component) >= min_size:
                    components.append(component)

        # Calculate density and create clusters
        clusters = []

        for comp in components:
            # Count internal relationships
            internal_rels = []
            for rel in relationships:
                if rel.from_memory_id in comp and rel.to_memory_id in comp:
                    internal_rels.append(rel)

            # Calculate density
            n = len(comp)
            max_edges = n * (n - 1) / 2  # For undirected graph
            actual_edges = len(internal_rels)
            density = actual_edges / max_edges if max_edges > 0 else 0.0

            # Filter by density
            if density >= min_density:
                # Calculate average strength
                avg_strength = (
                    sum(r.properties.strength for r in internal_rels) / len(internal_rels)
                    if internal_rels else 0.0
                )

                # Collect categories
                categories = set()
                for rel in internal_rels:
                    cat = self.rel_manager.get_relationship_category(rel.type)
                    categories.add(cat)

                # Create cluster
                cluster_memories = [memory_map[mid] for mid in comp]

                clusters.append(MemoryCluster(
                    memories=cluster_memories,
                    internal_relationships=internal_rels,
                    density=density,
                    strength=avg_strength,
                    categories=categories
                ))

        # Sort by size and density
        clusters.sort(key=lambda c: (len(c.memories), c.density), reverse=True)

        return clusters

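A worked instance of the density formula above: a component of n = 4 memories with 4 internal relationships has max_edges = 4 * 3 / 2 = 6, so the component survives the default min_density of 0.3:

n, actual_edges = 4, 4
max_edges = n * (n - 1) / 2      # 6.0 possible undirected edges
print(actual_edges / max_edges)  # 0.666... >= 0.3, so the cluster is kept
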
    def find_bridge_nodes(
        self,
        memories: List[Memory],
        relationships: List[Relationship],
        clusters: Optional[List[MemoryCluster]] = None
    ) -> List[BridgeNode]:
        """
        Identify memories that bridge different clusters.

        Args:
            memories: All memory nodes
            relationships: All relationships
            clusters: Pre-computed clusters (will detect if not provided)

        Returns:
            List of BridgeNode objects
        """
        # Detect clusters if not provided
        if clusters is None:
            clusters = self.detect_clusters(memories, relationships)

        if len(clusters) < 2:
            return []  # Need at least 2 clusters for bridges

        # Build cluster membership map
        memory_to_cluster: Dict[str, int] = {}
        for i, cluster in enumerate(clusters):
            for memory in cluster.memories:
                memory_to_cluster[memory.id] = i

        # Find bridge nodes
        bridge_nodes: List[BridgeNode] = []

        # Check each relationship for cross-cluster connections
        cross_cluster_connections: Dict[str, Set[int]] = defaultdict(set)

        for rel in relationships:
            from_cluster = memory_to_cluster.get(rel.from_memory_id)
            to_cluster = memory_to_cluster.get(rel.to_memory_id)

            # Skip if either node not in a cluster
            if from_cluster is None or to_cluster is None:
                continue

            # Found cross-cluster relationship
            if from_cluster != to_cluster:
                cross_cluster_connections[rel.from_memory_id].add(to_cluster)
                cross_cluster_connections[rel.to_memory_id].add(from_cluster)

        # Create bridge nodes
        memory_map = {m.id: m for m in memories}

        for memory_id, connected_clusters in cross_cluster_connections.items():
            if len(connected_clusters) >= 2:
                # Calculate bridge strength based on number of connections
                # and relationship strengths
                relevant_rels = [
                    r for r in relationships
                    if r.from_memory_id == memory_id or r.to_memory_id == memory_id
                ]

                avg_strength = (
                    sum(r.properties.strength for r in relevant_rels) / len(relevant_rels)
                    if relevant_rels else 0.5
                )

                # Bridge strength: combination of connectivity and relationship strength
                bridge_strength = min(1.0, (len(connected_clusters) / 5.0) * avg_strength)

                bridge_nodes.append(BridgeNode(
                    memory=memory_map[memory_id],
                    connected_clusters=sorted(list(connected_clusters)),
                    bridge_strength=bridge_strength
                ))

        # Sort by bridge strength descending
        bridge_nodes.sort(key=lambda b: b.bridge_strength, reverse=True)

        return bridge_nodes

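Working the bridge_strength formula above through once: a memory touching 3 distinct clusters whose incident relationships average strength 0.8 scores min(1.0, (3 / 5.0) * 0.8) = 0.48; the min() cap keeps heavily connected nodes from exceeding 1.0:

connected_clusters, avg_strength = 3, 0.8
bridge_strength = min(1.0, (connected_clusters / 5.0) * avg_strength)
print(round(bridge_strength, 2))  # 0.48
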
    def calculate_graph_metrics(
        self,
        memories: List[Memory],
        relationships: List[Relationship]
    ) -> Dict[str, Any]:
        """
        Calculate comprehensive graph metrics.

        Args:
            memories: All memory nodes
            relationships: All relationships

        Returns:
            Dictionary with graph metrics:
            - node_count: Total number of memory nodes
            - edge_count: Total number of relationships
            - avg_degree: Average number of connections per node
            - density: Graph density (0.0 to 1.0)
            - avg_strength: Average relationship strength
            - category_distribution: Count of relationships by category
            - type_distribution: Count of relationships by type
        """
        n_nodes = len(memories)
        n_edges = len(relationships)

        if n_nodes == 0:
            return {
                "node_count": 0,
                "edge_count": 0,
                "avg_degree": 0.0,
                "density": 0.0,
                "avg_strength": 0.0,
                "category_distribution": {},
                "type_distribution": {}
            }

        # Calculate degree distribution
        degree_map: Dict[str, int] = defaultdict(int)
        for rel in relationships:
            degree_map[rel.from_memory_id] += 1
            degree_map[rel.to_memory_id] += 1

        avg_degree = sum(degree_map.values()) / n_nodes if n_nodes > 0 else 0.0

        # Calculate density (for undirected graph)
        max_edges = n_nodes * (n_nodes - 1) / 2
        density = n_edges / max_edges if max_edges > 0 else 0.0

        # Calculate average strength
        avg_strength = (
            sum(r.properties.strength for r in relationships) / n_edges
            if n_edges > 0 else 0.0
        )

        # Category distribution
        category_dist: Dict[str, int] = defaultdict(int)
        for rel in relationships:
            cat = self.rel_manager.get_relationship_category(rel.type)
            category_dist[cat.value] += 1

        # Type distribution
        type_dist: Dict[str, int] = defaultdict(int)
        for rel in relationships:
            type_dist[rel.type.value] += 1

        return {
            "node_count": n_nodes,
            "edge_count": n_edges,
            "avg_degree": avg_degree,
            "density": density,
            "avg_strength": avg_strength,
            "category_distribution": dict(category_dist),
            "type_distribution": dict(type_dist)
        }


# Singleton instance for easy access
graph_analyzer = GraphAnalyzer()
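A quick smoke test of the metrics entry point through that singleton, assuming the wheel is installed; an empty graph takes the n_nodes == 0 guard and returns the zeroed dict:

from memorygraph.graph_analytics import graph_analyzer

metrics = graph_analyzer.calculate_graph_metrics([], [])
print(metrics["node_count"], metrics["density"])  # 0 0.0
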
memorygraph/integration/__init__.py
@@ -0,0 +1,69 @@
"""
Claude Code Integration module for automatic context capture and project awareness.

This module provides deep integration with Claude Code development workflows:
- Development context capture (tasks, commands, errors)
- Project-aware memory (codebase analysis, file tracking)
- Workflow memory tools (tracking, suggestions, optimization)
"""

from .context_capture import (
    TaskContext,
    CommandExecution,
    ErrorPattern,
    capture_task_context,
    capture_command_execution,
    analyze_error_patterns,
    track_solution_effectiveness,
)

from .project_analysis import (
    ProjectInfo,
    CodebaseInfo,
    FileChange,
    Pattern,
    detect_project,
    analyze_codebase,
    track_file_changes,
    identify_code_patterns,
)

from .workflow_tracking import (
    WorkflowAction,
    WorkflowSuggestion,
    Recommendation,
    SessionState,
    track_workflow,
    suggest_workflow,
    optimize_workflow,
    get_session_state,
)

__all__ = [
    # Context Capture
    "TaskContext",
    "CommandExecution",
    "ErrorPattern",
    "capture_task_context",
    "capture_command_execution",
    "analyze_error_patterns",
    "track_solution_effectiveness",
    # Project Analysis
    "ProjectInfo",
    "CodebaseInfo",
    "FileChange",
    "Pattern",
    "detect_project",
    "analyze_codebase",
    "track_file_changes",
    "identify_code_patterns",
    # Workflow Tracking
    "WorkflowAction",
    "WorkflowSuggestion",
    "Recommendation",
    "SessionState",
    "track_workflow",
    "suggest_workflow",
    "optimize_workflow",
    "get_session_state",
]
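The effect of these re-exports is that callers import the helpers from the subpackage root rather than from individual submodules; the call signatures live in the submodules listed in the file table above. A hypothetical import sketch:

# One symbol from each submodule re-exported above:
from memorygraph.integration import (
    capture_task_context,  # context_capture
    detect_project,        # project_analysis
    track_workflow,        # workflow_tracking
)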