claude-self-reflect 2.7.2 → 2.7.4

This diff compares publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registries.
@@ -42,6 +42,7 @@ services:
   environment:
   - QDRANT_URL=http://qdrant:6333
   - STATE_FILE=/config/imported-files.json
+  - LOGS_DIR=/logs
   - OPENAI_API_KEY=${OPENAI_API_KEY:-}
   - VOYAGE_API_KEY=${VOYAGE_API_KEY:-}
   - VOYAGE_KEY=${VOYAGE_KEY:-}
@@ -1,6 +1,6 @@
 [project]
 name = "claude-self-reflect-mcp"
-version = "2.7.0"
+version = "2.7.3"
 description = "MCP server for Claude self-reflection with memory decay"
 # readme = "README.md"
 requires-python = ">=3.10"
@@ -381,13 +381,19 @@ async def reflect_on_past(
             if c.startswith(f"conv_{project_hash}_")
         ]
 
+        # Always include reflections collections when searching a specific project
+        reflections_collections = [c for c in all_collections if c.startswith('reflections')]
+
         if not project_collections:
            # Fall back to searching all collections but filtering by project metadata
            await ctx.debug(f"No collections found for project {target_project}, will filter by metadata")
            collections_to_search = all_collections
         else:
            await ctx.debug(f"Found {len(project_collections)} collections for project {target_project}")
-           collections_to_search = project_collections
+           # Include both project collections and reflections
+           collections_to_search = project_collections + reflections_collections
+           # Remove duplicates
+           collections_to_search = list(set(collections_to_search))
     else:
         collections_to_search = all_collections
 
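The hunk above changes collection selection so that a project-scoped search also picks up the global reflections collections and then de-duplicates the combined list. A minimal standalone sketch of that selection logic (hypothetical helper name, not the package's code):

    # Sketch: choose which Qdrant collections to search for one project,
    # always adding any collection whose name starts with "reflections".
    def select_collections(all_collections: list[str], project_hash: str) -> list[str]:
        project_collections = [c for c in all_collections if c.startswith(f"conv_{project_hash}_")]
        reflections_collections = [c for c in all_collections if c.startswith("reflections")]
        if not project_collections:
            return all_collections  # fall back to metadata filtering across everything
        return list(set(project_collections + reflections_collections))  # de-duplicate

    # e.g. the project's own collection plus reflections_local; other projects excluded
    print(select_collections(["conv_ab12cd34_local", "conv_ffff0000_local", "reflections_local"], "ab12cd34"))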
@@ -524,12 +530,26 @@ async def reflect_on_past(
                 # Check project filter if we're searching all collections but want specific project
                 point_project = point.payload.get('project', collection_name.replace('conv_', '').replace('_voyage', '').replace('_local', ''))
 
+                # Special handling for reflections - they're global by default but can have project context
+                is_reflection_collection = collection_name.startswith('reflections')
+
                 # Handle project matching - check if the target project name appears at the end of the stored project path
-                if target_project != 'all' and not project_collections:
+                if target_project != 'all' and not project_collections and not is_reflection_collection:
                     # The stored project name is like "-Users-username-projects-ShopifyMCPMockShop"
                     # We want to match just "ShopifyMCPMockShop"
                     if not point_project.endswith(f"-{target_project}") and point_project != target_project:
                         continue # Skip results from other projects
+
+                # For reflections with project context, optionally filter by project
+                if is_reflection_collection and target_project != 'all' and 'project' in point.payload:
+                    # Only filter if the reflection has project metadata
+                    reflection_project = point.payload.get('project', '')
+                    if reflection_project and not (
+                        reflection_project == target_project or
+                        reflection_project.endswith(f"/{target_project}") or
+                        reflection_project.endswith(f"-{target_project}")
+                    ):
+                        continue # Skip reflections from other projects
 
                 all_results.append(SearchResult(
                     id=str(point.id),
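This hunk (and the two near-identical hunks that follow for the other search paths) adds a reflection-specific project filter: reflections with no project metadata stay global, while tagged reflections are kept only when the stored project matches the target exactly or ends with "/<target>" or "-<target>". A minimal sketch of that matching rule (hypothetical helper, not from the package):

    # Sketch: should a reflection point be kept for a given target project?
    def keep_reflection(payload: dict, target_project: str) -> bool:
        if target_project == 'all' or 'project' not in payload:
            return True  # untagged reflections remain global
        reflection_project = payload.get('project', '')
        return (not reflection_project
                or reflection_project == target_project
                or reflection_project.endswith(f"/{target_project}")
                or reflection_project.endswith(f"-{target_project}"))

    assert keep_reflection({'project': 'Connectiva-App/connectiva-ai'}, 'connectiva-ai')
    assert not keep_reflection({'project': 'other-project'}, 'connectiva-ai')
    assert keep_reflection({}, 'connectiva-ai')  # no project metadata -> kept everywhere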
@@ -604,12 +624,26 @@ async def reflect_on_past(
                 # Check project filter if we're searching all collections but want specific project
                 point_project = point.payload.get('project', collection_name.replace('conv_', '').replace('_voyage', '').replace('_local', ''))
 
+                # Special handling for reflections - they're global by default but can have project context
+                is_reflection_collection = collection_name.startswith('reflections')
+
                 # Handle project matching - check if the target project name appears at the end of the stored project path
-                if target_project != 'all' and not project_collections:
+                if target_project != 'all' and not project_collections and not is_reflection_collection:
                     # The stored project name is like "-Users-username-projects-ShopifyMCPMockShop"
                     # We want to match just "ShopifyMCPMockShop"
                     if not point_project.endswith(f"-{target_project}") and point_project != target_project:
                         continue # Skip results from other projects
+
+                # For reflections with project context, optionally filter by project
+                if is_reflection_collection and target_project != 'all' and 'project' in point.payload:
+                    # Only filter if the reflection has project metadata
+                    reflection_project = point.payload.get('project', '')
+                    if reflection_project and not (
+                        reflection_project == target_project or
+                        reflection_project.endswith(f"/{target_project}") or
+                        reflection_project.endswith(f"-{target_project}")
+                    ):
+                        continue # Skip reflections from other projects
 
                 all_results.append(SearchResult(
                     id=str(point.id),
@@ -641,13 +675,27 @@ async def reflect_on_past(
                 # Check project filter if we're searching all collections but want specific project
                 point_project = point.payload.get('project', collection_name.replace('conv_', '').replace('_voyage', '').replace('_local', ''))
 
+                # Special handling for reflections - they're global by default but can have project context
+                is_reflection_collection = collection_name.startswith('reflections')
+
                 # Handle project matching - check if the target project name appears at the end of the stored project path
-                if target_project != 'all' and not project_collections:
+                if target_project != 'all' and not project_collections and not is_reflection_collection:
                     # The stored project name is like "-Users-username-projects-ShopifyMCPMockShop"
                     # We want to match just "ShopifyMCPMockShop"
                     if not point_project.endswith(f"-{target_project}") and point_project != target_project:
                         continue # Skip results from other projects
 
+                # For reflections with project context, optionally filter by project
+                if is_reflection_collection and target_project != 'all' and 'project' in point.payload:
+                    # Only filter if the reflection has project metadata
+                    reflection_project = point.payload.get('project', '')
+                    if reflection_project and not (
+                        reflection_project == target_project or
+                        reflection_project.endswith(f"/{target_project}") or
+                        reflection_project.endswith(f"-{target_project}")
+                    ):
+                        continue # Skip reflections from other projects
+
                 # BOOST V2 CHUNKS: Apply score boost for v2 chunks (better quality)
                 original_score = point.score
                 final_score = original_score
@@ -932,6 +980,25 @@ async def store_reflection(
     # Create reflections collection name
     collection_name = f"reflections{get_collection_suffix()}"
 
+    # Get current project context
+    cwd = os.environ.get('MCP_CLIENT_CWD', os.getcwd())
+    project_path = Path(cwd)
+
+    # Extract project name from path
+    project_name = None
+    path_parts = project_path.parts
+    if 'projects' in path_parts:
+        idx = path_parts.index('projects')
+        if idx + 1 < len(path_parts):
+            # Get all parts after 'projects' to form the project name
+            # This handles cases like projects/Connectiva-App/connectiva-ai
+            project_parts = path_parts[idx + 1:]
+            project_name = '/'.join(project_parts)
+
+    # If no project detected, use the last directory name
+    if not project_name:
+        project_name = project_path.name
+
     # Ensure collection exists
     try:
         collection_info = await qdrant_client.get_collection(collection_name)
@@ -949,7 +1016,7 @@
     # Generate embedding for the reflection
     embedding = await generate_embedding(content)
 
-    # Create point with metadata
+    # Create point with metadata including project context
     point_id = datetime.now().timestamp()
     point = PointStruct(
         id=int(point_id),
@@ -959,7 +1026,9 @@
             "tags": tags,
             "timestamp": datetime.now().isoformat(),
             "type": "reflection",
-            "role": "user_reflection"
+            "role": "user_reflection",
+            "project": project_name, # Add project context
+            "project_path": str(project_path) # Add full path for reference
         }
     )
 
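Together, the store_reflection hunks above tag each stored reflection with the project inferred from the client's working directory (MCP_CLIENT_CWD, falling back to os.getcwd()): everything after a "projects" path component, or the last directory name when no such component exists. A small sketch of that derivation under the same assumptions (POSIX-style paths):

    # Sketch of the project-name derivation used above.
    import os
    from pathlib import Path

    def project_name_from_cwd(cwd: str) -> str:
        parts = Path(cwd).parts
        if "projects" in parts:
            idx = parts.index("projects")
            if idx + 1 < len(parts):
                # everything after 'projects', e.g. Connectiva-App/connectiva-ai
                return "/".join(parts[idx + 1:])
        return Path(cwd).name  # fallback: last directory name

    cwd = os.environ.get("MCP_CLIENT_CWD", os.getcwd())
    print(project_name_from_cwd(cwd))
    print(project_name_from_cwd("/Users/me/projects/Connectiva-App/connectiva-ai"))  # Connectiva-App/connectiva-ai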
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "claude-self-reflect",
-  "version": "2.7.2",
+  "version": "2.7.4",
   "description": "Give Claude perfect memory of all your conversations - Installation wizard for Python MCP server",
   "keywords": [
     "claude",
@@ -24,7 +24,8 @@ from qdrant_client.models import Filter, FieldCondition, MatchValue
 # Configuration
 QDRANT_URL = os.getenv("QDRANT_URL", "http://localhost:6333")
 LOGS_DIR = os.getenv("LOGS_DIR", os.path.expanduser("~/.claude/projects"))
-STATE_FILE = os.getenv("STATE_FILE", "./config/delta-update-state.json")
+# Use /config path if running in Docker, otherwise use ./config
+STATE_FILE = os.getenv("STATE_FILE", "/config/delta-update-state.json" if os.path.exists("/config") else "./config/delta-update-state.json")
 PREFER_LOCAL_EMBEDDINGS = os.getenv("PREFER_LOCAL_EMBEDDINGS", "true").lower() == "true"
 DRY_RUN = os.getenv("DRY_RUN", "false").lower() == "true"
 DAYS_TO_UPDATE = int(os.getenv("DAYS_TO_UPDATE", "7"))
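The STATE_FILE change makes the default path environment-dependent: /config/... when a /config mount exists (the Docker case), ./config/... for a local checkout, with the STATE_FILE environment variable still taking precedence. A minimal sketch of the same pattern in isolation:

    # Sketch: env override with a Docker-aware default path.
    import os

    def resolve_state_file() -> str:
        default = ("/config/delta-update-state.json"
                   if os.path.exists("/config")              # mounted config volume => Docker
                   else "./config/delta-update-state.json")  # local checkout
        return os.getenv("STATE_FILE", default)

    print(resolve_state_file())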
@@ -432,7 +433,7 @@ async def main_async():
     logger.info("=== Delta Update Complete ===")
     logger.info(f"Successfully updated: {success_count} conversations")
     logger.info(f"Failed: {failed_count} conversations")
-    logger.info(f"Total conversations in state: {len(state['updated_conversations'])}")
+    logger.info(f"Total conversations in state: {len(state.get('updated_conversations', {}))}")
 
 def main():
     """Entry point."""
@@ -57,7 +57,9 @@ else:
 
 def normalize_project_name(project_name: str) -> str:
     """Normalize project name for consistency."""
-    return project_name.replace("-Users-ramakrishnanannaswamy-projects-", "").replace("-", "_").lower()
+    # For compatibility with delta-metadata-update, just use the project name as-is
+    # This ensures collection names match between import and delta update scripts
+    return project_name
 
 def get_collection_name(project_path: Path) -> str:
     """Generate collection name from project path."""
@@ -0,0 +1,171 @@
+#!/usr/bin/env python3
+"""
+Import old format JSONL files from Claude conversations.
+These files have a different structure with type/summary fields instead of messages.
+"""
+
+import json
+import sys
+from pathlib import Path
+import hashlib
+import uuid
+from datetime import datetime
+from qdrant_client import QdrantClient
+from qdrant_client.models import Distance, VectorParams, PointStruct
+from fastembed import TextEmbedding
+import logging
+
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+logger = logging.getLogger(__name__)
+
+def import_old_format_project(project_dir: Path, project_path: str = None):
+    """Import old format JSONL files from a project directory."""
+
+    # Initialize
+    client = QdrantClient(url='http://localhost:6333')
+    model = TextEmbedding(model_name='sentence-transformers/all-MiniLM-L6-v2', max_length=512)
+
+    # Determine project path from directory name if not provided
+    if not project_path:
+        # Convert -Users-username-projects-projectname back to path
+        dir_name = project_dir.name
+        project_path = '/' + dir_name.strip('-').replace('-', '/')
+
+    # Create collection name
+    project_hash = hashlib.md5(project_path.encode()).hexdigest()[:8]
+    collection_name = f'conv_{project_hash}_local'
+
+    logger.info(f'Project: {project_path}')
+    logger.info(f'Collection: {collection_name}')
+
+    # Create collection if needed
+    try:
+        client.get_collection(collection_name)
+        logger.info('Collection exists')
+    except:
+        client.create_collection(
+            collection_name=collection_name,
+            vectors_config=VectorParams(size=384, distance=Distance.COSINE)
+        )
+        logger.info('Created collection')
+
+    # Process all JSONL files
+    jsonl_files = list(project_dir.glob('*.jsonl'))
+    logger.info(f'Found {len(jsonl_files)} files to import')
+
+    total_points = 0
+    for file_path in jsonl_files:
+        logger.info(f'Processing {file_path.name}...')
+        points_batch = []
+
+        with open(file_path, 'r', encoding='utf-8') as f:
+            conversation_text = []
+            file_timestamp = file_path.stat().st_mtime
+
+            for line_num, line in enumerate(f, 1):
+                try:
+                    data = json.loads(line)
+                    msg_type = data.get('type', '')
+
+                    # Extract text content based on type
+                    content = None
+                    if msg_type == 'summary' and data.get('summary'):
+                        content = f"[Conversation Summary] {data['summary']}"
+                    elif msg_type == 'user' and data.get('summary'):
+                        content = f"User: {data['summary']}"
+                    elif msg_type == 'assistant' and data.get('summary'):
+                        content = f"Assistant: {data['summary']}"
+                    elif msg_type in ['user', 'assistant']:
+                        # Try to get content from other fields
+                        if 'content' in data:
+                            content = f"{msg_type.title()}: {data['content']}"
+                        elif 'text' in data:
+                            content = f"{msg_type.title()}: {data['text']}"
+
+                    if content:
+                        conversation_text.append(content)
+
+                    # Create chunks every 5 messages or at end
+                    if len(conversation_text) >= 5:
+                        chunk_text = '\n\n'.join(conversation_text)
+                        if chunk_text.strip():
+                            # Generate embedding
+                            embedding = list(model.embed([chunk_text[:2000]]))[0] # Limit to 2000 chars
+
+                            point = PointStruct(
+                                id=str(uuid.uuid4()),
+                                vector=embedding.tolist(),
+                                payload={
+                                    'content': chunk_text[:1000], # Store first 1000 chars
+                                    'full_content': chunk_text[:4000], # Store more for context
+                                    'project_path': project_path,
+                                    'file_path': str(file_path),
+                                    'file_name': file_path.name,
+                                    'conversation_id': file_path.stem,
+                                    'chunk_index': len(points_batch),
+                                    'timestamp': file_timestamp,
+                                    'type': 'conversation_chunk'
+                                }
+                            )
+                            points_batch.append(point)
+                            conversation_text = []
+
+                except json.JSONDecodeError:
+                    logger.warning(f'Invalid JSON at line {line_num} in {file_path.name}')
+                except Exception as e:
+                    logger.warning(f'Error processing line {line_num}: {e}')
+
+            # Handle remaining text
+            if conversation_text:
+                chunk_text = '\n\n'.join(conversation_text)
+                if chunk_text.strip():
+                    embedding = list(model.embed([chunk_text[:2000]]))[0]
+
+                    point = PointStruct(
+                        id=str(uuid.uuid4()),
+                        vector=embedding.tolist(),
+                        payload={
+                            'content': chunk_text[:1000],
+                            'full_content': chunk_text[:4000],
+                            'project_path': project_path,
+                            'file_path': str(file_path),
+                            'file_name': file_path.name,
+                            'conversation_id': file_path.stem,
+                            'chunk_index': len(points_batch),
+                            'timestamp': file_timestamp,
+                            'type': 'conversation_chunk'
+                        }
+                    )
+                    points_batch.append(point)
+
+        # Upload batch
+        if points_batch:
+            client.upsert(collection_name=collection_name, points=points_batch)
+            logger.info(f' Uploaded {len(points_batch)} chunks from {file_path.name}')
+            total_points += len(points_batch)
+
+    # Verify
+    info = client.get_collection(collection_name)
+    logger.info(f'\nImport complete!')
+    logger.info(f'Collection {collection_name} now has {info.points_count} points')
+    logger.info(f'Added {total_points} new points in this import')
+
+    return collection_name, total_points
+
+def main():
+    if len(sys.argv) < 2:
+        print("Usage: python import-old-format.py <project-directory> [project-path]")
+        print("Example: python import-old-format.py ~/.claude/projects/-Users-me-projects-myapp /Users/me/projects/myapp")
+        sys.exit(1)
+
+    project_dir = Path(sys.argv[1]).expanduser()
+    project_path = sys.argv[2] if len(sys.argv) > 2 else None
+
+    if not project_dir.exists():
+        print(f"Error: Directory {project_dir} does not exist")
+        sys.exit(1)
+
+    import_old_format_project(project_dir, project_path)
+
+if __name__ == "__main__":
+    main()
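The new script is driven from the command line as shown in its usage string, but the import function can also be called directly. A usage sketch, assuming Qdrant is reachable at http://localhost:6333 and using the illustrative paths from the script's own example:

    # From the command line:
    #   python import-old-format.py ~/.claude/projects/-Users-me-projects-myapp /Users/me/projects/myapp
    # Or programmatically, with import_old_format_project taken from the script above:
    from pathlib import Path

    collection, added = import_old_format_project(
        Path("~/.claude/projects/-Users-me-projects-myapp").expanduser(),
        "/Users/me/projects/myapp",
    )
    print(f"{added} chunks imported into {collection}")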