claude-self-reflect 2.4.8 → 2.4.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -770,4 +770,48 @@ After importing:
  4. **Restart MCP After Import**: Ensures new collections are recognized
  5. **Verify with Search**: Test that new content is searchable

+ ## Quick Import for Current Project (NEW in v2.4.8)
+
+ For rapid updates when working on a single project, use the optimized quick import:
+
+ ### Quick Import Script
+ ```bash
+ # Import only recent conversations (last 2 hours by default)
+ cd /path/to/your/project
+ source ~/claude-self-reflect/venv/bin/activate
+ python ~/claude-self-reflect/scripts/import-latest.py
+
+ # Customize time window
+ export IMPORT_HOURS_BACK=4 # Import last 4 hours
+ python ~/claude-self-reflect/scripts/import-latest.py
+ ```
+
+ ### PreCompact Hook Integration
+ To automatically update conversations before compacting:
+
+ ```bash
+ # Install the hook (one-time setup)
+ cp ~/claude-self-reflect/scripts/precompact-hook.sh ~/.claude/hooks/precompact
+ # Or source it from your existing precompact hook:
+ echo "source ~/claude-self-reflect/scripts/precompact-hook.sh" >> ~/.claude/hooks/precompact
+ ```
+
+ ### Performance Expectations
+ - **Full import**: 2-7 minutes (all projects, all history)
+ - **Quick import**: 30-60 seconds (current project, recent files only)
+ - **Target**: <10 seconds (future optimization)
+
+ ### When to Use Quick Import
+ - Before starting a new Claude session
+ - After significant conversation progress
+ - Via PreCompact hook (automatic)
+ - When recent conversations aren't in search results
+
+ ### Troubleshooting Quick Import
+ If quick import fails:
+ 1. Ensure you're in a project directory with Claude logs
+ 2. Check virtual environment is activated
+ 3. Verify project has a collection: `python scripts/check-collections.py`
+ 4. For first-time projects, run full import once
+
  Remember: You're not just a search tool - you're a memory augmentation system that helps maintain continuity, prevent repeated work, and leverage collective knowledge across all Claude conversations.
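The quick-import workflow above filters by recency via `IMPORT_HOURS_BACK`. The packaged `import-latest.py` is the authoritative implementation; the following is only a rough sketch of that kind of time-window filter, with the log location (`~/.claude/projects`) and the selection logic assumed rather than taken from the script:

```python
import os
import time
from pathlib import Path

# Assumed log location; the real script may resolve the current project's logs differently.
LOG_ROOT = Path.home() / ".claude" / "projects"

# Same default window the docs describe: last 2 hours unless IMPORT_HOURS_BACK overrides it.
HOURS_BACK = float(os.getenv("IMPORT_HOURS_BACK", "2"))
cutoff = time.time() - HOURS_BACK * 3600

# Keep only conversation files modified inside the window.
recent = [p for p in LOG_ROOT.rglob("*.jsonl") if p.stat().st_mtime >= cutoff]
print(f"{len(recent)} conversation file(s) modified in the last {HOURS_BACK:g} hour(s)")
```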
package/README.md CHANGED
@@ -26,6 +26,15 @@ Your conversations become searchable. Your decisions stay remembered. Your conte
  - **Node.js** 16+ (for the setup wizard)
  - **Claude Desktop** app

+ ## System Requirements
+
+ ### Memory
+ - **Docker Memory**: 2GB minimum (4GB recommended for initial setup)
+ - **First Import**: May take 2-7 minutes to process all conversations
+ - **Subsequent Imports**: <60 seconds (only processes new/changed files)
+
+ 💡 **First-Time User Note**: The initial import processes your entire conversation history. This is a one-time operation. After that, the system only imports new conversations, making it much faster and using less memory.
+
  ## Install

  ### Quick Start (Local Mode - Default)
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "claude-self-reflect",
- "version": "2.4.8",
+ "version": "2.4.10",
  "description": "Give Claude perfect memory of all your conversations - Installation wizard for Python MCP server",
  "keywords": [
  "claude",
@@ -8,6 +8,7 @@ import sys
  import json
  import glob
  import hashlib
+ import gc
  from datetime import datetime
  from typing import List, Dict, Any
  import logging
@@ -29,7 +30,7 @@ from tenacity import (
  QDRANT_URL = os.getenv("QDRANT_URL", "http://localhost:6333")
  LOGS_DIR = os.getenv("LOGS_DIR", "/logs")
  STATE_FILE = os.getenv("STATE_FILE", "/config/imported-files.json")
- BATCH_SIZE = int(os.getenv("BATCH_SIZE", "100"))
+ BATCH_SIZE = int(os.getenv("BATCH_SIZE", "10"))  # Reduced from 100 to prevent OOM
  PREFER_LOCAL_EMBEDDINGS = os.getenv("PREFER_LOCAL_EMBEDDINGS", "false").lower() == "true"
  VOYAGE_API_KEY = os.getenv("VOYAGE_KEY")

@@ -37,6 +38,60 @@ VOYAGE_API_KEY = os.getenv("VOYAGE_KEY")
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
  logger = logging.getLogger(__name__)

+ # State management functions
+ def load_state():
+     """Load the import state from file."""
+     if os.path.exists(STATE_FILE):
+         try:
+             with open(STATE_FILE, 'r') as f:
+                 state = json.load(f)
+             # Ensure the expected structure exists
+             if "imported_files" not in state:
+                 state["imported_files"] = {}
+             return state
+         except Exception as e:
+             logger.warning(f"Failed to load state file: {e}")
+     return {"imported_files": {}}
+
+ def save_state(state):
+     """Save the import state to file."""
+     try:
+         # Ensure directory exists
+         os.makedirs(os.path.dirname(STATE_FILE), exist_ok=True)
+         # Write atomically by using a temp file
+         temp_file = STATE_FILE + ".tmp"
+         with open(temp_file, 'w') as f:
+             json.dump(state, f, indent=2)
+         os.replace(temp_file, STATE_FILE)
+         logger.debug(f"Saved state with {len(state['imported_files'])} files")
+     except Exception as e:
+         logger.error(f"Failed to save state file: {e}")
+
+ def should_import_file(file_path, state):
+     """Check if a file should be imported based on modification time."""
+     str_path = str(file_path)
+     file_mtime = os.path.getmtime(file_path)
+
+     if str_path in state["imported_files"]:
+         last_imported = state["imported_files"][str_path].get("last_imported", 0)
+         last_modified = state["imported_files"][str_path].get("last_modified", 0)
+
+         # Skip if file hasn't been modified since last import
+         if file_mtime <= last_modified and last_imported > 0:
+             logger.info(f"Skipping unchanged file: {file_path.name}")
+             return False
+
+     return True
+
+ def update_file_state(file_path, state, chunks_imported):
+     """Update the state for an imported file."""
+     str_path = str(file_path)
+     state["imported_files"][str_path] = {
+         "last_modified": os.path.getmtime(file_path),
+         "last_imported": datetime.now().timestamp(),
+         "chunks_imported": chunks_imported
+     }
+
  # Initialize embedding provider
  embedding_provider = None
  embedding_dimension = None
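Based on `update_file_state()` and `save_state()` above, the state file (`STATE_FILE`, `/config/imported-files.json` by default) takes roughly the shape below; the path, timestamps, and chunk count are illustrative only:

```python
import json
import time

# One entry per imported .jsonl file, keyed by its path, as written by
# update_file_state() and persisted atomically by save_state().
example_state = {
    "imported_files": {
        "/logs/example-project/session.jsonl": {  # illustrative path
            "last_modified": 1722470400.0,        # os.path.getmtime() of the file
            "last_imported": time.time(),         # datetime.now().timestamp() at import
            "chunks_imported": 42                 # illustrative count
        }
    }
}
print(json.dumps(example_state, indent=2))
```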
@@ -120,7 +175,7 @@ def chunk_conversation(messages: List[Dict[str, Any]], chunk_size: int = 10) ->

      return chunks

- def import_project(project_path: Path, collection_name: str) -> int:
+ def import_project(project_path: Path, collection_name: str, state: dict) -> int:
      """Import all conversations from a project."""
      jsonl_files = list(project_path.glob("*.jsonl"))

@@ -143,6 +198,10 @@ def import_project(project_path: Path, collection_name: str) -> int:
      total_chunks = 0

      for jsonl_file in jsonl_files:
+         # Check if file should be imported
+         if not should_import_file(jsonl_file, state):
+             continue
+
          logger.info(f"Processing file: {jsonl_file.name}")
          try:
              # Read JSONL file and extract messages
@@ -241,7 +300,17 @@ def import_project(project_path: Path, collection_name: str) -> int:

              total_chunks += len(points)

-             logger.info(f"Imported {len(chunks)} chunks from {jsonl_file.name}")
+             file_chunks = len(chunks)
+             logger.info(f"Imported {file_chunks} chunks from {jsonl_file.name}")
+
+             # Update state for this file
+             update_file_state(jsonl_file, state, file_chunks)
+
+             # Save state after each file to prevent loss on OOM
+             save_state(state)
+
+             # Force garbage collection to free memory
+             gc.collect()

          except Exception as e:
              logger.error(f"Failed to import {jsonl_file}: {e}")
@@ -258,6 +327,10 @@ def main():
          logger.error(f"Logs directory not found: {LOGS_DIR}")
          return

+     # Load existing state
+     state = load_state()
+     logger.info(f"Loaded state with {len(state['imported_files'])} previously imported files")
+
      # Find all project directories
      project_dirs = [d for d in logs_path.iterdir() if d.is_dir()]

@@ -274,9 +347,15 @@ def main():
          collection_name = f"conv_{hashlib.md5(project_dir.name.encode()).hexdigest()[:8]}{collection_suffix}"

          logger.info(f"Importing project: {project_dir.name} -> {collection_name}")
-         chunks = import_project(project_dir, collection_name)
+         chunks = import_project(project_dir, collection_name, state)
          total_imported += chunks
          logger.info(f"Imported {chunks} chunks from {project_dir.name}")
+
+         # Save state after each project to avoid losing progress
+         save_state(state)
+
+     # Final save (redundant but ensures state is saved)
+     save_state(state)

      logger.info(f"Import complete! Total chunks imported: {total_imported}")
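For reference, the `collection_name` expression in the final hunk maps each project directory to a collection via an MD5 prefix. A self-contained way to reproduce the naming (the directory name and the `_local` suffix below are assumed placeholders; `collection_suffix` is defined elsewhere in the script and is not shown in this diff):

```python
import hashlib

# Illustrative inputs: real project directory names come from LOGS_DIR,
# and collection_suffix is set elsewhere in the importer.
project_dir_name = "example-project"
collection_suffix = "_local"  # assumed value, not confirmed by this diff

collection_name = f"conv_{hashlib.md5(project_dir_name.encode()).hexdigest()[:8]}{collection_suffix}"
print(collection_name)  # conv_ + first 8 hex chars of the MD5 + suffix
```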