claude-self-reflect 1.3.5 → 2.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/.claude/agents/README.md +138 -0
  2. package/.claude/agents/docker-orchestrator.md +264 -0
  3. package/.claude/agents/documentation-writer.md +262 -0
  4. package/.claude/agents/import-debugger.md +203 -0
  5. package/.claude/agents/mcp-integration.md +286 -0
  6. package/.claude/agents/open-source-maintainer.md +150 -0
  7. package/.claude/agents/performance-tuner.md +276 -0
  8. package/.claude/agents/qdrant-specialist.md +138 -0
  9. package/.claude/agents/reflection-specialist.md +361 -0
  10. package/.claude/agents/search-optimizer.md +307 -0
  11. package/LICENSE +21 -0
  12. package/README.md +128 -0
  13. package/installer/cli.js +122 -0
  14. package/installer/postinstall.js +13 -0
  15. package/installer/setup-wizard.js +204 -0
  16. package/mcp-server/pyproject.toml +27 -0
  17. package/mcp-server/run-mcp.sh +21 -0
  18. package/mcp-server/src/__init__.py +1 -0
  19. package/mcp-server/src/__main__.py +23 -0
  20. package/mcp-server/src/server.py +316 -0
  21. package/mcp-server/src/server_v2.py +240 -0
  22. package/package.json +12 -36
  23. package/scripts/import-conversations-isolated.py +311 -0
  24. package/scripts/import-conversations-voyage-streaming.py +377 -0
  25. package/scripts/import-conversations-voyage.py +428 -0
  26. package/scripts/import-conversations.py +240 -0
  27. package/scripts/import-current-conversation.py +38 -0
  28. package/scripts/import-live-conversation.py +152 -0
  29. package/scripts/import-openai-enhanced.py +867 -0
  30. package/scripts/import-recent-only.py +29 -0
  31. package/scripts/import-single-project.py +278 -0
  32. package/scripts/import-watcher.py +169 -0
  33. package/config/claude-desktop-config.json +0 -12
  34. package/dist/cli.d.ts +0 -3
  35. package/dist/cli.d.ts.map +0 -1
  36. package/dist/cli.js +0 -55
  37. package/dist/cli.js.map +0 -1
  38. package/dist/embeddings-gemini.d.ts +0 -76
  39. package/dist/embeddings-gemini.d.ts.map +0 -1
  40. package/dist/embeddings-gemini.js +0 -158
  41. package/dist/embeddings-gemini.js.map +0 -1
  42. package/dist/embeddings.d.ts +0 -67
  43. package/dist/embeddings.d.ts.map +0 -1
  44. package/dist/embeddings.js +0 -252
  45. package/dist/embeddings.js.map +0 -1
  46. package/dist/index.d.ts +0 -3
  47. package/dist/index.d.ts.map +0 -1
  48. package/dist/index.js +0 -439
  49. package/dist/index.js.map +0 -1
  50. package/dist/project-isolation.d.ts +0 -29
  51. package/dist/project-isolation.d.ts.map +0 -1
  52. package/dist/project-isolation.js +0 -78
  53. package/dist/project-isolation.js.map +0 -1
  54. package/scripts/install-agent.js +0 -70
  55. package/scripts/setup-wizard.js +0 -596
  56. package/src/cli.ts +0 -56
  57. package/src/embeddings-gemini.ts +0 -176
  58. package/src/embeddings.ts +0 -296
  59. package/src/index.ts +0 -513
  60. package/src/project-isolation.ts +0 -93
@@ -0,0 +1,29 @@
1
#!/usr/bin/env python3
"""Import only recent conversation files from the memento-stack project.

Finds JSONL conversation files modified within the last few days and
delegates the actual import to the sibling ``import-openai.py`` script.
"""

import os
import subprocess
import sys
from datetime import datetime, timedelta

# Importer this wrapper delegates to, resolved relative to this file.
IMPORT_SCRIPT = os.path.join(os.path.dirname(__file__), "import-openai.py")
PROJECT_PATH = os.path.expanduser(
    "~/.claude/projects/-Users-ramakrishnanannaswamy-memento-stack"
)


def find_recent_files(project_path, days=2):
    """Return names of ``.jsonl`` files in *project_path* modified in the last *days* days."""
    cutoff = datetime.now() - timedelta(days=days)
    recent = []
    for name in os.listdir(project_path):
        if name.endswith(".jsonl"):
            mtime = datetime.fromtimestamp(
                os.path.getmtime(os.path.join(project_path, name))
            )
            if mtime > cutoff:
                recent.append(name)
    return recent


def main():
    """Locate recent files and run the importer over the whole project."""
    if not os.path.isdir(PROJECT_PATH):
        print(f"Project path not found: {PROJECT_PATH}", file=sys.stderr)
        sys.exit(1)

    recent_files = find_recent_files(PROJECT_PATH)
    print(f"Found {len(recent_files)} recent files to import")

    # SECURITY FIX: the API key was previously hard-coded here and committed
    # to source control. It must come from the caller's environment; the
    # leaked key should be revoked.
    if not os.environ.get("VOYAGE_KEY"):
        print("VOYAGE_KEY environment variable is not set", file=sys.stderr)
        sys.exit(1)

    # Use an argument list (no shell) instead of os.system with an f-string:
    # avoids shell injection and breakage on paths containing spaces.
    # Import the whole project (the script will handle individual files).
    subprocess.run([sys.executable, IMPORT_SCRIPT, PROJECT_PATH], check=False)


if __name__ == "__main__":
    main()
@@ -0,0 +1,278 @@
1
#!/usr/bin/env python3
"""
Import a single project's conversations to Qdrant.
This script processes one project at a time to avoid memory issues.
"""

import glob
import hashlib
import json
import logging
import os
import sys
from datetime import datetime
from typing import Any, Dict, List

from qdrant_client import QdrantClient
from qdrant_client.models import Distance, PointStruct, VectorParams
from sentence_transformers import SentenceTransformer

# Runtime configuration (overridable via environment variables).
QDRANT_URL = os.getenv("QDRANT_URL", "http://localhost:6333")
STATE_FILE = os.getenv("STATE_FILE", "./config-isolated/imported-files.json")
EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
BATCH_SIZE = 50  # Reduced batch size for memory efficiency
CHUNK_SIZE = 5  # Messages per chunk

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
30
class SingleProjectImporter:
    """Imports one Claude project's JSONL conversation logs into Qdrant.

    Each project gets its own collection (named from an md5 hash of the
    project directory name). Files are processed one at a time and embeddings
    are generated in small batches to keep memory usage bounded; import state
    is checkpointed to STATE_FILE after every file so reruns skip completed
    work.
    """

    def __init__(self, project_path: str):
        """Initialize importer for a single project directory."""
        self.project_path = project_path
        self.project_name = os.path.basename(project_path)
        self.client = QdrantClient(url=QDRANT_URL)
        self.encoder = SentenceTransformer(EMBEDDING_MODEL)
        self.imported_files = self.load_state()

    def load_state(self) -> Dict[str, List[str]]:
        """Load import state.

        Returns:
            Mapping of project name -> list of already-imported file paths,
            or an empty dict if the state file is missing or unreadable.
        """
        if os.path.exists(STATE_FILE):
            try:
                with open(STATE_FILE, 'r') as f:
                    data = json.load(f)
                    return data.get('projects', {})
            except Exception as e:
                logger.error(f"Failed to load state: {e}")
        return {}

    def save_state(self):
        """Persist import state for this project, preserving other projects' entries."""
        os.makedirs(os.path.dirname(STATE_FILE), exist_ok=True)

        # Load existing state to preserve other projects
        existing = {}
        if os.path.exists(STATE_FILE):
            try:
                with open(STATE_FILE, 'r') as f:
                    existing = json.load(f)
            except (OSError, json.JSONDecodeError) as e:
                # Fixed: was a bare `except: pass`, which also swallows
                # KeyboardInterrupt/SystemExit. A corrupt or unreadable state
                # file is now logged and rebuilt from scratch.
                logger.warning(f"Could not read existing state file: {e}")

        # Update with current project
        if 'projects' not in existing:
            existing['projects'] = {}
        existing['projects'][self.project_name] = self.imported_files.get(self.project_name, [])
        existing['last_updated'] = datetime.now().isoformat()
        existing['mode'] = 'isolated'

        with open(STATE_FILE, 'w') as f:
            json.dump(existing, f, indent=2)

    def get_collection_name(self) -> str:
        """Return the stable, hash-derived collection name for this project."""
        project_hash = hashlib.md5(self.project_name.encode()).hexdigest()[:8]
        return f"conv_{project_hash}"

    def setup_collection(self) -> str:
        """Create this project's collection if absent; return its name."""
        collection_name = self.get_collection_name()

        collections = self.client.get_collections().collections
        exists = any(c.name == collection_name for c in collections)

        if not exists:
            logger.info(f"Creating collection: {collection_name}")
            self.client.create_collection(
                collection_name=collection_name,
                vectors_config=VectorParams(
                    size=384,  # all-MiniLM-L6-v2 dimension
                    distance=Distance.COSINE
                )
            )
        else:
            logger.info(f"Collection {collection_name} already exists")

        return collection_name

    def process_jsonl_file(self, file_path: str) -> List[Dict[str, Any]]:
        """Extract role/content messages from a JSONL conversation file.

        Malformed JSON lines are skipped silently; other per-line errors are
        logged at debug level. Dict-valued content is flattened to its 'text'
        field (or serialized to JSON).
        NOTE(review): list-valued content (multi-part messages) passes through
        unmodified — confirm downstream chunking handles non-string content.
        """
        messages = []
        try:
            with open(file_path, 'r') as f:
                for line_num, line in enumerate(f, 1):
                    try:
                        data = json.loads(line.strip())

                        if 'message' in data and data['message']:
                            msg = data['message']
                            if 'role' in msg and 'content' in msg:
                                content = msg['content']
                                if isinstance(content, dict):
                                    content = content.get('text', json.dumps(content))

                                messages.append({
                                    'role': msg['role'],
                                    'content': content,
                                    'file_path': file_path,
                                    'line_number': line_num,
                                    'timestamp': data.get('timestamp', datetime.now().isoformat())
                                })
                    except json.JSONDecodeError:
                        continue
                    except Exception as e:
                        logger.debug(f"Error processing line {line_num}: {e}")
        except Exception as e:
            logger.error(f"Failed to read file {file_path}: {e}")

        return messages

    def create_conversation_chunks(self, messages: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Group messages into CHUNK_SIZE-sized chunks with searchable text + metadata."""
        chunks = []

        for i in range(0, len(messages), CHUNK_SIZE):
            chunk_messages = messages[i:i + CHUNK_SIZE]

            # Truncate long message bodies so chunk text stays embedding-sized.
            conversation_text = "\n\n".join([
                f"{msg['role'].upper()}: {msg['content'][:500]}..."
                if len(msg['content']) > 500 else f"{msg['role'].upper()}: {msg['content']}"
                for msg in chunk_messages
            ])

            conversation_id = os.path.basename(chunk_messages[0]['file_path']).replace('.jsonl', '')

            chunks.append({
                # Deterministic id: same file + chunk offset always hashes the same,
                # so re-imports upsert rather than duplicate.
                # NOTE(review): Qdrant point IDs must be an unsigned int or a
                # UUID; a bare 32-char md5 hexdigest may be rejected by some
                # Qdrant versions — confirm against the deployed server.
                'id': hashlib.md5(f"{chunk_messages[0]['file_path']}_{i}".encode()).hexdigest(),
                'text': conversation_text,
                'metadata': {
                    'project_id': self.project_name,
                    'project_name': self.project_name,
                    'conversation_id': conversation_id,
                    'chunk_index': i // CHUNK_SIZE,
                    'message_count': len(chunk_messages),
                    'start_role': chunk_messages[0]['role'],
                    'timestamp': chunk_messages[0]['timestamp'],
                    'file_path': chunk_messages[0]['file_path']
                }
            })

        return chunks

    def import_to_qdrant(self, chunks: List[Dict[str, Any]], collection_name: str):
        """Embed chunk texts and upsert them into Qdrant in BATCH_SIZE batches."""
        if not chunks:
            return

        # Process in smaller batches to avoid memory issues
        for batch_start in range(0, len(chunks), BATCH_SIZE):
            batch_chunks = chunks[batch_start:batch_start + BATCH_SIZE]

            # Generate embeddings for this batch
            texts = [chunk['text'] for chunk in batch_chunks]
            embeddings = self.encoder.encode(texts, show_progress_bar=False)

            # Create points
            points = []
            for chunk, embedding in zip(batch_chunks, embeddings):
                points.append(
                    PointStruct(
                        id=chunk['id'],
                        vector=embedding.tolist(),
                        payload={
                            'text': chunk['text'],
                            **chunk['metadata']
                        }
                    )
                )

            # Upload to Qdrant
            self.client.upsert(
                collection_name=collection_name,
                points=points
            )
            logger.info(f"Uploaded batch of {len(points)} points")

            # Clear memory
            del texts, embeddings, points

    def import_project(self):
        """Import all not-yet-imported JSONL files for this project.

        State is saved after each file so an interrupted run resumes where it
        left off instead of re-importing finished files.
        """
        logger.info(f"Importing project: {self.project_name}")

        # Find all JSONL files
        pattern = os.path.join(self.project_path, "*.jsonl")
        all_files = glob.glob(pattern)

        if not all_files:
            logger.warning(f"No JSONL files found in {self.project_path}")
            return

        # Get already imported files for this project
        project_imported = set(self.imported_files.get(self.project_name, []))

        # Convert to the container-relative path form used in the state file.
        new_files = []
        for f in all_files:
            rel_path = f.replace(os.path.expanduser("~/.claude/projects"), "/logs")
            if rel_path not in project_imported:
                new_files.append((f, rel_path))

        if not new_files:
            logger.info(f"All files already imported for {self.project_name}")
            return

        logger.info(f"Found {len(new_files)} new files to import")

        # Setup collection
        collection_name = self.setup_collection()

        # Process files one by one
        total_chunks = 0
        for file_path, rel_path in new_files:
            logger.info(f"Processing: {os.path.basename(file_path)}")

            # Extract messages
            messages = self.process_jsonl_file(file_path)
            if not messages:
                logger.warning(f"No messages found in {file_path}")
                continue

            # Create chunks
            chunks = self.create_conversation_chunks(messages)

            # Import to Qdrant
            self.import_to_qdrant(chunks, collection_name)

            # Update state after each file
            if self.project_name not in self.imported_files:
                self.imported_files[self.project_name] = []
            self.imported_files[self.project_name].append(rel_path)
            self.save_state()

            total_chunks += len(chunks)
            logger.info(f"Imported {len(chunks)} chunks from {os.path.basename(file_path)}")

        # Final summary
        count = self.client.get_collection(collection_name).points_count
        logger.info(f"Project complete: {total_chunks} chunks imported, {count} total points in collection")
263
def main():
    """CLI entry point: validate the single path argument, then import it."""
    args = sys.argv[1:]
    if len(args) != 1:
        print("Usage: python import-single-project.py <project_path>")
        print("Example: python import-single-project.py ~/.claude/projects/my-project")
        sys.exit(1)

    target = args[0]
    if not os.path.exists(target):
        logger.error(f"Project path does not exist: {target}")
        sys.exit(1)

    SingleProjectImporter(target).import_project()


if __name__ == "__main__":
    main()
@@ -0,0 +1,169 @@
1
#!/usr/bin/env python3
"""
File system watcher for automatic conversation imports.
Monitors Claude projects directory for new/modified JSONL files.
"""

import json
import logging
import os
import subprocess
import sys
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Set

# Runtime configuration (environment-overridable).
WATCH_DIR = os.getenv("WATCH_DIR", "/logs")
STATE_FILE = os.getenv("STATE_FILE", "/config/imported-files.json")
WATCH_INTERVAL = int(os.getenv("WATCH_INTERVAL", "60"))  # seconds between scans
IMPORT_DELAY = int(os.getenv("IMPORT_DELAY", "30"))  # settle time before importing new files
IMPORTER_SCRIPT = "/scripts/import-conversations-voyage-streaming.py"

# Tag every record so watcher output is distinguishable from the importer's.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - [Watcher] %(message)s'
)
logger = logging.getLogger(__name__)
31
class ImportWatcher:
    """Polls WATCH_DIR for new JSONL conversation files and triggers imports.

    New files are queued and imported only after they have sat unchanged for
    IMPORT_DELAY seconds, so half-written conversation logs are not picked up
    mid-stream.
    """

    def __init__(self):
        """Initialize the import watcher."""
        self.watch_dir = Path(WATCH_DIR)
        self.state_file = Path(STATE_FILE)
        # Maps container-relative file path -> time it was first seen.
        self.pending_imports: Dict[str, datetime] = {}
        self.last_scan = datetime.now()

    def load_imported_files(self) -> Set[str]:
        """Return the union of all file paths recorded as imported in the state file.

        Missing or unreadable state yields an empty set (everything looks new).
        """
        imported = set()

        if self.state_file.exists():
            try:
                with open(self.state_file, 'r') as f:
                    state = json.load(f)
                    for project_files in state.get("projects", {}).values():
                        imported.update(project_files)
            except Exception as e:
                logger.error(f"Failed to load state: {e}")

        return imported

    def find_new_files(self, imported_files: Set[str]) -> Dict[str, Path]:
        """Find not-yet-imported JSONL files modified within the recent scan window.

        Returns a mapping of container-relative path -> absolute Path.
        NOTE(review): an unimported file whose mtime predates the scan window
        is never re-queued here; it is only covered by the initial full import
        in run() — confirm that is the intended recovery path.
        """
        new_files = {}

        for project_dir in self.watch_dir.iterdir():
            if not project_dir.is_dir():
                continue

            for jsonl_file in project_dir.glob("*.jsonl"):
                # Convert to the path form used in the state file.
                rel_path = str(jsonl_file).replace(str(self.watch_dir), "/logs")

                # Check if file is new or modified
                if rel_path not in imported_files:
                    mtime = datetime.fromtimestamp(jsonl_file.stat().st_mtime)

                    # Only consider files modified after last scan
                    if mtime > self.last_scan - timedelta(seconds=WATCH_INTERVAL):
                        new_files[rel_path] = jsonl_file
                        logger.info(f"Found new file: {jsonl_file.name} in {project_dir.name}")

        return new_files

    def import_project(self, project_path: Path) -> bool:
        """Run the streaming importer for one project; return True on success."""
        try:
            logger.info(f"Starting import for project: {project_path.name}")

            # Run the streaming importer
            result = subprocess.run(
                ["python", IMPORTER_SCRIPT, str(project_path)],
                capture_output=True,
                text=True,
                timeout=300  # 5 minute timeout
            )

            if result.returncode == 0:
                logger.info(f"Successfully imported project: {project_path.name}")
                return True
            else:
                logger.error(f"Import failed for {project_path.name}: {result.stderr}")
                return False

        except subprocess.TimeoutExpired:
            logger.error(f"Import timeout for project: {project_path.name}")
            return False
        except Exception as e:
            logger.error(f"Import error for {project_path.name}: {e}")
            return False

    def process_pending_imports(self):
        """Import each project whose queued files have aged past IMPORT_DELAY."""
        current_time = datetime.now()
        projects_to_import = set()

        # Iterate over a snapshot, since ready entries are deleted mid-loop.
        for file_path, added_time in list(self.pending_imports.items()):
            if current_time - added_time >= timedelta(seconds=IMPORT_DELAY):
                project_path = Path(file_path).parent
                projects_to_import.add(project_path)
                del self.pending_imports[file_path]

        # Import each project
        for project_path in projects_to_import:
            self.import_project(project_path)

    def run(self):
        """Main watch loop: initial full import, then periodic incremental scans."""
        logger.info(f"Starting import watcher on {self.watch_dir}")
        logger.info(f"Scan interval: {WATCH_INTERVAL}s, Import delay: {IMPORT_DELAY}s")

        # Initial full import
        logger.info("Running initial full import...")
        try:
            subprocess.run(["python", IMPORTER_SCRIPT], timeout=3600)
        except (subprocess.TimeoutExpired, OSError) as e:
            # Fixed: an unguarded timeout here previously crashed the watcher
            # before the loop ever started. Log and fall through to watching.
            logger.error(f"Initial full import failed: {e}")

        while True:
            try:
                # Load current import state
                imported_files = self.load_imported_files()

                # Find new files
                new_files = self.find_new_files(imported_files)

                # Add new files to pending
                for file_path, full_path in new_files.items():
                    if file_path not in self.pending_imports:
                        self.pending_imports[file_path] = datetime.now()
                        logger.info(f"Queued for import: {full_path.name}")

                # Process pending imports
                self.process_pending_imports()

                # Update last scan time
                self.last_scan = datetime.now()

                # Log status
                if self.pending_imports:
                    logger.info(f"Files pending import: {len(self.pending_imports)}")

                # Wait for next scan
                time.sleep(WATCH_INTERVAL)

            except KeyboardInterrupt:
                logger.info("Watcher stopped by user")
                break
            except Exception as e:
                logger.error(f"Watcher error: {e}")
                time.sleep(WATCH_INTERVAL)
163
def main():
    """Entry point: construct the watcher and run it until interrupted."""
    ImportWatcher().run()


if __name__ == "__main__":
    main()
@@ -1,12 +0,0 @@
1
- {
2
- "mcpServers": {
3
- "self-reflection": {
4
- "command": "node",
5
- "args": ["/Users/ramakrishnanannaswamy/claude-self-reflect/qdrant-mcp-stack/claude-self-reflection/dist/index.js"],
6
- "env": {
7
- "QDRANT_URL": "http://localhost:6333",
8
- "VOYAGE_KEY": "pa-wdTYGObaxhs-XFKX2r7WCczRwEVNb9eYMTSO3yrQhZI"
9
- }
10
- }
11
- }
12
- }
package/dist/cli.d.ts DELETED
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=cli.d.ts.map
package/dist/cli.d.ts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}
package/dist/cli.js DELETED
@@ -1,55 +0,0 @@
1
- #!/usr/bin/env node
2
- import { spawn } from 'child_process';
3
- import { fileURLToPath } from 'url';
4
- import { dirname, join } from 'path';
5
- import { readFileSync } from 'fs';
6
- const __filename = fileURLToPath(import.meta.url);
7
- const __dirname = dirname(__filename);
8
- // Handle command line arguments
9
- const args = process.argv.slice(2);
10
- const command = args[0];
11
- if (command === 'setup') {
12
- // Run the setup wizard
13
- const setupPath = join(__dirname, '..', 'scripts', 'setup-wizard.js');
14
- const child = spawn('node', [setupPath], {
15
- stdio: 'inherit'
16
- });
17
- child.on('error', (error) => {
18
- console.error('Failed to start setup wizard:', error);
19
- process.exit(1);
20
- });
21
- child.on('exit', (code) => {
22
- process.exit(code || 0);
23
- });
24
- }
25
- else if (command === '--version' || command === '-v') {
26
- // Read package.json to get version
27
- const packagePath = join(__dirname, '..', 'package.json');
28
- const pkg = JSON.parse(readFileSync(packagePath, 'utf8'));
29
- console.log(pkg.version);
30
- }
31
- else if (command === '--help' || command === '-h' || !command) {
32
- console.log(`
33
- Claude Self-Reflect - Give Claude perfect memory of all your conversations
34
-
35
- Usage:
36
- claude-self-reflect <command>
37
-
38
- Commands:
39
- setup Run the interactive setup wizard
40
-
41
- Options:
42
- --version Show version number
43
- --help Show this help message
44
-
45
- Examples:
46
- claude-self-reflect setup # Run interactive setup
47
- npx claude-self-reflect setup # Run without installing globally
48
- `);
49
- }
50
- else {
51
- console.error(`Unknown command: ${command}`);
52
- console.error('Run "claude-self-reflect --help" for usage information');
53
- process.exit(1);
54
- }
55
- //# sourceMappingURL=cli.js.map
package/dist/cli.js.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AACA,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,IAAI,CAAC;AAElC,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;AAEtC,gCAAgC;AAChC,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACnC,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;AAExB,IAAI,OAAO,KAAK,OAAO,EAAE,CAAC;IACxB,uBAAuB;IACvB,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,SAAS,EAAE,iBAAiB,CAAC,CAAC;IACtE,MAAM,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,EAAE;QACvC,KAAK,EAAE,SAAS;KACjB,CAAC,CAAC;IAEH,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;QAC1B,OAAO,CAAC,KAAK,CAAC,+BAA+B,EAAE,KAAK,CAAC,CAAC;QACtD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;IAEH,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;QACxB,OAAO,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC;IAC1B,CAAC,CAAC,CAAC;AACL,CAAC;KAAM,IAAI,OAAO,KAAK,WAAW,IAAI,OAAO,KAAK,IAAI,EAAE,CAAC;IACvD,mCAAmC;IACnC,MAAM,WAAW,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,cAAc,CAAC,CAAC;IAC1D,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AAC3B,CAAC;KAAM,IAAI,OAAO,KAAK,QAAQ,IAAI,OAAO,KAAK,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;IAChE,OAAO,CAAC,GAAG,CAAC;;;;;;;;;;;;;;;;CAgBb,CAAC,CAAC;AACH,CAAC;KAAM,CAAC;IACN,OAAO,CAAC,KAAK,CAAC,oBAAoB,OAAO,EAAE,CAAC,CAAC;IAC7C,OAAO,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;IACxE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC"}
@@ -1,76 +0,0 @@
1
- import { EmbeddingService } from './embeddings';
2
- /**
3
- * Google Gemini embedding service implementation
4
- * Supports task-specific optimization and variable dimensions
5
- */
6
- export declare class GeminiEmbeddingService implements EmbeddingService {
7
- private apiKey;
8
- private model;
9
- private dimensions;
10
- private taskType;
11
- constructor(apiKey: string, model?: string, dimensions?: number, // Can be 768, 1536, or 3072
12
- taskType?: GeminiTaskType);
13
- generateEmbedding(text: string): Promise<number[]>;
14
- /**
15
- * Normalize embedding vector
16
- */
17
- private normalizeEmbedding;
18
- getDimensions(): number;
19
- getModelName(): string;
20
- }
21
- /**
22
- * Gemini task types for optimized embeddings
23
- */
24
- export declare enum GeminiTaskType {
25
- SEMANTIC_SIMILARITY = "SEMANTIC_SIMILARITY",
26
- CLASSIFICATION = "CLASSIFICATION",
27
- CLUSTERING = "CLUSTERING",
28
- RETRIEVAL_DOCUMENT = "RETRIEVAL_DOCUMENT",
29
- RETRIEVAL_QUERY = "RETRIEVAL_QUERY",
30
- CODE_RETRIEVAL_QUERY = "CODE_RETRIEVAL_QUERY",
31
- QUESTION_ANSWERING = "QUESTION_ANSWERING",
32
- FACT_VERIFICATION = "FACT_VERIFICATION"
33
- }
34
- /**
35
- * Gemini vs Voyage comparison utility
36
- */
37
- export declare class EmbeddingComparison {
38
- /**
39
- * Compare Gemini and Voyage for different use cases
40
- */
41
- static getComparison(): {
42
- gemini: {
43
- model: string;
44
- dimensions: number[];
45
- taskTypes: GeminiTaskType[];
46
- advantages: string[];
47
- limitations: string[];
48
- bestFor: string[];
49
- };
50
- voyage: {
51
- model: string;
52
- dimensions: number;
53
- accuracy: string;
54
- tokenLimit: number;
55
- advantages: string[];
56
- limitations: string[];
57
- bestFor: string[];
58
- };
59
- };
60
- /**
61
- * Estimate costs for project
62
- */
63
- static estimateCosts(totalTokens: number): {
64
- voyage: {
65
- freeTokens: number;
66
- costPerMillion: number;
67
- estimatedCost: number;
68
- };
69
- gemini: {
70
- note: string;
71
- approximateCostPerMillion: number;
72
- estimatedCost: number;
73
- };
74
- };
75
- }
76
- //# sourceMappingURL=embeddings-gemini.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"embeddings-gemini.d.ts","sourceRoot":"","sources":["../src/embeddings-gemini.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,gBAAgB,EAAE,MAAM,cAAc,CAAC;AAEhD;;;GAGG;AACH,qBAAa,sBAAuB,YAAW,gBAAgB;IAC7D,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,QAAQ,CAAiB;gBAG/B,MAAM,EAAE,MAAM,EACd,KAAK,GAAE,MAA+B,EACtC,UAAU,GAAE,MAAY,EAAE,4BAA4B;IACtD,QAAQ,GAAE,cAA+C;IAQrD,iBAAiB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAwCxD;;OAEG;IACH,OAAO,CAAC,kBAAkB;IAK1B,aAAa,IAAI,MAAM;IAIvB,YAAY,IAAI,MAAM;CAGvB;AAED;;GAEG;AACH,oBAAY,cAAc;IACxB,mBAAmB,wBAAwB;IAC3C,cAAc,mBAAmB;IACjC,UAAU,eAAe;IACzB,kBAAkB,uBAAuB;IACzC,eAAe,oBAAoB;IACnC,oBAAoB,yBAAyB;IAC7C,kBAAkB,uBAAuB;IACzC,iBAAiB,sBAAsB;CACxC;AAED;;GAEG;AACH,qBAAa,mBAAmB;IAC9B;;OAEG;IACH,MAAM,CAAC,aAAa;;;;;;;;;;;;;;;;;;;IAsDpB;;OAEG;IACH,MAAM,CAAC,aAAa,CAAC,WAAW,EAAE,MAAM;;;;;;;;;;;;CAezC"}