ds-agent-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/ds-agent.js +451 -0
- package/ds_agent/__init__.py +8 -0
- package/package.json +28 -0
- package/requirements.txt +126 -0
- package/setup.py +35 -0
- package/src/__init__.py +7 -0
- package/src/_compress_tool_result.py +118 -0
- package/src/api/__init__.py +4 -0
- package/src/api/app.py +1626 -0
- package/src/cache/__init__.py +5 -0
- package/src/cache/cache_manager.py +561 -0
- package/src/cli.py +2886 -0
- package/src/dynamic_prompts.py +281 -0
- package/src/orchestrator.py +4799 -0
- package/src/progress_manager.py +139 -0
- package/src/reasoning/__init__.py +332 -0
- package/src/reasoning/business_summary.py +431 -0
- package/src/reasoning/data_understanding.py +356 -0
- package/src/reasoning/model_explanation.py +383 -0
- package/src/reasoning/reasoning_trace.py +239 -0
- package/src/registry/__init__.py +3 -0
- package/src/registry/tools_registry.py +3 -0
- package/src/session_memory.py +448 -0
- package/src/session_store.py +370 -0
- package/src/storage/__init__.py +19 -0
- package/src/storage/artifact_store.py +620 -0
- package/src/storage/helpers.py +116 -0
- package/src/storage/huggingface_storage.py +694 -0
- package/src/storage/r2_storage.py +0 -0
- package/src/storage/user_files_service.py +288 -0
- package/src/tools/__init__.py +335 -0
- package/src/tools/advanced_analysis.py +823 -0
- package/src/tools/advanced_feature_engineering.py +708 -0
- package/src/tools/advanced_insights.py +578 -0
- package/src/tools/advanced_preprocessing.py +549 -0
- package/src/tools/advanced_training.py +906 -0
- package/src/tools/agent_tool_mapping.py +326 -0
- package/src/tools/auto_pipeline.py +420 -0
- package/src/tools/autogluon_training.py +1480 -0
- package/src/tools/business_intelligence.py +860 -0
- package/src/tools/cloud_data_sources.py +581 -0
- package/src/tools/code_interpreter.py +390 -0
- package/src/tools/computer_vision.py +614 -0
- package/src/tools/data_cleaning.py +614 -0
- package/src/tools/data_profiling.py +593 -0
- package/src/tools/data_type_conversion.py +268 -0
- package/src/tools/data_wrangling.py +433 -0
- package/src/tools/eda_reports.py +284 -0
- package/src/tools/enhanced_feature_engineering.py +241 -0
- package/src/tools/feature_engineering.py +302 -0
- package/src/tools/matplotlib_visualizations.py +1327 -0
- package/src/tools/model_training.py +520 -0
- package/src/tools/nlp_text_analytics.py +761 -0
- package/src/tools/plotly_visualizations.py +497 -0
- package/src/tools/production_mlops.py +852 -0
- package/src/tools/time_series.py +507 -0
- package/src/tools/tools_registry.py +2133 -0
- package/src/tools/visualization_engine.py +559 -0
- package/src/utils/__init__.py +42 -0
- package/src/utils/error_recovery.py +313 -0
- package/src/utils/parallel_executor.py +402 -0
- package/src/utils/polars_helpers.py +248 -0
- package/src/utils/schema_extraction.py +132 -0
- package/src/utils/semantic_layer.py +392 -0
- package/src/utils/token_budget.py +411 -0
- package/src/utils/validation.py +377 -0
- package/src/workflow_state.py +154 -0
@@ -0,0 +1,561 @@
"""
Cache Manager for Data Science Copilot
Uses SQLite for persistent caching with hierarchical support.
Supports individual tool result caching and cache warming.
"""

import hashlib
import json
import sqlite3
import time
from pathlib import Path
from typing import Any, Optional, Dict, List
import pickle


class CacheManager:
    """
    Manages caching of LLM responses and expensive computations.

    Features:
    - Hierarchical caching: file_hash → [profile, quality, features, etc.]
    - Individual tool result caching (not full workflows)
    - Cache warming on file upload
    - TTL-based invalidation
    """

    def __init__(self, db_path: str = "./cache_db/cache.db", ttl_seconds: int = 86400):
        """
        Initialize cache manager.

        Args:
            db_path: Path to SQLite database file
            ttl_seconds: Time-to-live for cache entries (default 24 hours)
        """
        self.db_path = Path(db_path)
        self.ttl_seconds = ttl_seconds

        # Ensure cache directory exists
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        # Initialize database
        self._init_db()
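
    # A minimal construction sketch (illustrative only; the values are
    # hypothetical, not taken from the package):
    #
    #   cache = CacheManager(db_path="./cache_db/cache.db", ttl_seconds=3600)
    #
    # The parent directory (./cache_db/) is created automatically, and both
    # tables plus their indices exist once __init__ returns.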

    def _init_db(self) -> None:
        """Create cache tables if they don't exist."""
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            # Main cache table for individual tool results
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS cache (
                    key TEXT PRIMARY KEY,
                    value BLOB NOT NULL,
                    created_at INTEGER NOT NULL,
                    expires_at INTEGER NOT NULL,
                    metadata TEXT
                )
            """)

            # Hierarchical cache table for file-based operations
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS hierarchical_cache (
                    file_hash TEXT NOT NULL,
                    tool_name TEXT NOT NULL,
                    tool_args TEXT,
                    result BLOB NOT NULL,
                    created_at INTEGER NOT NULL,
                    expires_at INTEGER NOT NULL,
                    PRIMARY KEY (file_hash, tool_name, tool_args)
                )
            """)

            # Create indices for efficient lookup
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_expires_at
                ON cache(expires_at)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_file_hash
                ON hierarchical_cache(file_hash)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_hierarchical_expires
                ON hierarchical_cache(expires_at)
            """)

            conn.commit()
            conn.close()
            print(f"✅ Cache database initialized at {self.db_path}")
        except Exception as e:
            print(f"⚠️ Error initializing cache database: {e}")
            print("   Attempting to recreate database...")
            try:
                # Remove corrupted database and recreate
                if self.db_path.exists():
                    self.db_path.unlink()

                conn = sqlite3.connect(self.db_path)
                cursor = conn.cursor()

                cursor.execute("""
                    CREATE TABLE cache (
                        key TEXT PRIMARY KEY,
                        value BLOB NOT NULL,
                        created_at INTEGER NOT NULL,
                        expires_at INTEGER NOT NULL,
                        metadata TEXT
                    )
                """)

                cursor.execute("""
                    CREATE TABLE hierarchical_cache (
                        file_hash TEXT NOT NULL,
                        tool_name TEXT NOT NULL,
                        tool_args TEXT,
                        result BLOB NOT NULL,
                        created_at INTEGER NOT NULL,
                        expires_at INTEGER NOT NULL,
                        PRIMARY KEY (file_hash, tool_name, tool_args)
                    )
                """)

                cursor.execute("""
                    CREATE INDEX idx_expires_at
                    ON cache(expires_at)
                """)

                cursor.execute("""
                    CREATE INDEX idx_file_hash
                    ON hierarchical_cache(file_hash)
                """)

                cursor.execute("""
                    CREATE INDEX idx_hierarchical_expires
                    ON hierarchical_cache(expires_at)
                """)

                conn.commit()
                conn.close()
                print("✅ Cache database recreated successfully")
            except Exception as e2:
                print(f"❌ Failed to recreate cache database: {e2}")
                print("   Cache functionality will be disabled")
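
    # To confirm the schema _init_db creates, one can query sqlite_master
    # directly (a sketch, assuming the default db_path):
    #
    #   import sqlite3
    #   conn = sqlite3.connect("./cache_db/cache.db")
    #   print(conn.execute(
    #       "SELECT name FROM sqlite_master WHERE type = 'table'").fetchall())
    #   # -> [('cache',), ('hierarchical_cache',)]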

    def _generate_key(self, *args, **kwargs) -> str:
        """
        Generate a unique cache key from arguments.

        Args:
            *args: Positional arguments to hash
            **kwargs: Keyword arguments to hash

        Returns:
            MD5 hash of the arguments
        """
        # Combine args and kwargs into a single string
        key_data = json.dumps({"args": args, "kwargs": kwargs}, sort_keys=True)
        return hashlib.md5(key_data.encode()).hexdigest()
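
    # Because the key is built with json.dumps(..., sort_keys=True), keyword
    # order does not affect the hash, while positional order does. Sketch:
    #
    #   cm._generate_key("profile", rows=10, cols=5) == \
    #       cm._generate_key("profile", cols=5, rows=10)    # True
    #   cm._generate_key("a", "b") == cm._generate_key("b", "a")  # False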

    def get(self, key: str) -> Optional[Any]:
        """
        Retrieve value from cache.

        Args:
            key: Cache key

        Returns:
            Cached value if exists and not expired, None otherwise
        """
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            current_time = int(time.time())

            cursor.execute("""
                SELECT value, expires_at
                FROM cache
                WHERE key = ? AND expires_at > ?
            """, (key, current_time))

            result = cursor.fetchone()
            conn.close()
        except sqlite3.OperationalError as e:
            print(f"⚠️ Cache read error: {e}")
            print("   Reinitializing cache database...")
            self._init_db()
            return None
        except Exception as e:
            print(f"⚠️ Unexpected cache error: {e}")
            return None

        if result:
            value_blob, expires_at = result
            # Deserialize using pickle for complex Python objects
            return pickle.loads(value_blob)

        return None

    def set(self, key: str, value: Any, ttl_override: Optional[int] = None,
            metadata: Optional[dict] = None) -> None:
        """
        Store value in cache.

        Args:
            key: Cache key
            value: Value to cache (must be pickleable)
            ttl_override: Optional override for TTL (seconds)
            metadata: Optional metadata to store with cache entry
        """
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            current_time = int(time.time())
            ttl = ttl_override if ttl_override is not None else self.ttl_seconds
            expires_at = current_time + ttl

            # Serialize value using pickle
            value_blob = pickle.dumps(value)

            # Serialize metadata as JSON
            metadata_json = json.dumps(metadata) if metadata else None

            cursor.execute("""
                INSERT OR REPLACE INTO cache (key, value, created_at, expires_at, metadata)
                VALUES (?, ?, ?, ?, ?)
            """, (key, value_blob, current_time, expires_at, metadata_json))

            conn.commit()
            conn.close()
        except sqlite3.OperationalError as e:
            print(f"⚠️ Cache write error: {e}")
            print("   Reinitializing cache database...")
            self._init_db()
        except Exception as e:
            print(f"⚠️ Unexpected cache error during write: {e}")
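
    # A set/get round trip (sketch; key and payload are hypothetical):
    #
    #   cache.set("profile:v1", {"rows": 1000}, ttl_override=60,
    #             metadata={"source": "demo"})
    #   cache.get("profile:v1")   # -> {"rows": 1000}, for the next 60 seconds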

    def invalidate(self, key: str) -> bool:
        """
        Remove specific entry from cache.

        Args:
            key: Cache key to invalidate

        Returns:
            True if entry was removed, False if not found
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        cursor.execute("DELETE FROM cache WHERE key = ?", (key,))
        deleted = cursor.rowcount > 0

        conn.commit()
        conn.close()

        return deleted

    def clear_expired(self) -> int:
        """
        Remove all expired entries from cache.

        Returns:
            Number of entries removed
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        current_time = int(time.time())
        cursor.execute("DELETE FROM cache WHERE expires_at <= ?", (current_time,))
        deleted = cursor.rowcount

        conn.commit()
        conn.close()

        return deleted
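
    # Expiry behaviour, sketched: with ttl_override=0 an entry expires
    # immediately (get() requires expires_at > now), and clear_expired()
    # then removes the row:
    #
    #   cache.set("temp", "x", ttl_override=0)
    #   cache.get("temp")        # -> None
    #   cache.clear_expired()    # -> 1 (if this is the only expired row)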

    def clear_all(self) -> None:
        """Remove all entries from cache."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        cursor.execute("DELETE FROM cache")

        conn.commit()
        conn.close()

    def get_stats(self) -> dict:
        """
        Get cache statistics.

        Returns:
            Dictionary with cache stats (total entries, expired, size)
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()

        current_time = int(time.time())

        # Total entries
        cursor.execute("SELECT COUNT(*) FROM cache")
        total = cursor.fetchone()[0]

        # Valid entries
        cursor.execute("SELECT COUNT(*) FROM cache WHERE expires_at > ?", (current_time,))
        valid = cursor.fetchone()[0]

        # Database size
        cursor.execute("SELECT page_count * page_size FROM pragma_page_count(), pragma_page_size()")
        size_bytes = cursor.fetchone()[0]

        conn.close()

        return {
            "total_entries": total,
            "valid_entries": valid,
            "expired_entries": total - valid,
            "size_mb": round(size_bytes / (1024 * 1024), 2)
        }
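
    # Example call (output values are illustrative, not from the package):
    #
    #   cache.get_stats()
    #   # -> {'total_entries': 2, 'valid_entries': 1,
    #   #     'expired_entries': 1, 'size_mb': 0.02}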

    def generate_file_hash(self, file_path: str) -> str:
        """
        Generate hash of file contents for cache key.

        Args:
            file_path: Path to file

        Returns:
            MD5 hash of file contents
        """
        hasher = hashlib.md5()

        with open(file_path, 'rb') as f:
            # Read file in chunks to handle large files
            for chunk in iter(lambda: f.read(4096), b""):
                hasher.update(chunk)

        return hasher.hexdigest()
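
    # The chunked read above yields the same digest as hashing the whole file
    # at once; a quick check (sketch, path hypothetical):
    #
    #   with open("data/sales.csv", "rb") as f:
    #       assert cache.generate_file_hash("data/sales.csv") == \
    #           hashlib.md5(f.read()).hexdigest()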

    # ========================================
    # HIERARCHICAL CACHING (NEW)
    # ========================================

    def get_tool_result(self, file_hash: str, tool_name: str, tool_args: Dict[str, Any] = None) -> Optional[Any]:
        """
        Get cached result for a specific tool applied to a file.

        Args:
            file_hash: MD5 hash of the file
            tool_name: Name of the tool
            tool_args: Arguments passed to the tool (excluding file_path)

        Returns:
            Cached tool result if exists and not expired, None otherwise
        """
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            current_time = int(time.time())
            tool_args_str = json.dumps(tool_args or {}, sort_keys=True)

            cursor.execute("""
                SELECT result, expires_at
                FROM hierarchical_cache
                WHERE file_hash = ? AND tool_name = ? AND tool_args = ? AND expires_at > ?
            """, (file_hash, tool_name, tool_args_str, current_time))

            result = cursor.fetchone()
            conn.close()

            if result:
                result_blob, expires_at = result
                cached_result = pickle.loads(result_blob)
                print(f"📦 Cache HIT: {tool_name} for file {file_hash[:8]}...")
                return cached_result
            else:
                print(f"📭 Cache MISS: {tool_name} for file {file_hash[:8]}...")
                return None

        except Exception as e:
            print(f"⚠️ Hierarchical cache read error: {e}")
            return None

    def set_tool_result(self, file_hash: str, tool_name: str, result: Any,
                        tool_args: Dict[str, Any] = None, ttl_override: Optional[int] = None) -> None:
        """
        Cache result for a specific tool applied to a file.

        Args:
            file_hash: MD5 hash of the file
            tool_name: Name of the tool
            result: Tool result to cache
            tool_args: Arguments passed to the tool (excluding file_path)
            ttl_override: Optional override for TTL (seconds)
        """
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            current_time = int(time.time())
            ttl = ttl_override if ttl_override is not None else self.ttl_seconds
            expires_at = current_time + ttl

            tool_args_str = json.dumps(tool_args or {}, sort_keys=True)
            result_blob = pickle.dumps(result)

            cursor.execute("""
                INSERT OR REPLACE INTO hierarchical_cache
                (file_hash, tool_name, tool_args, result, created_at, expires_at)
                VALUES (?, ?, ?, ?, ?, ?)
            """, (file_hash, tool_name, tool_args_str, result_blob, current_time, expires_at))

            conn.commit()
            conn.close()
            print(f"💾 Cached: {tool_name} for file {file_hash[:8]}...")

        except Exception as e:
            print(f"⚠️ Hierarchical cache write error: {e}")
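
    # A hierarchical round trip (sketch; file path and result are
    # hypothetical). Note that get_tool_result must receive the same
    # tool_args used when storing, since they are part of the composite key:
    #
    #   h = cache.generate_file_hash("data/sales.csv")
    #   cache.set_tool_result(h, "profile_dataset", {"rows": 1000},
    #                         tool_args={"sample": False})
    #   cache.get_tool_result(h, "profile_dataset",
    #                         tool_args={"sample": False})  # -> {"rows": 1000}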

    def get_all_tool_results_for_file(self, file_hash: str) -> Dict[str, Any]:
        """
        Get all cached tool results for a specific file.

        Args:
            file_hash: MD5 hash of the file

        Returns:
            Dictionary mapping tool_name → result for all cached results
        """
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            current_time = int(time.time())

            cursor.execute("""
                SELECT tool_name, tool_args, result
                FROM hierarchical_cache
                WHERE file_hash = ? AND expires_at > ?
            """, (file_hash, current_time))

            results = {}
            for row in cursor.fetchall():
                tool_name, tool_args_str, result_blob = row
                tool_args = json.loads(tool_args_str)
                result = pickle.loads(result_blob)

                # Create unique key for tool + args combination
                if tool_args:
                    key = f"{tool_name}_{hashlib.md5(tool_args_str.encode()).hexdigest()[:8]}"
                else:
                    key = tool_name

                results[key] = {
                    "tool_name": tool_name,
                    "tool_args": tool_args,
                    "result": result
                }

            conn.close()

            if results:
                print(f"📦 Found {len(results)} cached results for file {file_hash[:8]}...")

            return results

        except Exception as e:
            print(f"⚠️ Error retrieving file cache results: {e}")
            return {}

    def warm_cache_for_file(self, file_path: str, tools_to_warm: List[str] = None) -> Dict[str, bool]:
        """
        Warm cache by pre-computing common tool results for a file.

        This is typically called on file upload to speed up first analysis.

        Args:
            file_path: Path to the file
            tools_to_warm: List of tool names to pre-compute (defaults to basic profiling tools)

        Returns:
            Dictionary mapping tool_name → success status
        """
        if tools_to_warm is None:
            # Default tools to warm: basic profiling operations
            tools_to_warm = [
                "profile_dataset",
                "detect_data_quality_issues",
                "analyze_correlations"
            ]

        file_hash = self.generate_file_hash(file_path)
        results = {}

        print(f"🔥 Warming cache for file {file_hash[:8]}... ({len(tools_to_warm)} tools)")

        # Import here to avoid circular dependency
        from ..orchestrator import DataScienceOrchestrator

        try:
            # Create temporary orchestrator for cache warming
            orchestrator = DataScienceOrchestrator(use_cache=False)  # Don't use cache during warming

            for tool_name in tools_to_warm:
                try:
                    # Execute tool
                    result = orchestrator._execute_tool(tool_name, {"file_path": file_path})

                    # Cache the result
                    if result.get("success", True):
                        self.set_tool_result(file_hash, tool_name, result)
                        results[tool_name] = True
                        print(f"   ✓ Warmed: {tool_name}")
                    else:
                        results[tool_name] = False
                        print(f"   ✗ Failed: {tool_name}")

                except Exception as e:
                    results[tool_name] = False
                    print(f"   ✗ Error warming {tool_name}: {e}")

            print(f"✅ Cache warming complete: {sum(results.values())}/{len(tools_to_warm)} successful")

        except Exception as e:
            print(f"❌ Cache warming failed: {e}")

        return results
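
    # Warming depends on DataScienceOrchestrator._execute_tool succeeding for
    # each tool; a call sketch (path hypothetical, statuses illustrative):
    #
    #   cache.warm_cache_for_file("data/sales.csv")
    #   # -> {'profile_dataset': True,
    #   #     'detect_data_quality_issues': True,
    #   #     'analyze_correlations': True}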

    def invalidate_file_cache(self, file_hash: str) -> int:
        """
        Invalidate all cached results for a specific file.

        Args:
            file_hash: MD5 hash of the file

        Returns:
            Number of entries invalidated
        """
        try:
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()

            cursor.execute("DELETE FROM hierarchical_cache WHERE file_hash = ?", (file_hash,))
            deleted = cursor.rowcount

            conn.commit()
            conn.close()

            if deleted > 0:
                print(f"🗑️ Invalidated {deleted} cached results for file {file_hash[:8]}...")

            return deleted

        except Exception as e:
            print(f"⚠️ Error invalidating file cache: {e}")
            return 0
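
# End-to-end sketch tying the pieces together (illustrative; paths and values
# are hypothetical, and the import path assumes the package layout above):
#
#   from src.cache.cache_manager import CacheManager
#
#   cache = CacheManager()                                   # 24h default TTL
#   file_hash = cache.generate_file_hash("data/sales.csv")
#   cache.set_tool_result(file_hash, "profile_dataset", {"rows": 1000})
#   print(cache.get_tool_result(file_hash, "profile_dataset"))
#   print(cache.get_stats())
#   cache.invalidate_file_cache(file_hash)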