gitflow-analytics 1.0.1__py3-none-any.whl → 1.3.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. gitflow_analytics/__init__.py +11 -11
  2. gitflow_analytics/_version.py +2 -2
  3. gitflow_analytics/classification/__init__.py +31 -0
  4. gitflow_analytics/classification/batch_classifier.py +752 -0
  5. gitflow_analytics/classification/classifier.py +464 -0
  6. gitflow_analytics/classification/feature_extractor.py +725 -0
  7. gitflow_analytics/classification/linguist_analyzer.py +574 -0
  8. gitflow_analytics/classification/model.py +455 -0
  9. gitflow_analytics/cli.py +4490 -378
  10. gitflow_analytics/cli_rich.py +503 -0
  11. gitflow_analytics/config/__init__.py +43 -0
  12. gitflow_analytics/config/errors.py +261 -0
  13. gitflow_analytics/config/loader.py +904 -0
  14. gitflow_analytics/config/profiles.py +264 -0
  15. gitflow_analytics/config/repository.py +124 -0
  16. gitflow_analytics/config/schema.py +441 -0
  17. gitflow_analytics/config/validator.py +154 -0
  18. gitflow_analytics/config.py +44 -398
  19. gitflow_analytics/core/analyzer.py +1320 -172
  20. gitflow_analytics/core/branch_mapper.py +132 -132
  21. gitflow_analytics/core/cache.py +1554 -175
  22. gitflow_analytics/core/data_fetcher.py +1193 -0
  23. gitflow_analytics/core/identity.py +571 -185
  24. gitflow_analytics/core/metrics_storage.py +526 -0
  25. gitflow_analytics/core/progress.py +372 -0
  26. gitflow_analytics/core/schema_version.py +269 -0
  27. gitflow_analytics/extractors/base.py +13 -11
  28. gitflow_analytics/extractors/ml_tickets.py +1100 -0
  29. gitflow_analytics/extractors/story_points.py +77 -59
  30. gitflow_analytics/extractors/tickets.py +841 -89
  31. gitflow_analytics/identity_llm/__init__.py +6 -0
  32. gitflow_analytics/identity_llm/analysis_pass.py +231 -0
  33. gitflow_analytics/identity_llm/analyzer.py +464 -0
  34. gitflow_analytics/identity_llm/models.py +76 -0
  35. gitflow_analytics/integrations/github_integration.py +258 -87
  36. gitflow_analytics/integrations/jira_integration.py +572 -123
  37. gitflow_analytics/integrations/orchestrator.py +206 -82
  38. gitflow_analytics/metrics/activity_scoring.py +322 -0
  39. gitflow_analytics/metrics/branch_health.py +470 -0
  40. gitflow_analytics/metrics/dora.py +542 -179
  41. gitflow_analytics/models/database.py +986 -59
  42. gitflow_analytics/pm_framework/__init__.py +115 -0
  43. gitflow_analytics/pm_framework/adapters/__init__.py +50 -0
  44. gitflow_analytics/pm_framework/adapters/jira_adapter.py +1845 -0
  45. gitflow_analytics/pm_framework/base.py +406 -0
  46. gitflow_analytics/pm_framework/models.py +211 -0
  47. gitflow_analytics/pm_framework/orchestrator.py +652 -0
  48. gitflow_analytics/pm_framework/registry.py +333 -0
  49. gitflow_analytics/qualitative/__init__.py +29 -0
  50. gitflow_analytics/qualitative/chatgpt_analyzer.py +259 -0
  51. gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
  52. gitflow_analytics/qualitative/classifiers/change_type.py +742 -0
  53. gitflow_analytics/qualitative/classifiers/domain_classifier.py +506 -0
  54. gitflow_analytics/qualitative/classifiers/intent_analyzer.py +535 -0
  55. gitflow_analytics/qualitative/classifiers/llm/__init__.py +35 -0
  56. gitflow_analytics/qualitative/classifiers/llm/base.py +193 -0
  57. gitflow_analytics/qualitative/classifiers/llm/batch_processor.py +383 -0
  58. gitflow_analytics/qualitative/classifiers/llm/cache.py +479 -0
  59. gitflow_analytics/qualitative/classifiers/llm/cost_tracker.py +435 -0
  60. gitflow_analytics/qualitative/classifiers/llm/openai_client.py +403 -0
  61. gitflow_analytics/qualitative/classifiers/llm/prompts.py +373 -0
  62. gitflow_analytics/qualitative/classifiers/llm/response_parser.py +287 -0
  63. gitflow_analytics/qualitative/classifiers/llm_commit_classifier.py +607 -0
  64. gitflow_analytics/qualitative/classifiers/risk_analyzer.py +438 -0
  65. gitflow_analytics/qualitative/core/__init__.py +13 -0
  66. gitflow_analytics/qualitative/core/llm_fallback.py +657 -0
  67. gitflow_analytics/qualitative/core/nlp_engine.py +382 -0
  68. gitflow_analytics/qualitative/core/pattern_cache.py +479 -0
  69. gitflow_analytics/qualitative/core/processor.py +673 -0
  70. gitflow_analytics/qualitative/enhanced_analyzer.py +2236 -0
  71. gitflow_analytics/qualitative/example_enhanced_usage.py +420 -0
  72. gitflow_analytics/qualitative/models/__init__.py +25 -0
  73. gitflow_analytics/qualitative/models/schemas.py +306 -0
  74. gitflow_analytics/qualitative/utils/__init__.py +13 -0
  75. gitflow_analytics/qualitative/utils/batch_processor.py +339 -0
  76. gitflow_analytics/qualitative/utils/cost_tracker.py +345 -0
  77. gitflow_analytics/qualitative/utils/metrics.py +361 -0
  78. gitflow_analytics/qualitative/utils/text_processing.py +285 -0
  79. gitflow_analytics/reports/__init__.py +100 -0
  80. gitflow_analytics/reports/analytics_writer.py +550 -18
  81. gitflow_analytics/reports/base.py +648 -0
  82. gitflow_analytics/reports/branch_health_writer.py +322 -0
  83. gitflow_analytics/reports/classification_writer.py +924 -0
  84. gitflow_analytics/reports/cli_integration.py +427 -0
  85. gitflow_analytics/reports/csv_writer.py +1700 -216
  86. gitflow_analytics/reports/data_models.py +504 -0
  87. gitflow_analytics/reports/database_report_generator.py +427 -0
  88. gitflow_analytics/reports/example_usage.py +344 -0
  89. gitflow_analytics/reports/factory.py +499 -0
  90. gitflow_analytics/reports/formatters.py +698 -0
  91. gitflow_analytics/reports/html_generator.py +1116 -0
  92. gitflow_analytics/reports/interfaces.py +489 -0
  93. gitflow_analytics/reports/json_exporter.py +2770 -0
  94. gitflow_analytics/reports/narrative_writer.py +2289 -158
  95. gitflow_analytics/reports/story_point_correlation.py +1144 -0
  96. gitflow_analytics/reports/weekly_trends_writer.py +389 -0
  97. gitflow_analytics/training/__init__.py +5 -0
  98. gitflow_analytics/training/model_loader.py +377 -0
  99. gitflow_analytics/training/pipeline.py +550 -0
  100. gitflow_analytics/tui/__init__.py +5 -0
  101. gitflow_analytics/tui/app.py +724 -0
  102. gitflow_analytics/tui/screens/__init__.py +8 -0
  103. gitflow_analytics/tui/screens/analysis_progress_screen.py +496 -0
  104. gitflow_analytics/tui/screens/configuration_screen.py +523 -0
  105. gitflow_analytics/tui/screens/loading_screen.py +348 -0
  106. gitflow_analytics/tui/screens/main_screen.py +321 -0
  107. gitflow_analytics/tui/screens/results_screen.py +722 -0
  108. gitflow_analytics/tui/widgets/__init__.py +7 -0
  109. gitflow_analytics/tui/widgets/data_table.py +255 -0
  110. gitflow_analytics/tui/widgets/export_modal.py +301 -0
  111. gitflow_analytics/tui/widgets/progress_widget.py +187 -0
  112. gitflow_analytics-1.3.6.dist-info/METADATA +1015 -0
  113. gitflow_analytics-1.3.6.dist-info/RECORD +122 -0
  114. gitflow_analytics-1.0.1.dist-info/METADATA +0 -463
  115. gitflow_analytics-1.0.1.dist-info/RECORD +0 -31
  116. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/WHEEL +0 -0
  117. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/entry_points.txt +0 -0
  118. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/licenses/LICENSE +0 -0
  119. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/top_level.txt +0 -0
+++ gitflow_analytics/qualitative/classifiers/llm/cache.py (new file)
@@ -0,0 +1,479 @@
+"""LLM-specific caching layer for classification results.
+
+This module provides persistent caching of LLM classification results
+to minimize API calls and reduce costs.
+
+WHY: LLM API calls are expensive and slow. Caching results for identical
+inputs dramatically reduces costs and improves performance.
+
+DESIGN DECISIONS:
+- Use SQLite for persistence and efficient lookups
+- Hash-based keys for fast matching
+- Configurable expiration for cache freshness
+- Statistics tracking for cache effectiveness
+- Support for cache warming and export
+"""
+
+import contextlib
+import hashlib
+import json
+import logging
+import sqlite3
+from datetime import datetime, timedelta
+from pathlib import Path
+from typing import Any, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class LLMCache:
+    """SQLite-based cache for LLM classification results.
+
+    WHY: Persistent caching reduces API costs by 90%+ for repeated
+    classifications while maintaining result consistency.
+    """
+
+    def __init__(self, cache_path: Path, expiration_days: int = 90, max_cache_size_mb: int = 500):
+        """Initialize LLM cache.
+
+        Args:
+            cache_path: Path to SQLite cache database
+            expiration_days: Days before cache entries expire
+            max_cache_size_mb: Maximum cache size in megabytes
+        """
+        self.cache_path = cache_path
+        self.expiration_days = expiration_days
+        self.max_cache_size_mb = max_cache_size_mb
+
+        # Ensure cache directory exists
+        self.cache_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Initialize database
+        self._init_database()
+
+        # Track cache statistics
+        self.hits = 0
+        self.misses = 0
+        self.stores = 0
+
+    def _init_database(self) -> None:
+        """Initialize SQLite database with cache tables.
+
+        WHY: Structured database enables efficient lookups and
+        management of cached results.
+        """
+        with sqlite3.connect(self.cache_path) as conn:
+            # Main cache table
+            conn.execute(
+                """
+                CREATE TABLE IF NOT EXISTS llm_cache (
+                    cache_key TEXT PRIMARY KEY,
+                    message_hash TEXT NOT NULL,
+                    files_hash TEXT NOT NULL,
+                    category TEXT NOT NULL,
+                    confidence REAL NOT NULL,
+                    method TEXT NOT NULL,
+                    reasoning TEXT,
+                    model TEXT,
+                    alternatives TEXT,  -- JSON array
+                    processing_time_ms REAL,
+                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    expires_at TIMESTAMP NOT NULL,
+                    access_count INTEGER DEFAULT 0,
+                    last_accessed TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                )
+                """
+            )
+
+            # Indices for efficient operations
+            conn.execute("CREATE INDEX IF NOT EXISTS idx_expires_at ON llm_cache(expires_at)")
+            conn.execute("CREATE INDEX IF NOT EXISTS idx_message_hash ON llm_cache(message_hash)")
+            conn.execute("CREATE INDEX IF NOT EXISTS idx_created_at ON llm_cache(created_at)")
+            conn.execute("CREATE INDEX IF NOT EXISTS idx_access_count ON llm_cache(access_count)")
+
+            # Metadata table for cache management
+            conn.execute(
+                """
+                CREATE TABLE IF NOT EXISTS cache_metadata (
+                    key TEXT PRIMARY KEY,
+                    value TEXT,
+                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                )
+                """
+            )
+
+            conn.commit()
+
+    def get(
+        self, message: str, files_changed: Optional[list[str]] = None
+    ) -> Optional[dict[str, Any]]:
+        """Get cached classification if available.
+
+        Args:
+            message: Commit message
+            files_changed: Optional list of changed files
+
+        Returns:
+            Cached classification result or None
+        """
+        cache_key, _, _ = self._generate_cache_key(message, files_changed or [])
+
+        try:
+            with sqlite3.connect(self.cache_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.execute(
+                    """
+                    SELECT category, confidence, reasoning, model, alternatives,
+                           method, processing_time_ms
+                    FROM llm_cache
+                    WHERE cache_key = ? AND expires_at > datetime('now')
+                    """,
+                    (cache_key,),
+                )
+
+                row = cursor.fetchone()
+                if row:
+                    # Update access statistics
+                    conn.execute(
+                        """
+                        UPDATE llm_cache
+                        SET access_count = access_count + 1,
+                            last_accessed = CURRENT_TIMESTAMP
+                        WHERE cache_key = ?
+                        """,
+                        (cache_key,),
+                    )
+                    conn.commit()
+
+                    self.hits += 1
+
+                    # Parse alternatives from JSON
+                    alternatives = []
+                    if row["alternatives"]:
+                        with contextlib.suppress(json.JSONDecodeError):
+                            alternatives = json.loads(row["alternatives"])
+
+                    return {
+                        "category": row["category"],
+                        "confidence": row["confidence"],
+                        "method": "cached",
+                        "reasoning": row["reasoning"] or "Cached result",
+                        "model": row["model"] or "unknown",
+                        "alternatives": alternatives,
+                        "processing_time_ms": row["processing_time_ms"] or 0.0,
+                        "cache_hit": True,
+                    }
+
+                self.misses += 1
+
+        except Exception as e:
+            logger.warning(f"Cache lookup failed: {e}")
+            self.misses += 1
+
+        return None
+
+    def store(
+        self, message: str, files_changed: Optional[list[str]], result: dict[str, Any]
+    ) -> bool:
+        """Store classification result in cache.
+
+        Args:
+            message: Commit message
+            files_changed: Optional list of changed files
+            result: Classification result to cache
+
+        Returns:
+            True if stored successfully
+        """
+        cache_key, message_hash, files_hash = self._generate_cache_key(message, files_changed or [])
+
+        try:
+            expires_at = datetime.now() + timedelta(days=self.expiration_days)
+
+            # Serialize alternatives
+            alternatives_json = json.dumps(result.get("alternatives", []))
+
+            with sqlite3.connect(self.cache_path) as conn:
+                conn.execute(
+                    """
+                    INSERT OR REPLACE INTO llm_cache
+                    (cache_key, message_hash, files_hash, category, confidence,
+                     method, reasoning, model, alternatives, processing_time_ms, expires_at)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """,
+                    (
+                        cache_key,
+                        message_hash,
+                        files_hash,
+                        result.get("category", "maintenance"),
+                        result.get("confidence", 0.5),
+                        result.get("method", "llm"),
+                        result.get("reasoning", ""),
+                        result.get("model", ""),
+                        alternatives_json,
+                        result.get("processing_time_ms", 0.0),
+                        expires_at,
+                    ),
+                )
+                conn.commit()
+
+                self.stores += 1
+
+                # Check cache size and cleanup if needed
+                self._check_cache_size(conn)
+
+            return True
+
+        except Exception as e:
+            logger.warning(f"Cache storage failed: {e}")
+            return False
+
+    def _generate_cache_key(self, message: str, files_changed: list[str]) -> tuple[str, str, str]:
+        """Generate cache key components.
+
+        Args:
+            message: Commit message
+            files_changed: List of changed files
+
+        Returns:
+            Tuple of (cache_key, message_hash, files_hash)
+        """
+        # Normalize message
+        normalized_message = message.strip().lower()
+        message_hash = hashlib.md5(normalized_message.encode("utf-8")).hexdigest()
+
+        # Normalize and hash files
+        normalized_files = "|".join(sorted(f.lower() for f in files_changed))
+        files_hash = hashlib.md5(normalized_files.encode("utf-8")).hexdigest()
+
+        # Combined cache key
+        cache_key = f"{message_hash}:{files_hash}"
+
+        return cache_key, message_hash, files_hash
+
+    def _check_cache_size(self, conn: sqlite3.Connection) -> None:
+        """Check cache size and cleanup if needed.
+
+        WHY: Prevents cache from growing unbounded and consuming
+        excessive disk space.
+
+        Args:
+            conn: SQLite connection
+        """
+        # Get current database size
+        db_size_bytes = self.cache_path.stat().st_size if self.cache_path.exists() else 0
+        db_size_mb = db_size_bytes / (1024 * 1024)
+
+        if db_size_mb > self.max_cache_size_mb:
+            logger.info(
+                f"Cache size {db_size_mb:.1f}MB exceeds limit {self.max_cache_size_mb}MB, cleaning up"
+            )
+
+            # Remove expired entries first
+            deleted = self.cleanup_expired()
+            logger.info(f"Removed {deleted} expired entries")
+
+            # If still too large, remove least recently accessed
+            db_size_bytes = self.cache_path.stat().st_size
+            db_size_mb = db_size_bytes / (1024 * 1024)
+
+            if db_size_mb > self.max_cache_size_mb * 0.9:  # Keep 10% buffer
+                # Delete 20% of least recently accessed entries
+                cursor = conn.execute(
+                    """
+                    SELECT COUNT(*) FROM llm_cache
+                    """
+                )
+                total_entries = cursor.fetchone()[0]
+
+                if total_entries > 0:
+                    to_delete = int(total_entries * 0.2)
+                    conn.execute(
+                        """
+                        DELETE FROM llm_cache
+                        WHERE cache_key IN (
+                            SELECT cache_key FROM llm_cache
+                            ORDER BY last_accessed ASC, access_count ASC
+                            LIMIT ?
+                        )
+                        """,
+                        (to_delete,),
+                    )
+                    conn.commit()
+                    logger.info(f"Removed {to_delete} least recently used entries")
+
+    def cleanup_expired(self) -> int:
+        """Remove expired cache entries.
+
+        Returns:
+            Number of entries removed
+        """
+        try:
+            with sqlite3.connect(self.cache_path) as conn:
+                cursor = conn.execute(
+                    """
+                    DELETE FROM llm_cache
+                    WHERE expires_at <= datetime('now')
+                    """
+                )
+                conn.commit()
+                return cursor.rowcount
+
+        except Exception as e:
+            logger.warning(f"Cache cleanup failed: {e}")
+            return 0
+
+    def get_statistics(self) -> dict[str, Any]:
+        """Get cache usage statistics.
+
+        Returns:
+            Dictionary with cache statistics
+        """
+        try:
+            with sqlite3.connect(self.cache_path) as conn:
+                cursor = conn.execute(
+                    """
+                    SELECT
+                        COUNT(*) as total_entries,
+                        COUNT(CASE WHEN expires_at > datetime('now') THEN 1 END) as active_entries,
+                        COUNT(CASE WHEN expires_at <= datetime('now') THEN 1 END) as expired_entries,
+                        AVG(access_count) as avg_access_count,
+                        MAX(access_count) as max_access_count,
+                        COUNT(DISTINCT model) as unique_models
+                    FROM llm_cache
+                    """
+                )
+
+                row = cursor.fetchone()
+                if row:
+                    # Calculate hit rate
+                    total_requests = self.hits + self.misses
+                    hit_rate = self.hits / total_requests if total_requests > 0 else 0.0
+
+                    # Get cache file size
+                    cache_size_mb = (
+                        self.cache_path.stat().st_size / (1024 * 1024)
+                        if self.cache_path.exists()
+                        else 0
+                    )
+
+                    return {
+                        "total_entries": row[0],
+                        "active_entries": row[1],
+                        "expired_entries": row[2],
+                        "avg_access_count": row[3] or 0,
+                        "max_access_count": row[4] or 0,
+                        "unique_models": row[5],
+                        "cache_hits": self.hits,
+                        "cache_misses": self.misses,
+                        "cache_stores": self.stores,
+                        "hit_rate": hit_rate,
+                        "cache_file_size_mb": cache_size_mb,
+                        "max_cache_size_mb": self.max_cache_size_mb,
+                    }
+
+        except Exception as e:
+            logger.warning(f"Failed to get cache statistics: {e}")
+
+        return {
+            "error": "Failed to retrieve statistics",
+            "cache_hits": self.hits,
+            "cache_misses": self.misses,
+            "cache_stores": self.stores,
+        }
+
+    def warm_cache(
+        self, classifications: list[tuple[str, Optional[list[str]], dict[str, Any]]]
+    ) -> int:
+        """Warm cache with pre-computed classifications.
+
+        WHY: Cache warming allows bulk import of classifications,
+        useful for migrations or pre-processing.
+
+        Args:
+            classifications: List of (message, files, result) tuples
+
+        Returns:
+            Number of entries added
+        """
+        added = 0
+        for message, files, result in classifications:
+            if self.store(message, files, result):
+                added += 1
+
+        logger.info(f"Warmed cache with {added} entries")
+        return added
+
+    def export_cache(self, output_file: Path) -> int:
+        """Export cache contents to JSON file.
+
+        Args:
+            output_file: Path to export file
+
+        Returns:
+            Number of entries exported
+        """
+        try:
+            with sqlite3.connect(self.cache_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.execute(
+                    """
+                    SELECT * FROM llm_cache
+                    WHERE expires_at > datetime('now')
+                    ORDER BY access_count DESC
+                    """
+                )
+
+                entries = []
+                for row in cursor:
+                    entry = dict(row)
+                    # Parse JSON fields
+                    if entry["alternatives"]:
+                        try:
+                            entry["alternatives"] = json.loads(entry["alternatives"])
+                        except json.JSONDecodeError:
+                            entry["alternatives"] = []
+                    entries.append(entry)
+
+            with open(output_file, "w") as f:
+                json.dump(
+                    {
+                        "cache_entries": entries,
+                        "statistics": self.get_statistics(),
+                        "exported_at": datetime.now().isoformat(),
+                    },
+                    f,
+                    indent=2,
+                    default=str,
+                )
+
+            logger.info(f"Exported {len(entries)} cache entries to {output_file}")
+            return len(entries)
+
+        except Exception as e:
+            logger.error(f"Cache export failed: {e}")
+            return 0
+
+    def clear(self) -> int:
+        """Clear all cache entries.
+
+        Returns:
+            Number of entries cleared
+        """
+        try:
+            with sqlite3.connect(self.cache_path) as conn:
+                cursor = conn.execute("DELETE FROM llm_cache")
+                conn.commit()
+                cleared = cursor.rowcount
+
+                # Reset statistics
+                self.hits = 0
+                self.misses = 0
+                self.stores = 0
+
+                logger.info(f"Cleared {cleared} cache entries")
+                return cleared
+
+        except Exception as e:
+            logger.error(f"Cache clear failed: {e}")
+            return 0
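
For orientation, here is a minimal usage sketch of the LLMCache class added above. It is not part of the package; the import path follows the file list, the cache path and result values are hypothetical, and the result dict simply mirrors the keys that store() serializes.

    from pathlib import Path

    from gitflow_analytics.qualitative.classifiers.llm.cache import LLMCache

    # Hypothetical cache location and tuning values, for illustration only.
    cache = LLMCache(Path(".cache/llm_classifications.db"), expiration_days=30, max_cache_size_mb=100)

    message = "fix: handle null pointer in parser"
    files = ["src/parser.py"]

    if cache.get(message, files) is None:
        # Stand-in for a real LLM call; the dict mirrors the keys store() persists.
        result = {
            "category": "bugfix",
            "confidence": 0.92,
            "method": "llm",
            "reasoning": "Message describes a defect repair",
            "model": "example-model",
            "alternatives": [],
            "processing_time_ms": 350.0,
        }
        cache.store(message, files, result)

    # Keys are md5 hashes of the stripped, lowercased message plus the
    # lowercased, sorted file list, so this case-variant lookup resolves to
    # the same entry and returns method == "cached" with cache_hit == True.
    hit = cache.get("FIX: Handle null pointer in parser", ["SRC/parser.py"])
    print(hit["category"], hit["method"], hit["cache_hit"])

    print(cache.get_statistics()["hit_rate"])  # 1 hit over 2 lookups -> 0.5

The normalization in _generate_cache_key is what makes the second lookup a hit: commit messages and file paths that differ only in case or surrounding whitespace hash to the same key, so reruns over the same history do not trigger fresh API calls.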