mcp-vector-search 0.15.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcp-vector-search might be problematic. Click here for more details.

Files changed (86)
  1. mcp_vector_search/__init__.py +10 -0
  2. mcp_vector_search/cli/__init__.py +1 -0
  3. mcp_vector_search/cli/commands/__init__.py +1 -0
  4. mcp_vector_search/cli/commands/auto_index.py +397 -0
  5. mcp_vector_search/cli/commands/chat.py +534 -0
  6. mcp_vector_search/cli/commands/config.py +393 -0
  7. mcp_vector_search/cli/commands/demo.py +358 -0
  8. mcp_vector_search/cli/commands/index.py +762 -0
  9. mcp_vector_search/cli/commands/init.py +658 -0
  10. mcp_vector_search/cli/commands/install.py +869 -0
  11. mcp_vector_search/cli/commands/install_old.py +700 -0
  12. mcp_vector_search/cli/commands/mcp.py +1254 -0
  13. mcp_vector_search/cli/commands/reset.py +393 -0
  14. mcp_vector_search/cli/commands/search.py +796 -0
  15. mcp_vector_search/cli/commands/setup.py +1133 -0
  16. mcp_vector_search/cli/commands/status.py +584 -0
  17. mcp_vector_search/cli/commands/uninstall.py +404 -0
  18. mcp_vector_search/cli/commands/visualize/__init__.py +39 -0
  19. mcp_vector_search/cli/commands/visualize/cli.py +265 -0
  20. mcp_vector_search/cli/commands/visualize/exporters/__init__.py +12 -0
  21. mcp_vector_search/cli/commands/visualize/exporters/html_exporter.py +33 -0
  22. mcp_vector_search/cli/commands/visualize/exporters/json_exporter.py +29 -0
  23. mcp_vector_search/cli/commands/visualize/graph_builder.py +709 -0
  24. mcp_vector_search/cli/commands/visualize/layout_engine.py +469 -0
  25. mcp_vector_search/cli/commands/visualize/server.py +201 -0
  26. mcp_vector_search/cli/commands/visualize/state_manager.py +428 -0
  27. mcp_vector_search/cli/commands/visualize/templates/__init__.py +16 -0
  28. mcp_vector_search/cli/commands/visualize/templates/base.py +218 -0
  29. mcp_vector_search/cli/commands/visualize/templates/scripts.py +3670 -0
  30. mcp_vector_search/cli/commands/visualize/templates/styles.py +779 -0
  31. mcp_vector_search/cli/commands/visualize.py.original +2536 -0
  32. mcp_vector_search/cli/commands/watch.py +287 -0
  33. mcp_vector_search/cli/didyoumean.py +520 -0
  34. mcp_vector_search/cli/export.py +320 -0
  35. mcp_vector_search/cli/history.py +295 -0
  36. mcp_vector_search/cli/interactive.py +342 -0
  37. mcp_vector_search/cli/main.py +484 -0
  38. mcp_vector_search/cli/output.py +414 -0
  39. mcp_vector_search/cli/suggestions.py +375 -0
  40. mcp_vector_search/config/__init__.py +1 -0
  41. mcp_vector_search/config/constants.py +24 -0
  42. mcp_vector_search/config/defaults.py +200 -0
  43. mcp_vector_search/config/settings.py +146 -0
  44. mcp_vector_search/core/__init__.py +1 -0
  45. mcp_vector_search/core/auto_indexer.py +298 -0
  46. mcp_vector_search/core/config_utils.py +394 -0
  47. mcp_vector_search/core/connection_pool.py +360 -0
  48. mcp_vector_search/core/database.py +1237 -0
  49. mcp_vector_search/core/directory_index.py +318 -0
  50. mcp_vector_search/core/embeddings.py +294 -0
  51. mcp_vector_search/core/exceptions.py +89 -0
  52. mcp_vector_search/core/factory.py +318 -0
  53. mcp_vector_search/core/git_hooks.py +345 -0
  54. mcp_vector_search/core/indexer.py +1002 -0
  55. mcp_vector_search/core/llm_client.py +453 -0
  56. mcp_vector_search/core/models.py +294 -0
  57. mcp_vector_search/core/project.py +350 -0
  58. mcp_vector_search/core/scheduler.py +330 -0
  59. mcp_vector_search/core/search.py +952 -0
  60. mcp_vector_search/core/watcher.py +322 -0
  61. mcp_vector_search/mcp/__init__.py +5 -0
  62. mcp_vector_search/mcp/__main__.py +25 -0
  63. mcp_vector_search/mcp/server.py +752 -0
  64. mcp_vector_search/parsers/__init__.py +8 -0
  65. mcp_vector_search/parsers/base.py +296 -0
  66. mcp_vector_search/parsers/dart.py +605 -0
  67. mcp_vector_search/parsers/html.py +413 -0
  68. mcp_vector_search/parsers/javascript.py +643 -0
  69. mcp_vector_search/parsers/php.py +694 -0
  70. mcp_vector_search/parsers/python.py +502 -0
  71. mcp_vector_search/parsers/registry.py +223 -0
  72. mcp_vector_search/parsers/ruby.py +678 -0
  73. mcp_vector_search/parsers/text.py +186 -0
  74. mcp_vector_search/parsers/utils.py +265 -0
  75. mcp_vector_search/py.typed +1 -0
  76. mcp_vector_search/utils/__init__.py +42 -0
  77. mcp_vector_search/utils/gitignore.py +250 -0
  78. mcp_vector_search/utils/gitignore_updater.py +212 -0
  79. mcp_vector_search/utils/monorepo.py +339 -0
  80. mcp_vector_search/utils/timing.py +338 -0
  81. mcp_vector_search/utils/version.py +47 -0
  82. mcp_vector_search-0.15.7.dist-info/METADATA +884 -0
  83. mcp_vector_search-0.15.7.dist-info/RECORD +86 -0
  84. mcp_vector_search-0.15.7.dist-info/WHEEL +4 -0
  85. mcp_vector_search-0.15.7.dist-info/entry_points.txt +3 -0
  86. mcp_vector_search-0.15.7.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,320 @@
1
+ """Export functionality for search results."""
2
+
3
+ import csv
4
+ import json
5
+ from datetime import datetime
6
+ from pathlib import Path
7
+
8
+ from rich.console import Console
9
+
10
+ from ..core.models import SearchResult
11
+ from .output import print_error, print_success
12
+
13
+ console = Console()
14
+
15
+
16
class SearchResultExporter:
    """Export search results to various formats (JSON, CSV, Markdown, summary)."""

    def __init__(self):
        """Initialize exporter."""
        pass

    def export_to_json(
        self,
        results: list[SearchResult],
        output_path: Path,
        query: str,
        include_metadata: bool = True,
    ) -> bool:
        """Export results to JSON format.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query
            include_metadata: Whether to include metadata

        Returns:
            True if successful
        """
        try:
            rows = []
            for hit in results:
                row = {
                    "rank": hit.rank,
                    "file_path": str(hit.file_path),
                    "similarity_score": hit.similarity_score,
                    "start_line": hit.start_line,
                    "end_line": hit.end_line,
                    "language": hit.language,
                    "chunk_type": hit.chunk_type,
                }
                # Optional fields are only emitted when they carry a value,
                # matching the compact shape consumers expect.
                for key, value in (
                    ("function_name", hit.function_name),
                    ("class_name", hit.class_name),
                    ("content", hit.content),
                ):
                    if value:
                        row[key] = value
                if include_metadata:
                    row["location"] = hit.location
                rows.append(row)

            payload = {
                "query": query,
                "timestamp": datetime.now().isoformat(),
                "total_results": len(results),
                "results": rows,
            }

            with open(output_path, "w", encoding="utf-8") as fh:
                json.dump(payload, fh, indent=2, ensure_ascii=False)

            print_success(f"Exported {len(results)} results to {output_path}")
            return True

        except Exception as e:
            print_error(f"Failed to export to JSON: {e}")
            return False

    def export_to_csv(
        self, results: list[SearchResult], output_path: Path, query: str
    ) -> bool:
        """Export results to CSV format.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query

        Returns:
            True if successful
        """
        try:
            columns = [
                "rank",
                "file_path",
                "similarity_score",
                "start_line",
                "end_line",
                "language",
                "chunk_type",
                "function_name",
                "class_name",
                "location",
            ]

            # First data row carries query metadata as '#'-prefixed cells.
            meta_row = dict.fromkeys(columns, "")
            meta_row["rank"] = f"# Query: {query}"
            meta_row["file_path"] = f"# Timestamp: {datetime.now().isoformat()}"
            meta_row["similarity_score"] = f"# Total Results: {len(results)}"

            with open(output_path, "w", newline="", encoding="utf-8") as fh:
                writer = csv.DictWriter(fh, fieldnames=columns)
                writer.writeheader()
                writer.writerow(meta_row)

                for hit in results:
                    writer.writerow(
                        {
                            "rank": hit.rank,
                            "file_path": str(hit.file_path),
                            "similarity_score": f"{hit.similarity_score:.4f}",
                            "start_line": hit.start_line,
                            "end_line": hit.end_line,
                            "language": hit.language,
                            "chunk_type": hit.chunk_type,
                            "function_name": hit.function_name or "",
                            "class_name": hit.class_name or "",
                            "location": hit.location,
                        }
                    )

            print_success(f"Exported {len(results)} results to {output_path}")
            return True

        except Exception as e:
            print_error(f"Failed to export to CSV: {e}")
            return False

    def export_to_markdown(
        self,
        results: list[SearchResult],
        output_path: Path,
        query: str,
        include_content: bool = True,
    ) -> bool:
        """Export results to Markdown format.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query
            include_content: Whether to include code content

        Returns:
            True if successful
        """
        try:
            chunks: list[str] = [
                "# Search Results\n\n",
                f"**Query:** `{query}`\n",
                f"**Timestamp:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n",
                f"**Total Results:** {len(results)}\n\n",
            ]

            for hit in results:
                chunks.append(f"## {hit.rank}. {hit.file_path.name}\n\n")

                # Basic info
                chunks.append(f"- **File:** `{hit.file_path}`\n")
                chunks.append(
                    f"- **Location:** Lines {hit.start_line}-{hit.end_line}\n"
                )
                chunks.append(f"- **Similarity:** {hit.similarity_score:.2%}\n")
                chunks.append(f"- **Language:** {hit.language}\n")

                if hit.function_name:
                    chunks.append(f"- **Function:** `{hit.function_name}()`\n")
                if hit.class_name:
                    chunks.append(f"- **Class:** `{hit.class_name}`\n")

                chunks.append("\n")

                # Code content as a fenced block in the chunk's language.
                if include_content and hit.content:
                    chunks.append(f"```{hit.language}\n{hit.content}\n```\n\n")

                chunks.append("---\n\n")

            with open(output_path, "w", encoding="utf-8") as fh:
                fh.writelines(chunks)

            print_success(f"Exported {len(results)} results to {output_path}")
            return True

        except Exception as e:
            print_error(f"Failed to export to Markdown: {e}")
            return False

    def export_summary_table(
        self, results: list[SearchResult], output_path: Path, query: str
    ) -> bool:
        """Export a summary table of results.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query

        Returns:
            True if successful
        """
        try:
            # Frequency tables over the result set.
            lang_counts: dict[str, int] = {}
            file_counts: dict[str, int] = {}
            func_counts: dict[str, int] = {}
            class_counts: dict[str, int] = {}

            for hit in results:
                lang_counts[hit.language] = lang_counts.get(hit.language, 0) + 1
                fname = hit.file_path.name
                file_counts[fname] = file_counts.get(fname, 0) + 1
                if hit.function_name:
                    func_counts[hit.function_name] = (
                        func_counts.get(hit.function_name, 0) + 1
                    )
                if hit.class_name:
                    class_counts[hit.class_name] = class_counts.get(hit.class_name, 0) + 1

            def _ranked(counter: dict[str, int], limit: int | None = None) -> dict[str, int]:
                """Return *counter* as a dict ordered by count descending."""
                ordered = sorted(counter.items(), key=lambda kv: kv[1], reverse=True)
                return dict(ordered if limit is None else ordered[:limit])

            avg_similarity = (
                sum(r.similarity_score for r in results) / len(results)
                if results
                else 0
            )

            summary_data = {
                "query": query,
                "timestamp": datetime.now().isoformat(),
                "summary": {
                    "total_results": len(results),
                    "average_similarity": round(avg_similarity, 4),
                    "unique_files": len(file_counts),
                    "unique_functions": len(func_counts),
                    "unique_classes": len(class_counts),
                },
                "distributions": {
                    "languages": _ranked(lang_counts),
                    "top_files": _ranked(file_counts, 10),
                    "top_functions": _ranked(func_counts, 10),
                    "top_classes": _ranked(class_counts, 10),
                },
            }

            with open(output_path, "w", encoding="utf-8") as fh:
                json.dump(summary_data, fh, indent=2, ensure_ascii=False)

            print_success(
                f"Exported summary for {len(results)} results to {output_path}"
            )
            return True

        except Exception as e:
            print_error(f"Failed to export summary: {e}")
            return False
288
+
289
+
290
+ def get_export_path(format_type: str, query: str, base_dir: Path | None = None) -> Path:
291
+ """Generate export file path based on format and query.
292
+
293
+ Args:
294
+ format_type: Export format (json, csv, markdown, summary)
295
+ query: Search query
296
+ base_dir: Base directory for export
297
+
298
+ Returns:
299
+ Generated file path
300
+ """
301
+ if base_dir is None:
302
+ base_dir = Path.cwd()
303
+
304
+ # Sanitize query for filename
305
+ safe_query = "".join(
306
+ c for c in query if c.isalnum() or c in (" ", "-", "_")
307
+ ).rstrip()
308
+ safe_query = safe_query.replace(" ", "_")[:50] # Limit length
309
+
310
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
311
+
312
+ extensions = {
313
+ "json": ".json",
314
+ "csv": ".csv",
315
+ "markdown": ".md",
316
+ "summary": "_summary.json",
317
+ }
318
+
319
+ filename = f"search_{safe_query}_{timestamp}{extensions.get(format_type, '.txt')}"
320
+ return base_dir / filename
@@ -0,0 +1,295 @@
1
+ """Search history and favorites management."""
2
+
3
+ import json
4
+ from datetime import datetime
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+ from rich.console import Console
9
+ from rich.table import Table
10
+
11
+ from .output import print_error, print_info, print_success
12
+
13
+ console = Console()
14
+
15
+
16
+ class SearchHistory:
17
+ """Manage search history and favorites."""
18
+
19
+ def __init__(self, project_root: Path):
20
+ """Initialize search history manager."""
21
+ self.project_root = project_root
22
+ self.history_file = project_root / ".mcp-vector-search" / "search_history.json"
23
+ self.favorites_file = project_root / ".mcp-vector-search" / "favorites.json"
24
+
25
+ # Ensure directory exists
26
+ self.history_file.parent.mkdir(parents=True, exist_ok=True)
27
+
28
+ def add_search(
29
+ self,
30
+ query: str,
31
+ results_count: int,
32
+ filters: dict[str, Any] | None = None,
33
+ execution_time: float | None = None,
34
+ ) -> None:
35
+ """Add a search to history.
36
+
37
+ Args:
38
+ query: Search query
39
+ results_count: Number of results found
40
+ filters: Applied filters
41
+ execution_time: Search execution time in seconds
42
+ """
43
+ try:
44
+ history = self._load_history()
45
+
46
+ search_entry = {
47
+ "query": query,
48
+ "timestamp": datetime.now().isoformat(),
49
+ "results_count": results_count,
50
+ "filters": filters or {},
51
+ "execution_time": execution_time,
52
+ }
53
+
54
+ # Add to beginning of history
55
+ history.insert(0, search_entry)
56
+
57
+ # Keep only last 100 searches
58
+ history = history[:100]
59
+
60
+ self._save_history(history)
61
+
62
+ except Exception as e:
63
+ print_error(f"Failed to save search history: {e}")
64
+
65
+ def get_history(self, limit: int = 20) -> list[dict[str, Any]]:
66
+ """Get search history.
67
+
68
+ Args:
69
+ limit: Maximum number of entries to return
70
+
71
+ Returns:
72
+ List of search history entries
73
+ """
74
+ try:
75
+ history = self._load_history()
76
+ return history[:limit]
77
+ except Exception as e:
78
+ print_error(f"Failed to load search history: {e}")
79
+ return []
80
+
81
+ def clear_history(self) -> bool:
82
+ """Clear search history.
83
+
84
+ Returns:
85
+ True if successful
86
+ """
87
+ try:
88
+ self._save_history([])
89
+ print_success("Search history cleared")
90
+ return True
91
+ except Exception as e:
92
+ print_error(f"Failed to clear search history: {e}")
93
+ return False
94
+
95
+ def add_favorite(self, query: str, description: str | None = None) -> bool:
96
+ """Add a search query to favorites.
97
+
98
+ Args:
99
+ query: Search query to favorite
100
+ description: Optional description
101
+
102
+ Returns:
103
+ True if successful
104
+ """
105
+ try:
106
+ favorites = self._load_favorites()
107
+
108
+ # Check if already exists
109
+ for fav in favorites:
110
+ if fav["query"] == query:
111
+ print_info(f"Query already in favorites: {query}")
112
+ return True
113
+
114
+ favorite_entry = {
115
+ "query": query,
116
+ "description": description or "",
117
+ "created": datetime.now().isoformat(),
118
+ "usage_count": 0,
119
+ }
120
+
121
+ favorites.append(favorite_entry)
122
+ self._save_favorites(favorites)
123
+
124
+ print_success(f"Added to favorites: {query}")
125
+ return True
126
+
127
+ except Exception as e:
128
+ print_error(f"Failed to add favorite: {e}")
129
+ return False
130
+
131
+ def remove_favorite(self, query: str) -> bool:
132
+ """Remove a query from favorites.
133
+
134
+ Args:
135
+ query: Query to remove
136
+
137
+ Returns:
138
+ True if successful
139
+ """
140
+ try:
141
+ favorites = self._load_favorites()
142
+ original_count = len(favorites)
143
+
144
+ favorites = [fav for fav in favorites if fav["query"] != query]
145
+
146
+ if len(favorites) < original_count:
147
+ self._save_favorites(favorites)
148
+ print_success(f"Removed from favorites: {query}")
149
+ return True
150
+ else:
151
+ print_info(f"Query not found in favorites: {query}")
152
+ return False
153
+
154
+ except Exception as e:
155
+ print_error(f"Failed to remove favorite: {e}")
156
+ return False
157
+
158
+ def get_favorites(self) -> list[dict[str, Any]]:
159
+ """Get favorite queries.
160
+
161
+ Returns:
162
+ List of favorite queries
163
+ """
164
+ try:
165
+ return self._load_favorites()
166
+ except Exception as e:
167
+ print_error(f"Failed to load favorites: {e}")
168
+ return []
169
+
170
+ def increment_favorite_usage(self, query: str) -> None:
171
+ """Increment usage count for a favorite query.
172
+
173
+ Args:
174
+ query: Query that was used
175
+ """
176
+ try:
177
+ favorites = self._load_favorites()
178
+
179
+ for fav in favorites:
180
+ if fav["query"] == query:
181
+ fav["usage_count"] = fav.get("usage_count", 0) + 1
182
+ fav["last_used"] = datetime.now().isoformat()
183
+ break
184
+
185
+ self._save_favorites(favorites)
186
+
187
+ except Exception as e:
188
+ # Don't show error for this non-critical operation
189
+ logger.debug(f"Failed to update history ranking: {e}")
190
+ pass
191
+
192
+ def _load_history(self) -> list[dict[str, Any]]:
193
+ """Load search history from file."""
194
+ if not self.history_file.exists():
195
+ return []
196
+
197
+ try:
198
+ with open(self.history_file, encoding="utf-8") as f:
199
+ return json.load(f)
200
+ except Exception as e:
201
+ logger.debug(f"Failed to load history file: {e}")
202
+ return []
203
+
204
+ def _save_history(self, history: list[dict[str, Any]]) -> None:
205
+ """Save search history to file."""
206
+ with open(self.history_file, "w", encoding="utf-8") as f:
207
+ json.dump(history, f, indent=2, ensure_ascii=False)
208
+
209
+ def _load_favorites(self) -> list[dict[str, Any]]:
210
+ """Load favorites from file."""
211
+ if not self.favorites_file.exists():
212
+ return []
213
+
214
+ try:
215
+ with open(self.favorites_file, encoding="utf-8") as f:
216
+ return json.load(f)
217
+ except Exception as e:
218
+ logger.debug(f"Failed to load favorites file: {e}")
219
+ return []
220
+
221
+ def _save_favorites(self, favorites: list[dict[str, Any]]) -> None:
222
+ """Save favorites to file."""
223
+ with open(self.favorites_file, "w", encoding="utf-8") as f:
224
+ json.dump(favorites, f, indent=2, ensure_ascii=False)
225
+
226
+
227
def show_search_history(project_root: Path, limit: int = 20) -> None:
    """Display search history in a formatted table."""
    entries = SearchHistory(project_root).get_history(limit)

    if not entries:
        print_info("No search history found")
        return

    table = Table(
        title=f"Search History (Last {len(entries)} searches)", show_header=True
    )
    table.add_column("#", style="cyan", width=3)
    table.add_column("Query", style="white", min_width=20)
    table.add_column("Results", style="green", width=8)
    table.add_column("Time", style="dim", width=16)
    table.add_column("Filters", style="yellow", width=15)

    def _clip(text: str, max_len: int) -> str:
        """Truncate *text* with an ellipsis when it exceeds *max_len*."""
        return text[:max_len] + "..." if len(text) > max_len else text

    for index, entry in enumerate(entries, start=1):
        when = datetime.fromisoformat(entry["timestamp"]).strftime("%m-%d %H:%M")
        applied = ", ".join(f"{k}:{v}" for k, v in entry.get("filters", {}).items())
        if not applied:
            applied = "-"

        table.add_row(
            str(index),
            _clip(entry["query"], 40),
            str(entry["results_count"]),
            when,
            _clip(applied, 15),
        )

    console.print(table)
260
+
261
+
262
def show_favorites(project_root: Path) -> None:
    """Display favorite queries in a formatted table."""
    favorites = SearchHistory(project_root).get_favorites()

    if not favorites:
        print_info("No favorite queries found")
        return

    # Most-used favorites first.
    ranked = sorted(favorites, key=lambda fav: fav.get("usage_count", 0), reverse=True)

    table = Table(title="Favorite Queries", show_header=True)
    table.add_column("#", style="cyan", width=3)
    table.add_column("Query", style="white", min_width=25)
    table.add_column("Description", style="dim", min_width=20)
    table.add_column("Usage", style="green", width=6)
    table.add_column("Created", style="dim", width=10)

    for index, fav in enumerate(ranked, start=1):
        created = datetime.fromisoformat(fav["created"]).strftime("%m-%d")
        full_desc = fav.get("description", "")
        shown_desc = full_desc[:30] + ("..." if len(full_desc) > 30 else "")

        table.add_row(
            str(index),
            fav["query"],
            shown_desc or "-",
            str(fav.get("usage_count", 0)),
            created,
        )

    console.print(table)