kicad-sch-api 0.1.1-py3-none-any.whl → 0.1.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kicad-sch-api might be problematic.
- kicad_sch_api/cli.py +345 -0
- kicad_sch_api/discovery/__init__.py +10 -0
- kicad_sch_api/discovery/search_index.py +421 -0
- kicad_sch_api/mcp/__init__.py +7 -0
- kicad_sch_api/mcp/server.py +1509 -0
- kicad_sch_api-0.1.2.dist-info/METADATA +326 -0
- {kicad_sch_api-0.1.1.dist-info → kicad_sch_api-0.1.2.dist-info}/RECORD +11 -6
- {kicad_sch_api-0.1.1.dist-info → kicad_sch_api-0.1.2.dist-info}/entry_points.txt +1 -0
- kicad_sch_api-0.1.1.dist-info/METADATA +0 -207
- {kicad_sch_api-0.1.1.dist-info → kicad_sch_api-0.1.2.dist-info}/WHEEL +0 -0
- {kicad_sch_api-0.1.1.dist-info → kicad_sch_api-0.1.2.dist-info}/licenses/LICENSE +0 -0
- {kicad_sch_api-0.1.1.dist-info → kicad_sch_api-0.1.2.dist-info}/top_level.txt +0 -0
kicad_sch_api/discovery/search_index.py

@@ -0,0 +1,421 @@
"""
SQLite-based search index for fast component discovery.

This module creates and maintains a lightweight SQLite database for fast
multi-field component searches, built from the existing SymbolDefinition cache.
"""

import logging
import sqlite3
import time
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

from ..library.cache import SymbolDefinition, get_symbol_cache

logger = logging.getLogger(__name__)


class ComponentSearchIndex:
    """Fast SQLite-based search index for KiCAD components."""

    def __init__(self, cache_dir: Optional[Path] = None):
        """Initialize the search index."""
        self.cache_dir = cache_dir or Path.home() / ".cache" / "kicad-sch-api"
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        self.db_path = self.cache_dir / "search_index.db"
        self._init_database()

    def _init_database(self):
        """Initialize the SQLite database schema."""
        with sqlite3.connect(str(self.db_path)) as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS components (
                    lib_id TEXT PRIMARY KEY,
                    name TEXT NOT NULL,
                    library TEXT NOT NULL,
                    description TEXT DEFAULT '',
                    keywords TEXT DEFAULT '',
                    reference_prefix TEXT DEFAULT 'U',
                    pin_count INTEGER DEFAULT 0,
                    category TEXT DEFAULT '',
                    last_updated REAL DEFAULT 0
                )
            """)

            # Create search indexes for fast queries
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_name
                ON components(name COLLATE NOCASE)
            """)

            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_description
                ON components(description COLLATE NOCASE)
            """)

            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_library
                ON components(library)
            """)

            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_category
                ON components(category)
            """)

            # Full-text search virtual table for advanced queries
            conn.execute("""
                CREATE VIRTUAL TABLE IF NOT EXISTS components_fts
                USING fts5(lib_id, name, description, keywords, content=components)
            """)

            conn.commit()
            logger.debug("Initialized search index database")

    def rebuild_index(self, progress_callback: Optional[callable] = None) -> int:
        """Rebuild the search index from the symbol cache."""
        start_time = time.time()
        symbol_cache = get_symbol_cache()

        # Get all cached symbols
        symbols = []
        for lib_name in symbol_cache._library_index.keys():
            try:
                lib_symbols = symbol_cache.get_library_symbols(lib_name)
                symbols.extend(lib_symbols)

                if progress_callback:
                    progress_callback(f"Indexing {lib_name}: {len(lib_symbols)} symbols")

            except Exception as e:
                logger.warning(f"Failed to load library {lib_name}: {e}")

        # Clear and rebuild index
        with sqlite3.connect(str(self.db_path)) as conn:
            conn.execute("DELETE FROM components")
            conn.execute("DELETE FROM components_fts")

            # Insert symbols in batches for better performance
            batch_size = 100
            for i in range(0, len(symbols), batch_size):
                batch = symbols[i:i + batch_size]

                # Prepare batch data
                batch_data = []
                for symbol in batch:
                    batch_data.append((
                        symbol.lib_id,
                        symbol.name,
                        symbol.library,
                        symbol.description,
                        symbol.keywords,
                        symbol.reference_prefix,
                        len(symbol.pins),
                        self._categorize_component(symbol),
                        time.time()
                    ))

                # Insert batch
                conn.executemany("""
                    INSERT OR REPLACE INTO components
                    (lib_id, name, library, description, keywords, reference_prefix,
                     pin_count, category, last_updated)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, batch_data)

                # Update FTS table
                conn.executemany("""
                    INSERT OR REPLACE INTO components_fts
                    (lib_id, name, description, keywords)
                    VALUES (?, ?, ?, ?)
                """, [(data[0], data[1], data[3], data[4]) for data in batch_data])

                if progress_callback:
                    progress_callback(f"Indexed {min(i + batch_size, len(symbols))}/{len(symbols)} components")

            conn.commit()

        elapsed = time.time() - start_time
        logger.info(f"Rebuilt search index with {len(symbols)} components in {elapsed:.2f}s")
        return len(symbols)

    def search(self, query: str, library: Optional[str] = None,
               category: Optional[str] = None, limit: int = 20) -> List[Dict[str, Any]]:
        """Search components using multiple strategies."""
        results = []

        # Try different search strategies
        strategies = [
            self._search_exact_match,
            self._search_prefix_match,
            self._search_contains,
            self._search_fts,
        ]

        for strategy in strategies:
            try:
                strategy_results = strategy(query, library, category, limit - len(results))

                # Avoid duplicates
                existing_ids = {r["lib_id"] for r in results}
                new_results = [r for r in strategy_results if r["lib_id"] not in existing_ids]

                results.extend(new_results)

                if len(results) >= limit:
                    break

            except Exception as e:
                logger.debug(f"Search strategy failed: {e}")

        return results[:limit]

    def _search_exact_match(self, query: str, library: Optional[str],
                            category: Optional[str], limit: int) -> List[Dict[str, Any]]:
        """Search for exact name matches."""
        conditions = ["name = ? COLLATE NOCASE"]
        params = [query]

        if library:
            conditions.append("library = ?")
            params.append(library)

        if category:
            conditions.append("category = ?")
            params.append(category)

        sql = f"""
            SELECT lib_id, name, library, description, keywords, reference_prefix,
                   pin_count, category, 1.0 as match_score
            FROM components
            WHERE {' AND '.join(conditions)}
            ORDER BY name
            LIMIT ?
        """
        params.append(limit)

        with sqlite3.connect(str(self.db_path)) as conn:
            conn.row_factory = sqlite3.Row
            return [dict(row) for row in conn.execute(sql, params)]

    def _search_prefix_match(self, query: str, library: Optional[str],
                             category: Optional[str], limit: int) -> List[Dict[str, Any]]:
        """Search for components starting with query."""
        conditions = ["name LIKE ? COLLATE NOCASE"]
        params = [f"{query}%"]

        if library:
            conditions.append("library = ?")
            params.append(library)

        if category:
            conditions.append("category = ?")
            params.append(category)

        sql = f"""
            SELECT lib_id, name, library, description, keywords, reference_prefix,
                   pin_count, category, 0.8 as match_score
            FROM components
            WHERE {' AND '.join(conditions)}
            ORDER BY name
            LIMIT ?
        """
        params.append(limit)

        with sqlite3.connect(str(self.db_path)) as conn:
            conn.row_factory = sqlite3.Row
            return [dict(row) for row in conn.execute(sql, params)]

    def _search_contains(self, query: str, library: Optional[str],
                         category: Optional[str], limit: int) -> List[Dict[str, Any]]:
        """Search for components containing query in name or description."""
        conditions = ["(name LIKE ? COLLATE NOCASE OR description LIKE ? COLLATE NOCASE)"]
        params = [f"%{query}%", f"%{query}%"]

        if library:
            conditions.append("library = ?")
            params.append(library)

        if category:
            conditions.append("category = ?")
            params.append(category)

        sql = f"""
            SELECT lib_id, name, library, description, keywords, reference_prefix,
                   pin_count, category, 0.6 as match_score
            FROM components
            WHERE {' AND '.join(conditions)}
            ORDER BY
                CASE WHEN name LIKE ? COLLATE NOCASE THEN 1 ELSE 2 END,
                name
            LIMIT ?
        """
        params.extend([f"%{query}%", limit])

        with sqlite3.connect(str(self.db_path)) as conn:
            conn.row_factory = sqlite3.Row
            return [dict(row) for row in conn.execute(sql, params)]

    def _search_fts(self, query: str, library: Optional[str],
                    category: Optional[str], limit: int) -> List[Dict[str, Any]]:
        """Full-text search using FTS5."""
        # Build FTS query
        fts_query = ' '.join(f'"{term}"*' for term in query.split())

        sql = """
            SELECT c.lib_id, c.name, c.library, c.description, c.keywords,
                   c.reference_prefix, c.pin_count, c.category,
                   fts.rank as match_score
            FROM components_fts fts
            JOIN components c ON c.lib_id = fts.lib_id
            WHERE fts MATCH ?
        """
        params = [fts_query]

        if library:
            sql += " AND c.library = ?"
            params.append(library)

        if category:
            sql += " AND c.category = ?"
            params.append(category)

        sql += " ORDER BY fts.rank LIMIT ?"
        params.append(limit)

        try:
            with sqlite3.connect(str(self.db_path)) as conn:
                conn.row_factory = sqlite3.Row
                return [dict(row) for row in conn.execute(sql, params)]
        except sqlite3.OperationalError:
            # FTS query failed, return empty results
            return []

    def get_libraries(self) -> List[Dict[str, Any]]:
        """Get all available libraries with component counts."""
        sql = """
            SELECT library, COUNT(*) as component_count
            FROM components
            GROUP BY library
            ORDER BY library
        """

        with sqlite3.connect(str(self.db_path)) as conn:
            conn.row_factory = sqlite3.Row
            return [dict(row) for row in conn.execute(sql)]

    def get_categories(self) -> List[Dict[str, Any]]:
        """Get all component categories with counts."""
        sql = """
            SELECT category, COUNT(*) as component_count
            FROM components
            WHERE category != ''
            GROUP BY category
            ORDER BY component_count DESC
        """

        with sqlite3.connect(str(self.db_path)) as conn:
            conn.row_factory = sqlite3.Row
            return [dict(row) for row in conn.execute(sql)]

    def validate_component(self, lib_id: str) -> Optional[Dict[str, Any]]:
        """Check if a component exists in the index."""
        sql = """
            SELECT lib_id, name, library, description, keywords, reference_prefix,
                   pin_count, category
            FROM components
            WHERE lib_id = ?
        """

        with sqlite3.connect(str(self.db_path)) as conn:
            conn.row_factory = sqlite3.Row
            result = conn.execute(sql, [lib_id]).fetchone()
            return dict(result) if result else None

    def get_stats(self) -> Dict[str, Any]:
        """Get search index statistics."""
        with sqlite3.connect(str(self.db_path)) as conn:
            total_components = conn.execute("SELECT COUNT(*) FROM components").fetchone()[0]
            total_libraries = conn.execute("SELECT COUNT(DISTINCT library) FROM components").fetchone()[0]

            # Get library breakdown
            library_stats = conn.execute("""
                SELECT library, COUNT(*) as count
                FROM components
                GROUP BY library
                ORDER BY count DESC
                LIMIT 10
            """).fetchall()

            return {
                "total_components": total_components,
                "total_libraries": total_libraries,
                "top_libraries": [{"library": lib, "count": count} for lib, count in library_stats],
                "database_path": str(self.db_path),
                "database_size_mb": round(self.db_path.stat().st_size / (1024 * 1024), 2)
            }

    def _categorize_component(self, symbol: SymbolDefinition) -> str:
        """Categorize a component based on its properties."""
        prefix = symbol.reference_prefix.upper()
        name_lower = symbol.name.lower()
        desc_lower = symbol.description.lower()

        # Category mapping based on reference prefix and description
        if prefix == "R":
            return "resistor"
        elif prefix == "C":
            return "capacitor"
        elif prefix == "L":
            return "inductor"
        elif prefix in ["D", "LED"]:
            return "diode"
        elif prefix == "Q":
            return "transistor"
        elif prefix == "U":
            if any(term in desc_lower for term in ["microcontroller", "mcu", "processor"]):
                return "microcontroller"
            elif any(term in desc_lower for term in ["amplifier", "op-amp", "opamp"]):
                return "amplifier"
            elif any(term in desc_lower for term in ["regulator", "ldo", "buck", "boost"]):
                return "regulator"
            else:
                return "integrated_circuit"
        elif prefix == "J":
            return "connector"
        elif prefix in ["SW", "S"]:
            return "switch"
        elif prefix == "Y":
            return "crystal"
        elif prefix == "TP":
            return "test_point"
        else:
            return "other"


# Global search index instance
_global_search_index: Optional[ComponentSearchIndex] = None


def get_search_index() -> ComponentSearchIndex:
    """Get the global search index instance."""
    global _global_search_index
    if _global_search_index is None:
        _global_search_index = ComponentSearchIndex()
    return _global_search_index


def ensure_index_built(rebuild: bool = False) -> int:
    """Ensure the search index is built and up-to-date."""
    index = get_search_index()

    if rebuild or not index.db_path.exists():
        logger.info("Building component search index...")
        return index.rebuild_index()
    else:
        # Check if index needs updating based on symbol cache
        stats = index.get_stats()
        logger.info(f"Search index ready: {stats['total_components']} components")
        return stats["total_components"]
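
For orientation (not part of the package diff itself), here is a minimal usage sketch of the discovery API added in this release. It assumes the local KiCAD symbol cache used by kicad_sch_api.library.cache has already been populated on the machine; the query string, category, and lib_id values below are illustrative placeholders rather than values taken from the diff.

# Minimal usage sketch, based on the module shown above.
from kicad_sch_api.discovery.search_index import ensure_index_built, get_search_index

# Build (or reuse) the SQLite index derived from the symbol cache.
total = ensure_index_built(rebuild=False)
print(f"Indexed components: {total}")

index = get_search_index()

# Multi-strategy search: exact, prefix, substring, then FTS5, deduplicated by lib_id.
for hit in index.search("opamp", category="amplifier", limit=5):
    print(hit["lib_id"], hit["match_score"], hit["description"])

# Check whether a specific lib_id is known to the index (value is illustrative).
if index.validate_component("Device:R"):
    print("Device:R is indexed")

print(index.get_stats()["database_path"])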