fixflow-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +21 -0
- package/README.md +274 -0
- package/bin/cli.mjs +113 -0
- package/fastmcp_docs_server/README.md +64 -0
- package/fastmcp_docs_server/__init__.py +1 -0
- package/fastmcp_docs_server/__pycache__/server.cpython-311.pyc +0 -0
- package/fastmcp_docs_server/requirements.txt +3 -0
- package/fastmcp_docs_server/server.py +607 -0
- package/fastmcp_docs_server/skills/creating-kb-cards.md +146 -0
- package/fastmcp_docs_server/tech_kb/devops/CROSS_DOCKER_001.md +104 -0
- package/fastmcp_docs_server/tech_kb/devops/CROSS_GIT_001.md +70 -0
- package/fastmcp_docs_server/tech_kb/index.json +78 -0
- package/fastmcp_docs_server/tech_kb/terminal/WIN_TERM_042.md +93 -0
- package/package.json +28 -0
|
@@ -0,0 +1,607 @@
|
|
|
1
|
+
"""
|
|
2
|
+
FixFlow MCP Server — Community Knowledge Base for AI Agents.
|
|
3
|
+
|
|
4
|
+
Provides tools to search, read, create, and validate
|
|
5
|
+
technical KB cards via the Model Context Protocol.
|
|
6
|
+
|
|
7
|
+
Architecture:
|
|
8
|
+
Local MCP Server (stdio) ←→ Supabase Cloud DB (shared)
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from fastmcp import FastMCP
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import re
|
|
15
|
+
import sys
|
|
16
|
+
import yaml
|
|
17
|
+
from typing import List, Dict, Optional
|
|
18
|
+
|
|
19
|
+
# ─── Configuration ───────────────────────────────────────────────
mcp = FastMCP("FixFlow")

# All KB data lives next to this module; the JSON index is the local catalog.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
KB_DIR = os.path.join(BASE_DIR, "fixflow_kb")
INDEX_FILE = os.path.join(KB_DIR, "index.json")

MAX_CONTENT_LENGTH = 100 * 1024  # 100 KB limit
MAX_QUERY_LENGTH = 200  # Query length limit (DoS protection)

# Supabase Configuration (via env vars for security)
# Support both FIXFLOW_ and TECHDOCS_ prefixes for backward compatibility
# SECURITY(review): a shared anon key is embedded below as the fallback.
# Even a "public" anon key grants whatever its RLS policies allow to every
# package consumer — confirm the policies are read/insert-only, and consider
# making the env vars mandatory instead of shipping a default credential.
SUPABASE_URL = os.environ.get("FIXFLOW_SUPABASE_URL", os.environ.get("TECHDOCS_SUPABASE_URL", "https://hbwrduqbmuupxhtndrta.supabase.co"))
SUPABASE_KEY = os.environ.get("FIXFLOW_SUPABASE_KEY", os.environ.get("TECHDOCS_SUPABASE_KEY", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Imhid3JkdXFibXV1cHhodG5kcnRhIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NzEyNzQxNDQsImV4cCI6MjA4Njg1MDE0NH0.t37Ag0pQHuYdyflfviST69ZX8R2FTNCdLzhpN2tt_s0"))

# ─── Supabase Client (lazy init) ─────────────────────────────────

# Singleton cache for get_supabase(); None until the first successful connect.
_supabase_client = None
|
|
37
|
+
|
|
38
|
+
def get_supabase():
    """Lazy-initialize Supabase client. Returns None if unavailable."""
    global _supabase_client

    # Reuse the already-connected singleton.
    if _supabase_client is not None:
        return _supabase_client

    try:
        # Deferred import: keeps local-only mode working without supabase-py.
        from supabase import create_client
        _supabase_client = create_client(SUPABASE_URL, SUPABASE_KEY)
    except ImportError:
        sys.stderr.write("⚠️ Supabase: supabase-py not installed, using local-only mode\n")
        return None
    except Exception as e:
        sys.stderr.write(f"⚠️ Supabase: connection failed ({e}), using local-only mode\n")
        return None

    sys.stderr.write("✅ Supabase: connected\n")
    return _supabase_client
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# ─── Helpers ─────────────────────────────────────────────────────
|
|
58
|
+
|
|
59
|
+
def load_kb_index() -> List[Dict]:
    """Load the KB index from disk. Returns empty list on error."""
    if not os.path.exists(INDEX_FILE):
        return []

    try:
        with open(INDEX_FILE, "r", encoding="utf-8") as fh:
            return json.load(fh)
    except Exception as e:
        # Redact the absolute install path from the logged message.
        sys.stderr.write(f"Index load error: {str(e).replace(BASE_DIR, '...')}\n")
        return []
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def validate_kb_markdown(content: str) -> tuple[bool, str]:
    """
    Validate KB card Markdown content.

    Checks size limit, YAML frontmatter, required metadata fields,
    KB_ID format, and required Markdown sections.
    Returns (is_valid, error_message); error_message is "" when valid.
    """
    # Size gate first: avoids running regexes over oversized payloads.
    if len(content) > MAX_CONTENT_LENGTH:
        return False, f"Content exceeds {MAX_CONTENT_LENGTH // 1024}KB limit."

    # Frontmatter must open the document: --- ... ---
    yaml_match = re.match(r'^---\r?\n(.*?)\r?\n---', content, re.DOTALL)
    if not yaml_match:
        return False, "Missing YAML frontmatter (--- ... ---)."

    try:
        metadata = yaml.safe_load(yaml_match.group(1))
    except yaml.YAMLError as e:
        return False, f"Invalid YAML: {e}"

    if not isinstance(metadata, dict):
        return False, "Frontmatter is not a valid YAML mapping."

    required_fields = ['kb_id', 'category', 'platform', 'criticality']
    missing = [f for f in required_fields if f not in metadata]
    if missing:
        return False, f"Missing required fields: {', '.join(missing)}."

    kb_id = metadata.get('kb_id', '')
    # Robustness fix: YAML may parse kb_id as a non-string (e.g. a bare
    # number), which previously made re.match raise TypeError.
    if not isinstance(kb_id, str) or not re.match(r'^[A-Z]+_[A-Z]+_\d+$', kb_id):
        return False, f"Invalid KB_ID format '{kb_id}'. Expected: PLATFORM_CATEGORY_NUMBER (e.g. WIN_TERM_042)."

    # BUG FIX: the title pattern was r'#\s+', which also matches the '# '
    # inside any '## Section' heading, so a card with no top-level '# Title'
    # line still validated. Anchoring at line start (MULTILINE) fixes this;
    # the other patterns are unaffected by the flag.
    required_sections = {
        'Title (# ...)': r'^#\s+',
        '🔍 This Is Your Problem If:': r'##\s+🔍\s+This Is Your Problem If',
        '✅ SOLUTION (copy-paste)': r'##\s+✅\s+SOLUTION\s+\(copy-paste\)',
        '✔️ Verification': r'##\s+✔️\s+Verification',
    }
    for name, pattern in required_sections.items():
        if not re.search(pattern, content, re.MULTILINE):
            return False, f"Missing required section: '{name}'."

    if len(content) < 200:
        return False, "Content too short (minimum 200 characters)."

    return True, ""
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def extract_tldr_fields(content: str) -> dict:
    """Extract quick_summary and fix_time from TL;DR block."""
    summary_match = re.search(r'\*\*TL;DR\*\*:\s*(.+)', content)
    time_match = re.search(r'\*\*Fix Time\*\*:\s*([^|]+)', content)

    return {
        # Summary is the remainder of the TL;DR line; fix time stops at '|'.
        "quick_summary": summary_match.group(1).strip() if summary_match else "",
        "fix_time": time_match.group(1).strip() if time_match else "",
    }
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def save_to_local(kb_id: str, category: str, content: str, metadata: dict) -> tuple[bool, str]:
    """Save KB card to local filesystem and update the local index.

    Returns (True, rel_path) on success, or (False, error_message).
    """
    category_dir = os.path.join(KB_DIR, category)
    os.makedirs(category_dir, exist_ok=True)

    filename = f"{kb_id}.md"
    file_path = os.path.join(category_dir, filename)
    # BUG FIX: rel_path was hard-coded to f"{category}/(unknown)", so every
    # index entry pointed at a non-existent file and read_kb_doc() could
    # never resolve a card locally.
    rel_path = f"{category}/{filename}"

    # Security: Ensure path stays inside KB_DIR. Appending os.sep prevents a
    # prefix-sibling bypass (e.g. ".../fixflow_kb_evil" matching ".../fixflow_kb").
    kb_root = os.path.abspath(KB_DIR)
    if not os.path.abspath(file_path).startswith(kb_root + os.sep):
        return False, "❌ Security Error: Access denied."

    try:
        with open(file_path, "w", encoding="utf-8") as f:
            f.write(content)
    except Exception as e:
        return False, f"❌ Error saving local file: {e}"

    # Update local index: drop any stale entry for this kb_id, then re-add.
    # (.get() instead of [] so a malformed index entry can't crash the save.)
    index = [item for item in load_kb_index() if item.get('kb_id') != kb_id]

    title_match = re.search(r'^#\s+(.+)$', content, re.MULTILINE)
    title = title_match.group(1) if title_match else "Unknown Title"
    tldr = extract_tldr_fields(content)

    index.append({
        "kb_id": kb_id,
        "title": title,
        "category": category,
        "platform": metadata.get('platform', 'unknown'),
        "technologies": metadata.get('technologies', []),
        "complexity": metadata.get('complexity', 1),
        "criticality": metadata.get('criticality', 'low'),
        "created": metadata.get('created', ''),
        "tags": metadata.get('tags', []),
        "related_kb": metadata.get('related_kb', []),
        "file_path": rel_path,
        "quick_summary": tldr["quick_summary"],
        "fix_time": tldr["fix_time"],
    })

    try:
        with open(INDEX_FILE, "w", encoding="utf-8") as f:
            json.dump(index, f, indent=4, ensure_ascii=False)
    except Exception as e:
        return False, f"⚠️ File saved, but index update failed: {e}"

    return True, rel_path
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def save_to_cloud(kb_id: str, content: str, metadata: dict) -> tuple[bool, str]:
    """Save KB card to Supabase cloud database."""
    sb = get_supabase()
    if not sb:
        return False, "Cloud unavailable"

    title_m = re.search(r'^#\s+(.+)$', content, re.MULTILINE)
    title = title_m.group(1) if title_m else "Unknown Title"
    tldr = extract_tldr_fields(content)

    # Embedding enables semantic search; None is acceptable (FTS still works).
    emb_text = f"{title} {tldr['quick_summary']} {' '.join(metadata.get('tags', []))}"
    embedding = _get_embedding(emb_text)

    row = {
        "kb_id": kb_id,
        "title": title,
        "category": metadata.get('category', '').lower(),
        "platform": metadata.get('platform', 'unknown'),
        "technologies": metadata.get('technologies', []),
        "complexity": metadata.get('complexity', 1),
        "criticality": metadata.get('criticality', 'low'),
        "tags": metadata.get('tags', []),
        "related_kb": metadata.get('related_kb', []),
        "quick_summary": tldr["quick_summary"],
        "fix_time": tldr["fix_time"],
        "content": content,
        "embedding": embedding,
    }

    try:
        sb.table("fixflow_kb").upsert(row, on_conflict="kb_id").execute()
    except Exception as e:
        return False, str(e)
    return True, "synced (with embedding)" if embedding else "synced (no embedding)"
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def _get_embedding(text: str) -> Optional[List[float]]:
    """Generate embedding using Supabase Edge Function 'embed'."""
    sb = get_supabase()
    if not sb or not text:
        return None

    try:
        # Invoke the Edge Function. The response shape varies across
        # supabase-py versions, so normalize it below.
        res = sb.functions.invoke("embed", invoke_options={'body': {'input': text}})

        data = None
        if hasattr(res, 'data'):
            raw = res.data
            if isinstance(raw, bytes):
                data = json.loads(raw.decode('utf-8'))
            elif isinstance(raw, str):
                data = json.loads(raw)
            else:
                data = raw
        elif isinstance(res, dict):
            data = res

        if data and "embedding" in data:
            return data["embedding"]
    except Exception as e:
        # Silent fail for embeddings, core function should proceed
        sys.stderr.write(f"Embedding generation failed: {e}\n")

    return None
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def search_cloud(query: str) -> List[Dict]:
    """Search KB cards in Supabase using server-side search function.

    Uses the `search_kb_cards` RPC function which supports:
    - Full-text search with websearch_to_tsquery (multi-word, phrases)
    - Vector similarity search (when embeddings available)
    - Hybrid ranking (FTS rank or cosine similarity)
    """
    sb = get_supabase()
    if sb is None:
        return []

    try:
        # Preferred path: hybrid RPC; the embedding is optional (may be None).
        rows = sb.rpc("search_kb_cards", {
            "query_text": query,
            "query_embedding": _get_embedding(query),
            "match_limit": 20
        }).execute()
        return rows.data or []
    except Exception as e:
        sys.stderr.write(f"Cloud search error: {e}\n")

    # Fallback to direct table query
    try:
        fallback = (
            sb.table("fixflow_kb")
            .select("kb_id, title, category, platform, technologies, complexity, criticality, tags, quick_summary, fix_time")
            .eq("status", "published")
            .ilike("title", f"%{query}%")
            .limit(20)
            .execute()
        )
        return fallback.data or []
    except Exception:
        return []
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
# ─── Tools ───────────────────────────────────────────────────────
|
|
291
|
+
|
|
292
|
+
@mcp.tool
def resolve_kb_id(query: str) -> List[Dict]:
    """
    Search the Knowledge Base to find a KB ID.

    Call this FIRST to find the correct `kb_id` before reading a document.
    Returns list of matching entries with metadata.
    Searches both local index AND cloud database.

    Args:
        query: User question, error message, or technology name.
    """
    # Truncate (DoS protection) and normalize case for substring matching.
    query = query[:MAX_QUERY_LENGTH].lower()

    merged: List[Dict] = []
    seen_ids = set()

    # 1. Local search: naive substring match over the indexed text fields.
    for entry in load_kb_index():
        haystack = " ".join([
            entry.get('title', ''),
            entry.get('quick_summary', ''),
            entry.get('kb_id', ''),
            " ".join(entry.get('tags', [])),
            " ".join(entry.get('technologies', [])),
        ]).lower()
        if query in haystack:
            hit = dict(entry)
            hit["_source"] = "local"
            merged.append(hit)
            seen_ids.add(hit["kb_id"])

    # 2. Cloud search (full-text), appended after local hits and
    #    deduplicated by kb_id (local entries win).
    for entry in search_cloud(query):
        entry["_source"] = "cloud"
        if entry["kb_id"] not in seen_ids:
            merged.append(entry)
            seen_ids.add(entry["kb_id"])

    return merged
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
@mcp.tool
def read_kb_doc(kb_id: str) -> str:
    """
    Read the full Markdown content of a KB card.

    Requires a valid `kb_id` (e.g., 'WIN_TERM_042') obtained from `resolve_kb_id`.
    Returns the solution, checklist, and verification steps.
    Reads from local cache first, falls back to cloud.
    Automatically tracks view count in cloud.

    Args:
        kb_id: The unique ID of the card to read.
    """
    content = None

    # 1. Local cache: resolve the card's relative path via the index.
    index_entry = next((row for row in load_kb_index() if row["kb_id"] == kb_id), None)
    if index_entry:
        rel_path = index_entry.get("file_path", "")
        if rel_path:
            candidate = os.path.abspath(os.path.join(KB_DIR, rel_path))
            # Path-traversal guard: resolved path must stay under KB_DIR.
            if candidate.startswith(os.path.abspath(KB_DIR)) and os.path.exists(candidate):
                try:
                    with open(candidate, "r", encoding="utf-8") as fh:
                        content = fh.read()
                except Exception:
                    # Unreadable local copy: fall through to the cloud.
                    pass

    # 2. Cloud fallback.
    if not content:
        sb = get_supabase()
        if sb:
            try:
                row = (
                    sb.table("fixflow_kb")
                    .select("content")
                    .eq("kb_id", kb_id)
                    .eq("status", "published")
                    .single()
                    .execute()
                )
                if row.data and row.data.get("content"):
                    content = row.data["content"]
            except Exception as e:
                sys.stderr.write(f"Cloud read error: {e}\n")

    if not content:
        return f"Error: KB ID '{kb_id}' not found (checked local + cloud)."

    # 3. Track view (fire-and-forget)
    _track_event(kb_id, "view")

    return content
|
|
391
|
+
|
|
392
|
+
|
|
393
|
+
@mcp.tool
def save_kb_card(content: str, overwrite: bool = False) -> str:
    """
    Validate and save a new Knowledge Base card.

    Performs server-side validation of Markdown structure and metadata.
    Checks for duplicates before saving.
    Saves BOTH locally and to the cloud database for community access.

    Args:
        content: Full Markdown content with YAML frontmatter.
        overwrite: Set to True to update an existing card.
    """
    # 1. Validate (also guarantees the frontmatter regex below will match).
    is_valid, error_msg = validate_kb_markdown(content)
    if not is_valid:
        return f"❌ Validation FAILED: {error_msg}"

    # 2. Parse Metadata
    yaml_match = re.match(r'^---\r?\n(.*?)\r?\n---', content, re.DOTALL)
    metadata = yaml.safe_load(yaml_match.group(1))

    kb_id = metadata['kb_id']
    category = metadata['category'].lower()

    # Security: category becomes a directory name — restrict its charset.
    if not re.match(r'^[a-z0-9_-]+$', category):
        return f"❌ Security Error: Invalid category '{category}'."

    # 3. Deduplication check against the cloud (skipped when overwriting).
    if not overwrite:
        title_match = re.search(r'^#\s+(.+)$', content, re.MULTILINE)
        title = title_match.group(1) if title_match else ""
        duplicates = _find_duplicates(kb_id, title, metadata.get('tags', []))
        if duplicates:
            dup_list = "\n".join(
                f" • {d['kb_id']} — {d['title']} (similarity: {d.get('similarity', 'N/A')})"
                for d in duplicates[:3]
            )
            return (
                f"⚠️ Similar cards already exist:\n{dup_list}\n\n"
                f"If this is intentionally different, use overwrite=True."
            )

    # 4. Check existence (local)
    file_path = os.path.join(KB_DIR, category, f"{kb_id}.md")
    if os.path.exists(file_path) and not overwrite:
        return f"❌ Card '{kb_id}' already exists. Use overwrite=True to update."

    # 5/6. Persist locally and to the cloud.
    local_ok, local_msg = save_to_local(kb_id, category, content, metadata)
    cloud_ok, cloud_msg = save_to_cloud(kb_id, content, metadata)

    # 7. Build response. (FIX: the original had an if/else on cloud_ok whose
    # two branches appended the identical string — collapsed to one append.)
    parts = [
        f"📁 Local: saved to {local_msg}" if local_ok else f"📁 Local: {local_msg}",
        f"☁️ Cloud: {cloud_msg}",
    ]

    status = "✅" if local_ok else "⚠️"
    return f"{status} KB card '{kb_id}' — " + " | ".join(parts)
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
def _find_duplicates(kb_id: str, title: str, tags: list) -> List[Dict]:
    """Check if similar KB cards already exist in the cloud."""
    sb = get_supabase()
    if sb is None or not title:
        return []

    # Build a short query from the first 3 "significant" (len > 3) title words.
    significant = [word for word in title.split() if len(word) > 3][:3]
    if not significant:
        return []

    try:
        response = sb.rpc("search_kb_cards", {
            "query_text": " ".join(significant),
            "match_limit": 5
        }).execute()

        rows = response.data or []
        # Exclude the card itself; keep only high-similarity matches.
        return [
            row for row in rows
            if row["kb_id"] != kb_id and row.get("similarity", 0) > 0.3
        ]
    except Exception:
        return []
|
|
493
|
+
|
|
494
|
+
|
|
495
|
+
@mcp.resource("tech-kb://skill/{skill_name}")
def get_skill(skill_name: str = "create-kb-card") -> str:
    """Get instructions for a specific skill (e.g. creating a KB card)."""
    # Whitelist map: also prevents path traversal via skill_name.
    skills_map = {
        "create-kb-card": "skills/creating-kb-cards.md",
    }

    rel = skills_map.get(skill_name)
    if rel is None:
        available = ", ".join(skills_map.keys())
        return f"Skill '{skill_name}' not found. Available: {available}"

    skill_path = os.path.join(BASE_DIR, rel)
    try:
        with open(skill_path, "r", encoding="utf-8") as fh:
            return fh.read()
    except FileNotFoundError:
        return f"Error: Skill file not found for '{skill_name}'."
    except Exception as e:
        return f"Error loading skill '{skill_name}': {e}"
|
|
515
|
+
|
|
516
|
+
|
|
517
|
+
def report_card_result(kb_id: str, result: str) -> str:
    """Internal: report result of applying a KB card solution.

    Called automatically, not exposed as a tool.
    """
    # Only the three known outcomes are forwarded; anything else is a no-op.
    if result not in ("solved", "failed", "applied"):
        return ""
    stats = _track_event(kb_id, result)
    return f"Feedback '{result}' recorded for {kb_id}" if stats else ""
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
def _track_event(kb_id: str, event: str) -> dict | None:
    """Track a card usage event via Supabase RPC. Returns stats or None."""
    sb = get_supabase()
    if sb is None:
        return None

    try:
        payload = {"p_kb_id": kb_id, "p_event": event}
        response = sb.rpc("track_card_event", payload).execute()
        return response.data or None
    except Exception as e:
        # Best-effort analytics: never let tracking break the caller.
        sys.stderr.write(f"Track event error: {e}\n")
        return None
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
# ─── Resources ───────────────────────────────────────────────────
|
|
545
|
+
|
|
546
|
+
@mcp.resource("tech-kb://index")
def get_full_index() -> str:
    """Get the full index of all KB cards."""
    index = load_kb_index()
    return json.dumps(index, indent=2, ensure_ascii=False)
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
@mcp.resource("tech-kb://stats")
def get_kb_stats() -> str:
    """Get Knowledge Base statistics: total cards, categories, top rated."""
    sb = get_supabase()

    local_index = load_kb_index()
    categories = list({row.get("category", "unknown") for row in local_index})

    lines = [f"📊 FixFlow KB Statistics\n"]
    lines.append(f"📁 Local cards: {len(local_index)}")
    lines.append(f"📁 Categories: {', '.join(categories) if categories else 'none'}")

    if not sb:
        lines.append("\n☁️ Cloud: unavailable")
        return "\n".join(lines)

    try:
        cards = (
            sb.table("fixflow_kb")
            .select("kb_id, title, category, view_count, solved_count, failed_count")
            .eq("status", "published")
            .execute()
        )

        if cards.data:
            def column_total(field):
                # NULL counters from the DB are treated as zero.
                return sum(c.get(field, 0) or 0 for c in cards.data)

            views = column_total("view_count")
            solved = column_total("solved_count")
            failed = column_total("failed_count")

            lines.append(f"\n☁️ Cloud cards: {len(cards.data)}")
            lines.append(f"👁️ Total views: {views}")
            lines.append(f"✅ Solved: {solved} | ❌ Failed: {failed}")

            if solved + failed > 0:
                rate = round(solved / (solved + failed) * 100, 1)
                lines.append(f"📈 Success rate: {rate}%")

            top = sorted(cards.data, key=lambda c: c.get("view_count", 0) or 0, reverse=True)[:5]
            lines.append(f"\n🏆 Most viewed:")
            for rank, card in enumerate(top, 1):
                v = card.get("view_count", 0) or 0
                lines.append(f" {rank}. {card['kb_id']} — {card['title']} (👁️{v})")
    except Exception as e:
        lines.append(f"\n⚠️ Cloud error: {e}")

    return "\n".join(lines)
|
|
597
|
+
|
|
598
|
+
|
|
599
|
+
# ─── Entry Point ─────────────────────────────────────────────────

def main():
    """CLI entry point for fixflow-mcp."""
    # stdio transport: the MCP host talks to us over stdin/stdout.
    mcp.run(transport='stdio')


if __name__ == "__main__":
    main()
|