signalpilot-ai-internal 0.10.22__py3-none-any.whl → 0.11.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- signalpilot_ai_internal/_version.py +1 -1
- signalpilot_ai_internal/cache_service.py +22 -21
- signalpilot_ai_internal/composio_handlers.py +224 -0
- signalpilot_ai_internal/composio_service.py +511 -0
- signalpilot_ai_internal/database_config_handlers.py +182 -0
- signalpilot_ai_internal/database_config_service.py +166 -0
- signalpilot_ai_internal/databricks_schema_service.py +19 -14
- signalpilot_ai_internal/file_scanner_service.py +5 -146
- signalpilot_ai_internal/handlers.py +317 -8
- signalpilot_ai_internal/integrations_config.py +256 -0
- signalpilot_ai_internal/log_utils.py +31 -0
- signalpilot_ai_internal/mcp_handlers.py +33 -9
- signalpilot_ai_internal/mcp_service.py +94 -142
- signalpilot_ai_internal/oauth_token_store.py +141 -0
- signalpilot_ai_internal/schema_search_config.yml +17 -11
- signalpilot_ai_internal/schema_search_service.py +30 -10
- signalpilot_ai_internal/signalpilot_home.py +961 -0
- signalpilot_ai_internal/snowflake_schema_service.py +2 -0
- signalpilot_ai_internal/unified_database_schema_service.py +2 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +15 -48
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +9 -52
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.bab318d6caadb055e29c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/129.868ca665e6fc225c20a0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/179.fd45a2e75d471d0aa3b9.js +7 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.81105a94aa873fc51a94.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.a002dd4630d3b6404a90.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.cc6f6ecacd703bcdb468.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.817a883549d55a0e0576.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.a4daecd44f1e9364e44a.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.667225aab294fb5ed161.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8138af2522716e5a926f.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.925c73e32f3c07448da0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/477.aaa4cc9e87801fb45f5b.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.370056149a59022b700c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/510.868ca665e6fc225c20a0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.835f97f7ccfc70ff5c93.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.6c13335f73de089d6b1e.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/574.ad2709e91ebcac5bbe68.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.bddbab8e464fe31f0393.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.fda1bcdb10497b0a6ade.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.d046701f475fcbf6697d.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.c306dffd4cfe8a613d13.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.e39898b6f336539f228c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.77cc0ca10a1860df1b52.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.4e2850b2af985ed0d378.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js +2 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.055f50d20a31f3068c72.js +1 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +29 -29
- {signalpilot_ai_internal-0.10.22.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/METADATA +13 -31
- signalpilot_ai_internal-0.11.24.dist-info/RECORD +66 -0
- signalpilot_ai_internal-0.11.24.dist-info/licenses/LICENSE +7 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/110.224e83db03814fd03955.js +0 -7
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -2
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.c4232851631fb2e7e59a.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/726.318e4e791edb63cc788f.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.d9914229e4f120e7e9e4.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.d80de1e4da5b520d2f3b.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b63c429ca81e743b403c.js +0 -1
- signalpilot_ai_internal-0.10.22.dist-info/RECORD +0 -56
- signalpilot_ai_internal-0.10.22.dist-info/licenses/LICENSE +0 -29
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
- /signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt → /signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
- {signalpilot_ai_internal-0.10.22.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,961 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SignalPilotHomeManager - Centralized configuration management for connect/ folder
|
|
3
|
+
|
|
4
|
+
The connect/ folder is stored in the OS-specific cache directory:
|
|
5
|
+
- macOS: ~/Library/Caches/SignalPilotAI/connect/
|
|
6
|
+
- Windows: %LOCALAPPDATA%/SignalPilotAI/Cache/connect/
|
|
7
|
+
- Linux: ~/.cache/signalpilot-ai-internal/connect/
|
|
8
|
+
|
|
9
|
+
Provides unified access to:
|
|
10
|
+
- mcp.json - MCP server configurations
|
|
11
|
+
- db.toml - Database configurations
|
|
12
|
+
- .env - OAuth tokens
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import json
|
|
16
|
+
import logging
|
|
17
|
+
import os
|
|
18
|
+
import platform
|
|
19
|
+
import shutil
|
|
20
|
+
import sys
|
|
21
|
+
import tempfile
|
|
22
|
+
import threading
|
|
23
|
+
import time
|
|
24
|
+
import uuid
|
|
25
|
+
from pathlib import Path
|
|
26
|
+
from typing import Any, Dict, List, Optional
|
|
27
|
+
|
|
28
|
+
# TOML reading - tomllib is built-in for Python 3.11+
|
|
29
|
+
if sys.version_info >= (3, 11):
|
|
30
|
+
import tomllib
|
|
31
|
+
else:
|
|
32
|
+
import tomli as tomllib
|
|
33
|
+
|
|
34
|
+
logger = logging.getLogger(__name__)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _get_cache_base_directory() -> Path:
|
|
38
|
+
"""Get the OS-specific cache directory for SignalPilotAI."""
|
|
39
|
+
system = platform.system().lower()
|
|
40
|
+
|
|
41
|
+
try:
|
|
42
|
+
if system == "windows":
|
|
43
|
+
appdata_local = os.environ.get('LOCALAPPDATA')
|
|
44
|
+
if appdata_local:
|
|
45
|
+
return Path(appdata_local) / "SignalPilotAI" / "Cache"
|
|
46
|
+
appdata_roaming = os.environ.get('APPDATA')
|
|
47
|
+
if appdata_roaming:
|
|
48
|
+
return Path(appdata_roaming) / "SignalPilotAI" / "Cache"
|
|
49
|
+
userprofile = os.environ.get('USERPROFILE')
|
|
50
|
+
if userprofile:
|
|
51
|
+
return Path(userprofile) / ".signalpilot-cache"
|
|
52
|
+
|
|
53
|
+
elif system == "darwin": # macOS
|
|
54
|
+
return Path.home() / "Library" / "Caches" / "SignalPilotAI"
|
|
55
|
+
|
|
56
|
+
else: # Linux and other Unix-like
|
|
57
|
+
cache_home = os.environ.get('XDG_CACHE_HOME')
|
|
58
|
+
if cache_home:
|
|
59
|
+
return Path(cache_home) / "signalpilot-ai-internal"
|
|
60
|
+
return Path.home() / ".cache" / "signalpilot-ai-internal"
|
|
61
|
+
|
|
62
|
+
except Exception as e:
|
|
63
|
+
logger.error(f"Error determining cache directory: {e}")
|
|
64
|
+
|
|
65
|
+
# Fallback
|
|
66
|
+
return Path(tempfile.gettempdir()) / f"signalpilot-ai-internal-{os.getuid() if hasattr(os, 'getuid') else 'user'}"
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _format_toml_value(value: Any) -> str:
|
|
70
|
+
"""Format a Python value as a TOML value string."""
|
|
71
|
+
if value is None:
|
|
72
|
+
return '""'
|
|
73
|
+
elif isinstance(value, bool):
|
|
74
|
+
return "true" if value else "false"
|
|
75
|
+
elif isinstance(value, int):
|
|
76
|
+
return str(value)
|
|
77
|
+
elif isinstance(value, float):
|
|
78
|
+
return str(value)
|
|
79
|
+
elif isinstance(value, str):
|
|
80
|
+
# Check if multiline
|
|
81
|
+
if '\n' in value:
|
|
82
|
+
# Use multiline basic string
|
|
83
|
+
escaped = value.replace('\\', '\\\\').replace('"""', '\\"\\"\\"')
|
|
84
|
+
return f'"""{escaped}"""'
|
|
85
|
+
else:
|
|
86
|
+
# Use basic string with escaping
|
|
87
|
+
escaped = value.replace('\\', '\\\\').replace('"', '\\"')
|
|
88
|
+
return f'"{escaped}"'
|
|
89
|
+
elif isinstance(value, list):
|
|
90
|
+
items = [_format_toml_value(v) for v in value]
|
|
91
|
+
return f"[{', '.join(items)}]"
|
|
92
|
+
else:
|
|
93
|
+
# Fallback to string representation
|
|
94
|
+
return f'"{str(value)}"'
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _write_toml(data: Dict[str, Any]) -> str:
    """
    Simple TOML writer for our specific use case.
    Handles: [defaults], [defaults.type], [[type]] arrays of tables
    """
    out: List[str] = []
    defaults = data.get("defaults", {})

    # [defaults] table first: scalar keys only; dict values become the
    # [defaults.<type>] sub-tables emitted below.
    if defaults:
        out.append("[defaults]")
        out.extend(
            f"{key} = {_format_toml_value(val)}"
            for key, val in defaults.items()
            if not isinstance(val, dict)
        )
        out.append("")

    # Nested defaults such as [defaults.snowflake].
    for key, val in defaults.items():
        if isinstance(val, dict):
            out.append(f"[defaults.{key}]")
            out.extend(f"{k} = {_format_toml_value(v)}" for k, v in val.items())
            out.append("")

    # One [[type]] table per configured database entry.
    for db_type in ("snowflake", "postgres", "mysql", "databricks"):
        for entry in data.get(db_type) or []:
            out.append(f"[[{db_type}]]")
            out.extend(
                f"{key} = {_format_toml_value(val)}"
                # Original condition collapses to "not a dict": lists kept,
                # nested tables skipped.
                for key, val in entry.items()
                if not isinstance(val, dict)
            )
            out.append("")

    return "\n".join(out)
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class SignalPilotHomeManager:
    """
    Centralized manager for SignalPilot configuration files.
    All configs stored in the OS-specific cache directory under connect/
    (e.g., ~/Library/Caches/SignalPilotAI/connect/ on macOS)
    """

    # Singleton instance and the lock guarding its lazy creation.
    _instance = None
    _lock = threading.Lock()

    # Directory structure
    CONNECT_DIR_NAME = "connect"

    # File names
    MCP_CONFIG_FILE = "mcp.json"  # MCP server configurations (JSON)
    DB_CONFIG_FILE = "db.toml"    # database configurations (TOML)
    ENV_FILE = ".env"             # OAuth tokens (dotenv-style)
|
|
153
|
+
|
|
154
|
+
    def __init__(self):
        """Initialize paths and create the connect/ directory."""
        # Both paths are resolved by _setup_directories() below.
        self._base_path: Optional[Path] = None
        self._connect_path: Optional[Path] = None
        # Reentrant lock serializing all file writes done by this manager.
        self._file_lock = threading.RLock()
        self._setup_directories()
|
|
159
|
+
|
|
160
|
+
    @classmethod
    def get_instance(cls) -> 'SignalPilotHomeManager':
        """Get singleton instance (thread-safe)."""
        # Double-checked locking: the unlocked fast path avoids contention
        # once the instance exists; the second check inside the lock keeps
        # two racing threads from both constructing an instance.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = SignalPilotHomeManager()
        return cls._instance
|
|
168
|
+
|
|
169
|
+
def _setup_directories(self):
|
|
170
|
+
"""Create connect/ directory in the OS-specific cache location."""
|
|
171
|
+
self._base_path = _get_cache_base_directory()
|
|
172
|
+
self._connect_path = self._base_path / self.CONNECT_DIR_NAME
|
|
173
|
+
|
|
174
|
+
try:
|
|
175
|
+
self._connect_path.mkdir(parents=True, exist_ok=True)
|
|
176
|
+
logger.info(f"[SignalPilotHomeManager] Using directory: {self._connect_path}")
|
|
177
|
+
except Exception as e:
|
|
178
|
+
logger.error(f"[SignalPilotHomeManager] Error creating directory: {e}")
|
|
179
|
+
|
|
180
|
+
# ==================== Path Accessors ====================
|
|
181
|
+
|
|
182
|
+
    @property
    def base_path(self) -> Path:
        """Get the cache base directory (e.g., ~/Library/Caches/SignalPilotAI on macOS)"""
        # Set once by _setup_directories() during __init__.
        return self._base_path
|
|
186
|
+
|
|
187
|
+
    @property
    def connect_path(self) -> Path:
        """Get the connect directory (e.g., ~/Library/Caches/SignalPilotAI/connect on macOS)"""
        # Set once by _setup_directories() during __init__.
        return self._connect_path
|
|
191
|
+
|
|
192
|
+
    @property
    def mcp_config_path(self) -> Path:
        """Get path to mcp.json"""
        # connect/mcp.json — MCP server configurations.
        return self._connect_path / self.MCP_CONFIG_FILE
|
|
196
|
+
|
|
197
|
+
    @property
    def db_config_path(self) -> Path:
        """Get path to db.toml"""
        # connect/db.toml — database configurations.
        return self._connect_path / self.DB_CONFIG_FILE
|
|
201
|
+
|
|
202
|
+
    @property
    def env_path(self) -> Path:
        """Get path to .env"""
        # connect/.env — OAuth tokens in dotenv format.
        return self._connect_path / self.ENV_FILE
|
|
206
|
+
|
|
207
|
+
# ==================== Safe File Operations ====================
|
|
208
|
+
|
|
209
|
+
def _safe_write_json(self, file_path: Path, data: Any, max_retries: int = 3) -> bool:
|
|
210
|
+
"""Safely write JSON data with atomic operations."""
|
|
211
|
+
with self._file_lock:
|
|
212
|
+
if not file_path.parent.exists():
|
|
213
|
+
try:
|
|
214
|
+
file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
215
|
+
except Exception as e:
|
|
216
|
+
logger.error(f"Failed to create directory {file_path.parent}: {e}")
|
|
217
|
+
return False
|
|
218
|
+
|
|
219
|
+
for attempt in range(max_retries):
|
|
220
|
+
temp_path = file_path.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
|
|
221
|
+
|
|
222
|
+
try:
|
|
223
|
+
# Write to temporary file first
|
|
224
|
+
with open(temp_path, 'w', encoding='utf-8') as f:
|
|
225
|
+
json.dump(data, f, indent=2, ensure_ascii=False)
|
|
226
|
+
|
|
227
|
+
# Verify the written data
|
|
228
|
+
with open(temp_path, 'r', encoding='utf-8') as f:
|
|
229
|
+
json.load(f)
|
|
230
|
+
|
|
231
|
+
# Atomic move to final location
|
|
232
|
+
if platform.system().lower() == "windows":
|
|
233
|
+
if file_path.exists():
|
|
234
|
+
file_path.unlink()
|
|
235
|
+
|
|
236
|
+
shutil.move(str(temp_path), str(file_path))
|
|
237
|
+
return True
|
|
238
|
+
|
|
239
|
+
except Exception as e:
|
|
240
|
+
logger.error(f"Write attempt {attempt + 1} failed for {file_path}: {e}")
|
|
241
|
+
|
|
242
|
+
try:
|
|
243
|
+
if temp_path.exists():
|
|
244
|
+
temp_path.unlink()
|
|
245
|
+
except:
|
|
246
|
+
pass
|
|
247
|
+
|
|
248
|
+
if attempt < max_retries - 1:
|
|
249
|
+
time.sleep(0.1 * (attempt + 1))
|
|
250
|
+
|
|
251
|
+
return False
|
|
252
|
+
|
|
253
|
+
def _safe_read_json(self, file_path: Path, default: Any = None) -> Any:
|
|
254
|
+
"""Safely read JSON data."""
|
|
255
|
+
if not file_path.exists():
|
|
256
|
+
return default
|
|
257
|
+
|
|
258
|
+
try:
|
|
259
|
+
with open(file_path, 'r', encoding='utf-8') as f:
|
|
260
|
+
return json.load(f)
|
|
261
|
+
except Exception as e:
|
|
262
|
+
logger.error(f"Failed to read {file_path}: {e}")
|
|
263
|
+
return default
|
|
264
|
+
|
|
265
|
+
def _safe_write_toml(self, file_path: Path, data: Dict[str, Any], max_retries: int = 3) -> bool:
|
|
266
|
+
"""Safely write TOML data with atomic operations."""
|
|
267
|
+
with self._file_lock:
|
|
268
|
+
if not file_path.parent.exists():
|
|
269
|
+
try:
|
|
270
|
+
file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
271
|
+
except Exception as e:
|
|
272
|
+
logger.error(f"Failed to create directory {file_path.parent}: {e}")
|
|
273
|
+
return False
|
|
274
|
+
|
|
275
|
+
for attempt in range(max_retries):
|
|
276
|
+
temp_path = file_path.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
|
|
277
|
+
|
|
278
|
+
try:
|
|
279
|
+
# Format TOML content
|
|
280
|
+
toml_content = _write_toml(data)
|
|
281
|
+
|
|
282
|
+
# Write to temporary file first
|
|
283
|
+
with open(temp_path, 'w', encoding='utf-8') as f:
|
|
284
|
+
f.write(toml_content)
|
|
285
|
+
|
|
286
|
+
# Verify the written data can be read back
|
|
287
|
+
with open(temp_path, 'rb') as f:
|
|
288
|
+
tomllib.load(f)
|
|
289
|
+
|
|
290
|
+
# Atomic move to final location
|
|
291
|
+
if platform.system().lower() == "windows":
|
|
292
|
+
if file_path.exists():
|
|
293
|
+
file_path.unlink()
|
|
294
|
+
|
|
295
|
+
shutil.move(str(temp_path), str(file_path))
|
|
296
|
+
return True
|
|
297
|
+
|
|
298
|
+
except Exception as e:
|
|
299
|
+
logger.error(f"TOML write attempt {attempt + 1} failed for {file_path}: {e}")
|
|
300
|
+
|
|
301
|
+
try:
|
|
302
|
+
if temp_path.exists():
|
|
303
|
+
temp_path.unlink()
|
|
304
|
+
except:
|
|
305
|
+
pass
|
|
306
|
+
|
|
307
|
+
if attempt < max_retries - 1:
|
|
308
|
+
time.sleep(0.1 * (attempt + 1))
|
|
309
|
+
|
|
310
|
+
return False
|
|
311
|
+
|
|
312
|
+
def _safe_read_toml(self, file_path: Path, default: Any = None) -> Any:
|
|
313
|
+
"""Safely read TOML data."""
|
|
314
|
+
if not file_path.exists():
|
|
315
|
+
return default
|
|
316
|
+
|
|
317
|
+
try:
|
|
318
|
+
with open(file_path, 'rb') as f:
|
|
319
|
+
return tomllib.load(f)
|
|
320
|
+
except Exception as e:
|
|
321
|
+
logger.error(f"Failed to read TOML {file_path}: {e}")
|
|
322
|
+
return default
|
|
323
|
+
|
|
324
|
+
# ==================== MCP Config (JSON) ====================
|
|
325
|
+
|
|
326
|
+
def read_mcp_config(self) -> Dict[str, Any]:
|
|
327
|
+
"""Read mcp.json configuration."""
|
|
328
|
+
return self._safe_read_json(self.mcp_config_path, {"mcpServers": {}})
|
|
329
|
+
|
|
330
|
+
def write_mcp_config(self, config: Dict[str, Any]) -> bool:
|
|
331
|
+
"""Write mcp.json configuration."""
|
|
332
|
+
return self._safe_write_json(self.mcp_config_path, config)
|
|
333
|
+
|
|
334
|
+
def get_mcp_servers(self) -> Dict[str, Any]:
|
|
335
|
+
"""Get all MCP servers from config."""
|
|
336
|
+
config = self.read_mcp_config()
|
|
337
|
+
return config.get("mcpServers", {})
|
|
338
|
+
|
|
339
|
+
def set_mcp_server(self, server_id: str, server_config: Dict[str, Any]) -> bool:
|
|
340
|
+
"""Set/update a single MCP server."""
|
|
341
|
+
config = self.read_mcp_config()
|
|
342
|
+
if "mcpServers" not in config:
|
|
343
|
+
config["mcpServers"] = {}
|
|
344
|
+
config["mcpServers"][server_id] = server_config
|
|
345
|
+
return self.write_mcp_config(config)
|
|
346
|
+
|
|
347
|
+
def remove_mcp_server(self, server_id: str) -> bool:
|
|
348
|
+
"""Remove an MCP server."""
|
|
349
|
+
config = self.read_mcp_config()
|
|
350
|
+
servers = config.get("mcpServers", {})
|
|
351
|
+
if server_id in servers:
|
|
352
|
+
del servers[server_id]
|
|
353
|
+
config["mcpServers"] = servers
|
|
354
|
+
return self.write_mcp_config(config)
|
|
355
|
+
return True
|
|
356
|
+
|
|
357
|
+
# ==================== DB Config (TOML) ====================
|
|
358
|
+
|
|
359
|
+
def read_db_config(self) -> Dict[str, Any]:
|
|
360
|
+
"""Read db.toml configuration."""
|
|
361
|
+
return self._safe_read_toml(self.db_config_path, {"defaults": {}})
|
|
362
|
+
|
|
363
|
+
def write_db_config(self, config: Dict[str, Any]) -> bool:
|
|
364
|
+
"""Write db.toml configuration."""
|
|
365
|
+
return self._safe_write_toml(self.db_config_path, config)
|
|
366
|
+
|
|
367
|
+
def get_database_configs(self) -> List[Dict[str, Any]]:
|
|
368
|
+
"""Get all database configurations from db.toml."""
|
|
369
|
+
config = self.read_db_config()
|
|
370
|
+
defaults = config.get("defaults", {})
|
|
371
|
+
|
|
372
|
+
databases = []
|
|
373
|
+
for db_type in ["snowflake", "postgres", "mysql", "databricks"]:
|
|
374
|
+
if db_type in config:
|
|
375
|
+
type_defaults = defaults.get(db_type, {})
|
|
376
|
+
global_defaults = {k: v for k, v in defaults.items()
|
|
377
|
+
if not isinstance(v, dict)}
|
|
378
|
+
|
|
379
|
+
for db_config in config[db_type]:
|
|
380
|
+
# Merge: global defaults < type defaults < specific config
|
|
381
|
+
merged = {**global_defaults, **type_defaults, **db_config}
|
|
382
|
+
merged["type"] = db_type
|
|
383
|
+
databases.append(merged)
|
|
384
|
+
|
|
385
|
+
return databases
|
|
386
|
+
|
|
387
|
+
def get_database_config(self, db_type: str, name: str) -> Optional[Dict[str, Any]]:
|
|
388
|
+
"""Get a specific database configuration."""
|
|
389
|
+
configs = self.get_database_configs()
|
|
390
|
+
for config in configs:
|
|
391
|
+
if config.get("type") == db_type and config.get("name") == name:
|
|
392
|
+
return config
|
|
393
|
+
return None
|
|
394
|
+
|
|
395
|
+
def add_database_config(self, db_type: str, config: Dict[str, Any]) -> bool:
|
|
396
|
+
"""Add a new database configuration to db.toml."""
|
|
397
|
+
full_config = self.read_db_config()
|
|
398
|
+
|
|
399
|
+
if db_type not in full_config:
|
|
400
|
+
full_config[db_type] = []
|
|
401
|
+
|
|
402
|
+
# Check for duplicate name
|
|
403
|
+
for existing in full_config[db_type]:
|
|
404
|
+
if existing.get("name") == config.get("name"):
|
|
405
|
+
logger.error(f"Database config with name '{config.get('name')}' already exists")
|
|
406
|
+
return False
|
|
407
|
+
|
|
408
|
+
full_config[db_type].append(config)
|
|
409
|
+
return self.write_db_config(full_config)
|
|
410
|
+
|
|
411
|
+
def update_database_config(self, db_type: str, name: str,
|
|
412
|
+
updates: Dict[str, Any]) -> bool:
|
|
413
|
+
"""Update an existing database configuration."""
|
|
414
|
+
full_config = self.read_db_config()
|
|
415
|
+
|
|
416
|
+
if db_type not in full_config:
|
|
417
|
+
return False
|
|
418
|
+
|
|
419
|
+
for i, db in enumerate(full_config[db_type]):
|
|
420
|
+
if db.get("name") == name:
|
|
421
|
+
full_config[db_type][i] = {**db, **updates}
|
|
422
|
+
return self.write_db_config(full_config)
|
|
423
|
+
|
|
424
|
+
return False
|
|
425
|
+
|
|
426
|
+
def remove_database_config(self, db_type: str, name: str) -> bool:
|
|
427
|
+
"""Remove a database configuration."""
|
|
428
|
+
full_config = self.read_db_config()
|
|
429
|
+
|
|
430
|
+
if db_type not in full_config:
|
|
431
|
+
return False
|
|
432
|
+
|
|
433
|
+
original_len = len(full_config[db_type])
|
|
434
|
+
full_config[db_type] = [
|
|
435
|
+
db for db in full_config[db_type]
|
|
436
|
+
if db.get("name") != name
|
|
437
|
+
]
|
|
438
|
+
|
|
439
|
+
if len(full_config[db_type]) < original_len:
|
|
440
|
+
return self.write_db_config(full_config)
|
|
441
|
+
return False
|
|
442
|
+
|
|
443
|
+
def set_database_defaults(self, defaults: Dict[str, Any]) -> bool:
|
|
444
|
+
"""Set global defaults for database configurations."""
|
|
445
|
+
full_config = self.read_db_config()
|
|
446
|
+
full_config["defaults"] = defaults
|
|
447
|
+
return self.write_db_config(full_config)
|
|
448
|
+
|
|
449
|
+
def get_database_defaults(self) -> Dict[str, Any]:
|
|
450
|
+
"""Get global defaults."""
|
|
451
|
+
config = self.read_db_config()
|
|
452
|
+
return config.get("defaults", {})
|
|
453
|
+
|
|
454
|
+
# ==================== OAuth Tokens (.env) ====================
|
|
455
|
+
|
|
456
|
+
def read_env(self) -> Dict[str, str]:
|
|
457
|
+
"""Read .env file as key-value pairs."""
|
|
458
|
+
if not self.env_path.exists():
|
|
459
|
+
return {}
|
|
460
|
+
|
|
461
|
+
env_vars = {}
|
|
462
|
+
try:
|
|
463
|
+
with open(self.env_path, 'r', encoding='utf-8') as f:
|
|
464
|
+
for line in f:
|
|
465
|
+
line = line.strip()
|
|
466
|
+
if line and not line.startswith('#') and '=' in line:
|
|
467
|
+
key, _, value = line.partition('=')
|
|
468
|
+
# Remove quotes if present
|
|
469
|
+
value = value.strip()
|
|
470
|
+
if (value.startswith('"') and value.endswith('"')) or \
|
|
471
|
+
(value.startswith("'") and value.endswith("'")):
|
|
472
|
+
value = value[1:-1]
|
|
473
|
+
env_vars[key.strip()] = value
|
|
474
|
+
except Exception as e:
|
|
475
|
+
logger.error(f"[SignalPilotHomeManager] Error reading .env: {e}")
|
|
476
|
+
|
|
477
|
+
return env_vars
|
|
478
|
+
|
|
479
|
+
def write_env(self, env_vars: Dict[str, str]) -> bool:
|
|
480
|
+
"""Write .env file from key-value pairs."""
|
|
481
|
+
with self._file_lock:
|
|
482
|
+
try:
|
|
483
|
+
temp_path = self.env_path.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
|
|
484
|
+
|
|
485
|
+
with open(temp_path, 'w', encoding='utf-8') as f:
|
|
486
|
+
f.write("# SignalPilot OAuth Tokens\n")
|
|
487
|
+
f.write("# Auto-generated - do not edit manually\n\n")
|
|
488
|
+
|
|
489
|
+
for key in sorted(env_vars.keys()):
|
|
490
|
+
value = env_vars[key]
|
|
491
|
+
# Quote values containing spaces, quotes, or special chars
|
|
492
|
+
if any(c in value for c in ' "\'\n\r\t#'):
|
|
493
|
+
# Escape existing quotes and wrap in quotes
|
|
494
|
+
value = value.replace('\\', '\\\\').replace('"', '\\"')
|
|
495
|
+
value = f'"{value}"'
|
|
496
|
+
f.write(f"{key}={value}\n")
|
|
497
|
+
|
|
498
|
+
# Atomic move
|
|
499
|
+
if platform.system().lower() == "windows":
|
|
500
|
+
if self.env_path.exists():
|
|
501
|
+
self.env_path.unlink()
|
|
502
|
+
|
|
503
|
+
shutil.move(str(temp_path), str(self.env_path))
|
|
504
|
+
return True
|
|
505
|
+
|
|
506
|
+
except Exception as e:
|
|
507
|
+
logger.error(f"[SignalPilotHomeManager] Error writing .env: {e}")
|
|
508
|
+
try:
|
|
509
|
+
if temp_path.exists():
|
|
510
|
+
temp_path.unlink()
|
|
511
|
+
except:
|
|
512
|
+
pass
|
|
513
|
+
return False
|
|
514
|
+
|
|
515
|
+
def _get_env_prefix(self, server_id: str) -> str:
|
|
516
|
+
"""Generate env variable prefix for a server ID."""
|
|
517
|
+
# Convert server-id to OAUTH_SERVER_ID_
|
|
518
|
+
safe_id = server_id.upper().replace('-', '_').replace('.', '_')
|
|
519
|
+
return f"OAUTH_{safe_id}_"
|
|
520
|
+
|
|
521
|
+
def get_oauth_tokens(self, server_id: str) -> Optional[Dict[str, str]]:
|
|
522
|
+
"""Get OAuth tokens for a specific MCP server."""
|
|
523
|
+
env_vars = self.read_env()
|
|
524
|
+
prefix = self._get_env_prefix(server_id)
|
|
525
|
+
|
|
526
|
+
tokens = {}
|
|
527
|
+
for key, value in env_vars.items():
|
|
528
|
+
if key.startswith(prefix):
|
|
529
|
+
# Remove prefix to get original token name
|
|
530
|
+
token_name = key[len(prefix):]
|
|
531
|
+
tokens[token_name] = value
|
|
532
|
+
|
|
533
|
+
return tokens if tokens else None
|
|
534
|
+
|
|
535
|
+
def set_oauth_tokens(self, server_id: str, tokens: Dict[str, str]) -> bool:
|
|
536
|
+
"""Set OAuth tokens for an MCP server."""
|
|
537
|
+
env_vars = self.read_env()
|
|
538
|
+
prefix = self._get_env_prefix(server_id)
|
|
539
|
+
|
|
540
|
+
# Remove existing tokens for this server
|
|
541
|
+
env_vars = {k: v for k, v in env_vars.items()
|
|
542
|
+
if not k.startswith(prefix)}
|
|
543
|
+
|
|
544
|
+
# Add new tokens
|
|
545
|
+
for token_name, value in tokens.items():
|
|
546
|
+
env_vars[f"{prefix}{token_name}"] = value
|
|
547
|
+
|
|
548
|
+
return self.write_env(env_vars)
|
|
549
|
+
|
|
550
|
+
def remove_oauth_tokens(self, server_id: str) -> bool:
|
|
551
|
+
"""Remove OAuth tokens for an MCP server."""
|
|
552
|
+
env_vars = self.read_env()
|
|
553
|
+
prefix = self._get_env_prefix(server_id)
|
|
554
|
+
|
|
555
|
+
new_env = {k: v for k, v in env_vars.items()
|
|
556
|
+
if not k.startswith(prefix)}
|
|
557
|
+
|
|
558
|
+
if len(new_env) < len(env_vars):
|
|
559
|
+
return self.write_env(new_env)
|
|
560
|
+
return True # Nothing to remove
|
|
561
|
+
|
|
562
|
+
def get_oauth_registry(self) -> Dict[str, str]:
|
|
563
|
+
"""Get mapping of server IDs to integration IDs from .env registry."""
|
|
564
|
+
env_vars = self.read_env()
|
|
565
|
+
registry = {}
|
|
566
|
+
|
|
567
|
+
for key, value in env_vars.items():
|
|
568
|
+
if key.startswith("OAUTH_REGISTRY_"):
|
|
569
|
+
# OAUTH_REGISTRY_SERVER_ID=integration_id
|
|
570
|
+
server_id = key[15:].lower().replace('_', '-')
|
|
571
|
+
registry[server_id] = value
|
|
572
|
+
|
|
573
|
+
return registry
|
|
574
|
+
|
|
575
|
+
def set_oauth_registry_entry(self, server_id: str, integration_id: str) -> bool:
|
|
576
|
+
"""Add an entry to the OAuth registry."""
|
|
577
|
+
env_vars = self.read_env()
|
|
578
|
+
registry_key = f"OAUTH_REGISTRY_{server_id.upper().replace('-', '_')}"
|
|
579
|
+
env_vars[registry_key] = integration_id
|
|
580
|
+
return self.write_env(env_vars)
|
|
581
|
+
|
|
582
|
+
def remove_oauth_registry_entry(self, server_id: str) -> bool:
|
|
583
|
+
"""Remove an entry from the OAuth registry."""
|
|
584
|
+
env_vars = self.read_env()
|
|
585
|
+
registry_key = f"OAUTH_REGISTRY_{server_id.upper().replace('-', '_')}"
|
|
586
|
+
|
|
587
|
+
if registry_key in env_vars:
|
|
588
|
+
del env_vars[registry_key]
|
|
589
|
+
return self.write_env(env_vars)
|
|
590
|
+
return True
|
|
591
|
+
|
|
592
|
+
# ==================== Utility Methods ====================
|
|
593
|
+
|
|
594
|
+
def is_available(self) -> bool:
|
|
595
|
+
"""Check if the SignalPilotHome directory is available."""
|
|
596
|
+
return self._connect_path is not None and self._connect_path.exists()
|
|
597
|
+
|
|
598
|
+
def get_info(self) -> Dict[str, Any]:
|
|
599
|
+
"""Get information about the SignalPilotHome setup."""
|
|
600
|
+
info = {
|
|
601
|
+
"available": self.is_available(),
|
|
602
|
+
"base_path": str(self._base_path),
|
|
603
|
+
"connect_path": str(self._connect_path),
|
|
604
|
+
"mcp_config_exists": self.mcp_config_path.exists(),
|
|
605
|
+
"db_config_exists": self.db_config_path.exists(),
|
|
606
|
+
"env_exists": self.env_path.exists(),
|
|
607
|
+
}
|
|
608
|
+
|
|
609
|
+
if self.is_available():
|
|
610
|
+
try:
|
|
611
|
+
if self.mcp_config_path.exists():
|
|
612
|
+
info["mcp_config_size"] = self.mcp_config_path.stat().st_size
|
|
613
|
+
info["mcp_servers_count"] = len(self.get_mcp_servers())
|
|
614
|
+
|
|
615
|
+
if self.db_config_path.exists():
|
|
616
|
+
info["db_config_size"] = self.db_config_path.stat().st_size
|
|
617
|
+
info["db_configs_count"] = len(self.get_database_configs())
|
|
618
|
+
|
|
619
|
+
if self.env_path.exists():
|
|
620
|
+
info["env_size"] = self.env_path.stat().st_size
|
|
621
|
+
info["oauth_registry_count"] = len(self.get_oauth_registry())
|
|
622
|
+
|
|
623
|
+
except Exception as e:
|
|
624
|
+
info["error"] = str(e)
|
|
625
|
+
|
|
626
|
+
return info
|
|
627
|
+
|
|
628
|
+
|
|
629
|
+
# Global instance accessor
|
|
630
|
+
def get_signalpilot_home() -> SignalPilotHomeManager:
    """Get the singleton instance."""
    manager = SignalPilotHomeManager.get_instance()
    return manager
|
|
633
|
+
|
|
634
|
+
|
|
635
|
+
class UserRulesManager:
    """
    Manager for user-defined rules (snippets) stored as markdown files.
    Rules are stored in ~/SignalPilotHome/user-rules/ as .md files.

    Each rule file follows a format:
    ---
    id: unique-id
    title: Rule Title
    description: Optional description
    created_at: ISO timestamp
    updated_at: ISO timestamp
    ---

    Rule content goes here...
    """

    # Singleton state, guarded by _lock for thread-safe lazy creation.
    _instance = None
    _lock = threading.Lock()

    SIGNALPILOT_HOME_DIR = "SignalPilotHome"
    USER_RULES_DIR = "user-rules"

    def __init__(self):
        # Directory holding the rule .md files; None when setup failed.
        self._rules_path: Optional[Path] = None
        # Reentrant lock serialising file mutations within this process.
        self._file_lock = threading.RLock()
        self._setup_directory()

    @classmethod
    def get_instance(cls) -> 'UserRulesManager':
        """Get singleton instance (thread-safe)."""
        if cls._instance is None:
            with cls._lock:
                # Double-checked: re-test after acquiring the lock.
                if cls._instance is None:
                    cls._instance = UserRulesManager()
        return cls._instance

    def _setup_directory(self):
        """Create user-rules directory in ~/SignalPilotHome/."""
        try:
            # Use ~/SignalPilotHome/user-rules/
            home_dir = Path.home() / self.SIGNALPILOT_HOME_DIR
            self._rules_path = home_dir / self.USER_RULES_DIR
            self._rules_path.mkdir(parents=True, exist_ok=True)
            logger.info(f"[UserRulesManager] Using directory: {self._rules_path}")
        except Exception as e:
            logger.error(f"[UserRulesManager] Error creating directory: {e}")
            self._rules_path = None

    @property
    def rules_path(self) -> Optional[Path]:
        """Get the user-rules directory path."""
        return self._rules_path

    def _parse_frontmatter(self, content: str) -> tuple[Dict[str, Any], str]:
        """Parse YAML frontmatter from markdown content.

        Returns (metadata, body). Metadata is empty when the content has
        no leading '---' block or the block is unterminated.
        """
        if not content.startswith('---'):
            return {}, content

        parts = content.split('---', 2)
        if len(parts) < 3:
            return {}, content

        frontmatter_str = parts[1].strip()
        body = parts[2].strip()

        # Simple YAML parsing for our specific use case: flat "key: value"
        # pairs only — no nesting, lists, or multi-line values.
        frontmatter = {}
        for line in frontmatter_str.split('\n'):
            line = line.strip()
            if ':' in line:
                key, _, value = line.partition(':')
                key = key.strip()
                value = value.strip()
                # Remove quotes if present
                if (value.startswith('"') and value.endswith('"')) or \
                   (value.startswith("'") and value.endswith("'")):
                    value = value[1:-1]
                frontmatter[key] = value

        return frontmatter, body

    def _format_frontmatter(self, metadata: Dict[str, Any]) -> str:
        """Format metadata as YAML frontmatter; None values are omitted."""
        lines = ['---']
        for key, value in metadata.items():
            if value is not None:
                # Quote strings that contain special characters so the
                # simple parser in _parse_frontmatter reads them back intact.
                if isinstance(value, str) and any(c in value for c in ':\n"\''):
                    value = f'"{value}"'
                lines.append(f'{key}: {value}')
        lines.append('---')
        return '\n'.join(lines)

    def _sanitize_filename(self, title: str) -> str:
        """Convert title to a safe, hyphenated filename stem (max 50 chars)."""
        import re
        # Replace spaces with hyphens, remove special characters
        filename = re.sub(r'[^\w\s-]', '', title.lower())
        filename = re.sub(r'[-\s]+', '-', filename).strip('-')
        return filename[:50]  # Limit length

    def list_rules(self) -> List[Dict[str, Any]]:
        """List all user rules.

        Returns dicts with id/title/description/content/created_at/
        updated_at/filename, ordered by filename. Unreadable files are
        skipped with a logged error rather than failing the whole listing.
        """
        if not self._rules_path or not self._rules_path.exists():
            return []

        rules = []
        try:
            for md_file in sorted(self._rules_path.glob('*.md')):
                try:
                    with open(md_file, 'r', encoding='utf-8') as f:
                        content = f.read()

                    frontmatter, body = self._parse_frontmatter(content)

                    # Use frontmatter id or fall back to the filename stem.
                    rule_id = frontmatter.get('id', md_file.stem)

                    rules.append({
                        'id': rule_id,
                        'title': frontmatter.get('title', md_file.stem),
                        'description': frontmatter.get('description', ''),
                        'content': body,
                        'created_at': frontmatter.get('created_at', ''),
                        'updated_at': frontmatter.get('updated_at', ''),
                        'filename': md_file.name
                    })
                except Exception as e:
                    logger.error(f"[UserRulesManager] Error reading {md_file}: {e}")
                    continue

        except Exception as e:
            logger.error(f"[UserRulesManager] Error listing rules: {e}")

        return rules

    def get_rule(self, rule_id: str) -> Optional[Dict[str, Any]]:
        """Get a specific rule by ID, or None when not found."""
        rules = self.list_rules()
        for rule in rules:
            if rule['id'] == rule_id:
                return rule
        return None

    def create_rule(self, title: str, content: str, description: str = '',
                    rule_id: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Create a new rule as a markdown file.

        Returns the created rule dict, or None when the rules directory is
        unavailable or the write fails.
        """
        if not self._rules_path:
            logger.error("[UserRulesManager] Rules directory not available")
            return None

        with self._file_lock:
            # Generate ID if not provided
            if not rule_id:
                rule_id = f"rule-{uuid.uuid4().hex[:8]}"

            # Generate filename from title
            filename = self._sanitize_filename(title)
            if not filename:
                filename = rule_id

            # BUG FIX: use the sanitized title as the file stem. Previously a
            # corrupted literal was used here, so every rule collided on the
            # same name instead of getting "<filename>.md".
            filepath = self._rules_path / f"{filename}.md"

            # Ensure unique filename by appending a numeric suffix.
            counter = 1
            while filepath.exists():
                filepath = self._rules_path / f"{filename}-{counter}.md"
                counter += 1

            now = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())

            metadata = {
                'id': rule_id,
                'title': title,
                'description': description,
                'created_at': now,
                'updated_at': now
            }

            frontmatter = self._format_frontmatter(metadata)
            full_content = f"{frontmatter}\n\n{content}"

            try:
                with open(filepath, 'w', encoding='utf-8') as f:
                    f.write(full_content)

                logger.info(f"[UserRulesManager] Created rule: {filepath}")

                return {
                    'id': rule_id,
                    'title': title,
                    'description': description,
                    'content': content,
                    'created_at': now,
                    'updated_at': now,
                    'filename': filepath.name
                }
            except Exception as e:
                logger.error(f"[UserRulesManager] Error creating rule: {e}")
                return None

    def update_rule(self, rule_id: str, title: Optional[str] = None,
                    content: Optional[str] = None,
                    description: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Update an existing rule; only non-None fields are changed.

        Returns the updated rule dict, or None when the rule is not found
        or the rules directory is unavailable.
        """
        if not self._rules_path:
            return None

        with self._file_lock:
            # Scan files for the matching frontmatter id (filenames are
            # derived from titles, so they cannot be used as the key).
            for md_file in self._rules_path.glob('*.md'):
                try:
                    with open(md_file, 'r', encoding='utf-8') as f:
                        file_content = f.read()

                    frontmatter, body = self._parse_frontmatter(file_content)

                    if frontmatter.get('id') == rule_id:
                        # Update fields
                        if title is not None:
                            frontmatter['title'] = title
                        if description is not None:
                            frontmatter['description'] = description
                        if content is not None:
                            body = content

                        frontmatter['updated_at'] = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())

                        # Write back
                        new_frontmatter = self._format_frontmatter(frontmatter)
                        full_content = f"{new_frontmatter}\n\n{body}"

                        with open(md_file, 'w', encoding='utf-8') as f:
                            f.write(full_content)

                        logger.info(f"[UserRulesManager] Updated rule: {md_file}")

                        return {
                            'id': rule_id,
                            'title': frontmatter.get('title', ''),
                            'description': frontmatter.get('description', ''),
                            'content': body,
                            'created_at': frontmatter.get('created_at', ''),
                            'updated_at': frontmatter.get('updated_at', ''),
                            'filename': md_file.name
                        }

                except Exception as e:
                    logger.error(f"[UserRulesManager] Error updating {md_file}: {e}")
                    continue

        logger.warning(f"[UserRulesManager] Rule not found: {rule_id}")
        return None

    def delete_rule(self, rule_id: str) -> bool:
        """Delete a rule by ID. Returns True when a file was removed."""
        if not self._rules_path:
            return False

        with self._file_lock:
            for md_file in self._rules_path.glob('*.md'):
                try:
                    with open(md_file, 'r', encoding='utf-8') as f:
                        content = f.read()

                    frontmatter, _ = self._parse_frontmatter(content)

                    if frontmatter.get('id') == rule_id:
                        md_file.unlink()
                        logger.info(f"[UserRulesManager] Deleted rule: {md_file}")
                        return True

                except Exception as e:
                    logger.error(f"[UserRulesManager] Error checking {md_file}: {e}")
                    continue

        logger.warning(f"[UserRulesManager] Rule not found for deletion: {rule_id}")
        return False

    def migrate_from_json(self, snippets: List[Dict[str, Any]]) -> int:
        """
        Migrate snippets from JSON format to markdown files.
        Returns the number of successfully migrated rules.
        """
        migrated = 0
        for snippet in snippets:
            try:
                rule = self.create_rule(
                    title=snippet.get('title', 'Untitled'),
                    content=snippet.get('content', ''),
                    description=snippet.get('description', ''),
                    rule_id=snippet.get('id')
                )
                if rule:
                    migrated += 1
            except Exception as e:
                logger.error(f"[UserRulesManager] Error migrating snippet: {e}")

        logger.info(f"[UserRulesManager] Migrated {migrated}/{len(snippets)} snippets")
        return migrated

    def is_available(self) -> bool:
        """Check if the user rules directory is available."""
        return self._rules_path is not None and self._rules_path.exists()

    def get_info(self) -> Dict[str, Any]:
        """Get information about the user rules setup."""
        info = {
            "available": self.is_available(),
            "rules_path": str(self._rules_path) if self._rules_path else None,
            "rules_count": 0
        }

        if self.is_available():
            try:
                info["rules_count"] = len(list(self._rules_path.glob('*.md')))
            except Exception as e:
                info["error"] = str(e)

        return info
|
|
956
|
+
|
|
957
|
+
|
|
958
|
+
# Global user rules instance accessor
|
|
959
|
+
def get_user_rules_manager() -> UserRulesManager:
    """Get the singleton instance."""
    manager = UserRulesManager.get_instance()
    return manager
|