nc1709-1.15.4-py3-none-any.whl → nc1709-1.18.8-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- nc1709/__init__.py +1 -1
- nc1709/agent/core.py +172 -19
- nc1709/agent/permissions.py +2 -2
- nc1709/agent/tools/bash_tool.py +295 -8
- nc1709/cli.py +435 -19
- nc1709/cli_ui.py +137 -52
- nc1709/conversation_logger.py +416 -0
- nc1709/llm_adapter.py +62 -4
- nc1709/plugins/agents/database_agent.py +695 -0
- nc1709/plugins/agents/django_agent.py +11 -4
- nc1709/plugins/agents/docker_agent.py +11 -4
- nc1709/plugins/agents/fastapi_agent.py +11 -4
- nc1709/plugins/agents/git_agent.py +11 -4
- nc1709/plugins/agents/nextjs_agent.py +11 -4
- nc1709/plugins/agents/ollama_agent.py +574 -0
- nc1709/plugins/agents/test_agent.py +702 -0
- nc1709/prompts/unified_prompt.py +156 -14
- nc1709/requirements_tracker.py +526 -0
- nc1709/thinking_messages.py +337 -0
- nc1709/version_check.py +6 -2
- nc1709/web/server.py +63 -3
- nc1709/web/templates/index.html +819 -140
- {nc1709-1.15.4.dist-info → nc1709-1.18.8.dist-info}/METADATA +10 -7
- {nc1709-1.15.4.dist-info → nc1709-1.18.8.dist-info}/RECORD +28 -22
- {nc1709-1.15.4.dist-info → nc1709-1.18.8.dist-info}/WHEEL +0 -0
- {nc1709-1.15.4.dist-info → nc1709-1.18.8.dist-info}/entry_points.txt +0 -0
- {nc1709-1.15.4.dist-info → nc1709-1.18.8.dist-info}/licenses/LICENSE +0 -0
- {nc1709-1.15.4.dist-info → nc1709-1.18.8.dist-info}/top_level.txt +0 -0
nc1709/plugins/agents/database_agent.py (new file)
@@ -0,0 +1,695 @@
+"""
+Database Agent for NC1709
+Handles database operations across PostgreSQL, MySQL, SQLite, MongoDB, Redis
+"""
+import subprocess
+import json
+import re
+import os
+from pathlib import Path
+from typing import Dict, Any, Optional, List
+from dataclasses import dataclass
+
+try:
+    from ..base import (
+        Plugin, PluginMetadata, PluginCapability,
+        ActionResult
+    )
+except ImportError:
+    # When loaded dynamically via importlib
+    from nc1709.plugins.base import (
+        Plugin, PluginMetadata, PluginCapability,
+        ActionResult
+    )
+
+
+@dataclass
+class TableInfo:
+    """Represents a database table"""
+    name: str
+    schema: str = "public"
+    rows: int = 0
+    size: str = ""
+
+
+@dataclass
+class ConnectionInfo:
+    """Database connection information"""
+    host: str = "localhost"
+    port: int = 5432
+    database: str = ""
+    user: str = ""
+    # Password should come from environment
+
+
+class DatabaseAgent(Plugin):
+    """
+    Database operations agent.
+
+    Provides database operations:
+    - Connection testing
+    - Schema inspection (tables, columns)
+    - Query execution (read-only by default)
+    - Backup/restore helpers
+    - Migration status
+    """
+
+    METADATA = PluginMetadata(
+        name="database",
+        version="1.0.0",
+        description="Database operations and management",
+        author="NC1709 Team",
+        capabilities=[
+            PluginCapability.COMMAND_EXECUTION
+        ],
+        keywords=[
+            "database", "sql", "postgres", "postgresql", "mysql", "sqlite",
+            "mongodb", "redis", "psql", "query", "schema", "migration",
+            "backup", "restore", "table", "column"
+        ],
+        config_schema={
+            "default_database": {"type": "string", "default": "postgres"},
+            "connection_timeout": {"type": "integer", "default": 10},
+            "query_timeout": {"type": "integer", "default": 30},
+        }
+    )
+
+    # Supported databases and their CLI tools
+    DATABASE_TOOLS = {
+        "postgres": {
+            "cli": "psql",
+            "version_cmd": "psql --version",
+            "list_dbs": "psql -l",
+            "list_tables": "psql -c '\\dt'",
+            "describe_table": "psql -c '\\d {table}'",
+            "default_port": 5432,
+        },
+        "mysql": {
+            "cli": "mysql",
+            "version_cmd": "mysql --version",
+            "list_dbs": "mysql -e 'SHOW DATABASES'",
+            "list_tables": "mysql -e 'SHOW TABLES'",
+            "describe_table": "mysql -e 'DESCRIBE {table}'",
+            "default_port": 3306,
+        },
+        "sqlite": {
+            "cli": "sqlite3",
+            "version_cmd": "sqlite3 --version",
+            "list_tables": "sqlite3 {db} '.tables'",
+            "describe_table": "sqlite3 {db} '.schema {table}'",
+            "default_port": None,
+        },
+        "mongodb": {
+            "cli": "mongosh",
+            "version_cmd": "mongosh --version",
+            "list_dbs": "mongosh --eval 'show dbs'",
+            "list_collections": "mongosh --eval 'show collections'",
+            "default_port": 27017,
+        },
+        "redis": {
+            "cli": "redis-cli",
+            "version_cmd": "redis-cli --version",
+            "ping": "redis-cli ping",
+            "info": "redis-cli info",
+            "default_port": 6379,
+        },
+    }
+
+    @property
+    def metadata(self) -> PluginMetadata:
+        return self.METADATA
+
+    def __init__(self, config: Optional[Dict[str, Any]] = None):
+        super().__init__(config)
+        self._available_databases = {}
+
+    def initialize(self) -> bool:
+        """Initialize the database agent"""
+        # Detect available database CLIs
+        for db, config in self.DATABASE_TOOLS.items():
+            try:
+                result = subprocess.run(
+                    config["version_cmd"],
+                    shell=True,
+                    capture_output=True,
+                    text=True,
+                    timeout=5
+                )
+                self._available_databases[db] = result.returncode == 0
+            except Exception:
+                self._available_databases[db] = False
+
+        return any(self._available_databases.values())
+
+    def cleanup(self) -> None:
+        """Cleanup resources"""
+        pass
+
+    def _register_actions(self) -> None:
+        """Register database actions"""
+        self.register_action(
+            "status",
+            self.check_status,
+            "Check database availability",
+            parameters={
+                "database": {"type": "string", "optional": True},
+            }
+        )
+
+        self.register_action(
+            "list_tables",
+            self.list_tables,
+            "List database tables",
+            parameters={
+                "database": {"type": "string", "optional": True},
+                "db_name": {"type": "string", "optional": True},
+            }
+        )
+
+        self.register_action(
+            "describe",
+            self.describe_table,
+            "Describe a table's structure",
+            parameters={
+                "table": {"type": "string", "required": True},
+                "database": {"type": "string", "optional": True},
+            }
+        )
+
+        self.register_action(
+            "query",
+            self.run_query,
+            "Run a read-only query",
+            parameters={
+                "sql": {"type": "string", "required": True},
+                "database": {"type": "string", "optional": True},
+                "db_name": {"type": "string", "optional": True},
+            }
+        )
+
+        self.register_action(
+            "connect_test",
+            self.test_connection,
+            "Test database connection",
+            parameters={
+                "database": {"type": "string", "optional": True},
+                "host": {"type": "string", "optional": True},
+                "port": {"type": "integer", "optional": True},
+            }
+        )
+
+        self.register_action(
+            "backup",
+            self.create_backup,
+            "Create database backup",
+            parameters={
+                "database": {"type": "string", "optional": True},
+                "db_name": {"type": "string", "required": True},
+                "output": {"type": "string", "optional": True},
+            },
+            requires_confirmation=True
+        )
+
+        self.register_action(
+            "migrations",
+            self.check_migrations,
+            "Check migration status",
+            parameters={
+                "framework": {"type": "string", "optional": True},
+            }
+        )
+
+    def _run_command(self, cmd: str, timeout: int = 30) -> subprocess.CompletedProcess:
+        """Run a database command"""
+        return subprocess.run(
+            cmd,
+            shell=True,
+            capture_output=True,
+            text=True,
+            timeout=timeout,
+            env={**os.environ, "PGPASSWORD": os.environ.get("PGPASSWORD", "")}
+        )
+
+    def _get_database_type(self, specified: Optional[str] = None) -> Optional[str]:
+        """Get the database type to use"""
+        if specified and specified in self._available_databases:
+            if self._available_databases[specified]:
+                return specified
+            return None
+
+        # Auto-detect based on project files
+        cwd = Path.cwd()
+
+        # Check for database config files
+        if (cwd / "prisma").exists() or (cwd / "schema.prisma").exists():
+            # Could be any, check prisma schema
+            pass
+
+        # Check for Django
+        if (cwd / "manage.py").exists():
+            # Likely postgres or sqlite
+            if self._available_databases.get("postgres"):
+                return "postgres"
+            if self._available_databases.get("sqlite"):
+                return "sqlite"
+
+        # Return first available
+        for db, available in self._available_databases.items():
+            if available:
+                return db
+
+        return None
+
+    def check_status(self, database: Optional[str] = None) -> ActionResult:
+        """Check database availability
+
+        Args:
+            database: Specific database type to check
+
+        Returns:
+            ActionResult with status information
+        """
+        if database:
+            if database not in self.DATABASE_TOOLS:
+                return ActionResult.fail(f"Unknown database: {database}")
+
+            available = self._available_databases.get(database, False)
+            config = self.DATABASE_TOOLS[database]
+
+            if available:
+                # Get version
+                try:
+                    result = self._run_command(config["version_cmd"])
+                    version = result.stdout.strip()
+                except Exception:
+                    version = "unknown"
+
+                return ActionResult.ok(
+                    message=f"{database} is available",
+                    data={
+                        "database": database,
+                        "available": True,
+                        "version": version,
+                        "cli": config["cli"],
+                    }
+                )
+            else:
+                return ActionResult.fail(
+                    f"{database} CLI ({config['cli']}) not found. "
+                    f"Install it to use {database} features."
+                )
+
+        # Check all databases
+        status = {}
+        for db, available in self._available_databases.items():
+            status[db] = "available" if available else "not installed"
+
+        available_list = [db for db, avail in self._available_databases.items() if avail]
+
+        return ActionResult.ok(
+            message=f"{len(available_list)} database CLIs available",
+            data={
+                "databases": status,
+                "available": available_list,
+            }
+        )
+
+    def list_tables(
+        self,
+        database: Optional[str] = None,
+        db_name: Optional[str] = None
+    ) -> ActionResult:
+        """List database tables
+
+        Args:
+            database: Database type (postgres, mysql, sqlite)
+            db_name: Database name to connect to
+
+        Returns:
+            ActionResult with table list
+        """
+        db_type = self._get_database_type(database)
+        if not db_type:
+            return ActionResult.fail("No database CLI available")
+
+        config = self.DATABASE_TOOLS[db_type]
+
+        if db_type == "postgres":
+            cmd = f"psql {db_name or ''} -c '\\dt'"
+        elif db_type == "mysql":
+            cmd = f"mysql {db_name or ''} -e 'SHOW TABLES'"
+        elif db_type == "sqlite":
+            if not db_name:
+                return ActionResult.fail("SQLite requires a database file path")
+            cmd = f"sqlite3 {db_name} '.tables'"
+        elif db_type == "mongodb":
+            cmd = f"mongosh {db_name or 'test'} --eval 'db.getCollectionNames()'"
+        elif db_type == "redis":
+            cmd = "redis-cli KEYS '*'"
+        else:
+            return ActionResult.fail(f"List tables not supported for {db_type}")
+
+        try:
+            result = self._run_command(cmd)
+
+            if result.returncode != 0:
+                return ActionResult.fail(f"Error listing tables:\n{result.stderr}")
+
+            return ActionResult.ok(
+                message=f"Tables in {db_name or 'default'} ({db_type})",
+                data={
+                    "database_type": db_type,
+                    "database_name": db_name,
+                    "output": result.stdout,
+                }
+            )
+
+        except subprocess.TimeoutExpired:
+            return ActionResult.fail("Query timed out")
+        except Exception as e:
+            return ActionResult.fail(f"Error: {e}")
+
+    def describe_table(
+        self,
+        table: str,
+        database: Optional[str] = None,
+        db_name: Optional[str] = None
+    ) -> ActionResult:
+        """Describe a table's structure
+
+        Args:
+            table: Table name
+            database: Database type
+            db_name: Database name
+
+        Returns:
+            ActionResult with table structure
+        """
+        db_type = self._get_database_type(database)
+        if not db_type:
+            return ActionResult.fail("No database CLI available")
+
+        if db_type == "postgres":
+            cmd = f"psql {db_name or ''} -c '\\d {table}'"
+        elif db_type == "mysql":
+            cmd = f"mysql {db_name or ''} -e 'DESCRIBE {table}'"
+        elif db_type == "sqlite":
+            if not db_name:
+                return ActionResult.fail("SQLite requires a database file path")
+            cmd = f"sqlite3 {db_name} '.schema {table}'"
+        else:
+            return ActionResult.fail(f"Describe not supported for {db_type}")
+
+        try:
+            result = self._run_command(cmd)
+
+            if result.returncode != 0:
+                return ActionResult.fail(f"Error describing table:\n{result.stderr}")
+
+            return ActionResult.ok(
+                message=f"Structure of {table}",
+                data={
+                    "table": table,
+                    "database_type": db_type,
+                    "output": result.stdout,
+                }
+            )
+
+        except Exception as e:
+            return ActionResult.fail(f"Error: {e}")
+
+    def run_query(
+        self,
+        sql: str,
+        database: Optional[str] = None,
+        db_name: Optional[str] = None
+    ) -> ActionResult:
+        """Run a read-only SQL query
+
+        Args:
+            sql: SQL query to run
+            database: Database type
+            db_name: Database name
+
+        Returns:
+            ActionResult with query results
+        """
+        db_type = self._get_database_type(database)
+        if not db_type:
+            return ActionResult.fail("No database CLI available")
+
+        # Safety check: only allow SELECT, SHOW, DESCRIBE, EXPLAIN
+        sql_upper = sql.strip().upper()
+        allowed_starts = ("SELECT", "SHOW", "DESCRIBE", "EXPLAIN", "WITH")
+
+        if not any(sql_upper.startswith(start) for start in allowed_starts):
+            return ActionResult.fail(
+                "Only read-only queries allowed (SELECT, SHOW, DESCRIBE, EXPLAIN). "
+                "Use database tools directly for write operations."
+            )
+
+        if db_type == "postgres":
+            cmd = f"psql {db_name or ''} -c \"{sql}\""
+        elif db_type == "mysql":
+            cmd = f"mysql {db_name or ''} -e \"{sql}\""
+        elif db_type == "sqlite":
+            if not db_name:
+                return ActionResult.fail("SQLite requires a database file path")
+            cmd = f"sqlite3 {db_name} \"{sql}\""
+        else:
+            return ActionResult.fail(f"Query not supported for {db_type}")
+
+        try:
+            result = self._run_command(cmd)
+
+            if result.returncode != 0:
+                return ActionResult.fail(f"Query error:\n{result.stderr}")
+
+            return ActionResult.ok(
+                message="Query executed",
+                data={
+                    "query": sql,
+                    "database_type": db_type,
+                    "output": result.stdout,
+                }
+            )
+
+        except subprocess.TimeoutExpired:
+            return ActionResult.fail("Query timed out")
+        except Exception as e:
+            return ActionResult.fail(f"Error: {e}")
+
+    def test_connection(
+        self,
+        database: Optional[str] = None,
+        host: Optional[str] = None,
+        port: Optional[int] = None
+    ) -> ActionResult:
+        """Test database connection
+
+        Args:
+            database: Database type
+            host: Database host
+            port: Database port
+
+        Returns:
+            ActionResult with connection status
+        """
+        db_type = self._get_database_type(database)
+        if not db_type:
+            return ActionResult.fail("No database CLI available")
+
+        config = self.DATABASE_TOOLS[db_type]
+        host = host or "localhost"
+        port = port or config["default_port"]
+
+        if db_type == "postgres":
+            cmd = f"pg_isready -h {host} -p {port}"
+        elif db_type == "mysql":
+            cmd = f"mysqladmin -h {host} -P {port} ping"
+        elif db_type == "redis":
+            cmd = f"redis-cli -h {host} -p {port} ping"
+        elif db_type == "mongodb":
+            cmd = f"mongosh --host {host}:{port} --eval 'db.runCommand({{ping: 1}})'"
+        else:
+            return ActionResult.fail(f"Connection test not supported for {db_type}")
+
+        try:
+            result = self._run_command(cmd, timeout=10)
+
+            if result.returncode == 0:
+                return ActionResult.ok(
+                    message=f"Connected to {db_type} at {host}:{port}",
+                    data={
+                        "database_type": db_type,
+                        "host": host,
+                        "port": port,
+                        "connected": True,
+                    }
+                )
+            else:
+                return ActionResult.fail(
+                    f"Could not connect to {db_type} at {host}:{port}\n{result.stderr}"
+                )
+
+        except subprocess.TimeoutExpired:
+            return ActionResult.fail(f"Connection to {db_type} timed out")
+        except Exception as e:
+            return ActionResult.fail(f"Connection error: {e}")
+
+    def create_backup(
+        self,
+        db_name: str,
+        database: Optional[str] = None,
+        output: Optional[str] = None
+    ) -> ActionResult:
+        """Create database backup
+
+        Args:
+            db_name: Database name to backup
+            database: Database type
+            output: Output file path
+
+        Returns:
+            ActionResult with backup info
+        """
+        db_type = self._get_database_type(database)
+        if not db_type:
+            return ActionResult.fail("No database CLI available")
+
+        output = output or f"{db_name}_backup.sql"
+
+        if db_type == "postgres":
+            cmd = f"pg_dump {db_name} > {output}"
+        elif db_type == "mysql":
+            cmd = f"mysqldump {db_name} > {output}"
+        elif db_type == "sqlite":
+            cmd = f"sqlite3 {db_name} '.dump' > {output}"
+        elif db_type == "mongodb":
+            cmd = f"mongodump --db {db_name} --out {output}"
+        else:
+            return ActionResult.fail(f"Backup not supported for {db_type}")
+
+        try:
+            result = self._run_command(cmd, timeout=1800)  # 30 min timeout
+
+            if result.returncode == 0:
+                return ActionResult.ok(
+                    message=f"Backup created: {output}",
+                    data={
+                        "database_type": db_type,
+                        "database_name": db_name,
+                        "output_file": output,
+                    }
+                )
+            else:
+                return ActionResult.fail(f"Backup failed:\n{result.stderr}")
+
+        except subprocess.TimeoutExpired:
+            return ActionResult.fail("Backup timed out")
+        except Exception as e:
+            return ActionResult.fail(f"Backup error: {e}")
+
+    def check_migrations(self, framework: Optional[str] = None) -> ActionResult:
+        """Check migration status
+
+        Args:
+            framework: ORM/migration framework (alembic, django, prisma, etc.)
+
+        Returns:
+            ActionResult with migration status
+        """
+        cwd = Path.cwd()
+
+        # Auto-detect framework
+        if not framework:
+            if (cwd / "alembic.ini").exists() or (cwd / "alembic").exists():
+                framework = "alembic"
+            elif (cwd / "manage.py").exists():
+                framework = "django"
+            elif (cwd / "prisma").exists():
+                framework = "prisma"
+            elif (cwd / "db" / "migrate").exists():
+                framework = "rails"
+
+        if not framework:
+            return ActionResult.fail(
+                "Could not detect migration framework. "
+                "Supported: alembic, django, prisma, rails"
+            )
+
+        migration_commands = {
+            "alembic": "alembic current",
+            "django": "python manage.py showmigrations",
+            "prisma": "npx prisma migrate status",
+            "rails": "rails db:migrate:status",
+            "sequelize": "npx sequelize-cli db:migrate:status",
+        }
+
+        if framework not in migration_commands:
+            return ActionResult.fail(f"Unknown migration framework: {framework}")
+
+        try:
+            result = self._run_command(migration_commands[framework])
+
+            return ActionResult.ok(
+                message=f"Migration status ({framework})",
+                data={
+                    "framework": framework,
+                    "output": result.stdout + result.stderr,
+                    "return_code": result.returncode,
+                }
+            )
+
+        except Exception as e:
+            return ActionResult.fail(f"Error checking migrations: {e}")
+
+    def can_handle(self, request: str) -> float:
+        """Check if request is database-related"""
+        request_lower = request.lower()
+
+        # High confidence
+        high_conf = [
+            "database", "sql", "postgres", "mysql", "sqlite", "mongodb",
+            "redis", "psql", "table", "schema", "migration", "query"
+        ]
+        for kw in high_conf:
+            if kw in request_lower:
+                return 0.85
+
+        # Medium confidence
+        med_conf = ["backup", "restore", "dump", "select", "show tables"]
+        for kw in med_conf:
+            if kw in request_lower:
+                return 0.6
+
+        return super().can_handle(request)
+
+    def handle_request(self, request: str, **kwargs) -> Optional[ActionResult]:
+        """Handle a natural language request"""
+        request_lower = request.lower()
+
+        # Status check
+        if any(kw in request_lower for kw in ["database status", "db status", "check database"]):
+            return self.check_status()
+
+        # List tables
+        if any(kw in request_lower for kw in ["list table", "show table", "what table"]):
+            return self.list_tables()
+
+        # Describe table
+        if any(kw in request_lower for kw in ["describe", "schema", "structure"]):
+            # Extract table name
+            match = re.search(r"(?:describe|schema|structure)\s+(?:of\s+)?(\w+)", request_lower)
+            if match:
+                return self.describe_table(match.group(1))
+
+        # Migration status
+        if "migration" in request_lower:
+            return self.check_migrations()
+
+        # Connection test
+        if any(kw in request_lower for kw in ["connect", "connection", "ping"]):
+            return self.test_connection()
+
+        return None
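For orientation, below is a minimal sketch of how the DatabaseAgent added in this release might be exercised directly. It is based only on the code visible in the diff above; how nc1709's plugin loader actually constructs agents, and how ActionResult values are meant to be consumed, are assumptions rather than documented behavior of the package.

```python
# Hypothetical usage sketch; everything outside DatabaseAgent's own methods is an assumption.
from nc1709.plugins.agents.database_agent import DatabaseAgent

agent = DatabaseAgent()  # config dict is optional per __init__

# initialize() probes each CLI (psql, mysql, sqlite3, mongosh, redis-cli)
# with a 5-second timeout and returns True if at least one is on PATH.
if agent.initialize():
    # Summary of which database CLIs were detected.
    print(agent.check_status())

    # Read-only guard: run_query rejects statements that do not start with
    # SELECT, SHOW, DESCRIBE, EXPLAIN, or WITH.
    print(agent.run_query("SELECT 1", database="postgres", db_name="postgres"))

    # Keyword routing, mirroring the checks in handle_request().
    print(agent.handle_request("show tables"))

agent.cleanup()
```

Note that every operation ultimately runs through subprocess with shell=True, and db_name, table, and sql values are interpolated directly into the shell command strings, so callers should only pass trusted input to these parameters.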