sqlsaber 0.21.0__py3-none-any.whl → 0.23.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlsaber might be problematic.
- sqlsaber/cli/display.py +17 -3
- sqlsaber/cli/streaming.py +8 -2
- sqlsaber/cli/threads.py +4 -0
- sqlsaber/database/connection.py +105 -12
- {sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/METADATA +1 -1
- {sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/RECORD +9 -9
- {sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/WHEEL +0 -0
- {sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/entry_points.txt +0 -0
- {sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/licenses/LICENSE +0 -0
sqlsaber/cli/display.py
CHANGED
@@ -93,12 +93,15 @@ class LiveMarkdownRenderer:
         """Finalize and stop the current Live segment, if any."""
         if self._live is None:
             return
-
-
+        # Persist the *final* render exactly once, then shut Live down.
+        buf = self._buffer
         self._live.stop()
         self._live = None
         self._buffer = ""
         self._current_kind = None
+        # Print the complete markdown to scroll-back for permanent reference
+        if buf:
+            self.console.print(Markdown(buf))

     def end_if_active(self) -> None:
         self.end()
@@ -154,10 +157,12 @@ class LiveMarkdownRenderer:
         if self._live is not None:
             self.end()
         self._buffer = initial_markdown or ""
+        # NOTE: Use transient=True so the live widget disappears on exit,
+        # giving a clean transition to the final printed result.
         live = Live(
             Markdown(self._buffer),
             console=self.console,
-
+            transient=True,
             refresh_per_second=12,
         )
         self._live = live
@@ -251,6 +256,15 @@ class DisplayManager:
         """Display error message."""
         self.console.print(f"\n[bold red]Error:[/bold red] {error_message}")

+    def show_sql_error(self, error_message: str, suggestions: list[str] | None = None):
+        """Display SQL-specific error with optional suggestions."""
+        self.show_newline()
+        self.console.print(f"[bold red]SQL error:[/bold red] {error_message}")
+        if suggestions:
+            self.console.print("[yellow]Hints:[/yellow]")
+            for suggestion in suggestions:
+                self.console.print(f" • {suggestion}")
+
     def show_processing(self, message: str):
         """Display processing message."""
         self.console.print()  # Add newline
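
Note: the new show_sql_error helper pairs the error text with optional hint lines. As a standalone illustration of the output format (not sqlsaber code; the sample error message and hints below are invented), the same rendering can be reproduced with rich directly:

# Standalone sketch using rich; mirrors DisplayManager.show_sql_error() above.
from rich.console import Console

console = Console()


def show_sql_error(error_message: str, suggestions: list[str] | None = None) -> None:
    console.print()  # stands in for self.show_newline()
    console.print(f"[bold red]SQL error:[/bold red] {error_message}")
    if suggestions:
        console.print("[yellow]Hints:[/yellow]")
        for suggestion in suggestions:
            console.print(f" • {suggestion}")


show_sql_error(
    'column "usrname" does not exist',
    ['Did you mean "username"?', "Use the schema tool to list available columns"],
)
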
sqlsaber/cli/streaming.py
CHANGED
@@ -114,8 +114,14 @@ class StreamingQueryHandler:
                     pass
                 elif isinstance(content, dict):
                     data = content
-
-
+
+                    if isinstance(data, dict):
+                        if data.get("success") and data.get("results"):
+                            self.display.show_query_results(data["results"])  # type: ignore[arg-type]
+                        elif "error" in data:
+                            self.display.show_sql_error(
+                                data.get("error"), data.get("suggestions")
+                            )
                 # Add a blank line after tool output to separate from next segment
                 self.display.show_newline()
                 # Show status while agent sends a follow-up request to the model
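
Note: this branch inspects the tool-result payload for four keys (success, results, error, suggestions); successful results go to show_query_results and failures to the new show_sql_error. The same dispatch is mirrored in _render_transcript in threads.py below. A minimal sketch of that payload shape and dispatch; the example dicts and the render_tool_result helper name are invented for illustration, only the keys come from the diff:

from typing import Any


def render_tool_result(display: Any, data: dict[str, Any]) -> None:
    if data.get("success") and data.get("results"):
        display.show_query_results(data["results"])
    elif "error" in data:
        display.show_sql_error(data.get("error"), data.get("suggestions"))


ok_payload = {"success": True, "results": [{"id": 1, "name": "Ada"}]}
error_payload = {
    "error": 'relation "users" does not exist',
    "suggestions": ["Check the table name", "List tables with the schema tool"],
}
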
sqlsaber/cli/threads.py
CHANGED
@@ -131,6 +131,10 @@ def _render_transcript(
                     and data.get("results")
                 ):
                     dm.show_query_results(data["results"])  # type: ignore[arg-type]
+                elif isinstance(data, dict) and "error" in data:
+                    dm.show_sql_error(
+                        data.get("error"), data.get("suggestions")
+                    )
                 else:
                     console.print(
                         Panel.fit(
sqlsaber/database/connection.py
CHANGED
@@ -1,5 +1,6 @@
 """Database connection management."""

+import asyncio
 import ssl
 from abc import ABC, abstractmethod
 from pathlib import Path
@@ -10,6 +11,17 @@ import aiomysql
 import aiosqlite
 import asyncpg

+# Default query timeout to prevent runaway queries
+DEFAULT_QUERY_TIMEOUT = 30.0  # seconds
+
+
+class QueryTimeoutError(RuntimeError):
+    """Exception raised when a query exceeds its timeout."""
+
+    def __init__(self, seconds: float):
+        self.timeout = seconds
+        super().__init__(f"Query exceeded timeout of {seconds}s")
+

 class BaseDatabaseConnection(ABC):
     """Abstract base class for database connections."""
@@ -29,11 +41,18 @@ class BaseDatabaseConnection(ABC):
         pass

     @abstractmethod
-    async def execute_query(
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
         """Execute a query and return results as list of dicts.

         All queries run in a transaction that is rolled back at the end,
         ensuring no changes are persisted to the database.
+
+        Args:
+            query: SQL query to execute
+            *args: Query parameters
+            timeout: Query timeout in seconds (overrides default_timeout)
         """
         pass

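
Note: with the new signature, callers can override the 30-second DEFAULT_QUERY_TIMEOUT per call and handle QueryTimeoutError explicitly. A caller-side sketch; the run_with_budget helper and the 5-second budget are assumptions for illustration, not part of this release:

# Hypothetical caller; `conn` is any BaseDatabaseConnection implementation.
from sqlsaber.database.connection import QueryTimeoutError


async def run_with_budget(conn, query: str) -> list[dict]:
    try:
        # Per-call override; omitting timeout falls back to DEFAULT_QUERY_TIMEOUT (30 s).
        return await conn.execute_query(query, timeout=5.0)
    except QueryTimeoutError as exc:
        print(f"Query aborted after {exc.timeout}s")
        return []
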
@@ -111,21 +130,40 @@ class PostgreSQLConnection(BaseDatabaseConnection):
         await self._pool.close()
         self._pool = None

-    async def execute_query(
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
         """Execute a query and return results as list of dicts.

         All queries run in a transaction that is rolled back at the end,
         ensuring no changes are persisted to the database.
         """
+        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
         pool = await self.get_pool()
+
         async with pool.acquire() as conn:
             # Start a transaction that we'll always rollback
             transaction = conn.transaction()
             await transaction.start()

             try:
-
+                # Set server-side timeout if specified
+                if effective_timeout:
+                    await conn.execute(
+                        f"SET LOCAL statement_timeout = {int(effective_timeout * 1000)}"
+                    )
+
+                # Execute query with client-side timeout
+                if effective_timeout:
+                    rows = await asyncio.wait_for(
+                        conn.fetch(query, *args), timeout=effective_timeout
+                    )
+                else:
+                    rows = await conn.fetch(query, *args)
+
                 return [dict(row) for row in rows]
+            except asyncio.TimeoutError as exc:
+                raise QueryTimeoutError(effective_timeout or 0) from exc
             finally:
                 # Always rollback to ensure no changes are committed
                 await transaction.rollback()
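
Note: the PostgreSQL path layers a server-side limit (SET LOCAL statement_timeout, in milliseconds) under the client-side asyncio.wait_for. Because SET LOCAL only lasts until the end of the current transaction, the always-rollback pattern also discards the setting, so pooled connections are not left with a stale timeout. A standalone asyncpg sketch of that scoping (not sqlsaber code; the DSN is a placeholder and a reachable server is assumed):

import asyncio

import asyncpg


async def demo() -> None:
    conn = await asyncpg.connect("postgresql://localhost/postgres")  # placeholder DSN
    tx = conn.transaction()
    await tx.start()
    try:
        await conn.execute("SET LOCAL statement_timeout = 5000")  # 5 s, in ms
        print(await conn.fetchval("SHOW statement_timeout"))  # '5s' inside this transaction
    finally:
        await tx.rollback()
    print(await conn.fetchval("SHOW statement_timeout"))  # back to the session default
    await conn.close()


# asyncio.run(demo())
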
@@ -216,21 +254,44 @@ class MySQLConnection(BaseDatabaseConnection):
         await self._pool.wait_closed()
         self._pool = None

-    async def execute_query(
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
         """Execute a query and return results as list of dicts.

         All queries run in a transaction that is rolled back at the end,
         ensuring no changes are persisted to the database.
         """
+        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
         pool = await self.get_pool()
+
         async with pool.acquire() as conn:
             async with conn.cursor(aiomysql.DictCursor) as cursor:
                 # Start transaction
                 await conn.begin()
                 try:
-
-
+                    # Set server-side timeout if specified
+                    if effective_timeout:
+                        await cursor.execute(
+                            f"SET SESSION MAX_EXECUTION_TIME = {int(effective_timeout * 1000)}"
+                        )
+
+                    # Execute query with client-side timeout
+                    if effective_timeout:
+                        await asyncio.wait_for(
+                            cursor.execute(query, args if args else None),
+                            timeout=effective_timeout,
+                        )
+                        rows = await asyncio.wait_for(
+                            cursor.fetchall(), timeout=effective_timeout
+                        )
+                    else:
+                        await cursor.execute(query, args if args else None)
+                        rows = await cursor.fetchall()
+
                     return [dict(row) for row in rows]
+                except asyncio.TimeoutError as exc:
+                    raise QueryTimeoutError(effective_timeout or 0) from exc
                 finally:
                     # Always rollback to ensure no changes are committed
                     await conn.rollback()
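
Note: on MySQL, MAX_EXECUTION_TIME is also given in milliseconds but only limits read-only SELECT statements, so the client-side asyncio.wait_for remains the backstop for anything else. A small aiomysql sketch of the session setting (not sqlsaber code; connection parameters are placeholders and a reachable server is assumed):

import aiomysql


async def show_timeout_setting() -> None:
    conn = await aiomysql.connect(host="localhost", user="root", db="test")  # placeholders
    async with conn.cursor() as cur:
        await cur.execute("SET SESSION MAX_EXECUTION_TIME = 30000")  # 30 s, in ms
        await cur.execute("SELECT @@SESSION.MAX_EXECUTION_TIME")
        print(await cur.fetchone())  # (30000,)
    conn.close()
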
@@ -252,12 +313,16 @@ class SQLiteConnection(BaseDatabaseConnection):
         """SQLite connections are created per query, no persistent pool to close."""
         pass

-    async def execute_query(
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
         """Execute a query and return results as list of dicts.

         All queries run in a transaction that is rolled back at the end,
         ensuring no changes are persisted to the database.
         """
+        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
+
         async with aiosqlite.connect(self.database_path) as conn:
             # Enable row factory for dict-like access
             conn.row_factory = aiosqlite.Row
@@ -265,9 +330,22 @@
             # Start transaction
             await conn.execute("BEGIN")
             try:
-
-
+                # Execute query with client-side timeout (SQLite has no server-side timeout)
+                if effective_timeout:
+                    cursor = await asyncio.wait_for(
+                        conn.execute(query, args if args else ()),
+                        timeout=effective_timeout,
+                    )
+                    rows = await asyncio.wait_for(
+                        cursor.fetchall(), timeout=effective_timeout
+                    )
+                else:
+                    cursor = await conn.execute(query, args if args else ())
+                    rows = await cursor.fetchall()
+
                 return [dict(row) for row in rows]
+            except asyncio.TimeoutError as exc:
+                raise QueryTimeoutError(effective_timeout or 0) from exc
             finally:
                 # Always rollback to ensure no changes are committed
                 await conn.rollback()
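
Note: SQLite has no server-side statement timeout, so this path relies entirely on asyncio.wait_for. That call cancels the awaiting coroutine, not the statement already executing inside SQLite on aiosqlite's worker thread, so a very slow statement may keep running in the background after QueryTimeoutError is raised. A self-contained sketch of the wrapper pattern, with asyncio.sleep standing in for the slow statement; QueryTimeoutError is copied from the diff above:

import asyncio


class QueryTimeoutError(RuntimeError):
    def __init__(self, seconds: float):
        self.timeout = seconds
        super().__init__(f"Query exceeded timeout of {seconds}s")


async def slow_statement() -> list[dict]:
    await asyncio.sleep(10)  # stand-in for a long-running conn.execute(...)
    return []


async def main() -> None:
    try:
        await asyncio.wait_for(slow_statement(), timeout=0.1)
    except asyncio.TimeoutError as exc:
        raise QueryTimeoutError(0.1) from exc


# asyncio.run(main())  # raises QueryTimeoutError after ~0.1 s
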
@@ -383,20 +461,35 @@ class CSVConnection(BaseDatabaseConnection):
         except Exception as e:
             raise ValueError(f"Error loading CSV file '{self.csv_path}': {str(e)}")

-    async def execute_query(
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
         """Execute a query and return results as list of dicts.

         All queries run in a transaction that is rolled back at the end,
         ensuring no changes are persisted to the database.
         """
+        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
         conn = await self.get_pool()

         # Start transaction
         await conn.execute("BEGIN")
         try:
-
-
+            # Execute query with client-side timeout (CSV uses in-memory SQLite)
+            if effective_timeout:
+                cursor = await asyncio.wait_for(
+                    conn.execute(query, args if args else ()), timeout=effective_timeout
+                )
+                rows = await asyncio.wait_for(
+                    cursor.fetchall(), timeout=effective_timeout
+                )
+            else:
+                cursor = await conn.execute(query, args if args else ())
+                rows = await cursor.fetchall()
+
             return [dict(row) for row in rows]
+        except asyncio.TimeoutError as exc:
+            raise QueryTimeoutError(effective_timeout or 0) from exc
         finally:
             # Always rollback to ensure no changes are committed
             await conn.rollback()
{sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/RECORD
CHANGED
@@ -9,12 +9,12 @@ sqlsaber/cli/auth.py,sha256=jTsRgbmlGPlASSuIKmdjjwfqtKvjfKd_cTYxX0-QqaQ,7400
 sqlsaber/cli/commands.py,sha256=mjLG9i1bXf0TEroxkIxq5O7Hhjufz3Ad72cyJz7vE1k,8128
 sqlsaber/cli/completers.py,sha256=HsUPjaZweLSeYCWkAcgMl8FylQ1xjWBWYTEL_9F6xfU,6430
 sqlsaber/cli/database.py,sha256=JKtHSN-BFzBa14REf0phFVQB7d67m1M5FFaD8N6DdrY,12966
-sqlsaber/cli/display.py,sha256=
+sqlsaber/cli/display.py,sha256=9uXg0GqFXGAwBj1O7-i6aLPQkGq-iURrsKHk6PWTq1E,15025
 sqlsaber/cli/interactive.py,sha256=7uM4LoXbhPJr8o5yNjICSzL0uxZkp1psWrVq4G9V0OI,13118
 sqlsaber/cli/memory.py,sha256=OufHFJFwV0_GGn7LvKRTJikkWhV1IwNIUDOxFPHXOaQ,7794
 sqlsaber/cli/models.py,sha256=ZewtwGQwhd9b-yxBAPKePolvI1qQG-EkmeWAGMqtWNQ,8986
-sqlsaber/cli/streaming.py,sha256=
-sqlsaber/cli/threads.py,sha256=
+sqlsaber/cli/streaming.py,sha256=Eo5CNUgDGY1WYP90jwDA2aY7RefN-TfcStA6NyjUQTY,7076
+sqlsaber/cli/threads.py,sha256=HJ6v9wEv1as21B7IJglYs3q6LH7Plv2oheLbM5YEQQA,11549
 sqlsaber/config/__init__.py,sha256=olwC45k8Nc61yK0WmPUk7XHdbsZH9HuUAbwnmKe3IgA,100
 sqlsaber/config/api_keys.py,sha256=RqWQCko1tY7sES7YOlexgBH5Hd5ne_kGXHdBDNqcV2U,3649
 sqlsaber/config/auth.py,sha256=b5qB2h1doXyO9Bn8z0CcL8LAR2jF431gGXBGKLgTmtQ,2756
@@ -24,7 +24,7 @@ sqlsaber/config/oauth_tokens.py,sha256=C9z35hyx-PvSAYdC1LNf3rg9_wsEIY56hkEczelba
 sqlsaber/config/providers.py,sha256=JFjeJv1K5Q93zWSlWq3hAvgch1TlgoF0qFa0KJROkKY,2957
 sqlsaber/config/settings.py,sha256=vgb_RXaM-7DgbxYDmWNw1cSyMqwys4j3qNCvM4bljwI,5586
 sqlsaber/database/__init__.py,sha256=a_gtKRJnZVO8-fEZI7g3Z8YnGa6Nio-5Y50PgVp07ss,176
-sqlsaber/database/connection.py,sha256=
+sqlsaber/database/connection.py,sha256=1bDPEa6cmdh87gPfhNeBLpOdI0E2_2KlE74q_-4l_jI,18913
 sqlsaber/database/resolver.py,sha256=RPXF5EoKzvQDDLmPGNHYd2uG_oNICH8qvUjBp6iXmNY,3348
 sqlsaber/database/schema.py,sha256=r12qoN3tdtAXdO22EKlauAe7QwOm8lL2vTMM59XEMMY,26594
 sqlsaber/mcp/__init__.py,sha256=COdWq7wauPBp5Ew8tfZItFzbcLDSEkHBJSMhxzy8C9c,112
@@ -40,8 +40,8 @@ sqlsaber/tools/enums.py,sha256=CH32mL-0k9ZA18911xLpNtsgpV6tB85TktMj6uqGz54,411
 sqlsaber/tools/instructions.py,sha256=X-x8maVkkyi16b6Tl0hcAFgjiYceZaSwyWTfmrvx8U8,9024
 sqlsaber/tools/registry.py,sha256=HWOQMsNIdL4XZS6TeNUyrL-5KoSDH6PHsWd3X66o-18,3211
 sqlsaber/tools/sql_tools.py,sha256=hM6tKqW5MDhFUt6MesoqhTUqIpq_5baIIDoN1MjDCXY,9647
-sqlsaber-0.
-sqlsaber-0.
-sqlsaber-0.
-sqlsaber-0.
-sqlsaber-0.
+sqlsaber-0.23.0.dist-info/METADATA,sha256=olVI7W6UB-F6oZUqOjTWhJAvYtDbXXO0xoeOVYYmpfM,6178
+sqlsaber-0.23.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sqlsaber-0.23.0.dist-info/entry_points.txt,sha256=qEbOB7OffXPFgyJc7qEIJlMEX5RN9xdzLmWZa91zCQQ,162
+sqlsaber-0.23.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+sqlsaber-0.23.0.dist-info/RECORD,,
{sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/WHEEL
File without changes
{sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/entry_points.txt
File without changes
{sqlsaber-0.21.0.dist-info → sqlsaber-0.23.0.dist-info}/licenses/LICENSE
File without changes