sqlsaber 0.22.0__tar.gz → 0.24.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of sqlsaber might be problematic.
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/PKG-INFO +1 -1
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/changelog.md +15 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/index.mdx +2 -2
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/pyproject.toml +1 -1
- sqlsaber-0.24.0/sqlsaber.gif +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/display.py +66 -22
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/streaming.py +8 -2
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/threads.py +45 -18
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/database/schema.py +152 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/tools/sql_tools.py +6 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_tools/test_sql_tools.py +1 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/uv.lock +1 -1
- sqlsaber-0.22.0/sqlsaber.gif +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.github/workflows/claude-code-review.yml +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.github/workflows/claude.yml +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.github/workflows/deploy-docs.yml +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.github/workflows/publish.yml +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.github/workflows/test.yml +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.gitignore +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/.python-version +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/AGENT.md +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/CLAUDE.md +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/LICENSE +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/README.md +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/.gitignore +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/.vscode/extensions.json +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/.vscode/launch.json +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/CLAUDE.md +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/astro.config.mjs +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/package-lock.json +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/package.json +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/public/CNAME +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/public/favicon.svg +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/assets/sqlsaber-hero.svg +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/authentication.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/database-setup.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/getting-started.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/memory.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/models.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/queries.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/guides/threads.md +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/installation.mdx +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content/docs/reference/commands.md +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/content.config.ts +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/src/styles/global.css +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/docs/tsconfig.json +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/legislators.db +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/pytest.ini +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/sqlsaber.svg +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/__main__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/agents/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/agents/base.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/agents/mcp.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/agents/pydantic_ai_agent.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/auth.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/commands.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/completers.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/database.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/interactive.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/memory.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/cli/models.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/api_keys.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/auth.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/database.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/oauth_flow.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/oauth_tokens.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/providers.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/config/settings.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/database/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/database/connection.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/database/resolver.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/mcp/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/mcp/mcp.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/memory/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/memory/manager.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/memory/storage.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/threads/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/threads/storage.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/tools/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/tools/base.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/tools/enums.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/tools/instructions.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/src/sqlsaber/tools/registry.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/conftest.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_cli/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_cli/test_auth_reset.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_cli/test_commands.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_cli/test_threads.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_config/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_config/test_database.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_config/test_oauth.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_config/test_providers.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_config/test_settings.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_database/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_database/test_connection.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_database/test_timeout.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_database_resolver.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_threads_storage.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_tools/__init__.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_tools/test_base.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_tools/test_instructions.py +0 -0
- {sqlsaber-0.22.0 → sqlsaber-0.24.0}/tests/test_tools/test_registry.py +0 -0
docs/src/content/docs/changelog.md:

@@ -7,6 +7,21 @@ All notable changes to SQLsaber will be documented here.
 
 ### Unreleased
 
+### v0.24.0 - 2025-09-24
+
+#### Added
+
+- Index information is now included in `introspect_schema` tool output alongside columns, primary keys, and foreign keys
+- Cleaner markdown formatting for thread exports when redirected to files
+- Terminal display remains unchanged with rich styling and colors
+
+### v0.23.0 - 2025-09-16
+
+#### Added
+
+- Smoother markdown streaming and prevent duplicate messages
+- SQL execution errors now display in interactive sessions instead of being silently dropped
+
 ### v0.22.0 - 2025-09-15
 
 #### Added
sqlsaber-0.24.0/sqlsaber.gif: Binary file (no text diff)
src/sqlsaber/cli/display.py:

@@ -93,12 +93,15 @@ class LiveMarkdownRenderer:
         """Finalize and stop the current Live segment, if any."""
         if self._live is None:
             return
-
-
+        # Persist the *final* render exactly once, then shut Live down.
+        buf = self._buffer
         self._live.stop()
         self._live = None
         self._buffer = ""
         self._current_kind = None
+        # Print the complete markdown to scroll-back for permanent reference
+        if buf:
+            self.console.print(Markdown(buf))
 
     def end_if_active(self) -> None:
         self.end()
@@ -154,10 +157,12 @@ class LiveMarkdownRenderer:
         if self._live is not None:
             self.end()
         self._buffer = initial_markdown or ""
+        # NOTE: Use transient=True so the live widget disappears on exit,
+        # giving a clean transition to the final printed result.
         live = Live(
             Markdown(self._buffer),
             console=self.console,
-
+            transient=True,
             refresh_per_second=12,
         )
         self._live = live
@@ -193,22 +198,32 @@ class DisplayManager:
         # Normalized leading blank line before tool headers
         self.show_newline()
         if tool_name == "list_tables":
-            self.console.
-
-
+            if self.console.is_terminal:
+                self.console.print(
+                    "[dim bold]:gear: Discovering available tables[/dim bold]"
+                )
+            else:
+                self.console.print("**Discovering available tables**\n")
         elif tool_name == "introspect_schema":
             pattern = tool_input.get("table_pattern", "all tables")
-            self.console.
-
-
+            if self.console.is_terminal:
+                self.console.print(
+                    f"[dim bold]:gear: Examining schema for: {pattern}[/dim bold]"
+                )
+            else:
+                self.console.print(f"**Examining schema for:** {pattern}\n")
         elif tool_name == "execute_sql":
             # For streaming, we render SQL via LiveMarkdownRenderer; keep Syntax
             # rendering for threads show/resume. Controlled by include_sql flag.
             query = tool_input.get("query", "")
-            self.console.
-
-
-
+            if self.console.is_terminal:
+                self.console.print("[dim bold]:gear: Executing SQL:[/dim bold]")
+                self.show_newline()
+                syntax = Syntax(query, "sql", background_color="default", word_wrap=True)
+                self.console.print(syntax)
+            else:
+                self.console.print("**Executing SQL:**\n")
+                self.console.print(f"```sql\n{query}\n```\n")
 
     def show_text_stream(self, text: str):
         """Display streaming text."""
@@ -220,9 +235,12 @@ class DisplayManager:
         if not results:
             return
 
-            self.console.
-
-
+        if self.console.is_terminal:
+            self.console.print(
+                f"\n[bold magenta]Results ({len(results)} rows):[/bold magenta]"
+            )
+        else:
+            self.console.print(f"\n**Results ({len(results)} rows):**\n")
 
         # Create table with columns from first result
         all_columns = list(results[0].keys())
@@ -230,9 +248,14 @@ class DisplayManager:
 
         # Show warning if columns were truncated
         if len(all_columns) > 15:
-            self.console.
-
-
+            if self.console.is_terminal:
+                self.console.print(
+                    f"[yellow]Note: Showing first 15 of {len(all_columns)} columns[/yellow]"
+                )
+            else:
+                self.console.print(
+                    f"*Note: Showing first 15 of {len(all_columns)} columns*\n"
+                )
 
         table = self._create_table(display_columns)
 
@@ -243,14 +266,28 @@ class DisplayManager:
         self.console.print(table)
 
         if len(results) > 20:
-            self.console.
-
-
+            if self.console.is_terminal:
+                self.console.print(
+                    f"[yellow]... and {len(results) - 20} more rows[/yellow]"
+                )
+            else:
+                self.console.print(
+                    f"*... and {len(results) - 20} more rows*\n"
+                )
 
     def show_error(self, error_message: str):
         """Display error message."""
         self.console.print(f"\n[bold red]Error:[/bold red] {error_message}")
 
+    def show_sql_error(self, error_message: str, suggestions: list[str] | None = None):
+        """Display SQL-specific error with optional suggestions."""
+        self.show_newline()
+        self.console.print(f"[bold red]SQL error:[/bold red] {error_message}")
+        if suggestions:
+            self.console.print("[yellow]Hints:[/yellow]")
+            for suggestion in suggestions:
+                self.console.print(f" • {suggestion}")
+
     def show_processing(self, message: str):
         """Display processing message."""
         self.console.print()  # Add newline
@@ -371,6 +408,13 @@ class DisplayManager:
             for fk in foreign_keys:
                 self.console.print(f" • {fk}")
 
+            # Show indexes
+            indexes = table_info.get("indexes", [])
+            if indexes:
+                self.console.print("[bold blue]Indexes:[/bold blue]")
+                for idx in indexes:
+                    self.console.print(f" • {idx}")
+
         except json.JSONDecodeError:
             self.show_error("Failed to parse schema data")
         except Exception as e:
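The recurring pattern in these display.py changes is a branch on Rich's `Console.is_terminal`: styled markup, `Syntax`, and table renderables when attached to a TTY, plain markdown text when stdout is piped or redirected. A minimal standalone sketch of that pattern (the function name and sample query are illustrative, not taken from the package):

````python
from rich.console import Console
from rich.syntax import Syntax

console = Console()


def show_sql(query: str) -> None:
    """Print a SQL snippet, adapting to whether stdout is a terminal."""
    if console.is_terminal:
        # Interactive session: Rich markup plus syntax highlighting.
        console.print("[dim bold]:gear: Executing SQL:[/dim bold]")
        console.print(Syntax(query, "sql", background_color="default", word_wrap=True))
    else:
        # Redirected output: plain markdown so the exported file stays readable.
        console.print("**Executing SQL:**\n")
        console.print(f"```sql\n{query}\n```\n")


show_sql("SELECT id, name FROM legislators LIMIT 5;")
````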
src/sqlsaber/cli/streaming.py:

@@ -114,8 +114,14 @@ class StreamingQueryHandler:
                 pass
             elif isinstance(content, dict):
                 data = content
-
-
+
+            if isinstance(data, dict):
+                if data.get("success") and data.get("results"):
+                    self.display.show_query_results(data["results"])  # type: ignore[arg-type]
+                elif "error" in data:
+                    self.display.show_sql_error(
+                        data.get("error"), data.get("suggestions")
+                    )
             # Add a blank line after tool output to separate from next segment
             self.display.show_newline()
             # Show status while agent sends a follow-up request to the model
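The handler now dispatches on the shape of the tool-result dict: a payload carrying `success` and `results` is rendered as a results table, while a payload carrying `error` (and optionally `suggestions`) goes through the new `show_sql_error` path. A rough sketch of that dispatch with hypothetical payloads that only mirror the keys checked above:

```python
# Hypothetical payloads mirroring the keys checked in StreamingQueryHandler.
ok_payload = {"success": True, "results": [{"id": 1, "name": "Ada Lovelace"}]}
err_payload = {
    "error": 'relation "userz" does not exist',
    "suggestions": ["Check the table name with list_tables"],
}


def dispatch(display, data: dict) -> None:
    # Same branching as the diff above: results table on success,
    # SQL error with hints otherwise.
    if data.get("success") and data.get("results"):
        display.show_query_results(data["results"])
    elif "error" in data:
        display.show_sql_error(data.get("error"), data.get("suggestions"))
```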
src/sqlsaber/cli/threads.py:

@@ -38,6 +38,8 @@ def _render_transcript(
     from sqlsaber.cli.display import DisplayManager
 
     dm = DisplayManager(console)
+    # Check if output is being redirected (for clean markdown export)
+    is_redirected = not console.is_terminal
 
     # Locate indices of user prompts
     user_indices: list[int] = []
@@ -78,11 +80,17 @@ def _render_transcript(
                 parts.append(str(seg))
         text = "\n".join([s for s in parts if s]) or None
         if text:
-
-
-
+            if is_redirected:
+                console.print(f"**User:**\n\n{text}\n")
+            else:
+                console.print(
+                    Panel.fit(Markdown(text), title="User", border_style="cyan")
+                )
             return
-
+        if is_redirected:
+            console.print("**User:** (no content)\n")
+        else:
+            console.print(Panel.fit("(no content)", title="User", border_style="cyan"))
 
     def _render_response(message: ModelMessage) -> None:
         for part in getattr(message, "parts", []):
@@ -90,11 +98,14 @@ def _render_transcript(
             if kind == "text":
                 text = getattr(part, "content", "")
                 if isinstance(text, str) and text.strip():
-
-
-
+                    if is_redirected:
+                        console.print(f"**Assistant:**\n\n{text}\n")
+                    else:
+                        console.print(
+                            Panel.fit(
+                                Markdown(text), title="Assistant", border_style="green"
+                            )
                         )
-                    )
             elif kind in ("tool-call", "builtin-tool-call"):
                 name = getattr(part, "tool_name", "tool")
                 args = getattr(part, "args", None)
@@ -131,6 +142,24 @@ def _render_transcript(
                         and data.get("results")
                     ):
                         dm.show_query_results(data["results"])  # type: ignore[arg-type]
+                    elif isinstance(data, dict) and "error" in data:
+                        dm.show_sql_error(
+                            data.get("error"), data.get("suggestions")
+                        )
+                    else:
+                        if is_redirected:
+                            console.print(f"**Tool result ({name}):**\n\n{content_str}\n")
+                        else:
+                            console.print(
+                                Panel.fit(
+                                    content_str,
+                                    title=f"Tool result: {name}",
+                                    border_style="yellow",
+                                )
+                            )
+                except Exception:
+                    if is_redirected:
+                        console.print(f"**Tool result ({name}):**\n\n{content_str}\n")
                     else:
                         console.print(
                             Panel.fit(
@@ -139,7 +168,10 @@ def _render_transcript(
                                 border_style="yellow",
                             )
                         )
-
+            else:
+                if is_redirected:
+                    console.print(f"**Tool result ({name}):**\n\n{content_str}\n")
+                else:
                     console.print(
                         Panel.fit(
                             content_str,
@@ -147,14 +179,6 @@ def _render_transcript(
                             border_style="yellow",
                         )
                     )
-                else:
-                    console.print(
-                        Panel.fit(
-                            content_str,
-                            title=f"Tool result: {name}",
-                            border_style="yellow",
-                        )
-                    )
             # Thinking parts omitted
 
     for start_idx, end_idx in slices or [(0, len(all_msgs))]:
@@ -266,7 +290,10 @@ def resume(
         try:
             agent = build_sqlsaber_agent(db_conn, db_name)
             history = await store.get_thread_messages(thread_id)
-            console.
+            if console.is_terminal:
+                console.print(Panel.fit(f"Thread: {thread.id}", border_style="blue"))
+            else:
+                console.print(f"# Thread: {thread.id}\n")
             _render_transcript(console, history, None)
             session = InteractiveSession(
                 console=console,
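With output redirected (`console.is_terminal` is False), the transcript is emitted as plain markdown rather than Rich panels. Based on the f-strings above, an exported thread would look roughly like this (thread id, message text, and tool payload are made up for illustration):

```markdown
# Thread: 1234abcd

**User:**

How many legislators are in the database?

**Assistant:**

Let me check the available tables first.

**Tool result (list_tables):**

{"success": true, "results": []}
```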
src/sqlsaber/database/schema.py:

@@ -32,6 +32,15 @@ class ForeignKeyInfo(TypedDict):
     references: dict[str, str]  # {"table": "schema.table", "column": "column_name"}
 
 
+class IndexInfo(TypedDict):
+    """Type definition for index information."""
+
+    name: str
+    columns: list[str]  # ordered
+    unique: bool
+    type: str | None  # btree, gin, FULLTEXT, etc. None if unknown
+
+
 class SchemaInfo(TypedDict):
     """Type definition for schema information."""
 
@@ -41,6 +50,7 @@ class SchemaInfo(TypedDict):
     columns: dict[str, ColumnInfo]
     primary_keys: list[str]
     foreign_keys: list[ForeignKeyInfo]
+    indexes: list[IndexInfo]
 
 
 class BaseSchemaIntrospector(ABC):
@@ -68,6 +78,11 @@ class BaseSchemaIntrospector(ABC):
         """Get primary keys information for the specific database type."""
         pass
 
+    @abstractmethod
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for the specific database type."""
+        pass
+
     @abstractmethod
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information."""
@@ -209,6 +224,43 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
             """
             return await conn.fetch(pk_query)
 
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for PostgreSQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            # Build proper table filters
+            idx_table_filters = []
+            for table in tables:
+                idx_table_filters.append(
+                    f"(ns.nspname = '{table['table_schema']}' AND t.relname = '{table['table_name']}')"
+                )
+
+            idx_query = f"""
+                SELECT
+                    ns.nspname AS table_schema,
+                    t.relname AS table_name,
+                    i.relname AS index_name,
+                    ix.indisunique AS is_unique,
+                    am.amname AS index_type,
+                    array_agg(a.attname ORDER BY ord.ordinality) AS column_names
+                FROM pg_class t
+                JOIN pg_namespace ns ON ns.oid = t.relnamespace
+                JOIN pg_index ix ON ix.indrelid = t.oid
+                JOIN pg_class i ON i.oid = ix.indexrelid
+                JOIN pg_am am ON am.oid = i.relam
+                JOIN LATERAL unnest(ix.indkey) WITH ORDINALITY AS ord(attnum, ordinality)
+                    ON TRUE
+                JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ord.attnum
+                WHERE ns.nspname NOT IN ('pg_catalog', 'information_schema')
+                    AND ({" OR ".join(idx_table_filters)})
+                GROUP BY table_schema, table_name, index_name, is_unique, index_type
+                ORDER BY table_schema, table_name, index_name;
+            """
+            return await conn.fetch(idx_query)
+
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information for PostgreSQL."""
         pool = await connection.get_pool()
@@ -379,6 +431,37 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
                 await cursor.execute(pk_query)
                 return await cursor.fetchall()
 
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for MySQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                # Build proper table filters
+                idx_table_filters = []
+                for table in tables:
+                    idx_table_filters.append(
+                        f"(TABLE_SCHEMA = '{table['table_schema']}' AND TABLE_NAME = '{table['table_name']}')"
+                    )
+
+                idx_query = f"""
+                    SELECT
+                        TABLE_SCHEMA AS table_schema,
+                        TABLE_NAME AS table_name,
+                        INDEX_NAME AS index_name,
+                        (NON_UNIQUE = 0) AS is_unique,
+                        INDEX_TYPE AS index_type,
+                        GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) AS column_names
+                    FROM INFORMATION_SCHEMA.STATISTICS
+                    WHERE ({" OR ".join(idx_table_filters)})
+                    GROUP BY table_schema, table_name, index_name, is_unique, index_type
+                    ORDER BY table_schema, table_name, index_name;
+                """
+                await cursor.execute(idx_query)
+                return await cursor.fetchall()
+
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information for MySQL."""
         pool = await connection.get_pool()
@@ -531,6 +614,47 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
 
         return primary_keys
 
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for SQLite."""
+        if not tables:
+            return []
+
+        indexes = []
+        for table in tables:
+            table_name = table["table_name"]
+
+            # Get index list using PRAGMA
+            pragma_query = f"PRAGMA index_list({table_name})"
+            table_indexes = await self._execute_query(connection, pragma_query)
+
+            for idx in table_indexes:
+                idx_name = idx["name"]
+                unique = bool(idx["unique"])
+
+                # Skip auto-generated primary key indexes
+                if idx_name.startswith("sqlite_autoindex_"):
+                    continue
+
+                # Get index columns using PRAGMA
+                pragma_info_query = f"PRAGMA index_info({idx_name})"
+                idx_cols = await self._execute_query(connection, pragma_info_query)
+                columns = [
+                    c["name"] for c in sorted(idx_cols, key=lambda r: r["seqno"])
+                ]
+
+                indexes.append(
+                    {
+                        "table_schema": "main",
+                        "table_name": table_name,
+                        "index_name": idx_name,
+                        "is_unique": unique,
+                        "index_type": None,  # SQLite only has B-tree currently
+                        "column_names": columns,
+                    }
+                )
+
+        return indexes
+
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information for SQLite."""
         # Get table names without row counts for better performance
@@ -589,12 +713,14 @@ class SchemaManager:
         columns = await self.introspector.get_columns_info(self.db, tables)
         foreign_keys = await self.introspector.get_foreign_keys_info(self.db, tables)
         primary_keys = await self.introspector.get_primary_keys_info(self.db, tables)
+        indexes = await self.introspector.get_indexes_info(self.db, tables)
 
         # Build schema structure
         schema_info = self._build_table_structure(tables)
         self._add_columns_to_schema(schema_info, columns)
         self._add_primary_keys_to_schema(schema_info, primary_keys)
         self._add_foreign_keys_to_schema(schema_info, foreign_keys)
+        self._add_indexes_to_schema(schema_info, indexes)
 
         return schema_info
 
@@ -613,6 +739,7 @@ class SchemaManager:
                 "columns": {},
                 "primary_keys": [],
                 "foreign_keys": [],
+                "indexes": [],
             }
         return schema_info
 
@@ -666,6 +793,31 @@ class SchemaManager:
                 }
             )
 
+    def _add_indexes_to_schema(
+        self, schema_info: dict[str, dict], indexes: list
+    ) -> None:
+        """Add index information to schema."""
+        for idx in indexes:
+            full_name = f"{idx['table_schema']}.{idx['table_name']}"
+            if full_name in schema_info:
+                # Handle different column name formats from different databases
+                if isinstance(idx["column_names"], list):
+                    columns = idx["column_names"]
+                else:
+                    # MySQL returns comma-separated string
+                    columns = (
+                        idx["column_names"].split(",") if idx["column_names"] else []
+                    )
+
+                schema_info[full_name]["indexes"].append(
+                    {
+                        "name": idx["index_name"],
+                        "columns": columns,
+                        "unique": idx["is_unique"],
+                        "type": idx.get("index_type"),
+                    }
+                )
+
     async def list_tables(self) -> dict[str, Any]:
         """Get a list of all tables with basic information."""
         tables = await self.introspector.list_tables_info(self.db)
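Each introspector returns rows keyed by `table_schema`, `table_name`, `index_name`, `is_unique`, `index_type`, and `column_names`; `_add_indexes_to_schema` then folds them into the per-table `indexes` list, splitting MySQL's comma-joined column string. A small sketch of that normalization with made-up rows:

```python
# Made-up rows in the two shapes the introspectors return.
pg_row = {
    "table_schema": "public", "table_name": "users", "index_name": "users_email_key",
    "is_unique": True, "index_type": "btree", "column_names": ["email"],
}
mysql_row = {
    "table_schema": "app", "table_name": "orders", "index_name": "idx_user_created",
    "is_unique": False, "index_type": "BTREE", "column_names": "user_id,created_at",
}


def to_index_entry(row: dict) -> dict:
    # Mirrors the list-vs-comma-separated handling in _add_indexes_to_schema.
    cols = row["column_names"]
    columns = cols if isinstance(cols, list) else (cols.split(",") if cols else [])
    return {
        "name": row["index_name"],
        "columns": columns,
        "unique": row["is_unique"],
        "type": row.get("index_type"),
    }


print(to_index_entry(pg_row))     # columns -> ['email']
print(to_index_entry(mysql_row))  # columns -> ['user_id', 'created_at']
```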
src/sqlsaber/tools/sql_tools.py:

@@ -138,6 +138,12 @@ class IntrospectSchemaTool(SQLTool):
                     f"{fk['column']} -> {fk['references']['table']}.{fk['references']['column']}"
                     for fk in table_info["foreign_keys"]
                 ],
+                "indexes": [
+                    f"{idx['name']} ({', '.join(idx['columns'])})"
+                    + (" UNIQUE" if idx["unique"] else "")
+                    + (f" [{idx['type']}]" if idx["type"] else "")
+                    for idx in table_info["indexes"]
+                ],
             }
 
         return json.dumps(formatted_info)
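Given the formatting expression above, each index is surfaced in the `introspect_schema` JSON as one human-readable string. With an illustrative entry (not taken from the package), the result looks like this:

```python
# Illustrative IndexInfo entry.
idx = {"name": "idx_user_created", "columns": ["user_id", "created_at"],
       "unique": True, "type": "btree"}

formatted = (
    f"{idx['name']} ({', '.join(idx['columns'])})"
    + (" UNIQUE" if idx["unique"] else "")
    + (f" [{idx['type']}]" if idx["type"] else "")
)
print(formatted)  # idx_user_created (user_id, created_at) UNIQUE [btree]
```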
sqlsaber-0.22.0/sqlsaber.gif: DELETED (binary file)