sqlsaber 0.33.0__py3-none-any.whl → 0.35.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of sqlsaber might be problematic.
- sqlsaber/application/auth_setup.py +83 -11
- sqlsaber/cli/auth.py +3 -1
- sqlsaber/cli/display.py +19 -3
- sqlsaber/cli/interactive.py +4 -2
- sqlsaber/config/api_keys.py +23 -0
- sqlsaber/config/auth.py +6 -0
- sqlsaber/config/logging.py +0 -1
- sqlsaber/database/base.py +2 -0
- sqlsaber/database/duckdb.py +41 -26
- sqlsaber/database/mysql.py +7 -3
- sqlsaber/database/postgresql.py +70 -14
- sqlsaber/database/schema.py +3 -0
- sqlsaber/database/sqlite.py +18 -5
- sqlsaber/tools/sql_tools.py +32 -21
- {sqlsaber-0.33.0.dist-info → sqlsaber-0.35.0.dist-info}/METADATA +1 -1
- {sqlsaber-0.33.0.dist-info → sqlsaber-0.35.0.dist-info}/RECORD +19 -19
- {sqlsaber-0.33.0.dist-info → sqlsaber-0.35.0.dist-info}/WHEEL +0 -0
- {sqlsaber-0.33.0.dist-info → sqlsaber-0.35.0.dist-info}/entry_points.txt +0 -0
- {sqlsaber-0.33.0.dist-info → sqlsaber-0.35.0.dist-info}/licenses/LICENSE +0 -0
sqlsaber/application/auth_setup.py
CHANGED

@@ -1,6 +1,7 @@
 """Shared auth setup logic for onboarding and CLI."""

 import asyncio
+import os

 from questionary import Choice

@@ -9,6 +10,7 @@ from sqlsaber.config import providers
 from sqlsaber.config.api_keys import APIKeyManager
 from sqlsaber.config.auth import AuthConfigManager, AuthMethod
 from sqlsaber.config.oauth_flow import AnthropicOAuthFlow
+from sqlsaber.config.oauth_tokens import OAuthTokenManager
 from sqlsaber.theme.manager import create_console

 console = create_console()
@@ -102,24 +104,49 @@ async def setup_auth(
     Returns:
         Tuple of (success: bool, provider: str | None)
     """
-
-    if auth_manager.has_auth_configured():
-        console.print("[success]✓ Authentication already configured![/success]")
-        return True, None
+    oauth_manager = OAuthTokenManager()

-    # Select provider
     provider = await select_provider(prompter, default=default_provider)

     if provider is None:
         return False, None

+    env_var = api_key_manager.get_env_var_name(provider)
+    api_key_in_env = bool(os.getenv(env_var))
+    api_key_in_keyring = api_key_manager.has_stored_api_key(provider)
+    has_oauth = (
+        oauth_manager.has_oauth_token("anthropic")
+        if provider == "anthropic" and allow_oauth
+        else False
+    )
+
+    if api_key_in_env or api_key_in_keyring or has_oauth:
+        parts: list[str] = []
+        if api_key_in_keyring:
+            parts.append("stored API key")
+        if api_key_in_env:
+            parts.append(f"{env_var} environment variable")
+        if has_oauth:
+            parts.append("OAuth token")
+        summary = ", ".join(parts)
+        console.print(
+            f"[info]Existing authentication found for {provider}: {summary}[/info]"
+        )
+
     # For Anthropic, offer OAuth or API key
     if provider == "anthropic" and allow_oauth:
+        api_key_label = "API Key"
+        if api_key_in_keyring or api_key_in_env:
+            api_key_label += " [configured]"
+        oauth_label = "Claude Pro/Max (OAuth)"
+        if has_oauth:
+            oauth_label += " [configured]"
+
         method_choice = await prompter.select(
             "Authentication method:",
             choices=[
-                Choice(
-                Choice(
+                Choice(api_key_label, value=AuthMethod.API_KEY),
+                Choice(oauth_label, value=AuthMethod.CLAUDE_PRO),
             ],
         )

@@ -127,6 +154,28 @@ async def setup_auth(
         return False, None

     if method_choice == AuthMethod.CLAUDE_PRO:
+        if has_oauth:
+            reset = await prompter.confirm(
+                "Anthropic OAuth is already configured. Reset before continuing?",
+                default=False,
+            )
+            if not reset:
+                console.print(
+                    "[warning]No changes made to Anthropic OAuth credentials.[/warning]"
+                )
+                return True, None
+
+            removal_success = oauth_manager.remove_oauth_token("anthropic")
+            if not removal_success:
+                console.print(
+                    "[error]Failed to remove existing Anthropic OAuth credentials.[/error]"
+                )
+                return False, None
+
+            current_method = auth_manager.get_auth_method()
+            if current_method == AuthMethod.CLAUDE_PRO:
+                auth_manager.clear_auth_method()
+
         console.print()
         oauth_success = await configure_oauth_anthropic(
             auth_manager, run_in_thread=run_oauth_in_thread
@@ -136,12 +185,35 @@ async def setup_auth(
                 "[green]✓ Anthropic OAuth configured successfully![/green]"
             )
             return True, provider
-
-
-
+
+        console.print("[error]✗ Anthropic OAuth setup failed.[/error]")
+        return False, None

     # API key flow
-
+    if api_key_in_keyring:
+        reset_api_key = await prompter.confirm(
+            f"{provider.title()} API key is stored in your keyring. Reset before continuing?",
+            default=False,
+        )
+        if not reset_api_key:
+            console.print(
+                "[warning]No changes made to stored API key credentials.[/warning]"
+            )
+            return True, None
+        if not api_key_manager.delete_api_key(provider):
+            console.print(
+                "[error]Failed to remove existing API key credentials.[/error]"
+            )
+            return False, None
+        console.print(
+            f"[muted]{provider.title()} API key removed from keyring.[/muted]"
+        )
+        api_key_in_keyring = False
+
+    if api_key_in_env:
+        console.print(
+            f"[muted]{env_var} is set in your environment. Update it there if you need a new value.[/muted]"
+        )

     console.print()
     console.print(f"[dim]To use {provider.title()}, you need an API key.[/dim]")
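Taken together, the auth_setup.py changes replace the old early return on has_auth_configured() with an up-front inventory of credential sources (keyring entry, environment variable, OAuth token) that is echoed to the user and reflected in the "[configured]" choice labels. A minimal sketch of that detection step, using a hypothetical free-standing helper instead of SQLSaber's manager classes (the ANTHROPIC_API_KEY name is an assumption for illustration):

import os

def summarize_existing_auth(env_var: str, key_in_keyring: bool, has_oauth: bool) -> str | None:
    # Mirror the new pre-check: list whichever credential sources are already present.
    parts: list[str] = []
    if key_in_keyring:
        parts.append("stored API key")
    if os.getenv(env_var):
        parts.append(f"{env_var} environment variable")
    if has_oauth:
        parts.append("OAuth token")
    return ", ".join(parts) or None

# Hypothetical usage:
# summarize_existing_auth("ANTHROPIC_API_KEY", key_in_keyring=True, has_oauth=False)
# -> "stored API key" (plus an env-var entry if ANTHROPIC_API_KEY happens to be set)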
sqlsaber/cli/auth.py
CHANGED

@@ -167,7 +167,9 @@ def reset():
             pass
         except Exception as e:
             console.print(f"Warning: Could not remove API key: {e}", style="warning")
-            logger.warning(
+            logger.warning(
+                "auth.reset.api_key_remove_failed", provider=provider, error=str(e)
+            )

     # Optionally clear global auth method if removing Anthropic OAuth configuration
     if provider == "anthropic" and oauth_present:
sqlsaber/cli/display.py
CHANGED

@@ -406,9 +406,17 @@ class DisplayManager:
         for table_name, table_info in data.items():
             self.console.print(f"\n[heading]Table: {table_name}[/heading]")

+            table_comment = table_info.get("comment")
+            if table_comment:
+                self.console.print(f"[muted]Comment: {table_comment}[/muted]")
+
             # Show columns
             table_columns = table_info.get("columns", {})
             if table_columns:
+                include_column_comments = any(
+                    col_info.get("comment") for col_info in table_columns.values()
+                )
+
                 # Create a table for columns
                 columns = [
                     {"name": "Column Name", "style": "column.name"},
@@ -416,6 +424,8 @@ class DisplayManager:
                     {"name": "Nullable", "style": "info"},
                     {"name": "Default", "style": "muted"},
                 ]
+                if include_column_comments:
+                    columns.append({"name": "Comment", "style": "muted"})
                 col_table = self._create_table(columns, title="Columns")

                 for col_name, col_info in table_columns.items():
@@ -425,9 +435,15 @@ class DisplayManager:
                         if col_info.get("default")
                         else ""
                     )
-
-                        col_name,
-
+                    row = [
+                        col_name,
+                        col_info.get("type", ""),
+                        nullable,
+                        default,
+                    ]
+                    if include_column_comments:
+                        row.append(col_info.get("comment") or "")
+                    col_table.add_row(*row)

                 self.console.print(col_table)

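The display change only adds a Comment column when at least one column actually carries a comment, so uncommented schemas render exactly as before. A standalone sketch of the same idea using the plain rich API rather than SQLSaber's _create_table helper (the table data is invented):

from rich.console import Console
from rich.table import Table

def render_columns(table_columns: dict[str, dict]) -> None:
    # Only show the Comment column when at least one column has a comment.
    include_comments = any(info.get("comment") for info in table_columns.values())

    table = Table(title="Columns")
    for header in ("Column Name", "Type", "Nullable", "Default"):
        table.add_column(header)
    if include_comments:
        table.add_column("Comment")

    for name, info in table_columns.items():
        row = [name, info.get("type", ""), str(info.get("nullable", "")), str(info.get("default") or "")]
        if include_comments:
            row.append(info.get("comment") or "")
        table.add_row(*row)

    Console().print(table)

render_columns({"email": {"type": "text", "nullable": False, "comment": "primary contact"}})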
sqlsaber/cli/interactive.py
CHANGED

@@ -20,6 +20,7 @@ from sqlsaber.cli.completers import (
 )
 from sqlsaber.cli.display import DisplayManager
 from sqlsaber.cli.streaming import StreamingQueryHandler
+from sqlsaber.config.logging import get_logger
 from sqlsaber.database import (
     CSVConnection,
     DuckDBConnection,
@@ -30,7 +31,6 @@ from sqlsaber.database import (
 from sqlsaber.database.schema import SchemaManager
 from sqlsaber.theme.manager import get_theme_manager
 from sqlsaber.threads import ThreadStorage
-from sqlsaber.config.logging import get_logger

 if TYPE_CHECKING:
     from sqlsaber.agents.pydantic_ai_agent import SQLSaberAgent
@@ -309,6 +309,8 @@ class InteractiveSession:
                     style=self.tm.pt_style(),
                 )

+                user_query = user_query.strip()
+
                 if not user_query:
                     continue

@@ -325,7 +327,7 @@ class InteractiveSession:

                 # Handle memory addition
                 if user_query.strip().startswith("#"):
-                    await self._handle_memory(user_query
+                    await self._handle_memory(user_query[1:].strip())
                     continue

                 # Execute query with cancellation support
sqlsaber/config/api_keys.py
CHANGED

@@ -41,6 +41,29 @@ class APIKeyManager:
         # 3. Prompt user for API key
         return self._prompt_and_store_key(provider, env_var_name, service_name)

+    def has_stored_api_key(self, provider: str) -> bool:
+        """Check if an API key is stored for the provider."""
+        service_name = self._get_service_name(provider)
+        try:
+            return keyring.get_password(service_name, provider) is not None
+        except Exception:
+            return False
+
+    def delete_api_key(self, provider: str) -> bool:
+        """Remove stored API key for the provider."""
+        service_name = self._get_service_name(provider)
+        try:
+            keyring.delete_password(service_name, provider)
+            return True
+        except keyring.errors.PasswordDeleteError:
+            return True
+        except Exception as e:
+            console.print(
+                f"Warning: Could not remove API key: {e}",
+                style="warning",
+            )
+            return False
+
     def get_env_var_name(self, provider: str) -> str:
         """Get the expected environment variable name for a provider."""
         # Normalize aliases to canonical provider keys
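delete_api_key treats keyring.errors.PasswordDeleteError (raised when there is nothing to delete) as success, so re-running a reset against an already-clean keyring stays idempotent. A small self-contained sketch of that pattern against the real keyring API (the service and user names are made up):

import keyring
from keyring.errors import PasswordDeleteError

def remove_secret(service: str, username: str) -> bool:
    # Deleting a secret that does not exist counts as already being in the desired state.
    try:
        keyring.delete_password(service, username)
        return True
    except PasswordDeleteError:
        return True
    except Exception:
        return False

# Hypothetical usage: remove_secret("sqlsaber-example", "anthropic")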
sqlsaber/config/auth.py
CHANGED

@@ -81,6 +81,12 @@ class AuthConfigManager:
         config["auth_method"] = auth_method.value
         self._save_config(config)

+    def clear_auth_method(self) -> None:
+        """Clear any configured authentication method."""
+        config = self._load_config()
+        config["auth_method"] = None
+        self._save_config(config)
+
     def has_auth_configured(self) -> bool:
         """Check if authentication method is configured."""
         return self.get_auth_method() is not None
sqlsaber/config/logging.py
CHANGED
sqlsaber/database/base.py
CHANGED

@@ -24,6 +24,7 @@ class ColumnInfo(TypedDict):
     max_length: int | None
     precision: int | None
     scale: int | None
+    comment: str | None


 class ForeignKeyInfo(TypedDict):
@@ -48,6 +49,7 @@ class SchemaInfo(TypedDict):
     schema: str
     name: str
     type: str
+    comment: str | None
     columns: dict[str, ColumnInfo]
     primary_keys: list[str]
     foreign_keys: list[ForeignKeyInfo]
sqlsaber/database/duckdb.py
CHANGED

@@ -132,29 +132,35 @@ class DuckDBSchemaIntrospector(BaseSchemaIntrospector):
     ) -> list[dict[str, Any]]:
         """Get tables information for DuckDB."""
         where_conditions = [
-            "table_schema NOT IN ('information_schema', 'pg_catalog', 'duckdb_catalog')"
+            "t.table_schema NOT IN ('information_schema', 'pg_catalog', 'duckdb_catalog')"
         ]
         params: list[Any] = []

         if table_pattern:
             if "." in table_pattern:
                 schema_pattern, table_name_pattern = table_pattern.split(".", 1)
-                where_conditions.append(
+                where_conditions.append(
+                    "(t.table_schema LIKE ? AND t.table_name LIKE ?)"
+                )
                 params.extend([schema_pattern, table_name_pattern])
             else:
                 where_conditions.append(
-                    "(table_name LIKE ? OR table_schema || '.' || table_name LIKE ?)"
+                    "(t.table_name LIKE ? OR t.table_schema || '.' || t.table_name LIKE ?)"
                 )
                 params.extend([table_pattern, table_pattern])

         query = f"""
             SELECT
-                table_schema,
-                table_name,
-                table_type
-
+                t.table_schema,
+                t.table_name,
+                t.table_type,
+                dt.comment AS table_comment
+            FROM information_schema.tables t
+            LEFT JOIN duckdb_tables() dt
+                ON t.table_schema = dt.schema_name
+                AND t.table_name = dt.table_name
             WHERE {" AND ".join(where_conditions)}
-            ORDER BY table_schema, table_name;
+            ORDER BY t.table_schema, t.table_name;
         """

         return await self._execute_query(connection, query, tuple(params))
@@ -166,7 +172,7 @@ class DuckDBSchemaIntrospector(BaseSchemaIntrospector):

         table_filters = []
         for table in tables:
-            table_filters.append("(table_schema = ? AND table_name = ?)")
+            table_filters.append("(c.table_schema = ? AND c.table_name = ?)")

         params: list[Any] = []
         for table in tables:
@@ -174,18 +180,23 @@ class DuckDBSchemaIntrospector(BaseSchemaIntrospector):

         query = f"""
             SELECT
-                table_schema,
-                table_name,
-                column_name,
-                data_type,
-                is_nullable,
-                column_default,
-                character_maximum_length,
-                numeric_precision,
-                numeric_scale
-
+                c.table_schema,
+                c.table_name,
+                c.column_name,
+                c.data_type,
+                c.is_nullable,
+                c.column_default,
+                c.character_maximum_length,
+                c.numeric_precision,
+                c.numeric_scale,
+                dc.comment AS column_comment
+            FROM information_schema.columns c
+            LEFT JOIN duckdb_columns() dc
+                ON c.table_schema = dc.schema_name
+                AND c.table_name = dc.table_name
+                AND c.column_name = dc.column_name
             WHERE {" OR ".join(table_filters)}
-            ORDER BY table_schema, table_name, ordinal_position;
+            ORDER BY c.table_schema, c.table_name, c.ordinal_position;
         """

         return await self._execute_query(connection, query, tuple(params))
@@ -307,12 +318,16 @@ class DuckDBSchemaIntrospector(BaseSchemaIntrospector):
         """Get list of tables with basic information for DuckDB."""
         query = """
             SELECT
-                table_schema,
-                table_name,
-                table_type
-
-
-
+                t.table_schema,
+                t.table_name,
+                t.table_type,
+                dt.comment AS table_comment
+            FROM information_schema.tables t
+            LEFT JOIN duckdb_tables() dt
+                ON t.table_schema = dt.schema_name
+                AND t.table_name = dt.table_name
+            WHERE t.table_schema NOT IN ('information_schema', 'pg_catalog', 'duckdb_catalog')
+            ORDER BY t.table_schema, t.table_name;
         """

         return await self._execute_query(connection, query)
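The rewritten DuckDB queries pull comments from the duckdb_tables() and duckdb_columns() table functions, since information_schema alone does not expose them. A quick way to see where those values come from, assuming a DuckDB build recent enough to support COMMENT ON (the table name and comment text are invented):

import duckdb

con = duckdb.connect()  # in-memory database
con.execute("CREATE TABLE users (id INTEGER, email TEXT)")
con.execute("COMMENT ON TABLE users IS 'Registered accounts'")
con.execute("COMMENT ON COLUMN users.email IS 'Primary contact address'")

print(con.execute("SELECT table_name, comment FROM duckdb_tables()").fetchall())
print(
    con.execute(
        "SELECT column_name, comment FROM duckdb_columns() WHERE table_name = 'users'"
    ).fetchall()
)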
sqlsaber/database/mysql.py
CHANGED

@@ -202,7 +202,8 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
             SELECT
                 table_schema,
                 table_name,
-                table_type
+                table_type,
+                table_comment
             FROM information_schema.tables
             WHERE {" AND ".join(where_conditions)}
             ORDER BY table_schema, table_name;
@@ -230,7 +231,8 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
                 c.column_default,
                 c.character_maximum_length,
                 c.numeric_precision,
-                c.numeric_scale
+                c.numeric_scale,
+                c.column_comment
             FROM information_schema.columns c
             WHERE (c.table_schema, c.table_name) IN ({placeholders})
             ORDER BY c.table_schema, c.table_name, c.ordinal_position;
@@ -331,7 +333,8 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
             SELECT
                 t.table_schema,
                 t.table_name,
-                t.table_type
+                t.table_type,
+                t.table_comment
             FROM information_schema.tables t
             WHERE t.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')
             ORDER BY t.table_schema, t.table_name;
@@ -345,6 +348,7 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
                 "table_schema": row["table_schema"],
                 "table_name": row["table_name"],
                 "table_type": row["table_type"],
+                "table_comment": row["table_comment"],
             }
             for row in rows
         ]
sqlsaber/database/postgresql.py
CHANGED

@@ -135,6 +135,35 @@ class PostgreSQLConnection(BaseDatabaseConnection):
 class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
     """PostgreSQL-specific schema introspection."""

+    def _get_excluded_schemas(self) -> list[str]:
+        """Return schemas to exclude during introspection.
+
+        Defaults include PostgreSQL system schemas and TimescaleDB internal
+        partitions schema. Additional schemas can be excluded by setting the
+        environment variable `SQLSABER_PG_EXCLUDE_SCHEMAS` to a comma-separated
+        list of schema names.
+        """
+        import os
+
+        # Base exclusions: system schemas and TimescaleDB internal partitions
+        excluded = [
+            "pg_catalog",
+            "information_schema",
+            "_timescaledb_internal",
+            "_timescaledb_cache",
+            "_timescaledb_config",
+            "_timescaledb_catalog",
+        ]
+
+        extra = os.getenv("SQLSABER_PG_EXCLUDE_SCHEMAS", "")
+        if extra:
+            for item in extra.split(","):
+                name = item.strip()
+                if name and name not in excluded:
+                    excluded.append(name)
+
+        return excluded
+
     def _build_table_filter_clause(self, tables: list) -> tuple[str, list]:
         """Build VALUES clause with bind parameters for table filtering.

@@ -160,23 +189,35 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
         """Get tables information for PostgreSQL."""
         pool = await connection.get_pool()
         async with pool.acquire() as conn:
-            # Build WHERE clause for filtering
-            where_conditions = [
-
-
-
+            # Build WHERE clause for filtering with bind params
+            where_conditions: list[str] = []
+            params: list[Any] = []
+
+            excluded = self._get_excluded_schemas()
+            if excluded:
+                placeholders = ", ".join(f"${i + 1}" for i in range(len(excluded)))
+                where_conditions.append(f"table_schema NOT IN ({placeholders})")
+                params.extend(excluded)
+            else:
+                # Fallback safety
+                where_conditions.append(
+                    "table_schema NOT IN ('pg_catalog', 'information_schema')"
+                )

             if table_pattern:
                 # Support patterns like 'schema.table' or just 'table'
                 if "." in table_pattern:
                     schema_pattern, table_name_pattern = table_pattern.split(".", 1)
+                    s_idx = len(params) + 1
+                    t_idx = len(params) + 2
                     where_conditions.append(
-                        "(table_schema LIKE $
+                        f"(table_schema LIKE ${s_idx} AND table_name LIKE ${t_idx})"
                     )
                     params.extend([schema_pattern, table_name_pattern])
                 else:
+                    p_idx = len(params) + 1
                     where_conditions.append(
-                        "(table_name LIKE $
+                        f"(table_name LIKE ${p_idx} OR table_schema || '.' || table_name LIKE ${p_idx})"
                     )
                     params.append(table_pattern)

@@ -185,7 +226,8 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
             SELECT
                 table_schema,
                 table_name,
-                table_type
+                table_type,
+                obj_description(('"' || table_schema || '"."' || table_name || '"')::regclass, 'pg_class') AS table_comment
             FROM information_schema.tables
             WHERE {" AND ".join(where_conditions)}
             ORDER BY table_schema, table_name;
@@ -211,7 +253,8 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
                 c.column_default,
                 c.character_maximum_length,
                 c.numeric_precision,
-                c.numeric_scale
+                c.numeric_scale,
+                col_description(('"' || c.table_schema || '"."' || c.table_name || '"')::regclass::oid, c.ordinal_position::INT) AS column_comment
             FROM information_schema.columns c
             WHERE (c.table_schema, c.table_name) IN (VALUES {values_clause})
             ORDER BY c.table_schema, c.table_name, c.ordinal_position;
@@ -310,17 +353,29 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
         """Get list of tables with basic information for PostgreSQL."""
         pool = await connection.get_pool()
         async with pool.acquire() as conn:
-            #
-
+            # Exclude system schemas (and TimescaleDB internals) for performance
+            excluded = self._get_excluded_schemas()
+            params: list[Any] = []
+            if excluded:
+                placeholders = ", ".join(f"${i + 1}" for i in range(len(excluded)))
+                where_clause = f"table_schema NOT IN ({placeholders})"
+                params.extend(excluded)
+            else:
+                where_clause = (
+                    "table_schema NOT IN ('pg_catalog', 'information_schema')"
+                )
+
+            tables_query = f"""
                 SELECT
                     table_schema,
                     table_name,
-                    table_type
+                    table_type,
+                    obj_description(('"' || table_schema || '"."' || table_name || '"')::regclass, 'pg_class') AS table_comment
                 FROM information_schema.tables
-                WHERE
+                WHERE {where_clause}
                 ORDER BY table_schema, table_name;
             """
-            tables = await conn.fetch(tables_query)
+            tables = await conn.fetch(tables_query, *params)

             # Convert to expected format
             return [
@@ -328,6 +383,7 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
                 "table_schema": table["table_schema"],
                 "table_name": table["table_name"],
                 "table_type": table["table_type"],
+                "table_comment": table["table_comment"],
             }
             for table in tables
         ]
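Two things stand out in the PostgreSQL changes: comments are fetched with the standard obj_description and col_description catalog functions, and the schema exclusion list can be extended at runtime via SQLSABER_PG_EXCLUDE_SCHEMAS (a comma-separated list, for example a hypothetical audit schema). A standalone sketch of the same table-comment query using asyncpg directly, with the $n bind-parameter style the introspector uses (the DSN is a placeholder and the excluded list must be non-empty here):

import asyncio
import asyncpg

async def fetch_table_comments(dsn: str, excluded: list[str]) -> list[asyncpg.Record]:
    # Build $1..$n placeholders for the excluded schemas, as the new introspector does.
    placeholders = ", ".join(f"${i + 1}" for i in range(len(excluded)))
    query = f"""
        SELECT
            table_schema,
            table_name,
            obj_description(('"' || table_schema || '"."' || table_name || '"')::regclass, 'pg_class') AS table_comment
        FROM information_schema.tables
        WHERE table_schema NOT IN ({placeholders})
        ORDER BY table_schema, table_name;
    """
    conn = await asyncpg.connect(dsn)
    try:
        return await conn.fetch(query, *excluded)
    finally:
        await conn.close()

# Hypothetical usage:
# asyncio.run(fetch_table_comments("postgresql://localhost/mydb", ["pg_catalog", "information_schema"]))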
sqlsaber/database/schema.py
CHANGED

@@ -72,6 +72,7 @@ class SchemaManager:
                 "schema": schema_name,
                 "name": table_name,
                 "type": table["table_type"],
+                "comment": table["table_comment"],
                 "columns": {},
                 "primary_keys": [],
                 "foreign_keys": [],
@@ -85,6 +86,7 @@ class SchemaManager:
         for col in columns:
             full_name = f"{col['table_schema']}.{col['table_name']}"
             if full_name in schema_info:
+                # Handle different row types (dict vs Row objects)
                 column_info: ColumnInfo = {
                     "data_type": col["data_type"],
                     "nullable": col.get("is_nullable", "YES") == "YES",
@@ -92,6 +94,7 @@ class SchemaManager:
                     "max_length": col.get("character_maximum_length"),
                     "precision": col.get("numeric_precision"),
                     "scale": col.get("numeric_scale"),
+                    "comment": col.get("column_comment"),
                 }
                 # Add type field for display compatibility
                 column_info["type"] = col["data_type"]
sqlsaber/database/sqlite.py
CHANGED

@@ -93,7 +93,10 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
     async def get_tables_info(
         self, connection, table_pattern: str | None = None
     ) -> dict[str, Any]:
-        """Get tables information for SQLite.
+        """Get tables information for SQLite.
+
+        Note: SQLite does not support native table comments, so table_comment is always None.
+        """
         where_conditions = ["type IN ('table', 'view')", "name NOT LIKE 'sqlite_%'"]
         params = ()

@@ -105,7 +108,8 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
             SELECT
                 'main' as table_schema,
                 name as table_name,
-                type as table_type
+                type as table_type,
+                NULL as table_comment
             FROM sqlite_master
             WHERE {" AND ".join(where_conditions)}
             ORDER BY name;
@@ -114,7 +118,10 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
         return await self._execute_query(connection, query, params)

     async def get_columns_info(self, connection, tables: list) -> list:
-        """Get columns information for SQLite.
+        """Get columns information for SQLite.
+
+        Note: SQLite does not support native column comments, so column_comment is always None.
+        """
         if not tables:
             return []

@@ -138,6 +145,7 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
                     "character_maximum_length": None,
                     "numeric_precision": None,
                     "numeric_scale": None,
+                    "column_comment": None,
                 }
             )

@@ -237,13 +245,17 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
         return indexes

     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
-        """Get list of tables with basic information for SQLite.
+        """Get list of tables with basic information for SQLite.
+
+        Note: SQLite does not support native table comments, so table_comment is always None.
+        """
         # Get table names without row counts for better performance
         tables_query = """
             SELECT
                 'main' as table_schema,
                 name as table_name,
-                type as table_type
+                type as table_type,
+                NULL as table_comment
             FROM sqlite_master
             WHERE type IN ('table', 'view')
             AND name NOT LIKE 'sqlite_%'
@@ -258,6 +270,7 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
                 "table_schema": table["table_schema"],
                 "table_name": table["table_name"],
                 "table_type": table["table_type"],
+                "table_comment": table["table_comment"],
             }
             for table in tables
         ]
sqlsaber/tools/sql_tools.py
CHANGED

@@ -95,27 +95,38 @@ class IntrospectSchemaTool(SQLTool):
             # Format the schema information
             formatted_info = {}
             for table_name, table_info in schema_info.items():
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    "
-
-
-
-
-
-
+                table_data = {}
+
+                # Add table comment if present
+                if table_info.get("comment"):
+                    table_data["comment"] = table_info["comment"]
+
+                # Add columns with comments if present
+                table_data["columns"] = {}
+                for col_name, col_info in table_info["columns"].items():
+                    column_data = {
+                        "type": col_info["data_type"],
+                        "nullable": col_info["nullable"],
+                        "default": col_info["default"],
+                    }
+                    if col_info.get("comment"):
+                        column_data["comment"] = col_info["comment"]
+                    table_data["columns"][col_name] = column_data
+
+                # Add other schema information
+                table_data["primary_keys"] = table_info["primary_keys"]
+                table_data["foreign_keys"] = [
+                    f"{fk['column']} -> {fk['references']['table']}.{fk['references']['column']}"
+                    for fk in table_info["foreign_keys"]
+                ]
+                table_data["indexes"] = [
+                    f"{idx['name']} ({', '.join(idx['columns'])})"
+                    + (" UNIQUE" if idx["unique"] else "")
+                    + (f" [{idx['type']}]" if idx["type"] else "")
+                    for idx in table_info["indexes"]
+                ]
+
+                formatted_info[table_name] = table_data

             return json.dumps(formatted_info)
         except Exception as e:
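After this change the tool's JSON keeps its old layout but only emits comment keys when a comment is present, and it flattens foreign keys and indexes into strings. Roughly, the serialized entry for a hypothetical orders table would look like this (all names and comments invented):

# Illustrative output shape only.
{
    "public.orders": {
        "comment": "Customer orders",
        "columns": {
            "id": {"type": "integer", "nullable": False, "default": None},
            "customer_id": {
                "type": "integer",
                "nullable": False,
                "default": None,
                "comment": "FK to public.customers",
            },
        },
        "primary_keys": ["id"],
        "foreign_keys": ["customer_id -> public.customers.id"],
        "indexes": ["orders_customer_idx (customer_id) [btree]"],
    }
}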
{sqlsaber-0.33.0.dist-info → sqlsaber-0.35.0.dist-info}/RECORD
CHANGED

@@ -4,17 +4,17 @@ sqlsaber/agents/__init__.py,sha256=qYI6rLY4q5AbF47vXH5RVoM08-yQjymBSaePh4lFIW4,1
 sqlsaber/agents/base.py,sha256=T05UsMZPwAlMhsJFpuuVI1RNDhdiwiEsgCWr9MbPoAU,2654
 sqlsaber/agents/pydantic_ai_agent.py,sha256=6_KppII8YcMw74KOGsYI5Dt6AP8WSduK3yAXCawVex4,10643
 sqlsaber/application/__init__.py,sha256=KY_-d5nEdQyAwNOsK5r-f7Tb69c63XbuEkHPeLpJal8,84
-sqlsaber/application/auth_setup.py,sha256=
+sqlsaber/application/auth_setup.py,sha256=wbi9MaYl6q27LjcSBZmqFC12JtE5hrUHEX1NmD-7UVc,7778
 sqlsaber/application/db_setup.py,sha256=ZSgR9rJJVHttIjsbYQS9GEIyzkM09k5RLrVGdegrfYc,6859
 sqlsaber/application/model_selection.py,sha256=fSC06MZNKinHDR-csMFVYYJFyK8MydKf6pStof74Jp0,3191
 sqlsaber/application/prompts.py,sha256=4rMGcWpYJbNWPMzqVWseUMx0nwvXOkWS6GaTAJ5mhfc,3473
 sqlsaber/cli/__init__.py,sha256=qVSLVJLLJYzoC6aj6y9MFrzZvAwc4_OgxU9DlkQnZ4M,86
-sqlsaber/cli/auth.py,sha256=
+sqlsaber/cli/auth.py,sha256=TmAD4BJr2gIjPeW2LA1QkKK5gUZ2OOS6vOmBvB6PC0M,7051
 sqlsaber/cli/commands.py,sha256=WocWlLrxA5kM8URfvIvWFtc0ocfgKWAwoYTxVNZhmM4,10962
 sqlsaber/cli/completers.py,sha256=g-hLDq5fiBx7gg8Bte1Lq8GU-ZxCYVs4dcPsmHPIcK4,6574
 sqlsaber/cli/database.py,sha256=BBGj0eyduh5DDXNLZLDtWfY9kWpeT_ZX0J9R9INZyyU,12421
-sqlsaber/cli/display.py,sha256=
-sqlsaber/cli/interactive.py,sha256=
+sqlsaber/cli/display.py,sha256=wxkEFceyyUloDUB49Gzqhl6JGRGq2cQdmJ3XQeYd6ok,18599
+sqlsaber/cli/interactive.py,sha256=bqeQVUgghKeZdsZ31LTcUTNt_jYyc2ujnAlUNRpxF5c,14181
 sqlsaber/cli/memory.py,sha256=kAY5LLFueIF30gJ8ibfrFw42rOyy5wajeJGS4h5XQw4,9475
 sqlsaber/cli/models.py,sha256=aVHazP_fiT-Mj9AtCdjliDtq3E3fJrhgP4oF5p4CuwI,9593
 sqlsaber/cli/onboarding.py,sha256=iBGT-W-OJFRvQoEpuHYyO1c9Mym5c97eIefRvxGHtTg,11265
@@ -22,23 +22,23 @@ sqlsaber/cli/streaming.py,sha256=jicSDLWQ3efitpdc2y4QsasHcEW8ogZ4lHcWmftq9Ao,676
 sqlsaber/cli/theme.py,sha256=D6HIt7rmF00B5ZOCV5lXKzPICE4uppHdraOdVs7k5Nw,4672
 sqlsaber/cli/threads.py,sha256=zYvs1epmRRuQxOofF85eXk1_YHS6co7oq_F33DdNdf0,14643
 sqlsaber/config/__init__.py,sha256=olwC45k8Nc61yK0WmPUk7XHdbsZH9HuUAbwnmKe3IgA,100
-sqlsaber/config/api_keys.py,sha256=
-sqlsaber/config/auth.py,sha256=
+sqlsaber/config/api_keys.py,sha256=H7xBU1mflngXIlnaDjbTvuNqZsW_92AIve31eDLij34,4513
+sqlsaber/config/auth.py,sha256=G1uulySUclWSId8EIt1hPNmsUNhbfRzfp8VVQftYyG8,2964
 sqlsaber/config/database.py,sha256=Yec6_0wdzq-ADblMNnbgvouYCimYOY_DWHT9oweaISc,11449
-sqlsaber/config/logging.py,sha256=
+sqlsaber/config/logging.py,sha256=vv4oCuQePeYQ7bMs0OLKj8ZSiNcFWbHmWtdC0lTsUyc,6173
 sqlsaber/config/oauth_flow.py,sha256=cDfaJjqr4spNuzxbAlzuJfk6SEe1ojSRAkoOWlvQYy0,11037
 sqlsaber/config/oauth_tokens.py,sha256=KCC2u3lOjdh0M-rd0K1rW0PWk58w7mqpodAhlPVp9NE,6424
 sqlsaber/config/providers.py,sha256=JFjeJv1K5Q93zWSlWq3hAvgch1TlgoF0qFa0KJROkKY,2957
 sqlsaber/config/settings.py,sha256=-nIBNt9E0tCRGd14bk4x-bNAwO12sbsjRsN8fFannK4,6449
 sqlsaber/database/__init__.py,sha256=Gi9N_NOkD459WRWXDg3hSuGoBs3xWbMDRBvsTVmnGAg,2025
-sqlsaber/database/base.py,sha256=
+sqlsaber/database/base.py,sha256=ZxbDuk0W-3edo6aN2EZ65SMQ3Oxiud_jygc4ZWi2CmI,3751
 sqlsaber/database/csv.py,sha256=41wuP40FaGPfj28HMiD0I69uG0JbUxArpoTLC3MG2uc,4464
-sqlsaber/database/duckdb.py,sha256=
-sqlsaber/database/mysql.py,sha256=
-sqlsaber/database/postgresql.py,sha256=
+sqlsaber/database/duckdb.py,sha256=zGa1akNzG1Jreyhq6jm93iFzmPtOC8WcS-7TtQxzxME,11961
+sqlsaber/database/mysql.py,sha256=wZ98wrA9E7hljGAcz5h4zBJdRJ0uPdvTZQmFqQVXDBk,14657
+sqlsaber/database/postgresql.py,sha256=0fyko76pEdn5GxtaW75lwf8OIKnj2NnaY7Z1j_heUNs,15685
 sqlsaber/database/resolver.py,sha256=wSCcn__aCqwIfpt_LCjtW2Zgb8RpG5PlmwwZHli1q_U,3628
-sqlsaber/database/schema.py,sha256=
-sqlsaber/database/sqlite.py,sha256=
+sqlsaber/database/schema.py,sha256=T4dEYOk-Cy5XkhvvQ3GqAU2w584eRivbiu9UZ0nHTBo,7138
+sqlsaber/database/sqlite.py,sha256=juEVIhSwbeT0gMkEhkjwNUJn-Mif_IIDmXVqostNT-o,9918
 sqlsaber/memory/__init__.py,sha256=GiWkU6f6YYVV0EvvXDmFWe_CxarmDCql05t70MkTEWs,63
 sqlsaber/memory/manager.py,sha256=p3fybMVfH-E4ApT1ZRZUnQIWSk9dkfUPCyfkmA0HALs,2739
 sqlsaber/memory/storage.py,sha256=ne8szLlGj5NELheqLnI7zu21V8YS4rtpYGGC7tOmi-s,5745
@@ -54,9 +54,9 @@ sqlsaber/tools/__init__.py,sha256=O6eqkMk8mkhYDniQD1eYgAElOjiHz03I2bGARdgkDkk,42
 sqlsaber/tools/base.py,sha256=NKEEooliPKTJj_Pomwte_wW0Xd9Z5kXNfVdCRfTppuw,883
 sqlsaber/tools/registry.py,sha256=XmBzERq0LJXtg3BZ-r8cEyt8J54NUekgUlTJ_EdSYMk,2204
 sqlsaber/tools/sql_guard.py,sha256=dTDwcZP-N4xPGzcr7MQtKUxKrlDzlc1irr9aH5a4wvk,6182
-sqlsaber/tools/sql_tools.py,sha256=
-sqlsaber-0.
-sqlsaber-0.
-sqlsaber-0.
-sqlsaber-0.
-sqlsaber-0.
+sqlsaber/tools/sql_tools.py,sha256=q479PNneuAlpaDCp2cyw1MFhLUY4vcUHV_ZyIuSMHK0,7796
+sqlsaber-0.35.0.dist-info/METADATA,sha256=2TaG6of-RcfHsYWODoipU909C40Kfq2qaWRE71sI85M,5915
+sqlsaber-0.35.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sqlsaber-0.35.0.dist-info/entry_points.txt,sha256=tw1mB0fjlkXQiOsC0434X6nE-o1cFCuQwt2ZYHv_WAE,91
+sqlsaber-0.35.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+sqlsaber-0.35.0.dist-info/RECORD,,

File without changes
File without changes
File without changes