sqlsaber 0.25.0__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sqlsaber might be problematic.

Files changed (38)
  1. sqlsaber/agents/__init__.py +2 -2
  2. sqlsaber/agents/base.py +1 -1
  3. sqlsaber/agents/mcp.py +1 -1
  4. sqlsaber/agents/pydantic_ai_agent.py +207 -135
  5. sqlsaber/application/__init__.py +1 -0
  6. sqlsaber/application/auth_setup.py +164 -0
  7. sqlsaber/application/db_setup.py +223 -0
  8. sqlsaber/application/model_selection.py +98 -0
  9. sqlsaber/application/prompts.py +115 -0
  10. sqlsaber/cli/auth.py +22 -50
  11. sqlsaber/cli/commands.py +22 -28
  12. sqlsaber/cli/completers.py +2 -0
  13. sqlsaber/cli/database.py +25 -86
  14. sqlsaber/cli/display.py +29 -9
  15. sqlsaber/cli/interactive.py +150 -127
  16. sqlsaber/cli/models.py +18 -28
  17. sqlsaber/cli/onboarding.py +325 -0
  18. sqlsaber/cli/streaming.py +15 -17
  19. sqlsaber/cli/threads.py +10 -6
  20. sqlsaber/config/api_keys.py +2 -2
  21. sqlsaber/config/settings.py +25 -2
  22. sqlsaber/database/__init__.py +55 -1
  23. sqlsaber/database/base.py +124 -0
  24. sqlsaber/database/csv.py +133 -0
  25. sqlsaber/database/duckdb.py +313 -0
  26. sqlsaber/database/mysql.py +345 -0
  27. sqlsaber/database/postgresql.py +328 -0
  28. sqlsaber/database/schema.py +66 -963
  29. sqlsaber/database/sqlite.py +258 -0
  30. sqlsaber/mcp/mcp.py +1 -1
  31. sqlsaber/tools/sql_tools.py +1 -1
  32. {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/METADATA +43 -9
  33. sqlsaber-0.27.0.dist-info/RECORD +58 -0
  34. sqlsaber/database/connection.py +0 -535
  35. sqlsaber-0.25.0.dist-info/RECORD +0 -47
  36. {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/WHEEL +0 -0
  37. {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/entry_points.txt +0 -0
  38. {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/licenses/LICENSE +0 -0
sqlsaber/database/connection.py (deleted)
@@ -1,535 +0,0 @@
-"""Database connection management."""
-
-import asyncio
-import ssl
-from abc import ABC, abstractmethod
-from pathlib import Path
-from typing import Any
-from urllib.parse import parse_qs, urlparse
-
-import aiomysql
-import aiosqlite
-import asyncpg
-import duckdb
-
-# Default query timeout to prevent runaway queries
-DEFAULT_QUERY_TIMEOUT = 30.0  # seconds
-
-
-class QueryTimeoutError(RuntimeError):
-    """Exception raised when a query exceeds its timeout."""
-
-    def __init__(self, seconds: float):
-        self.timeout = seconds
-        super().__init__(f"Query exceeded timeout of {seconds}s")
-
-
-class BaseDatabaseConnection(ABC):
-    """Abstract base class for database connections."""
-
-    def __init__(self, connection_string: str):
-        self.connection_string = connection_string
-        self._pool = None
-
-    @abstractmethod
-    async def get_pool(self):
-        """Get or create connection pool."""
-        pass
-
-    @abstractmethod
-    async def close(self):
-        """Close the connection pool."""
-        pass
-
-    @abstractmethod
-    async def execute_query(
-        self, query: str, *args, timeout: float | None = None
-    ) -> list[dict[str, Any]]:
-        """Execute a query and return results as list of dicts.
-
-        All queries run in a transaction that is rolled back at the end,
-        ensuring no changes are persisted to the database.
-
-        Args:
-            query: SQL query to execute
-            *args: Query parameters
-            timeout: Query timeout in seconds (overrides default_timeout)
-        """
-        pass
-
-
-class PostgreSQLConnection(BaseDatabaseConnection):
-    """PostgreSQL database connection using asyncpg."""
-
-    def __init__(self, connection_string: str):
-        super().__init__(connection_string)
-        self._pool: asyncpg.Pool | None = None
-        self._ssl_context = self._create_ssl_context()
-
-    def _create_ssl_context(self) -> ssl.SSLContext | None:
-        """Create SSL context from connection string parameters."""
-        parsed = urlparse(self.connection_string)
-        if not parsed.query:
-            return None
-
-        params = parse_qs(parsed.query)
-        ssl_mode = params.get("sslmode", [None])[0]
-
-        if not ssl_mode or ssl_mode == "disable":
-            return None
-
-        # Create SSL context based on mode
-        if ssl_mode in ["require", "verify-ca", "verify-full"]:
-            ssl_context = ssl.create_default_context()
-
-            # Configure certificate verification
-            if ssl_mode == "require":
-                ssl_context.check_hostname = False
-                ssl_context.verify_mode = ssl.CERT_NONE
-            elif ssl_mode == "verify-ca":
-                ssl_context.check_hostname = False
-                ssl_context.verify_mode = ssl.CERT_REQUIRED
-            elif ssl_mode == "verify-full":
-                ssl_context.check_hostname = True
-                ssl_context.verify_mode = ssl.CERT_REQUIRED
-
-            # Load certificates if provided
-            ssl_ca = params.get("sslrootcert", [None])[0]
-            ssl_cert = params.get("sslcert", [None])[0]
-            ssl_key = params.get("sslkey", [None])[0]
-
-            if ssl_ca:
-                ssl_context.load_verify_locations(ssl_ca)
-
-            if ssl_cert and ssl_key:
-                ssl_context.load_cert_chain(ssl_cert, ssl_key)
-
-            return ssl_context
-
-        return None
-
-    async def get_pool(self) -> asyncpg.Pool:
-        """Get or create connection pool."""
-        if self._pool is None:
-            # Create pool with SSL context if configured
-            if self._ssl_context:
-                self._pool = await asyncpg.create_pool(
-                    self.connection_string,
-                    min_size=1,
-                    max_size=10,
-                    ssl=self._ssl_context,
-                )
-            else:
-                self._pool = await asyncpg.create_pool(
-                    self.connection_string, min_size=1, max_size=10
-                )
-        return self._pool
-
-    async def close(self):
-        """Close the connection pool."""
-        if self._pool:
-            await self._pool.close()
-            self._pool = None
-
-    async def execute_query(
-        self, query: str, *args, timeout: float | None = None
-    ) -> list[dict[str, Any]]:
-        """Execute a query and return results as list of dicts.
-
-        All queries run in a transaction that is rolled back at the end,
-        ensuring no changes are persisted to the database.
-        """
-        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
-        pool = await self.get_pool()
-
-        async with pool.acquire() as conn:
-            # Start a transaction that we'll always rollback
-            transaction = conn.transaction()
-            await transaction.start()
-
-            try:
-                # Set server-side timeout if specified
-                if effective_timeout:
-                    await conn.execute(
-                        f"SET LOCAL statement_timeout = {int(effective_timeout * 1000)}"
-                    )
-
-                # Execute query with client-side timeout
-                if effective_timeout:
-                    rows = await asyncio.wait_for(
-                        conn.fetch(query, *args), timeout=effective_timeout
-                    )
-                else:
-                    rows = await conn.fetch(query, *args)
-
-                return [dict(row) for row in rows]
-            except asyncio.TimeoutError as exc:
-                raise QueryTimeoutError(effective_timeout or 0) from exc
-            finally:
-                # Always rollback to ensure no changes are committed
-                await transaction.rollback()
-
-
-class MySQLConnection(BaseDatabaseConnection):
-    """MySQL database connection using aiomysql."""
-
-    def __init__(self, connection_string: str):
-        super().__init__(connection_string)
-        self._pool: aiomysql.Pool | None = None
-        self._parse_connection_string()
-
-    def _parse_connection_string(self):
-        """Parse MySQL connection string into components."""
-        parsed = urlparse(self.connection_string)
-        self.host = parsed.hostname or "localhost"
-        self.port = parsed.port or 3306
-        self.database = parsed.path.lstrip("/") if parsed.path else ""
-        self.user = parsed.username or ""
-        self.password = parsed.password or ""
-
-        # Parse SSL parameters
-        self.ssl_params = {}
-        if parsed.query:
-            params = parse_qs(parsed.query)
-
-            ssl_mode = params.get("ssl_mode", [None])[0]
-            if ssl_mode:
-                # Map SSL modes to aiomysql SSL parameters
-                if ssl_mode.upper() == "DISABLED":
-                    self.ssl_params["ssl"] = None
-                elif ssl_mode.upper() in [
-                    "PREFERRED",
-                    "REQUIRED",
-                    "VERIFY_CA",
-                    "VERIFY_IDENTITY",
-                ]:
-                    ssl_context = ssl.create_default_context()
-
-                    if ssl_mode.upper() == "REQUIRED":
-                        ssl_context.check_hostname = False
-                        ssl_context.verify_mode = ssl.CERT_NONE
-                    elif ssl_mode.upper() == "VERIFY_CA":
-                        ssl_context.check_hostname = False
-                        ssl_context.verify_mode = ssl.CERT_REQUIRED
-                    elif ssl_mode.upper() == "VERIFY_IDENTITY":
-                        ssl_context.check_hostname = True
-                        ssl_context.verify_mode = ssl.CERT_REQUIRED
-
-                    # Load certificates if provided
-                    ssl_ca = params.get("ssl_ca", [None])[0]
-                    ssl_cert = params.get("ssl_cert", [None])[0]
-                    ssl_key = params.get("ssl_key", [None])[0]
-
-                    if ssl_ca:
-                        ssl_context.load_verify_locations(ssl_ca)
-
-                    if ssl_cert and ssl_key:
-                        ssl_context.load_cert_chain(ssl_cert, ssl_key)
-
-                    self.ssl_params["ssl"] = ssl_context
-
-    async def get_pool(self) -> aiomysql.Pool:
-        """Get or create connection pool."""
-        if self._pool is None:
-            pool_kwargs = {
-                "host": self.host,
-                "port": self.port,
-                "user": self.user,
-                "password": self.password,
-                "db": self.database,
-                "minsize": 1,
-                "maxsize": 10,
-                "autocommit": False,
-            }
-
-            # Add SSL parameters if configured
-            pool_kwargs.update(self.ssl_params)
-
-            self._pool = await aiomysql.create_pool(**pool_kwargs)
-        return self._pool
-
-    async def close(self):
-        """Close the connection pool."""
-        if self._pool:
-            self._pool.close()
-            await self._pool.wait_closed()
-            self._pool = None
-
-    async def execute_query(
-        self, query: str, *args, timeout: float | None = None
-    ) -> list[dict[str, Any]]:
-        """Execute a query and return results as list of dicts.
-
-        All queries run in a transaction that is rolled back at the end,
-        ensuring no changes are persisted to the database.
-        """
-        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
-        pool = await self.get_pool()
-
-        async with pool.acquire() as conn:
-            async with conn.cursor(aiomysql.DictCursor) as cursor:
-                # Start transaction
-                await conn.begin()
-                try:
-                    # Set server-side timeout if specified
-                    if effective_timeout:
-                        await cursor.execute(
-                            f"SET SESSION MAX_EXECUTION_TIME = {int(effective_timeout * 1000)}"
-                        )
-
-                    # Execute query with client-side timeout
-                    if effective_timeout:
-                        await asyncio.wait_for(
-                            cursor.execute(query, args if args else None),
-                            timeout=effective_timeout,
-                        )
-                        rows = await asyncio.wait_for(
-                            cursor.fetchall(), timeout=effective_timeout
-                        )
-                    else:
-                        await cursor.execute(query, args if args else None)
-                        rows = await cursor.fetchall()
-
-                    return [dict(row) for row in rows]
-                except asyncio.TimeoutError as exc:
-                    raise QueryTimeoutError(effective_timeout or 0) from exc
-                finally:
-                    # Always rollback to ensure no changes are committed
-                    await conn.rollback()
-
-
-class SQLiteConnection(BaseDatabaseConnection):
-    """SQLite database connection using aiosqlite."""
-
-    def __init__(self, connection_string: str):
-        super().__init__(connection_string)
-        # Extract database path from sqlite:///path format
-        self.database_path = connection_string.replace("sqlite:///", "")
-
-    async def get_pool(self):
-        """SQLite doesn't use connection pooling, return database path."""
-        return self.database_path
-
-    async def close(self):
-        """SQLite connections are created per query, no persistent pool to close."""
-        pass
-
-    async def execute_query(
-        self, query: str, *args, timeout: float | None = None
-    ) -> list[dict[str, Any]]:
-        """Execute a query and return results as list of dicts.
-
-        All queries run in a transaction that is rolled back at the end,
-        ensuring no changes are persisted to the database.
-        """
-        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
-
-        async with aiosqlite.connect(self.database_path) as conn:
-            # Enable row factory for dict-like access
-            conn.row_factory = aiosqlite.Row
-
-            # Start transaction
-            await conn.execute("BEGIN")
-            try:
-                # Execute query with client-side timeout (SQLite has no server-side timeout)
-                if effective_timeout:
-                    cursor = await asyncio.wait_for(
-                        conn.execute(query, args if args else ()),
-                        timeout=effective_timeout,
-                    )
-                    rows = await asyncio.wait_for(
-                        cursor.fetchall(), timeout=effective_timeout
-                    )
-                else:
-                    cursor = await conn.execute(query, args if args else ())
-                    rows = await cursor.fetchall()
-
-                return [dict(row) for row in rows]
-            except asyncio.TimeoutError as exc:
-                raise QueryTimeoutError(effective_timeout or 0) from exc
-            finally:
-                # Always rollback to ensure no changes are committed
-                await conn.rollback()
-
-
-def _execute_duckdb_transaction(
-    conn: duckdb.DuckDBPyConnection, query: str, args: tuple[Any, ...]
-) -> list[dict[str, Any]]:
-    """Run a DuckDB query inside a transaction and return list of dicts."""
-    conn.execute("BEGIN TRANSACTION")
-    try:
-        if args:
-            conn.execute(query, args)
-        else:
-            conn.execute(query)
-
-        if conn.description is None:
-            rows: list[dict[str, Any]] = []
-        else:
-            columns = [col[0] for col in conn.description]
-            data = conn.fetchall()
-            rows = [dict(zip(columns, row)) for row in data]
-
-        conn.execute("ROLLBACK")
-        return rows
-    except Exception:
-        conn.execute("ROLLBACK")
-        raise
-
-
-class CSVConnection(BaseDatabaseConnection):
-    """CSV file connection using DuckDB per query."""
-
-    def __init__(self, connection_string: str):
-        super().__init__(connection_string)
-
-        raw_path = connection_string.replace("csv:///", "", 1)
-        self.csv_path = raw_path.split("?", 1)[0]
-
-        self.delimiter = ","
-        self.encoding = "utf-8"
-        self.has_header = True
-
-        parsed = urlparse(connection_string)
-        if parsed.query:
-            params = parse_qs(parsed.query)
-            self.delimiter = params.get("delimiter", [self.delimiter])[0]
-            self.encoding = params.get("encoding", [self.encoding])[0]
-            self.has_header = params.get("header", ["true"])[0].lower() == "true"
-
-        self.table_name = Path(self.csv_path).stem or "csv_table"
-
-    async def get_pool(self):
-        """CSV connections do not maintain a pool."""
-        return None
-
-    async def close(self):
-        """No persistent resources to close for CSV connections."""
-        pass
-
-    def _quote_identifier(self, identifier: str) -> str:
-        escaped = identifier.replace('"', '""')
-        return f'"{escaped}"'
-
-    def _quote_literal(self, value: str) -> str:
-        escaped = value.replace("'", "''")
-        return f"'{escaped}'"
-
-    def _normalized_encoding(self) -> str | None:
-        encoding = (self.encoding or "").strip()
-        if not encoding or encoding.lower() == "utf-8":
-            return None
-        return encoding.replace("-", "").replace("_", "").upper()
-
-    def _create_view(self, conn: duckdb.DuckDBPyConnection) -> None:
-        header_literal = "TRUE" if self.has_header else "FALSE"
-        option_parts = [f"HEADER={header_literal}"]
-
-        if self.delimiter:
-            option_parts.append(f"DELIM={self._quote_literal(self.delimiter)}")
-
-        encoding = self._normalized_encoding()
-        if encoding:
-            option_parts.append(f"ENCODING={self._quote_literal(encoding)}")
-
-        options_sql = ""
-        if option_parts:
-            options_sql = ", " + ", ".join(option_parts)
-
-        base_relation_sql = (
-            f"read_csv_auto({self._quote_literal(self.csv_path)}{options_sql})"
-        )
-
-        create_view_sql = (
-            f"CREATE VIEW {self._quote_identifier(self.table_name)} AS "
-            f"SELECT * FROM {base_relation_sql}"
-        )
-        conn.execute(create_view_sql)
-
-    async def execute_query(
-        self, query: str, *args, timeout: float | None = None
-    ) -> list[dict[str, Any]]:
-        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
-        args_tuple = tuple(args) if args else tuple()
-
-        def _run_query() -> list[dict[str, Any]]:
-            conn = duckdb.connect(":memory:")
-            try:
-                self._create_view(conn)
-                return _execute_duckdb_transaction(conn, query, args_tuple)
-            finally:
-                conn.close()
-
-        try:
-            return await asyncio.wait_for(
-                asyncio.to_thread(_run_query), timeout=effective_timeout
-            )
-        except asyncio.TimeoutError as exc:
-            raise QueryTimeoutError(effective_timeout or 0) from exc
-
-
-class DuckDBConnection(BaseDatabaseConnection):
-    """DuckDB database connection using duckdb Python API."""
-
-    def __init__(self, connection_string: str):
-        super().__init__(connection_string)
-        if connection_string.startswith("duckdb:///"):
-            db_path = connection_string.replace("duckdb:///", "", 1)
-        elif connection_string.startswith("duckdb://"):
-            db_path = connection_string.replace("duckdb://", "", 1)
-        else:
-            db_path = connection_string
-
-        self.database_path = db_path or ":memory:"
-
-    async def get_pool(self):
-        """DuckDB creates connections per query, return database path."""
-        return self.database_path
-
-    async def close(self):
-        """DuckDB connections are created per query, no persistent pool to close."""
-        pass
-
-    async def execute_query(
-        self, query: str, *args, timeout: float | None = None
-    ) -> list[dict[str, Any]]:
-        """Execute a query and return results as list of dicts.
-
-        All queries run in a transaction that is rolled back at the end,
-        ensuring no changes are persisted to the database.
-        """
-        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
-
-        args_tuple = tuple(args) if args else tuple()
-
-        def _run_query() -> list[dict[str, Any]]:
-            conn = duckdb.connect(self.database_path)
-            try:
-                return _execute_duckdb_transaction(conn, query, args_tuple)
-            finally:
-                conn.close()
-
-        try:
-            return await asyncio.wait_for(
-                asyncio.to_thread(_run_query), timeout=effective_timeout
-            )
-        except asyncio.TimeoutError as exc:
-            raise QueryTimeoutError(effective_timeout or 0) from exc
-
-
-def DatabaseConnection(connection_string: str) -> BaseDatabaseConnection:
-    """Factory function to create appropriate database connection based on connection string."""
-    if connection_string.startswith("postgresql://"):
-        return PostgreSQLConnection(connection_string)
-    elif connection_string.startswith("mysql://"):
-        return MySQLConnection(connection_string)
-    elif connection_string.startswith("sqlite:///"):
-        return SQLiteConnection(connection_string)
-    elif connection_string.startswith("duckdb://"):
-        return DuckDBConnection(connection_string)
-    elif connection_string.startswith("csv:///"):
-        return CSVConnection(connection_string)
-    else:
-        raise ValueError(
-            f"Unsupported database type in connection string: {connection_string}"
-        )
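
For context, a minimal sketch of how this removed factory was typically driven. The module path comes from the 0.25.0 RECORD below; the SQLite DSN, query, and timeout value are illustrative only and not taken from sqlsaber itself.

import asyncio

from sqlsaber.database.connection import DatabaseConnection  # 0.25.0 layout


async def main() -> None:
    # The factory dispatches on the URL scheme (postgresql://, mysql://,
    # sqlite:///, duckdb://, csv:///) and returns a BaseDatabaseConnection.
    conn = DatabaseConnection("sqlite:///example.db")
    try:
        # execute_query() always rolls back its transaction, so nothing is persisted.
        rows = await conn.execute_query("SELECT 1 AS one", timeout=5.0)
        print(rows)  # [{'one': 1}]
    finally:
        await conn.close()


if __name__ == "__main__":
    asyncio.run(main())

In 0.27.0, connection handling presumably moves into the new per-backend modules listed above (database/base.py, postgresql.py, mysql.py, sqlite.py, duckdb.py, csv.py).
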
sqlsaber-0.25.0.dist-info/RECORD (deleted)
@@ -1,47 +0,0 @@
-sqlsaber/__init__.py,sha256=HjS8ULtP4MGpnTL7njVY45NKV9Fi4e_yeYuY-hyXWQc,73
-sqlsaber/__main__.py,sha256=RIHxWeWh2QvLfah-2OkhI5IJxojWfy4fXpMnVEJYvxw,78
-sqlsaber/agents/__init__.py,sha256=i_MI2eWMQaVzGikKU71FPCmSQxNDKq36Imq1PrYoIPU,130
-sqlsaber/agents/base.py,sha256=EAuoj3vpWNqksudMd2lL1Fmx68Y91qNX6NyK1RjQ4-g,2679
-sqlsaber/agents/mcp.py,sha256=GcJTx7YDYH6aaxIADEIxSgcWAdWakUx395JIzVnf17U,768
-sqlsaber/agents/pydantic_ai_agent.py,sha256=qn-DnTGcdUzSEn9xBWwGhgtifYxZ_NEo8XPePnl1StE,7154
-sqlsaber/cli/__init__.py,sha256=qVSLVJLLJYzoC6aj6y9MFrzZvAwc4_OgxU9DlkQnZ4M,86
-sqlsaber/cli/auth.py,sha256=jTsRgbmlGPlASSuIKmdjjwfqtKvjfKd_cTYxX0-QqaQ,7400
-sqlsaber/cli/commands.py,sha256=NyBDr5qEnCOZrHEMGcEpHLXEWdlzEQW3D61NIrPi2fQ,8727
-sqlsaber/cli/completers.py,sha256=HsUPjaZweLSeYCWkAcgMl8FylQ1xjWBWYTEL_9F6xfU,6430
-sqlsaber/cli/database.py,sha256=93etjqiYAfH08jBe_OJpLMNKiu3H81G8O7CMB31MIIc,13424
-sqlsaber/cli/display.py,sha256=XuKiTWUw5k0U0P_f1K7zhDWX5KTO2DQVG0Q0XU9VEhs,16334
-sqlsaber/cli/interactive.py,sha256=lVOtONBeAmZxWdfkvdoVoX4POs_-C1YVs0jPxY9MoZs,13288
-sqlsaber/cli/memory.py,sha256=OufHFJFwV0_GGn7LvKRTJikkWhV1IwNIUDOxFPHXOaQ,7794
-sqlsaber/cli/models.py,sha256=ZewtwGQwhd9b-yxBAPKePolvI1qQG-EkmeWAGMqtWNQ,8986
-sqlsaber/cli/streaming.py,sha256=Eo5CNUgDGY1WYP90jwDA2aY7RefN-TfcStA6NyjUQTY,7076
-sqlsaber/cli/threads.py,sha256=ufDABlqndVJKd5COgSokcFRIKTgsGqXdHV84DVVm7MA,12743
-sqlsaber/config/__init__.py,sha256=olwC45k8Nc61yK0WmPUk7XHdbsZH9HuUAbwnmKe3IgA,100
-sqlsaber/config/api_keys.py,sha256=RqWQCko1tY7sES7YOlexgBH5Hd5ne_kGXHdBDNqcV2U,3649
-sqlsaber/config/auth.py,sha256=b5qB2h1doXyO9Bn8z0CcL8LAR2jF431gGXBGKLgTmtQ,2756
-sqlsaber/config/database.py,sha256=Yec6_0wdzq-ADblMNnbgvouYCimYOY_DWHT9oweaISc,11449
-sqlsaber/config/oauth_flow.py,sha256=A3bSXaBLzuAfXV2ZPA94m9NV33c2MyL6M4ii9oEkswQ,10291
-sqlsaber/config/oauth_tokens.py,sha256=C9z35hyx-PvSAYdC1LNf3rg9_wsEIY56hkEczelbad0,6015
-sqlsaber/config/providers.py,sha256=JFjeJv1K5Q93zWSlWq3hAvgch1TlgoF0qFa0KJROkKY,2957
-sqlsaber/config/settings.py,sha256=vgb_RXaM-7DgbxYDmWNw1cSyMqwys4j3qNCvM4bljwI,5586
-sqlsaber/database/__init__.py,sha256=a_gtKRJnZVO8-fEZI7g3Z8YnGa6Nio-5Y50PgVp07ss,176
-sqlsaber/database/connection.py,sha256=J3U08Qu7NQrmem0jPM5XKIHPmPJE927IiLhN8zA6oLo,19392
-sqlsaber/database/resolver.py,sha256=wSCcn__aCqwIfpt_LCjtW2Zgb8RpG5PlmwwZHli1q_U,3628
-sqlsaber/database/schema.py,sha256=9HXTb5O_nlS2aNDeyv7EXhX7_kN2hs6rbPnJ8fnLyWk,41260
-sqlsaber/mcp/__init__.py,sha256=COdWq7wauPBp5Ew8tfZItFzbcLDSEkHBJSMhxzy8C9c,112
-sqlsaber/mcp/mcp.py,sha256=X12oCMZYAtgJ7MNuh5cqz8y3lALrOzkXWcfpuY0Ijxk,3950
-sqlsaber/memory/__init__.py,sha256=GiWkU6f6YYVV0EvvXDmFWe_CxarmDCql05t70MkTEWs,63
-sqlsaber/memory/manager.py,sha256=p3fybMVfH-E4ApT1ZRZUnQIWSk9dkfUPCyfkmA0HALs,2739
-sqlsaber/memory/storage.py,sha256=ne8szLlGj5NELheqLnI7zu21V8YS4rtpYGGC7tOmi-s,5745
-sqlsaber/threads/__init__.py,sha256=Hh3dIG1tuC8fXprREUpslCIgPYz8_6o7aRLx4yNeO48,139
-sqlsaber/threads/storage.py,sha256=rsUdxT4CR52D7xtGir9UlsFnBMk11jZeflzDrk2q4ME,11183
-sqlsaber/tools/__init__.py,sha256=x3YdmX_7P0Qq_HtZHAgfIVKTLxYqKk6oc4tGsujQWsc,586
-sqlsaber/tools/base.py,sha256=mHhvAj27BHmckyvuDLCPlAQdzABJyYxd9SJnaYAwwuA,1777
-sqlsaber/tools/enums.py,sha256=CH32mL-0k9ZA18911xLpNtsgpV6tB85TktMj6uqGz54,411
-sqlsaber/tools/instructions.py,sha256=X-x8maVkkyi16b6Tl0hcAFgjiYceZaSwyWTfmrvx8U8,9024
-sqlsaber/tools/registry.py,sha256=HWOQMsNIdL4XZS6TeNUyrL-5KoSDH6PHsWd3X66o-18,3211
-sqlsaber/tools/sql_tools.py,sha256=j4yRqfKokPFnZ_tEZPrWU5WStDc3Mexo1fWZ8KsmUjQ,9965
-sqlsaber-0.25.0.dist-info/METADATA,sha256=9Q2AsBv4I78FLo8Uezmnv_fCch3jIKgv1gzBBm1cVB4,6243
-sqlsaber-0.25.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-sqlsaber-0.25.0.dist-info/entry_points.txt,sha256=qEbOB7OffXPFgyJc7qEIJlMEX5RN9xdzLmWZa91zCQQ,162
-sqlsaber-0.25.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-sqlsaber-0.25.0.dist-info/RECORD,,
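
Each RECORD entry follows the wheel spec's format: path,sha256=<urlsafe-base64 digest with padding stripped>,<size in bytes>. A small sketch for reproducing one entry from an installed file (the file path is illustrative):

import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    # Hash the file and format it the way a wheel RECORD does.
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"


print(record_entry("sqlsaber/__init__.py"))  # compare against the first RECORD line above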