sqlsaber 0.24.0__py3-none-any.whl → 0.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sqlsaber might be problematic. Click here for more details.

@@ -1,511 +0,0 @@
1
- """Database connection management."""
2
-
3
- import asyncio
4
- import ssl
5
- from abc import ABC, abstractmethod
6
- from pathlib import Path
7
- from typing import Any
8
- from urllib.parse import parse_qs, urlparse
9
-
10
- import aiomysql
11
- import aiosqlite
12
- import asyncpg
13
-
14
- # Default query timeout to prevent runaway queries
15
- DEFAULT_QUERY_TIMEOUT = 30.0 # seconds
16
-
17
-
18
class QueryTimeoutError(RuntimeError):
    """Raised when a query runs longer than its allotted timeout."""

    def __init__(self, seconds: float):
        super().__init__(f"Query exceeded timeout of {seconds}s")
        # Keep the numeric limit around so callers can inspect/report it.
        self.timeout = seconds
24
-
25
-
26
- class BaseDatabaseConnection(ABC):
27
- """Abstract base class for database connections."""
28
-
29
- def __init__(self, connection_string: str):
30
- self.connection_string = connection_string
31
- self._pool = None
32
-
33
- @abstractmethod
34
- async def get_pool(self):
35
- """Get or create connection pool."""
36
- pass
37
-
38
- @abstractmethod
39
- async def close(self):
40
- """Close the connection pool."""
41
- pass
42
-
43
- @abstractmethod
44
- async def execute_query(
45
- self, query: str, *args, timeout: float | None = None
46
- ) -> list[dict[str, Any]]:
47
- """Execute a query and return results as list of dicts.
48
-
49
- All queries run in a transaction that is rolled back at the end,
50
- ensuring no changes are persisted to the database.
51
-
52
- Args:
53
- query: SQL query to execute
54
- *args: Query parameters
55
- timeout: Query timeout in seconds (overrides default_timeout)
56
- """
57
- pass
58
-
59
-
60
class PostgreSQLConnection(BaseDatabaseConnection):
    """PostgreSQL database connection using asyncpg."""

    def __init__(self, connection_string: str):
        super().__init__(connection_string)
        self._pool: asyncpg.Pool | None = None
        # SSL configuration is derived once from the DSN query string.
        self._ssl_context = self._create_ssl_context()

    def _create_ssl_context(self) -> ssl.SSLContext | None:
        """Build an SSLContext from libpq-style DSN query parameters.

        Returns None when SSL is disabled, unset, or the mode is unknown.
        """
        parsed = urlparse(self.connection_string)
        if not parsed.query:
            return None

        params = parse_qs(parsed.query)
        mode = params.get("sslmode", [None])[0]
        if mode is None or mode == "disable":
            return None

        # (check_hostname, verify_mode) policy per supported sslmode.
        policies = {
            "require": (False, ssl.CERT_NONE),
            "verify-ca": (False, ssl.CERT_REQUIRED),
            "verify-full": (True, ssl.CERT_REQUIRED),
        }
        if mode not in policies:
            return None

        context = ssl.create_default_context()
        context.check_hostname, context.verify_mode = policies[mode]

        # Optional certificate material, also libpq-style parameter names.
        ca = params.get("sslrootcert", [None])[0]
        cert = params.get("sslcert", [None])[0]
        key = params.get("sslkey", [None])[0]
        if ca:
            context.load_verify_locations(ca)
        if cert and key:
            context.load_cert_chain(cert, key)

        return context

    async def get_pool(self) -> asyncpg.Pool:
        """Lazily create and cache the asyncpg connection pool."""
        if self._pool is None:
            pool_kwargs: dict[str, Any] = {"min_size": 1, "max_size": 10}
            if self._ssl_context:
                pool_kwargs["ssl"] = self._ssl_context
            self._pool = await asyncpg.create_pool(
                self.connection_string, **pool_kwargs
            )
        return self._pool

    async def close(self):
        """Close the connection pool and drop the cached reference."""
        if self._pool:
            await self._pool.close()
            self._pool = None

    async def execute_query(
        self, query: str, *args, timeout: float | None = None
    ) -> list[dict[str, Any]]:
        """Execute a query and return results as a list of dicts.

        All queries run in a transaction that is rolled back at the end,
        ensuring no changes are persisted to the database.
        """
        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
        pool = await self.get_pool()

        async with pool.acquire() as conn:
            # Explicit transaction object so we can always roll back below.
            transaction = conn.transaction()
            await transaction.start()

            try:
                if effective_timeout:
                    # Server-side guard: statement_timeout is in milliseconds
                    # and SET LOCAL is scoped to this transaction.
                    millis = int(effective_timeout * 1000)
                    await conn.execute(f"SET LOCAL statement_timeout = {millis}")
                    # Client-side guard as well, in case the server one is ignored.
                    rows = await asyncio.wait_for(
                        conn.fetch(query, *args), timeout=effective_timeout
                    )
                else:
                    rows = await conn.fetch(query, *args)

                return [dict(row) for row in rows]
            except asyncio.TimeoutError as exc:
                raise QueryTimeoutError(effective_timeout or 0) from exc
            finally:
                # Always roll back so nothing is ever committed.
                await transaction.rollback()
170
-
171
-
172
class MySQLConnection(BaseDatabaseConnection):
    """MySQL database connection using aiomysql."""

    def __init__(self, connection_string: str):
        super().__init__(connection_string)
        self._pool: aiomysql.Pool | None = None
        self._parse_connection_string()

    def _parse_connection_string(self):
        """Split the DSN into host/port/db/user/password plus SSL options."""
        parsed = urlparse(self.connection_string)
        self.host = parsed.hostname or "localhost"
        self.port = parsed.port or 3306
        self.database = parsed.path.lstrip("/") if parsed.path else ""
        self.user = parsed.username or ""
        self.password = parsed.password or ""

        # SSL keyword arguments forwarded to aiomysql.create_pool.
        self.ssl_params = {}
        if not parsed.query:
            return

        params = parse_qs(parsed.query)
        ssl_mode = params.get("ssl_mode", [None])[0]
        if not ssl_mode:
            return

        mode = ssl_mode.upper()
        if mode == "DISABLED":
            self.ssl_params["ssl"] = None
        elif mode in ("PREFERRED", "REQUIRED", "VERIFY_CA", "VERIFY_IDENTITY"):
            ssl_context = ssl.create_default_context()

            # Tighten or relax verification according to the MySQL ssl_mode.
            if mode == "REQUIRED":
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
            elif mode == "VERIFY_CA":
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_REQUIRED
            elif mode == "VERIFY_IDENTITY":
                ssl_context.check_hostname = True
                ssl_context.verify_mode = ssl.CERT_REQUIRED

            # Optional certificate material from the query string.
            ca = params.get("ssl_ca", [None])[0]
            cert = params.get("ssl_cert", [None])[0]
            key = params.get("ssl_key", [None])[0]
            if ca:
                ssl_context.load_verify_locations(ca)
            if cert and key:
                ssl_context.load_cert_chain(cert, key)

            self.ssl_params["ssl"] = ssl_context

    async def get_pool(self) -> aiomysql.Pool:
        """Lazily create and cache the aiomysql connection pool."""
        if self._pool is None:
            pool_kwargs = {
                "host": self.host,
                "port": self.port,
                "user": self.user,
                "password": self.password,
                "db": self.database,
                "minsize": 1,
                "maxsize": 10,
                "autocommit": False,
                # SSL settings, when configured from the DSN.
                **self.ssl_params,
            }
            self._pool = await aiomysql.create_pool(**pool_kwargs)
        return self._pool

    async def close(self):
        """Close the connection pool and wait for it to fully shut down."""
        if self._pool:
            self._pool.close()
            await self._pool.wait_closed()
            self._pool = None

    async def execute_query(
        self, query: str, *args, timeout: float | None = None
    ) -> list[dict[str, Any]]:
        """Execute a query and return results as a list of dicts.

        All queries run in a transaction that is rolled back at the end,
        ensuring no changes are persisted to the database.
        """
        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
        pool = await self.get_pool()
        params = args if args else None

        async with pool.acquire() as conn:
            async with conn.cursor(aiomysql.DictCursor) as cursor:
                await conn.begin()
                try:
                    if effective_timeout:
                        # NOTE(review): SET SESSION is not transactional, so
                        # this timeout is NOT undone by the rollback below and
                        # stays on this pooled connection — confirm acceptable.
                        await cursor.execute(
                            f"SET SESSION MAX_EXECUTION_TIME = {int(effective_timeout * 1000)}"
                        )
                        # Client-side guard mirrors the server-side one.
                        await asyncio.wait_for(
                            cursor.execute(query, params),
                            timeout=effective_timeout,
                        )
                        rows = await asyncio.wait_for(
                            cursor.fetchall(), timeout=effective_timeout
                        )
                    else:
                        await cursor.execute(query, params)
                        rows = await cursor.fetchall()

                    return [dict(row) for row in rows]
                except asyncio.TimeoutError as exc:
                    raise QueryTimeoutError(effective_timeout or 0) from exc
                finally:
                    # Always roll back so nothing is ever committed.
                    await conn.rollback()
298
-
299
-
300
class SQLiteConnection(BaseDatabaseConnection):
    """SQLite database connection using aiosqlite."""

    def __init__(self, connection_string: str):
        super().__init__(connection_string)
        # DSN form is sqlite:///path/to/db; strip the scheme prefix.
        self.database_path = connection_string.replace("sqlite:///", "")

    async def get_pool(self):
        """SQLite doesn't use connection pooling, return database path."""
        return self.database_path

    async def close(self):
        """SQLite connections are created per query, no persistent pool to close."""

    async def execute_query(
        self, query: str, *args, timeout: float | None = None
    ) -> list[dict[str, Any]]:
        """Execute a query and return results as a list of dicts.

        All queries run in a transaction that is rolled back at the end,
        ensuring no changes are persisted to the database.
        """
        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
        params = args if args else ()

        async with aiosqlite.connect(self.database_path) as conn:
            # Row factory gives dict-like access to result rows.
            conn.row_factory = aiosqlite.Row

            await conn.execute("BEGIN")
            try:
                if effective_timeout:
                    # SQLite has no server-side timeout; enforce client-side.
                    cursor = await asyncio.wait_for(
                        conn.execute(query, params),
                        timeout=effective_timeout,
                    )
                    rows = await asyncio.wait_for(
                        cursor.fetchall(), timeout=effective_timeout
                    )
                else:
                    cursor = await conn.execute(query, params)
                    rows = await cursor.fetchall()

                return [dict(row) for row in rows]
            except asyncio.TimeoutError as exc:
                raise QueryTimeoutError(effective_timeout or 0) from exc
            finally:
                # Always roll back so nothing is ever committed.
                await conn.rollback()
352
-
353
-
354
class CSVConnection(BaseDatabaseConnection):
    """CSV file connection using an in-memory SQLite database.

    The CSV is loaded once into a table named after the file's stem; queries
    then run against that table just like the SQLite backend.
    """

    def __init__(self, connection_string: str):
        super().__init__(connection_string)

        # DSN form: csv:///path/to/file.csv[?delimiter=...&encoding=...&header=...]
        self.csv_path = connection_string.replace("csv:///", "")

        # CSV parsing options (overridable via DSN query parameters below).
        self.delimiter = ","
        self.encoding = "utf-8"
        self.has_header = True

        parsed = urlparse(connection_string)
        if parsed.query:
            params = parse_qs(parsed.query)
            self.delimiter = params.get("delimiter", [","])[0]
            self.encoding = params.get("encoding", ["utf-8"])[0]
            self.has_header = params.get("header", ["true"])[0].lower() == "true"

        # Table name derived from the file name (without extension).
        self.table_name = Path(self.csv_path).stem

        # Lazily created in-memory connection, plus a flag so the CSV is
        # loaded only once per connection lifetime.
        self._conn = None
        self._csv_loaded = False

    async def get_pool(self):
        """Get or create the in-memory database connection."""
        if self._conn is None:
            self._conn = await aiosqlite.connect(":memory:")
            self._conn.row_factory = aiosqlite.Row
            await self._load_csv_data()
        return self._conn

    async def close(self):
        """Close the database connection and forget the loaded data."""
        if self._conn:
            await self._conn.close()
            self._conn = None
            self._csv_loaded = False

    async def _load_csv_data(self):
        """Load CSV data into the in-memory SQLite database.

        Raises:
            ValueError: if the file cannot be read or loaded (chains the
                original error as __cause__).
        """
        if self._csv_loaded or not self._conn:
            return

        try:
            # Import pandas only when needed for CSV operations.
            # This improves CLI load times.
            import pandas as pd

            df = pd.read_csv(
                self.csv_path,
                delimiter=self.delimiter,
                encoding=self.encoding,
                header=0 if self.has_header else None,
            )

            # Synthesize column names when the file has no header row.
            if not self.has_header:
                df.columns = [f"column_{i}" for i in range(len(df.columns))]

            # Map pandas dtypes onto SQLite column types.
            columns_sql = []
            for col in df.columns:
                dtype = df[col].dtype
                if pd.api.types.is_integer_dtype(dtype):
                    sql_type = "INTEGER"
                elif pd.api.types.is_float_dtype(dtype):
                    sql_type = "REAL"
                elif pd.api.types.is_bool_dtype(dtype):
                    sql_type = "INTEGER"  # SQLite doesn't have BOOLEAN
                else:
                    sql_type = "TEXT"
                columns_sql.append(f'"{col}" {sql_type}')

            create_table_sql = (
                f'CREATE TABLE "{self.table_name}" ({", ".join(columns_sql)})'
            )
            await self._conn.execute(create_table_sql)

            def _native(val):
                # Convert pandas values to Python native types for sqlite.
                if pd.isna(val):
                    return None
                if isinstance(val, (pd.Timestamp, pd.Timedelta)):
                    return str(val)
                return val

            placeholders = ", ".join(["?" for _ in df.columns])
            insert_sql = f'INSERT INTO "{self.table_name}" VALUES ({placeholders})'

            # Bulk-insert with a single executemany call instead of one
            # execute round trip per row.
            data = [[_native(val) for val in row] for _, row in df.iterrows()]
            if data:
                await self._conn.executemany(insert_sql, data)

            await self._conn.commit()
            self._csv_loaded = True

        except Exception as e:
            # Chain the original exception for easier debugging.
            raise ValueError(
                f"Error loading CSV file '{self.csv_path}': {str(e)}"
            ) from e

    async def execute_query(
        self, query: str, *args, timeout: float | None = None
    ) -> list[dict[str, Any]]:
        """Execute a query and return results as a list of dicts.

        All queries run in a transaction that is rolled back at the end,
        ensuring no changes are persisted to the database.
        """
        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
        conn = await self.get_pool()

        await conn.execute("BEGIN")
        try:
            if effective_timeout:
                # Client-side timeout only (in-memory SQLite has no server side).
                cursor = await asyncio.wait_for(
                    conn.execute(query, args if args else ()),
                    timeout=effective_timeout,
                )
                rows = await asyncio.wait_for(
                    cursor.fetchall(), timeout=effective_timeout
                )
            else:
                cursor = await conn.execute(query, args if args else ())
                rows = await cursor.fetchall()

            return [dict(row) for row in rows]
        except asyncio.TimeoutError as exc:
            raise QueryTimeoutError(effective_timeout or 0) from exc
        finally:
            # Always roll back so nothing is ever committed.
            await conn.rollback()
496
-
497
-
498
- def DatabaseConnection(connection_string: str) -> BaseDatabaseConnection:
499
- """Factory function to create appropriate database connection based on connection string."""
500
- if connection_string.startswith("postgresql://"):
501
- return PostgreSQLConnection(connection_string)
502
- elif connection_string.startswith("mysql://"):
503
- return MySQLConnection(connection_string)
504
- elif connection_string.startswith("sqlite:///"):
505
- return SQLiteConnection(connection_string)
506
- elif connection_string.startswith("csv:///"):
507
- return CSVConnection(connection_string)
508
- else:
509
- raise ValueError(
510
- f"Unsupported database type in connection string: {connection_string}"
511
- )
@@ -1,47 +0,0 @@
1
- sqlsaber/__init__.py,sha256=HjS8ULtP4MGpnTL7njVY45NKV9Fi4e_yeYuY-hyXWQc,73
2
- sqlsaber/__main__.py,sha256=RIHxWeWh2QvLfah-2OkhI5IJxojWfy4fXpMnVEJYvxw,78
3
- sqlsaber/agents/__init__.py,sha256=i_MI2eWMQaVzGikKU71FPCmSQxNDKq36Imq1PrYoIPU,130
4
- sqlsaber/agents/base.py,sha256=7zOZTHKxUuU0uMc-NTaCkkBfDnU3jtwbT8_eP1ZtJ2k,2615
5
- sqlsaber/agents/mcp.py,sha256=GcJTx7YDYH6aaxIADEIxSgcWAdWakUx395JIzVnf17U,768
6
- sqlsaber/agents/pydantic_ai_agent.py,sha256=6RvG2O7G8P6NN9QaRXUodg5Q26QJ4ShGWoTGYbVQ5K4,7065
7
- sqlsaber/cli/__init__.py,sha256=qVSLVJLLJYzoC6aj6y9MFrzZvAwc4_OgxU9DlkQnZ4M,86
8
- sqlsaber/cli/auth.py,sha256=jTsRgbmlGPlASSuIKmdjjwfqtKvjfKd_cTYxX0-QqaQ,7400
9
- sqlsaber/cli/commands.py,sha256=mjLG9i1bXf0TEroxkIxq5O7Hhjufz3Ad72cyJz7vE1k,8128
10
- sqlsaber/cli/completers.py,sha256=HsUPjaZweLSeYCWkAcgMl8FylQ1xjWBWYTEL_9F6xfU,6430
11
- sqlsaber/cli/database.py,sha256=JKtHSN-BFzBa14REf0phFVQB7d67m1M5FFaD8N6DdrY,12966
12
- sqlsaber/cli/display.py,sha256=XuKiTWUw5k0U0P_f1K7zhDWX5KTO2DQVG0Q0XU9VEhs,16334
13
- sqlsaber/cli/interactive.py,sha256=7uM4LoXbhPJr8o5yNjICSzL0uxZkp1psWrVq4G9V0OI,13118
14
- sqlsaber/cli/memory.py,sha256=OufHFJFwV0_GGn7LvKRTJikkWhV1IwNIUDOxFPHXOaQ,7794
15
- sqlsaber/cli/models.py,sha256=ZewtwGQwhd9b-yxBAPKePolvI1qQG-EkmeWAGMqtWNQ,8986
16
- sqlsaber/cli/streaming.py,sha256=Eo5CNUgDGY1WYP90jwDA2aY7RefN-TfcStA6NyjUQTY,7076
17
- sqlsaber/cli/threads.py,sha256=ufDABlqndVJKd5COgSokcFRIKTgsGqXdHV84DVVm7MA,12743
18
- sqlsaber/config/__init__.py,sha256=olwC45k8Nc61yK0WmPUk7XHdbsZH9HuUAbwnmKe3IgA,100
19
- sqlsaber/config/api_keys.py,sha256=RqWQCko1tY7sES7YOlexgBH5Hd5ne_kGXHdBDNqcV2U,3649
20
- sqlsaber/config/auth.py,sha256=b5qB2h1doXyO9Bn8z0CcL8LAR2jF431gGXBGKLgTmtQ,2756
21
- sqlsaber/config/database.py,sha256=c6q3l4EvoBch1ckYHA70hf6L7fSOY-sItnLCpvJiPrA,11357
22
- sqlsaber/config/oauth_flow.py,sha256=A3bSXaBLzuAfXV2ZPA94m9NV33c2MyL6M4ii9oEkswQ,10291
23
- sqlsaber/config/oauth_tokens.py,sha256=C9z35hyx-PvSAYdC1LNf3rg9_wsEIY56hkEczelbad0,6015
24
- sqlsaber/config/providers.py,sha256=JFjeJv1K5Q93zWSlWq3hAvgch1TlgoF0qFa0KJROkKY,2957
25
- sqlsaber/config/settings.py,sha256=vgb_RXaM-7DgbxYDmWNw1cSyMqwys4j3qNCvM4bljwI,5586
26
- sqlsaber/database/__init__.py,sha256=a_gtKRJnZVO8-fEZI7g3Z8YnGa6Nio-5Y50PgVp07ss,176
27
- sqlsaber/database/connection.py,sha256=1bDPEa6cmdh87gPfhNeBLpOdI0E2_2KlE74q_-4l_jI,18913
28
- sqlsaber/database/resolver.py,sha256=RPXF5EoKzvQDDLmPGNHYd2uG_oNICH8qvUjBp6iXmNY,3348
29
- sqlsaber/database/schema.py,sha256=Le5DXSgpsWyhMDuY6qpc_dsP4jjMXgJTRtAKq9S5Oog,32868
30
- sqlsaber/mcp/__init__.py,sha256=COdWq7wauPBp5Ew8tfZItFzbcLDSEkHBJSMhxzy8C9c,112
31
- sqlsaber/mcp/mcp.py,sha256=X12oCMZYAtgJ7MNuh5cqz8y3lALrOzkXWcfpuY0Ijxk,3950
32
- sqlsaber/memory/__init__.py,sha256=GiWkU6f6YYVV0EvvXDmFWe_CxarmDCql05t70MkTEWs,63
33
- sqlsaber/memory/manager.py,sha256=p3fybMVfH-E4ApT1ZRZUnQIWSk9dkfUPCyfkmA0HALs,2739
34
- sqlsaber/memory/storage.py,sha256=ne8szLlGj5NELheqLnI7zu21V8YS4rtpYGGC7tOmi-s,5745
35
- sqlsaber/threads/__init__.py,sha256=Hh3dIG1tuC8fXprREUpslCIgPYz8_6o7aRLx4yNeO48,139
36
- sqlsaber/threads/storage.py,sha256=rsUdxT4CR52D7xtGir9UlsFnBMk11jZeflzDrk2q4ME,11183
37
- sqlsaber/tools/__init__.py,sha256=x3YdmX_7P0Qq_HtZHAgfIVKTLxYqKk6oc4tGsujQWsc,586
38
- sqlsaber/tools/base.py,sha256=mHhvAj27BHmckyvuDLCPlAQdzABJyYxd9SJnaYAwwuA,1777
39
- sqlsaber/tools/enums.py,sha256=CH32mL-0k9ZA18911xLpNtsgpV6tB85TktMj6uqGz54,411
40
- sqlsaber/tools/instructions.py,sha256=X-x8maVkkyi16b6Tl0hcAFgjiYceZaSwyWTfmrvx8U8,9024
41
- sqlsaber/tools/registry.py,sha256=HWOQMsNIdL4XZS6TeNUyrL-5KoSDH6PHsWd3X66o-18,3211
42
- sqlsaber/tools/sql_tools.py,sha256=j4yRqfKokPFnZ_tEZPrWU5WStDc3Mexo1fWZ8KsmUjQ,9965
43
- sqlsaber-0.24.0.dist-info/METADATA,sha256=cPXj4eFPU-I6AWgHVVboKwu3zMmYKvs46LtrmZCBlhU,6178
44
- sqlsaber-0.24.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
45
- sqlsaber-0.24.0.dist-info/entry_points.txt,sha256=qEbOB7OffXPFgyJc7qEIJlMEX5RN9xdzLmWZa91zCQQ,162
46
- sqlsaber-0.24.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
47
- sqlsaber-0.24.0.dist-info/RECORD,,