sqlsaber-0.23.0-py3-none-any.whl → sqlsaber-0.24.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of sqlsaber might be problematic.

sqlsaber/cli/display.py CHANGED
@@ -198,22 +198,32 @@ class DisplayManager:
         # Normalized leading blank line before tool headers
         self.show_newline()
         if tool_name == "list_tables":
-            self.console.print(
-                "[dim bold]:gear: Discovering available tables[/dim bold]"
-            )
+            if self.console.is_terminal:
+                self.console.print(
+                    "[dim bold]:gear: Discovering available tables[/dim bold]"
+                )
+            else:
+                self.console.print("**Discovering available tables**\n")
         elif tool_name == "introspect_schema":
             pattern = tool_input.get("table_pattern", "all tables")
-            self.console.print(
-                f"[dim bold]:gear: Examining schema for: {pattern}[/dim bold]"
-            )
+            if self.console.is_terminal:
+                self.console.print(
+                    f"[dim bold]:gear: Examining schema for: {pattern}[/dim bold]"
+                )
+            else:
+                self.console.print(f"**Examining schema for:** {pattern}\n")
         elif tool_name == "execute_sql":
             # For streaming, we render SQL via LiveMarkdownRenderer; keep Syntax
             # rendering for threads show/resume. Controlled by include_sql flag.
             query = tool_input.get("query", "")
-            self.console.print("[dim bold]:gear: Executing SQL:[/dim bold]")
-            self.show_newline()
-            syntax = Syntax(query, "sql", background_color="default", word_wrap=True)
-            self.console.print(syntax)
+            if self.console.is_terminal:
+                self.console.print("[dim bold]:gear: Executing SQL:[/dim bold]")
+                self.show_newline()
+                syntax = Syntax(query, "sql", background_color="default", word_wrap=True)
+                self.console.print(syntax)
+            else:
+                self.console.print("**Executing SQL:**\n")
+                self.console.print(f"```sql\n{query}\n```\n")

     def show_text_stream(self, text: str):
         """Display streaming text."""
@@ -225,9 +235,12 @@ class DisplayManager:
         if not results:
             return

-        self.console.print(
-            f"\n[bold magenta]Results ({len(results)} rows):[/bold magenta]"
-        )
+        if self.console.is_terminal:
+            self.console.print(
+                f"\n[bold magenta]Results ({len(results)} rows):[/bold magenta]"
+            )
+        else:
+            self.console.print(f"\n**Results ({len(results)} rows):**\n")

         # Create table with columns from first result
         all_columns = list(results[0].keys())
@@ -235,9 +248,14 @@

         # Show warning if columns were truncated
         if len(all_columns) > 15:
-            self.console.print(
-                f"[yellow]Note: Showing first 15 of {len(all_columns)} columns[/yellow]"
-            )
+            if self.console.is_terminal:
+                self.console.print(
+                    f"[yellow]Note: Showing first 15 of {len(all_columns)} columns[/yellow]"
+                )
+            else:
+                self.console.print(
+                    f"*Note: Showing first 15 of {len(all_columns)} columns*\n"
+                )

         table = self._create_table(display_columns)

@@ -248,9 +266,14 @@
         self.console.print(table)

         if len(results) > 20:
-            self.console.print(
-                f"[yellow]... and {len(results) - 20} more rows[/yellow]"
-            )
+            if self.console.is_terminal:
+                self.console.print(
+                    f"[yellow]... and {len(results) - 20} more rows[/yellow]"
+                )
+            else:
+                self.console.print(
+                    f"*... and {len(results) - 20} more rows*\n"
+                )

     def show_error(self, error_message: str):
         """Display error message."""
@@ -385,6 +408,13 @@ class DisplayManager:
                 for fk in foreign_keys:
                     self.console.print(f" • {fk}")

+                # Show indexes
+                indexes = table_info.get("indexes", [])
+                if indexes:
+                    self.console.print("[bold blue]Indexes:[/bold blue]")
+                    for idx in indexes:
+                        self.console.print(f" • {idx}")
+
         except json.JSONDecodeError:
             self.show_error("Failed to parse schema data")
         except Exception as e:
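
For context on the pattern these display.py hunks introduce: Rich's Console.is_terminal is False when stdout is piped or redirected, so the new branches emit plain Markdown text instead of Rich markup and Syntax panels. A minimal standalone sketch of that behavior (not sqlsaber code; the query string is made up):

```python
from rich.console import Console
from rich.syntax import Syntax

console = Console()
query = "SELECT 1;"  # hypothetical query, for illustration only

if console.is_terminal:
    # Interactive terminal: Rich markup plus syntax highlighting
    console.print("[dim bold]Executing SQL:[/dim bold]")
    console.print(Syntax(query, "sql", background_color="default", word_wrap=True))
else:
    # Piped or redirected output: plain Markdown that survives copy/paste
    console.print("**Executing SQL:**\n")
    console.print(f"```sql\n{query}\n```\n")
```
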
sqlsaber/cli/threads.py CHANGED
@@ -38,6 +38,8 @@ def _render_transcript(
     from sqlsaber.cli.display import DisplayManager

     dm = DisplayManager(console)
+    # Check if output is being redirected (for clean markdown export)
+    is_redirected = not console.is_terminal

     # Locate indices of user prompts
     user_indices: list[int] = []
@@ -78,11 +80,17 @@
                 parts.append(str(seg))
         text = "\n".join([s for s in parts if s]) or None
         if text:
-            console.print(
-                Panel.fit(Markdown(text), title="User", border_style="cyan")
-            )
+            if is_redirected:
+                console.print(f"**User:**\n\n{text}\n")
+            else:
+                console.print(
+                    Panel.fit(Markdown(text), title="User", border_style="cyan")
+                )
             return
-        console.print(Panel.fit("(no content)", title="User", border_style="cyan"))
+        if is_redirected:
+            console.print("**User:** (no content)\n")
+        else:
+            console.print(Panel.fit("(no content)", title="User", border_style="cyan"))

     def _render_response(message: ModelMessage) -> None:
         for part in getattr(message, "parts", []):
@@ -90,11 +98,14 @@
             if kind == "text":
                 text = getattr(part, "content", "")
                 if isinstance(text, str) and text.strip():
-                    console.print(
-                        Panel.fit(
-                            Markdown(text), title="Assistant", border_style="green"
+                    if is_redirected:
+                        console.print(f"**Assistant:**\n\n{text}\n")
+                    else:
+                        console.print(
+                            Panel.fit(
+                                Markdown(text), title="Assistant", border_style="green"
+                            )
                         )
-                    )
             elif kind in ("tool-call", "builtin-tool-call"):
                 name = getattr(part, "tool_name", "tool")
                 args = getattr(part, "args", None)
@@ -135,6 +146,20 @@
                             dm.show_sql_error(
                                 data.get("error"), data.get("suggestions")
                             )
+                        else:
+                            if is_redirected:
+                                console.print(f"**Tool result ({name}):**\n\n{content_str}\n")
+                            else:
+                                console.print(
+                                    Panel.fit(
+                                        content_str,
+                                        title=f"Tool result: {name}",
+                                        border_style="yellow",
+                                    )
+                                )
+                    except Exception:
+                        if is_redirected:
+                            console.print(f"**Tool result ({name}):**\n\n{content_str}\n")
                         else:
                             console.print(
                                 Panel.fit(
@@ -143,7 +168,10 @@
                                     border_style="yellow",
                                 )
                             )
-                    except Exception:
+                else:
+                    if is_redirected:
+                        console.print(f"**Tool result ({name}):**\n\n{content_str}\n")
+                    else:
                         console.print(
                             Panel.fit(
                                 content_str,
@@ -151,14 +179,6 @@
                                 border_style="yellow",
                             )
                         )
-                else:
-                    console.print(
-                        Panel.fit(
-                            content_str,
-                            title=f"Tool result: {name}",
-                            border_style="yellow",
-                        )
-                    )
             # Thinking parts omitted

     for start_idx, end_idx in slices or [(0, len(all_msgs))]:
@@ -270,7 +290,10 @@
         try:
             agent = build_sqlsaber_agent(db_conn, db_name)
             history = await store.get_thread_messages(thread_id)
-            console.print(Panel.fit(f"Thread: {thread.id}", border_style="blue"))
+            if console.is_terminal:
+                console.print(Panel.fit(f"Thread: {thread.id}", border_style="blue"))
+            else:
+                console.print(f"# Thread: {thread.id}\n")
             _render_transcript(console, history, None)
             session = InteractiveSession(
                 console=console,
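
The is_redirected = not console.is_terminal flag above applies the same fallback to thread transcripts: when stdout is not a terminal, Panel-based rendering gives way to plain Markdown headings, so redirected output reads as a Markdown document. A hedged sketch of forcing that path with a file-backed Console (the file name and prompt text are illustrative only):

```python
from rich.console import Console
from rich.panel import Panel

with open("transcript.md", "w") as fh:
    console = Console(file=fh)            # a regular file is not a terminal
    assert console.is_terminal is False

    text = "show me last month's orders"  # stand-in for a stored user prompt
    if console.is_terminal:
        console.print(Panel.fit(text, title="User", border_style="cyan"))
    else:
        console.print(f"**User:**\n\n{text}\n")
```
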
sqlsaber/database/schema.py CHANGED
@@ -32,6 +32,15 @@ class ForeignKeyInfo(TypedDict):
     references: dict[str, str]  # {"table": "schema.table", "column": "column_name"}


+class IndexInfo(TypedDict):
+    """Type definition for index information."""
+
+    name: str
+    columns: list[str]  # ordered
+    unique: bool
+    type: str | None  # btree, gin, FULLTEXT, etc. None if unknown
+
+
 class SchemaInfo(TypedDict):
     """Type definition for schema information."""

@@ -41,6 +50,7 @@ class SchemaInfo(TypedDict):
     columns: dict[str, ColumnInfo]
     primary_keys: list[str]
     foreign_keys: list[ForeignKeyInfo]
+    indexes: list[IndexInfo]


 class BaseSchemaIntrospector(ABC):
@@ -68,6 +78,11 @@
         """Get primary keys information for the specific database type."""
         pass

+    @abstractmethod
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for the specific database type."""
+        pass
+
     @abstractmethod
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information."""
@@ -209,6 +224,43 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
             """
             return await conn.fetch(pk_query)

+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for PostgreSQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            # Build proper table filters
+            idx_table_filters = []
+            for table in tables:
+                idx_table_filters.append(
+                    f"(ns.nspname = '{table['table_schema']}' AND t.relname = '{table['table_name']}')"
+                )
+
+            idx_query = f"""
+                SELECT
+                    ns.nspname AS table_schema,
+                    t.relname AS table_name,
+                    i.relname AS index_name,
+                    ix.indisunique AS is_unique,
+                    am.amname AS index_type,
+                    array_agg(a.attname ORDER BY ord.ordinality) AS column_names
+                FROM pg_class t
+                JOIN pg_namespace ns ON ns.oid = t.relnamespace
+                JOIN pg_index ix ON ix.indrelid = t.oid
+                JOIN pg_class i ON i.oid = ix.indexrelid
+                JOIN pg_am am ON am.oid = i.relam
+                JOIN LATERAL unnest(ix.indkey) WITH ORDINALITY AS ord(attnum, ordinality)
+                    ON TRUE
+                JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ord.attnum
+                WHERE ns.nspname NOT IN ('pg_catalog', 'information_schema')
+                    AND ({" OR ".join(idx_table_filters)})
+                GROUP BY table_schema, table_name, index_name, is_unique, index_type
+                ORDER BY table_schema, table_name, index_name;
+            """
+            return await conn.fetch(idx_query)
+
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information for PostgreSQL."""
         pool = await connection.get_pool()
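
The filter construction above interpolates schema and table names directly into the SQL text. A small sketch of the WHERE fragment it produces for two hypothetical tables:

```python
tables = [
    {"table_schema": "public", "table_name": "orders"},     # hypothetical
    {"table_schema": "public", "table_name": "customers"},  # hypothetical
]
idx_table_filters = [
    f"(ns.nspname = '{t['table_schema']}' AND t.relname = '{t['table_name']}')"
    for t in tables
]
print(" OR ".join(idx_table_filters))
# (ns.nspname = 'public' AND t.relname = 'orders') OR (ns.nspname = 'public' AND t.relname = 'customers')
```
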
@@ -379,6 +431,37 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
                 await cursor.execute(pk_query)
                 return await cursor.fetchall()

+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for MySQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                # Build proper table filters
+                idx_table_filters = []
+                for table in tables:
+                    idx_table_filters.append(
+                        f"(TABLE_SCHEMA = '{table['table_schema']}' AND TABLE_NAME = '{table['table_name']}')"
+                    )
+
+                idx_query = f"""
+                    SELECT
+                        TABLE_SCHEMA AS table_schema,
+                        TABLE_NAME AS table_name,
+                        INDEX_NAME AS index_name,
+                        (NON_UNIQUE = 0) AS is_unique,
+                        INDEX_TYPE AS index_type,
+                        GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) AS column_names
+                    FROM INFORMATION_SCHEMA.STATISTICS
+                    WHERE ({" OR ".join(idx_table_filters)})
+                    GROUP BY table_schema, table_name, index_name, is_unique, index_type
+                    ORDER BY table_schema, table_name, index_name;
+                """
+                await cursor.execute(idx_query)
+                return await cursor.fetchall()
+
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information for MySQL."""
         pool = await connection.get_pool()
@@ -531,6 +614,47 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):

         return primary_keys

+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for SQLite."""
+        if not tables:
+            return []
+
+        indexes = []
+        for table in tables:
+            table_name = table["table_name"]
+
+            # Get index list using PRAGMA
+            pragma_query = f"PRAGMA index_list({table_name})"
+            table_indexes = await self._execute_query(connection, pragma_query)
+
+            for idx in table_indexes:
+                idx_name = idx["name"]
+                unique = bool(idx["unique"])
+
+                # Skip auto-generated primary key indexes
+                if idx_name.startswith("sqlite_autoindex_"):
+                    continue
+
+                # Get index columns using PRAGMA
+                pragma_info_query = f"PRAGMA index_info({idx_name})"
+                idx_cols = await self._execute_query(connection, pragma_info_query)
+                columns = [
+                    c["name"] for c in sorted(idx_cols, key=lambda r: r["seqno"])
+                ]
+
+                indexes.append(
+                    {
+                        "table_schema": "main",
+                        "table_name": table_name,
+                        "index_name": idx_name,
+                        "is_unique": unique,
+                        "index_type": None,  # SQLite only has B-tree currently
+                        "column_names": columns,
+                    }
+                )
+
+        return indexes
+
     async def list_tables_info(self, connection) -> list[dict[str, Any]]:
         """Get list of tables with basic information for SQLite."""
         # Get table names without row counts for better performance
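
The same PRAGMAs can be exercised directly with the standard-library sqlite3 module. A standalone sketch (table, index, and column names are made up, and sqlsaber's own _execute_query helper is async rather than this synchronous API):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute("CREATE TABLE orders (id INTEGER PRIMARY KEY, customer_id INT, created_at TEXT)")
conn.execute("CREATE INDEX orders_customer_idx ON orders (customer_id, created_at)")

for idx in conn.execute("PRAGMA index_list(orders)"):
    # index_info rows carry (seqno, cid, name); they come back in seqno order
    cols = [row["name"] for row in conn.execute(f"PRAGMA index_info({idx['name']})")]
    print(idx["name"], bool(idx["unique"]), cols)
# orders_customer_idx False ['customer_id', 'created_at']
```
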
@@ -589,12 +713,14 @@ class SchemaManager:
         columns = await self.introspector.get_columns_info(self.db, tables)
         foreign_keys = await self.introspector.get_foreign_keys_info(self.db, tables)
         primary_keys = await self.introspector.get_primary_keys_info(self.db, tables)
+        indexes = await self.introspector.get_indexes_info(self.db, tables)

         # Build schema structure
         schema_info = self._build_table_structure(tables)
         self._add_columns_to_schema(schema_info, columns)
         self._add_primary_keys_to_schema(schema_info, primary_keys)
         self._add_foreign_keys_to_schema(schema_info, foreign_keys)
+        self._add_indexes_to_schema(schema_info, indexes)

         return schema_info

@@ -613,6 +739,7 @@
                 "columns": {},
                 "primary_keys": [],
                 "foreign_keys": [],
+                "indexes": [],
             }
         return schema_info

@@ -666,6 +793,31 @@
                 }
             )

+    def _add_indexes_to_schema(
+        self, schema_info: dict[str, dict], indexes: list
+    ) -> None:
+        """Add index information to schema."""
+        for idx in indexes:
+            full_name = f"{idx['table_schema']}.{idx['table_name']}"
+            if full_name in schema_info:
+                # Handle different column name formats from different databases
+                if isinstance(idx["column_names"], list):
+                    columns = idx["column_names"]
+                else:
+                    # MySQL returns comma-separated string
+                    columns = (
+                        idx["column_names"].split(",") if idx["column_names"] else []
+                    )
+
+                schema_info[full_name]["indexes"].append(
+                    {
+                        "name": idx["index_name"],
+                        "columns": columns,
+                        "unique": idx["is_unique"],
+                        "type": idx.get("index_type"),
+                    }
+                )
+
     async def list_tables(self) -> dict[str, Any]:
         """Get a list of all tables with basic information."""
         tables = await self.introspector.list_tables_info(self.db)
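
_add_indexes_to_schema normalizes the per-backend row shapes into the IndexInfo entries defined earlier: asyncpg decodes the array_agg result to a Python list, while MySQL's GROUP_CONCAT arrives as a comma-joined string. A hedged illustration with made-up rows:

```python
pg_row = {"column_names": ["customer_id", "created_at"]}  # asyncpg: array -> list
mysql_row = {"column_names": "customer_id,created_at"}    # MySQL: GROUP_CONCAT -> str

for row in (pg_row, mysql_row):
    if isinstance(row["column_names"], list):
        columns = row["column_names"]
    else:
        columns = row["column_names"].split(",") if row["column_names"] else []
    print(columns)  # ['customer_id', 'created_at'] in both cases
```
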
sqlsaber/tools/sql_tools.py CHANGED
@@ -138,6 +138,12 @@ class IntrospectSchemaTool(SQLTool):
                     f"{fk['column']} -> {fk['references']['table']}.{fk['references']['column']}"
                     for fk in table_info["foreign_keys"]
                 ],
+                "indexes": [
+                    f"{idx['name']} ({', '.join(idx['columns'])})"
+                    + (" UNIQUE" if idx["unique"] else "")
+                    + (f" [{idx['type']}]" if idx["type"] else "")
+                    for idx in table_info["indexes"]
+                ],
             }

         return json.dumps(formatted_info)
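
The new "indexes" field renders each index as a single string. For a made-up non-unique btree index, the expression above produces:

```python
idx = {
    "name": "orders_customer_idx",  # hypothetical index entry
    "columns": ["customer_id", "created_at"],
    "unique": False,
    "type": "btree",
}
line = (
    f"{idx['name']} ({', '.join(idx['columns'])})"
    + (" UNIQUE" if idx["unique"] else "")
    + (f" [{idx['type']}]" if idx["type"] else "")
)
print(line)  # orders_customer_idx (customer_id, created_at) [btree]
```
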
sqlsaber-0.23.0.dist-info/METADATA → sqlsaber-0.24.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlsaber
-Version: 0.23.0
+Version: 0.24.0
 Summary: SQLsaber - Open-source agentic SQL assistant
 License-File: LICENSE
 Requires-Python: >=3.12
sqlsaber-0.23.0.dist-info/RECORD → sqlsaber-0.24.0.dist-info/RECORD
@@ -9,12 +9,12 @@ sqlsaber/cli/auth.py,sha256=jTsRgbmlGPlASSuIKmdjjwfqtKvjfKd_cTYxX0-QqaQ,7400
 sqlsaber/cli/commands.py,sha256=mjLG9i1bXf0TEroxkIxq5O7Hhjufz3Ad72cyJz7vE1k,8128
 sqlsaber/cli/completers.py,sha256=HsUPjaZweLSeYCWkAcgMl8FylQ1xjWBWYTEL_9F6xfU,6430
 sqlsaber/cli/database.py,sha256=JKtHSN-BFzBa14REf0phFVQB7d67m1M5FFaD8N6DdrY,12966
-sqlsaber/cli/display.py,sha256=9uXg0GqFXGAwBj1O7-i6aLPQkGq-iURrsKHk6PWTq1E,15025
+sqlsaber/cli/display.py,sha256=XuKiTWUw5k0U0P_f1K7zhDWX5KTO2DQVG0Q0XU9VEhs,16334
 sqlsaber/cli/interactive.py,sha256=7uM4LoXbhPJr8o5yNjICSzL0uxZkp1psWrVq4G9V0OI,13118
 sqlsaber/cli/memory.py,sha256=OufHFJFwV0_GGn7LvKRTJikkWhV1IwNIUDOxFPHXOaQ,7794
 sqlsaber/cli/models.py,sha256=ZewtwGQwhd9b-yxBAPKePolvI1qQG-EkmeWAGMqtWNQ,8986
 sqlsaber/cli/streaming.py,sha256=Eo5CNUgDGY1WYP90jwDA2aY7RefN-TfcStA6NyjUQTY,7076
-sqlsaber/cli/threads.py,sha256=HJ6v9wEv1as21B7IJglYs3q6LH7Plv2oheLbM5YEQQA,11549
+sqlsaber/cli/threads.py,sha256=ufDABlqndVJKd5COgSokcFRIKTgsGqXdHV84DVVm7MA,12743
 sqlsaber/config/__init__.py,sha256=olwC45k8Nc61yK0WmPUk7XHdbsZH9HuUAbwnmKe3IgA,100
 sqlsaber/config/api_keys.py,sha256=RqWQCko1tY7sES7YOlexgBH5Hd5ne_kGXHdBDNqcV2U,3649
 sqlsaber/config/auth.py,sha256=b5qB2h1doXyO9Bn8z0CcL8LAR2jF431gGXBGKLgTmtQ,2756
@@ -26,7 +26,7 @@ sqlsaber/config/settings.py,sha256=vgb_RXaM-7DgbxYDmWNw1cSyMqwys4j3qNCvM4bljwI,5
 sqlsaber/database/__init__.py,sha256=a_gtKRJnZVO8-fEZI7g3Z8YnGa6Nio-5Y50PgVp07ss,176
 sqlsaber/database/connection.py,sha256=1bDPEa6cmdh87gPfhNeBLpOdI0E2_2KlE74q_-4l_jI,18913
 sqlsaber/database/resolver.py,sha256=RPXF5EoKzvQDDLmPGNHYd2uG_oNICH8qvUjBp6iXmNY,3348
-sqlsaber/database/schema.py,sha256=r12qoN3tdtAXdO22EKlauAe7QwOm8lL2vTMM59XEMMY,26594
+sqlsaber/database/schema.py,sha256=Le5DXSgpsWyhMDuY6qpc_dsP4jjMXgJTRtAKq9S5Oog,32868
 sqlsaber/mcp/__init__.py,sha256=COdWq7wauPBp5Ew8tfZItFzbcLDSEkHBJSMhxzy8C9c,112
 sqlsaber/mcp/mcp.py,sha256=X12oCMZYAtgJ7MNuh5cqz8y3lALrOzkXWcfpuY0Ijxk,3950
 sqlsaber/memory/__init__.py,sha256=GiWkU6f6YYVV0EvvXDmFWe_CxarmDCql05t70MkTEWs,63
@@ -39,9 +39,9 @@ sqlsaber/tools/base.py,sha256=mHhvAj27BHmckyvuDLCPlAQdzABJyYxd9SJnaYAwwuA,1777
 sqlsaber/tools/enums.py,sha256=CH32mL-0k9ZA18911xLpNtsgpV6tB85TktMj6uqGz54,411
 sqlsaber/tools/instructions.py,sha256=X-x8maVkkyi16b6Tl0hcAFgjiYceZaSwyWTfmrvx8U8,9024
 sqlsaber/tools/registry.py,sha256=HWOQMsNIdL4XZS6TeNUyrL-5KoSDH6PHsWd3X66o-18,3211
-sqlsaber/tools/sql_tools.py,sha256=hM6tKqW5MDhFUt6MesoqhTUqIpq_5baIIDoN1MjDCXY,9647
-sqlsaber-0.23.0.dist-info/METADATA,sha256=olVI7W6UB-F6oZUqOjTWhJAvYtDbXXO0xoeOVYYmpfM,6178
-sqlsaber-0.23.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-sqlsaber-0.23.0.dist-info/entry_points.txt,sha256=qEbOB7OffXPFgyJc7qEIJlMEX5RN9xdzLmWZa91zCQQ,162
-sqlsaber-0.23.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-sqlsaber-0.23.0.dist-info/RECORD,,
+sqlsaber/tools/sql_tools.py,sha256=j4yRqfKokPFnZ_tEZPrWU5WStDc3Mexo1fWZ8KsmUjQ,9965
+sqlsaber-0.24.0.dist-info/METADATA,sha256=cPXj4eFPU-I6AWgHVVboKwu3zMmYKvs46LtrmZCBlhU,6178
+sqlsaber-0.24.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sqlsaber-0.24.0.dist-info/entry_points.txt,sha256=qEbOB7OffXPFgyJc7qEIJlMEX5RN9xdzLmWZa91zCQQ,162
+sqlsaber-0.24.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+sqlsaber-0.24.0.dist-info/RECORD,,