sqlsaber 0.6.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of sqlsaber might be problematic.

@@ -5,11 +5,13 @@ import os
  import platform
  import stat
  from pathlib import Path
- from typing import Any, Dict, Optional
+ from typing import Any

  import platformdirs

  from sqlsaber.config.api_keys import APIKeyManager
+ from sqlsaber.config.auth import AuthConfigManager, AuthMethod
+ from sqlsaber.config.oauth_flow import AnthropicOAuthFlow


  class ModelConfigManager:
@@ -40,7 +42,7 @@ class ModelConfigManager:
  except (OSError, PermissionError):
  pass

- def _load_config(self) -> Dict[str, Any]:
+ def _load_config(self) -> dict[str, Any]:
  """Load configuration from file."""
  if not self.config_file.exists():
  return {"model": self.DEFAULT_MODEL}
@@ -55,7 +57,7 @@ class ModelConfigManager:
  except (json.JSONDecodeError, IOError):
  return {"model": self.DEFAULT_MODEL}

- def _save_config(self, config: Dict[str, Any]) -> None:
+ def _save_config(self, config: dict[str, Any]) -> None:
  """Save configuration to file."""
  with open(self.config_file, "w") as f:
  json.dump(config, f, indent=2)
@@ -81,35 +83,44 @@ class Config:
  self.model_config_manager = ModelConfigManager()
  self.model_name = self.model_config_manager.get_model()
  self.api_key_manager = APIKeyManager()
- self.api_key = self._get_api_key()
+ self.auth_config_manager = AuthConfigManager()
+ self.oauth_flow = AnthropicOAuthFlow()
+
+ # Get authentication credentials based on configured method
+ self.auth_method = self.auth_config_manager.get_auth_method()
+ self.api_key = None
+ self.oauth_token = None
+
+ if self.auth_method == AuthMethod.CLAUDE_PRO:
+ # Try to get OAuth token and refresh if needed
+ try:
+ token = self.oauth_flow.refresh_token_if_needed()
+ if token:
+ self.oauth_token = token.access_token
+ except Exception:
+ # OAuth token unavailable, will need to re-authenticate
+ pass
+ else:
+ # Use API key authentication (default or explicitly configured)
+ self.api_key = self._get_api_key()

- def _get_api_key(self) -> Optional[str]:
+ def _get_api_key(self) -> str | None:
  """Get API key for the model provider using cascading logic."""
  model = self.model_name
-
- if model.startswith("openai:"):
- return self.api_key_manager.get_api_key("openai")
- elif model.startswith("anthropic:"):
+ if model.startswith("anthropic:"):
  return self.api_key_manager.get_api_key("anthropic")
- else:
- # For other providers, use generic key
- return self.api_key_manager.get_api_key("generic")

  def set_model(self, model: str) -> None:
  """Set the model and update configuration."""
  self.model_config_manager.set_model(model)
  self.model_name = model
- # Update API key for new model
- self.api_key = self._get_api_key()

  def validate(self):
  """Validate that necessary configuration is present."""
- if not self.api_key:
- model = self.model_name
- provider = "generic"
- if model.startswith("openai:"):
- provider = "OpenAI"
- elif model.startswith("anthropic:"):
- provider = "Anthropic"
-
- raise ValueError(f"{provider} API key not found.")
+ if self.auth_method == AuthMethod.CLAUDE_PRO and not self.oauth_token:
+ raise ValueError(
+ "OAuth token not available. Run 'saber auth setup' to authenticate with Claude Pro."
+ )
+ else:
+ if not self.api_key:
+ raise ValueError("Anthropic API key not found.")
@@ -1,7 +1,7 @@
  """Database connection management."""

  from abc import ABC, abstractmethod
- from typing import Any, Dict, List, Optional
+ from typing import Any
  from urllib.parse import urlparse, parse_qs
  import ssl
  from pathlib import Path
@@ -30,7 +30,7 @@ class BaseDatabaseConnection(ABC):
  pass

  @abstractmethod
- async def execute_query(self, query: str, *args) -> List[Dict[str, Any]]:
+ async def execute_query(self, query: str, *args) -> list[dict[str, Any]]:
  """Execute a query and return results as list of dicts.

  All queries run in a transaction that is rolled back at the end,
@@ -44,10 +44,10 @@ class PostgreSQLConnection(BaseDatabaseConnection):

  def __init__(self, connection_string: str):
  super().__init__(connection_string)
- self._pool: Optional[asyncpg.Pool] = None
+ self._pool: asyncpg.Pool | None = None
  self._ssl_context = self._create_ssl_context()

- def _create_ssl_context(self) -> Optional[ssl.SSLContext]:
+ def _create_ssl_context(self) -> ssl.SSLContext | None:
  """Create SSL context from connection string parameters."""
  parsed = urlparse(self.connection_string)
  if not parsed.query:
@@ -112,7 +112,7 @@ class PostgreSQLConnection(BaseDatabaseConnection):
  await self._pool.close()
  self._pool = None

- async def execute_query(self, query: str, *args) -> List[Dict[str, Any]]:
+ async def execute_query(self, query: str, *args) -> list[dict[str, Any]]:
  """Execute a query and return results as list of dicts.

  All queries run in a transaction that is rolled back at the end,
@@ -137,7 +137,7 @@ class MySQLConnection(BaseDatabaseConnection):

  def __init__(self, connection_string: str):
  super().__init__(connection_string)
- self._pool: Optional[aiomysql.Pool] = None
+ self._pool: aiomysql.Pool | None = None
  self._parse_connection_string()

  def _parse_connection_string(self):
@@ -217,7 +217,7 @@ class MySQLConnection(BaseDatabaseConnection):
  await self._pool.wait_closed()
  self._pool = None

- async def execute_query(self, query: str, *args) -> List[Dict[str, Any]]:
+ async def execute_query(self, query: str, *args) -> list[dict[str, Any]]:
  """Execute a query and return results as list of dicts.

  All queries run in a transaction that is rolled back at the end,
@@ -253,7 +253,7 @@ class SQLiteConnection(BaseDatabaseConnection):
  """SQLite connections are created per query, no persistent pool to close."""
  pass

- async def execute_query(self, query: str, *args) -> List[Dict[str, Any]]:
+ async def execute_query(self, query: str, *args) -> list[dict[str, Any]]:
  """Execute a query and return results as list of dicts.

  All queries run in a transaction that is rolled back at the end,
@@ -380,7 +380,7 @@ class CSVConnection(BaseDatabaseConnection):
  except Exception as e:
  raise ValueError(f"Error loading CSV file '{self.csv_path}': {str(e)}")

- async def execute_query(self, query: str, *args) -> List[Dict[str, Any]]:
+ async def execute_query(self, query: str, *args) -> list[dict[str, Any]]:
  """Execute a query and return results as list of dicts.

  All queries run in a transaction that is rolled back at the end,
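The docstrings above repeat that every query runs inside a transaction that is rolled back at the end, so even a mis-generated write cannot persist. A rough sketch of that pattern for the PostgreSQL case using plain asyncpg; this illustrates the idea under those assumptions and is not the package's actual implementation:

```python
import asyncpg


async def fetch_readonly(pool: asyncpg.Pool, query: str, *args) -> list[dict]:
    """Run a query inside a transaction that is always rolled back (sketch)."""
    async with pool.acquire() as conn:
        tr = conn.transaction()
        await tr.start()
        try:
            rows = await conn.fetch(query, *args)
            return [dict(row) for row in rows]
        finally:
            # Roll back unconditionally so no write can persist.
            await tr.rollback()
```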
@@ -2,7 +2,7 @@

  import time
  from abc import ABC, abstractmethod
- from typing import Any, Dict, Optional, Tuple
+ from typing import Any

  import aiosqlite

@@ -21,8 +21,8 @@ class BaseSchemaIntrospector(ABC):

  @abstractmethod
  async def get_tables_info(
- self, connection, table_pattern: Optional[str] = None
- ) -> Dict[str, Any]:
+ self, connection, table_pattern: str | None = None
+ ) -> dict[str, Any]:
  """Get tables information for the specific database type."""
  pass

@@ -42,7 +42,7 @@ class BaseSchemaIntrospector(ABC):
  pass

  @abstractmethod
- async def list_tables_info(self, connection) -> Dict[str, Any]:
+ async def list_tables_info(self, connection) -> dict[str, Any]:
  """Get list of tables with basic information."""
  pass

@@ -51,8 +51,8 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
  """PostgreSQL-specific schema introspection."""

  async def get_tables_info(
- self, connection, table_pattern: Optional[str] = None
- ) -> Dict[str, Any]:
+ self, connection, table_pattern: str | None = None
+ ) -> dict[str, Any]:
  """Get tables information for PostgreSQL."""
  pool = await connection.get_pool()
  async with pool.acquire() as conn:
@@ -182,7 +182,7 @@ class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
  """
  return await conn.fetch(pk_query)

- async def list_tables_info(self, connection) -> Dict[str, Any]:
+ async def list_tables_info(self, connection) -> dict[str, Any]:
  """Get list of tables with basic information for PostgreSQL."""
  pool = await connection.get_pool()
  async with pool.acquire() as conn:
@@ -214,8 +214,8 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
  """MySQL-specific schema introspection."""

  async def get_tables_info(
- self, connection, table_pattern: Optional[str] = None
- ) -> Dict[str, Any]:
+ self, connection, table_pattern: str | None = None
+ ) -> dict[str, Any]:
  """Get tables information for MySQL."""
  pool = await connection.get_pool()
  async with pool.acquire() as conn:
@@ -353,7 +353,7 @@ class MySQLSchemaIntrospector(BaseSchemaIntrospector):
  await cursor.execute(pk_query)
  return await cursor.fetchall()

- async def list_tables_info(self, connection) -> Dict[str, Any]:
+ async def list_tables_info(self, connection) -> dict[str, Any]:
  """Get list of tables with basic information for MySQL."""
  pool = await connection.get_pool()
  async with pool.acquire() as conn:
@@ -392,8 +392,8 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
  return await cursor.fetchall()

  async def get_tables_info(
- self, connection, table_pattern: Optional[str] = None
- ) -> Dict[str, Any]:
+ self, connection, table_pattern: str | None = None
+ ) -> dict[str, Any]:
  """Get tables information for SQLite."""
  where_conditions = ["type IN ('table', 'view')", "name NOT LIKE 'sqlite_%'"]
  params = ()
@@ -496,7 +496,7 @@ class SQLiteSchemaIntrospector(BaseSchemaIntrospector):

  return primary_keys

- async def list_tables_info(self, connection) -> Dict[str, Any]:
+ async def list_tables_info(self, connection) -> dict[str, Any]:
  """Get list of tables with basic information for SQLite."""
  # First get the table names
  tables_query = """
@@ -548,7 +548,7 @@ class SchemaManager:
  def __init__(self, db_connection: BaseDatabaseConnection, cache_ttl: int = 900):
  self.db = db_connection
  self.cache_ttl = cache_ttl # Default 15 minutes
- self._schema_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {}
+ self._schema_cache: dict[str, tuple[float, dict[str, Any]]] = {}

  # Select appropriate introspector based on connection type
  if isinstance(db_connection, PostgreSQLConnection):
@@ -567,8 +567,8 @@ class SchemaManager:
  self._schema_cache.clear()

  async def get_schema_info(
- self, table_pattern: Optional[str] = None
- ) -> Dict[str, SchemaInfo]:
+ self, table_pattern: str | None = None
+ ) -> dict[str, SchemaInfo]:
  """Get database schema information, optionally filtered by table pattern.

  Args:
@@ -587,7 +587,7 @@ class SchemaManager:
  self._schema_cache[cache_key] = (time.time(), schema_info)
  return schema_info

- def _get_cached_schema(self, cache_key: str) -> Optional[Dict[str, SchemaInfo]]:
+ def _get_cached_schema(self, cache_key: str) -> dict[str, SchemaInfo] | None:
  """Get schema from cache if available and not expired."""
  if cache_key in self._schema_cache:
  cached_time, cached_data = self._schema_cache[cache_key]
@@ -596,8 +596,8 @@ class SchemaManager:
  return None

  async def _fetch_schema_from_db(
- self, table_pattern: Optional[str]
- ) -> Dict[str, SchemaInfo]:
+ self, table_pattern: str | None
+ ) -> dict[str, SchemaInfo]:
  """Fetch schema information from database."""
  # Get all schema components
  tables = await self.introspector.get_tables_info(self.db, table_pattern)
@@ -613,7 +613,7 @@ class SchemaManager:

  return schema_info

- def _build_table_structure(self, tables: list) -> Dict[str, Dict]:
+ def _build_table_structure(self, tables: list) -> dict[str, dict]:
  """Build basic table structure from table info."""
  schema_info = {}
  for table in tables:
@@ -632,7 +632,7 @@ class SchemaManager:
  return schema_info

  def _add_columns_to_schema(
- self, schema_info: Dict[str, Dict], columns: list
+ self, schema_info: dict[str, dict], columns: list
  ) -> None:
  """Add column information to schema."""
  for col in columns:
@@ -656,7 +656,7 @@ class SchemaManager:
  schema_info[full_name]["columns"][col["column_name"]] = col_info

  def _add_primary_keys_to_schema(
- self, schema_info: Dict[str, Dict], primary_keys: list
+ self, schema_info: dict[str, dict], primary_keys: list
  ) -> None:
  """Add primary key information to schema."""
  for pk in primary_keys:
@@ -665,7 +665,7 @@ class SchemaManager:
  schema_info[full_name]["primary_keys"].append(pk["column_name"])

  def _add_foreign_keys_to_schema(
- self, schema_info: Dict[str, Dict], foreign_keys: list
+ self, schema_info: dict[str, dict], foreign_keys: list
  ) -> None:
  """Add foreign key information to schema."""
  for fk in foreign_keys:
@@ -681,8 +681,15 @@ class SchemaManager:
  }
  )

- async def list_tables(self) -> Dict[str, Any]:
+ async def list_tables(self) -> dict[str, Any]:
  """Get a list of all tables with basic information like row counts."""
+ # Check cache first
+ cache_key = "list_tables"
+ cached_data = self._get_cached_tables(cache_key)
+ if cached_data is not None:
+ return cached_data
+
+ # Fetch from database if not cached
  tables = await self.introspector.list_tables_info(self.db)

  # Format the result
@@ -699,4 +706,14 @@ class SchemaManager:
  }
  )

+ # Cache the result
+ self._schema_cache[cache_key] = (time.time(), result)
  return result
+
+ def _get_cached_tables(self, cache_key: str) -> dict[str, Any] | None:
+ """Get table list from cache if available and not expired."""
+ if cache_key in self._schema_cache:
+ cached_time, cached_data = self._schema_cache[cache_key]
+ if time.time() - cached_time < self.cache_ttl:
+ return cached_data
+ return None
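The new `list_tables` caching reuses `_schema_cache`, a dict of `(timestamp, value)` pairs checked against `cache_ttl`. A standalone sketch of that TTL-cache shape, with illustrative names rather than the package's:

```python
import time
from typing import Any

# Same (timestamp, value) shape as SchemaManager._schema_cache.
_cache: dict[str, tuple[float, Any]] = {}
CACHE_TTL = 900  # seconds; mirrors the 15-minute default in SchemaManager


def get_cached(key: str) -> Any | None:
    """Return the cached value if present and not expired, else None."""
    if key in _cache:
        cached_time, cached_value = _cache[key]
        if time.time() - cached_time < CACHE_TTL:
            return cached_value
    return None


def put_cached(key: str, value: Any) -> None:
    """Store a value along with the current timestamp."""
    _cache[key] = (time.time(), value)
```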
sqlsaber/mcp/mcp.py CHANGED
@@ -1,7 +1,6 @@
  """FastMCP server implementation for SQLSaber."""

  import json
- from typing import Optional

  from fastmcp import FastMCP

@@ -32,7 +31,7 @@ mcp = FastMCP(name="SQL Assistant", instructions=INSTRUCTIONS)
  config_manager = DatabaseConfigManager()


- async def _create_agent_for_database(database_name: str) -> Optional[MCPSQLAgent]:
+ async def _create_agent_for_database(database_name: str) -> MCPSQLAgent | None:
  """Create a MCPSQLAgent for the specified database."""
  try:
  # Look up configured database connection
@@ -92,7 +91,7 @@ async def list_tables(database: str) -> str:


  @mcp.tool
- async def introspect_schema(database: str, table_pattern: Optional[str] = None) -> str:
+ async def introspect_schema(database: str, table_pattern: str | None = None) -> str:
  """
  Introspect database schema to understand table structures. Use optional pattern to filter tables (e.g., 'public.users', 'user%', '%order%').
  """
@@ -112,7 +111,7 @@ async def introspect_schema(database: str, table_pattern: Optional[str] = None)


  @mcp.tool
- async def execute_sql(database: str, query: str, limit: Optional[int] = 100) -> str:
+ async def execute_sql(database: str, query: str, limit: int | None = 100) -> str:
  """Execute a SQL query against the specified database."""
  try:
  agent = await _create_agent_for_database(database)
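The mcp.py hunks carry the same annotation cleanup into the FastMCP tool signatures. A minimal, self-contained sketch of that decorator pattern; the tool body and server name are placeholders, not SQLSaber's implementation:

```python
from fastmcp import FastMCP

mcp = FastMCP(name="Example SQL server")


@mcp.tool
async def execute_sql(database: str, query: str, limit: int | None = 100) -> str:
    # Placeholder body; the real tool delegates to an MCPSQLAgent.
    return f"would run {query!r} against {database!r} (limit={limit})"


if __name__ == "__main__":
    mcp.run()
```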
@@ -1,7 +1,5 @@
  """Memory manager for handling database-specific context and memories."""

- from typing import List, Optional
-
  from sqlsaber.memory.storage import Memory, MemoryStorage


@@ -15,7 +13,7 @@ class MemoryManager:
  """Add a new memory for the specified database."""
  return self.storage.add_memory(database_name, content)

- def get_memories(self, database_name: str) -> List[Memory]:
+ def get_memories(self, database_name: str) -> list[Memory]:
  """Get all memories for the specified database."""
  return self.storage.get_memories(database_name)

@@ -27,7 +25,7 @@ class MemoryManager:
  """Clear all memories for the specified database."""
  return self.storage.clear_memories(database_name)

- def get_memory_by_id(self, database_name: str, memory_id: str) -> Optional[Memory]:
+ def get_memory_by_id(self, database_name: str, memory_id: str) -> Memory | None:
  """Get a specific memory by ID."""
  return self.storage.get_memory_by_id(database_name, memory_id)

@@ -72,6 +70,6 @@ Use this context to better understand the user's needs and provide more relevant
  ],
  }

- def list_databases_with_memories(self) -> List[str]:
+ def list_databases_with_memories(self) -> list[str]:
  """List all databases that have memories."""
  return self.storage.list_databases_with_memories()
@@ -8,7 +8,6 @@ import time
  import uuid
  from dataclasses import dataclass
  from pathlib import Path
- from typing import Dict, List, Optional

  import platformdirs

@@ -21,7 +20,7 @@ class Memory:
  content: str
  timestamp: float

- def to_dict(self) -> Dict:
+ def to_dict(self) -> dict:
  """Convert memory to dictionary for JSON serialization."""
  return {
  "id": self.id,
@@ -30,7 +29,7 @@ class Memory:
  }

  @classmethod
- def from_dict(cls, data: Dict) -> "Memory":
+ def from_dict(cls, data: dict) -> "Memory":
  """Create Memory from dictionary."""
  return cls(
  id=data["id"],
@@ -79,7 +78,7 @@ class MemoryStorage:
  """Get the memory file path for a specific database."""
  return self.memory_dir / f"{database_name}.json"

- def _load_memories(self, database_name: str) -> List[Memory]:
+ def _load_memories(self, database_name: str) -> list[Memory]:
  """Load memories for a specific database."""
  memory_file = self._get_memory_file(database_name)

@@ -96,7 +95,7 @@ class MemoryStorage:
  except (json.JSONDecodeError, IOError, KeyError):
  return []

- def _save_memories(self, database_name: str, memories: List[Memory]) -> None:
+ def _save_memories(self, database_name: str, memories: list[Memory]) -> None:
  """Save memories for a specific database."""
  memory_file = self._get_memory_file(database_name)

@@ -125,7 +124,7 @@ class MemoryStorage:

  return memory

- def get_memories(self, database_name: str) -> List[Memory]:
+ def get_memories(self, database_name: str) -> list[Memory]:
  """Get all memories for the specified database."""
  return self._load_memories(database_name)

@@ -152,7 +151,7 @@ class MemoryStorage:

  return count

- def get_memory_by_id(self, database_name: str, memory_id: str) -> Optional[Memory]:
+ def get_memory_by_id(self, database_name: str, memory_id: str) -> Memory | None:
  """Get a specific memory by ID."""
  memories = self._load_memories(database_name)
  return next((m for m in memories if m.id == memory_id), None)
@@ -161,7 +160,7 @@ class MemoryStorage:
  """Check if database has any memories."""
  return len(self._load_memories(database_name)) > 0

- def list_databases_with_memories(self) -> List[str]:
+ def list_databases_with_memories(self) -> list[str]:
  """List all databases that have memories."""
  databases = []

sqlsaber/models/events.py CHANGED
@@ -1,6 +1,6 @@
  """Event models for streaming and responses."""

- from typing import Any, Dict, List, Optional
+ from typing import Any


  class StreamEvent:
@@ -17,10 +17,10 @@ class SQLResponse:

  def __init__(
  self,
- query: Optional[str] = None,
+ query: str | None = None,
  explanation: str = "",
- results: Optional[List[Dict[str, Any]]] = None,
- error: Optional[str] = None,
+ results: list[dict[str, Any]] | None = None,
+ error: str | None = None,
  ):
  self.query = query
  self.explanation = explanation

sqlsaber/models/types.py CHANGED
@@ -1,6 +1,6 @@
  """Type definitions for SQLSaber."""

- from typing import Any, Dict, List, Optional, TypedDict
+ from typing import Any, TypedDict


  class ColumnInfo(TypedDict):
@@ -8,17 +8,17 @@ class ColumnInfo(TypedDict):

  data_type: str
  nullable: bool
- default: Optional[str]
- max_length: Optional[int]
- precision: Optional[int]
- scale: Optional[int]
+ default: str | None
+ max_length: int | None
+ precision: int | None
+ scale: int | None


  class ForeignKeyInfo(TypedDict):
  """Type definition for foreign key information."""

  column: str
- references: Dict[str, str] # {"table": "schema.table", "column": "column_name"}
+ references: dict[str, str] # {"table": "schema.table", "column": "column_name"}


  class SchemaInfo(TypedDict):
@@ -27,9 +27,9 @@ class SchemaInfo(TypedDict):
  schema: str
  name: str
  type: str
- columns: Dict[str, ColumnInfo]
- primary_keys: List[str]
- foreign_keys: List[ForeignKeyInfo]
+ columns: dict[str, ColumnInfo]
+ primary_keys: list[str]
+ foreign_keys: list[ForeignKeyInfo]


  class ToolDefinition(TypedDict):
@@ -37,4 +37,4 @@ class ToolDefinition(TypedDict):

  name: str
  description: str
- input_schema: Dict[str, Any]
+ input_schema: dict[str, Any]
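Most hunks in this release apply one mechanical change: `typing.Dict`, `List`, `Optional`, and `Tuple` are replaced by built-in generics and `X | None` unions, which are native syntax on the Python >= 3.12 floor that METADATA declares. A small before/after illustration, not taken from the package:

```python
from typing import Any

# 0.6.0 style (imports now removed):
#   from typing import Dict, List, Optional
#   def load(path: str) -> Optional[Dict[str, List[int]]]: ...


# 0.8.0 style used throughout the wheel:
def load(path: str) -> dict[str, list[int]] | None:
    """Same signature expressed with PEP 585 generics and PEP 604 unions."""
    return None


payload: dict[str, Any] = {"rows": [1, 2, 3]}
```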
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlsaber
- Version: 0.6.0
+ Version: 0.8.0
  Summary: SQLSaber - Agentic SQL assistant like Claude Code
  License-File: LICENSE
  Requires-Python: >=3.12
@@ -212,23 +212,24 @@ The MCP server uses your existing SQLSaber database configurations, so make sure

  ## How It Works

- SQLSaber uses an intelligent three-step process optimized for minimal token usage:
+ SQLSaber uses a multi-step process to gather the right context, provide it to the model, and execute SQL queries to get the right answers:
+
+ ![](./sqlsaber.svg)

  ### 🔍 Discovery Phase

  1. **List Tables Tool**: Quickly discovers available tables with row counts
- 2. **Pattern Matching**: Identifies relevant tables based on your query using SQL LIKE patterns
+ 2. **Pattern Matching**: Identifies relevant tables based on your query

  ### 📋 Schema Analysis

- 3. **Smart Introspection**: Analyzes only the specific table structures needed for your query
- 4. **Selective Loading**: Fetches schema information only for relevant tables
+ 3. **Smart Schema Introspection**: Analyzes only the specific table structures needed for your query

  ### ⚡ Execution Phase

- 5. **SQL Generation**: Creates optimized SQL queries based on natural language input
- 6. **Safe Execution**: Runs queries with built-in protections against destructive operations
- 7. **Result Formatting**: Presents results with syntax highlighting and explanations
+ 4. **SQL Generation**: Creates optimized SQL queries based on natural language input
+ 5. **Safe Execution**: Runs read-only queries with built-in protections against destructive operations
+ 6. **Result Formatting**: Presents results with explanations in tables and optionally, visualizes using plots

  ## Contributing

@@ -0,0 +1,46 @@
+ sqlsaber/__init__.py,sha256=QCFi8xTVMohelfi7zOV1-6oLCcGoiXoOcKQY-HNBCk8,66
+ sqlsaber/__main__.py,sha256=RIHxWeWh2QvLfah-2OkhI5IJxojWfy4fXpMnVEJYvxw,78
+ sqlsaber/agents/__init__.py,sha256=LWeSeEUE4BhkyAYFF3TE-fx8TtLud3oyEtyB8ojFJgo,167
+ sqlsaber/agents/anthropic.py,sha256=CBHneR5NJhu155d0-D1mSGOcTH7kmbXZSLv2mVQotSM,22128
+ sqlsaber/agents/base.py,sha256=Cl5ZV4dfgjslOAq8jbrnt5kX-NM_8QmjacWzb0hvbzs,10527
+ sqlsaber/agents/mcp.py,sha256=FKtXgDrPZ2-xqUYCw2baI5JzrWekXaC5fjkYW1_Mg50,827
+ sqlsaber/agents/streaming.py,sha256=LaSeMTlxuJFRArJVqDly5-_KgcePiCCKPKfMxfB4oGs,521
+ sqlsaber/cli/__init__.py,sha256=qVSLVJLLJYzoC6aj6y9MFrzZvAwc4_OgxU9DlkQnZ4M,86
+ sqlsaber/cli/auth.py,sha256=tm3f-qIuNS0nQbU2DEI7ezWG092mayNW1GuoiwdV8hI,5047
+ sqlsaber/cli/commands.py,sha256=Ob505FV1kfaRKoW_agon4Q82772QmLjxISfvbXGOHE4,5256
+ sqlsaber/cli/completers.py,sha256=HsUPjaZweLSeYCWkAcgMl8FylQ1xjWBWYTEL_9F6xfU,6430
+ sqlsaber/cli/database.py,sha256=mWpMPcISUokYIiAMU4M_g8YeI-Fz5YU_R3PYs-GigCw,12588
+ sqlsaber/cli/display.py,sha256=XcBkjdG7RoM_ijHgv0VWqWleT5CCTm0Hcp1sJoE1FKE,9979
+ sqlsaber/cli/interactive.py,sha256=sQQXO8RcbVwxIBArNUlv-8ePhLn3UUdx6zUl44l8tow,7395
+ sqlsaber/cli/memory.py,sha256=OFspjaZ2RaYrBdSDVOD-9_6T8NbqedHEn5FztGkLUlc,7621
+ sqlsaber/cli/models.py,sha256=7bvIykGPTJu3-3tpPinr44GBkPIQhoeKI3d3Kgn3jOI,7783
+ sqlsaber/cli/streaming.py,sha256=WfhFd5ntq2HStpJZwWJ0C5uyXKc3aU14eo8HdjzW1o0,3767
+ sqlsaber/clients/__init__.py,sha256=jcMoVsT92U6nQrfotCp1h0ggskJPAcgeYarqQl1qEBg,171
+ sqlsaber/clients/anthropic.py,sha256=umRmuzpmJdYO7hO3biAZXO9T_sb6Vv010o6zqn03is8,9947
+ sqlsaber/clients/base.py,sha256=RLFJ3NV75Z6keiu9mnh9zrMZK1HwdeUby0e3oeJMtyw,935
+ sqlsaber/clients/exceptions.py,sha256=6OoCSxvuA13I3dML2Zngygl9MdaObISh1UHvBB3yUq0,3408
+ sqlsaber/clients/models.py,sha256=fOvnkW8NQSdn8Oqfk3-5dP4TylLh7C9wOvuNQYw184A,7016
+ sqlsaber/clients/streaming.py,sha256=CwdoocLAyW_GjZm2XcLb33Sa99w5fyb7dU-27FFpePQ,8319
+ sqlsaber/config/__init__.py,sha256=olwC45k8Nc61yK0WmPUk7XHdbsZH9HuUAbwnmKe3IgA,100
+ sqlsaber/config/api_keys.py,sha256=wnWlYy26AkkevZ1Vln6avYRBDLPRzfrHkj-fPojkxaQ,3624
+ sqlsaber/config/auth.py,sha256=b5qB2h1doXyO9Bn8z0CcL8LAR2jF431gGXBGKLgTmtQ,2756
+ sqlsaber/config/database.py,sha256=c6q3l4EvoBch1ckYHA70hf6L7fSOY-sItnLCpvJiPrA,11357
+ sqlsaber/config/oauth_flow.py,sha256=A3bSXaBLzuAfXV2ZPA94m9NV33c2MyL6M4ii9oEkswQ,10291
+ sqlsaber/config/oauth_tokens.py,sha256=C9z35hyx-PvSAYdC1LNf3rg9_wsEIY56hkEczelbad0,6015
+ sqlsaber/config/settings.py,sha256=H2NrTaB7Vy5YWhg6k1g94XiQHZq0LZOQEd1ILtx7GHw,4567
+ sqlsaber/database/__init__.py,sha256=a_gtKRJnZVO8-fEZI7g3Z8YnGa6Nio-5Y50PgVp07ss,176
+ sqlsaber/database/connection.py,sha256=sZVGNMzMwiM11GrsLLPwR8A5ugzJ5O0TCdkrt0KVRuI,15123
+ sqlsaber/database/schema.py,sha256=B4emtbaNiqjz6aGBUQYYwARsTMqBilvWSurNg_zKu9U,28600
+ sqlsaber/mcp/__init__.py,sha256=COdWq7wauPBp5Ew8tfZItFzbcLDSEkHBJSMhxzy8C9c,112
+ sqlsaber/mcp/mcp.py,sha256=YH4crygqb5_Y94nsns6d-26FZCTlDPOh3tf-ghihzDM,4440
+ sqlsaber/memory/__init__.py,sha256=GiWkU6f6YYVV0EvvXDmFWe_CxarmDCql05t70MkTEWs,63
+ sqlsaber/memory/manager.py,sha256=p3fybMVfH-E4ApT1ZRZUnQIWSk9dkfUPCyfkmA0HALs,2739
+ sqlsaber/memory/storage.py,sha256=ne8szLlGj5NELheqLnI7zu21V8YS4rtpYGGC7tOmi-s,5745
+ sqlsaber/models/__init__.py,sha256=RJ7p3WtuSwwpFQ1Iw4_DHV2zzCtHqIzsjJzxv8kUjUE,287
+ sqlsaber/models/events.py,sha256=89SXKb5GGpH01yTr2kPEBhzp9xv35RFIYuFdAZSIPoE,721
+ sqlsaber/models/types.py,sha256=w-zk81V2dtveuteej36_o1fDK3So428j3P2rAejU62U,862
+ sqlsaber-0.8.0.dist-info/METADATA,sha256=jksWiXSR2Qy0O2KgeYm6pCY5boEBM1kO6lJuYSHV6_Y,5986
+ sqlsaber-0.8.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ sqlsaber-0.8.0.dist-info/entry_points.txt,sha256=jmFo96Ylm0zIKXJBwhv_P5wQ7SXP9qdaBcnTp8iCEe8,195
+ sqlsaber-0.8.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ sqlsaber-0.8.0.dist-info/RECORD,,