sqlsaber 0.30.2__py3-none-any.whl → 0.32.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of sqlsaber might be problematic.
- sqlsaber/cli/auth.py +15 -1
- sqlsaber/cli/commands.py +74 -0
- sqlsaber/cli/database.py +39 -0
- sqlsaber/cli/interactive.py +8 -2
- sqlsaber/cli/memory.py +26 -0
- sqlsaber/cli/models.py +19 -0
- sqlsaber/cli/streaming.py +5 -0
- sqlsaber/cli/theme.py +8 -0
- sqlsaber/cli/threads.py +17 -0
- sqlsaber/config/logging.py +196 -0
- sqlsaber/config/oauth_flow.py +22 -10
- sqlsaber/config/oauth_tokens.py +15 -6
- sqlsaber/threads/storage.py +31 -17
- {sqlsaber-0.30.2.dist-info → sqlsaber-0.32.0.dist-info}/METADATA +2 -1
- {sqlsaber-0.30.2.dist-info → sqlsaber-0.32.0.dist-info}/RECORD +18 -17
- {sqlsaber-0.30.2.dist-info → sqlsaber-0.32.0.dist-info}/WHEEL +0 -0
- {sqlsaber-0.30.2.dist-info → sqlsaber-0.32.0.dist-info}/entry_points.txt +0 -0
- {sqlsaber-0.30.2.dist-info → sqlsaber-0.32.0.dist-info}/licenses/LICENSE +0 -0
sqlsaber/cli/models.py
CHANGED
```diff
@@ -11,9 +11,11 @@ from rich.table import Table
 from sqlsaber.config import providers
 from sqlsaber.config.settings import Config
 from sqlsaber.theme.manager import create_console
+from sqlsaber.config.logging import get_logger
 
 # Global instances for CLI commands
 console = create_console()
+logger = get_logger(__name__)
 
 # Create the model management CLI app
 models_app = cyclopts.App(
@@ -104,9 +106,11 @@ class ModelManager:
 
             # Sort by provider then by name
             results.sort(key=lambda x: (x["provider"], x["name"]))
+            logger.info("models.fetch.success", count=len(results))
             return results
         except Exception as e:
             console.print(f"[error]Error fetching models: {e}[/error]")
+            logger.warning("models.fetch.error", error=str(e))
             return []
 
     def get_current_model(self) -> str:
@@ -119,9 +123,11 @@ class ModelManager:
         try:
             config = Config()
             config.set_model(model_id)
+            logger.info("models.set.success", model=model_id)
             return True
         except Exception as e:
             console.print(f"[error]Error setting model: {e}[/error]")
+            logger.error("models.set.error", model=model_id, error=str(e))
             return False
 
     def reset_model(self) -> bool:
@@ -135,6 +141,7 @@ model_manager = ModelManager()
 @models_app.command
 def list():
     """List available AI models."""
+    logger.info("models.list.start")
 
     async def fetch_and_display():
         console.print("[blue]Fetching available models...[/blue]")
@@ -144,6 +151,7 @@ def list():
             console.print(
                 "[warning]No models available or failed to fetch models[/warning]"
             )
+            logger.info("models.list.empty")
             return
 
         table = Table(title="Available Models")
@@ -180,6 +188,7 @@ def list():
 
         console.print(table)
         console.print(f"\n[dim]Current model: {current_model}[/dim]")
+        logger.info("models.list.complete", current=current_model, count=len(models))
 
     asyncio.run(fetch_and_display())
 
@@ -187,6 +196,7 @@ def list():
 @models_app.command
 def set():
     """Set the AI model to use."""
+    logger.info("models.set.start")
 
     async def interactive_set():
         from sqlsaber.application.model_selection import choose_model, fetch_models
@@ -197,6 +207,7 @@ def set():
 
         if not models:
             console.print("[error]Failed to fetch models. Cannot set model.[/error]")
+            logger.error("models.set.no_models")
             sys.exit(1)
 
         prompter = AsyncPrompter()
@@ -207,11 +218,14 @@ def set():
         if selected_model:
             if model_manager.set_model(selected_model):
                 console.print(f"[green]✓ Model set to: {selected_model}[/green]")
+                logger.info("models.set.done", model=selected_model)
             else:
                 console.print("[error]✗ Failed to set model[/error]")
+                logger.error("models.set.failed", model=selected_model)
                 sys.exit(1)
         else:
             console.print("[warning]Operation cancelled[/warning]")
+            logger.info("models.set.cancelled")
 
     asyncio.run(interactive_set())
 
@@ -221,11 +235,13 @@ def current():
     """Show the currently configured model."""
     current = model_manager.get_current_model()
     console.print(f"Current model: [cyan]{current}[/cyan]")
+    logger.info("models.current", model=current)
 
 
 @models_app.command
 def reset():
     """Reset to the default model."""
+    logger.info("models.reset.start")
 
     async def interactive_reset():
         if await questionary.confirm(
@@ -235,11 +251,14 @@ def reset():
                 console.print(
                     f"[green]✓ Model reset to default: {ModelManager.DEFAULT_MODEL}[/green]"
                 )
+                logger.info("models.reset.done", model=ModelManager.DEFAULT_MODEL)
             else:
                 console.print("[error]✗ Failed to reset model[/error]")
+                logger.error("models.reset.failed")
                 sys.exit(1)
         else:
             console.print("[warning]Operation cancelled[/warning]")
+            logger.info("models.reset.cancelled")
 
     asyncio.run(interactive_reset())
 
```
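Every CLI module touched in this release follows the same pattern: a module-level logger obtained from `sqlsaber.config.logging.get_logger(__name__)` plus event-style calls that pass context as keyword arguments. A minimal sketch of that calling convention, using structlog directly rather than sqlsaber's wrapper (the model identifier and error text below are placeholder values, not taken from the diff):

```python
import structlog

# Mirrors the pattern above: obtain one named logger per module.
logger = structlog.get_logger(__name__)

# Event name first, context as keyword arguments. With a JSON renderer this
# becomes e.g. {"event": "models.set.success", "model": "...", "level": "info", ...}.
logger.info("models.set.success", model="example-model-id")            # placeholder model id
logger.error("models.set.error", model="example-model-id", error="config not writable")
```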
sqlsaber/cli/streaming.py
CHANGED
```diff
@@ -25,6 +25,7 @@ from pydantic_ai.messages import (
 from rich.console import Console
 
 from sqlsaber.cli.display import DisplayManager
+from sqlsaber.config.logging import get_logger
 
 if TYPE_CHECKING:
     from sqlsaber.agents.pydantic_ai_agent import SQLSaberAgent
@@ -40,6 +41,7 @@ class StreamingQueryHandler:
     def __init__(self, console: Console):
         self.console = console
         self.display = DisplayManager(console)
+        self.log = get_logger(__name__)
 
     async def _event_stream_handler(
         self, ctx: RunContext, event_stream: AsyncIterable[AgentStreamEvent]
@@ -140,6 +142,7 @@ class StreamingQueryHandler:
         # Prepare nicer code block rendering for Markdown
         self.display.live.prepare_code_blocks()
         try:
+            self.log.info("streaming.execute.start")
            # If Anthropic OAuth, inject SQLsaber instructions before the first user prompt
             prepared_prompt: str | list[str] = user_query
             no_history = not message_history
@@ -157,11 +160,13 @@ class StreamingQueryHandler:
                 message_history=message_history,
                 event_stream_handler=self._event_stream_handler,
             )
+            self.log.info("streaming.execute.end")
             return run
         except asyncio.CancelledError:
             # Show interruption message outside of Live
             self.display.show_newline()
             self.console.print("[warning]Query interrupted[/warning]")
+            self.log.info("streaming.execute.cancelled")
             return None
         finally:
             # End any active status and live markdown segments
```
sqlsaber/cli/theme.py
CHANGED
```diff
@@ -12,8 +12,10 @@ from platformdirs import user_config_dir
 from pygments.styles import get_all_styles
 
 from sqlsaber.theme.manager import DEFAULT_THEME_NAME, create_console
+from sqlsaber.config.logging import get_logger
 
 console = create_console()
+logger = get_logger(__name__)
 
 # Create the theme management CLI app
 theme_app = cyclopts.App(
@@ -71,6 +73,7 @@ class ThemeManager:
             return True
         except Exception as e:
             console.print(f"[error]Error setting theme: {e}[/error]")
+            logger.error("theme.set.error", theme=theme_name, error=str(e))
             return False
 
     def reset_theme(self) -> bool:
@@ -81,6 +84,7 @@ class ThemeManager:
             return True
         except Exception as e:
             console.print(f"[error]Error resetting theme: {e}[/error]")
+            logger.error("theme.reset.error", error=str(e))
             return False
 
     def get_available_themes(self) -> list[str]:
@@ -94,6 +98,7 @@ theme_manager = ThemeManager()
 @theme_app.command
 def set():
     """Set the theme to use for syntax highlighting."""
+    logger.info("theme.set.start")
 
     async def interactive_set():
         themes = theme_manager.get_available_themes()
@@ -119,11 +124,13 @@ def set():
         if selected_theme:
             if theme_manager.set_theme(selected_theme):
                 console.print(f"[success]✓ Theme set to: {selected_theme}[/success]")
+                logger.info("theme.set.done", theme=selected_theme)
             else:
                 console.print("[error]✗ Failed to set theme[/error]")
                 sys.exit(1)
         else:
             console.print("[warning]Operation cancelled[/warning]")
+            logger.info("theme.set.cancelled")
 
     asyncio.run(interactive_set())
 
@@ -136,6 +143,7 @@ def reset():
             console.print(
                 f"[success]✓ Theme reset to default: {DEFAULT_THEME_NAME}[/success]"
             )
+            logger.info("theme.reset.done", theme=DEFAULT_THEME_NAME)
         else:
             console.print("[error]✗ Failed to reset theme[/error]")
             sys.exit(1)
```
sqlsaber/cli/threads.py
CHANGED
```diff
@@ -12,12 +12,14 @@ from rich.markdown import Markdown
 from rich.panel import Panel
 from rich.table import Table
 
+from sqlsaber.config.logging import get_logger
 from sqlsaber.theme.manager import create_console, get_theme_manager
 from sqlsaber.threads import ThreadStorage
 
 # Globals consistent with other CLI modules
 console = create_console()
 tm = get_theme_manager()
+logger = get_logger(__name__)
 
 
 threads_app = cyclopts.App(
@@ -219,10 +221,12 @@ def list_threads(
     ] = 50,
 ):
     """List threads (optionally filtered by database)."""
+    logger.info("threads.cli.list.start", database=database, limit=limit)
     store = ThreadStorage()
     threads = asyncio.run(store.list_threads(database_name=database, limit=limit))
     if not threads:
         console.print("No threads found.")
+        logger.info("threads.cli.list.empty")
         return
     table = Table(title="Threads")
     table.add_column("ID", style=tm.style("info"))
@@ -239,6 +243,7 @@ def list_threads(
             t.model_name or "-",
         )
     console.print(table)
+    logger.info("threads.cli.list.complete", count=len(threads))
 
 
 @threads_app.command
@@ -246,10 +251,12 @@ def show(
     thread_id: Annotated[str, cyclopts.Parameter(help="Thread ID")],
 ):
     """Show thread metadata and render the full transcript."""
+    logger.info("threads.cli.show.start", thread_id=thread_id)
     store = ThreadStorage()
     thread = asyncio.run(store.get_thread(thread_id))
     if not thread:
         console.print(f"[error]Thread not found:[/error] {thread_id}")
+        logger.error("threads.cli.show.not_found", thread_id=thread_id)
         return
     msgs = asyncio.run(store.get_thread_messages(thread_id))
     console.print(f"[bold]Thread: {thread.id}[/bold]")
@@ -261,6 +268,7 @@ def show(
     console.print("")
 
     _render_transcript(console, msgs, None)
+    logger.info("threads.cli.show.complete", thread_id=thread_id)
 
 
 @threads_app.command
@@ -272,6 +280,7 @@ def resume(
     ] = None,
 ):
     """Render transcript, then resume thread in interactive mode."""
+    logger.info("threads.cli.resume.start", thread_id=thread_id, database=database)
     store = ThreadStorage()
 
     async def _run() -> None:
@@ -288,12 +297,14 @@ def resume(
         thread = await store.get_thread(thread_id)
         if not thread:
             console.print(f"[error]Thread not found:[/error] {thread_id}")
+            logger.error("threads.cli.resume.not_found", thread_id=thread_id)
             return
         db_selector = database or thread.database_name
         if not db_selector:
             console.print(
                 "[error]No database specified or stored with this thread.[/error]"
             )
+            logger.error("threads.cli.resume.no_database", thread_id=thread_id)
             return
         try:
             config_manager = DatabaseConfigManager()
@@ -302,6 +313,9 @@ def resume(
             db_name = resolved.name
         except DatabaseResolutionError as e:
             console.print(f"[error]Database resolution error:[/error] {e}")
+            logger.error(
+                "threads.cli.resume.resolve_failed", thread_id=thread_id, error=str(e)
+            )
             return
 
         db_conn = DatabaseConnection(connection_string)
@@ -330,6 +344,7 @@ def resume(
         finally:
             await db_conn.close()
             console.print("\n[success]Goodbye![/success]")
+            logger.info("threads.cli.resume.closed")
 
     asyncio.run(_run())
 
@@ -344,11 +359,13 @@ def prune(
     ] = 30,
 ):
     """Prune old threads by last activity timestamp."""
+    logger.info("threads.cli.prune.start", days=days)
     store = ThreadStorage()
 
     async def _run() -> None:
         deleted = await store.prune_threads(older_than_days=days)
         console.print(f"[success]✓ Pruned {deleted} thread(s).[/success]")
+        logger.info("threads.cli.prune.complete", deleted=deleted)
 
     asyncio.run(_run())
 
```
sqlsaber/config/logging.py
ADDED (new file, +196 lines)
```python
"""Central logging configuration for SQLSaber using structlog.

This module provides a single entry point `setup_logging()` to configure
structured logging across the project, plus a helper `get_logger()` to
retrieve namespaced loggers.

Defaults:
- JSON logs to a rotating file under the user log directory.
- Optional pretty console logs when `SQLSABER_DEBUG=1` or
  `SQLSABER_LOG_TO_STDERR=1`.

Environment variables:
- `SQLSABER_LOG_LEVEL` (default: INFO)
- `SQLSABER_LOG_FILE` (default: <user_log_dir>/sqlsaber.log)
- `SQLSABER_LOG_TO_STDERR` (0/1, default: 0)
- `SQLSABER_LOG_ROTATION` ("time" or "size", default: "time")
- `SQLSABER_LOG_WHEN` (Timed rotation unit, default: "midnight")
- `SQLSABER_LOG_INTERVAL` (Timed rotation interval, default: 1)
- `SQLSABER_LOG_BACKUP_COUNT` (number of rotated files to keep, default: 14)
- `SQLSABER_LOG_MAX_BYTES` (for size rotation, default: 10485760)
"""

from __future__ import annotations

import logging
import os
from logging import Handler
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
from pathlib import Path
from typing import Optional

import platformdirs
import structlog

_CONFIGURED = False


def _to_bool(value: str | None, default: bool = False) -> bool:
    if value is None:
        return default
    return value.strip().lower() in {"1", "true", "yes", "on"}


def default_log_dir() -> Path:
    return Path(platformdirs.user_log_dir("sqlsaber", "sqlsaber"))


def default_log_file() -> Path:
    return default_log_dir() / "sqlsaber.log"


def get_logger(name: Optional[str] = None) -> structlog.BoundLogger:
    """Return a structlog logger bound to `name`.

    Prefer using this over the stdlib `logging.getLogger` in new code.
    """
    return structlog.get_logger(name)


def _build_file_handler(log_path: Path, level: int) -> Handler:
    rotation = os.getenv("SQLSABER_LOG_ROTATION", "time").strip().lower()

    # Formatter that renders as JSON for files
    json_formatter = structlog.stdlib.ProcessorFormatter(
        processors=[
            structlog.stdlib.ProcessorFormatter.remove_processors_meta,
            structlog.processors.dict_tracebacks,
            structlog.processors.JSONRenderer(sort_keys=True),
        ],
        foreign_pre_chain=[
            structlog.processors.add_log_level,
            structlog.processors.TimeStamper(fmt="iso", utc=True),
        ],
    )

    if rotation == "size":
        max_bytes = int(os.getenv("SQLSABER_LOG_MAX_BYTES", str(10 * 1024 * 1024)))
        backup_count = int(os.getenv("SQLSABER_LOG_BACKUP_COUNT", "5"))
        handler: Handler = RotatingFileHandler(
            log_path, maxBytes=max_bytes, backupCount=backup_count, encoding="utf-8"
        )
    else:
        when = os.getenv("SQLSABER_LOG_WHEN", "midnight")
        interval = int(os.getenv("SQLSABER_LOG_INTERVAL", "1"))
        backup_count = int(os.getenv("SQLSABER_LOG_BACKUP_COUNT", "14"))
        handler = TimedRotatingFileHandler(
            log_path,
            when=when,
            interval=interval,
            backupCount=backup_count,
            encoding="utf-8",
            utc=True,
        )

    handler.setLevel(level)
    handler.setFormatter(json_formatter)
    return handler


def _build_console_handler(level: int) -> Handler:
    console_formatter = structlog.stdlib.ProcessorFormatter(
        processors=[
            structlog.stdlib.ProcessorFormatter.remove_processors_meta,
            structlog.dev.ConsoleRenderer(colors=True),
        ],
        foreign_pre_chain=[
            structlog.processors.add_log_level,
            structlog.processors.TimeStamper(fmt="iso", utc=True),
        ],
    )

    ch = logging.StreamHandler()
    ch.setLevel(level)
    ch.setFormatter(console_formatter)
    return ch


def setup_logging(*, force: bool = False) -> None:
    """Configure structlog + stdlib logging with sensible defaults.

    Call this early in the CLI startup. It's safe to call multiple times.
    """
    global _CONFIGURED
    if _CONFIGURED and not force:
        return

    # Resolve level
    level_name = os.getenv("SQLSABER_LOG_LEVEL", "INFO").upper()
    level = getattr(logging, level_name, logging.INFO)

    # Log file path
    log_file_env = os.getenv("SQLSABER_LOG_FILE")
    log_path = Path(log_file_env) if log_file_env else default_log_file()
    log_path.parent.mkdir(parents=True, exist_ok=True)

    # Whether to also log to console (stderr)
    to_console = _to_bool(os.getenv("SQLSABER_LOG_TO_STDERR")) or _to_bool(
        os.getenv("SQLSABER_DEBUG")
    )

    # Configure structlog to hand off to stdlib formatting
    structlog.configure(
        processors=[
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.processors.TimeStamper(fmt="iso", utc=True),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # Build handlers
    handlers: list[Handler] = []
    handlers.append(_build_file_handler(log_path, level))
    if to_console:
        handlers.append(_build_console_handler(level))

    # Install handlers on root logger
    root = logging.getLogger()
    root.setLevel(level)
    for h in list(root.handlers):
        root.removeHandler(h)
    for h in handlers:
        root.addHandler(h)

    # Capture warnings too
    logging.captureWarnings(True)

    # Pre-bind useful context
    try:
        from importlib.metadata import PackageNotFoundError, version  # type: ignore

        try:
            ver = version("sqlsaber")
        except PackageNotFoundError:  # during dev
            ver = "dev"
    except Exception:
        ver = "unknown"

    structlog.contextvars.bind_contextvars(app="sqlsaber", version=ver)

    _CONFIGURED = True


__all__ = [
    "setup_logging",
    "get_logger",
    "default_log_dir",
    "default_log_file",
]
```
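The docstring above lists the configuration knobs; here is a hedged sketch of how a caller might exercise `setup_logging()` and `get_logger()`. Only the imported names and environment variables come from the file above; the calling script itself is illustrative, not part of the diff.

```python
import os

from sqlsaber.config.logging import default_log_file, get_logger, setup_logging

# Illustrative settings; the variable names come from the module docstring above.
os.environ["SQLSABER_LOG_LEVEL"] = "DEBUG"      # resolved via getattr(logging, name, INFO)
os.environ["SQLSABER_LOG_TO_STDERR"] = "1"      # also attach the pretty console handler
os.environ["SQLSABER_LOG_ROTATION"] = "size"    # RotatingFileHandler instead of timed rotation

setup_logging()                    # safe to call more than once; pass force=True to reconfigure
log = get_logger("sqlsaber.demo")  # structlog logger; "sqlsaber.demo" is a made-up name
log.info("demo.event", answer=42)  # written as a JSON line to the log file

print(default_log_file())          # <user_log_dir>/sqlsaber.log unless SQLSABER_LOG_FILE overrides it
```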
sqlsaber/config/oauth_flow.py
CHANGED
```diff
@@ -2,7 +2,6 @@
 
 import base64
 import hashlib
-import logging
 import secrets
 import urllib.parse
 import webbrowser
@@ -12,12 +11,13 @@ import httpx
 import questionary
 from rich.progress import Progress, SpinnerColumn, TextColumn
 
+from sqlsaber.config.logging import get_logger
 from sqlsaber.theme.manager import create_console
 
 from .oauth_tokens import OAuthToken, OAuthTokenManager
 
 console = create_console()
-logger =
+logger = get_logger(__name__)
 
 
 CLIENT_ID = "9d1c250a-e61b-44d9-88ed-5944d1962f5e"
@@ -60,6 +60,7 @@ class AnthropicOAuthFlow:
         }
 
         url = "https://claude.ai/oauth/authorize?" + urllib.parse.urlencode(params)
+        logger.debug("oauth.auth_url.created")
         return url, verifier
 
     def _exchange_code_for_tokens(self, code: str, verifier: str) -> dict[str, str]:
@@ -86,11 +87,13 @@ class AnthropicOAuthFlow:
         )
 
         if not response.is_success:
-
+            logger.error(
+                "oauth.token_exchange.failed",
+                status_code=response.status_code,
+            )
+            raise Exception(
                 f"Token exchange failed: {response.status_code} {response.text}"
             )
-            logger.error(error_msg)
-            raise Exception(error_msg)
 
         return response.json()
 
@@ -110,11 +113,13 @@ class AnthropicOAuthFlow:
         )
 
         if not response.is_success:
-
+            logger.error(
+                "oauth.token_refresh.failed",
+                status_code=response.status_code,
+            )
+            raise Exception(
                 f"Token refresh failed: {response.status_code} {response.text}"
             )
-            logger.error(error_msg)
-            raise Exception(error_msg)
 
         return response.json()
 
@@ -132,6 +137,7 @@ class AnthropicOAuthFlow:
             "Continue with browser-based authentication?", default=True
         ).ask():
             console.print("[warning]Authentication cancelled.[/warning]")
+            logger.info("oauth.authenticate.cancelled_at_prompt")
             return False
 
         try:
@@ -169,6 +175,7 @@ class AnthropicOAuthFlow:
 
             if not auth_code:
                 console.print("[warning]Authentication cancelled.[/warning]")
+                logger.info("oauth.authenticate.cancelled_no_code")
                 return False
 
             # Step 2: Exchange code for tokens
@@ -202,18 +209,21 @@ class AnthropicOAuthFlow:
                 console.print(
                     "Your Claude Pro/Max subscription is now configured for SQLSaber."
                 )
+                logger.info("oauth.authenticate.success")
                 return True
             else:
                 console.print(
                     "[error]✗ Failed to store authentication tokens.[/error]"
                 )
+                logger.error("oauth.authenticate.store_failed")
                 return False
 
         except KeyboardInterrupt:
             console.print("\n[warning]Authentication cancelled by user.[/warning]")
+            logger.info("oauth.authenticate.cancelled_keyboard")
             return False
         except Exception as e:
-            logger.
+            logger.exception("oauth.authenticate.error", error=str(e))
             console.print(f"[error]✗ Authentication failed: {str(e)}[/error]")
             return False
 
@@ -253,13 +263,15 @@ class AnthropicOAuthFlow:
             # Store the refreshed token
             if self.token_manager.store_oauth_token("anthropic", refreshed_token):
                 console.print("OAuth token refreshed successfully", style="green")
+                logger.info("oauth.token_refresh.success")
                 return refreshed_token
             else:
                 console.print("Failed to store refreshed token", style="warning")
+                logger.warning("oauth.token_refresh.store_failed")
                 return current_token
 
         except Exception as e:
-            logger.warning(
+            logger.warning("oauth.token_refresh.error", error=str(e))
             console.print(
                 "Token refresh failed. You may need to re-authenticate.",
                 style="warning",
```