sigma-terminal 3.4.0__py3-none-any.whl → 3.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sigma/__init__.py +4 -5
- sigma/analytics/__init__.py +11 -9
- sigma/app.py +384 -1125
- sigma/backtest/__init__.py +2 -0
- sigma/backtest/service.py +116 -0
- sigma/charts.py +2 -2
- sigma/cli.py +15 -13
- sigma/comparison.py +2 -2
- sigma/config.py +25 -12
- sigma/core/command_router.py +93 -0
- sigma/llm/__init__.py +3 -0
- sigma/llm/providers/anthropic_provider.py +196 -0
- sigma/llm/providers/base.py +29 -0
- sigma/llm/providers/google_provider.py +197 -0
- sigma/llm/providers/ollama_provider.py +156 -0
- sigma/llm/providers/openai_provider.py +168 -0
- sigma/llm/providers/sigma_cloud_provider.py +57 -0
- sigma/llm/rate_limit.py +40 -0
- sigma/llm/registry.py +66 -0
- sigma/llm/router.py +122 -0
- sigma/setup_agent.py +188 -0
- sigma/tools/__init__.py +23 -0
- sigma/tools/adapter.py +38 -0
- sigma/{tools.py → tools/library.py} +593 -1
- sigma/tools/registry.py +108 -0
- sigma/utils/extraction.py +83 -0
- sigma_terminal-3.5.0.dist-info/METADATA +184 -0
- sigma_terminal-3.5.0.dist-info/RECORD +46 -0
- sigma/llm.py +0 -786
- sigma/setup.py +0 -440
- sigma_terminal-3.4.0.dist-info/METADATA +0 -264
- sigma_terminal-3.4.0.dist-info/RECORD +0 -30
- /sigma/{backtest.py → backtest/simple_engine.py} +0 -0
- {sigma_terminal-3.4.0.dist-info → sigma_terminal-3.5.0.dist-info}/WHEEL +0 -0
- {sigma_terminal-3.4.0.dist-info → sigma_terminal-3.5.0.dist-info}/entry_points.txt +0 -0
- {sigma_terminal-3.4.0.dist-info → sigma_terminal-3.5.0.dist-info}/licenses/LICENSE +0 -0
sigma/setup.py
DELETED
|
@@ -1,440 +0,0 @@
|
|
|
"""Sigma v3.4.0 - Setup Wizard."""

import os
import sys
import subprocess
from pathlib import Path
from typing import Optional

from rich.console import Console
from rich.panel import Panel
from rich.prompt import Prompt, Confirm
from rich.table import Table
from rich.text import Text

# Configuration helpers: all persisted settings and environment detection
# live in sigma.config; this module only drives the interactive flow.
from .config import (
    get_settings,
    save_api_key,
    save_setting,
    LLMProvider,
    AVAILABLE_MODELS,
    CONFIG_DIR,
    detect_lean_installation,
    detect_ollama,
    install_lean_cli_sync,
)


__version__ = "3.4.0"
# Greek sigma glyph used as the status prefix in wizard output.
SIGMA = "σ"
# Single shared Rich console for all wizard I/O.
console = Console()

# Clean banner - just SIGMA
BANNER = """
[bold #3b82f6]███████╗██╗ ██████╗ ███╗ ███╗ █████╗ [/bold #3b82f6]
[bold #60a5fa]██╔════╝██║██╔════╝ ████╗ ████║██╔══██╗[/bold #60a5fa]
[bold #93c5fd]███████╗██║██║ ███╗██╔████╔██║███████║[/bold #93c5fd]
[bold #60a5fa]╚════██║██║██║ ██║██║╚██╔╝██║██╔══██║[/bold #60a5fa]
[bold #3b82f6]███████║██║╚██████╔╝██║ ╚═╝ ██║██║ ██║[/bold #3b82f6]
[bold #1d4ed8]╚══════╝╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝[/bold #1d4ed8]

[bold cyan]σ Finance Research Agent[/bold cyan] [dim]- Setup Wizard v3.4.0[/dim]
"""
class SetupWizard:
    """Interactive setup wizard.

    Walks the user through five steps (provider, API key, model, data
    settings, optional integrations) and persists every answer via
    ``save_setting`` / ``save_api_key`` from ``.config``.  All output goes
    through the module-level ``console``.
    """

    def __init__(self):
        # Current persisted settings; mutated in place as steps complete.
        self.settings = get_settings()
        # Static catalogue describing each supported provider for display.
        # "models" lists come from AVAILABLE_MODELS; "url" is where the user
        # obtains a key (or the installer, for Ollama).
        self.providers = {
            LLMProvider.GOOGLE: {
                "name": "Google Gemini",
                "models": AVAILABLE_MODELS.get("google", []),
                "url": "https://aistudio.google.com/apikey",
                "free": True,
                "desc": "Fast, capable, free tier",
                "recommended": True,
            },
            LLMProvider.OPENAI: {
                "name": "OpenAI GPT",
                "models": AVAILABLE_MODELS.get("openai", []),
                "url": "https://platform.openai.com/api-keys",
                "free": False,
                "desc": "Industry standard",
            },
            LLMProvider.ANTHROPIC: {
                "name": "Anthropic Claude",
                "models": AVAILABLE_MODELS.get("anthropic", []),
                "url": "https://console.anthropic.com/",
                "free": False,
                "desc": "Advanced reasoning",
            },
            LLMProvider.GROQ: {
                "name": "Groq (Llama)",
                "models": AVAILABLE_MODELS.get("groq", []),
                "url": "https://console.groq.com/keys",
                "free": True,
                "desc": "Ultra-fast, free tier",
                "recommended": True,
            },
            LLMProvider.XAI: {
                "name": "xAI Grok",
                "models": AVAILABLE_MODELS.get("xai", []),
                "url": "https://console.x.ai/",
                "free": False,
                "desc": "X.com AI",
            },
            LLMProvider.OLLAMA: {
                "name": "Ollama (Local)",
                "models": AVAILABLE_MODELS.get("ollama", []),
                "url": "https://ollama.ai/download",
                "free": True,
                "desc": "Run locally, no API key",
            },
        }

    def run(self) -> bool:
        """Run setup wizard.

        Returns:
            True when all steps were executed; False if the user declined
            at the initial confirmation prompt.
        """
        console.clear()
        console.print(BANNER)
        console.print()

        console.print(Panel(
            "[bold]Welcome to Sigma Setup[/bold]\n\n"
            "This wizard will configure Sigma for first use.\n"
            f"Configuration stored in [cyan]~/.sigma/[/cyan]\n\n"
            "[bold]Steps:[/bold]\n"
            " 1. Choose AI provider\n"
            " 2. Configure API key\n"
            " 3. Select model\n"
            " 4. Data settings\n"
            " 5. Optional: Ollama, LEAN",
            title=f"[cyan]{SIGMA} Setup[/cyan]",
            border_style="blue",
        ))
        console.print()

        if not Confirm.ask("Ready to begin?", default=True):
            console.print("[dim]Cancelled.[/dim]")
            return False

        console.print()

        # Steps — order matters: later steps read the provider/model chosen
        # by earlier ones from self.settings.
        self._setup_provider()
        console.print()
        self._setup_api_key()
        console.print()
        self._setup_model()
        console.print()
        self._setup_data()
        console.print()
        self._setup_integrations()
        console.print()
        self._show_summary()

        return True

    def _setup_provider(self):
        """Choose AI provider (step 1) and persist it as default_provider."""
        console.print(Panel("[bold]Step 1: AI Provider[/bold]", border_style="blue"))
        console.print()

        providers = list(self.providers.keys())

        for i, p in enumerate(providers, 1):
            info = self.providers[p]
            name = info["name"]
            desc = info["desc"]
            free = "[green]free[/green]" if info.get("free") else "[yellow]paid[/yellow]"
            rec = " [cyan](recommended)[/cyan]" if info.get("recommended") else ""
            console.print(f" {i}. [bold]{name}[/bold] - {desc} {free}{rec}")

        console.print()
        choice = Prompt.ask(
            "Choose provider",
            choices=[str(i) for i in range(1, len(providers) + 1)],
            default="1"
        )

        # Menu is 1-based; list index is 0-based.
        provider = providers[int(choice) - 1]
        save_setting("default_provider", provider.value)
        self.settings.default_provider = provider

        console.print(f"[cyan]{SIGMA}[/cyan] Provider: [bold]{self.providers[provider]['name']}[/bold]")

    def _setup_api_key(self):
        """Configure API key (step 2) for the provider chosen in step 1."""
        console.print(Panel("[bold]Step 2: API Key[/bold]", border_style="blue"))
        console.print()

        provider = self.settings.default_provider
        info = self.providers[provider]

        if provider == LLMProvider.OLLAMA:
            # Local runtime: nothing to store, only confirm it is installed.
            console.print("[dim]Ollama runs locally - no API key needed.[/dim]")
            if Confirm.ask("Is Ollama installed?", default=True):
                console.print(f"[cyan]{SIGMA}[/cyan] Ollama configured")
            else:
                console.print(f"Install from: [bold]{info['url']}[/bold]")
            return

        # Check existing key on the settings object, e.g. "google_api_key".
        key_attr = f"{provider.value}_api_key"
        existing = getattr(self.settings, key_attr, None)

        if existing:
            # NOTE(review): for keys shorter than ~12 chars these slices can
            # overlap and echo most of the key — assumed keys are long; verify.
            masked = f"{existing[:8]}...{existing[-4:]}"
            console.print(f"[dim]Existing key: {masked}[/dim]")
            if not Confirm.ask("Replace?", default=False):
                console.print(f"[cyan]{SIGMA}[/cyan] Keeping existing key")
                return

        console.print(f"Get key from: [bold]{info['url']}[/bold]")
        console.print()

        api_key = Prompt.ask("API key", password=True)

        if api_key:
            save_api_key(provider, api_key)
            setattr(self.settings, key_attr, api_key)
            console.print(f"[cyan]{SIGMA}[/cyan] Key saved for {info['name']}")
        else:
            console.print("[yellow]Skipped[/yellow]")

    def _setup_model(self):
        """Select model (step 3) from the chosen provider's catalogue."""
        console.print(Panel("[bold]Step 3: Model[/bold]", border_style="blue"))
        console.print()

        provider = self.settings.default_provider
        models = self.providers[provider]["models"]

        if not models:
            console.print("[yellow]No models for this provider[/yellow]")
            return

        console.print("Available models:")
        for i, m in enumerate(models, 1):
            current = " [cyan](current)[/cyan]" if m == self.settings.default_model else ""
            console.print(f" {i}. {m}{current}")

        console.print()
        choice = Prompt.ask(
            "Choose model",
            choices=[str(i) for i in range(1, len(models) + 1)],
            default="1"
        )

        model = models[int(choice) - 1]
        save_setting("default_model", model)
        self.settings.default_model = model

        console.print(f"[cyan]{SIGMA}[/cyan] Model: [bold]{model}[/bold]")

    def _setup_data(self):
        """Data settings (step 4): output directory and cache toggle."""
        console.print(Panel("[bold]Step 4: Data Settings[/bold]", border_style="blue"))
        console.print()

        console.print(f"Data stored in: [bold]~/.sigma[/bold]")
        console.print()

        # Output directory — created eagerly so later writes cannot fail on
        # a missing path.
        default_out = os.path.expanduser("~/Documents/Sigma")
        out_dir = Prompt.ask("Output directory", default=default_out)

        Path(out_dir).mkdir(parents=True, exist_ok=True)
        save_setting("output_dir", out_dir)
        console.print(f"[cyan]{SIGMA}[/cyan] Output: {out_dir}")

        # Cache — persisted as the strings "true"/"false".
        if Confirm.ask("Enable caching?", default=True):
            save_setting("cache_enabled", "true")
            console.print(f"[cyan]{SIGMA}[/cyan] Caching enabled")
        else:
            save_setting("cache_enabled", "false")

    def _setup_integrations(self):
        """Optional integrations (step 5): Ollama fallback, LEAN, Exa."""
        console.print(Panel("[bold]Step 5: Integrations[/bold]", border_style="blue"))
        console.print()

        # Ollama detection — only offered as a fallback when it is not
        # already the primary provider.
        if self.settings.default_provider != LLMProvider.OLLAMA:
            ollama_running, ollama_host = detect_ollama()
            if ollama_running and ollama_host:
                console.print(f"[green][ok][/green] Ollama detected at {ollama_host}")
                if Confirm.ask("Enable Ollama as local fallback?", default=True):
                    save_setting("ollama_host", ollama_host)
                    console.print(f"[cyan]{SIGMA}[/cyan] Ollama enabled")
            else:
                if Confirm.ask("Setup Ollama for local fallback?", default=False):
                    console.print("[dim]Install: https://ollama.ai/download[/dim]")
                    console.print("[dim]Run: ollama pull llama3.2[/dim]")

        console.print()

        # LEAN auto-detection
        lean_installed, lean_cli, lean_dir = detect_lean_installation()

        if lean_installed:
            console.print(f"[green][ok][/green] LEAN/QuantConnect detected!")
            if lean_cli:
                console.print(f" [dim]CLI: {lean_cli}[/dim]")
            if lean_dir:
                console.print(f" [dim]Directory: {lean_dir}[/dim]")
            console.print()

            if Confirm.ask("Enable LEAN integration?", default=True):
                save_setting("lean_enabled", "true")
                if lean_cli:
                    save_setting("lean_cli_path", lean_cli)
                if lean_dir:
                    save_setting("lean_directory", lean_dir)
                console.print(f"[cyan]{SIGMA}[/cyan] LEAN integration enabled")
            else:
                save_setting("lean_enabled", "false")
        else:
            console.print("[yellow]![/yellow] LEAN/QuantConnect not detected")
            console.print("[dim]LEAN provides institutional-grade backtesting with QuantConnect's engine.[/dim]")
            console.print()

            lean_choice = Prompt.ask(
                "Would you like to",
                choices=["install", "manual", "skip"],
                default="skip"
            )

            if lean_choice == "install":
                console.print()
                console.print(f"[cyan]{SIGMA}[/cyan] Installing LEAN CLI via pip...")
                console.print("[dim]This may take a minute...[/dim]")

                with console.status("[bold blue]Installing LEAN...[/bold blue]"):
                    success, message = install_lean_cli_sync()

                if success:
                    console.print(f"[green][ok][/green] {message}")
                    save_setting("lean_enabled", "true")
                    save_setting("lean_cli_path", "lean")

                    # Verify installation actually made the CLI resolvable.
                    lean_installed, lean_cli, lean_dir = detect_lean_installation()
                    if lean_cli:
                        console.print(f"[cyan]{SIGMA}[/cyan] LEAN CLI ready: {lean_cli}")
                else:
                    console.print(f"[red][x][/red] {message}")
                    console.print("[dim]You can install manually later: pip install lean[/dim]")

            elif lean_choice == "manual":
                console.print()
                console.print("[bold]Manual Installation Options:[/bold]")
                console.print(" 1. [cyan]pip install lean[/cyan] - LEAN CLI (recommended)")
                console.print(" 2. [cyan]https://github.com/QuantConnect/Lean[/cyan] - Full source")
                console.print()

                lean_path = Prompt.ask("Enter LEAN CLI path (or Enter to skip)", default="")
                if lean_path:
                    save_setting("lean_enabled", "true")
                    save_setting("lean_cli_path", lean_path)
                    console.print(f"[cyan]{SIGMA}[/cyan] LEAN configured: {lean_path}")
            else:
                # "skip" (or any unexpected choice) falls through to here.
                console.print("[dim]Skipping LEAN setup. You can configure it later.[/dim]")

        console.print()

        # Exa API Key (Optional)
        console.print("[bold]Exa Search (Optional)[/bold]")
        console.print("[dim]Enables financial news search, SEC filings, and earnings transcripts.[/dim]")
        console.print()

        if Confirm.ask("Configure Exa? (financial news search, SEC filings)", default=False):
            console.print("[dim]Get key: https://exa.ai[/dim]")
            exa_key = Prompt.ask("Exa API key", password=True, default="")
            if exa_key:
                save_setting("exa_api_key", exa_key)
                console.print(f"[cyan]{SIGMA}[/cyan] Exa configured")

    def _show_summary(self):
        """Show summary of the chosen provider and model, then exit hint."""
        console.print(Panel(
            "[bold green]Setup Complete![/bold green]",
            border_style="green",
        ))
        console.print()

        table = Table(show_header=False, box=None)
        table.add_column("", style="bold")
        table.add_column("")

        # default_provider may be an enum (use .value) or a plain string.
        provider = getattr(self.settings.default_provider, 'value', str(self.settings.default_provider))
        table.add_row("Provider", provider)
        table.add_row("Model", self.settings.default_model)

        console.print(table)
        console.print()
        console.print(f"Run [bold]sigma[/bold] to start!")
        console.print()
def run_setup() -> bool:
    """Launch the full interactive wizard.

    Returns:
        True when the wizard ran to completion, False if the user cancelled.
    """
    return SetupWizard().run()
def quick_setup():
    """Abbreviated first-run setup: pick one provider, store a key, save defaults."""
    # menu choice -> (provider id, display name, default model, key URL or None)
    options = {
        "1": ("google", "Google Gemini", "gemini-3-flash-preview", "https://aistudio.google.com/apikey"),
        "2": ("groq", "Groq", "llama-3.3-70b-versatile", "https://console.groq.com/keys"),
        "3": ("ollama", "Ollama", "llama3.2", None),
    }

    console.print(BANNER)
    console.print()

    console.print("[bold]Quick Setup[/bold]")
    console.print()

    # Present the three quick-start providers.
    console.print("Choose provider:")
    console.print(" 1. [bold]Google Gemini[/bold] [green](free, recommended)[/green]")
    console.print(" 2. [bold]Groq[/bold] [green](free, fast)[/green]")
    console.print(" 3. [bold]Ollama[/bold] [green](local, no key)[/green]")
    console.print()

    selection = Prompt.ask("Provider", choices=["1", "2", "3"], default="1")
    provider_key, display_name, model, signup_url = options[selection]

    # Ollama runs locally and has no signup URL, so no key is collected.
    if signup_url is not None:
        console.print(f"\nGet key from: [bold]{signup_url}[/bold]")
        entered_key = Prompt.ask("API key", password=True)

        if entered_key:
            save_api_key(LLMProvider(provider_key), entered_key)

    save_setting("default_provider", provider_key)
    save_setting("default_model", model)

    console.print()
    console.print(f"[bold green]{SIGMA} Setup complete![/bold green]")
    console.print(f"Provider: {display_name}")
    console.print(f"Model: {model}")
    console.print()
    console.print("Run [bold]sigma[/bold] to start!")
if __name__ == "__main__":
    # `--quick` selects the abbreviated flow; anything else runs the wizard.
    entry = quick_setup if "--quick" in sys.argv else run_setup
    entry()