sigma-terminal 2.0.2__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sigma/config.py ADDED
@@ -0,0 +1,366 @@
1
+ """Configuration management for Sigma v3.3.0."""
2
+
3
+ import os
4
+ import shutil
5
+ import subprocess
6
+ from enum import Enum
7
+ from pathlib import Path
8
+ from typing import Optional, Tuple
9
+
10
+ from pydantic import Field
11
+ from pydantic_settings import BaseSettings
12
+
13
+
14
+ __version__ = "3.3.0"
15
+
16
+
17
class LLMProvider(str, Enum):
    """Supported LLM providers.

    Subclasses ``str`` so members compare equal to (and serialize as)
    their plain string values, e.g. ``LLMProvider.GOOGLE == "google"``.
    """
    GOOGLE = "google"
    OPENAI = "openai"
    ANTHROPIC = "anthropic"
    GROQ = "groq"
    XAI = "xai"
    OLLAMA = "ollama"  # local server; the only provider that needs no API key
25
+
26
+
27
# Available models per provider.
# Keys match LLMProvider values; values are model identifiers offered in
# the UI/CLI. NOTE(review): these lists are snapshots of each vendor's
# catalog and will go stale -- verify against provider docs when updating.
AVAILABLE_MODELS = {
    "google": ["gemini-2.0-flash", "gemini-1.5-flash", "gemini-1.5-pro"],
    "openai": ["gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "o1-preview", "o1-mini"],
    "anthropic": ["claude-sonnet-4-20250514", "claude-3-5-sonnet-20241022", "claude-3-opus-20240229"],
    "groq": ["llama-3.3-70b-versatile", "llama-3.1-8b-instant", "mixtral-8x7b-32768"],
    "xai": ["grok-2", "grok-2-mini"],
    "ollama": ["llama3.2", "llama3.1", "mistral", "codellama", "phi3"],
}
36
+
37
# Config directory: all persistent state lives under ~/.sigma
CONFIG_DIR = Path.home() / ".sigma"
CONFIG_FILE = CONFIG_DIR / "config.env"  # KEY=VALUE settings, read by Settings via env_file
FIRST_RUN_MARKER = CONFIG_DIR / ".first_run_complete"  # touched once first-run setup finishes
41
+
42
+
43
def is_first_run() -> bool:
    """Return True until the first-run setup marker file has been created."""
    marker_present = FIRST_RUN_MARKER.exists()
    return not marker_present
46
+
47
+
48
def mark_first_run_complete() -> None:
    """Record that first-run setup finished by touching the marker file."""
    # Ensure ~/.sigma exists before creating the marker inside it.
    FIRST_RUN_MARKER.parent.mkdir(parents=True, exist_ok=True)
    FIRST_RUN_MARKER.touch()
52
+
53
+
54
def detect_lean_installation() -> Tuple[bool, Optional[str], Optional[str]]:
    """Auto-detect a LEAN/QuantConnect installation.

    Detection strategy (CLI and directory are detected independently):
      1. ``lean`` executable on PATH (``shutil.which``).
      2. Well-known directories containing a LEAN checkout.
      3. ``pip show lean`` metadata (CLI installed as a pip package).
      4. Running ``lean --version`` directly as a last resort.

    Returns:
        Tuple ``(is_installed, cli_path, lean_directory)``; the last two
        entries are ``None`` when not found.
    """
    lean_cli_path: Optional[str] = shutil.which("lean")
    lean_directory = _find_lean_directory()

    if not lean_cli_path:
        lean_cli_path = _detect_pip_installed_lean()

    if not lean_cli_path:
        # Last resort: the command may work even without pip metadata
        # (e.g. installed via pipx or a system package).
        try:
            result = subprocess.run(
                ["lean", "--version"],
                capture_output=True,
                text=True,
                timeout=5,
            )
            if result.returncode == 0:
                lean_cli_path = "lean"
        except (OSError, subprocess.SubprocessError):
            # Command missing or timed out -- treat as "not installed".
            # (Was a blanket `except Exception`, which hid real bugs.)
            pass

    is_installed = lean_cli_path is not None or lean_directory is not None
    return is_installed, lean_cli_path, lean_directory


def _find_lean_directory() -> Optional[str]:
    """Return the first well-known path that looks like a LEAN checkout, else None."""
    common_paths = [
        Path.home() / "Lean",
        Path.home() / ".lean",
        Path.home() / "QuantConnect" / "Lean",
        Path("/opt/lean"),
        Path.home() / "Projects" / "Lean",
        Path.home() / ".local" / "share" / "lean",
    ]
    for path in common_paths:
        if not path.exists():
            continue
        # Heuristic markers of a LEAN directory structure.
        if (path / "Launcher").exists() or (path / "Algorithm.Python").exists() or (path / "lean.json").exists():
            return str(path)
    return None


def _detect_pip_installed_lean() -> Optional[str]:
    """Return "lean" when pip metadata shows the `lean` package, else None."""
    for pip_cmd in ("pip3", "pip"):
        try:
            result = subprocess.run(
                [pip_cmd, "show", "lean"],
                capture_output=True,
                text=True,
                timeout=10,
            )
        except (OSError, subprocess.SubprocessError):
            # pip binary absent or unresponsive -- try the next candidate.
            continue
        if result.returncode == 0:
            # A "Location:" line in `pip show` output confirms installation.
            for line in result.stdout.split("\n"):
                if line.startswith("Location:"):
                    return "lean"
    return None
122
+
123
+
124
async def install_lean_cli() -> Tuple[bool, str]:
    """
    Install LEAN CLI via pip (async).

    Tries ``pip3`` first, then ``pip``. On timeout the spawned pip
    process is killed and reaped (the original leaked it).

    Returns: (success, message)
    """
    import asyncio

    try:
        for pip_cmd in ["pip3", "pip"]:
            try:
                process = await asyncio.create_subprocess_exec(
                    pip_cmd, "install", "lean",
                    stdout=asyncio.subprocess.PIPE,
                    stderr=asyncio.subprocess.PIPE
                )
            except FileNotFoundError:
                # This pip binary doesn't exist; try the next one.
                continue

            try:
                stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=120)
            except asyncio.TimeoutError:
                # Bug fix: don't leak the still-running pip process --
                # kill it and wait so pipes/zombies are cleaned up.
                process.kill()
                await process.wait()
                continue

            if process.returncode == 0:
                return True, "LEAN CLI installed successfully!"

        return False, "Failed to install LEAN CLI. Please install manually: pip install lean"
    except Exception as e:
        # Best-effort installer: surface any unexpected failure as a message.
        return False, f"Installation error: {str(e)}"
150
+
151
+
152
def install_lean_cli_sync() -> Tuple[bool, str]:
    """Install the LEAN CLI with pip (blocking variant).

    Tries ``pip3`` first, then ``pip``; each attempt gets a 120s budget.

    Returns:
        (success, message) describing the outcome.
    """
    failure_msg = "Failed to install LEAN CLI. Please install manually: pip install lean"
    try:
        for installer in ("pip3", "pip"):
            try:
                completed = subprocess.run(
                    [installer, "install", "lean"],
                    capture_output=True,
                    text=True,
                    timeout=120,
                )
            except (FileNotFoundError, subprocess.TimeoutExpired):
                # Binary missing or attempt timed out -- try the next one.
                continue
            if completed.returncode == 0:
                return True, "LEAN CLI installed successfully!"
        return False, failure_msg
    except Exception as e:
        # Best-effort installer: report any unexpected failure as a message.
        return False, f"Installation error: {str(e)}"
176
+
177
+
178
def detect_ollama() -> Tuple[bool, Optional[str]]:
    """Probe local Ollama endpoints and report the first one that responds.

    Hits ``/api/tags`` on localhost:11434 (by name, then by IP) with a
    2-second timeout per host.

    Returns:
        (is_running, host_url); host_url is None when nothing answered.
    """
    import urllib.request
    import urllib.error

    candidates = (
        "http://localhost:11434",
        "http://127.0.0.1:11434",
    )

    for candidate in candidates:
        request = urllib.request.Request(f"{candidate}/api/tags", method="GET")
        try:
            with urllib.request.urlopen(request, timeout=2) as response:
                if response.status == 200:
                    return True, candidate
        except (urllib.error.URLError, OSError):
            # Host unreachable -- try the next candidate.
            pass

    return False, None
201
+
202
+
203
class Settings(BaseSettings):
    """Application settings.

    Values are populated by pydantic-settings from the process
    environment and from ``~/.sigma/config.env`` (see the nested
    ``Config``); unknown keys in that file are ignored. Fields with a
    ``Field(alias=...)`` are read under that env-var name.
    """

    # Provider settings
    default_provider: LLMProvider = LLMProvider.GOOGLE
    default_model: str = Field(default="gemini-2.0-flash", alias="DEFAULT_MODEL")

    # API Keys -- None means "provider not configured" (see get_available_providers)
    google_api_key: Optional[str] = Field(default=None, alias="GOOGLE_API_KEY")
    openai_api_key: Optional[str] = Field(default=None, alias="OPENAI_API_KEY")
    anthropic_api_key: Optional[str] = Field(default=None, alias="ANTHROPIC_API_KEY")
    groq_api_key: Optional[str] = Field(default=None, alias="GROQ_API_KEY")
    xai_api_key: Optional[str] = Field(default=None, alias="XAI_API_KEY")

    # Model settings -- default model identifier per provider
    google_model: str = "gemini-2.0-flash"
    openai_model: str = "gpt-4o"
    anthropic_model: str = "claude-sonnet-4-20250514"
    groq_model: str = "llama-3.3-70b-versatile"
    xai_model: str = "grok-2"
    ollama_model: str = "llama3.2"

    # Ollama settings -- base URL of the local Ollama server
    ollama_host: str = "http://localhost:11434"

    # LEAN settings (populated by first-run detection / save_setting)
    lean_cli_path: Optional[str] = Field(default=None, alias="LEAN_CLI_PATH")
    lean_directory: Optional[str] = Field(default=None, alias="LEAN_DIRECTORY")
    lean_enabled: bool = Field(default=False, alias="LEAN_ENABLED")

    # Data API keys
    # NOTE(review): shipping a hard-coded key in source exposes it publicly
    # and it may be rate-limited or revoked at any time -- consider making
    # it overridable via an env alias like the other keys.
    alpha_vantage_api_key: str = "6ER128DD3NQUPTVC"  # Built-in free key
    exa_api_key: Optional[str] = None

    class Config:
        # pydantic-settings source configuration: read ~/.sigma/config.env
        # (UTF-8) and silently ignore unrecognized entries.
        env_file = str(CONFIG_FILE)
        env_file_encoding = "utf-8"
        extra = "ignore"

    def get_api_key(self, provider: LLMProvider) -> Optional[str]:
        """Get API key for a provider.

        Returns None for OLLAMA (keyless) and for unconfigured providers.
        """
        key_map = {
            LLMProvider.GOOGLE: self.google_api_key,
            LLMProvider.OPENAI: self.openai_api_key,
            LLMProvider.ANTHROPIC: self.anthropic_api_key,
            LLMProvider.GROQ: self.groq_api_key,
            LLMProvider.XAI: self.xai_api_key,
            LLMProvider.OLLAMA: None,  # No key needed
        }
        return key_map.get(provider)

    def get_model(self, provider: LLMProvider) -> str:
        """Get model for a provider.

        Returns the provider's configured model name, or "" for an
        unknown provider value.
        """
        model_map = {
            LLMProvider.GOOGLE: self.google_model,
            LLMProvider.OPENAI: self.openai_model,
            LLMProvider.ANTHROPIC: self.anthropic_model,
            LLMProvider.GROQ: self.groq_model,
            LLMProvider.XAI: self.xai_model,
            LLMProvider.OLLAMA: self.ollama_model,
        }
        return model_map.get(provider, "")

    def get_available_providers(self) -> list[LLMProvider]:
        """Get list of providers with configured API keys.

        OLLAMA is always included because it requires no key (whether a
        local server is actually running is checked elsewhere).
        """
        available = []
        for provider in LLMProvider:
            if provider == LLMProvider.OLLAMA:
                available.append(provider)  # Always available
            elif self.get_api_key(provider):
                available.append(provider)
        return available

    def is_configured(self) -> bool:
        """Check if at least one provider is configured."""
        # Always True in practice, since OLLAMA is unconditionally available.
        return len(self.get_available_providers()) > 0
279
+
280
+
281
def get_settings() -> Settings:
    """Construct a Settings instance, creating ~/.sigma first if needed.

    Values come from the process environment and the config.env file.
    """
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
    settings = Settings()
    return settings
288
+
289
+
290
def save_api_key(provider: str, key: str) -> None:
    """Persist an API key for *provider* into the config file.

    The existing file is parsed, the provider's entry is replaced, and
    the whole file is rewritten with keys sorted alphabetically. An
    unknown provider name leaves the stored keys unchanged (though the
    file is still rewritten).
    """
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)

    # Parse existing KEY=VALUE pairs, skipping comments and blank lines.
    entries = {}
    if CONFIG_FILE.exists():
        with open(CONFIG_FILE) as fh:
            for raw in fh:
                stripped = raw.strip()
                if stripped.startswith("#") or "=" not in stripped:
                    continue
                name, _, value = stripped.partition("=")
                entries[name] = value

    # Map the provider name to its canonical env-var key.
    env_key = {
        "google": "GOOGLE_API_KEY",
        "openai": "OPENAI_API_KEY",
        "anthropic": "ANTHROPIC_API_KEY",
        "groq": "GROQ_API_KEY",
        "xai": "XAI_API_KEY",
    }.get(provider.lower())
    if env_key:
        entries[env_key] = key

    # Rewrite the file from scratch with a header and sorted entries.
    with open(CONFIG_FILE, "w") as fh:
        fh.write("# Sigma Configuration\n\n")
        for name, value in sorted(entries.items()):
            fh.write(f"{name}={value}\n")
322
+
323
+
324
def get_api_key(provider: str) -> Optional[str]:
    """Look up the configured API key for *provider* by name.

    Returns None when the name does not match any LLMProvider value.
    """
    settings = get_settings()
    try:
        resolved = LLMProvider(provider.lower())
    except ValueError:
        # Not a recognized provider name.
        return None
    return settings.get_api_key(resolved)
331
+
332
+
333
def save_setting(key: str, value: str) -> None:
    """Write a single configuration setting to the config file.

    Known setting names are mapped to their canonical env-var keys;
    anything else is stored under ``key.upper()``. The file is rewritten
    with entries sorted alphabetically.
    """
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)

    # Parse existing KEY=VALUE pairs, ignoring comments and blank lines.
    entries = {}
    if CONFIG_FILE.exists():
        with open(CONFIG_FILE) as fh:
            for raw in fh:
                stripped = raw.strip()
                if "=" in stripped and not stripped.startswith("#"):
                    name, _, existing = stripped.partition("=")
                    entries[name] = existing

    # Map setting name to its canonical config key.
    setting_map = {
        "default_provider": "DEFAULT_PROVIDER",
        "default_model": "DEFAULT_MODEL",
        "output_dir": "OUTPUT_DIR",
        "cache_enabled": "CACHE_ENABLED",
        "lean_cli_path": "LEAN_CLI_PATH",
        "lean_directory": "LEAN_DIRECTORY",
        "lean_enabled": "LEAN_ENABLED",
        "ollama_host": "OLLAMA_HOST",
    }
    entries[setting_map.get(key, key.upper())] = str(value)

    # Rewrite the file from scratch with a header and sorted entries.
    with open(CONFIG_FILE, "w") as fh:
        fh.write("# Sigma Configuration\n\n")
        for name, stored in sorted(entries.items()):
            fh.write(f"{name}={stored}\n")
sigma/core/__init__.py CHANGED
@@ -1,18 +1,5 @@
1
- """Core module initialization."""
1
+ """Core infrastructure for Sigma Financial Intelligence Platform."""
2
2
 
3
- from sigma.core.agent import SigmaAgent
4
- from sigma.core.config import LLMProvider, Settings, get_settings
5
- from sigma.core.llm import get_llm
6
- from sigma.core.models import Message, MessageRole, ToolCall, ToolResult
7
-
8
- __all__ = [
9
- "SigmaAgent",
10
- "LLMProvider",
11
- "Settings",
12
- "get_settings",
13
- "get_llm",
14
- "Message",
15
- "MessageRole",
16
- "ToolCall",
17
- "ToolResult",
18
- ]
3
+ from .models import *
4
+ from .intent import *
5
+ from .engine import *