EvoScientist 0.0.1.dev2__py3-none-any.whl → 0.0.1.dev4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
EvoScientist/config.py ADDED
@@ -0,0 +1,274 @@
1
+ """Configuration management for EvoScientist.
2
+
3
+ Handles loading, saving, and merging configuration from multiple sources
4
+ with the following priority (highest to lowest):
5
+ CLI arguments > Environment variables > Config file > Defaults
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import os
11
+ from dataclasses import dataclass, asdict, fields
12
+ from pathlib import Path
13
+ from typing import Any, Literal
14
+
15
+ import yaml
16
+
17
+
18
+ # =============================================================================
19
+ # Configuration paths
20
+ # =============================================================================
21
+
22
def get_config_dir() -> Path:
    """Return the directory that holds EvoScientist configuration.

    Follows the XDG Base Directory convention: when XDG_CONFIG_HOME is set
    to a non-empty value the directory lives under it; otherwise it falls
    back to ~/.config/evoscientist.
    """
    base = os.environ.get("XDG_CONFIG_HOME") or str(Path.home() / ".config")
    return Path(base) / "evoscientist"
31
+
32
+
33
def get_config_path() -> Path:
    """Return the full path of the YAML configuration file (config.yaml)."""
    return get_config_dir().joinpath("config.yaml")
36
+
37
+
38
+ # =============================================================================
39
+ # Configuration dataclass
40
+ # =============================================================================
41
+
42
@dataclass
class EvoScientistConfig:
    """EvoScientist configuration settings.

    Attributes:
        anthropic_api_key: Anthropic API key for Claude models.
        openai_api_key: OpenAI API key for GPT models.
        nvidia_api_key: NVIDIA API key for NVIDIA-hosted models.
        tavily_api_key: Tavily API key for web search.
        provider: Default LLM provider ('anthropic' or 'openai').
        model: Default model name (short name or full ID).
        default_mode: Default workspace mode ('daemon' or 'run').
        default_workdir: Default workspace directory (empty = use ./workspace).
        max_concurrent: Maximum concurrent sub-agents.
        max_iterations: Maximum delegation iterations.
        show_thinking: Whether to show thinking panels in CLI.
    """

    # API Keys (empty string means "unset"; environment variables may
    # supply these instead — see get_effective_config / apply_config_to_env)
    anthropic_api_key: str = ""
    openai_api_key: str = ""
    nvidia_api_key: str = ""
    tavily_api_key: str = ""

    # LLM Settings
    provider: str = "anthropic"
    model: str = "claude-sonnet-4-5"

    # Workspace Settings
    default_mode: Literal["daemon", "run"] = "daemon"
    default_workdir: str = ""

    # Agent Parameters
    max_concurrent: int = 3
    max_iterations: int = 3

    # UI Settings
    show_thinking: bool = True
79
+
80
+
81
+ # =============================================================================
82
+ # Config file operations
83
+ # =============================================================================
84
+
85
def load_config() -> EvoScientistConfig:
    """Load configuration from the config file.

    Keys in the file that are not EvoScientistConfig fields are dropped so
    that older or newer config files remain loadable. Any read or parse
    failure falls back to defaults rather than aborting startup.

    Returns:
        EvoScientistConfig populated from the file, or a default instance
        when the file is missing or unreadable.
    """
    path = get_config_path()
    if not path.exists():
        return EvoScientistConfig()

    try:
        with open(path) as fh:
            raw = yaml.safe_load(fh) or {}

        # Drop unknown keys before constructing the dataclass.
        known = {f.name for f in fields(EvoScientistConfig)}
        return EvoScientistConfig(**{k: v for k, v in raw.items() if k in known})
    except Exception:
        # Best-effort: a corrupt config file must not break the CLI.
        return EvoScientistConfig()
109
+
110
+
111
def save_config(config: EvoScientistConfig) -> None:
    """Persist *config* to the YAML config file, creating directories as needed.

    Every field is written, including empty API keys, so users can see all
    available settings (keys may alternatively come from env vars).

    Args:
        config: EvoScientistConfig instance to save.
    """
    path = get_config_path()
    path.parent.mkdir(parents=True, exist_ok=True)

    with open(path, "w") as fh:
        yaml.safe_dump(asdict(config), fh, default_flow_style=False, sort_keys=False)
125
+
126
+
127
def reset_config() -> None:
    """Reset configuration to defaults by deleting the config file.

    Uses ``Path.unlink(missing_ok=True)`` instead of an exists()/unlink()
    pair, which closes the race where the file disappears between the two
    calls (and would then raise FileNotFoundError).
    """
    get_config_path().unlink(missing_ok=True)
132
+
133
+
134
+ # =============================================================================
135
+ # Config value operations
136
+ # =============================================================================
137
+
138
def get_config_value(key: str) -> Any:
    """Look up a single configuration value by name.

    Args:
        key: Configuration key name.

    Returns:
        The stored value, or None when *key* is not a known setting.
    """
    return getattr(load_config(), key, None)
149
+
150
+
151
def set_config_value(key: str, value: Any) -> bool:
    """Set a single configuration value and persist it to the config file.

    String inputs are coerced to the field's declared type (bool/int/str);
    fields with other annotations (e.g. Literal) are stored as given.
    Note: with ``from __future__ import annotations`` the field type is the
    annotation *string*, hence the string comparisons below.

    Args:
        key: Configuration key name.
        value: New value.

    Returns:
        True if successful, False if the key is unknown or coercion fails.
    """
    declared = {f.name: f for f in fields(EvoScientistConfig)}
    field = declared.get(key)
    if field is None:
        return False

    config = load_config()

    annotation = field.type
    try:
        if annotation == "bool" or annotation is bool:
            if isinstance(value, str):
                value = value.lower() in ("true", "1", "yes", "on")
            else:
                value = bool(value)
        elif annotation == "int" or annotation is int:
            value = int(value)
        elif annotation == "str" or annotation is str:
            value = str(value)
    except (ValueError, TypeError):
        return False

    setattr(config, key, value)
    save_config(config)
    return True
187
+
188
+
189
def list_config() -> dict[str, Any]:
    """Return every configuration key-value pair as a plain dictionary."""
    config = load_config()
    return asdict(config)
196
+
197
+
198
+ # =============================================================================
199
+ # Effective configuration (merging sources)
200
+ # =============================================================================
201
+
202
# Environment variable mappings
# Maps EvoScientistConfig field name -> environment variable that can
# override it (consumed by get_effective_config). Fields not listed here
# cannot be set from the environment.
_ENV_MAPPINGS = {
    "anthropic_api_key": "ANTHROPIC_API_KEY",
    "openai_api_key": "OPENAI_API_KEY",
    "nvidia_api_key": "NVIDIA_API_KEY",
    "tavily_api_key": "TAVILY_API_KEY",
    "default_mode": "EVOSCIENTIST_DEFAULT_MODE",
    "default_workdir": "EVOSCIENTIST_WORKSPACE_DIR",
}
211
+
212
+
213
def get_effective_config(cli_overrides: dict[str, Any] | None = None) -> EvoScientistConfig:
    """Build the effective configuration by merging all sources.

    Priority (highest to lowest):
        1. CLI arguments (cli_overrides)
        2. Environment variables
        3. Config file
        4. Defaults

    Args:
        cli_overrides: Optional mapping of CLI argument overrides; entries
            whose value is None are ignored.

    Returns:
        EvoScientistConfig with merged values.
    """
    # Layer 3+4: the file config already sits on top of dataclass defaults.
    merged = asdict(load_config())
    declared = {f.name: f.type for f in fields(EvoScientistConfig)}

    # Layer 2: environment variables (empty values are treated as unset).
    for field_name, env_name in _ENV_MAPPINGS.items():
        raw = os.environ.get(env_name)
        if not raw:
            continue
        annotation = declared[field_name]
        if annotation == "bool" or annotation is bool:
            merged[field_name] = raw.lower() in ("true", "1", "yes", "on")
        elif annotation == "int" or annotation is int:
            try:
                merged[field_name] = int(raw)
            except ValueError:
                # Unparseable int in the environment: keep lower-priority value.
                pass
        else:
            merged[field_name] = raw

    # Layer 1: explicit CLI overrides win over everything else.
    for key, value in (cli_overrides or {}).items():
        if value is not None and key in merged:
            merged[key] = value

    return EvoScientistConfig(**merged)
256
+
257
+
258
def apply_config_to_env(config: EvoScientistConfig) -> None:
    """Export API keys from *config* into the process environment.

    Only keys that are non-empty in the config AND not already set in
    os.environ are exported, so real environment variables always win.
    This lets downstream libraries (e.g. langchain-anthropic) pick the
    keys up from the environment.

    Args:
        config: Configuration whose API keys should be applied.
    """
    pairs = (
        ("ANTHROPIC_API_KEY", config.anthropic_api_key),
        ("OPENAI_API_KEY", config.openai_api_key),
        ("NVIDIA_API_KEY", config.nvidia_api_key),
        ("TAVILY_API_KEY", config.tavily_api_key),
    )
    for env_name, api_key in pairs:
        if api_key and not os.environ.get(env_name):
            os.environ[env_name] = api_key
@@ -0,0 +1,21 @@
1
+ """LLM module for EvoScientist.
2
+
3
+ Provides a unified interface for creating chat model instances
4
+ with support for multiple providers.
5
+ """
6
+
7
+ from .models import (
8
+ MODELS,
9
+ DEFAULT_MODEL,
10
+ get_chat_model,
11
+ list_models,
12
+ get_model_info,
13
+ )
14
+
15
+ __all__ = [
16
+ "MODELS",
17
+ "DEFAULT_MODEL",
18
+ "get_chat_model",
19
+ "list_models",
20
+ "get_model_info",
21
+ ]
@@ -0,0 +1,99 @@
1
+ """LLM model configuration based on LangChain init_chat_model.
2
+
3
+ This module provides a unified interface for creating chat model instances
4
+ with support for multiple providers (Anthropic, OpenAI) and convenient
5
+ short names for common models.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from typing import Any
11
+
12
+ from langchain.chat_models import init_chat_model
13
+
14
# Model registry: short_name -> (model_id, provider)
# Short names are convenience aliases accepted by get_chat_model(); the
# model_id is the provider-facing identifier passed to init_chat_model.
MODELS: dict[str, tuple[str, str]] = {
    # Anthropic
    "claude-sonnet-4-5": ("claude-sonnet-4-5-20250929", "anthropic"),
    "claude-opus-4-5": ("claude-opus-4-5-20251101", "anthropic"),
    "claude-3-5-sonnet": ("claude-3-5-sonnet-20241022", "anthropic"),
    "claude-3-5-haiku": ("claude-3-5-haiku-20241022", "anthropic"),
    # OpenAI
    "gpt-4o": ("gpt-4o", "openai"),
    "gpt-4o-mini": ("gpt-4o-mini", "openai"),
    "o1": ("o1", "openai"),
    "o1-mini": ("o1-mini", "openai"),
    # NVIDIA
    "glm4.7": ("z-ai/glm4.7", "nvidia"),
    "deepseek-v3.1": ("deepseek-ai/deepseek-v3.1-terminus", "nvidia"),
    "nemotron-nano": ("nvidia/nemotron-3-nano-30b-a3b", "nvidia"),
}

# Short name used when get_chat_model() is called without a model argument.
DEFAULT_MODEL = "claude-sonnet-4-5"
33
+
34
+
35
def get_chat_model(
    model: str | None = None,
    provider: str | None = None,
    **kwargs: Any,
) -> Any:
    """Create a LangChain chat model instance.

    Args:
        model: Model name (short name like 'claude-sonnet-4-5' or full ID
            like 'claude-sonnet-4-5-20250929'). Defaults to DEFAULT_MODEL.
        provider: Override the provider (e.g., 'anthropic', 'openai').
            If not specified, inferred from model name or defaults to 'anthropic'.
        **kwargs: Additional arguments passed to init_chat_model (e.g., temperature).

    Returns:
        A LangChain chat model instance.

    Examples:
        >>> model = get_chat_model()  # Uses default (claude-sonnet-4-5)
        >>> model = get_chat_model("claude-opus-4-5")  # Use short name
        >>> model = get_chat_model("gpt-4o")  # OpenAI model
        >>> model = get_chat_model("claude-3-opus-20240229", provider="anthropic")  # Full ID
    """
    name = model or DEFAULT_MODEL

    registered = MODELS.get(name)
    if registered is not None:
        # Registry hit: expand short name; explicit provider still wins.
        model_id, registry_provider = registered
        provider = provider or registry_provider
    else:
        # Not registered: treat the name as a full provider model ID and,
        # if needed, guess the provider from well-known ID shapes.
        model_id = name
        if provider is None:
            if model_id.startswith(("claude-", "anthropic")):
                provider = "anthropic"
            elif model_id.startswith(("gpt-", "o1", "davinci", "text-")):
                provider = "openai"
            elif "/" in model_id:
                # Namespaced IDs like "org/model" are NVIDIA-hosted here.
                provider = "nvidia"
            else:
                provider = "anthropic"  # default fallback

    return init_chat_model(model=model_id, model_provider=provider, **kwargs)
79
+
80
+
81
def list_models() -> list[str]:
    """Return the short names of every registered model.

    Returns:
        All keys of MODELS, in registration order; each is a valid
        argument for get_chat_model().
    """
    return [*MODELS]
88
+
89
+
90
def get_model_info(model: str) -> tuple[str, str] | None:
    """Resolve a short model name to its registry entry.

    Args:
        model: Short model name (a key of MODELS).

    Returns:
        The (model_id, provider) pair, or None for unknown names.
    """
    try:
        return MODELS[model]
    except KeyError:
        return None