EvoScientist 0.0.1.dev4__py3-none-any.whl → 0.1.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. EvoScientist/EvoScientist.py +25 -61
  2. EvoScientist/__init__.py +0 -19
  3. EvoScientist/backends.py +0 -26
  4. EvoScientist/cli.py +1365 -480
  5. EvoScientist/middleware.py +7 -56
  6. EvoScientist/skills/clip/SKILL.md +253 -0
  7. EvoScientist/skills/clip/references/applications.md +207 -0
  8. EvoScientist/skills/langgraph-docs/SKILL.md +36 -0
  9. EvoScientist/skills/tensorboard/SKILL.md +629 -0
  10. EvoScientist/skills/tensorboard/references/integrations.md +638 -0
  11. EvoScientist/skills/tensorboard/references/profiling.md +545 -0
  12. EvoScientist/skills/tensorboard/references/visualization.md +620 -0
  13. EvoScientist/skills/vllm/SKILL.md +364 -0
  14. EvoScientist/skills/vllm/references/optimization.md +226 -0
  15. EvoScientist/skills/vllm/references/quantization.md +284 -0
  16. EvoScientist/skills/vllm/references/server-deployment.md +255 -0
  17. EvoScientist/skills/vllm/references/troubleshooting.md +447 -0
  18. EvoScientist/stream/__init__.py +0 -25
  19. EvoScientist/stream/utils.py +16 -23
  20. EvoScientist/tools.py +2 -75
  21. {evoscientist-0.0.1.dev4.dist-info → evoscientist-0.1.0rc2.dist-info}/METADATA +8 -153
  22. {evoscientist-0.0.1.dev4.dist-info → evoscientist-0.1.0rc2.dist-info}/RECORD +26 -24
  23. evoscientist-0.1.0rc2.dist-info/entry_points.txt +2 -0
  24. EvoScientist/config.py +0 -274
  25. EvoScientist/llm/__init__.py +0 -21
  26. EvoScientist/llm/models.py +0 -99
  27. EvoScientist/memory.py +0 -715
  28. EvoScientist/onboard.py +0 -725
  29. EvoScientist/paths.py +0 -44
  30. EvoScientist/skills_manager.py +0 -391
  31. EvoScientist/stream/display.py +0 -604
  32. EvoScientist/stream/events.py +0 -415
  33. EvoScientist/stream/state.py +0 -343
  34. evoscientist-0.0.1.dev4.dist-info/entry_points.txt +0 -5
  35. {evoscientist-0.0.1.dev4.dist-info → evoscientist-0.1.0rc2.dist-info}/WHEEL +0 -0
  36. {evoscientist-0.0.1.dev4.dist-info → evoscientist-0.1.0rc2.dist-info}/licenses/LICENSE +0 -0
  37. {evoscientist-0.0.1.dev4.dist-info → evoscientist-0.1.0rc2.dist-info}/top_level.txt +0 -0
@@ -1,99 +0,0 @@
1
- """LLM model configuration based on LangChain init_chat_model.
2
-
3
- This module provides a unified interface for creating chat model instances
4
- with support for multiple providers (Anthropic, OpenAI) and convenient
5
- short names for common models.
6
- """
7
-
8
- from __future__ import annotations
9
-
10
- from typing import Any
11
-
12
- from langchain.chat_models import init_chat_model
13
-
14
- # Model registry: short_name -> (model_id, provider)
15
- MODELS: dict[str, tuple[str, str]] = {
16
- # Anthropic
17
- "claude-sonnet-4-5": ("claude-sonnet-4-5-20250929", "anthropic"),
18
- "claude-opus-4-5": ("claude-opus-4-5-20251101", "anthropic"),
19
- "claude-3-5-sonnet": ("claude-3-5-sonnet-20241022", "anthropic"),
20
- "claude-3-5-haiku": ("claude-3-5-haiku-20241022", "anthropic"),
21
- # OpenAI
22
- "gpt-4o": ("gpt-4o", "openai"),
23
- "gpt-4o-mini": ("gpt-4o-mini", "openai"),
24
- "o1": ("o1", "openai"),
25
- "o1-mini": ("o1-mini", "openai"),
26
- # NVIDIA
27
- "glm4.7": ("z-ai/glm4.7", "nvidia"),
28
- "deepseek-v3.1": ("deepseek-ai/deepseek-v3.1-terminus", "nvidia"),
29
- "nemotron-nano": ("nvidia/nemotron-3-nano-30b-a3b", "nvidia"),
30
- }
31
-
32
- DEFAULT_MODEL = "claude-sonnet-4-5"
33
-
34
-
35
- def get_chat_model(
36
- model: str | None = None,
37
- provider: str | None = None,
38
- **kwargs: Any,
39
- ) -> Any:
40
- """Get a chat model instance.
41
-
42
- Args:
43
- model: Model name (short name like 'claude-sonnet-4-5' or full ID
44
- like 'claude-sonnet-4-5-20250929'). Defaults to DEFAULT_MODEL.
45
- provider: Override the provider (e.g., 'anthropic', 'openai').
46
- If not specified, inferred from model name or defaults to 'anthropic'.
47
- **kwargs: Additional arguments passed to init_chat_model (e.g., temperature).
48
-
49
- Returns:
50
- A LangChain chat model instance.
51
-
52
- Examples:
53
- >>> model = get_chat_model() # Uses default (claude-sonnet-4-5)
54
- >>> model = get_chat_model("claude-opus-4-5") # Use short name
55
- >>> model = get_chat_model("gpt-4o") # OpenAI model
56
- >>> model = get_chat_model("claude-3-opus-20240229", provider="anthropic") # Full ID
57
- """
58
- model = model or DEFAULT_MODEL
59
-
60
- # Look up short name in registry
61
- if model in MODELS:
62
- model_id, default_provider = MODELS[model]
63
- provider = provider or default_provider
64
- else:
65
- # Assume it's a full model ID
66
- model_id = model
67
- # Try to infer provider from model ID prefix
68
- if provider is None:
69
- if model_id.startswith(("claude-", "anthropic")):
70
- provider = "anthropic"
71
- elif model_id.startswith(("gpt-", "o1", "davinci", "text-")):
72
- provider = "openai"
73
- elif "/" in model_id:
74
- provider = "nvidia"
75
- else:
76
- provider = "anthropic" # Default fallback
77
-
78
- return init_chat_model(model=model_id, model_provider=provider, **kwargs)
79
-
80
-
81
- def list_models() -> list[str]:
82
- """List all available model short names.
83
-
84
- Returns:
85
- List of model short names that can be passed to get_chat_model().
86
- """
87
- return list(MODELS.keys())
88
-
89
-
90
- def get_model_info(model: str) -> tuple[str, str] | None:
91
- """Get the (model_id, provider) tuple for a short name.
92
-
93
- Args:
94
- model: Short model name.
95
-
96
- Returns:
97
- Tuple of (model_id, provider) or None if not found.
98
- """
99
- return MODELS.get(model)