spaik-sdk 0.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spaik_sdk/__init__.py +21 -0
- spaik_sdk/agent/__init__.py +0 -0
- spaik_sdk/agent/base_agent.py +249 -0
- spaik_sdk/attachments/__init__.py +22 -0
- spaik_sdk/attachments/builder.py +61 -0
- spaik_sdk/attachments/file_storage_provider.py +27 -0
- spaik_sdk/attachments/mime_types.py +118 -0
- spaik_sdk/attachments/models.py +63 -0
- spaik_sdk/attachments/provider_support.py +53 -0
- spaik_sdk/attachments/storage/__init__.py +0 -0
- spaik_sdk/attachments/storage/base_file_storage.py +32 -0
- spaik_sdk/attachments/storage/impl/__init__.py +0 -0
- spaik_sdk/attachments/storage/impl/local_file_storage.py +101 -0
- spaik_sdk/audio/__init__.py +12 -0
- spaik_sdk/audio/options.py +53 -0
- spaik_sdk/audio/providers/__init__.py +1 -0
- spaik_sdk/audio/providers/google_tts.py +77 -0
- spaik_sdk/audio/providers/openai_stt.py +71 -0
- spaik_sdk/audio/providers/openai_tts.py +111 -0
- spaik_sdk/audio/stt.py +61 -0
- spaik_sdk/audio/tts.py +124 -0
- spaik_sdk/config/credentials_provider.py +10 -0
- spaik_sdk/config/env.py +59 -0
- spaik_sdk/config/env_credentials_provider.py +7 -0
- spaik_sdk/config/get_credentials_provider.py +14 -0
- spaik_sdk/image_gen/__init__.py +9 -0
- spaik_sdk/image_gen/image_generator.py +83 -0
- spaik_sdk/image_gen/options.py +24 -0
- spaik_sdk/image_gen/providers/__init__.py +0 -0
- spaik_sdk/image_gen/providers/google.py +75 -0
- spaik_sdk/image_gen/providers/openai.py +60 -0
- spaik_sdk/llm/__init__.py +0 -0
- spaik_sdk/llm/cancellation_handle.py +10 -0
- spaik_sdk/llm/consumption/__init__.py +0 -0
- spaik_sdk/llm/consumption/consumption_estimate.py +26 -0
- spaik_sdk/llm/consumption/consumption_estimate_builder.py +113 -0
- spaik_sdk/llm/consumption/consumption_extractor.py +59 -0
- spaik_sdk/llm/consumption/token_usage.py +31 -0
- spaik_sdk/llm/converters.py +146 -0
- spaik_sdk/llm/cost/__init__.py +1 -0
- spaik_sdk/llm/cost/builtin_cost_provider.py +83 -0
- spaik_sdk/llm/cost/cost_estimate.py +8 -0
- spaik_sdk/llm/cost/cost_provider.py +28 -0
- spaik_sdk/llm/extract_error_message.py +37 -0
- spaik_sdk/llm/langchain_loop_manager.py +270 -0
- spaik_sdk/llm/langchain_service.py +196 -0
- spaik_sdk/llm/message_handler.py +188 -0
- spaik_sdk/llm/streaming/__init__.py +1 -0
- spaik_sdk/llm/streaming/block_manager.py +152 -0
- spaik_sdk/llm/streaming/models.py +42 -0
- spaik_sdk/llm/streaming/streaming_content_handler.py +157 -0
- spaik_sdk/llm/streaming/streaming_event_handler.py +215 -0
- spaik_sdk/llm/streaming/streaming_state_manager.py +58 -0
- spaik_sdk/models/__init__.py +0 -0
- spaik_sdk/models/factories/__init__.py +0 -0
- spaik_sdk/models/factories/anthropic_factory.py +33 -0
- spaik_sdk/models/factories/base_model_factory.py +71 -0
- spaik_sdk/models/factories/google_factory.py +30 -0
- spaik_sdk/models/factories/ollama_factory.py +41 -0
- spaik_sdk/models/factories/openai_factory.py +50 -0
- spaik_sdk/models/llm_config.py +46 -0
- spaik_sdk/models/llm_families.py +7 -0
- spaik_sdk/models/llm_model.py +17 -0
- spaik_sdk/models/llm_wrapper.py +25 -0
- spaik_sdk/models/model_registry.py +156 -0
- spaik_sdk/models/providers/__init__.py +0 -0
- spaik_sdk/models/providers/anthropic_provider.py +29 -0
- spaik_sdk/models/providers/azure_provider.py +31 -0
- spaik_sdk/models/providers/base_provider.py +62 -0
- spaik_sdk/models/providers/google_provider.py +26 -0
- spaik_sdk/models/providers/ollama_provider.py +26 -0
- spaik_sdk/models/providers/openai_provider.py +26 -0
- spaik_sdk/models/providers/provider_type.py +90 -0
- spaik_sdk/orchestration/__init__.py +24 -0
- spaik_sdk/orchestration/base_orchestrator.py +238 -0
- spaik_sdk/orchestration/checkpoint.py +80 -0
- spaik_sdk/orchestration/models.py +103 -0
- spaik_sdk/prompt/__init__.py +0 -0
- spaik_sdk/prompt/get_prompt_loader.py +13 -0
- spaik_sdk/prompt/local_prompt_loader.py +21 -0
- spaik_sdk/prompt/prompt_loader.py +48 -0
- spaik_sdk/prompt/prompt_loader_mode.py +14 -0
- spaik_sdk/py.typed +1 -0
- spaik_sdk/recording/__init__.py +1 -0
- spaik_sdk/recording/base_playback.py +90 -0
- spaik_sdk/recording/base_recorder.py +50 -0
- spaik_sdk/recording/conditional_recorder.py +38 -0
- spaik_sdk/recording/impl/__init__.py +1 -0
- spaik_sdk/recording/impl/local_playback.py +76 -0
- spaik_sdk/recording/impl/local_recorder.py +85 -0
- spaik_sdk/recording/langchain_serializer.py +88 -0
- spaik_sdk/server/__init__.py +1 -0
- spaik_sdk/server/api/routers/__init__.py +0 -0
- spaik_sdk/server/api/routers/api_builder.py +149 -0
- spaik_sdk/server/api/routers/audio_router_factory.py +201 -0
- spaik_sdk/server/api/routers/file_router_factory.py +111 -0
- spaik_sdk/server/api/routers/thread_router_factory.py +284 -0
- spaik_sdk/server/api/streaming/__init__.py +0 -0
- spaik_sdk/server/api/streaming/format_sse_event.py +41 -0
- spaik_sdk/server/api/streaming/negotiate_streaming_response.py +8 -0
- spaik_sdk/server/api/streaming/streaming_negotiator.py +10 -0
- spaik_sdk/server/authorization/__init__.py +0 -0
- spaik_sdk/server/authorization/base_authorizer.py +64 -0
- spaik_sdk/server/authorization/base_user.py +13 -0
- spaik_sdk/server/authorization/dummy_authorizer.py +17 -0
- spaik_sdk/server/job_processor/__init__.py +0 -0
- spaik_sdk/server/job_processor/base_job_processor.py +8 -0
- spaik_sdk/server/job_processor/thread_job_processor.py +32 -0
- spaik_sdk/server/pubsub/__init__.py +1 -0
- spaik_sdk/server/pubsub/cancellation_publisher.py +7 -0
- spaik_sdk/server/pubsub/cancellation_subscriber.py +38 -0
- spaik_sdk/server/pubsub/event_publisher.py +13 -0
- spaik_sdk/server/pubsub/impl/__init__.py +1 -0
- spaik_sdk/server/pubsub/impl/local_cancellation_pubsub.py +48 -0
- spaik_sdk/server/pubsub/impl/signalr_publisher.py +36 -0
- spaik_sdk/server/queue/__init__.py +1 -0
- spaik_sdk/server/queue/agent_job_queue.py +27 -0
- spaik_sdk/server/queue/impl/__init__.py +1 -0
- spaik_sdk/server/queue/impl/azure_queue.py +24 -0
- spaik_sdk/server/response/__init__.py +0 -0
- spaik_sdk/server/response/agent_response_generator.py +39 -0
- spaik_sdk/server/response/response_generator.py +13 -0
- spaik_sdk/server/response/simple_agent_response_generator.py +14 -0
- spaik_sdk/server/services/__init__.py +0 -0
- spaik_sdk/server/services/thread_converters.py +113 -0
- spaik_sdk/server/services/thread_models.py +90 -0
- spaik_sdk/server/services/thread_service.py +91 -0
- spaik_sdk/server/storage/__init__.py +1 -0
- spaik_sdk/server/storage/base_thread_repository.py +51 -0
- spaik_sdk/server/storage/impl/__init__.py +0 -0
- spaik_sdk/server/storage/impl/in_memory_thread_repository.py +100 -0
- spaik_sdk/server/storage/impl/local_file_thread_repository.py +217 -0
- spaik_sdk/server/storage/thread_filter.py +166 -0
- spaik_sdk/server/storage/thread_metadata.py +53 -0
- spaik_sdk/thread/__init__.py +0 -0
- spaik_sdk/thread/adapters/__init__.py +0 -0
- spaik_sdk/thread/adapters/cli/__init__.py +0 -0
- spaik_sdk/thread/adapters/cli/block_display.py +92 -0
- spaik_sdk/thread/adapters/cli/display_manager.py +84 -0
- spaik_sdk/thread/adapters/cli/live_cli.py +235 -0
- spaik_sdk/thread/adapters/event_adapter.py +28 -0
- spaik_sdk/thread/adapters/streaming_block_adapter.py +57 -0
- spaik_sdk/thread/adapters/sync_adapter.py +76 -0
- spaik_sdk/thread/models.py +224 -0
- spaik_sdk/thread/thread_container.py +468 -0
- spaik_sdk/tools/__init__.py +0 -0
- spaik_sdk/tools/impl/__init__.py +0 -0
- spaik_sdk/tools/impl/mcp_tool_provider.py +93 -0
- spaik_sdk/tools/impl/search_tool_provider.py +18 -0
- spaik_sdk/tools/tool_provider.py +131 -0
- spaik_sdk/tracing/__init__.py +13 -0
- spaik_sdk/tracing/agent_trace.py +72 -0
- spaik_sdk/tracing/get_trace_sink.py +15 -0
- spaik_sdk/tracing/local_trace_sink.py +23 -0
- spaik_sdk/tracing/trace_sink.py +19 -0
- spaik_sdk/tracing/trace_sink_mode.py +14 -0
- spaik_sdk/utils/__init__.py +0 -0
- spaik_sdk/utils/init_logger.py +24 -0
- spaik_sdk-0.6.2.dist-info/METADATA +379 -0
- spaik_sdk-0.6.2.dist-info/RECORD +161 -0
- spaik_sdk-0.6.2.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from typing import Dict, Set
|
|
3
|
+
|
|
4
|
+
from spaik_sdk.models.llm_families import LLMFamilies
|
|
5
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class ModelRegistry:
    """Registry containing all built-in models with extensibility support.

    Built-in models are declared as class attributes of type ``LLMModel``;
    additional models can be registered at runtime via :meth:`register_custom`.
    """

    # Anthropic models
    CLAUDE_3_7_SONNET_FEB_2025 = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-3-7-sonnet-20250219", prompt_caching=True)
    CLAUDE_3_7_SONNET = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-3-7-sonnet-latest", prompt_caching=True)
    CLAUDE_4_SONNET = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-sonnet-4-20250514", prompt_caching=True)
    CLAUDE_4_OPUS = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-opus-4-20250514", prompt_caching=True)
    CLAUDE_4_1_OPUS = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-opus-4-1-20250805", prompt_caching=True)
    CLAUDE_4_SONNET_MAY_2025 = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-sonnet-4-20250514", prompt_caching=True)
    CLAUDE_4_OPUS_MAY_2025 = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-opus-4-20250514", prompt_caching=True)
    CLAUDE_4_5_SONNET = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-sonnet-4-5-20250929", prompt_caching=True)
    CLAUDE_4_5_HAIKU = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-haiku-4-5-20251001", prompt_caching=True)
    CLAUDE_4_5_OPUS = LLMModel(family=LLMFamilies.ANTHROPIC, name="claude-opus-4-5-20251101", prompt_caching=True)

    # OpenAI models
    GPT_4_1 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-4.1", reasoning=False, prompt_caching=True)
    GPT_4O = LLMModel(family=LLMFamilies.OPENAI, name="gpt-4o", reasoning=False, prompt_caching=True)
    O4_MINI = LLMModel(family=LLMFamilies.OPENAI, name="o4-mini")
    O4_MINI_APRIL_2025 = LLMModel(family=LLMFamilies.OPENAI, name="o4-mini-2025-04-16")
    GPT_5 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5", reasoning=True, prompt_caching=True)
    GPT_5_MINI = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5-mini", reasoning=True, prompt_caching=True)
    GPT_5_NANO = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5-nano", reasoning=True, prompt_caching=True)
    GPT_5_1 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1", reasoning=True, prompt_caching=True)
    GPT_5_1_CODEX = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1-codex", reasoning=True, prompt_caching=True)
    GPT_5_1_CODEX_MINI = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1-codex-mini", reasoning=True, prompt_caching=True)
    GPT_5_1_CODEX_MAX = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1-codex-max", reasoning=True, prompt_caching=True)
    GPT_5_2 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.2", reasoning=True, prompt_caching=True)
    GPT_5_2_PRO = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.2-pro", reasoning=True, prompt_caching=True)

    # Google models
    GEMINI_2_5_FLASH = LLMModel(family=LLMFamilies.GOOGLE, name="gemini-2.5-flash", prompt_caching=True)
    GEMINI_2_5_PRO = LLMModel(family=LLMFamilies.GOOGLE, name="gemini-2.5-pro", prompt_caching=True)
    GEMINI_2_5_FLASH_MAY_2025 = LLMModel(family=LLMFamilies.GOOGLE, name="gemini-2.5-flash", prompt_caching=True)
    GEMINI_2_5_PRO_MAY_2025 = LLMModel(family=LLMFamilies.GOOGLE, name="gemini-2.5-pro", prompt_caching=True)
    GEMINI_3_FLASH = LLMModel(family=LLMFamilies.GOOGLE, name="gemini-3-flash-preview", prompt_caching=True)
    GEMINI_3_PRO = LLMModel(family=LLMFamilies.GOOGLE, name="gemini-3-pro-preview", prompt_caching=True)

    # Registry for custom models
    _custom_models: Set[LLMModel] = set()

    @classmethod
    def register_custom(cls, model: LLMModel) -> LLMModel:
        """Register a custom model; returns it so the call can be used inline."""
        cls._custom_models.add(model)
        return model

    @classmethod
    def get_all_built_in(cls) -> Set[LLMModel]:
        """Get all built-in models declared as public class attributes."""
        built_ins = set()
        for attr_name in dir(cls):
            if attr_name.startswith("_"):
                continue
            # Single lookup + isinstance is sufficient; the previous
            # callable() pre-filter was redundant and cost a second getattr.
            attr = getattr(cls, attr_name)
            if isinstance(attr, LLMModel):
                built_ins.add(attr)
        return built_ins

    @classmethod
    def get_all(cls) -> Set[LLMModel]:
        """Get all models (built-in + custom)."""
        return cls.get_all_built_in().union(cls._custom_models)

    @classmethod
    def get_by_family(cls, family: str) -> Set[LLMModel]:
        """Get all models for a specific family."""
        return {model for model in cls.get_all() if model.family == family}

    @classmethod
    def from_name(cls, name: str) -> LLMModel:
        """Find model by name with alias support."""
        return _find_model_by_name(name, cls._get_aliases())

    @classmethod
    def _get_aliases(cls) -> Dict[str, LLMModel]:
        """Get aliases mapping (human-friendly shorthand -> model)."""
        return {
            "sonnet": cls.CLAUDE_4_SONNET,
            "sonnet 3.7": cls.CLAUDE_3_7_SONNET,
            "sonnet 4.5": cls.CLAUDE_4_5_SONNET,
            "haiku": cls.CLAUDE_4_5_HAIKU,
            "haiku 4.5": cls.CLAUDE_4_5_HAIKU,
            "opus": cls.CLAUDE_4_5_OPUS,
            "opus 4.1": cls.CLAUDE_4_1_OPUS,
            "opus 4.5": cls.CLAUDE_4_5_OPUS,
            "claude 4.1 opus": cls.CLAUDE_4_1_OPUS,
            "claude opus 4.1": cls.CLAUDE_4_1_OPUS,
            "claude 4.5 opus": cls.CLAUDE_4_5_OPUS,
            "claude opus 4.5": cls.CLAUDE_4_5_OPUS,
            "claude": cls.CLAUDE_4_SONNET,
            "claude 3.7 sonnet": cls.CLAUDE_3_7_SONNET,
            "claude 4 sonnet": cls.CLAUDE_4_SONNET,
            "claude 4.5 sonnet": cls.CLAUDE_4_5_SONNET,
            "claude 4.5 haiku": cls.CLAUDE_4_5_HAIKU,
            "claude 4 opus": cls.CLAUDE_4_OPUS,
            "o4 mini": cls.O4_MINI,
            "o4 mini 2025-04-16": cls.O4_MINI_APRIL_2025,
            "gpt 4.1": cls.GPT_4_1,
            "gpt 4o": cls.GPT_4O,
            "gpt 5": cls.GPT_5,
            "gpt 5 mini": cls.GPT_5_MINI,
            "gpt 5 nano": cls.GPT_5_NANO,
            "gpt 5.1": cls.GPT_5_1,
            "gpt 5.1 codex": cls.GPT_5_1_CODEX,
            "gpt 5.1 codex mini": cls.GPT_5_1_CODEX_MINI,
            "gpt 5.1 codex max": cls.GPT_5_1_CODEX_MAX,
            "gpt 5.2": cls.GPT_5_2,
            "gpt 5.2 pro": cls.GPT_5_2_PRO,
            "gemini 2.5 flash": cls.GEMINI_2_5_FLASH,
            "gemini 2.5 pro": cls.GEMINI_2_5_PRO,
            "gemini 3 flash": cls.GEMINI_3_FLASH,
            "gemini 3.0 flash": cls.GEMINI_3_FLASH,
            "gemini 3 pro": cls.GEMINI_3_PRO,
            "gemini 3.0 pro": cls.GEMINI_3_PRO,
        }
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _normalize_name(name: str) -> str:
|
|
126
|
+
"""Normalize name by keeping only alphanumeric characters and converting to lowercase."""
|
|
127
|
+
return re.sub(r"[^a-zA-Z0-9]", "", name.lower())
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _find_model_by_name(name: str, aliases: Dict[str, LLMModel]) -> LLMModel:
    """Find model by name with alias support.

    Resolution order: normalized alias match, then exact name match,
    then an unambiguous prefix match over all registered models.
    """
    # Aliases win first; comparison ignores case and non-alphanumerics.
    target = _normalize_name(name)
    for alias, model in aliases.items():
        if _normalize_name(alias) == target:
            return model

    # Next, an exact match on the model's canonical name.
    all_models = ModelRegistry.get_all()
    for model in all_models:
        if model.name == name:
            return model

    # Finally, prefix matches; these must resolve to exactly one model.
    matches = [model for model in all_models if model.name.startswith(name)]
    if not matches:
        raise ValueError(f"No LLMModel found starting with '{name}'")
    if len(matches) == 1:
        return matches[0]
    match_names = [m.name for m in matches]
    raise ValueError(f"Ambiguous LLMModel name '{name}'. Could match: {', '.join(match_names)}")
|
|
File without changes
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
from typing import Any, Collection, Dict
|
|
2
|
+
|
|
3
|
+
from langchain_anthropic import ChatAnthropic
|
|
4
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
5
|
+
|
|
6
|
+
from spaik_sdk.config.get_credentials_provider import credentials_provider
|
|
7
|
+
from spaik_sdk.models.factories.anthropic_factory import AnthropicModelFactory
|
|
8
|
+
from spaik_sdk.models.llm_config import LLMConfig
|
|
9
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
10
|
+
from spaik_sdk.models.providers.base_provider import BaseProvider
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class AnthropicProvider(BaseProvider):
    """Provider backed by the Anthropic API via langchain's ChatAnthropic."""

    def get_supported_models(self) -> Collection[LLMModel]:
        """Return the models registered for the Anthropic provider."""
        return AnthropicModelFactory.MODELS

    def get_model_config(self, config: LLMConfig) -> Dict[str, Any]:
        """Return Anthropic-specific configuration (key + beta headers)."""
        api_key = credentials_provider.get_provider_key("anthropic")
        extra_headers = {"anthropic-beta": "prompt-caching-2024-07-31"}  # TODO add output length header
        return {
            "anthropic_api_key": api_key,
            "model_kwargs": {"extra_headers": extra_headers},
        }

    def create_langchain_model(self, config: LLMConfig, full_config: Dict[str, Any]) -> BaseChatModel:
        """Build the ChatAnthropic model from the merged configuration."""
        return ChatAnthropic(**full_config)
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
from typing import Any, Collection, Dict
|
|
2
|
+
|
|
3
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
4
|
+
from langchain_openai import AzureChatOpenAI
|
|
5
|
+
|
|
6
|
+
from spaik_sdk.config.env import env_config
|
|
7
|
+
from spaik_sdk.models.factories.openai_factory import OpenAIModelFactory
|
|
8
|
+
from spaik_sdk.models.llm_config import LLMConfig
|
|
9
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
10
|
+
from spaik_sdk.models.providers.base_provider import BaseProvider
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class AzureProvider(BaseProvider):
    """Provider backed by Azure AI Foundry via langchain's AzureChatOpenAI."""

    def get_supported_models(self) -> Collection[LLMModel]:
        """Get list of models supported by Azure provider (same catalog as OpenAI)."""
        return OpenAIModelFactory.MODELS

    def get_model_config(self, config: LLMConfig) -> Dict[str, Any]:
        """Get Azure AI Foundry provider configuration (provider-specific only)."""
        # Fetch the Azure credentials once instead of once per field.
        azure_keys = env_config.get_azure_keys()
        return {
            "api_key": azure_keys["api_key"],
            "api_version": azure_keys["api_version"],
            "azure_endpoint": azure_keys["endpoint"],
        }

    def create_langchain_model(self, config: LLMConfig, full_config: Dict[str, Any]) -> BaseChatModel:
        """Create Azure langchain model with complete configuration and model-specific deployments."""
        # Azure deployments are configured per model, keyed by the
        # lowercased model name (e.g. "gpt-4o_deployment").
        deployment_key = f"{config.model.name.lower()}_deployment"
        full_config["deployment_name"] = env_config.get_azure_keys()[deployment_key]

        return AzureChatOpenAI(**full_config)
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
from typing import TYPE_CHECKING, Any, Collection, Dict, Optional
|
|
3
|
+
|
|
4
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from spaik_sdk.models.llm_config import LLMConfig
|
|
8
|
+
|
|
9
|
+
from spaik_sdk.config.env import env_config
|
|
10
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
11
|
+
from spaik_sdk.models.providers.provider_type import ProviderType
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class BaseProvider(ABC):
    """Abstract base for LLM providers that build langchain chat models."""

    @abstractmethod
    def get_supported_models(self) -> Collection[LLMModel]:
        """Return the models this provider can serve."""
        pass

    def supports_model(self, model: LLMModel) -> bool:
        """True when *model* is among this provider's supported models."""
        return model in self.get_supported_models()

    @abstractmethod
    def get_model_config(self, config: "LLMConfig") -> Dict[str, Any]:
        """Return provider-specific configuration for *config*."""
        pass

    @abstractmethod
    def create_langchain_model(self, config: "LLMConfig", full_config: Dict[str, Any]) -> BaseChatModel:
        """Instantiate the langchain chat model from the complete configuration."""
        pass

    @classmethod
    def create_provider(cls, provider_type: Optional[ProviderType] = None) -> "BaseProvider":
        """Factory method to create appropriate provider instance.

        When *provider_type* is omitted, the environment-configured
        provider type is used.
        """
        if provider_type is None:
            provider_type = env_config.get_provider_type()

        # Concrete providers are imported lazily inside each branch to
        # avoid circular imports between this module and its subclasses.
        if provider_type == ProviderType.ANTHROPIC:
            from spaik_sdk.models.providers.anthropic_provider import AnthropicProvider

            return AnthropicProvider()
        if provider_type == ProviderType.AZURE_AI_FOUNDRY:
            from spaik_sdk.models.providers.azure_provider import AzureProvider

            return AzureProvider()
        if provider_type == ProviderType.OPENAI_DIRECT:
            from spaik_sdk.models.providers.openai_provider import OpenAIProvider

            return OpenAIProvider()
        if provider_type == ProviderType.GOOGLE:
            from spaik_sdk.models.providers.google_provider import GoogleProvider

            return GoogleProvider()
        if provider_type == ProviderType.OLLAMA:
            from spaik_sdk.models.providers.ollama_provider import OllamaProvider

            return OllamaProvider()
        raise ValueError(f"Unsupported provider type: {provider_type}")
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from typing import Any, Collection, Dict
|
|
2
|
+
|
|
3
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
4
|
+
from langchain_google_genai import ChatGoogleGenerativeAI # type: ignore
|
|
5
|
+
|
|
6
|
+
from spaik_sdk.config.get_credentials_provider import credentials_provider
|
|
7
|
+
from spaik_sdk.models.factories.google_factory import GoogleModelFactory
|
|
8
|
+
from spaik_sdk.models.llm_config import LLMConfig
|
|
9
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
10
|
+
from spaik_sdk.models.providers.base_provider import BaseProvider
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class GoogleProvider(BaseProvider):
    """Provider backed by Google Generative AI via ChatGoogleGenerativeAI."""

    def get_supported_models(self) -> Collection[LLMModel]:
        """Return the models registered for the Google provider."""
        return GoogleModelFactory.MODELS

    def get_model_config(self, config: LLMConfig) -> Dict[str, Any]:
        """Return Google-specific configuration (API key only)."""
        api_key = credentials_provider.get_provider_key("google")
        return {"google_api_key": api_key}

    def create_langchain_model(self, config: LLMConfig, full_config: Dict[str, Any]) -> BaseChatModel:
        """Build the ChatGoogleGenerativeAI model from the merged configuration."""
        return ChatGoogleGenerativeAI(**full_config)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from typing import Any, Collection, Dict
|
|
2
|
+
|
|
3
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
4
|
+
from langchain_ollama import ChatOllama
|
|
5
|
+
|
|
6
|
+
from spaik_sdk.config.get_credentials_provider import credentials_provider
|
|
7
|
+
from spaik_sdk.models.factories.ollama_factory import OllamaModelFactory
|
|
8
|
+
from spaik_sdk.models.llm_config import LLMConfig
|
|
9
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
10
|
+
from spaik_sdk.models.providers.base_provider import BaseProvider
|
|
11
|
+
from spaik_sdk.utils.init_logger import init_logger
|
|
12
|
+
|
|
13
|
+
# Module-level logger for this provider (not referenced in this module yet).
logger = init_logger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class OllamaProvider(BaseProvider):
    """Provider backed by an Ollama server via langchain's ChatOllama."""

    def get_supported_models(self) -> Collection[LLMModel]:
        """Return the models registered for the Ollama provider."""
        return OllamaModelFactory.MODELS

    def get_model_config(self, config: LLMConfig) -> Dict[str, Any]:
        """Return Ollama-specific configuration (server base URL)."""
        base_url = credentials_provider.get_key("OLLAMA_BASE_URL", "http://localhost:11434")
        return {"base_url": base_url}

    def create_langchain_model(self, config: LLMConfig, full_config: Dict[str, Any]) -> BaseChatModel:
        """Build the ChatOllama model for the configured model name."""
        return ChatOllama(model=config.model.name, **full_config)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from typing import Any, Collection, Dict
|
|
2
|
+
|
|
3
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
4
|
+
from langchain_openai import ChatOpenAI
|
|
5
|
+
|
|
6
|
+
from spaik_sdk.config.get_credentials_provider import credentials_provider
|
|
7
|
+
from spaik_sdk.models.factories.openai_factory import OpenAIModelFactory
|
|
8
|
+
from spaik_sdk.models.llm_config import LLMConfig
|
|
9
|
+
from spaik_sdk.models.llm_model import LLMModel
|
|
10
|
+
from spaik_sdk.models.providers.base_provider import BaseProvider
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class OpenAIProvider(BaseProvider):
    """Provider talking directly to the OpenAI API via langchain's ChatOpenAI."""

    def get_supported_models(self) -> Collection[LLMModel]:
        """Return the models registered for the OpenAI provider."""
        return OpenAIModelFactory.MODELS

    def get_model_config(self, config: LLMConfig) -> Dict[str, Any]:
        """Return OpenAI-specific configuration (API key only)."""
        api_key = credentials_provider.get_provider_key("openai")
        return {"api_key": api_key}

    def create_langchain_model(self, config: LLMConfig, full_config: Dict[str, Any]) -> BaseChatModel:
        """Build the ChatOpenAI model from the merged configuration."""
        return ChatOpenAI(**full_config)
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Dict, Type, TypeVar
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class ProviderType(Enum):
    """Supported LLM provider backends."""

    ANTHROPIC = "anthropic"
    AZURE_AI_FOUNDRY = "azure_ai_foundry"
    OPENAI_DIRECT = "openai"
    GOOGLE = "google"
    OLLAMA = "ollama"

    @classmethod
    def from_name(cls, name: str) -> "ProviderType":
        """Resolve a provider from a (possibly fuzzy) name, honoring aliases."""
        return _find_enum_by_name(cls, name, PROVIDER_ALIASES)

    @classmethod
    def from_model_name(cls, model_name: str) -> "ProviderType":
        """Infer the provider from a well-known model-name prefix.

        Raises:
            ValueError: if the prefix is not recognized (e.g. Ollama model
                names, which are arbitrary and carry no prefix).
        """
        if model_name.startswith("claude"):
            return cls.ANTHROPIC
        elif model_name.startswith("gemini"):
            return cls.GOOGLE
        elif model_name.startswith(("gpt", "o3", "o4")):
            # startswith accepts a tuple — one call instead of an `or` chain.
            return cls.OPENAI_DIRECT
        else:
            # For ollama models, we can't determine from name alone since they're arbitrary
            # Users will need to specify family="ollama" when creating LLMModel
            # (message typo fixed: "Cant" -> "Can't")
            raise ValueError(f"Can't determine provider type from model name: {model_name}")

    @classmethod
    def from_family(cls, family: str) -> "ProviderType":
        """Get provider type from model family (case-insensitive)."""
        family_lower = family.lower()
        if family_lower == "anthropic":
            return cls.ANTHROPIC
        elif family_lower == "openai":
            return cls.OPENAI_DIRECT
        elif family_lower == "google":
            return cls.GOOGLE
        elif family_lower == "ollama":
            return cls.OLLAMA
        else:
            raise ValueError(f"Unknown model family: {family}")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
# Human-friendly shorthand names accepted by ProviderType.from_name,
# mapped to their canonical provider type.
PROVIDER_ALIASES = {
    "claude": ProviderType.ANTHROPIC,
    "ollama": ProviderType.OLLAMA,
    "azure": ProviderType.AZURE_AI_FOUNDRY,
    "foundry": ProviderType.AZURE_AI_FOUNDRY,
    "openai": ProviderType.OPENAI_DIRECT,
    "google": ProviderType.GOOGLE,
    "gemini": ProviderType.GOOGLE,
}
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# Generic type variable for the enum-lookup helper below (any Enum subclass).
T = TypeVar("T", bound=Enum)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _normalize_name(name: str) -> str:
|
|
61
|
+
"""Normalize name by keeping only alphanumeric characters and converting to lowercase."""
|
|
62
|
+
return re.sub(r"[^a-zA-Z0-9]", "", name.lower())
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _find_enum_by_name(enum_cls: Type[T], name: str, aliases: Dict[str, T]) -> T:
    """Shared logic for finding enum members by name with alias support.

    Resolution order: normalized alias match, exact value match, then an
    unambiguous prefix match over the enum's values.
    """
    # Aliases win first; comparison ignores case and non-alphanumerics.
    target = _normalize_name(name)
    for alias, member in aliases.items():
        if _normalize_name(alias) == target:
            return member

    # Next, an exact match against enum values.
    for member in enum_cls:
        if member.value == name:
            return member

    # Finally, prefix matches; these must resolve to exactly one member.
    matches = [member for member in enum_cls if member.value.startswith(name)]
    if not matches:
        raise ValueError(f"No {enum_cls.__name__} found starting with '{name}'")
    if len(matches) == 1:
        return matches[0]
    match_names = [m.value for m in matches]
    raise ValueError(f"Ambiguous {enum_cls.__name__} name '{name}'. Could match: {', '.join(match_names)}")
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
from spaik_sdk.orchestration.base_orchestrator import BaseOrchestrator, SimpleOrchestrator
|
|
2
|
+
from spaik_sdk.orchestration.checkpoint import (
|
|
3
|
+
CheckpointProvider,
|
|
4
|
+
DictCheckpointProvider,
|
|
5
|
+
InMemoryCheckpointProvider,
|
|
6
|
+
)
|
|
7
|
+
from spaik_sdk.orchestration.models import (
|
|
8
|
+
OrchestratorEvent,
|
|
9
|
+
ProgressUpdate,
|
|
10
|
+
StepInfo,
|
|
11
|
+
StepStatus,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
# Public API of the orchestration package; mirrors the imports above.
__all__ = [
    "BaseOrchestrator",
    "SimpleOrchestrator",
    "CheckpointProvider",
    "InMemoryCheckpointProvider",
    "DictCheckpointProvider",
    "OrchestratorEvent",
    "ProgressUpdate",
    "StepInfo",
    "StepStatus",
]
|