hanzo 0.3.21__py3-none-any.whl → 0.3.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of hanzo might be problematic.

@@ -0,0 +1,399 @@
+"""Unified Model Registry - Single source of truth for all AI model mappings.
+
+This module provides a centralized registry for AI model configurations,
+eliminating duplication and ensuring consistency across the codebase.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Optional, Set
+from enum import Enum
+
+
+class ModelProvider(Enum):
+    """Enumeration of AI model providers."""
+
+    ANTHROPIC = "anthropic"
+    OPENAI = "openai"
+    GOOGLE = "google"
+    XAI = "xai"
+    OLLAMA = "ollama"
+    DEEPSEEK = "deepseek"
+    MISTRAL = "mistral"
+    META = "meta"
+    HANZO = "hanzo"
+
+
+@dataclass(frozen=True)
+class ModelConfig:
+    """Configuration for a single AI model."""
+
+    full_name: str
+    provider: ModelProvider
+    aliases: Set[str] = field(default_factory=set)
+    default_params: Dict[str, Any] = field(default_factory=dict)
+    supports_vision: bool = False
+    supports_tools: bool = False
+    supports_streaming: bool = True
+    context_window: int = 8192
+    max_output: int = 4096
+    api_key_env: Optional[str] = None
+    cli_command: Optional[str] = None
+
+
+class ModelRegistry:
+    """Centralized registry for all AI models.
+
+    This is the single source of truth for model configurations,
+    ensuring no duplication across the codebase.
+    """
+
+    _instance: Optional[ModelRegistry] = None
+    _models: Dict[str, ModelConfig] = {}
+
+    def __new__(cls) -> ModelRegistry:
+        """Singleton pattern to ensure single registry instance."""
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+            cls._instance._initialize_models()
+        return cls._instance
+
+    def _initialize_models(self) -> None:
+        """Initialize all model configurations."""
+        # Claude models
+        self._register(ModelConfig(
+            full_name="claude-3-5-sonnet-20241022",
+            provider=ModelProvider.ANTHROPIC,
+            aliases={"claude", "cc", "claude-code", "sonnet", "sonnet-4.1"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=200000,
+            max_output=8192,
+            api_key_env="ANTHROPIC_API_KEY",
+            cli_command="claude",
+        ))
+
+        self._register(ModelConfig(
+            full_name="claude-opus-4-1-20250805",
+            provider=ModelProvider.ANTHROPIC,
+            aliases={"opus", "opus-4.1", "claude-opus"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=200000,
+            max_output=8192,
+            api_key_env="ANTHROPIC_API_KEY",
+            cli_command="claude",
+        ))
+
+        self._register(ModelConfig(
+            full_name="claude-3-haiku-20240307",
+            provider=ModelProvider.ANTHROPIC,
+            aliases={"haiku", "claude-haiku"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=200000,
+            max_output=4096,
+            api_key_env="ANTHROPIC_API_KEY",
+            cli_command="claude",
+        ))
+
+        # OpenAI models
+        self._register(ModelConfig(
+            full_name="gpt-4-turbo",
+            provider=ModelProvider.OPENAI,
+            aliases={"gpt4", "gpt-4", "codex"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=128000,
+            max_output=4096,
+            api_key_env="OPENAI_API_KEY",
+            cli_command="openai",
+        ))
+
+        self._register(ModelConfig(
+            full_name="gpt-5-turbo",
+            provider=ModelProvider.OPENAI,
+            aliases={"gpt5", "gpt-5"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=256000,
+            max_output=16384,
+            api_key_env="OPENAI_API_KEY",
+            cli_command="openai",
+        ))
+
+        self._register(ModelConfig(
+            full_name="o1-preview",
+            provider=ModelProvider.OPENAI,
+            aliases={"o1", "openai-o1"},
+            supports_vision=False,
+            supports_tools=False,
+            context_window=128000,
+            max_output=32768,
+            api_key_env="OPENAI_API_KEY",
+            cli_command="openai",
+        ))
+
+        # Google models
+        self._register(ModelConfig(
+            full_name="gemini-1.5-pro",
+            provider=ModelProvider.GOOGLE,
+            aliases={"gemini", "gemini-pro"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=2000000,
+            max_output=8192,
+            api_key_env="GEMINI_API_KEY",
+            cli_command="gemini",
+        ))
+
+        self._register(ModelConfig(
+            full_name="gemini-1.5-flash",
+            provider=ModelProvider.GOOGLE,
+            aliases={"gemini-flash", "flash"},
+            supports_vision=True,
+            supports_tools=True,
+            context_window=1000000,
+            max_output=8192,
+            api_key_env="GEMINI_API_KEY",
+            cli_command="gemini",
+        ))
+
+        # xAI models
+        self._register(ModelConfig(
+            full_name="grok-2",
+            provider=ModelProvider.XAI,
+            aliases={"grok", "xai-grok"},
+            supports_vision=False,
+            supports_tools=True,
+            context_window=128000,
+            max_output=8192,
+            api_key_env="XAI_API_KEY",
+            cli_command="grok",
+        ))
+
+        # Ollama models
+        self._register(ModelConfig(
+            full_name="ollama/llama-3.2-3b",
+            provider=ModelProvider.OLLAMA,
+            aliases={"llama", "llama-3.2", "llama3"},
+            supports_vision=False,
+            supports_tools=False,
+            context_window=128000,
+            max_output=4096,
+            api_key_env=None,  # Local model
+            cli_command="ollama",
+        ))
+
+        self._register(ModelConfig(
+            full_name="ollama/mistral:7b",
+            provider=ModelProvider.MISTRAL,
+            aliases={"mistral", "mistral-7b"},
+            supports_vision=False,
+            supports_tools=False,
+            context_window=32000,
+            max_output=4096,
+            api_key_env=None,  # Local model
+            cli_command="ollama",
+        ))
+
+        # DeepSeek models
+        self._register(ModelConfig(
+            full_name="deepseek-coder-v2",
+            provider=ModelProvider.DEEPSEEK,
+            aliases={"deepseek", "deepseek-coder"},
+            supports_vision=False,
+            supports_tools=True,
+            context_window=128000,
+            max_output=8192,
+            api_key_env="DEEPSEEK_API_KEY",
+            cli_command="deepseek",
+        ))
+
+    def _register(self, config: ModelConfig) -> None:
+        """Register a model configuration.
+
+        Args:
+            config: Model configuration to register
+        """
+        # Register by full name
+        self._models[config.full_name] = config
+
+        # Register all aliases
+        for alias in config.aliases:
+            self._models[alias.lower()] = config
+
+    def get(self, model_name: str) -> Optional[ModelConfig]:
+        """Get model configuration by name or alias.
+
+        Args:
+            model_name: Model name or alias
+
+        Returns:
+            Model configuration or None if not found
+        """
+        return self._models.get(model_name.lower())
+
+    def resolve(self, model_name: str) -> str:
+        """Resolve model name or alias to full model name.
+
+        Args:
+            model_name: Model name or alias
+
+        Returns:
+            Full model name, or original if not found
+        """
+        config = self.get(model_name)
+        return config.full_name if config else model_name
+
+    def get_by_provider(self, provider: ModelProvider) -> List[ModelConfig]:
+        """Get all models for a specific provider.
+
+        Args:
+            provider: Model provider
+
+        Returns:
+            List of model configurations
+        """
+        seen = set()
+        results = []
+        for config in self._models.values():
+            if config.provider == provider and config.full_name not in seen:
+                seen.add(config.full_name)
+                results.append(config)
+        return results
+
+    def get_models_supporting(
+        self,
+        vision: Optional[bool] = None,
+        tools: Optional[bool] = None,
+        streaming: Optional[bool] = None,
+    ) -> List[ModelConfig]:
+        """Get models supporting specific features.
+
+        Args:
+            vision: Filter by vision support
+            tools: Filter by tool support
+            streaming: Filter by streaming support
+
+        Returns:
+            List of matching model configurations
+        """
+        seen = set()
+        results = []
+
+        for config in self._models.values():
+            if config.full_name in seen:
+                continue
+
+            if vision is not None and config.supports_vision != vision:
+                continue
+            if tools is not None and config.supports_tools != tools:
+                continue
+            if streaming is not None and config.supports_streaming != streaming:
+                continue
+
+            seen.add(config.full_name)
+            results.append(config)
+
+        return results
+
+    def get_api_key_env(self, model_name: str) -> Optional[str]:
+        """Get the API key environment variable for a model.
+
+        Args:
+            model_name: Model name or alias
+
+        Returns:
+            Environment variable name or None
+        """
+        config = self.get(model_name)
+        return config.api_key_env if config else None
+
+    def get_cli_command(self, model_name: str) -> Optional[str]:
+        """Get the CLI command for a model.
+
+        Args:
+            model_name: Model name or alias
+
+        Returns:
+            CLI command or None
+        """
+        config = self.get(model_name)
+        return config.cli_command if config else None
+
+    def list_all_models(self) -> List[str]:
+        """List all unique model full names.
+
+        Returns:
+            List of full model names
+        """
+        seen = set()
+        for config in self._models.values():
+            seen.add(config.full_name)
+        return sorted(list(seen))
+
+    def list_all_aliases(self) -> Dict[str, str]:
+        """List all aliases and their full names.
+
+        Returns:
+            Dictionary mapping aliases to full names
+        """
+        result = {}
+        for key, config in self._models.items():
+            if key != config.full_name:
+                result[key] = config.full_name
+        return result
+
+
+# Global singleton instance
+registry = ModelRegistry()
+
+
+# Convenience functions
+def resolve_model(model_name: str) -> str:
+    """Resolve model name or alias to full model name.
+
+    Args:
+        model_name: Model name or alias
+
+    Returns:
+        Full model name
+    """
+    return registry.resolve(model_name)
+
+
+def get_model_config(model_name: str) -> Optional[ModelConfig]:
+    """Get model configuration.
+
+    Args:
+        model_name: Model name or alias
+
+    Returns:
+        Model configuration or None
+    """
+    return registry.get(model_name)
+
+
+def get_api_key_env(model_name: str) -> Optional[str]:
+    """Get API key environment variable for model.
+
+    Args:
+        model_name: Model name or alias
+
+    Returns:
+        Environment variable name or None
+    """
+    return registry.get_api_key_env(model_name)
+
+
+__all__ = [
+    "ModelProvider",
+    "ModelConfig",
+    "ModelRegistry",
+    "registry",
+    "resolve_model",
+    "get_model_config",
+    "get_api_key_env",
+]
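
For reference, a minimal usage sketch of the registry API added in this file. The import path is an assumption (the diff does not show where the module lives inside the hanzo package); all class, function, and model names come from the code above.

    # NOTE: the import path is hypothetical -- the diff does not reveal the module's location.
    from hanzo.model_registry import ModelProvider, get_model_config, registry, resolve_model

    # Aliases resolve to full model names; lookups are case-insensitive.
    print(resolve_model("opus"))         # "claude-opus-4-1-20250805"
    print(resolve_model("not-a-model"))  # unknown names are returned unchanged

    # Full configuration lookup by name or alias.
    cfg = get_model_config("gemini-flash")
    if cfg is not None:
        print(cfg.full_name, cfg.context_window, cfg.api_key_env)

    # Query by provider or by capability flags.
    anthropic = registry.get_by_provider(ModelProvider.ANTHROPIC)
    vision_and_tools = registry.get_models_supporting(vision=True, tools=True)
    print([m.full_name for m in anthropic])
    print([m.full_name for m in vision_and_tools])

Because ModelRegistry.__new__ implements a singleton and the model table is a class attribute, the module-level `registry` and any later ModelRegistry() call share the same underlying mappings.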