aix 0.0.23__py3-none-any.whl → 0.0.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
aix/__init__.py CHANGED
@@ -27,12 +27,6 @@ Choose a chat function and chat with it:
27
27
  """
28
28
 
29
29
  from aix.gen_ai import chat, chat_models, chat_funcs
30
- from aix import contexts
31
- from aix.contexts import (
32
- bytes_to_markdown, # Convert bytes to markdown (with plugin support for different types of files)
33
- bytes_store_to_markdown_store, # Convert a bytes store to a markdown store based on extensions
34
- aggregate_store, # Aggregate a store into a single value (say, a string)
35
- )
36
30
 
37
31
  # TODO: Change this so that there's a load_pkg function that loads the packages dynamically
38
32
  # if and when use wants.
"""AI Model Management Module.

A unified interface for managing AI models across multiple providers.

Basic usage:
>>> from aix.ai_models import get_manager
>>> manager = get_manager()
>>> _ = manager.discover_from_source("openrouter", auto_register=True, verbose=False)
>>> models = manager.list_models(provider="openai")

Custom filtering:
>>> cheap_models = manager.list_models(
...     custom_filter=lambda m: m.cost_per_token.get("input", 0) < 0.001
... )

Get connector-specific metadata:
>>> openai_meta = manager.get_connector_metadata("openai/gpt-4", "openai")
>>> # Use with: openai.ChatCompletion.create(**openai_meta, messages=[...])

"""

# Core types
from aix.ai_models.base import (
    Model,
    ModelRegistry,
    ModelSource,
    Connector,
    ConnectorRegistry,
)

# Concrete sources and connectors.
# NOTE: ``Connector`` was previously re-imported here from ``sources`` as
# well, silently shadowing the ABC imported from ``base`` above; the
# duplicate has been removed so the canonical base-class binding wins.
from aix.ai_models.sources import (
    OpenRouterSource,
    OllamaSource,
    ProviderAPISource,
    OpenAIConnector,
    OpenRouterConnector,
    LangChainConnector,
    OllamaConnector,
    DSPyConnector,
)

# Main facade
from aix.ai_models.manager import (
    ModelManager,
    get_manager,
)

# Version info
__version__ = "0.1.0"

# Public API
__all__ = [
    # Core types
    "Model",
    "ModelRegistry",
    "ModelSource",
    "Connector",
    "ConnectorRegistry",
    # Sources
    "OpenRouterSource",
    "OllamaSource",
    "ProviderAPISource",
    # Connectors
    "OpenAIConnector",
    "OpenRouterConnector",
    "LangChainConnector",
    "OllamaConnector",
    "DSPyConnector",
    # Main API
    "ModelManager",
    "get_manager",
]


# Convenience functions for common operations
80
def list_available_models(
    *,
    provider: str | None = None,
    is_local: bool | None = None,
    storage_path: str | None = None,
) -> list[Model]:
    """List models known to the default manager, without building one yourself.

    Thin convenience wrapper: obtains the shared ``ModelManager`` (optionally
    backed by ``storage_path``) and delegates to its ``list_models``.

    >>> models = list_available_models(provider="openai")
    >>> len(models) >= 0
    True
    """
    return get_manager(storage_path=storage_path).list_models(
        provider=provider, is_local=is_local
    )
94
+
95
+
96
def discover_models(
    source_name: str = "openrouter",
    *,
    storage_path: str | None = None,
    auto_register: bool = True,
    verbose: bool = True,
) -> list[Model]:
    """Run model discovery against a named source via the default manager.

    Convenience wrapper: obtains the shared ``ModelManager`` (optionally
    backed by ``storage_path``) and forwards to ``discover_from_source``.

    >>> models = discover_models("openrouter", auto_register=False, verbose=False)
    >>> len(models) > 0
    True
    """
    mgr = get_manager(storage_path=storage_path)
    return mgr.discover_from_source(
        source_name, auto_register=auto_register, verbose=verbose
    )
113
+
114
+
115
def get_model_metadata(
    model_id: str, connector_name: str, *, storage_path: str | None = None
) -> dict:
    """Return connector-formatted metadata for one model via the default manager.

    Convenience wrapper: obtains the shared ``ModelManager`` (optionally
    backed by ``storage_path``) and forwards to ``get_connector_metadata``.

    >>> import tempfile, os, json
    >>> if 'OPENROUTER_API_KEY' in os.environ:
    ...     from aix.ai_models.manager import get_manager
    ...     with tempfile.NamedTemporaryFile(mode='w+', suffix=".json", delete=False) as temp:
    ...         storage_path = temp.name
    ...         json.dump({'models': []}, temp)
    ...     try:
    ...         manager = get_manager(storage_path=storage_path)
    ...         _ = manager.discover_from_source("openrouter", auto_register=True, verbose=False)
    ...         assert 'openai/gpt-3.5-turbo' in manager.models
    ...         metadata = get_model_metadata("openai/gpt-3.5-turbo", "openrouter", storage_path=storage_path)
    ...         assert 'model' in metadata
    ...     finally:
    ...         os.remove(storage_path)
    """
    return get_manager(storage_path=storage_path).get_connector_metadata(
        model_id, connector_name
    )
aix/ai_models/base.py ADDED
@@ -0,0 +1,246 @@
1
+ """Core types for AI model management.
2
+
3
+ This module provides a unified interface for managing, discovering, and
4
+ connecting to AI models across multiple providers and deployment methods.
5
+ """
6
+
7
+ from dataclasses import dataclass, field, asdict
8
+ from typing import Any, Mapping, MutableMapping, Iterator, Callable, Iterable
9
+ from abc import ABC, abstractmethod
10
+ import json
11
+ from pathlib import Path
12
+
13
+
14
+ @dataclass
15
+ class Model:
16
+ """Represents an AI model with its metadata.
17
+
18
+ >>> model = Model(
19
+ ... id="gpt-4",
20
+ ... provider="openai",
21
+ ... context_size=8192,
22
+ ... is_local=False
23
+ ... )
24
+ >>> model.id
25
+ 'gpt-4'
26
+ """
27
+ id: str
28
+ provider: str
29
+ context_size: int | None = None
30
+ is_local: bool = False
31
+ capabilities: dict[str, Any] = field(default_factory=dict)
32
+ cost_per_token: dict[str, float] = field(default_factory=dict)
33
+ tags: set[str] = field(default_factory=set)
34
+ connector_metadata: dict[str, dict[str, Any]] = field(default_factory=dict)
35
+ custom_metadata: dict[str, Any] = field(default_factory=dict)
36
+
37
+ def to_dict(self) -> dict[str, Any]:
38
+ """Convert model to dictionary representation."""
39
+ return asdict(self)
40
+
41
+ def matches_filter(self, **criteria) -> bool:
42
+ """Check if model matches given criteria.
43
+
44
+ >>> model = Model(id="gpt-4", provider="openai", is_local=False)
45
+ >>> model.matches_filter(provider="openai")
46
+ True
47
+ >>> model.matches_filter(is_local=True)
48
+ False
49
+ """
50
+ for key, value in criteria.items():
51
+ if not hasattr(self, key):
52
+ return False
53
+ if getattr(self, key) != value:
54
+ return False
55
+ return True
56
+
57
+
58
class ModelRegistry(MutableMapping[str, Model]):
    """Registry for managing AI models using the Mapping interface.

    Optionally persists to a JSON file: every mutation triggers a full save.

    >>> registry = ModelRegistry()
    >>> registry["gpt-4"] = Model(id="gpt-4", provider="openai")
    >>> "gpt-4" in registry
    True
    >>> len(registry)
    1
    """

    def __init__(self, *, storage_path: Path | None = None):
        """Initialize registry with optional persistent JSON storage."""
        self._models: dict[str, Model] = {}
        self._storage_path = storage_path
        if storage_path and storage_path.exists():
            self._load()

    def __setitem__(self, model_id: str, model: Model) -> None:
        """Add or update a model; persists immediately when storage is set."""
        self._models[model_id] = model
        if self._storage_path:
            self._save()

    def __getitem__(self, key: str | list[str] | Callable[[Model], bool]) -> Model | list[Model]:
        """Get model(s) by ID, list of IDs, or filter function.

        Supports:
        - Single ID: registry["gpt-4"] (raises KeyError if absent)
        - Multiple IDs: registry[["gpt-4", "claude-3"]] (unknown IDs are
          silently skipped)
        - Filter function: registry[lambda m: m.is_local]
        """
        if isinstance(key, str):
            return self._models[key]
        elif isinstance(key, list):
            return [self._models[k] for k in key if k in self._models]
        elif callable(key):
            return [m for m in self._models.values() if key(m)]
        else:
            raise TypeError(f"Unsupported key type: {type(key)}")

    def __delitem__(self, model_id: str) -> None:
        """Remove a model; persists immediately when storage is set."""
        del self._models[model_id]
        if self._storage_path:
            self._save()

    def __iter__(self) -> Iterator[str]:
        """Iterate over model IDs."""
        yield from self._models.keys()

    def __len__(self) -> int:
        """Return number of models in registry."""
        return len(self._models)

    def filter(
        self,
        *,
        provider: str | None = None,
        is_local: bool | None = None,
        min_context_size: int | None = None,
        max_context_size: int | None = None,
        has_capabilities: Iterable[str] | None = None,
        tags: Iterable[str] | None = None,
        custom_filter: Callable[[Model], bool] | None = None
    ) -> list[Model]:
        """Filter models by multiple criteria; ``None`` means "don't filter".

        >>> registry = ModelRegistry()
        >>> registry["gpt-4"] = Model(id="gpt-4", provider="openai", context_size=8192)
        >>> registry["llama2"] = Model(id="llama2", provider="ollama", is_local=True)
        >>> local_models = registry.filter(is_local=True)
        >>> len(local_models)
        1
        """
        def _matches(model: Model) -> bool:
            if provider is not None and model.provider != provider:
                return False
            if is_local is not None and model.is_local != is_local:
                return False
            # Compare against None (not truthiness) so a bound of 0 is honored
            # and a model with unknown context_size only fails min-bound checks.
            if min_context_size is not None and (
                model.context_size is None or model.context_size < min_context_size
            ):
                return False
            if max_context_size is not None and (
                model.context_size is None or model.context_size > max_context_size
            ):
                return False
            if has_capabilities and not all(
                model.capabilities.get(cap) for cap in has_capabilities
            ):
                return False
            if tags and not model.tags.issuperset(tags):
                return False
            if custom_filter and not custom_filter(model):
                return False
            return True

        return [m for m in self._models.values() if _matches(m)]

    def _load(self) -> None:
        """Load models from persistent storage (JSON file)."""
        if not self._storage_path:
            return

        with open(self._storage_path, 'r') as f:
            data = json.load(f)
        # Tolerate a file without a 'models' key (e.g. an empty stub).
        for model_data in data.get('models', []):
            # Tags are stored as a JSON list; restore the set type.
            model_data['tags'] = set(model_data.get('tags', []))
            model = Model(**model_data)
            self._models[model.id] = model

    def _save(self) -> None:
        """Save all models to persistent storage as JSON."""
        if not self._storage_path:
            return

        models_data = []
        for model in self._models.values():
            model_dict = model.to_dict()
            # JSON has no set type; sort so the file output is deterministic
            # (list(set) ordering varies between runs and pollutes diffs).
            model_dict['tags'] = sorted(model_dict['tags'])
            models_data.append(model_dict)

        self._storage_path.parent.mkdir(parents=True, exist_ok=True)
        with open(self._storage_path, 'w') as f:
            json.dump({'models': models_data}, f, indent=2)
182
+
183
+
184
class ModelSource(ABC):
    """Interface for anything that can enumerate available models."""

    @abstractmethod
    def discover_models(self) -> Iterable[Model]:
        """Produce a Model instance for each model this source knows about."""
194
+
195
+
196
class Connector(ABC):
    """Interface for model connectors/clients."""

    @abstractmethod
    def format_metadata(self, model: Model) -> dict[str, Any]:
        """Return a dict usable to instantiate/connect via this connector."""

    @property
    @abstractmethod
    def name(self) -> str:
        """Unique identifier for this connector."""
212
+
213
+
214
class ConnectorRegistry(MutableMapping[str, Connector]):
    """A dict-like registry of Connector instances, keyed by connector name.

    >>> registry = ConnectorRegistry()
    >>> class MyConnector(Connector):
    ...     @property
    ...     def name(self) -> str:
    ...         return "my_connector"
    ...     def format_metadata(self, model: Model) -> dict[str, Any]:
    ...         return {"model": model.id}
    >>> connector = MyConnector()
    >>> registry[connector.name] = connector
    >>> "my_connector" in registry
    True
    """

    def __init__(self):
        # Backing store: connector name -> connector instance.
        self._connectors: dict[str, Connector] = {}

    def __setitem__(self, name: str, connector: Connector) -> None:
        self._connectors[name] = connector

    def __getitem__(self, name: str) -> Connector:
        return self._connectors[name]

    def __delitem__(self, name: str) -> None:
        del self._connectors[name]

    def __iter__(self) -> Iterator[str]:
        return iter(self._connectors)

    def __len__(self) -> int:
        return len(self._connectors)