aix-0.0.24-py3-none-any.whl → aix-0.0.25-py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
aix/__init__.py CHANGED
@@ -1,62 +1,159 @@
-"""
-Facade to key AI tools.
-
-Get a list of available chat functions (this will depend on the AI packages
-you have installed locally):
-
->>> from aix import chat_funcs
->>> list(chat_funcs) # doctest: +SKIP
-['gemini-1.5-flash',
- 'gpt-4',
- 'gpt-4-32k',
- 'gpt-4-turbo',
- 'gpt-3.5-turbo',
- 'o1-preview',
- 'o1-mini',
- 'gpt-4o',
- 'gpt-4o-mini']
-
-Choose a chat function and chat with it:
->>> google_ai_chat = chat_funcs['gemini-1.5-flash'] # doctest: +SKIP
->>> google_ai_chat("What is the meaning of life? Respond with a number.") # doctest: +SKIP
-'42'
->>> openai_chat = chat_funcs['gpt-3.5-turbo'] # doctest: +SKIP
->>> openai_chat("What is the meaning of life? Respond with a number.") # doctest: +SKIP
-'42'
+"""AIX: Artificial Intelligence eXtensions
+
+A clean, pythonic facade for common AI operations that abstracts away
+provider-specific details and complexities.
+
+Quick Start:
+>>> from aix import chat, embeddings, prompt_func, models
+
+# Simple chat
+>>> response = chat("What is 2+2?") # doctest: +SKIP
+'The answer is 4.'
+
+# Create prompt-based functions
+>>> translate = prompt_func("Translate to French: {text}")
+>>> translate(text="Hello world") # doctest: +SKIP
+'Bonjour le monde'
+
+# Get embeddings
+>>> vecs = list(embeddings(["hello", "world"])) # doctest: +SKIP
+>>> len(vecs) # doctest: +SKIP
+2
 
+# Discover models
+>>> models.discover() # doctest: +SKIP
+>>> list(models)[:5] # doctest: +SKIP
+['openai/gpt-4o', 'openai/gpt-4o-mini', ...]
+
+Main Features:
+- chat(): Simple chat interface across providers
+- embeddings(): Vector embeddings for text
+- prompt_func(): Create functions from prompt templates
+- models: Model discovery and selection
+- generate_image(): Text-to-image generation
+- text_to_speech(), transcribe(): Audio operations
+- generate_video(): Text-to-video generation (provider-dependent)
+- Batch operations for efficiency
+- Clean, i2mint-style Mapping interfaces
+
+Backends:
+- Uses LiteLLM for provider interactions
+- Supports OpenAI, Anthropic, Google, and 100+ models
+- OpenRouter integration for multi-provider access
+
+For detailed documentation, see: https://github.com/thorwhalen/aix
 """
 
-from aix.gen_ai import chat, chat_models, chat_funcs
-
-# TODO: Change this so that there's a load_pkg function that loads the packages dynamically
-# if and when use wants.
-
-# from aix.pd import *
-# from aix.np import *
-# from aix.sk import *
-
-# from aix import pd
-# from aix import np
-# from aix import sk
-
-
-#
-# from contextlib import suppress
-#
-# preferred_order = ['sk', 'np', 'pd']
-#
-# with suppress(ModuleNotFoundError):
-#     from aix import sk
-#
-# with suppress(ModuleNotFoundError):
-#     from aix import np
-#
-# with suppress(ModuleNotFoundError):
-#     from aix import pd
-#
-# for _module_name in preferred_order[::-1]:
-#     print(f"------ {_module_name}")
-#     _module = __import__(f'aix.{_module_name}')
-#     for _name in filter(lambda x: not x.startswith('__'), dir(_module)):
-#         print(_name, _module)
-#         locals()[_name] = getattr(_module, _name)
+# Core interfaces (new clean API)
+from aix.chat import chat, ask, chat_with_history, ChatSession
+from aix.embeddings import (
+    embeddings,
+    embed,
+    cosine_similarity,
+    find_most_similar,
+    EmbeddingCache,
+)
+from aix.prompts import (
+    prompt_func,
+    prompt_to_text,
+    prompt_to_json,
+    PromptFuncs,
+    common_funcs,
+    constrained_answer,
+)
+from aix.models import (
+    models,
+    ModelStore,
+    discover_available_models,
+    get_model_info,
+    find_models,
+)
+from aix.batches import (
+    batch_chat,
+    batch_embeddings,
+    batch_process,
+    BatchProcessor,
+)
+from aix.image import (
+    generate_image,
+    generate_images,
+    edit_image,
+    create_variation,
+    GeneratedImage,
+)
+from aix.audio import (
+    text_to_speech,
+    transcribe,
+    transcribe_with_timestamps,
+    translate_audio,
+    GeneratedAudio,
+    TranscriptionResult,
+)
+from aix.video import (
+    generate_video,
+    animate_image as animate_image_to_video,
+    extend_video,
+    GeneratedVideo,
+    get_available_providers as get_video_providers,
+)
+
+# Legacy interfaces (for backward compatibility)
+from aix.gen_ai import chat_models, chat_funcs
+
+# Version info
+__version__ = "0.1.0"
+
+# Public API
+__all__ = [
+    # Core chat
+    "chat",
+    "ask",
+    "chat_with_history",
+    "ChatSession",
+    # Embeddings
+    "embeddings",
+    "embed",
+    "cosine_similarity",
+    "find_most_similar",
+    "EmbeddingCache",
+    # Prompts
+    "prompt_func",
+    "prompt_to_text",
+    "prompt_to_json",
+    "PromptFuncs",
+    "common_funcs",
+    "constrained_answer",
+    # Models
+    "models",
+    "ModelStore",
+    "discover_available_models",
+    "get_model_info",
+    "find_models",
+    # Batches
+    "batch_chat",
+    "batch_embeddings",
+    "batch_process",
+    "BatchProcessor",
+    # Image
+    "generate_image",
+    "generate_images",
+    "edit_image",
+    "create_variation",
+    "GeneratedImage",
+    # Audio
+    "text_to_speech",
+    "transcribe",
+    "transcribe_with_timestamps",
+    "translate_audio",
+    "GeneratedAudio",
+    "TranscriptionResult",
+    # Video
+    "generate_video",
+    "animate_image_to_video",
+    "extend_video",
+    "GeneratedVideo",
+    "get_video_providers",
+    # Legacy
+    "chat_models",
+    "chat_funcs",
+]
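
Note on the rewrite above: the legacy mapping-style entry points (chat_funcs, chat_models) are still re-exported next to the new function-based API, so 0.0.24-era code keeps importing. A minimal sketch of the two routes side by side (the model key is illustrative; available keys depend on which providers you have configured):

    from aix import chat, chat_funcs

    chat("What is 2+2?")                 # new facade function
    ask_gpt = chat_funcs["gpt-4o-mini"]  # legacy mapping lookup; key is hypothetical
    ask_gpt("What is 2+2?")              # legacy callable style, as in 0.0.24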
aix/ai_models/base.py CHANGED
@@ -4,8 +4,9 @@ This module provides a unified interface for managing, discovering, and
 connecting to AI models across multiple providers and deployment methods.
 """
 
-from dataclasses import dataclass, field, asdict
-from typing import Any, Mapping, MutableMapping, Iterator, Callable, Iterable
+from dataclasses import dataclass, field, asdict, fields
+from typing import Any
+from collections.abc import Mapping, MutableMapping, Iterator, Callable, Iterable
 from abc import ABC, abstractmethod
 import json
 from pathlib import Path
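
The import change above is the standard modernization: since Python 3.9, the typing aliases for these container ABCs are deprecated in favor of collections.abc, whose classes also work directly as isinstance targets. For instance:

    from collections.abc import Callable, Mapping

    isinstance({}, Mapping)    # True
    isinstance(len, Callable)  # True

The same typing-to-collections.abc swap recurs in examples.py, manager.py, and sources.py below.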
@@ -14,7 +15,7 @@ from pathlib import Path
 @dataclass
 class Model:
     """Represents an AI model with its metadata.
-
+
     >>> model = Model(
     ...     id="gpt-4",
     ...     provider="openai",
@@ -24,6 +25,7 @@ class Model:
     >>> model.id
     'gpt-4'
     """
+
     id: str
     provider: str
     context_size: int | None = None
@@ -33,14 +35,14 @@ class Model:
     tags: set[str] = field(default_factory=set)
     connector_metadata: dict[str, dict[str, Any]] = field(default_factory=dict)
     custom_metadata: dict[str, Any] = field(default_factory=dict)
-
+
     def to_dict(self) -> dict[str, Any]:
         """Convert model to dictionary representation."""
         return asdict(self)
-
+
     def matches_filter(self, **criteria) -> bool:
         """Check if model matches given criteria.
-
+
         >>> model = Model(id="gpt-4", provider="openai", is_local=False)
         >>> model.matches_filter(provider="openai")
         True
@@ -54,10 +56,25 @@ class Model:
             return False
         return True
 
+    def __getitem__(self, key: str) -> Any:
+        """Get field value by name."""
+        try:
+            return getattr(self, key)
+        except AttributeError:
+            raise KeyError(key)
+
+    def __iter__(self) -> Iterator[str]:
+        """Iterate over field names."""
+        return iter(field.name for field in fields(self))
+
+    def __len__(self) -> int:
+        """Return number of fields."""
+        return len(fields(self))
+
 
 class ModelRegistry(MutableMapping[str, Model]):
     """Registry for managing AI models using Mapping interface.
-
+
     >>> registry = ModelRegistry()
     >>> registry["gpt-4"] = Model(id="gpt-4", provider="openai")
     >>> "gpt-4" in registry
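
With the __getitem__, __iter__, and __len__ added above, a Model instance can be read like a mapping over its dataclass fields. A quick sketch of what the new protocol methods allow (inferred from the hunk, not from aix docs):

    m = Model(id="gpt-4", provider="openai")
    m["provider"]          # 'openai'; unknown keys raise KeyError, not AttributeError
    list(m)                # ['id', 'provider', 'context_size', ...]
    {k: m[k] for k in m}   # all fields as a dict, much like m.to_dict()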
@@ -65,23 +82,25 @@ class ModelRegistry(MutableMapping[str, Model]):
     >>> len(registry)
     1
     """
-
+
     def __init__(self, *, storage_path: Path | None = None):
         """Initialize registry with optional persistent storage."""
         self._models: dict[str, Model] = {}
         self._storage_path = storage_path
         if storage_path and storage_path.exists():
             self._load()
-
+
     def __setitem__(self, model_id: str, model: Model) -> None:
         """Add or update a model in the registry."""
         self._models[model_id] = model
         if self._storage_path:
             self._save()
-
-    def __getitem__(self, key: str | list[str] | Callable[[Model], bool]) -> Model | list[Model]:
+
+    def __getitem__(
+        self, key: str | list[str] | Callable[[Model], bool]
+    ) -> Model | list[Model]:
         """Get model(s) by ID, list of IDs, or filter function.
-
+
         Supports:
         - Single ID: registry["gpt-4"]
         - Multiple IDs: registry[["gpt-4", "claude-3"]]
@@ -95,21 +114,21 @@ class ModelRegistry(MutableMapping[str, Model]):
             return [m for m in self._models.values() if key(m)]
         else:
             raise TypeError(f"Unsupported key type: {type(key)}")
-
+
     def __delitem__(self, model_id: str) -> None:
         """Remove a model from the registry."""
         del self._models[model_id]
         if self._storage_path:
             self._save()
-
+
     def __iter__(self) -> Iterator[str]:
         """Iterate over model IDs."""
         yield from self._models.keys()
-
+
     def __len__(self) -> int:
         """Return number of models in registry."""
         return len(self._models)
-
+
     def filter(
         self,
         *,
@@ -119,10 +138,10 @@ class ModelRegistry(MutableMapping[str, Model]):
         max_context_size: int | None = None,
         has_capabilities: Iterable[str] | None = None,
         tags: Iterable[str] | None = None,
-        custom_filter: Callable[[Model], bool] | None = None
+        custom_filter: Callable[[Model], bool] | None = None,
     ) -> list[Model]:
         """Filter models by multiple criteria.
-
+
         >>> registry = ModelRegistry()
         >>> registry["gpt-4"] = Model(id="gpt-4", provider="openai", context_size=8192)
         >>> registry["llama2"] = Model(id="llama2", provider="ollama", is_local=True)
@@ -130,14 +149,19 @@ class ModelRegistry(MutableMapping[str, Model]):
         >>> len(local_models)
         1
         """
+
         def _matches(model: Model) -> bool:
             if provider and model.provider != provider:
                 return False
             if is_local is not None and model.is_local != is_local:
                 return False
-            if min_context_size and (not model.context_size or model.context_size < min_context_size):
+            if min_context_size and (
+                not model.context_size or model.context_size < min_context_size
+            ):
                 return False
-            if max_context_size and (not model.context_size or model.context_size > max_context_size):
+            if max_context_size and (
+                not model.context_size or model.context_size > max_context_size
+            ):
                 return False
             if has_capabilities:
                 for cap in has_capabilities:
@@ -148,46 +172,46 @@ class ModelRegistry(MutableMapping[str, Model]):
             if custom_filter and not custom_filter(model):
                 return False
             return True
-
+
         return [m for m in self._models.values() if _matches(m)]
-
+
     def _load(self) -> None:
         """Load models from persistent storage."""
         if not self._storage_path:
             return
-
-        with open(self._storage_path, 'r') as f:
+
+        with open(self._storage_path) as f:
             data = json.load(f)
-        for model_data in data['models']:
+        for model_data in data["models"]:
             # Reconstruct set for tags
-            model_data['tags'] = set(model_data.get('tags', []))
+            model_data["tags"] = set(model_data.get("tags", []))
             model = Model(**model_data)
             self._models[model.id] = model
-
+
     def _save(self) -> None:
         """Save models to persistent storage."""
         if not self._storage_path:
             return
-
+
         models_data = []
         for model in self._models.values():
             model_dict = model.to_dict()
             # Convert set to list for JSON serialization
-            model_dict['tags'] = list(model_dict['tags'])
+            model_dict["tags"] = list(model_dict["tags"])
             models_data.append(model_dict)
-
+
         self._storage_path.parent.mkdir(parents=True, exist_ok=True)
-        with open(self._storage_path, 'w') as f:
-            json.dump({'models': models_data}, f, indent=2)
+        with open(self._storage_path, "w") as f:
+            json.dump({"models": models_data}, f, indent=2)
 
 
 class ModelSource(ABC):
     """Abstract base for model discovery sources."""
-
+
     @abstractmethod
     def discover_models(self) -> Iterable[Model]:
         """Discover available models from this source.
-
+
         Yields Model instances for each discovered model.
         """
         pass
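
The _save/_load pair round-trips through JSON, converting tags set-to-list on the way out and list-to-set on the way back, since JSON has no set type. A usage sketch (path and field values illustrative):

    registry = ModelRegistry(storage_path=Path("models.json"))
    registry["gpt-4"] = Model(id="gpt-4", provider="openai", tags={"chat"})
    # models.json now holds {"models": [{"id": "gpt-4", ..., "tags": ["chat"], ...}]}
    # a fresh ModelRegistry(storage_path=Path("models.json")) reloads it, tags as a set again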
@@ -195,15 +219,15 @@ class ModelSource(ABC):
 
 class Connector(ABC):
     """Abstract base for model connectors/clients."""
-
+
     @abstractmethod
     def format_metadata(self, model: Model) -> dict[str, Any]:
         """Format model metadata for this connector.
-
+
         Returns a dict that can be used to instantiate/connect via this connector.
         """
         pass
-
+
     @property
     @abstractmethod
     def name(self) -> str:
@@ -213,7 +237,7 @@ class Connector(ABC):
 
 class ConnectorRegistry(MutableMapping[str, Connector]):
     """Registry for managing model connectors.
-
+
     >>> registry = ConnectorRegistry()
     >>> class MyConnector(Connector):
     ...     @property
@@ -226,21 +250,21 @@ class ConnectorRegistry(MutableMapping[str, Connector]):
     >>> "my_connector" in registry
     True
     """
-
+
     def __init__(self):
         self._connectors: dict[str, Connector] = {}
-
+
     def __setitem__(self, name: str, connector: Connector) -> None:
         self._connectors[name] = connector
-
+
     def __getitem__(self, name: str) -> Connector:
         return self._connectors[name]
-
+
     def __delitem__(self, name: str) -> None:
         del self._connectors[name]
-
+
     def __iter__(self) -> Iterator[str]:
         yield from self._connectors.keys()
-
+
     def __len__(self) -> int:
         return len(self._connectors)
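
One behavior worth flagging from the ModelRegistry hunks above: __getitem__ dispatches on the key's type, so the same indexing syntax covers point lookups and queries. The three forms its docstring lists, sketched with illustrative data:

    registry["gpt-4"]                  # single ID -> Model (KeyError if absent)
    registry[["gpt-4", "claude-3"]]    # list of IDs -> list[Model]
    registry[lambda m: m.is_local]     # callable filter -> list[Model]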
aix/ai_models/examples.py CHANGED
@@ -4,7 +4,8 @@ This module demonstrates common patterns and use cases.
 """
 
 from pathlib import Path
-from typing import Any, Iterable, Callable
+from typing import Any
+from collections.abc import Iterable, Callable
 from aix.ai_models.base import ModelSource, Model
 from aix.ai_models.manager import ModelManager
 
@@ -103,7 +104,7 @@ def example_advanced_filtering():
         """Check if model is suitable for a specific task."""
         has_function_calling = model.capabilities.get("function_calling", False)
         has_sufficient_context = (model.context_size or 0) >= 32000
-        is_affordable = model.cost_per_token.get("input", float('inf')) < 0.01
+        is_affordable = model.cost_per_token.get("input", float("inf")) < 0.01
         return has_function_calling and has_sufficient_context and is_affordable
 
     suitable_models = manager.list_models(custom_filter=_is_suitable_for_task)
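
A small detail in that predicate: float("inf") as the .get default means a model with no recorded "input" cost can never satisfy < 0.01, so unpriced models are excluded rather than presumed cheap. The comparison in isolation:

    {"input": 0.002}.get("input", float("inf")) < 0.01  # True: priced and affordable
    {}.get("input", float("inf")) < 0.01                # False: unknown price, excluded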
aix/ai_models/manager.py CHANGED
@@ -4,7 +4,8 @@ Provides a high-level API for managing AI models across providers.
 """
 
 from pathlib import Path
-from typing import Any, Iterable, Callable
+from typing import Any
+from collections.abc import Iterable, Callable
 
 from aix.ai_models.base import Model, ModelRegistry, ConnectorRegistry, ModelSource
 from aix.ai_models.sources import (
aix/ai_models/sources.py CHANGED
@@ -5,7 +5,8 @@ and connectors for formatting metadata for different clients.
 """
 
 import requests
-from typing import Any, Iterable
+from typing import Any
+from collections.abc import Iterable
 from dataclasses import dataclass
 
 from aix.ai_models.base import Model, ModelSource, Connector