indoxrouter 0.1.2__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. indoxrouter-0.1.3/MANIFEST.in +4 -0
  2. indoxrouter-0.1.3/PKG-INFO +188 -0
  3. indoxrouter-0.1.3/README.md +151 -0
  4. indoxrouter-0.1.3/cookbook/README.md +44 -0
  5. indoxrouter-0.1.3/cookbook/chat_completion.md +117 -0
  6. indoxrouter-0.1.3/cookbook/embeddings.md +123 -0
  7. indoxrouter-0.1.3/cookbook/image_generation.md +117 -0
  8. indoxrouter-0.1.3/cookbook/indoxRouter_cookbook.ipynb +1244 -0
  9. indoxrouter-0.1.3/cookbook/indoxrouter_cookbook.md +753 -0
  10. indoxrouter-0.1.3/examples/chat_example.py +85 -0
  11. indoxrouter-0.1.3/examples/embedding_example.py +68 -0
  12. indoxrouter-0.1.3/examples/image_example.py +76 -0
  13. indoxrouter-0.1.3/indoxrouter.egg-info/PKG-INFO +188 -0
  14. indoxrouter-0.1.3/indoxrouter.egg-info/SOURCES.txt +18 -0
  15. indoxrouter-0.1.3/indoxrouter.egg-info/requires.txt +10 -0
  16. indoxrouter-0.1.3/indoxrouter.egg-info/top_level.txt +1 -0
  17. {indoxrouter-0.1.2 → indoxrouter-0.1.3}/setup.py +5 -23
  18. indoxrouter-0.1.3/tests/test_client.py +313 -0
  19. indoxrouter-0.1.2/LICENSE +0 -21
  20. indoxrouter-0.1.2/PKG-INFO +0 -259
  21. indoxrouter-0.1.2/README.md +0 -209
  22. indoxrouter-0.1.2/indoxRouter/__init__.py +0 -83
  23. indoxrouter-0.1.2/indoxRouter/client.py +0 -632
  24. indoxrouter-0.1.2/indoxRouter/client_resourses/__init__.py +0 -20
  25. indoxrouter-0.1.2/indoxRouter/client_resourses/base.py +0 -67
  26. indoxrouter-0.1.2/indoxRouter/client_resourses/chat.py +0 -144
  27. indoxrouter-0.1.2/indoxRouter/client_resourses/completion.py +0 -138
  28. indoxrouter-0.1.2/indoxRouter/client_resourses/embedding.py +0 -83
  29. indoxrouter-0.1.2/indoxRouter/client_resourses/image.py +0 -116
  30. indoxrouter-0.1.2/indoxRouter/client_resourses/models.py +0 -114
  31. indoxrouter-0.1.2/indoxRouter/config.py +0 -151
  32. indoxrouter-0.1.2/indoxRouter/constants/__init__.py +0 -81
  33. indoxrouter-0.1.2/indoxRouter/exceptions/__init__.py +0 -70
  34. indoxrouter-0.1.2/indoxRouter/models/__init__.py +0 -111
  35. indoxrouter-0.1.2/indoxRouter/providers/__init__.py +0 -108
  36. indoxrouter-0.1.2/indoxRouter/providers/ai21labs.json +0 -128
  37. indoxrouter-0.1.2/indoxRouter/providers/base_provider.py +0 -101
  38. indoxrouter-0.1.2/indoxRouter/providers/claude.json +0 -164
  39. indoxrouter-0.1.2/indoxRouter/providers/cohere.json +0 -116
  40. indoxrouter-0.1.2/indoxRouter/providers/databricks.json +0 -110
  41. indoxrouter-0.1.2/indoxRouter/providers/deepseek.json +0 -110
  42. indoxrouter-0.1.2/indoxRouter/providers/google.json +0 -128
  43. indoxrouter-0.1.2/indoxRouter/providers/meta.json +0 -128
  44. indoxrouter-0.1.2/indoxRouter/providers/mistral.json +0 -146
  45. indoxrouter-0.1.2/indoxRouter/providers/nvidia.json +0 -110
  46. indoxrouter-0.1.2/indoxRouter/providers/openai.json +0 -308
  47. indoxrouter-0.1.2/indoxRouter/providers/openai.py +0 -521
  48. indoxrouter-0.1.2/indoxRouter/providers/qwen.json +0 -110
  49. indoxrouter-0.1.2/indoxRouter/utils/__init__.py +0 -240
  50. indoxrouter-0.1.2/indoxRouter.egg-info/PKG-INFO +0 -259
  51. indoxrouter-0.1.2/indoxRouter.egg-info/SOURCES.txt +0 -42
  52. indoxrouter-0.1.2/indoxRouter.egg-info/requires.txt +0 -24
  53. indoxrouter-0.1.2/indoxRouter.egg-info/top_level.txt +0 -1
  54. indoxrouter-0.1.2/tests/test_client.py +0 -136
  55. {indoxrouter-0.1.2/indoxRouter.egg-info → indoxrouter-0.1.3/indoxrouter.egg-info}/dependency_links.txt +0 -0
  56. {indoxrouter-0.1.2 → indoxrouter-0.1.3}/setup.cfg +0 -0
@@ -0,0 +1,4 @@
+ include README.md
+ include LICENSE
+ recursive-include cookbook *.md *.ipynb
+ recursive-include examples *.py
@@ -0,0 +1,188 @@
+ Metadata-Version: 2.2
+ Name: indoxrouter
+ Version: 0.1.3
+ Summary: A unified client for various AI providers
+ Home-page: https://github.com/indoxrouter/indoxrouter
+ Author: indoxRouter Team
+ Author-email: ashkan.eskandari.dev@gmail.com
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ Requires-Dist: requests>=2.25.0
+ Requires-Dist: python-dotenv>=1.0.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+ Requires-Dist: black>=23.0.0; extra == "dev"
+ Requires-Dist: isort>=5.0.0; extra == "dev"
+ Requires-Dist: flake8>=6.0.0; extra == "dev"
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ # IndoxRouter Client
+
+ A unified client for various AI providers, including OpenAI, Anthropic, Cohere, Google, and Mistral.
+
+ ## Features
+
+ - **Unified API**: Access multiple AI providers through a single API
+ - **Simple Interface**: Easy-to-use methods for chat, completion, embeddings, and image generation
+ - **Error Handling**: Standardized error handling across providers
+ - **Authentication**: Automatic token management
+
+ ## Installation
+
+ ```bash
+ pip install indoxrouter
+ ```
+
+ ## Usage
+
+ ### Initialization
+
+ ```python
+ from indoxrouter import Client
+
+ # Initialize with API key
+ client = Client(api_key="your_api_key", base_url="http://your-server-url:8000")
+
+ # Or initialize with username and password
+ client = Client(
+     username="your_username",
+     password="your_password",
+     base_url="http://your-server-url:8000"
+ )
+
+ # Using environment variables
+ # Set INDOXROUTER_API_KEY or INDOXROUTER_USERNAME and INDOXROUTER_PASSWORD
+ client = Client(base_url="http://your-server-url:8000")
+ ```
+
+ ### Chat Completions
+
+ ```python
+ response = client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Tell me a joke."}
+     ],
+     provider="openai",
+     model="gpt-3.5-turbo",
+     temperature=0.7
+ )
+
+ print(response["choices"][0]["message"]["content"])
+ ```
+
+ ### Text Completions
+
+ ```python
+ response = client.completion(
+     prompt="Once upon a time,",
+     provider="openai",
+     model="gpt-3.5-turbo-instruct",
+     max_tokens=100
+ )
+
+ print(response["choices"][0]["text"])
+ ```
+
+ ### Embeddings
+
+ ```python
+ response = client.embeddings(
+     text=["Hello world", "AI is amazing"],
+     provider="openai",
+     model="text-embedding-ada-002"
+ )
+
+ print(f"Dimensions: {response['dimensions']}")
+ print(f"First embedding: {response['embeddings'][0][:5]}...")
+ ```
+
+ ### Image Generation
+
+ ```python
+ response = client.images(
+     prompt="A serene landscape with mountains and a lake",
+     provider="openai",
+     model="dall-e-3",
+     size="1024x1024"
+ )
+
+ print(f"Image URL: {response['images'][0]['url']}")
+ ```
+
+ ### Streaming Responses
+
+ ```python
+ for chunk in client.chat(
+     messages=[{"role": "user", "content": "Write a short story."}],
+     stream=True
+ ):
+     if "choices" in chunk and len(chunk["choices"]) > 0:
+         content = chunk["choices"][0].get("delta", {}).get("content", "")
+         print(content, end="", flush=True)
+ ```
+
+ ### Getting Available Models
+
+ ```python
+ # Get all providers and models
+ providers = client.models()
+ for provider in providers:
+     print(f"Provider: {provider['name']}")
+     for model in provider["models"]:
+         print(f" - {model['id']}: {model['name']}")
+
+ # Get models for a specific provider
+ openai_provider = client.models("openai")
+ print(f"OpenAI models: {[m['id'] for m in openai_provider['models']]}")
+ ```
+
+ ## Error Handling
+
+ ```python
+ from indoxrouter import Client, ModelNotFoundError, ProviderError
+
+ try:
+     client = Client(api_key="your_api_key", base_url="http://your-server-url:8000")
+     response = client.chat(
+         messages=[{"role": "user", "content": "Hello"}],
+         provider="nonexistent",
+         model="nonexistent-model"
+     )
+ except ModelNotFoundError as e:
+     print(f"Model not found: {e}")
+ except ProviderError as e:
+     print(f"Provider error: {e}")
+ ```
+
+ ## Context Manager
+
+ ```python
+ with Client(api_key="your_api_key", base_url="http://your-server-url:8000") as client:
+     response = client.chat([{"role": "user", "content": "Hello!"}])
+     print(response["choices"][0]["message"]["content"])
+     # Client is automatically closed when exiting the block
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
@@ -0,0 +1,151 @@
+ # IndoxRouter Client
+
+ A unified client for various AI providers, including OpenAI, Anthropic, Cohere, Google, and Mistral.
+
+ ## Features
+
+ - **Unified API**: Access multiple AI providers through a single API
+ - **Simple Interface**: Easy-to-use methods for chat, completion, embeddings, and image generation
+ - **Error Handling**: Standardized error handling across providers
+ - **Authentication**: Automatic token management
+
+ ## Installation
+
+ ```bash
+ pip install indoxrouter
+ ```
+
+ ## Usage
+
+ ### Initialization
+
+ ```python
+ from indoxrouter import Client
+
+ # Initialize with API key
+ client = Client(api_key="your_api_key", base_url="http://your-server-url:8000")
+
+ # Or initialize with username and password
+ client = Client(
+     username="your_username",
+     password="your_password",
+     base_url="http://your-server-url:8000"
+ )
+
+ # Using environment variables
+ # Set INDOXROUTER_API_KEY or INDOXROUTER_USERNAME and INDOXROUTER_PASSWORD
+ client = Client(base_url="http://your-server-url:8000")
+ ```
+
+ ### Chat Completions
+
+ ```python
+ response = client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Tell me a joke."}
+     ],
+     provider="openai",
+     model="gpt-3.5-turbo",
+     temperature=0.7
+ )
+
+ print(response["choices"][0]["message"]["content"])
+ ```
+
+ ### Text Completions
+
+ ```python
+ response = client.completion(
+     prompt="Once upon a time,",
+     provider="openai",
+     model="gpt-3.5-turbo-instruct",
+     max_tokens=100
+ )
+
+ print(response["choices"][0]["text"])
+ ```
+
+ ### Embeddings
+
+ ```python
+ response = client.embeddings(
+     text=["Hello world", "AI is amazing"],
+     provider="openai",
+     model="text-embedding-ada-002"
+ )
+
+ print(f"Dimensions: {response['dimensions']}")
+ print(f"First embedding: {response['embeddings'][0][:5]}...")
+ ```
+
+ ### Image Generation
+
+ ```python
+ response = client.images(
+     prompt="A serene landscape with mountains and a lake",
+     provider="openai",
+     model="dall-e-3",
+     size="1024x1024"
+ )
+
+ print(f"Image URL: {response['images'][0]['url']}")
+ ```
+
+ ### Streaming Responses
+
+ ```python
+ for chunk in client.chat(
+     messages=[{"role": "user", "content": "Write a short story."}],
+     stream=True
+ ):
+     if "choices" in chunk and len(chunk["choices"]) > 0:
+         content = chunk["choices"][0].get("delta", {}).get("content", "")
+         print(content, end="", flush=True)
+ ```
+
+ ### Getting Available Models
+
+ ```python
+ # Get all providers and models
+ providers = client.models()
+ for provider in providers:
+     print(f"Provider: {provider['name']}")
+     for model in provider["models"]:
+         print(f" - {model['id']}: {model['name']}")
+
+ # Get models for a specific provider
+ openai_provider = client.models("openai")
+ print(f"OpenAI models: {[m['id'] for m in openai_provider['models']]}")
+ ```
+
+ ## Error Handling
+
+ ```python
+ from indoxrouter import Client, ModelNotFoundError, ProviderError
+
+ try:
+     client = Client(api_key="your_api_key", base_url="http://your-server-url:8000")
+     response = client.chat(
+         messages=[{"role": "user", "content": "Hello"}],
+         provider="nonexistent",
+         model="nonexistent-model"
+     )
+ except ModelNotFoundError as e:
+     print(f"Model not found: {e}")
+ except ProviderError as e:
+     print(f"Provider error: {e}")
+ ```
+
+ ## Context Manager
+
+ ```python
+ with Client(api_key="your_api_key", base_url="http://your-server-url:8000") as client:
+     response = client.chat([{"role": "user", "content": "Hello!"}])
+     print(response["choices"][0]["message"]["content"])
+     # Client is automatically closed when exiting the block
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
@@ -0,0 +1,44 @@
+ # IndoxRouter Cookbook
+
+ This directory contains examples and recipes for using the IndoxRouter client to interact with various AI providers.
+
+ ## Examples
+
+ - [Chat Completion](chat_completion.md): Examples of generating chat completions
+ - [Embeddings](embeddings.md): Examples of generating embeddings
+ - [Image Generation](image_generation.md): Examples of generating images
+
+ ## Jupyter Notebook
+
+ - [IndoxRouter Cookbook](indoxRouter_cookbook.ipynb): A comprehensive Jupyter notebook with examples of using the IndoxRouter client
+
+ ## Getting Started
+
+ To run these examples, you'll need to:
+
+ 1. Install the IndoxRouter client:
+
+ ```bash
+ pip install indoxrouter
+ ```
+
+ 2. Set your API key:
+
+ ```bash
+ # Set environment variable
+ export INDOXROUTER_API_KEY=your-api-key
+
+ # Or provide it directly in your code
+ from indoxrouter import Client
+ client = Client(api_key="your-api-key")
+ ```
+
+ 3. Run the examples:
+
+ ```bash
+ python -c "from indoxrouter import Client; client = Client(api_key='your_api_key'); print(client.chat([{'role': 'user', 'content': 'Hello!'}]))"
+ ```
+
+ ## Note on API Keys
+
+ The IndoxRouter API key is used to authenticate with the IndoxRouter server. You don't need to provide individual API keys for each provider (like OpenAI, Anthropic, etc.) as the IndoxRouter server handles that for you.
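To make that note concrete, here is a minimal sketch (not part of the packaged cookbook) that routes the same `chat()` call to two different providers using only the IndoxRouter API key. The provider and model names mirror the other examples in this release and assume your IndoxRouter server has them configured.

```python
from indoxrouter import Client

# A single IndoxRouter API key; provider credentials (OpenAI, Mistral, ...) are managed server-side.
client = Client(api_key="your_api_key")

# Assumed example: the same request routed to two providers configured on the server.
for provider, model in [("openai", "gpt-3.5-turbo"), ("mistral", "mistral-small")]:
    response = client.chat(
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
        provider=provider,
        model=model,
    )
    print(f"{provider}: {response['choices'][0]['message']['content']}")
```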
@@ -0,0 +1,117 @@
+ # Chat Completion Example
+
+ This example demonstrates how to use the IndoxRouter client to generate chat completions from various AI providers.
+
+ ## Basic Chat Completion
+
+ ```python
+ from indoxrouter import Client
+
+ # Initialize client with API key
+ client = Client(api_key="your_api_key")
+
+ # Generate a chat completion
+ response = client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Tell me a joke."}
+     ],
+     provider="openai",
+     model="gpt-3.5-turbo",
+     temperature=0.7
+ )
+
+ # Print the response
+ print(response["choices"][0]["message"]["content"])
+ ```
+
+ ## Streaming Chat Completion
+
+ ```python
+ from indoxrouter import Client
+
+ # Initialize client with API key
+ client = Client(api_key="your_api_key")
+
+ # Generate a streaming chat completion
+ print("Response: ", end="", flush=True)
+ for chunk in client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Write a short story about a robot learning to paint."}
+     ],
+     provider="openai",
+     model="gpt-3.5-turbo",
+     temperature=0.7,
+     stream=True
+ ):
+     if "choices" in chunk and len(chunk["choices"]) > 0:
+         content = chunk["choices"][0].get("delta", {}).get("content", "")
+         print(content, end="", flush=True)
+ print("\n")
+ ```
+
+ ## Using Different Providers
+
+ ### Anthropic (Claude)
+
+ ```python
+ response = client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Tell me a joke."}
+     ],
+     provider="anthropic",
+     model="claude-3-haiku-20240307",
+     temperature=0.7
+ )
+ ```
+
+ ### Mistral
+
+ ```python
+ response = client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Tell me a joke."}
+     ],
+     provider="mistral",
+     model="mistral-small",
+     temperature=0.7
+ )
+ ```
+
+ ### Google (Gemini)
+
+ ```python
+ response = client.chat(
+     messages=[
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "Tell me a joke."}
+     ],
+     provider="google",
+     model="gemini-pro",
+     temperature=0.7
+ )
+ ```
+
+ ## Error Handling
+
+ ```python
+ from indoxrouter import Client
+ from indoxrouter.exceptions import ModelNotFoundError, ProviderNotFoundError
+
+ try:
+     client = Client(api_key="your_api_key")
+     response = client.chat(
+         messages=[
+             {"role": "user", "content": "Hello"}
+         ],
+         provider="nonexistent",
+         model="nonexistent-model"
+     )
+ except ProviderNotFoundError as e:
+     print(f"Provider not found: {e}")
+ except ModelNotFoundError as e:
+     print(f"Model not found: {e}")
+ ```
@@ -0,0 +1,123 @@
+ # Embeddings Example
+
+ This example demonstrates how to use the IndoxRouter client to generate embeddings from various AI providers.
+
+ ## Basic Embeddings
+
+ ```python
+ from indoxrouter import Client
+
+ # Initialize client with API key
+ client = Client(api_key="your_api_key")
+
+ # Generate embeddings for a single text
+ response = client.embeddings(
+     text="This is a sample text to embed.",
+     provider="openai",
+     model="text-embedding-ada-002"
+ )
+
+ # Print the response
+ print(f"Dimensions: {response['dimensions']}")
+ print(f"Embedding (first 5 values): {response['embeddings'][0][:5]}")
+ ```
+
+ ## Batch Embeddings
+
+ ```python
+ import numpy as np
+ from indoxrouter import Client
+
+ # Initialize client with API key
+ client = Client(api_key="your_api_key")
+
+ # Define some texts to embed
+ texts = [
+     "The quick brown fox jumps over the lazy dog.",
+     "The five boxing wizards jump quickly.",
+     "How vexingly quick daft zebras jump!",
+ ]
+
+ # Generate embeddings for multiple texts
+ response = client.embeddings(
+     text=texts,
+     provider="openai",
+     model="text-embedding-ada-002"
+ )
+
+ # Print the response
+ print(f"Number of embeddings: {len(response['embeddings'])}")
+ print(f"Dimensions: {response['dimensions']}")
+
+ # Calculate cosine similarity between embeddings
+ embeddings = response["embeddings"]
+
+ def cosine_similarity(a, b):
+     return np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
+
+ for i in range(len(embeddings)):
+     for j in range(i + 1, len(embeddings)):
+         similarity = cosine_similarity(embeddings[i], embeddings[j])
+         print(f"Similarity between text {i+1} and text {j+1}: {similarity:.4f}")
+ ```
+
+ ## Using Different Providers
+
+ ### OpenAI
+
+ ```python
+ response = client.embeddings(
+     text="This is a sample text to embed.",
+     provider="openai",
+     model="text-embedding-ada-002"
+ )
+ ```
+
+ ### Cohere
+
+ ```python
+ response = client.embeddings(
+     text="This is a sample text to embed.",
+     provider="cohere",
+     model="embed-english-v3.0"
+ )
+ ```
+
+ ### Google
+
+ ```python
+ response = client.embeddings(
+     text="This is a sample text to embed.",
+     provider="google",
+     model="embedding-001"
+ )
+ ```
+
+ ### Mistral
+
+ ```python
+ response = client.embeddings(
+     text="This is a sample text to embed.",
+     provider="mistral",
+     model="mistral-embed"
+ )
+ ```
+
+ ## Error Handling
+
+ ```python
+ from indoxrouter import Client
+ from indoxrouter.exceptions import ModelNotFoundError, ProviderNotFoundError
+
+ try:
+     client = Client(api_key="your_api_key")
+     response = client.embeddings(
+         text="This is a sample text to embed.",
+         provider="nonexistent",
+         model="nonexistent-model"
+     )
+ except ProviderNotFoundError as e:
+     print(f"Provider not found: {e}")
+ except ModelNotFoundError as e:
+     print(f"Model not found: {e}")
+ ```
+ ```