euriai 0.3.27__tar.gz → 0.3.28__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {euriai-0.3.27 → euriai-0.3.28}/PKG-INFO +1 -1
- {euriai-0.3.27 → euriai-0.3.28}/euriai/euri_chat.py +18 -10
- {euriai-0.3.27 → euriai-0.3.28}/euriai/euri_embed.py +16 -9
- {euriai-0.3.27 → euriai-0.3.28}/euriai.egg-info/PKG-INFO +1 -1
- {euriai-0.3.27 → euriai-0.3.28}/setup.py +1 -1
- {euriai-0.3.27 → euriai-0.3.28}/README.md +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai/__init__.py +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai/cli.py +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai/client.py +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai/embedding.py +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai/langchain_embed.py +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai/langchain_llm.py +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai.egg-info/SOURCES.txt +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai.egg-info/requires.txt +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/euriai.egg-info/top_level.txt +0 -0
- {euriai-0.3.27 → euriai-0.3.28}/setup.cfg +0 -0
@@ -1,10 +1,17 @@
|
|
1
1
|
import requests
|
2
|
-
from typing import List, Optional
|
2
|
+
from typing import List, Optional, Any, Dict
|
3
3
|
from llama_index.core.llms import LLM
|
4
4
|
from llama_index.core.base.llms.types import ChatMessage, CompletionResponse, CompletionResponseGen
|
5
5
|
|
6
6
|
|
7
7
|
class EuriaiLlamaIndexLLM(LLM):
|
8
|
+
# Define class attributes as expected by Pydantic
|
9
|
+
api_key: str
|
10
|
+
model: str = "gpt-4.1-nano"
|
11
|
+
temperature: float = 0.7
|
12
|
+
max_tokens: int = 1000
|
13
|
+
url: str = "https://api.euron.one/api/v1/euri/alpha/chat/completions"
|
14
|
+
|
8
15
|
def __init__(
|
9
16
|
self,
|
10
17
|
api_key: str,
|
@@ -21,18 +28,19 @@ class EuriaiLlamaIndexLLM(LLM):
|
|
21
28
|
temperature (float, optional): Sampling temperature. Defaults to 0.7.
|
22
29
|
max_tokens (int, optional): Maximum number of tokens. Defaults to 1000.
|
23
30
|
"""
|
24
|
-
#
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
31
|
+
# Create a dictionary of parameters for the parent class
|
32
|
+
model_params = {
|
33
|
+
"api_key": api_key,
|
34
|
+
"model": model if model is not None else self.model,
|
35
|
+
"temperature": temperature if temperature is not None else self.temperature,
|
36
|
+
"max_tokens": max_tokens if max_tokens is not None else self.max_tokens,
|
37
|
+
}
|
30
38
|
|
31
|
-
#
|
32
|
-
super().__init__()
|
39
|
+
# Initialize the parent class with the parameters
|
40
|
+
super().__init__(**model_params)
|
33
41
|
|
34
42
|
@property
|
35
|
-
def metadata(self):
|
43
|
+
def metadata(self) -> Dict[str, Any]:
|
36
44
|
return {
|
37
45
|
"context_window": 8000,
|
38
46
|
"num_output": self.max_tokens,
|
@@ -1,17 +1,24 @@
|
|
1
1
|
import requests
|
2
2
|
import numpy as np
|
3
|
+
from typing import List, Optional
|
3
4
|
from llama_index.core.embeddings import BaseEmbedding
|
4
5
|
|
5
6
|
class EuriaiLlamaIndexEmbedding(BaseEmbedding):
|
6
|
-
|
7
|
+
# Define class attributes as expected by Pydantic
|
8
|
+
api_key: str
|
9
|
+
model: str = "text-embedding-3-small"
|
10
|
+
url: str = "https://api.euron.one/api/v1/euri/alpha/embeddings"
|
11
|
+
|
12
|
+
def __init__(self, api_key: str, model: Optional[str] = None):
|
7
13
|
"""Initialize embedding model with API key and model name."""
|
8
|
-
#
|
9
|
-
|
10
|
-
|
11
|
-
|
14
|
+
# Create parameters for the parent class
|
15
|
+
embed_params = {
|
16
|
+
"api_key": api_key,
|
17
|
+
"model": model if model is not None else self.model,
|
18
|
+
}
|
12
19
|
|
13
|
-
#
|
14
|
-
super().__init__()
|
20
|
+
# Initialize the parent class
|
21
|
+
super().__init__(**embed_params)
|
15
22
|
|
16
23
|
def _post_embedding(self, texts):
|
17
24
|
"""Helper method to post data to API and get embeddings."""
|
@@ -27,10 +34,10 @@ class EuriaiLlamaIndexEmbedding(BaseEmbedding):
|
|
27
34
|
response.raise_for_status()
|
28
35
|
return [np.array(obj["embedding"]).tolist() for obj in response.json()["data"]]
|
29
36
|
|
30
|
-
def get_text_embedding(self, text: str) ->
|
37
|
+
def get_text_embedding(self, text: str) -> List[float]:
|
31
38
|
"""Get embedding for a single text."""
|
32
39
|
return self._post_embedding([text])[0]
|
33
40
|
|
34
|
-
def get_text_embeddings(self, texts:
|
41
|
+
def get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
|
35
42
|
"""Get embeddings for multiple texts."""
|
36
43
|
return self._post_embedding(texts)
|
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
|
|
2
2
|
|
3
3
|
setup(
|
4
4
|
name="euriai",
|
5
|
-
version="0.3.27",
|
5
|
+
version="0.3.28",
|
6
6
|
description="Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration",
|
7
7
|
long_description=open("README.md", encoding="utf-8").read(),
|
8
8
|
long_description_content_type="text/markdown",
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|