euriai 0.3.7__tar.gz → 0.3.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {euriai-0.3.7 → euriai-0.3.9}/PKG-INFO +4 -2
- euriai-0.3.9/euriai/llamaindex/__init__.py +0 -0
- euriai-0.3.9/euriai/llamaindex/euri_chat.py +31 -0
- euriai-0.3.9/euriai/llamaindex/euri_embed.py +28 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/PKG-INFO +4 -2
- {euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/SOURCES.txt +4 -1
- euriai-0.3.9/euriai.egg-info/requires.txt +4 -0
- {euriai-0.3.7 → euriai-0.3.9}/setup.py +9 -6
- euriai-0.3.7/euriai.egg-info/requires.txt +0 -2
- {euriai-0.3.7 → euriai-0.3.9}/README.md +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai/__init__.py +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai/cli.py +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai/client.py +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai/embedding.py +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai/langchain_embed.py +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai/langchain_llm.py +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/top_level.txt +0 -0
- {euriai-0.3.7 → euriai-0.3.9}/setup.cfg +0 -0
{euriai-0.3.7 → euriai-0.3.9}/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.7
-Summary: Python client for EURI LLM API (euron.one) with CLI and
+Version: 0.3.9
+Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: euron.one
 Author-email: sudhanshu@euron.one
 License: MIT
@@ -13,6 +13,8 @@ Requires-Python: >=3.6
 Description-Content-Type: text/markdown
 Requires-Dist: requests
 Requires-Dist: langchain-core
+Requires-Dist: llama-index<0.11.0,>=0.10.0
+Requires-Dist: numpy
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
euriai-0.3.9/euriai/llamaindex/__init__.py: File without changes
euriai-0.3.9/euriai/llamaindex/euri_chat.py
@@ -0,0 +1,31 @@
+import requests
+from llama_index.llms.base import LLM, ChatMessage, CompletionResponse
+from typing import List
+
+class EuriaiLlamaIndexLLM(LLM):
+    def __init__(self, api_key: str, model: str = "gpt-4.1-nano", temperature: float = 0.7, max_tokens: int = 1000):
+        self.api_key = api_key
+        self.model = model
+        self.temperature = temperature
+        self.max_tokens = max_tokens
+        self.url = "https://api.euron.one/api/v1/chat/completions"
+
+    def complete(self, prompt: str, **kwargs) -> CompletionResponse:
+        return self.chat([ChatMessage(role="user", content=prompt)])
+
+    def chat(self, messages: List[ChatMessage], **kwargs) -> CompletionResponse:
+        payload = {
+            "messages": [{"role": m.role, "content": m.content} for m in messages],
+            "model": self.model,
+            "temperature": self.temperature,
+            "max_tokens": self.max_tokens
+        }
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.api_key}"
+        }
+        response = requests.post(self.url, headers=headers, json=payload)
+        response.raise_for_status()
+        result = response.json()
+        content = result["choices"][0]["message"]["content"]
+        return CompletionResponse(text=content)
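For reference, the request that EuriaiLlamaIndexLLM.chat() assembles can be reproduced with requests alone. The sketch below is not part of the package: the endpoint, headers, payload shape, and response parsing are copied from the new module above, while the API key value and the prompt are placeholders.

```python
import requests

EURI_API_KEY = "your-api-key"  # placeholder; a real key comes from euron.one

# Same endpoint, headers, and payload shape that EuriaiLlamaIndexLLM.chat() builds.
payload = {
    "messages": [{"role": "user", "content": "Say hello in one sentence."}],
    "model": "gpt-4.1-nano",
    "temperature": 0.7,
    "max_tokens": 1000,
}
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {EURI_API_KEY}",
}

response = requests.post(
    "https://api.euron.one/api/v1/chat/completions",
    headers=headers,
    json=payload,
)
response.raise_for_status()

# The wrapper wraps this string in CompletionResponse(text=...).
print(response.json()["choices"][0]["message"]["content"])
```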
euriai-0.3.9/euriai/llamaindex/euri_embed.py
@@ -0,0 +1,28 @@
+import requests
+import numpy as np
+from llama_index.embeddings.base import BaseEmbedding
+
+class EuriaiLlamaIndexEmbedding(BaseEmbedding):
+    def __init__(self, api_key: str, model: str = "text-embedding-3-small"):
+        self.api_key = api_key
+        self.model = model
+        self.url = "https://api.euron.one/api/v1/euri/alpha/embeddings"
+
+    def _post_embedding(self, texts):
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.api_key}"
+        }
+        payload = {
+            "input": texts,
+            "model": self.model
+        }
+        response = requests.post(self.url, headers=headers, json=payload)
+        response.raise_for_status()
+        return [np.array(obj["embedding"]).tolist() for obj in response.json()["data"]]
+
+    def get_text_embedding(self, text: str) -> list[float]:
+        return self._post_embedding([text])[0]
+
+    def get_text_embeddings(self, texts: list[str]) -> list[list[float]]:
+        return self._post_embedding(texts)
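Likewise, the call made by EuriaiLlamaIndexEmbedding._post_embedding() can be sketched directly. The endpoint, payload, and response shape come from the module above; the API key and the sample inputs are placeholders, and "text-embedding-3-small" is simply the constructor's default model name.

```python
import requests

EURI_API_KEY = "your-api-key"  # placeholder

# Mirrors the request built by _post_embedding().
payload = {
    "input": ["hello world", "euriai llamaindex integration"],
    "model": "text-embedding-3-small",
}
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {EURI_API_KEY}",
}

response = requests.post(
    "https://api.euron.one/api/v1/euri/alpha/embeddings",
    headers=headers,
    json=payload,
)
response.raise_for_status()

# One embedding vector per input string, read from data[i]["embedding"] as the wrapper does.
vectors = [item["embedding"] for item in response.json()["data"]]
print(len(vectors), len(vectors[0]))
```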
{euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.7
-Summary: Python client for EURI LLM API (euron.one) with CLI and
+Version: 0.3.9
+Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: euron.one
 Author-email: sudhanshu@euron.one
 License: MIT
@@ -13,6 +13,8 @@ Requires-Python: >=3.6
 Description-Content-Type: text/markdown
 Requires-Dist: requests
 Requires-Dist: langchain-core
+Requires-Dist: llama-index<0.11.0,>=0.10.0
+Requires-Dist: numpy
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
{euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/SOURCES.txt
@@ -11,4 +11,7 @@ euriai.egg-info/SOURCES.txt
 euriai.egg-info/dependency_links.txt
 euriai.egg-info/entry_points.txt
 euriai.egg-info/requires.txt
-euriai.egg-info/top_level.txt
+euriai.egg-info/top_level.txt
+euriai/llamaindex/__init__.py
+euriai/llamaindex/euri_chat.py
+euriai/llamaindex/euri_embed.py
{euriai-0.3.7 → euriai-0.3.9}/setup.py
@@ -2,17 +2,20 @@ from setuptools import setup, find_packages
 
 setup(
     name="euriai",
-    version="0.3.7",
-    description="Python client for EURI LLM API (euron.one) with CLI and
+    version="0.3.9",
+    description="Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
     author="euron.one",
     author_email="sudhanshu@euron.one",
     packages=find_packages(),
     install_requires=[
-        "requests",
-        "langchain-core"
-    ],
+        "requests",
+        "langchain-core",
+        "llama-index>=0.10.0,<0.11.0",
+        "numpy"
+    ]
+    ,
     python_requires=">=3.6",
     entry_points={
         "console_scripts": [
@@ -26,4 +29,4 @@ setup(
         "Intended Audience :: Developers",
     ],
     license="MIT",
-)
+)
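The main packaging change is the new llama-index pin. As a standalone illustration (not part of the package), the declared specifier can be checked against candidate versions with the third-party packaging library; the version strings below are arbitrary examples.

```python
from packaging.specifiers import SpecifierSet

# The constraint declared in install_requires / Requires-Dist above.
llama_index_pin = SpecifierSet(">=0.10.0,<0.11.0")

# Arbitrary candidate versions, not taken from the package metadata.
for candidate in ["0.9.48", "0.10.30", "0.11.0"]:
    print(candidate, candidate in llama_index_pin)
# Only a 0.10.x release satisfies the pin.
```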
{euriai-0.3.7 → euriai-0.3.9}/README.md: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai/__init__.py: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai/cli.py: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai/client.py: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai/embedding.py: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai/langchain_embed.py: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai/langchain_llm.py: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/dependency_links.txt: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/entry_points.txt: File without changes
{euriai-0.3.7 → euriai-0.3.9}/euriai.egg-info/top_level.txt: File without changes
{euriai-0.3.7 → euriai-0.3.9}/setup.cfg: File without changes