euriai 0.3.7__tar.gz → 0.3.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: euriai
3
- Version: 0.3.7
4
- Summary: Python client for EURI LLM API (euron.one) with CLI and interactive wizard
3
+ Version: 0.3.8
4
+ Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
5
5
  Author: euron.one
6
6
  Author-email: sudhanshu@euron.one
7
7
  License: MIT
@@ -13,6 +13,8 @@ Requires-Python: >=3.6
13
13
  Description-Content-Type: text/markdown
14
14
  Requires-Dist: requests
15
15
  Requires-Dist: langchain-core
16
+ Requires-Dist: llama-index
17
+ Requires-Dist: numpy
16
18
  Dynamic: author
17
19
  Dynamic: author-email
18
20
  Dynamic: classifier
File without changes
@@ -0,0 +1,31 @@
1
+ import requests
2
+ from llama_index.llms.base import LLM, ChatMessage, CompletionResponse
3
+ from typing import List
4
+
5
+ class EuriaiLlamaIndexLLM(LLM):
6
+ def __init__(self, api_key: str, model: str = "gpt-4.1-nano", temperature: float = 0.7, max_tokens: int = 1000):
7
+ self.api_key = api_key
8
+ self.model = model
9
+ self.temperature = temperature
10
+ self.max_tokens = max_tokens
11
+ self.url = "https://api.euron.one/api/v1/chat/completions"
12
+
13
+ def complete(self, prompt: str, **kwargs) -> CompletionResponse:
14
+ return self.chat([ChatMessage(role="user", content=prompt)])
15
+
16
+ def chat(self, messages: List[ChatMessage], **kwargs) -> CompletionResponse:
17
+ payload = {
18
+ "messages": [{"role": m.role, "content": m.content} for m in messages],
19
+ "model": self.model,
20
+ "temperature": self.temperature,
21
+ "max_tokens": self.max_tokens
22
+ }
23
+ headers = {
24
+ "Content-Type": "application/json",
25
+ "Authorization": f"Bearer {self.api_key}"
26
+ }
27
+ response = requests.post(self.url, headers=headers, json=payload)
28
+ response.raise_for_status()
29
+ result = response.json()
30
+ content = result["choices"][0]["message"]["content"]
31
+ return CompletionResponse(text=content)
@@ -0,0 +1,28 @@
1
import requests
from typing import List
from llama_index.embeddings.base import BaseEmbedding


class EuriaiLlamaIndexEmbedding(BaseEmbedding):
    """LlamaIndex embedding adapter for the EURI embeddings API (euron.one)."""

    def __init__(self, api_key: str, model: str = "text-embedding-3-small"):
        # NOTE(review): recent llama_index BaseEmbedding is a pydantic model;
        # bare attribute assignment may need declared fields — confirm against
        # the pinned llama-index version.
        self.api_key = api_key
        self.model = model
        self.url = "https://api.euron.one/api/v1/euri/alpha/embeddings"

    def _post_embedding(self, texts: List[str]) -> List[List[float]]:
        """POST *texts* to the embeddings endpoint and return their vectors.

        Raises:
            requests.HTTPError: If the API responds with a non-2xx status.
            requests.Timeout: If the API does not answer within the timeout.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }
        payload = {
            "input": texts,
            "model": self.model,
        }
        # Explicit timeout so a hung API call cannot block the caller forever.
        response = requests.post(self.url, headers=headers, json=payload, timeout=60)
        response.raise_for_status()
        # The API already returns plain lists of floats; the original
        # np.array(...).tolist() round-trip was a no-op and is dropped.
        return [obj["embedding"] for obj in response.json()["data"]]

    # typing.List instead of list[...] generics: the package metadata in this
    # release declares Requires-Python >=3.6, and builtin generics need >=3.9.
    def get_text_embedding(self, text: str) -> List[float]:
        """Return the embedding vector for a single *text*."""
        return self._post_embedding([text])[0]

    def get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
        """Return embedding vectors for each entry of *texts*, in order."""
        return self._post_embedding(texts)
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: euriai
3
- Version: 0.3.7
4
- Summary: Python client for EURI LLM API (euron.one) with CLI and interactive wizard
3
+ Version: 0.3.8
4
+ Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
5
5
  Author: euron.one
6
6
  Author-email: sudhanshu@euron.one
7
7
  License: MIT
@@ -13,6 +13,8 @@ Requires-Python: >=3.6
13
13
  Description-Content-Type: text/markdown
14
14
  Requires-Dist: requests
15
15
  Requires-Dist: langchain-core
16
+ Requires-Dist: llama-index
17
+ Requires-Dist: numpy
16
18
  Dynamic: author
17
19
  Dynamic: author-email
18
20
  Dynamic: classifier
@@ -11,4 +11,7 @@ euriai.egg-info/SOURCES.txt
11
11
  euriai.egg-info/dependency_links.txt
12
12
  euriai.egg-info/entry_points.txt
13
13
  euriai.egg-info/requires.txt
14
- euriai.egg-info/top_level.txt
14
+ euriai.egg-info/top_level.txt
15
+ euriai/llamaindex/__init__.py
16
+ euriai/llamaindex/euri_chat.py
17
+ euriai/llamaindex/euri_embed.py
@@ -1,2 +1,4 @@
1
1
  requests
2
2
  langchain-core
3
+ llama-index
4
+ numpy
@@ -2,8 +2,8 @@ from setuptools import setup, find_packages
2
2
 
3
3
  setup(
4
4
  name="euriai",
5
- version="0.3.7",
6
- description="Python client for EURI LLM API (euron.one) with CLI and interactive wizard",
5
+ version="0.3.8",
6
+ description="Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration",
7
7
  long_description=open("README.md", encoding="utf-8").read(),
8
8
  long_description_content_type="text/markdown",
9
9
  author="euron.one",
@@ -11,7 +11,9 @@ setup(
11
11
  packages=find_packages(),
12
12
  install_requires=[
13
13
  "requests",
14
- "langchain-core"
14
+ "langchain-core",
15
+ "llama-index",
16
+ "numpy"
15
17
  ],
16
18
  python_requires=">=3.6",
17
19
  entry_points={
@@ -26,4 +28,4 @@ setup(
26
28
  "Intended Audience :: Developers",
27
29
  ],
28
30
  license="MIT",
29
- )
31
+ )
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes