euriai 0.3.13__py3-none-any.whl → 0.3.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

euriai/llamaindex/euri_chat.py
@@ -1,22 +1,27 @@
 import requests
-from typing import List
+from typing import List, Optional
 from llama_index.core.llms import LLM
 from llama_index.core.base.llms.types import ChatMessage, CompletionResponse, CompletionResponseGen
 
-
 class EuriaiLlamaIndexLLM(LLM):
+    api_key: str
+    model: str = "gpt-4.1-nano"
+    temperature: float = 0.7
+    max_tokens: int = 1000
+    url: str = "https://api.euron.one/api/v1/chat/completions"
+
     def __init__(
         self,
         api_key: str,
-        model: str = "gpt-4.1-nano",
-        temperature: float = 0.7,
-        max_tokens: int = 1000,
+        model: Optional[str] = None,
+        temperature: Optional[float] = None,
+        max_tokens: Optional[int] = None
     ):
+        super().__init__()
         self.api_key = api_key
-        self.model = model
-        self.temperature = temperature
-        self.max_tokens = max_tokens
-        self.url = "https://api.euron.one/api/v1/chat/completions"
+        self.model = model or self.model
+        self.temperature = temperature if temperature is not None else self.temperature
+        self.max_tokens = max_tokens if max_tokens is not None else self.max_tokens
 
     @property
     def metadata(self):
@@ -43,27 +48,25 @@ class EuriaiLlamaIndexLLM(LLM):
         response.raise_for_status()
         result = response.json()
         content = result["choices"][0]["message"]["content"]
-
         return CompletionResponse(text=content)
 
     def complete(self, prompt: str, **kwargs) -> CompletionResponse:
         return self.chat([ChatMessage(role="user", content=prompt)])
 
-    # Async versions (not supported, so raise or return NotImplementedError)
     async def achat(self, messages: List[ChatMessage], **kwargs) -> CompletionResponse:
-        raise NotImplementedError("Async chat not implemented for Euriai.")
+        raise NotImplementedError("Async chat not supported.")
 
     async def acomplete(self, prompt: str, **kwargs) -> CompletionResponse:
-        raise NotImplementedError("Async complete not implemented for Euriai.")
+        raise NotImplementedError("Async complete not supported.")
 
     def stream_chat(self, messages: List[ChatMessage], **kwargs) -> CompletionResponseGen:
-        raise NotImplementedError("Streaming not supported in EuriaiLlamaIndexLLM.")
+        raise NotImplementedError("Streaming not supported.")
 
     def stream_complete(self, prompt: str, **kwargs) -> CompletionResponseGen:
-        raise NotImplementedError("Streaming not supported in EuriaiLlamaIndexLLM.")
+        raise NotImplementedError("Streaming not supported.")
 
     async def astream_chat(self, messages: List[ChatMessage], **kwargs) -> CompletionResponseGen:
-        raise NotImplementedError("Async streaming not supported in EuriaiLlamaIndexLLM.")
+        raise NotImplementedError("Async streaming not supported.")
 
     async def astream_complete(self, prompt: str, **kwargs) -> CompletionResponseGen:
-        raise NotImplementedError("Async streaming not supported in EuriaiLlamaIndexLLM.")
+        raise NotImplementedError("Async streaming not supported.")
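
The change above promotes api_key, model, temperature, max_tokens, and url to class-level field declarations, calls super().__init__() first, and makes the constructor arguments Optional with a fall-back to those field defaults; this is the usual pattern when the base LLM class is Pydantic-based and plain attribute assignment is not allowed. A minimal usage sketch under those assumptions follows; the import path mirrors the wheel's RECORD layout, and the key value and prompt are placeholders, not part of the package:

# Sketch only: assumes euriai 0.3.15 and llama-index-core are installed,
# and that "sk-..." is replaced with a real EURI API key.
from euriai.llamaindex.euri_chat import EuriaiLlamaIndexLLM

# Omitted arguments fall back to the class-level defaults
# (model "gpt-4.1-nano", temperature 0.7, max_tokens 1000).
llm = EuriaiLlamaIndexLLM(api_key="sk-...")

# Explicit overrides are kept; the None-checks in __init__ preserve the rest.
custom = EuriaiLlamaIndexLLM(api_key="sk-...", temperature=0.2)

# complete() wraps the prompt in a user ChatMessage and returns a CompletionResponse.
response = llm.complete("Summarize the EURI API in one sentence.")
print(response.text)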

euriai-0.3.13.dist-info/METADATA → euriai-0.3.15.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.13
+Version: 0.3.15
 Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: euron.one
 Author-email: sudhanshu@euron.one

euriai-0.3.13.dist-info/RECORD → euriai-0.3.15.dist-info/RECORD
@@ -5,10 +5,10 @@ euriai/embedding.py,sha256=z-LLKU68tCrPi9QMs1tlKwyr7WJcjceCTkNQIFMG6vA,1276
 euriai/langchain_embed.py,sha256=OXWWxiKJ4g24TFgnWPOCZvhK7G8xtSf0ppQ2zwHkIPM,584
 euriai/langchain_llm.py,sha256=D5YvYwV7q9X2_vdoaQiPs7tNiUmjkGz-9Q-7M61hhkg,986
 euriai/llamaindex/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-euriai/llamaindex/euri_chat.py,sha256=_n3LpslD-QsDKtB3oCMjatgwqmTv7TtKJSvGzCsS0LM,2816
+euriai/llamaindex/euri_chat.py,sha256=3sUKzSmM3E_Rg5x7gEMQWHX45_Y3DC4Rsul5sF_7qhs,2891
 euriai/llamaindex/euri_embed.py,sha256=RO62uzT9rStp9_Ow2cYa_ZrGabkPFX89ZL3Mgb13X08,1063
-euriai-0.3.13.dist-info/METADATA,sha256=u3jmtY4iHdw1P9BVJzxsV-y6OU0EFqgbcJiIkBQbGeU,3257
-euriai-0.3.13.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
-euriai-0.3.13.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
-euriai-0.3.13.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
-euriai-0.3.13.dist-info/RECORD,,
+euriai-0.3.15.dist-info/METADATA,sha256=YvnSilgl-fe_cv0yXPnLvyF3ZmpNO3YxsQKAfY5vB44,3257
+euriai-0.3.15.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+euriai-0.3.15.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
+euriai-0.3.15.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
+euriai-0.3.15.dist-info/RECORD,,