euriai-0.3.15-py3-none-any.whl → euriai-0.3.17-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,14 @@
+    def __init__(
+        self,
+        api_key: str,
+        model: Optional[str] = None,
+        temperature: Optional[float] = None,
+        max_tokens: Optional[int] = None,
+    ):
+        # Use default values directly, no `self.model` yet
+        super().__init__(
+            api_key=api_key,
+            model=model if model is not None else "gpt-4.1-nano",
+            temperature=temperature if temperature is not None else 0.7,
+            max_tokens=max_tokens if max_tokens is not None else 1000,
+        )
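The hunk above resolves literal fallbacks before calling super().__init__, so nothing has to be read from an instance attribute that does not exist yet. A minimal, self-contained sketch of that default-resolution pattern (the helper name is illustrative and not part of the package):

from typing import Optional

def resolve_defaults(model: Optional[str] = None,
                     temperature: Optional[float] = None,
                     max_tokens: Optional[int] = None) -> dict:
    # Pick literal fallbacks up front, as the added __init__ does,
    # instead of reading defaults from self before initialization.
    return {
        "model": model if model is not None else "gpt-4.1-nano",
        "temperature": temperature if temperature is not None else 0.7,
        "max_tokens": max_tokens if max_tokens is not None else 1000,
    }

print(resolve_defaults(temperature=0.1))
# {'model': 'gpt-4.1-nano', 'temperature': 0.1, 'max_tokens': 1000}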
@@ -3,6 +3,7 @@ from typing import List, Optional
 from llama_index.core.llms import LLM
 from llama_index.core.base.llms.types import ChatMessage, CompletionResponse, CompletionResponseGen
 
+
 class EuriaiLlamaIndexLLM(LLM):
     api_key: str
     model: str = "gpt-4.1-nano"
@@ -15,13 +16,15 @@ class EuriaiLlamaIndexLLM(LLM):
         api_key: str,
         model: Optional[str] = None,
         temperature: Optional[float] = None,
-        max_tokens: Optional[int] = None
+        max_tokens: Optional[int] = None,
     ):
-        super().__init__()
-        self.api_key = api_key
-        self.model = model or self.model
-        self.temperature = temperature if temperature is not None else self.temperature
-        self.max_tokens = max_tokens if max_tokens is not None else self.max_tokens
+        # ✅ Pydantic-style super init with all fields
+        super().__init__(
+            api_key=api_key,
+            model=model or self.model,
+            temperature=temperature if temperature is not None else self.temperature,
+            max_tokens=max_tokens if max_tokens is not None else self.max_tokens,
+        )
 
     @property
     def metadata(self):
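Context for the change above (not part of the diff): llama_index's LLM base class is a Pydantic model, so a bare super().__init__() fails validation when required fields such as api_key are declared, and assigning the values afterwards bypasses field validation; passing the fields through super().__init__, as the new code does, is the conventional fix. A hedged usage sketch, assuming the class is importable from euriai/llamaindex/euri_chat.py as listed in the RECORD below, with all arguments passed explicitly:

from euriai.llamaindex.euri_chat import EuriaiLlamaIndexLLM  # import path assumed from the RECORD

llm = EuriaiLlamaIndexLLM(
    api_key="YOUR_EURI_API_KEY",  # placeholder, not a real key
    model="gpt-4.1-nano",
    temperature=0.7,
    max_tokens=1000,
)
print(llm.model)  # "gpt-4.1-nano" -- stored and validated as a Pydantic field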
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.15
+Version: 0.3.17
 Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: euron.one
 Author-email: sudhanshu@euron.one
@@ -4,11 +4,11 @@ euriai/client.py,sha256=USiqdMULgAiky7nkrJKF3FyKcOS2DtDmUdbeBSnyLYk,4076
 euriai/embedding.py,sha256=z-LLKU68tCrPi9QMs1tlKwyr7WJcjceCTkNQIFMG6vA,1276
 euriai/langchain_embed.py,sha256=OXWWxiKJ4g24TFgnWPOCZvhK7G8xtSf0ppQ2zwHkIPM,584
 euriai/langchain_llm.py,sha256=D5YvYwV7q9X2_vdoaQiPs7tNiUmjkGz-9Q-7M61hhkg,986
-euriai/llamaindex/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-euriai/llamaindex/euri_chat.py,sha256=3sUKzSmM3E_Rg5x7gEMQWHX45_Y3DC4Rsul5sF_7qhs,2891
+euriai/llamaindex/__init__.py,sha256=cknI1gzAK9BQir10ubSLBbTsr9pv2-robvam---0-dc,477
+euriai/llamaindex/euri_chat.py,sha256=Uw0rg-nriuZHUSdaO39qu0ub_M4ExEVK5Xyh83g8sYU,2953
 euriai/llamaindex/euri_embed.py,sha256=RO62uzT9rStp9_Ow2cYa_ZrGabkPFX89ZL3Mgb13X08,1063
-euriai-0.3.15.dist-info/METADATA,sha256=YvnSilgl-fe_cv0yXPnLvyF3ZmpNO3YxsQKAfY5vB44,3257
-euriai-0.3.15.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
-euriai-0.3.15.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
-euriai-0.3.15.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
-euriai-0.3.15.dist-info/RECORD,,
+euriai-0.3.17.dist-info/METADATA,sha256=boFP0M5YJ5PCqNieT-vjBsNN-n77m53wWhvYejye0Ng,3257
+euriai-0.3.17.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+euriai-0.3.17.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
+euriai-0.3.17.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
+euriai-0.3.17.dist-info/RECORD,,