euriai 0.3.26.tar.gz → 0.3.28.tar.gz

This diff shows the changes between package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.26
+Version: 0.3.28
 Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: euron.one
 Author-email: sudhanshu@euron.one
@@ -1,10 +1,11 @@
 import requests
-from typing import List, Optional
+from typing import List, Optional, Any, Dict
 from llama_index.core.llms import LLM
 from llama_index.core.base.llms.types import ChatMessage, CompletionResponse, CompletionResponseGen


 class EuriaiLlamaIndexLLM(LLM):
+    # Define class attributes as expected by Pydantic
     api_key: str
     model: str = "gpt-4.1-nano"
     temperature: float = 0.7
@@ -18,16 +19,28 @@ class EuriaiLlamaIndexLLM(LLM):
         temperature: Optional[float] = None,
         max_tokens: Optional[int] = None,
     ):
-        # ✅ Pydantic-style super init with all fields
-        super().__init__(
-            api_key=api_key,
-            model=model or self.model,
-            temperature=temperature if temperature is not None else self.temperature,
-            max_tokens=max_tokens if max_tokens is not None else self.max_tokens,
-        )
+        """
+        Initializes the EuriaiLlamaIndexLLM.
+
+        Args:
+            api_key (str): Your EURI API key.
+            model (str, optional): Model ID to use. Defaults to "gpt-4.1-nano".
+            temperature (float, optional): Sampling temperature. Defaults to 0.7.
+            max_tokens (int, optional): Maximum number of tokens. Defaults to 1000.
+        """
+        # Create a dictionary of parameters for the parent class
+        model_params = {
+            "api_key": api_key,
+            "model": model if model is not None else self.model,
+            "temperature": temperature if temperature is not None else self.temperature,
+            "max_tokens": max_tokens if max_tokens is not None else self.max_tokens,
+        }
+
+        # Initialize the parent class with the parameters
+        super().__init__(**model_params)

     @property
-    def metadata(self):
+    def metadata(self) -> Dict[str, Any]:
         return {
             "context_window": 8000,
             "num_output": self.max_tokens,
@@ -1,15 +1,24 @@
 import requests
 import numpy as np
-from llama_index.core.embeddings import BaseEmbedding  # Correct import path
+from typing import List, Optional
+from llama_index.core.embeddings import BaseEmbedding

 class EuriaiLlamaIndexEmbedding(BaseEmbedding):
-    def __init__(self, api_key: str, model: str = "text-embedding-3-small"):
+    # Define class attributes as expected by Pydantic
+    api_key: str
+    model: str = "text-embedding-3-small"
+    url: str = "https://api.euron.one/api/v1/euri/alpha/embeddings"
+
+    def __init__(self, api_key: str, model: Optional[str] = None):
         """Initialize embedding model with API key and model name."""
-        self.api_key = api_key
-        self.model = model
-        self.url = "https://api.euron.one/api/v1/euri/alpha/embeddings"
-        # Call the parent class constructor (important!)
-        super().__init__()
+        # Create parameters for the parent class
+        embed_params = {
+            "api_key": api_key,
+            "model": model if model is not None else self.model,
+        }
+
+        # Initialize the parent class
+        super().__init__(**embed_params)

     def _post_embedding(self, texts):
         """Helper method to post data to API and get embeddings."""
@@ -25,10 +34,10 @@ class EuriaiLlamaIndexEmbedding(BaseEmbedding):
         response.raise_for_status()
         return [np.array(obj["embedding"]).tolist() for obj in response.json()["data"]]

-    def get_text_embedding(self, text: str) -> list[float]:
+    def get_text_embedding(self, text: str) -> List[float]:
         """Get embedding for a single text."""
         return self._post_embedding([text])[0]

-    def get_text_embeddings(self, texts: list[str]) -> list[list[float]]:
+    def get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
         """Get embeddings for multiple texts."""
         return self._post_embedding(texts)
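
The embedding wrapper gets the same treatment: api_key, model, and url become declared Pydantic fields, and the constructor forwards its parameters to super().__init__ rather than setting them on self. A minimal sketch of how the class would be used, again assuming it is importable from the euriai package (the diff omits filenames):

# Usage sketch (not part of the diff above); the import path is an assumption.
from euriai import EuriaiLlamaIndexEmbedding

embedder = EuriaiLlamaIndexEmbedding(
    api_key="YOUR_EURI_API_KEY",  # placeholder value
    # model is optional and falls back to "text-embedding-3-small"
)

vector = embedder.get_text_embedding("hello world")      # List[float]
vectors = embedder.get_text_embeddings(["foo", "bar"])   # List[List[float]]
print(len(vector), len(vectors))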
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.26
+Version: 0.3.28
 Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: euron.one
 Author-email: sudhanshu@euron.one
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name="euriai",
-    version="0.3.26",
+    version="0.3.28",
     description="Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
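
Since both LlamaIndex wrappers ship in this release, they can be registered as the default LLM and embedding model of a LlamaIndex application. A short sketch using llama_index.core.Settings; the euriai import paths are assumptions not confirmed by this diff:

# Sketch of wiring both wrappers into LlamaIndex global settings.
# The euriai import paths are assumptions; the diff does not show module filenames.
from llama_index.core import Settings
from euriai import EuriaiLlamaIndexLLM, EuriaiLlamaIndexEmbedding

Settings.llm = EuriaiLlamaIndexLLM(api_key="YOUR_EURI_API_KEY")
Settings.embed_model = EuriaiLlamaIndexEmbedding(api_key="YOUR_EURI_API_KEY")

# Downstream components (indexes, query engines) will then default to the
# EURI-backed LLM and embedding model.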
7 files without changes