euriai 0.3.1__tar.gz → 0.3.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {euriai-0.3.1 → euriai-0.3.3}/PKG-INFO +1 -1
- euriai-0.3.3/euriai/langchain_llm.py +26 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai.egg-info/PKG-INFO +1 -1
- {euriai-0.3.1 → euriai-0.3.3}/setup.py +1 -1
- euriai-0.3.1/euriai/langchain_llm.py +0 -32
- {euriai-0.3.1 → euriai-0.3.3}/README.md +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai/__init__.py +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai/cli.py +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai/client.py +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai.egg-info/SOURCES.txt +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai.egg-info/requires.txt +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/euriai.egg-info/top_level.txt +0 -0
- {euriai-0.3.1 → euriai-0.3.3}/setup.cfg +0 -0
@@ -0,0 +1,26 @@
|
|
1
|
+
from langchain.llms.base import LLM
from typing import Optional, List
from euriai import EuriaiClient


class EuriaiLangChainLLM(LLM):
    """LangChain-compatible LLM wrapper around ``euriai.EuriaiClient``.

    LangChain's ``LLM`` base class is a pydantic model, so the tunable
    settings are declared as pydantic fields below; the raw API client is
    attached via ``object.__setattr__`` to bypass pydantic's field
    validation (it is not a declared field).
    """

    # Pydantic fields accepted by the LLM base class.
    model: str = "gpt-4.1-nano"   # EURI model identifier sent with each request
    temperature: float = 0.7      # sampling temperature for completions
    max_tokens: int = 300         # upper bound on generated tokens

    def __init__(self, api_key: str, model: str = "gpt-4.1-nano",
                 temperature: float = 0.7, max_tokens: int = 300, **kwargs):
        """Create the wrapper.

        Args:
            api_key: EURI API key used to authenticate requests.
            model: Model name forwarded to the EURI API.
            temperature: Sampling temperature for completions.
            max_tokens: Maximum number of tokens per completion.
            **kwargs: Extra fields forwarded to the LangChain ``LLM`` base.
        """
        # Initialize the pydantic model first so field validation runs,
        # then attach the client outside pydantic's __setattr__ machinery.
        super().__init__(model=model, temperature=temperature,
                         max_tokens=max_tokens, **kwargs)
        object.__setattr__(self, "_client", EuriaiClient(api_key=api_key, model=model))

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses for this LLM type."""
        return "euriai"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to the EURI API and return the completion text.

        Args:
            prompt: The prompt string to complete.
            stop: Optional stop sequences forwarded to the API.

        Returns:
            The completion text, or ``""`` when the response carries no
            usable choice. (Previously a present-but-empty ``"choices"``
            list raised ``IndexError``; ``dict.get`` only substitutes its
            default when the key is absent.)
        """
        response = self._client.generate_completion(
            prompt=prompt,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
            stop=stop,
        )
        choices = response.get("choices") or [{}]
        return choices[0].get("message", {}).get("content", "")
|
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
|
|
2
2
|
|
3
3
|
setup(
|
4
4
|
name="euriai",
|
5
|
-
version="0.3.1",
|
5
|
+
version="0.3.3",
|
6
6
|
description="Python client for EURI LLM API (euron.one) with CLI and interactive wizard",
|
7
7
|
long_description=open("README.md", encoding="utf-8").read(),
|
8
8
|
long_description_content_type="text/markdown",
|
@@ -1,32 +0,0 @@
|
|
1
|
-
from langchain_core.language_models.llms import LLM
from typing import Optional, List
from .client import EuriaiClient


class EuriaiLangChainLLM(LLM):
    """
    LangChain-compatible wrapper for euriai.EuriaiClient
    """

    # LangChain's LLM base class is a pydantic model: plain attribute
    # assignment in __init__ (the previous approach, with no declared
    # fields and no super().__init__() call) fails pydantic validation
    # at construction time. Declare the settings as fields instead.
    model: str = "gpt-4.1-nano"   # EURI model identifier
    temperature: float = 0.7      # sampling temperature
    max_tokens: int = 300         # completion-length cap

    def __init__(
        self,
        api_key: str,
        model: str = "gpt-4.1-nano",
        temperature: float = 0.7,
        max_tokens: int = 300,
        **kwargs,
    ):
        """Create the wrapper.

        Args:
            api_key: EURI API key used to authenticate requests.
            model: Model name forwarded to the EURI API.
            temperature: Sampling temperature for completions.
            max_tokens: Maximum number of tokens per completion.
            **kwargs: Extra fields forwarded to the LangChain LLM base.
        """
        # Let pydantic validate the declared fields first.
        super().__init__(model=model, temperature=temperature,
                         max_tokens=max_tokens, **kwargs)
        # Keep the original public attribute name `client`, but attach it
        # via object.__setattr__ since it is not a declared pydantic field.
        object.__setattr__(self, "client", EuriaiClient(api_key=api_key, model=model))

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses for this LLM type."""
        return "euriai"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to the EURI API and return the completion text.

        Args:
            prompt: The prompt string to complete.
            stop: Optional stop sequences forwarded to the API.

        Returns:
            The completion text, or "" when no content is present.
        """
        response = self.client.generate_completion(
            prompt=prompt,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
            stop=stop,
        )
        return response.get("choices", [{}])[0].get("message", {}).get("content", "")
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|