euriai 0.3.0__tar.gz → 0.3.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {euriai-0.3.0 → euriai-0.3.2}/PKG-INFO +1 -1
- euriai-0.3.2/euriai/langchain_llm.py +29 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai.egg-info/PKG-INFO +1 -1
- {euriai-0.3.0 → euriai-0.3.2}/euriai.egg-info/SOURCES.txt +1 -0
- {euriai-0.3.0 → euriai-0.3.2}/setup.py +1 -1
- {euriai-0.3.0 → euriai-0.3.2}/README.md +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai/__init__.py +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai/cli.py +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai/client.py +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai.egg-info/requires.txt +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/euriai.egg-info/top_level.txt +0 -0
- {euriai-0.3.0 → euriai-0.3.2}/setup.cfg +0 -0
euriai-0.3.2/euriai/langchain_llm.py
@@ -0,0 +1,29 @@
+from langchain_core.language_models.llms import LLM
+from typing import Optional, List
+from pydantic import PrivateAttr
+from .client import EuriaiClient
+
+
+class EuriaiLangChainLLM(LLM):
+    model: str = "gpt-4.1-nano"
+    temperature: float = 0.7
+    max_tokens: int = 300
+
+    _client: EuriaiClient = PrivateAttr()
+
+    def __init__(self, api_key: str, model: str = "gpt-4.1-nano", temperature: float = 0.7, max_tokens: int = 300):
+        super().__init__(model=model, temperature=temperature, max_tokens=max_tokens)
+        self._client = EuriaiClient(api_key=api_key, model=model)
+
+    @property
+    def _llm_type(self) -> str:
+        return "euriai"
+
+    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
+        response = self._client.generate_completion(
+            prompt=prompt,
+            temperature=self.temperature,
+            max_tokens=self.max_tokens,
+            stop=stop
+        )
+        return response.get("choices", [{}])[0].get("message", {}).get("content", "")
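The new `EuriaiLangChainLLM` class wraps `EuriaiClient` behind LangChain's `LLM` interface. A minimal usage sketch, not part of the diff: the API key and prompt are placeholders, and it assumes `langchain-core` and this package version are installed.

```python
from euriai.langchain_llm import EuriaiLangChainLLM

# Placeholder key; the constructor forwards it to EuriaiClient.
llm = EuriaiLangChainLLM(
    api_key="YOUR_EURI_API_KEY",
    model="gpt-4.1-nano",
    temperature=0.7,
    max_tokens=300,
)

# LLM subclasses expose LangChain's standard Runnable interface,
# so invoke() routes through the _call() method defined above.
print(llm.invoke("Summarize what the euriai package does in one sentence."))
```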
{euriai-0.3.0 → euriai-0.3.2}/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="euriai",
-    version="0.3.0",
+    version="0.3.2",
     description="Python client for EURI LLM API (euron.one) with CLI and interactive wizard",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",