euriai-1.0.0.tar.gz → euriai-1.0.1.tar.gz
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- {euriai-1.0.0 → euriai-1.0.1}/PKG-INFO +1 -1
- {euriai-1.0.0 → euriai-1.0.1}/euriai/__init__.py +4 -1
- {euriai-1.0.0 → euriai-1.0.1}/euriai/crewai.py +19 -1
- {euriai-1.0.0 → euriai-1.0.1}/euriai/langchain.py +24 -21
- {euriai-1.0.0 → euriai-1.0.1}/euriai.egg-info/PKG-INFO +1 -1
- {euriai-1.0.0 → euriai-1.0.1}/setup.cfg +1 -1
- {euriai-1.0.0 → euriai-1.0.1}/setup.py +1 -1
- {euriai-1.0.0 → euriai-1.0.1}/README.md +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/autogen.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/cli.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/client.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/embedding.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/euri_chat.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/euri_embed.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/langgraph.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/llamaindex.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/n8n.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai/smolagents.py +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai.egg-info/SOURCES.txt +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai.egg-info/requires.txt +0 -0
- {euriai-1.0.0 → euriai-1.0.1}/euriai.egg-info/top_level.txt +0 -0
{euriai-1.0.0 → euriai-1.0.1}/euriai/__init__.py

@@ -1,5 +1,5 @@
 from .client import EuriaiClient
-from .langchain import EuriaiChatModel, EuriaiEmbeddings, EuriaiLLM
+from .langchain import EuriaiChatModel, EuriaiEmbeddings, EuriaiLLM, create_chat_model, create_embeddings, create_llm
 from .embedding import EuriaiEmbeddingClient
 from .euri_chat import EuriaiLlamaIndexLLM
 from .euri_embed import EuriaiLlamaIndexEmbedding
@@ -24,4 +24,7 @@ __all__ = [
     "EuriaiChatModel",
     "EuriaiEmbeddings",
     "EuriaiLLM",
+    "create_chat_model",
+    "create_embeddings",
+    "create_llm",
 ]
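Version 1.0.1 re-exports three convenience factories from `euriai.langchain` at the package root. Their signatures are not shown in this diff, so the sketch below is an assumption: it supposes they accept the same keyword arguments as the classes they wrap (`EuriaiChatModel`, `EuriaiEmbeddings`, `EuriaiLLM`).

```python
# Hedged sketch only: create_chat_model / create_embeddings / create_llm are new
# top-level exports, but their signatures are not part of this diff. The keyword
# arguments below are assumptions based on the corresponding class fields.
from euriai import create_chat_model, create_embeddings, create_llm

chat = create_chat_model(api_key="your-euri-api-key", model="gpt-4.1-nano")
embeddings = create_embeddings(api_key="your-euri-api-key", model="text-embedding-3-small")
llm = create_llm(api_key="your-euri-api-key", temperature=0.7)
```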
{euriai-1.0.0 → euriai-1.0.1}/euriai/crewai.py

@@ -6,13 +6,31 @@ from euriai.client import EuriaiClient
 try:
     from crewai import Agent, Crew, Task, Process
     from crewai.llm import LLM
+    CREWAI_AVAILABLE = True
 except ImportError:
-
+    CREWAI_AVAILABLE = False
+    # Fallback base classes for when CrewAI is not available
+    class Agent:
+        pass
+    class Crew:
+        pass
+    class Task:
+        pass
+    class Process:
+        pass
+    class LLM:
+        pass
 
 class EuriaiLLM(LLM):
     """Custom LLM that uses Euri API for CrewAI agents"""
 
     def __init__(self, api_key: str, model: str = "gpt-4.1-nano", temperature: float = 0.7, max_tokens: int = 1000):
+        if not CREWAI_AVAILABLE:
+            raise ImportError(
+                "CrewAI is not installed. Please install with: "
+                "pip install crewai"
+            )
+
         self.client = EuriaiClient(api_key=api_key, model=model)
         self.model = model
         self.temperature = temperature
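This hunk makes CrewAI an optional dependency: the module now imports even when `crewai` is missing (falling back to empty stub classes), and `EuriaiLLM.__init__` fails fast with an install hint. A minimal sketch of that behavior, assuming the rest of the module is unchanged:

```python
# Sketch of the optional-dependency behavior added in 1.0.1 (assumes the rest of
# euriai.crewai is unchanged). Importing the module no longer requires crewai;
# only instantiating EuriaiLLM does.
from euriai.crewai import EuriaiLLM, CREWAI_AVAILABLE

if CREWAI_AVAILABLE:
    llm = EuriaiLLM(api_key="your-euri-api-key", model="gpt-4.1-nano")
else:
    try:
        EuriaiLLM(api_key="your-euri-api-key")
    except ImportError as err:
        print(err)  # "CrewAI is not installed. Please install with: pip install crewai"
```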
{euriai-1.0.0 → euriai-1.0.1}/euriai/langchain.py

@@ -150,7 +150,7 @@ class EuriaiChatModel(BaseChatModel):
     """
 
     # Configuration
-    api_key:
+    api_key: str = Field(description="Euri API key")
     model: str = Field(default="gpt-4.1-nano", description="Model name")
     temperature: float = Field(default=0.7, ge=0.0, le=1.0, description="Sampling temperature")
     max_tokens: int = Field(default=1000, gt=0, description="Maximum tokens to generate")
@@ -177,9 +177,8 @@ class EuriaiChatModel(BaseChatModel):
         super().__init__(**kwargs)
 
         # Initialize client
-        api_key_str = self.api_key.get_secret_value() if hasattr(self.api_key, 'get_secret_value') else str(self.api_key)
         self._client = EuriaiClient(
-            api_key=
+            api_key=self.api_key,
             model=self.model
         )
 
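With `api_key` now declared as a plain `str` field and passed straight to `EuriaiClient`, constructing the chat model looks like the sketch below (assuming `EuriaiChatModel` otherwise follows the standard langchain-core `BaseChatModel` interface):

```python
# Sketch based on the fields shown above; the invoke() call is the standard
# langchain-core chat-model interface and is assumed, not shown in this diff.
from euriai.langchain import EuriaiChatModel

chat = EuriaiChatModel(
    api_key="your-euri-api-key",  # plain str in 1.0.1, forwarded to EuriaiClient as-is
    model="gpt-4.1-nano",
    temperature=0.7,
    max_tokens=1000,
)
# reply = chat.invoke("Hello!")
```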
@@ -517,30 +516,35 @@ class EuriaiEmbeddings(Embeddings):
         embedding = await embeddings.aembed_query("Hello world")
     """
 
-
-
-
-
-
-
-
-    # Internal
-    _client: Optional[EuriaiEmbeddingClient] = None
-    _executor: Optional[ThreadPoolExecutor] = None
-
-    def __init__(self, **kwargs):
+    def __init__(self,
+                 api_key: str,
+                 model: str = "text-embedding-3-small",
+                 chunk_size: int = 1000,
+                 max_retries: int = 3,
+                 request_timeout: int = 60,
+                 **kwargs):
         if not LANGCHAIN_AVAILABLE:
             raise ImportError(
                 "LangChain is not installed. Please install with: "
                 "pip install langchain-core"
             )
 
-        super().__init__(
+        super().__init__()
+
+        # Initialize configuration
+        self.api_key = api_key
+        self.model = model
+        self.chunk_size = chunk_size
+        self.max_retries = max_retries
+        self.request_timeout = request_timeout
+
+        # Internal
+        self._client: Optional[EuriaiEmbeddingClient] = None
+        self._executor: Optional[ThreadPoolExecutor] = None
 
         # Initialize client
-        api_key_str = self.api_key.get_secret_value() if hasattr(self.api_key, 'get_secret_value') else str(self.api_key)
         self._client = EuriaiEmbeddingClient(
-            api_key=
+            api_key=self.api_key,
             model=self.model
         )
 
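In 1.0.1, `EuriaiEmbeddings` replaces its class-level configuration with an explicit `__init__`. A usage sketch with the new keyword arguments; the `embed_query`/`aembed_query` calls are the standard LangChain `Embeddings` interface, as in the docstring above:

```python
# Constructor arguments taken from the diff above; the embed_* methods are the
# standard LangChain Embeddings interface and are assumed unchanged.
from euriai.langchain import EuriaiEmbeddings

embeddings = EuriaiEmbeddings(
    api_key="your-euri-api-key",
    model="text-embedding-3-small",
    chunk_size=1000,
    max_retries=3,
    request_timeout=60,
)
# vector = embeddings.embed_query("Hello world")           # sync
# vector = await embeddings.aembed_query("Hello world")    # async, as in the docstring
```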
@@ -609,7 +613,7 @@ class EuriaiLLM(LLM):
     """
 
     # Configuration
-    api_key:
+    api_key: str = Field(description="Euri API key")
     model: str = Field(default="gpt-4.1-nano", description="Model name")
     temperature: float = Field(default=0.7, ge=0.0, le=1.0, description="Sampling temperature")
     max_tokens: int = Field(default=1000, gt=0, description="Maximum tokens to generate")
@@ -631,9 +635,8 @@ class EuriaiLLM(LLM):
         super().__init__(**kwargs)
 
         # Initialize client
-        api_key_str = self.api_key.get_secret_value() if hasattr(self.api_key, 'get_secret_value') else str(self.api_key)
         self._client = EuriaiClient(
-            api_key=
+            api_key=self.api_key,
             model=self.model
         )
 
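The string-completion wrapper `EuriaiLLM` in `euriai.langchain` gets the same two fixes as `EuriaiChatModel`: a concrete `api_key: str` field and a direct pass-through to `EuriaiClient`. A brief sketch, assuming it otherwise follows the standard langchain-core `LLM` interface:

```python
# Sketch only: field names come from the diff above; invoke() is the standard
# langchain-core LLM interface and is assumed here.
from euriai.langchain import EuriaiLLM

llm = EuriaiLLM(api_key="your-euri-api-key", model="gpt-4.1-nano", max_tokens=1000)
# text = llm.invoke("Summarize the Euri API in one sentence.")
```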
{euriai-1.0.0 → euriai-1.0.1}/setup.cfg

@@ -1,6 +1,6 @@
 [metadata]
 name = euriai
-version = 1.0.0
+version = 1.0.1
 license = MIT
 description = Python client for EURI LLM API (euron.one) with CLI, LangChain, LlamaIndex, LangGraph, SmolAgents, and n8n integration
 keywords = euriai, llm, langchain, llamaindex, langgraph, smolagents, n8n, agents, ai, sdk
{euriai-1.0.0 → euriai-1.0.1}/setup.py

@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="euriai",
-    version="1.0.0",
+    version="1.0.1",
     description="Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
The remaining files listed above are unchanged between 1.0.0 and 1.0.1.