euriai 0.3.29__py3-none-any.whl → 0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
euriai/__init__.py CHANGED
@@ -4,6 +4,12 @@ from .embedding import EuriaiEmbeddingClient
  from .langchain_embed import EuriaiEmbeddings
  from .euri_chat import EuriaiLlamaIndexLLM
  from .euri_embed import EuriaiLlamaIndexEmbedding
+ from .euri_crewai import EuriaiCrewAI
+ from .euri_autogen import EuriaiAutoGen
+ from .euri_llamaindex import EuriaiLlamaIndex
+ from .euri_langgraph import EuriaiLangGraph
+ from .euri_smolagents import EuriaiSmolAgent
+ from .euri_n8n import EuriaiN8N

  __all__ = [
      "EuriaiClient",
@@ -11,5 +17,11 @@ __all__ = [
      "EuriaiEmbeddingClient",
      "EuriaiEmbeddings",
      "EuriaiLlamaIndexLLM",
-     "EuriaiLlamaIndexEmbedding"
+     "EuriaiLlamaIndexEmbedding",
+     "EuriaiCrewAI",
+     "EuriaiAutoGen",
+     "EuriaiLlamaIndex",
+     "EuriaiLangGraph",
+     "EuriaiSmolAgent",
+     "EuriaiN8N",
  ]
euriai/client.py CHANGED
@@ -6,14 +6,14 @@ class EuriaiClient:
          self,
          api_key: str,
          model: str = "gpt-4.1-nano",
-         endpoint: str = "https://api.euron.one/api/v1/euri/alpha/chat/completions"
+         endpoint: str = "https://api.euron.one/api/v1/euri/chat/completions"
      ):
          """
          Initializes the EuriaiClient.

          Args:
              api_key (str): Your EURI API key.
-             model (str, optional): Model ID to use (e.g., 'gpt-4.1-nano', 'gemini-2.0-flash-001').
+             model (str, optional): Model ID to use (e.g., 'gpt-4.1-nano', 'gemini-2.5-flash').
              endpoint (str, optional): API endpoint URL.
          """
          self.api_key = api_key
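
The only functional changes here are the default `endpoint`, which drops the `/alpha/` path segment, and the model example in the docstring. A minimal sketch of a caller that picks up the new default (the API key and prompt are placeholders; `generate_completion` is the method shown in the package README further down):

```python
from euriai import EuriaiClient

# The endpoint argument can now be omitted; it defaults to
# https://api.euron.one/api/v1/euri/chat/completions (no /alpha/ segment).
client = EuriaiClient(
    api_key="your_api_key_here",  # placeholder
    model="gpt-4.1-nano",
)

response = client.generate_completion(
    prompt="Summarize the 0.4 release in one sentence.",
    temperature=0.7,
    max_tokens=100,
)
print(response)
```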
euriai/embedding.py CHANGED
@@ -5,7 +5,7 @@ class EuriaiEmbeddingClient:
      def __init__(self, api_key: str, model: str = "text-embedding-3-small"):
          self.api_key = api_key
          self.model = model
-         self.url = "https://api.euron.one/api/v1/euri/alpha/embeddings"
+         self.url = "https://api.euron.one/api/v1/euri/embeddings"

      def embed(self, text: str) -> np.ndarray:
          headers = {
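
Here too, only the request URL loses its `/alpha/` segment; `embed()` still returns an `np.ndarray`. A small illustrative sketch (placeholder key) of why that return type is convenient:

```python
import numpy as np
from euriai.embedding import EuriaiEmbeddingClient

client = EuriaiEmbeddingClient(api_key="your_key")  # placeholder

# embed() returns a numpy array, so vector math works directly.
a = client.embed("machine learning")
b = client.embed("deep learning")
cosine = float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))
print(f"cosine similarity: {cosine:.3f}")
```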
euriai/euri_autogen.py ADDED
@@ -0,0 +1,74 @@
+ from typing import Optional, Dict, Any, List
+
+ try:
+     import autogen
+ except ImportError:
+     autogen = None
+
+ class EuriaiAutoGen:
+     """
+     Full-featured wrapper for AutoGen integration in the EURI SDK.
+     Allows programmatic agent, tool, and workflow management, and chat execution.
+     """
+     def __init__(self, config: Optional[Dict[str, Any]] = None):
+         """
+         Initialize the AutoGen wrapper.
+         Args:
+             config: Dict of config options (API keys, model, etc.)
+         """
+         if autogen is None:
+             raise ImportError("AutoGen is not installed. Please install with `pip install pyautogen`.")
+         self.config = config or {}
+         self.agents: List[Any] = []
+         self.tools: List[Any] = []
+         self.memory: Optional[Any] = None
+         self.workflow: Optional[Any] = None
+         self.history: List[Dict[str, Any]] = []
+
+     def add_agent(self, agent_config: Dict[str, Any]) -> Any:
+         """Add an agent with config."""
+         agent = autogen.Agent(**agent_config)
+         self.agents.append(agent)
+         return agent
+
+     def add_tool(self, tool_config: Dict[str, Any]) -> Any:
+         """Add a tool with config."""
+         tool = autogen.Tool(**tool_config)
+         self.tools.append(tool)
+         return tool
+
+     def set_memory(self, memory_config: Dict[str, Any]) -> None:
+         """Set memory for the workflow."""
+         self.memory = autogen.Memory(**memory_config)
+
+     def run_chat(self, prompt: str, agent_idx: int = 0, **kwargs) -> str:
+         """
+         Run a chat with the specified agent and prompt.
+         Returns the agent's response.
+         """
+         if not self.agents:
+             raise ValueError("No agents defined. Use add_agent().")
+         agent = self.agents[agent_idx]
+         response = agent.chat(prompt, **kwargs)
+         self.history.append({"agent": agent, "prompt": prompt, "response": response})
+         return response
+
+     def run_workflow(self, workflow_config: Dict[str, Any], **kwargs) -> Any:
+         """
+         Run a custom workflow (advanced usage).
+         """
+         workflow = autogen.Workflow(**workflow_config)
+         self.workflow = workflow
+         result = workflow.run(**kwargs)
+         return result
+
+     def get_history(self) -> List[Dict[str, Any]]:
+         return self.history
+
+     def reset(self):
+         """Reset agents, tools, memory, and history."""
+         self.agents = []
+         self.tools = []
+         self.memory = None
+         self.workflow = None
+         self.history = []
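
A usage sketch of the wrapper's surface as defined above. The agent config mirrors the AutoGen example in the package README further down; the accepted keys are ultimately decided by the installed `pyautogen` version, since `add_agent()` forwards them unchanged to `autogen.Agent`:

```python
from euriai import EuriaiAutoGen

wrapper = EuriaiAutoGen()

# Hypothetical agent config; keys are passed straight through to autogen.Agent.
agent = wrapper.add_agent({
    "name": "assistant",
    "llm_config": {"api_key": "YOUR_OPENAI_KEY", "model": "gpt-4o"},  # placeholders
})

reply = wrapper.run_chat("Draft a one-line release note for euriai 0.4.")
print(reply)
print(wrapper.get_history())  # [{"agent": ..., "prompt": ..., "response": ...}]
```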
euriai/euri_chat.py CHANGED
@@ -10,7 +10,7 @@ class EuriaiLlamaIndexLLM(LLM):
      model: str = "gpt-4.1-nano"
      temperature: float = 0.7
      max_tokens: int = 1000
-     url: str = "https://api.euron.one/api/v1/euri/alpha/chat/completions"
+     url: str = "https://api.euron.one/api/v1/euri/chat/completions"

      def __init__(
          self,
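
Same endpoint cleanup for the LlamaIndex LLM wrapper. A short sketch, assuming the constructor accepts `api_key`/`model` keyword arguments like the other wrappers in this package (`complete()` is the standard llama_index `LLM` entry point the class subclasses):

```python
from euriai import EuriaiLlamaIndexLLM

llm = EuriaiLlamaIndexLLM(api_key="your_api_key", model="gpt-4.1-nano")  # placeholder key

# complete() comes from the llama_index LLM interface.
print(llm.complete("Name one use case for a chat completions API."))
```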
euriai/euri_crewai.py ADDED
@@ -0,0 +1,92 @@
+ import os
+ from typing import Optional, Dict, Any, List, Union
+
+ # CrewAI imports (user must install crewai)
+ try:
+     from crewai import Agent, Crew, Task, Process
+ except ImportError:
+     Agent = Crew = Task = Process = None
+
+ class EuriaiCrewAI:
+     """
+     Full-featured wrapper for CrewAI integration in the EURI SDK.
+     Allows programmatic and config-based crew creation, agent/task management, and workflow execution.
+     """
+     def __init__(self, agents: Optional[Dict[str, Any]] = None, tasks: Optional[Dict[str, Any]] = None, process: str = "sequential", verbose: bool = True):
+         """
+         Initialize the CrewAI wrapper.
+         Args:
+             agents: Dict of agent configs or Agent objects.
+             tasks: Dict of task configs or Task objects.
+             process: 'sequential' or 'parallel'.
+             verbose: Print detailed logs.
+         """
+         if Agent is None:
+             raise ImportError("CrewAI is not installed. Please install with `pip install crewai`.")
+         self.agents_config = agents or {}
+         self.tasks_config = tasks or {}
+         self.process = Process.sequential if process == "sequential" else Process.parallel
+         self.verbose = verbose
+         self._agents: List[Agent] = []
+         self._tasks: List[Task] = []
+         self._crew: Optional[Crew] = None
+
+     def add_agent(self, name: str, config: Dict[str, Any]) -> None:
+         """Add an agent by config."""
+         agent = Agent(**config)
+         self._agents.append(agent)
+         self.agents_config[name] = config
+
+     def add_task(self, name: str, config: Dict[str, Any]) -> None:
+         """Add a task by config."""
+         task = Task(**config)
+         self._tasks.append(task)
+         self.tasks_config[name] = config
+
+     def build_crew(self) -> Crew:
+         """Build the Crew object from current agents and tasks."""
+         if not self._agents:
+             self._agents = [Agent(**cfg) for cfg in self.agents_config.values()]
+         if not self._tasks:
+             self._tasks = [Task(**cfg) for cfg in self.tasks_config.values()]
+         self._crew = Crew(agents=self._agents, tasks=self._tasks, process=self.process, verbose=self.verbose)
+         return self._crew
+
+     def run(self, inputs: Optional[Dict[str, Any]] = None) -> Any:
+         """
+         Run the crew workflow. Optionally pass input variables for tasks.
+         Returns the final result or report.
+         """
+         if self._crew is None:
+             self.build_crew()
+         return self._crew.kickoff(inputs=inputs or {})
+
+     @classmethod
+     def from_yaml(cls, agents_yaml: str, tasks_yaml: str, process: str = "sequential", verbose: bool = True):
+         """
+         Create a CrewAI wrapper from YAML config files.
+         Args:
+             agents_yaml: Path to agents.yaml
+             tasks_yaml: Path to tasks.yaml
+         """
+         import yaml
+         with open(agents_yaml, "r") as f:
+             agents = yaml.safe_load(f)
+         with open(tasks_yaml, "r") as f:
+             tasks = yaml.safe_load(f)
+         return cls(agents=agents, tasks=tasks, process=process, verbose=verbose)
+
+     def get_agents(self) -> List[Agent]:
+         return self._agents
+
+     def get_tasks(self) -> List[Task]:
+         return self._tasks
+
+     def get_crew(self) -> Optional[Crew]:
+         return self._crew
+
+     def reset(self):
+         """Reset agents, tasks, and crew."""
+         self._agents = []
+         self._tasks = []
+         self._crew = None
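
`from_yaml()` simply `yaml.safe_load()`s both files and forwards every entry to `Agent(**cfg)` / `Task(**cfg)`, so the accepted keys are whatever the installed CrewAI version supports. An illustrative sketch under that assumption, with config fields mirroring the programmatic example in the package README further down:

```python
from pathlib import Path
from euriai import EuriaiCrewAI

# Hypothetical config files; the field names are not defined by this package,
# they are passed verbatim to crewai's Agent(**cfg) and Task(**cfg).
Path("agents.yaml").write_text(
    "researcher:\n"
    "  role: Researcher\n"
    "  goal: 'Find information about {topic}'\n"
    "  llm: openai/gpt-4o\n"
)
Path("tasks.yaml").write_text(
    "research_task:\n"
    "  description: 'Research the topic {topic}'\n"
    "  agent: researcher\n"
)

crew = EuriaiCrewAI.from_yaml("agents.yaml", "tasks.yaml")
print(crew.run(inputs={"topic": "AI in Healthcare"}))
```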
euriai/euri_embed.py CHANGED
@@ -1,13 +1,13 @@
  import requests
  import numpy as np
- from typing import List, Optional
+ from typing import List, Optional, Callable, Any
  from llama_index.core.embeddings import BaseEmbedding

  class EuriaiLlamaIndexEmbedding(BaseEmbedding):
      # Define class attributes as expected by Pydantic
      api_key: str
      model: str = "text-embedding-3-small"
-     url: str = "https://api.euron.one/api/v1/euri/alpha/embeddings"
+     url: str = "https://api.euron.one/api/v1/euri/embeddings"

      def __init__(self, api_key: str, model: Optional[str] = None):
          """Initialize embedding model with API key and model name."""
@@ -38,6 +38,15 @@ class EuriaiLlamaIndexEmbedding(BaseEmbedding):
          """Get embedding for a single text."""
          return self._post_embedding([text])[0]

+     def get_query_embedding(self, query: str) -> List[float]:
+         """Get embedding for a query string."""
+         return self.get_text_embedding(query)
+
      def get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
          """Get embeddings for multiple texts."""
-         return self._post_embedding(texts)
+         return self._post_embedding(texts)
+
+     async def aget_query_embedding(self, query: str) -> List[float]:
+         """Async version of get_query_embedding."""
+         # We don't support async, so raise NotImplementedError
+         raise NotImplementedError("Async embeddings not supported")
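
Besides the URL change, this adds the query-embedding hooks LlamaIndex expects. A brief sketch of the new surface (placeholder key); note that only the synchronous path is supported:

```python
from euriai.euri_embed import EuriaiLlamaIndexEmbedding

embed_model = EuriaiLlamaIndexEmbedding(api_key="your_api_key")  # placeholder

# get_query_embedding() reuses the same text-embedding call.
vec = embed_model.get_query_embedding("What is EURI?")
print(len(vec))

# The async variant deliberately raises, so stay on the synchronous API:
# await embed_model.aget_query_embedding("...")  # would raise NotImplementedError
```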
euriai/euri_langgraph.py ADDED
@@ -0,0 +1,64 @@
+ from typing import Any, Callable, Dict, List, Optional, Union
+
+ try:
+     from langgraph.graph import StateGraph
+ except ImportError:
+     StateGraph = None
+
+ class EuriaiLangGraph:
+     """
+     Full-featured wrapper for LangGraph integration in the EURI SDK.
+     Allows programmatic graph construction, node/edge management, and workflow execution.
+     """
+     def __init__(self, name: str = "EuriaiLangGraph", state: Optional[Dict[str, Any]] = None):
+         """
+         Initialize the LangGraph wrapper.
+         Args:
+             name: Name of the graph.
+             state: Initial state dictionary.
+         """
+         if StateGraph is None:
+             raise ImportError("LangGraph is not installed. Please install with `pip install langgraph`.")
+         self.name = name
+         self.state = state or {}
+         self.graph = StateGraph()
+         self.nodes: Dict[str, Callable] = {}
+         self.edges: List[tuple] = []
+
+     def add_node(self, node_name: str, node_fn: Callable) -> None:
+         """Add a node to the graph."""
+         self.graph.add_node(node_name, node_fn)
+         self.nodes[node_name] = node_fn
+
+     def add_edge(self, from_node: str, to_node: str) -> None:
+         """Add an edge between two nodes."""
+         self.graph.add_edge(from_node, to_node)
+         self.edges.append((from_node, to_node))
+
+     def set_state(self, state: Dict[str, Any]) -> None:
+         """Set the initial state for the graph execution."""
+         self.state = state
+
+     def run(self, input_state: Optional[Dict[str, Any]] = None, **kwargs) -> Any:
+         """
+         Run the graph workflow with the given state.
+         Returns the final state/output.
+         """
+         state = input_state or self.state
+         return self.graph.run(state, **kwargs)
+
+     def get_nodes(self) -> Dict[str, Callable]:
+         return self.nodes
+
+     def get_edges(self) -> List[tuple]:
+         return self.edges
+
+     def get_graph(self) -> Any:
+         return self.graph
+
+     def reset(self):
+         """Reset the graph, nodes, and edges."""
+         self.graph = StateGraph()
+         self.nodes = {}
+         self.edges = []
+         self.state = {}
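
A sketch of the wrapper's own surface (`add_node`, `add_edge`, `set_state`, `run`). Whether it executes depends on the installed LangGraph version, since the wrapper assumes `StateGraph()` can be constructed without a schema and exposes a `run()` method:

```python
from euriai import EuriaiLangGraph

def shout(state):
    # Toy node: uppercase the text carried in the state dict.
    state["text"] = state.get("text", "").upper()
    return state

graph = EuriaiLangGraph()
graph.add_node("shout", shout)

# run() prefers an explicitly passed state over the one stored via set_state().
graph.set_state({"text": "default input"})
print(graph.run({"text": "hello from euriai"}))
```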
euriai/euri_llamaindex.py ADDED
@@ -0,0 +1,58 @@
+ from typing import Optional, List, Any, Dict, Union
+
+ try:
+     from llama_index.core import VectorStoreIndex, ServiceContext
+     from llama_index.core.llms import LLM
+     from llama_index.core.schema import Document
+ except ImportError:
+     VectorStoreIndex = ServiceContext = LLM = Document = None
+
+ class EuriaiLlamaIndex:
+     """
+     Full-featured wrapper for LlamaIndex integration in the EURI SDK.
+     Allows document ingestion, index building, and querying with advanced config.
+     """
+     def __init__(self, llm: Optional[Any] = None, service_context: Optional[Any] = None):
+         """
+         Initialize the LlamaIndex wrapper.
+         Args:
+             llm: LLM object (optional)
+             service_context: ServiceContext object (optional)
+         """
+         if VectorStoreIndex is None:
+             raise ImportError("LlamaIndex is not installed. Please install with `pip install llama_index`.")
+         self.llm = llm
+         self.service_context = service_context or (ServiceContext.from_defaults(llm=llm) if llm else ServiceContext.from_defaults())
+         self.index: Optional[Any] = None
+         self.documents: List[Any] = []
+
+     def add_documents(self, docs: List[Union[str, Dict[str, Any]]]) -> None:
+         """Add documents (as strings or dicts) to the index."""
+         for doc in docs:
+             if isinstance(doc, str):
+                 self.documents.append(Document(text=doc))
+             elif isinstance(doc, dict):
+                 self.documents.append(Document(**doc))
+             else:
+                 raise ValueError("Document must be str or dict.")
+
+     def build_index(self) -> None:
+         """Build the vector index from current documents."""
+         self.index = VectorStoreIndex.from_documents(self.documents)
+
+     def query(self, query: str, **kwargs) -> Any:
+         """
+         Query the index with a string. Returns the response object.
+         """
+         if self.index is None:
+             self.build_index()
+         query_engine = self.index.as_query_engine(service_context=self.service_context)
+         return query_engine.query(query, **kwargs)
+
+     def get_index(self) -> Optional[Any]:
+         return self.index
+
+     def reset(self):
+         """Reset documents and index."""
+         self.documents = []
+         self.index = None
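
One detail worth showing beyond the README example: `add_documents()` also accepts dicts, which are forwarded to llama_index's `Document(**doc)`, so metadata can ride along. A sketch, assuming a llama-index release that still ships `ServiceContext` as the wrapper requires:

```python
from euriai import EuriaiLlamaIndex

llama = EuriaiLlamaIndex()
llama.add_documents([
    "Abraham Lincoln was the 16th President of the United States.",
    {"text": "He led the country during the American Civil War.",
     "metadata": {"source": "history-notes"}},  # dict form -> Document(**doc)
])
llama.build_index()
print(llama.query("Who was Abraham Lincoln?"))
```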
euriai/euri_n8n.py ADDED
@@ -0,0 +1,30 @@
+ import requests
+ from typing import Any, Dict, Optional
+
+ class EuriaiN8N:
+     """
+     Wrapper for n8n workflow automation integration in the EURI SDK.
+     Allows triggering n8n workflows and exchanging data via REST API.
+     """
+     def __init__(self, base_url: str, api_key: Optional[str] = None):
+         """
+         Initialize the n8n wrapper.
+         Args:
+             base_url: Base URL of the n8n instance (e.g., http://localhost:5678 or cloud URL)
+             api_key: Optional API key for authentication
+         """
+         self.base_url = base_url.rstrip('/')
+         self.api_key = api_key
+
+     def trigger_workflow(self, workflow_id: str, data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+         """
+         Trigger an n8n workflow by ID, optionally passing data.
+         Returns the workflow execution response.
+         """
+         url = f"{self.base_url}/webhook/{workflow_id}"
+         headers = {}
+         if self.api_key:
+             headers["Authorization"] = f"Bearer {self.api_key}"
+         response = requests.post(url, json=data or {}, headers=headers)
+         response.raise_for_status()
+         return response.json()
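
Because `trigger_workflow()` calls `raise_for_status()`, a failing webhook surfaces as a `requests.HTTPError` rather than a confusing payload. A small sketch of handling that (the URL and webhook ID are placeholders):

```python
import requests
from euriai import EuriaiN8N

n8n = EuriaiN8N(base_url="http://localhost:5678")  # local instance, no auth

try:
    result = n8n.trigger_workflow("your-workflow-webhook-id", {"message": "ping"})
    print(result)
except requests.HTTPError as err:
    # Non-2xx responses from the n8n webhook land here.
    print(f"n8n webhook call failed: {err}")
```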
euriai/euri_smolagents.py ADDED
@@ -0,0 +1,44 @@
+ from typing import Any, Callable, Dict, List, Optional
+
+ try:
+     from smolagents import CodeAgent, HfApiModel, tool
+ except ImportError:
+     CodeAgent = HfApiModel = tool = None
+
+ class EuriaiSmolAgent:
+     """
+     Full-featured wrapper for SmolAgents integration in the EURI SDK.
+     Allows agent creation, tool integration, and task execution.
+     """
+     def __init__(self, model: Optional[Any] = None, tools: Optional[List[Callable]] = None):
+         """
+         Initialize the SmolAgent wrapper.
+         Args:
+             model: LLM model (default: HfApiModel())
+             tools: List of tool functions (decorated with @tool)
+         """
+         if CodeAgent is None:
+             raise ImportError("SmolAgents is not installed. Please install with `pip install smolagents`.")
+         self.model = model or HfApiModel()
+         self.tools = tools or []
+         self.agent = CodeAgent(tools=self.tools, model=self.model)
+
+     def add_tool(self, tool_fn: Callable) -> None:
+         """Add a tool to the agent."""
+         self.tools.append(tool_fn)
+         self.agent = CodeAgent(tools=self.tools, model=self.model)
+
+     def run(self, prompt: str, **kwargs) -> Any:
+         """
+         Run the agent on a prompt/task.
+         Returns the agent's response.
+         """
+         return self.agent.run(prompt, **kwargs)
+
+     def get_agent(self) -> Any:
+         return self.agent
+
+     def reset(self):
+         """Reset the agent and tools."""
+         self.tools = []
+         self.agent = CodeAgent(tools=self.tools, model=self.model)
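
Tools can also be attached after construction: `add_tool()` rebuilds the underlying `CodeAgent` with the updated tool list. A sketch (requires `smolagents`; the default `HfApiModel()` may need a Hugging Face token to actually run):

```python
from euriai import EuriaiSmolAgent
from smolagents import tool

@tool
def word_count(text: str) -> int:
    """Count the words in a piece of text.

    Args:
        text: The text whose words should be counted.
    """
    return len(text.split())

agent = EuriaiSmolAgent()
agent.add_tool(word_count)  # rebuilds the CodeAgent with the new tool
print(agent.run("How many words are in 'euriai ships new integrations'?"))
```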
euriai/langchain_embed.py CHANGED
@@ -1,4 +1,7 @@
- from langchain_core.embeddings import Embeddings
+ try:
+     from langchain_core.embeddings import Embeddings
+ except ImportError:
+     raise ImportError("LangChain is not installed. Please install with 'pip install euriai[langchain]' or 'pip install langchain'.")
  from typing import List
  from euriai.embedding import EuriaiEmbeddingClient

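The import guard now points users at the `euriai[langchain]` extra instead of failing with a bare `ModuleNotFoundError`. Usage is unchanged; a sketch that also exercises `embed_documents()`, which is assumed here because the class implements LangChain's `Embeddings` interface (only `embed_query()` is shown in the README):

```python
from euriai.langchain_embed import EuriaiEmbeddings

embeddings = EuriaiEmbeddings(api_key="your_key")  # placeholder

docs = ["EURI exposes chat completions.", "EURI exposes embeddings."]
vectors = embeddings.embed_documents(docs)  # assumed from the Embeddings interface
print(len(vectors), len(vectors[0]))
```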
euriai/langchain_llm.py CHANGED
@@ -1,4 +1,7 @@
- from langchain.llms.base import LLM
+ try:
+     from langchain.llms.base import LLM
+ except ImportError:
+     raise ImportError("LangChain is not installed. Please install with 'pip install euriai[langchain]' or 'pip install langchain'.")
  from typing import Optional, List
  from euriai import EuriaiClient

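Same import-guard treatment for the LangChain LLM wrapper. Since `EuriaiLangChainLLM` subclasses LangChain's `LLM` base (a Runnable), it composes with prompt templates; a sketch assuming a recent `langchain-core`:

```python
from langchain_core.prompts import PromptTemplate
from euriai import EuriaiLangChainLLM

llm = EuriaiLangChainLLM(api_key="your_api_key", model="gpt-4.1-nano")  # placeholder key

prompt = PromptTemplate.from_template("Give one fun fact about {topic}.")
chain = prompt | llm  # runnable composition
print(chain.invoke({"topic": "time travel"}))
```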
euriai-0.4.dist-info/METADATA ADDED
@@ -0,0 +1,282 @@
+ Metadata-Version: 2.4
+ Name: euriai
+ Version: 0.4
+ Summary: Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration
+ Author: Euri
+ Author-email: tech@euron.one
+ License: MIT
+ Keywords: euriai,llm,langchain,llamaindex,langgraph,smolagents,n8n,agents,ai,sdk
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Operating System :: OS Independent
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Intended Audience :: Developers
+ Requires-Python: >=3.6
+ Description-Content-Type: text/markdown
+ Requires-Dist: requests
+ Requires-Dist: numpy
+ Requires-Dist: pyyaml
+ Provides-Extra: langchain-core
+ Requires-Dist: langchain-core; extra == "langchain-core"
+ Provides-Extra: langchain
+ Requires-Dist: langchain; extra == "langchain"
+ Provides-Extra: llama-index
+ Requires-Dist: llama-index>=0.10.0; extra == "llama-index"
+ Provides-Extra: langgraph
+ Requires-Dist: langgraph; extra == "langgraph"
+ Provides-Extra: smolagents
+ Requires-Dist: smolagents; extra == "smolagents"
+ Provides-Extra: n8n
+ Requires-Dist: requests; extra == "n8n"
+ Provides-Extra: crewai
+ Requires-Dist: crewai; extra == "crewai"
+ Provides-Extra: autogen
+ Requires-Dist: pyautogen; extra == "autogen"
+ Provides-Extra: test
+ Requires-Dist: pytest; extra == "test"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: license
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ # euriai 🧠
+
+ **EURI AI Python Client** – A simple wrapper and CLI tool for the [Euri API](https://euron.one/euri). Supports completions, streaming responses, embeddings, CLI interaction, and an interactive guided wizard!
+
+ ---
+
+ ## 🔧 Installation
+
+ ```bash
+ pip install euriai
+ ```
+
+ ## 🚀 Python Usage
+
+ ### Text Generation
+
+ ```python
+ from euriai import EuriaiClient
+
+ client = EuriaiClient(
+     api_key="your_api_key_here",
+     model="gpt-4.1-nano" # You can also try: "gemini-2.0-flash-001", "llama-4-maverick", etc.
+ )
+
+ response = client.generate_completion(
+     prompt="Write a short poem about artificial intelligence.",
+     temperature=0.7,
+     max_tokens=300
+ )
+
+ print(response)
+ ```
+
+ ### Embeddings
+
+ ```python
+ from euriai.embedding import EuriaiEmbeddingClient
+
+ client = EuriaiEmbeddingClient(api_key="your_key")
+ embedding = client.embed("Hello world")
+ print(embedding[:5]) # Print first 5 dimensions of the embedding vector
+ ```
+
+ ## 💻 Command-Line Interface (CLI) Usage
+
+ Run prompts directly from the terminal:
+
+ ```bash
+ euriai --api_key YOUR_API_KEY --prompt "Tell me a joke"
+ ```
+
+ Enable streaming output (if supported by the model):
+
+ ```bash
+ euriai --api_key YOUR_API_KEY --prompt "Stream a fun fact" --stream
+ ```
+
+ List all supported model IDs with recommended use-cases and temperature/token advice:
+
+ ```bash
+ euriai --models
+ ```
+
+ ## 🤖 LangChain Integration
+
+ ### Text Generation
+
+ Use Euriai with LangChain directly:
+
+ ```python
+ from euriai import EuriaiLangChainLLM
+
+ llm = EuriaiLangChainLLM(
+     api_key="your_api_key",
+     model="gpt-4.1-nano",
+     temperature=0.7,
+     max_tokens=300
+ )
+
+ print(llm.invoke("Write a poem about time travel."))
+ ```
+
+ ### Embeddings
+
+ Use Euriai embeddings with LangChain:
+
+ ```python
+ from euriai.langchain_embed import EuriaiEmbeddings
+
+ embedding_model = EuriaiEmbeddings(api_key="your_key")
+ print(embedding_model.embed_query("What's AI?")[:5]) # Print first 5 dimensions
+ ```
+
+ ## Usage Examples
+
+ ### CrewAI Integration
+ ```python
+ from euriai import EuriaiCrewAI
+
+ # Example: Create a crew from YAML config files
+ crew = EuriaiCrewAI.from_yaml('agents.yaml', 'tasks.yaml')
+ result = crew.run(inputs={"topic": "AI in Healthcare"})
+ print(result)
+
+ # Or programmatically
+ crew = EuriaiCrewAI()
+ crew.add_agent("researcher", {
+     "role": "Researcher",
+     "goal": "Find information about {topic}",
+     "llm": "openai/gpt-4o"
+ })
+ crew.add_task("research_task", {
+     "description": "Research the topic {topic}",
+     "agent": "researcher"
+ })
+ crew.build_crew()
+ result = crew.run(inputs={"topic": "AI in Healthcare"})
+ print(result)
+ ```
+
+ ### AutoGen Integration
+ ```python
+ from euriai import EuriaiAutoGen
+
+ autogen = EuriaiAutoGen()
+ # Add an agent (see AutoGen docs for agent config details)
+ agent = autogen.add_agent({
+     "name": "assistant",
+     "llm_config": {"api_key": "YOUR_OPENAI_KEY", "model": "gpt-4o"}
+ })
+ # Run a chat
+ response = autogen.run_chat("Hello, what is the weather today?")
+ print(response)
+ # Access chat history
+ print(autogen.get_history())
+ ```
+
+ ### LlamaIndex Integration
+ ```python
+ from euriai import EuriaiLlamaIndex
+
+ llama = EuriaiLlamaIndex()
+ llama.add_documents([
+     "Abraham Lincoln was the 16th President of the United States.",
+     "He led the country during the American Civil War."
+ ])
+ llama.build_index()
+ response = llama.query("Who was Abraham Lincoln?")
+ print(response)
+ ```
+
+ ### LangGraph Integration
+ ```python
+ from euriai import EuriaiLangGraph
+
+ def greet_node(state):
+     print(f"Hello, {state['name']}!")
+     state['greeted'] = True
+     return state
+
+ def farewell_node(state):
+     if state.get('greeted'):
+         print(f"Goodbye, {state['name']}!")
+     return state
+
+ # Create the graph
+ graph = EuriaiLangGraph()
+ graph.add_node("greet", greet_node)
+ graph.add_node("farewell", farewell_node)
+ graph.add_edge("greet", "farewell")
+ graph.set_state({"name": "Alice"})
+ result = graph.run()
+ print(result)
+ ```
+
+ ---
+
+ ## 2. **SmolAgents Integration**
+
+ ```python
+ from euriai import EuriaiSmolAgent
+
+ # Define a tool using the @tool decorator
+ try:
+     from smolagents import tool
+ except ImportError:
+     raise ImportError("Please install smolagents: pip install smolagents")
+
+ @tool
+ def add(a: int, b: int) -> int:
+     """Add two numbers."""
+     return a + b
+
+ # Create the agent with the tool
+ agent = EuriaiSmolAgent(tools=[add])
+ response = agent.run("What is 2 + 3?")
+ print(response)
+ ```
+
+ ---
+
+ ## 3. **n8n Integration**
+
+ ```python
+ from euriai import EuriaiN8N
+
+ # Initialize with your n8n instance URL and (optionally) API key
+ n8n = EuriaiN8N(base_url="http://localhost:5678", api_key="YOUR_N8N_API_KEY")
+
+ # Trigger a workflow by its webhook ID, passing data as needed
+ workflow_id = "your-workflow-webhook-id"
+ data = {"message": "Hello from EURI SDK!"}
+ result = n8n.trigger_workflow(workflow_id, data)
+ print(result)
+ ```
+
+ ---
+
+ **You can copy-paste these code blocks into your client documentation or UI for user reference.**
+ If you want advanced examples (e.g., multi-tool SmolAgents, LangGraph with more nodes, or n8n with authentication), just let me know!
+
+ ## 📘 Documentation
+
+ For full documentation, visit our [official docs site](https://euron.one/euri).
+
+ ## 🔑 Getting an API Key
+
+ Sign up for an API key at [Euron AI Platform](https://euron.one/euri).
+
+ ## 🤝 Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request.
+
+ ## 📄 License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
euriai-0.4.dist-info/RECORD ADDED
@@ -0,0 +1,19 @@
+ euriai/__init__.py,sha256=JaXTVZFPARYHzpICgl-F5hd5OCYcwG0IOdTXV4wsrK4,835
+ euriai/cli.py,sha256=hF1wiiL2QQSfWf8WlLQyNVDBd4YkbiwmMSoPxVbyPTM,3290
+ euriai/client.py,sha256=L-o6hv9N3md-l-hz-kz5nYVaaZqnrREZlo_0jguhF7E,4066
+ euriai/embedding.py,sha256=uP66Ph1k9Ou6J5RAkztJxlfyj0S0MESOvZ4ulhnVo-o,1270
+ euriai/euri_autogen.py,sha256=OSrQJbbPXXgZ5bubofxnvq-dabs8B8G2dfSHn-LCbw0,2515
+ euriai/euri_chat.py,sha256=DEAiet1ReRwB4ljkPYaTl1Nb5uc20-JF-3PQjGQZXk4,3567
+ euriai/euri_crewai.py,sha256=1Z_lqHr4Jn0mt7jTsh8JXWdjpC_q1AhN-Mm6PDigG10,3443
+ euriai/euri_embed.py,sha256=VE-RLUb5bYnEFA_dxFkj2c3Jr_SYyJKPmFOzsDOR0Ys,2137
+ euriai/euri_langgraph.py,sha256=x8RNSrdcCzY6AunNNHzeX3297srecNUwHdDLTrBb1u8,2131
+ euriai/euri_llamaindex.py,sha256=AVBgfm_cQiZEpfcIAcMeIs55eW_SvDPJPZ31Y_blN0I,2281
+ euriai/euri_n8n.py,sha256=8JvEgvq14ZI7k5_Rfimpbht4RtYSwMh2gKyo1H4Wl6M,1148
+ euriai/euri_smolagents.py,sha256=RcxwI9coXF_DmIy_0TqEF737CIIpZn8f0p_9G5rSFpY,1530
+ euriai/langchain_embed.py,sha256=9nZgD_99CCpsGvXpLUW-Gklxp-pJ4IMhO8ZfbesqLyI,749
+ euriai/langchain_llm.py,sha256=BKNg1i48EAciEXyBVK7_Y9glkhLGWF_OfkKPzvnLtB0,1151
+ euriai-0.4.dist-info/METADATA,sha256=G0fouJYynkW0KG8mcqABkL5TKbEoLLwAJSInBSOgHxg,6879
+ euriai-0.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ euriai-0.4.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
+ euriai-0.4.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
+ euriai-0.4.dist-info/RECORD,,
euriai-0.3.29.dist-info/WHEEL → euriai-0.4.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.4.0)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

euriai-0.3.29.dist-info/METADATA DELETED
@@ -1,135 +0,0 @@
- Metadata-Version: 2.4
- Name: euriai
- Version: 0.3.29
- Summary: Python client for EURI LLM API (euron.one) with CLI, LangChain, and LlamaIndex integration
- Author: euron.one
- Author-email: sudhanshu@euron.one
- License: MIT
- Classifier: Programming Language :: Python :: 3
- Classifier: Operating System :: OS Independent
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Intended Audience :: Developers
- Requires-Python: >=3.6
- Description-Content-Type: text/markdown
- Requires-Dist: requests
- Requires-Dist: langchain-core
- Requires-Dist: llama-index>=0.10.0
- Requires-Dist: numpy
- Dynamic: author
- Dynamic: author-email
- Dynamic: classifier
- Dynamic: description
- Dynamic: description-content-type
- Dynamic: license
- Dynamic: requires-dist
- Dynamic: requires-python
- Dynamic: summary
-
- # euriai 🧠
-
- **EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://euron.one/euri/api). Supports completions, streaming responses, embeddings, CLI interaction, and an interactive guided wizard!
-
- ---
-
- ## 🔧 Installation
-
- ```bash
- pip install euriai
- ```
-
- ## 🚀 Python Usage
-
- ### Text Generation
-
- ```python
- from euriai import EuriaiClient
-
- client = EuriaiClient(
-     api_key="your_api_key_here",
-     model="gpt-4.1-nano" # You can also try: "gemini-2.0-flash-001", "llama-4-maverick", etc.
- )
-
- response = client.generate_completion(
-     prompt="Write a short poem about artificial intelligence.",
-     temperature=0.7,
-     max_tokens=300
- )
-
- print(response)
- ```
-
- ### Embeddings
-
- ```python
- from euriai.embedding import EuriaiEmbeddingClient
-
- client = EuriaiEmbeddingClient(api_key="your_key")
- embedding = client.embed("Hello world")
- print(embedding[:5]) # Print first 5 dimensions of the embedding vector
- ```
-
- ## 💻 Command-Line Interface (CLI) Usage
-
- Run prompts directly from the terminal:
-
- ```bash
- euriai --api_key YOUR_API_KEY --prompt "Tell me a joke"
- ```
-
- Enable streaming output (if supported by the model):
-
- ```bash
- euriai --api_key YOUR_API_KEY --prompt "Stream a fun fact" --stream
- ```
-
- List all supported model IDs with recommended use-cases and temperature/token advice:
-
- ```bash
- euriai --models
- ```
-
- ## 🤖 LangChain Integration
-
- ### Text Generation
-
- Use Euriai with LangChain directly:
-
- ```python
- from euriai import EuriaiLangChainLLM
-
- llm = EuriaiLangChainLLM(
-     api_key="your_api_key",
-     model="gpt-4.1-nano",
-     temperature=0.7,
-     max_tokens=300
- )
-
- print(llm.invoke("Write a poem about time travel."))
- ```
-
- ### Embeddings
-
- Use Euriai embeddings with LangChain:
-
- ```python
- from euriai.langchain_embed import EuriaiEmbeddings
-
- embedding_model = EuriaiEmbeddings(api_key="your_key")
- print(embedding_model.embed_query("What's AI?")[:5]) # Print first 5 dimensions
- ```
-
- ## 📘 Documentation
-
- For full documentation, visit our [official docs site](https://euron.one/euri/api).
-
- ## 🔑 Getting an API Key
-
- Sign up for an API key at [Euron AI Platform](https://euron.one/euri/api).
-
- ## 🤝 Contributing
-
- Contributions are welcome! Please feel free to submit a Pull Request.
-
- ## 📄 License
-
- This project is licensed under the MIT License - see the LICENSE file for details.
euriai-0.3.29.dist-info/RECORD DELETED
@@ -1,13 +0,0 @@
- euriai/__init__.py,sha256=8YhbRV8s4wmNxDP9KmjOxQYLgKUCke450vnp-8-kPKs,449
- euriai/cli.py,sha256=hF1wiiL2QQSfWf8WlLQyNVDBd4YkbiwmMSoPxVbyPTM,3290
- euriai/client.py,sha256=USiqdMULgAiky7nkrJKF3FyKcOS2DtDmUdbeBSnyLYk,4076
- euriai/embedding.py,sha256=z-LLKU68tCrPi9QMs1tlKwyr7WJcjceCTkNQIFMG6vA,1276
- euriai/euri_chat.py,sha256=tuNhwyyzZTSJjX4Su8XBftp7a5YtsalerglNhho2fpA,3573
- euriai/euri_embed.py,sha256=SifhobAIAFuwt9x7K_f1uNhxuK14O2STtUCNC6TVApI,1693
- euriai/langchain_embed.py,sha256=OXWWxiKJ4g24TFgnWPOCZvhK7G8xtSf0ppQ2zwHkIPM,584
- euriai/langchain_llm.py,sha256=D5YvYwV7q9X2_vdoaQiPs7tNiUmjkGz-9Q-7M61hhkg,986
- euriai-0.3.29.dist-info/METADATA,sha256=4V-uSeO1P7k0LmgtsSW3-gLX4_StMOoHNysS_ucvU3A,3249
- euriai-0.3.29.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
- euriai-0.3.29.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
- euriai-0.3.29.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
- euriai-0.3.29.dist-info/RECORD,,