euriai 1.0.0__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
euriai/__init__.py CHANGED
@@ -1,5 +1,5 @@
  from .client import EuriaiClient
- from .langchain import EuriaiChatModel, EuriaiEmbeddings, EuriaiLLM
+ from .langchain import EuriaiChatModel, EuriaiEmbeddings, EuriaiLLM, create_chat_model, create_embeddings, create_llm
  from .embedding import EuriaiEmbeddingClient
  from .euri_chat import EuriaiLlamaIndexLLM
  from .euri_embed import EuriaiLlamaIndexEmbedding
@@ -24,4 +24,7 @@ __all__ = [
      "EuriaiChatModel",
      "EuriaiEmbeddings",
      "EuriaiLLM",
+     "create_chat_model",
+     "create_embeddings",
+     "create_llm",
  ]
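
The __init__.py change re-exports three new helpers from euriai.langchain. A minimal usage sketch follows; the factory signatures are an assumption (they are not shown in this diff) and the api_key value is a placeholder:

    # Hypothetical usage of the newly exported helpers; argument names are assumed,
    # not confirmed by this diff.
    from euriai import create_chat_model, create_embeddings, create_llm

    chat = create_chat_model(api_key="YOUR_EURI_API_KEY", model="gpt-4.1-nano")
    embeddings = create_embeddings(api_key="YOUR_EURI_API_KEY")
    llm = create_llm(api_key="YOUR_EURI_API_KEY")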
euriai/crewai.py CHANGED
@@ -6,13 +6,31 @@ from euriai.client import EuriaiClient
  try:
      from crewai import Agent, Crew, Task, Process
      from crewai.llm import LLM
+     CREWAI_AVAILABLE = True
  except ImportError:
-     Agent = Crew = Task = Process = LLM = None
+     CREWAI_AVAILABLE = False
+     # Fallback base classes for when CrewAI is not available
+     class Agent:
+         pass
+     class Crew:
+         pass
+     class Task:
+         pass
+     class Process:
+         pass
+     class LLM:
+         pass

  class EuriaiLLM(LLM):
      """Custom LLM that uses Euri API for CrewAI agents"""

      def __init__(self, api_key: str, model: str = "gpt-4.1-nano", temperature: float = 0.7, max_tokens: int = 1000):
+         if not CREWAI_AVAILABLE:
+             raise ImportError(
+                 "CrewAI is not installed. Please install with: "
+                 "pip install crewai"
+             )
+
          self.client = EuriaiClient(api_key=api_key, model=model)
          self.model = model
          self.temperature = temperature
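
The crewai.py change swaps the old "Agent = Crew = Task = Process = LLM = None" fallback for stub classes plus a CREWAI_AVAILABLE flag, so the module still imports without CrewAI installed and only fails when an EuriaiLLM is constructed. A standalone sketch of that optional-dependency pattern, reduced to one class (names follow the diff; the rest of the module is omitted):

    try:
        from crewai.llm import LLM          # real base class when CrewAI is installed
        CREWAI_AVAILABLE = True
    except ImportError:
        CREWAI_AVAILABLE = False
        class LLM:                          # stub so this module can still be imported
            pass

    class EuriaiLLM(LLM):
        def __init__(self, api_key: str, model: str = "gpt-4.1-nano"):
            if not CREWAI_AVAILABLE:
                # Defer the failure from import time to construction time
                raise ImportError("CrewAI is not installed. Please install with: pip install crewai")
            self.api_key = api_key
            self.model = model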
euriai/langchain.py CHANGED
@@ -150,7 +150,7 @@ class EuriaiChatModel(BaseChatModel):
      """

      # Configuration
-     api_key: SecretStr = Field(description="Euri API key")
+     api_key: str = Field(description="Euri API key")
      model: str = Field(default="gpt-4.1-nano", description="Model name")
      temperature: float = Field(default=0.7, ge=0.0, le=1.0, description="Sampling temperature")
      max_tokens: int = Field(default=1000, gt=0, description="Maximum tokens to generate")
@@ -177,9 +177,8 @@ class EuriaiChatModel(BaseChatModel):
          super().__init__(**kwargs)

          # Initialize client
-         api_key_str = self.api_key.get_secret_value() if hasattr(self.api_key, 'get_secret_value') else str(self.api_key)
          self._client = EuriaiClient(
-             api_key=api_key_str,
+             api_key=self.api_key,
              model=self.model
          )

@@ -222,6 +221,24 @@ class EuriaiChatModel(BaseChatModel):

          return formatted_messages

+     def _messages_to_prompt(self, messages: List[BaseMessage]) -> str:
+         """Convert LangChain messages to a single prompt string."""
+         prompt_parts = []
+
+         for message in messages:
+             if isinstance(message, SystemMessage):
+                 prompt_parts.append(f"System: {message.content}")
+             elif isinstance(message, HumanMessage):
+                 prompt_parts.append(f"Human: {message.content}")
+             elif isinstance(message, AIMessage):
+                 prompt_parts.append(f"Assistant: {message.content}")
+             elif isinstance(message, (FunctionMessage, ToolMessage)):
+                 prompt_parts.append(f"Function: {message.content}")
+             else:
+                 prompt_parts.append(f"User: {message.content}")
+
+         return "\n\n".join(prompt_parts)
+
      def _create_chat_result(self, response: Dict[str, Any]) -> ChatResult:
          """Create ChatResult from API response."""
          if "choices" not in response or not response["choices"]:
@@ -274,12 +291,12 @@ class EuriaiChatModel(BaseChatModel):
          **kwargs: Any,
      ) -> ChatResult:
          """Generate chat response."""
-         # Format messages
-         formatted_messages = self._format_messages(messages)
+         # Convert messages to prompt format
+         prompt = self._messages_to_prompt(messages)

          # Prepare request
          request_params = {
-             "messages": formatted_messages,
+             "prompt": prompt,
              "temperature": self.temperature,
              "max_tokens": self.max_tokens,
          }
@@ -314,12 +331,12 @@ class EuriaiChatModel(BaseChatModel):
          **kwargs: Any,
      ) -> Iterator[ChatGenerationChunk]:
          """Stream chat response."""
-         # Format messages
-         formatted_messages = self._format_messages(messages)
+         # Convert messages to prompt format
+         prompt = self._messages_to_prompt(messages)

          # Prepare request
          request_params = {
-             "messages": formatted_messages,
+             "prompt": prompt,
              "temperature": self.temperature,
              "max_tokens": self.max_tokens,
          }
@@ -517,30 +534,35 @@ class EuriaiEmbeddings(Embeddings):
          embedding = await embeddings.aembed_query("Hello world")
      """

-     # Configuration
-     api_key: SecretStr = Field(description="Euri API key")
-     model: str = Field(default="text-embedding-3-small", description="Embedding model name")
-     chunk_size: int = Field(default=1000, gt=0, description="Chunk size for batch processing")
-     max_retries: int = Field(default=3, ge=0, description="Maximum number of retries")
-     request_timeout: int = Field(default=60, gt=0, description="Request timeout in seconds")
-
-     # Internal
-     _client: Optional[EuriaiEmbeddingClient] = None
-     _executor: Optional[ThreadPoolExecutor] = None
-
-     def __init__(self, **kwargs):
+     def __init__(self,
+                  api_key: str,
+                  model: str = "text-embedding-3-small",
+                  chunk_size: int = 1000,
+                  max_retries: int = 3,
+                  request_timeout: int = 60,
+                  **kwargs):
          if not LANGCHAIN_AVAILABLE:
              raise ImportError(
                  "LangChain is not installed. Please install with: "
                  "pip install langchain-core"
              )

-         super().__init__(**kwargs)
+         super().__init__()
+
+         # Initialize configuration
+         self.api_key = api_key
+         self.model = model
+         self.chunk_size = chunk_size
+         self.max_retries = max_retries
+         self.request_timeout = request_timeout
+
+         # Internal
+         self._client: Optional[EuriaiEmbeddingClient] = None
+         self._executor: Optional[ThreadPoolExecutor] = None

          # Initialize client
-         api_key_str = self.api_key.get_secret_value() if hasattr(self.api_key, 'get_secret_value') else str(self.api_key)
          self._client = EuriaiEmbeddingClient(
-             api_key=api_key_str,
+             api_key=self.api_key,
              model=self.model
          )

@@ -609,7 +631,7 @@ class EuriaiLLM(LLM):
      """

      # Configuration
-     api_key: SecretStr = Field(description="Euri API key")
+     api_key: str = Field(description="Euri API key")
      model: str = Field(default="gpt-4.1-nano", description="Model name")
      temperature: float = Field(default=0.7, ge=0.0, le=1.0, description="Sampling temperature")
      max_tokens: int = Field(default=1000, gt=0, description="Maximum tokens to generate")
@@ -631,9 +653,8 @@ class EuriaiLLM(LLM):
          super().__init__(**kwargs)

          # Initialize client
-         api_key_str = self.api_key.get_secret_value() if hasattr(self.api_key, 'get_secret_value') else str(self.api_key)
          self._client = EuriaiClient(
-             api_key=api_key_str,
+             api_key=self.api_key,
              model=self.model
          )

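For callers, the net effect of the langchain.py changes is that api_key is now a plain string rather than a SecretStr, and EuriaiEmbeddings is configured through explicit __init__ keyword arguments instead of Pydantic fields. A construction sketch consistent with the signatures and defaults shown above (the api_key value is a placeholder):

    from euriai.langchain import EuriaiChatModel, EuriaiEmbeddings, EuriaiLLM

    # Chat model and LLM keep Pydantic fields, but api_key is a plain str now.
    chat = EuriaiChatModel(api_key="YOUR_EURI_API_KEY", model="gpt-4.1-nano", temperature=0.7)
    llm = EuriaiLLM(api_key="YOUR_EURI_API_KEY", model="gpt-4.1-nano", max_tokens=1000)

    # Embeddings now take explicit keyword arguments.
    embeddings = EuriaiEmbeddings(api_key="YOUR_EURI_API_KEY", model="text-embedding-3-small", chunk_size=1000)
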
euriai-1.0.0.dist-info/METADATA → euriai-1.0.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: euriai
- Version: 1.0.0
+ Version: 1.0.2
  Summary: Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration
  Author: Euri
  Author-email: tech@euron.one
euriai-1.0.0.dist-info/RECORD → euriai-1.0.2.dist-info/RECORD CHANGED
@@ -1,18 +1,18 @@
- euriai/__init__.py,sha256=5AFG_ovSriY5Kfz0kb1uKuNK_ei6o4k-kcCgoTZqNk0,795
+ euriai/__init__.py,sha256=WE4zl0HNxDm4zwR3d0uRyjl3fjcD_pccKvPsbtuDysw,916
  euriai/autogen.py,sha256=4YAHrY65YYOKVsBjQa9G8pr-_Xxss1uR08qLAFzo7Rk,16023
  euriai/cli.py,sha256=hF1wiiL2QQSfWf8WlLQyNVDBd4YkbiwmMSoPxVbyPTM,3290
  euriai/client.py,sha256=L-o6hv9N3md-l-hz-kz5nYVaaZqnrREZlo_0jguhF7E,4066
- euriai/crewai.py,sha256=stnwsChy4MYXwWP6JBk_twg61EZTaTj8zoBiLN5n_I0,7135
+ euriai/crewai.py,sha256=eeDfZQC9LtEePSnaj94DA0Kcb8Ayq0Gn_eDSQwbcMBU,7543
  euriai/embedding.py,sha256=uP66Ph1k9Ou6J5RAkztJxlfyj0S0MESOvZ4ulhnVo-o,1270
  euriai/euri_chat.py,sha256=DEAiet1ReRwB4ljkPYaTl1Nb5uc20-JF-3PQjGQZXk4,3567
  euriai/euri_embed.py,sha256=VE-RLUb5bYnEFA_dxFkj2c3Jr_SYyJKPmFOzsDOR0Ys,2137
- euriai/langchain.py,sha256=ZHET7cO9CslXIMSKFYcjwCLWfP8pov22Udal5gSMOoo,29640
+ euriai/langchain.py,sha256=K8yJdkB9sUiLQj1kt_IJ_cNBWLEimo7xNgGedmdkdsc,30109
  euriai/langgraph.py,sha256=sw9e-PnfwAwmp_tUCnAGIUB78GyJsMkAzxOGvFUafiM,34128
  euriai/llamaindex.py,sha256=c-ujod2bjL6QIvfAyuIxm1SvSCS00URFElYybKQ5Ew0,26551
  euriai/n8n.py,sha256=hjkckqyW_hZNL78UkBCof1WvKCKCIjwdvZdAgx6NrB8,3764
  euriai/smolagents.py,sha256=xlixGx2IWzAPTpSJGsYIK2L-SHGY9Mw1-8GbwVsEYtU,28507
- euriai-1.0.0.dist-info/METADATA,sha256=3qQs4Z403ImXKfKnzI3__uftPhwdd_MSEFyT9rn5iPA,6881
- euriai-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- euriai-1.0.0.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
- euriai-1.0.0.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
- euriai-1.0.0.dist-info/RECORD,,
+ euriai-1.0.2.dist-info/METADATA,sha256=S9UtbT2UjLIHakIbQlJDfR3EZv2p-PFsjq_mzvSlJ-U,6881
+ euriai-1.0.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ euriai-1.0.2.dist-info/entry_points.txt,sha256=9OkET8KIGcsjQn8UlnpPKRT75s2KW34jq1__1SXtpMA,43
+ euriai-1.0.2.dist-info/top_level.txt,sha256=TG1htJ8cuD62MXn-NJ7DVF21QHY16w6M_QgfF_Er_EQ,7
+ euriai-1.0.2.dist-info/RECORD,,