euriai 1.0.19__tar.gz → 1.0.21__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 1.0.19
+Version: 1.0.21
 Summary: Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: Euri
 Author-email: tech@euron.one
@@ -4,7 +4,7 @@ Euri AI Python SDK
 A comprehensive Python SDK for the Euri AI API with integrations for popular frameworks.
 """
 
-__version__ = "1.0.19"
+__version__ = "1.0.21"
 
 # Core imports that should always work
 try:
@@ -247,12 +247,9 @@ class EuriaiLlamaIndexLLM(LLM):
 
     def complete(self, prompt: str, formatted: bool = False, **kwargs) -> CompletionResponse:
         """Complete a prompt."""
-        # Convert prompt to message format
-        messages = [{"role": "user", "content": prompt}]
-
         # Prepare request
         params = self._prepare_request_params(**kwargs)
-        params["messages"] = messages
+        params["prompt"] = prompt  # Use 'prompt' directly instead of converting to messages
 
         try:
             # Make API call
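For orientation, the practical effect of this hunk is on the payload that complete() builds before the API call. A minimal before/after sketch of just that change (any other fields contributed by _prepare_request_params are not shown in this diff):

prompt = "Summarize the release notes."

# 1.0.19: the prompt was first wrapped into a chat-style message list.
params_old = {"messages": [{"role": "user", "content": prompt}]}

# 1.0.21: the prompt is passed through as-is under the 'prompt' key.
params_new = {"prompt": prompt}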
@@ -267,9 +264,27 @@ class EuriaiLlamaIndexLLM(LLM):
 # Format messages
         formatted_messages = self._format_messages(messages)
 
+        # Convert messages to a single prompt string
+        prompt_parts = []
+        system_message = None
+
+        for msg in formatted_messages:
+            if msg["role"] == "system":
+                system_message = msg["content"]
+            elif msg["role"] == "user":
+                prompt_parts.append(f"User: {msg['content']}")
+            elif msg["role"] == "assistant":
+                prompt_parts.append(f"Assistant: {msg['content']}")
+
+        # Combine system message and conversation
+        if system_message:
+            prompt = f"System: {system_message}\n\n" + "\n".join(prompt_parts)
+        else:
+            prompt = "\n".join(prompt_parts)
+
         # Prepare request
         params = self._prepare_request_params(**kwargs)
-        params["messages"] = formatted_messages
+        params["prompt"] = prompt  # Use 'prompt' instead of 'messages'
 
         try:
             # Make API call
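To see what the new conversion step in chat() produces, here is a standalone sketch that replicates only the flattening logic from this hunk on a sample conversation; flatten_messages is an illustrative helper name, not part of the SDK:

from typing import Dict, List


def flatten_messages(formatted_messages: List[Dict[str, str]]) -> str:
    """Replicates the 1.0.21 conversion of chat messages into a single prompt string."""
    prompt_parts = []
    system_message = None

    for msg in formatted_messages:
        if msg["role"] == "system":
            system_message = msg["content"]
        elif msg["role"] == "user":
            prompt_parts.append(f"User: {msg['content']}")
        elif msg["role"] == "assistant":
            prompt_parts.append(f"Assistant: {msg['content']}")

    if system_message:
        return f"System: {system_message}\n\n" + "\n".join(prompt_parts)
    return "\n".join(prompt_parts)


example = [
    {"role": "system", "content": "You are concise."},
    {"role": "user", "content": "Hi"},
    {"role": "assistant", "content": "Hello!"},
    {"role": "user", "content": "What changed?"},
]
print(flatten_messages(example))
# System: You are concise.
#
# User: Hi
# Assistant: Hello!
# User: What changed?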
@@ -433,6 +448,23 @@ class EuriaiLlamaIndexEmbedding(BaseEmbedding):
 async def aget_query_embedding(self, query: str) -> List[float]:
         """Async get query embedding."""
         return await self.aget_text_embedding(query)
+
+    # Abstract methods required by BaseEmbedding
+    def _get_text_embedding(self, text: str) -> List[float]:
+        """Private method required by BaseEmbedding."""
+        return self.get_text_embedding(text)
+
+    def _get_query_embedding(self, query: str) -> List[float]:
+        """Private method required by BaseEmbedding."""
+        return self.get_query_embedding(query)
+
+    async def _aget_text_embedding(self, text: str) -> List[float]:
+        """Private async method required by BaseEmbedding."""
+        return await self.aget_text_embedding(text)
+
+    async def _aget_query_embedding(self, query: str) -> List[float]:
+        """Private async method required by BaseEmbedding."""
+        return await self.aget_query_embedding(query)
 
 
 class EuriaiLlamaIndex:
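The four new private methods exist because LlamaIndex's BaseEmbedding exposes public embedding calls that dispatch to private hooks which subclasses are expected to implement; the 1.0.21 additions satisfy those hooks by forwarding to the class's existing public implementations. A self-contained sketch of that hook pattern (plain-Python stand-ins with illustrative names, not the real BaseEmbedding API):

from abc import ABC, abstractmethod
from typing import List


class HookBasedEmbedding(ABC):
    """Illustrative stand-in for a base class such as LlamaIndex's BaseEmbedding."""

    # Public entry point used by callers; it dispatches to the private hook.
    def get_text_embedding(self, text: str) -> List[float]:
        return self._get_text_embedding(text)

    # Private hook every concrete subclass must provide.
    @abstractmethod
    def _get_text_embedding(self, text: str) -> List[float]:
        ...


class ToyEmbedding(HookBasedEmbedding):
    def _get_text_embedding(self, text: str) -> List[float]:
        # Toy two-dimensional "embedding": text length and word count.
        return [float(len(text)), float(len(text.split()))]


print(ToyEmbedding().get_text_embedding("hello world"))  # [11.0, 2.0]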
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 1.0.19
+Version: 1.0.21
 Summary: Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration
 Author: Euri
 Author-email: tech@euron.one
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="euriai",
-    version="1.0.19",
+    version="1.0.21",
     description="Python client for Euri API (euron.one) with CLI, LangChain, and LlamaIndex integration",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
(14 files without changes)