agno 2.3.8__py3-none-any.whl → 2.3.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. agno/agent/agent.py +134 -82
  2. agno/db/mysql/__init__.py +2 -1
  3. agno/db/mysql/async_mysql.py +2888 -0
  4. agno/db/mysql/mysql.py +17 -8
  5. agno/db/mysql/utils.py +139 -6
  6. agno/db/postgres/async_postgres.py +10 -5
  7. agno/db/postgres/postgres.py +7 -2
  8. agno/db/schemas/evals.py +1 -0
  9. agno/db/singlestore/singlestore.py +5 -1
  10. agno/db/sqlite/async_sqlite.py +2 -2
  11. agno/eval/__init__.py +10 -0
  12. agno/eval/agent_as_judge.py +860 -0
  13. agno/eval/base.py +29 -0
  14. agno/eval/utils.py +2 -1
  15. agno/exceptions.py +7 -0
  16. agno/knowledge/embedder/openai.py +8 -8
  17. agno/knowledge/knowledge.py +1142 -176
  18. agno/media.py +22 -6
  19. agno/models/aws/claude.py +8 -7
  20. agno/models/base.py +27 -1
  21. agno/models/deepseek/deepseek.py +67 -0
  22. agno/models/google/gemini.py +65 -11
  23. agno/models/google/utils.py +22 -0
  24. agno/models/message.py +2 -0
  25. agno/models/openai/chat.py +4 -0
  26. agno/os/app.py +64 -74
  27. agno/os/interfaces/a2a/router.py +3 -4
  28. agno/os/interfaces/agui/router.py +2 -0
  29. agno/os/router.py +3 -1607
  30. agno/os/routers/agents/__init__.py +3 -0
  31. agno/os/routers/agents/router.py +581 -0
  32. agno/os/routers/agents/schema.py +261 -0
  33. agno/os/routers/evals/evals.py +26 -6
  34. agno/os/routers/evals/schemas.py +34 -2
  35. agno/os/routers/evals/utils.py +101 -20
  36. agno/os/routers/knowledge/knowledge.py +1 -1
  37. agno/os/routers/teams/__init__.py +3 -0
  38. agno/os/routers/teams/router.py +496 -0
  39. agno/os/routers/teams/schema.py +257 -0
  40. agno/os/routers/workflows/__init__.py +3 -0
  41. agno/os/routers/workflows/router.py +545 -0
  42. agno/os/routers/workflows/schema.py +75 -0
  43. agno/os/schema.py +1 -559
  44. agno/os/utils.py +139 -2
  45. agno/team/team.py +73 -16
  46. agno/tools/file_generation.py +12 -6
  47. agno/tools/firecrawl.py +15 -7
  48. agno/utils/hooks.py +64 -5
  49. agno/utils/http.py +2 -2
  50. agno/utils/media.py +11 -1
  51. agno/utils/print_response/agent.py +8 -0
  52. agno/utils/print_response/team.py +8 -0
  53. agno/vectordb/pgvector/pgvector.py +88 -51
  54. agno/workflow/parallel.py +3 -3
  55. agno/workflow/step.py +14 -2
  56. agno/workflow/types.py +38 -2
  57. agno/workflow/workflow.py +12 -4
  58. {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/METADATA +7 -2
  59. {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/RECORD +62 -49
  60. {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/WHEEL +0 -0
  61. {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/licenses/LICENSE +0 -0
  62. {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/top_level.txt +0 -0
agno/utils/print_response/agent.py CHANGED
@@ -31,6 +31,7 @@ def print_response_stream(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -86,6 +87,7 @@ def print_response_stream(
         session_id=session_id,
         session_state=session_state,
         user_id=user_id,
+        run_id=run_id,
         audio=audio,
         images=images,
         videos=videos,
@@ -222,6 +224,7 @@ async def aprint_response_stream(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -275,6 +278,7 @@ async def aprint_response_stream(
         session_id=session_id,
         session_state=session_state,
         user_id=user_id,
+        run_id=run_id,
         audio=audio,
         images=images,
         videos=videos,
@@ -512,6 +516,7 @@ def print_response(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -571,6 +576,7 @@ def print_response(
         session_id=session_id,
         session_state=session_state,
         user_id=user_id,
+        run_id=run_id,
         audio=audio,
         images=images,
         videos=videos,
@@ -645,6 +651,7 @@ async def aprint_response(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -704,6 +711,7 @@ async def aprint_response(
         session_id=session_id,
         session_state=session_state,
         user_id=user_id,
+        run_id=run_id,
         audio=audio,
         images=images,
         videos=videos,
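These hunks thread a caller-supplied `run_id` through all four print helpers. A minimal usage sketch, assuming the `Agent` facade (whose wiring also changed in `agno/agent/agent.py` above) forwards the parameter; the agent construction is illustrative, not part of the diff:

```python
from uuid import uuid4

from agno.agent import Agent

# Hypothetical agent setup; any configured model would do.
agent = Agent(name="example-agent")

# Supplying run_id lets the caller correlate the printed run with an
# externally generated identifier (e.g., for tracing or eval records).
agent.print_response("What is the capital of France?", run_id=str(uuid4()))
```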
agno/utils/print_response/team.py CHANGED
@@ -30,6 +30,7 @@ def print_response(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -81,6 +82,7 @@ def print_response(
     # Run the agent
     run_response: TeamRunOutput = team.run(  # type: ignore
         input=input,
+        run_id=run_id,
         images=images,
         audio=audio,
         videos=videos,
@@ -339,6 +341,7 @@ def print_response_stream(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -417,6 +420,7 @@ def print_response_stream(
         session_id=session_id,
         session_state=session_state,
         user_id=user_id,
+        run_id=run_id,
         knowledge_filters=knowledge_filters,
         add_history_to_context=add_history_to_context,
         dependencies=dependencies,
@@ -893,6 +897,7 @@ async def aprint_response(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -944,6 +949,7 @@ async def aprint_response(
     # Run the agent
     run_response: TeamRunOutput = await team.arun(  # type: ignore
         input=input,
+        run_id=run_id,
         images=images,
         audio=audio,
         videos=videos,
@@ -1200,6 +1206,7 @@ async def aprint_response_stream(
     session_id: Optional[str] = None,
     session_state: Optional[Dict[str, Any]] = None,
     user_id: Optional[str] = None,
+    run_id: Optional[str] = None,
     audio: Optional[Sequence[Audio]] = None,
     images: Optional[Sequence[Image]] = None,
     videos: Optional[Sequence[Video]] = None,
@@ -1288,6 +1295,7 @@ async def aprint_response_stream(
         session_id=session_id,
         session_state=session_state,
         user_id=user_id,
+        run_id=run_id,
         knowledge_filters=knowledge_filters,
         add_history_to_context=add_history_to_context,
         add_dependencies_to_context=add_dependencies_to_context,
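The team-side helpers gain the same parameter and hand it to `team.run()` / `team.arun()`, so downstream run records carry the caller-chosen identifier. A matching sketch; the `Team` construction and member agents are assumptions:

```python
from uuid import uuid4

from agno.agent import Agent
from agno.team import Team

# Hypothetical team setup with two placeholder members.
team = Team(members=[Agent(name="researcher"), Agent(name="writer")])

# run_id flows through print_response -> team.run.
team.print_response("Summarize today's AI news.", run_id=str(uuid4()))
```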
agno/vectordb/pgvector/pgvector.py CHANGED
@@ -28,7 +28,7 @@ from agno.filters import FilterExpr
 from agno.knowledge.document import Document
 from agno.knowledge.embedder import Embedder
 from agno.knowledge.reranker.base import Reranker
-from agno.utils.log import log_debug, log_info, logger
+from agno.utils.log import log_debug, log_info, log_error, log_warning
 from agno.vectordb.base import VectorDb
 from agno.vectordb.distance import Distance
 from agno.vectordb.pgvector.index import HNSW, Ivfflat
@@ -62,6 +62,7 @@ class PgVector(VectorDb):
         schema_version: int = 1,
         auto_upgrade_schema: bool = False,
         reranker: Optional[Reranker] = None,
+        create_schema: bool = True,
     ):
         """
         Initialize the PgVector instance.
@@ -82,6 +83,8 @@ class PgVector(VectorDb):
             content_language (str): Language for full-text search.
             schema_version (int): Version of the database schema.
             auto_upgrade_schema (bool): Automatically upgrade schema if True.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
         """
         if not table_name:
             raise ValueError("Table name must be provided.")
@@ -104,7 +107,7 @@ class PgVector(VectorDb):
         try:
             db_engine = create_engine(db_url)
         except Exception as e:
-            logger.error(f"Failed to create engine from 'db_url': {e}")
+            log_error(f"Failed to create engine from 'db_url': {e}")
             raise
 
         # Database settings
@@ -147,6 +150,9 @@ class PgVector(VectorDb):
         # Reranker instance
         self.reranker: Optional[Reranker] = reranker
 
+        # Schema creation flag
+        self.create_schema: bool = create_schema
+
         # Database session
         self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
         # Database table
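A sketch of how the new `create_schema` flag might be used when the schema is owned by a migration tool; the connection string and embedder choice are placeholders:

```python
from agno.knowledge.embedder.openai import OpenAIEmbedder
from agno.vectordb.pgvector import PgVector

# Assumed connection string; replace with your own.
db_url = "postgresql+psycopg://user:pass@localhost:5432/app"

# With create_schema=False, create() still installs the pgvector extension
# and creates the table, but skips CREATE SCHEMA — useful when the "ai"
# schema is provisioned by Alembic or another migration tool.
vector_db = PgVector(
    table_name="documents",
    schema="ai",
    db_url=db_url,
    embedder=OpenAIEmbedder(),
    create_schema=False,
)
```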
@@ -209,7 +215,7 @@ class PgVector(VectorDb):
         try:
             return inspect(self.db_engine).has_table(self.table_name, schema=self.schema)
         except Exception as e:
-            logger.error(f"Error checking if table exists: {e}")
+            log_error(f"Error checking if table exists: {e}")
             return False
 
     def create(self) -> None:
@@ -220,7 +226,7 @@ class PgVector(VectorDb):
             with self.Session() as sess, sess.begin():
                 log_debug("Creating extension: vector")
                 sess.execute(text("CREATE EXTENSION IF NOT EXISTS vector;"))
-                if self.schema is not None:
+                if self.create_schema and self.schema is not None:
                     log_debug(f"Creating schema: {self.schema}")
                     sess.execute(text(f"CREATE SCHEMA IF NOT EXISTS {self.schema};"))
             log_debug(f"Creating table: {self.table_name}")
@@ -247,7 +253,7 @@ class PgVector(VectorDb):
                 result = sess.execute(stmt).first()
                 return result is not None
         except Exception as e:
-            logger.error(f"Error checking if record exists: {e}")
+            log_error(f"Error checking if record exists: {e}")
             return False
 
     def name_exists(self, name: str) -> bool:
@@ -324,7 +330,7 @@ class PgVector(VectorDb):
                     try:
                         batch_records.append(self._get_document_record(doc, filters, content_hash))
                     except Exception as e:
-                        logger.error(f"Error processing document '{doc.name}': {e}")
+                        log_error(f"Error processing document '{doc.name}': {e}")
 
                     # Insert the batch of records
                     insert_stmt = postgresql.insert(self.table)
@@ -332,11 +338,11 @@ class PgVector(VectorDb):
                     sess.commit()  # Commit batch independently
                     log_info(f"Inserted batch of {len(batch_records)} documents.")
                 except Exception as e:
-                    logger.error(f"Error with batch starting at index {i}: {e}")
+                    log_error(f"Error with batch starting at index {i}: {e}")
                     sess.rollback()  # Rollback the current batch if there's an error
                     raise
         except Exception as e:
-            logger.error(f"Error inserting documents: {e}")
+            log_error(f"Error inserting documents: {e}")
             raise
 
     async def async_insert(
@@ -380,7 +386,7 @@ class PgVector(VectorDb):
                         }
                         batch_records.append(record)
                     except Exception as e:
-                        logger.error(f"Error processing document '{doc.name}': {e}")
+                        log_error(f"Error processing document '{doc.name}': {e}")
 
                     # Insert the batch of records
                     if batch_records:
@@ -389,11 +395,11 @@ class PgVector(VectorDb):
                         sess.commit()  # Commit batch independently
                         log_info(f"Inserted batch of {len(batch_records)} documents.")
                     except Exception as e:
-                        logger.error(f"Error with batch starting at index {i}: {e}")
+                        log_error(f"Error with batch starting at index {i}: {e}")
                         sess.rollback()  # Rollback the current batch if there's an error
                         raise
         except Exception as e:
-            logger.error(f"Error inserting documents: {e}")
+            log_error(f"Error inserting documents: {e}")
             raise
 
     def upsert_available(self) -> bool:
@@ -422,7 +428,7 @@ class PgVector(VectorDb):
             self._delete_by_content_hash(content_hash)
             self._upsert(content_hash, documents, filters, batch_size)
         except Exception as e:
-            logger.error(f"Error upserting documents by content hash: {e}")
+            log_error(f"Error upserting documents by content hash: {e}")
             raise
 
     def _upsert(
@@ -452,7 +458,7 @@ class PgVector(VectorDb):
                     try:
                         batch_records_dict[doc.id] = self._get_document_record(doc, filters, content_hash)  # type: ignore
                     except Exception as e:
-                        logger.error(f"Error processing document '{doc.name}': {e}")
+                        log_error(f"Error processing document '{doc.name}': {e}")
 
                 # Convert dict to list for upsert
                 batch_records = list(batch_records_dict.values())
@@ -479,11 +485,11 @@ class PgVector(VectorDb):
                     sess.commit()  # Commit batch independently
                     log_info(f"Upserted batch of {len(batch_records)} documents.")
                 except Exception as e:
-                    logger.error(f"Error with batch starting at index {i}: {e}")
+                    log_error(f"Error with batch starting at index {i}: {e}")
                     sess.rollback()  # Rollback the current batch if there's an error
                     raise
         except Exception as e:
-            logger.error(f"Error upserting documents: {e}")
+            log_error(f"Error upserting documents: {e}")
             raise
 
     def _get_document_record(
@@ -532,7 +538,7 @@ class PgVector(VectorDb):
                         doc.embedding = embeddings[j]
                         doc.usage = usages[j] if j < len(usages) else None
                 except Exception as e:
-                    logger.error(f"Error assigning batch embedding to document '{doc.name}': {e}")
+                    log_error(f"Error assigning batch embedding to document '{doc.name}': {e}")
 
         except Exception as e:
             # Check if this is a rate limit error - don't fall back as it would make things worse
@@ -543,17 +549,41 @@ class PgVector(VectorDb):
             )
 
             if is_rate_limit:
-                logger.error(f"Rate limit detected during batch embedding. {e}")
+                log_error(f"Rate limit detected during batch embedding. {e}")
                 raise e
             else:
-                logger.warning(f"Async batch embedding failed, falling back to individual embeddings: {e}")
+                log_warning(f"Async batch embedding failed, falling back to individual embeddings: {e}")
                 # Fall back to individual embedding
                 embed_tasks = [doc.async_embed(embedder=self.embedder) for doc in batch_docs]
-                await asyncio.gather(*embed_tasks, return_exceptions=True)
+                results = await asyncio.gather(*embed_tasks, return_exceptions=True)
+
+                # Check for exceptions and handle them
+                for i, result in enumerate(results):
+                    if isinstance(result, Exception):
+                        error_msg = str(result)
+                        # If it's an event loop closure error, log it but don't fail
+                        if "Event loop is closed" in error_msg or "RuntimeError" in type(result).__name__:
+                            log_warning(
+                                f"Event loop closure during embedding for document {i}, but operation may have succeeded: {result}"
+                            )
+                        else:
+                            log_error(f"Error embedding document {i}: {result}")
         else:
             # Use individual embedding
             embed_tasks = [doc.async_embed(embedder=self.embedder) for doc in batch_docs]
-            await asyncio.gather(*embed_tasks, return_exceptions=True)
+            results = await asyncio.gather(*embed_tasks, return_exceptions=True)
+
+            # Check for exceptions and handle them
+            for i, result in enumerate(results):
+                if isinstance(result, Exception):
+                    error_msg = str(result)
+                    # If it's an event loop closure error, log it but don't fail
+                    if "Event loop is closed" in error_msg or "RuntimeError" in type(result).__name__:
+                        log_warning(
+                            f"Event loop closure during embedding for document {i}, but operation may have succeeded: {result}"
+                        )
+                    else:
+                        log_error(f"Error embedding document {i}: {result}")
 
     async def async_upsert(
         self,
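The fix above hinges on how `asyncio.gather(..., return_exceptions=True)` reports failures: exceptions come back as values in the result list instead of being raised, so before this change they were silently discarded. A standalone illustration using only the standard library:

```python
import asyncio


async def embed(i: int) -> int:
    # Simulate one failing embedding task.
    if i == 1:
        raise RuntimeError("Event loop is closed")
    return i


async def main() -> None:
    results = await asyncio.gather(*(embed(i) for i in range(3)), return_exceptions=True)
    for i, result in enumerate(results):
        if isinstance(result, Exception):
            print(f"task {i} failed: {result!r}")  # previously swallowed
        else:
            print(f"task {i} ok: {result}")


asyncio.run(main())
```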
@@ -568,7 +598,7 @@ class PgVector(VectorDb):
             self._delete_by_content_hash(content_hash)
             await self._async_upsert(content_hash, documents, filters, batch_size)
         except Exception as e:
-            logger.error(f"Error upserting documents by content hash: {e}")
+            log_error(f"Error upserting documents by content hash: {e}")
             raise
 
     async def _async_upsert(
@@ -597,11 +627,18 @@ class PgVector(VectorDb):
 
                 # Prepare documents for upserting
                 batch_records_dict = {}  # Use dict to deduplicate by ID
-                for doc in batch_docs:
+                for idx, doc in enumerate(batch_docs):
                     try:
                         cleaned_content = self._clean_content(doc.content)
                         record_id = md5(cleaned_content.encode()).hexdigest()
 
+                        if (
+                            doc.embedding is not None
+                            and isinstance(doc.embedding, list)
+                            and len(doc.embedding) == 0
+                        ):
+                            log_warning(f"Document {idx} '{doc.name}' has empty embedding (length 0)")
+
                         meta_data = doc.meta_data or {}
                         if filters:
                             meta_data.update(filters)
@@ -619,7 +656,7 @@ class PgVector(VectorDb):
                         }
                         batch_records_dict[record_id] = record  # This deduplicates by ID
                     except Exception as e:
-                        logger.error(f"Error processing document '{doc.name}': {e}")
+                        log_error(f"Error processing document '{doc.name}': {e}")
 
                 # Convert dict to list for upsert
                 batch_records = list(batch_records_dict.values())
@@ -646,11 +683,11 @@ class PgVector(VectorDb):
                     sess.commit()  # Commit batch independently
                     log_info(f"Upserted batch of {len(batch_records)} documents.")
                 except Exception as e:
-                    logger.error(f"Error with batch starting at index {i}: {e}")
+                    log_error(f"Error with batch starting at index {i}: {e}")
                     sess.rollback()  # Rollback the current batch if there's an error
                     raise
         except Exception as e:
-            logger.error(f"Error upserting documents: {e}")
+            log_error(f"Error upserting documents: {e}")
             raise
 
     def update_metadata(self, content_id: str, metadata: Dict[str, Any]) -> None:
@@ -679,7 +716,7 @@ class PgVector(VectorDb):
                 sess.execute(stmt)
                 sess.commit()
         except Exception as e:
-            logger.error(f"Error updating metadata for document {content_id}: {e}")
+            log_error(f"Error updating metadata for document {content_id}: {e}")
             raise
 
     def search(
@@ -703,7 +740,7 @@ class PgVector(VectorDb):
         elif self.search_type == SearchType.hybrid:
             return self.hybrid_search(query=query, limit=limit, filters=filters)
         else:
-            logger.error(f"Invalid search type '{self.search_type}'.")
+            log_error(f"Invalid search type '{self.search_type}'.")
             return []
 
     async def async_search(
@@ -751,7 +788,7 @@ class PgVector(VectorDb):
         # Get the embedding for the query string
         query_embedding = self.embedder.get_embedding(query)
         if query_embedding is None:
-            logger.error(f"Error getting embedding for Query: {query}")
+            log_error(f"Error getting embedding for Query: {query}")
            return []
 
         # Define the columns to select
@@ -789,7 +826,7 @@ class PgVector(VectorDb):
         elif self.distance == Distance.max_inner_product:
             stmt = stmt.order_by(self.table.c.embedding.max_inner_product(query_embedding))
         else:
-            logger.error(f"Unknown distance metric: {self.distance}")
+            log_error(f"Unknown distance metric: {self.distance}")
             return []
 
         # Limit the number of results
@@ -808,8 +845,8 @@ class PgVector(VectorDb):
                     sess.execute(text(f"SET LOCAL hnsw.ef_search = {self.vector_index.ef_search}"))
                 results = sess.execute(stmt).fetchall()
         except Exception as e:
-            logger.error(f"Error performing semantic search: {e}")
-            logger.error("Table might not exist, creating for future use")
+            log_error(f"Error performing semantic search: {e}")
+            log_error("Table might not exist, creating for future use")
             self.create()
             return []
 
@@ -834,7 +871,7 @@ class PgVector(VectorDb):
             log_info(f"Found {len(search_results)} documents")
             return search_results
         except Exception as e:
-            logger.error(f"Error during vector search: {e}")
+            log_error(f"Error during vector search: {e}")
             return []
 
     def enable_prefix_matching(self, query: str) -> str:
@@ -916,8 +953,8 @@ class PgVector(VectorDb):
             with self.Session() as sess, sess.begin():
                 results = sess.execute(stmt).fetchall()
         except Exception as e:
-            logger.error(f"Error performing keyword search: {e}")
-            logger.error("Table might not exist, creating for future use")
+            log_error(f"Error performing keyword search: {e}")
+            log_error("Table might not exist, creating for future use")
             self.create()
             return []
 
@@ -939,7 +976,7 @@ class PgVector(VectorDb):
             log_info(f"Found {len(search_results)} documents")
             return search_results
         except Exception as e:
-            logger.error(f"Error during keyword search: {e}")
+            log_error(f"Error during keyword search: {e}")
             return []
 
     def hybrid_search(
@@ -963,7 +1000,7 @@ class PgVector(VectorDb):
         # Get the embedding for the query string
         query_embedding = self.embedder.get_embedding(query)
         if query_embedding is None:
-            logger.error(f"Error getting embedding for Query: {query}")
+            log_error(f"Error getting embedding for Query: {query}")
             return []
 
         # Define the columns to select
@@ -1001,7 +1038,7 @@ class PgVector(VectorDb):
             # Normalize to range [0, 1]
             vector_score = (raw_vector_score + 1) / 2
         else:
-            logger.error(f"Unknown distance metric: {self.distance}")
+            log_error(f"Unknown distance metric: {self.distance}")
             return []
 
         # Apply weights to control the influence of each score
@@ -1052,7 +1089,7 @@ class PgVector(VectorDb):
                     sess.execute(text(f"SET LOCAL hnsw.ef_search = {self.vector_index.ef_search}"))
                 results = sess.execute(stmt).fetchall()
         except Exception as e:
-            logger.error(f"Error performing hybrid search: {e}")
+            log_error(f"Error performing hybrid search: {e}")
             return []
 
         # Process the results and convert to Document objects
@@ -1076,7 +1113,7 @@ class PgVector(VectorDb):
             log_info(f"Found {len(search_results)} documents")
             return search_results
         except Exception as e:
-            logger.error(f"Error during hybrid search: {e}")
+            log_error(f"Error during hybrid search: {e}")
             return []
 
     def drop(self) -> None:
@@ -1089,7 +1126,7 @@ class PgVector(VectorDb):
                 self.table.drop(self.db_engine)
                 log_info(f"Table '{self.table.fullname}' dropped successfully.")
             except Exception as e:
-                logger.error(f"Error dropping table '{self.table.fullname}': {e}")
+                log_error(f"Error dropping table '{self.table.fullname}': {e}")
                 raise
         else:
             log_info(f"Table '{self.table.fullname}' does not exist.")
@@ -1124,7 +1161,7 @@ class PgVector(VectorDb):
                 result = sess.execute(stmt).scalar()
                 return int(result) if result is not None else 0
         except Exception as e:
-            logger.error(f"Error getting count from table '{self.table.fullname}': {e}")
+            log_error(f"Error getting count from table '{self.table.fullname}': {e}")
             return 0
 
     def optimize(self, force_recreate: bool = False) -> None:
@@ -1165,7 +1202,7 @@ class PgVector(VectorDb):
                 drop_index_sql = f'DROP INDEX IF EXISTS "{self.schema}"."{index_name}";'
                 sess.execute(text(drop_index_sql))
             except Exception as e:
-                logger.error(f"Error dropping index '{index_name}': {e}")
+                log_error(f"Error dropping index '{index_name}': {e}")
                 raise
 
     def _create_vector_index(self, force_recreate: bool = False) -> None:
@@ -1220,10 +1257,10 @@ class PgVector(VectorDb):
                 elif isinstance(self.vector_index, HNSW):
                     self._create_hnsw_index(sess, table_fullname, index_distance)
                 else:
-                    logger.error(f"Unknown index type: {type(self.vector_index)}")
+                    log_error(f"Unknown index type: {type(self.vector_index)}")
                     return
             except Exception as e:
-                logger.error(f"Error creating vector index '{self.vector_index.name}': {e}")
+                log_error(f"Error creating vector index '{self.vector_index.name}': {e}")
                 raise
 
     def _create_ivfflat_index(self, sess: Session, table_fullname: str, index_distance: str) -> None:
@@ -1322,7 +1359,7 @@ class PgVector(VectorDb):
             )
             sess.execute(create_gin_index_sql)
         except Exception as e:
-            logger.error(f"Error creating GIN index '{gin_index_name}': {e}")
+            log_error(f"Error creating GIN index '{gin_index_name}': {e}")
             raise
 
     def delete(self) -> bool:
@@ -1341,7 +1378,7 @@ class PgVector(VectorDb):
                 log_info(f"Deleted all records from table '{self.table.fullname}'.")
                 return True
             except Exception as e:
-                logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                 sess.rollback()
                 return False
 
@@ -1357,7 +1394,7 @@ class PgVector(VectorDb):
                 log_info(f"Deleted records with id '{id}' from table '{self.table.fullname}'.")
                 return True
             except Exception as e:
-                logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                 sess.rollback()
                 return False
 
@@ -1373,7 +1410,7 @@ class PgVector(VectorDb):
                 log_info(f"Deleted records with name '{name}' from table '{self.table.fullname}'.")
                 return True
             except Exception as e:
-                logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                 sess.rollback()
                 return False
 
@@ -1389,7 +1426,7 @@ class PgVector(VectorDb):
                 log_info(f"Deleted records with metadata '{metadata}' from table '{self.table.fullname}'.")
                 return True
             except Exception as e:
-                logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                 sess.rollback()
                 return False
 
@@ -1405,7 +1442,7 @@ class PgVector(VectorDb):
                 log_info(f"Deleted records with content ID '{content_id}' from table '{self.table.fullname}'.")
                 return True
             except Exception as e:
-                logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                 sess.rollback()
                 return False
 
@@ -1421,7 +1458,7 @@ class PgVector(VectorDb):
                 log_info(f"Deleted records with content hash '{content_hash}' from table '{self.table.fullname}'.")
                 return True
             except Exception as e:
-                logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                 sess.rollback()
                 return False
 
agno/workflow/parallel.py CHANGED
@@ -102,7 +102,7 @@ class Parallel:
                 step_name=self.name or "Parallel",
                 step_id=str(uuid4()),
                 step_type=StepType.PARALLEL,
-                content=f"Parallel {self.name or 'execution'} completed with 1 result",
+                content=self._build_aggregated_content(step_outputs),
                 executor_name=self.name or "Parallel",
                 images=single_result.images,
                 videos=single_result.videos,
@@ -116,8 +116,8 @@ class Parallel:
 
         early_termination_requested = any(output.stop for output in step_outputs if hasattr(output, "stop"))
 
-        # Multiple results - aggregate them
-        aggregated_content = f"Parallel {self.name or 'execution'} completed with {len(step_outputs)} results"
+        # Multiple results - aggregate them with actual content from all steps
+        aggregated_content = self._build_aggregated_content(step_outputs)
 
         # Combine all media from parallel steps
         all_images = []
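`_build_aggregated_content` itself is not shown in this diff. Judging from the matching aggregation logic added to `Step._get_deepest_content_from_step_output` below, it plausibly looks something like this hypothetical reconstruction:

```python
def _build_aggregated_content(self, step_outputs) -> str:
    # Hypothetical sketch: join each parallel branch's content under a
    # "=== <step name> ===" header, mirroring the format used in
    # agno/workflow/step.py below. Not the verified implementation.
    parts = []
    for i, output in enumerate(step_outputs):
        if output.content:
            step_name = output.step_name or f"Step {i + 1}"
            parts.append(f"=== {step_name} ===\n{output.content}")
    if not parts:
        return f"Parallel {self.name or 'execution'} completed with {len(step_outputs)} results"
    return "\n\n".join(parts)
```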
agno/workflow/step.py CHANGED
@@ -1363,10 +1363,22 @@ class Step:
 
         For container steps (Steps, Router, Loop, etc.), this will recursively find the content from the
         last actual step rather than using the generic container message.
+
+        For Parallel steps, aggregates content from ALL inner steps (not just the last one).
         """
-        # If this step has nested steps (like Steps, Condition, Router, Loop, etc.)
+        # If this step has nested steps (like Steps, Condition, Router, Loop, Parallel, etc.)
         if hasattr(step_output, "steps") and step_output.steps and len(step_output.steps) > 0:
-            # Recursively get content from the last nested step
+            # For Parallel steps, aggregate content from ALL inner steps
+            if step_output.step_type == StepType.PARALLEL:
+                aggregated_parts = []
+                for i, inner_step in enumerate(step_output.steps):
+                    inner_content = self._get_deepest_content_from_step_output(inner_step)
+                    if inner_content:
+                        step_name = inner_step.step_name or f"Step {i + 1}"
+                        aggregated_parts.append(f"=== {step_name} ===\n{inner_content}")
+                return "\n\n".join(aggregated_parts) if aggregated_parts else step_output.content  # type: ignore
+
+            # For other nested step types, recursively get content from the last nested step
             return self._get_deepest_content_from_step_output(step_output.steps[-1])
 
         # For regular steps, return their content
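Together, the two workflow changes mean a step that follows a `Parallel` block sees every branch's output rather than a one-line summary. A hedged sketch of the effect; the workflow construction, agents, and the exact `Parallel(*steps, name=...)` signature are assumptions for illustration:

```python
from agno.agent import Agent
from agno.workflow import Parallel, Step, Workflow

# Placeholder agents; real ones would carry models and instructions.
web_agent = Agent(name="web")
paper_agent = Agent(name="papers")
writer_agent = Agent(name="writer")

# Two parallel branches feed a combining step.
workflow = Workflow(
    name="research",
    steps=[
        Parallel(
            Step(name="web_search", agent=web_agent),
            Step(name="paper_search", agent=paper_agent),
            name="gather",
        ),
        Step(name="combine", agent=writer_agent),
    ],
)

# After this change, the "combine" step receives input shaped like:
#
#   === web_search ===
#   <content from the web_search branch>
#
#   === paper_search ===
#   <content from the paper_search branch>
```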