agno 2.3.7__py3-none-any.whl → 2.3.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. agno/agent/agent.py +391 -335
  2. agno/db/mongo/async_mongo.py +0 -24
  3. agno/db/mongo/mongo.py +0 -16
  4. agno/db/mysql/__init__.py +2 -1
  5. agno/db/mysql/async_mysql.py +2888 -0
  6. agno/db/mysql/mysql.py +17 -27
  7. agno/db/mysql/utils.py +139 -6
  8. agno/db/postgres/async_postgres.py +10 -26
  9. agno/db/postgres/postgres.py +7 -25
  10. agno/db/redis/redis.py +0 -4
  11. agno/db/schemas/evals.py +1 -0
  12. agno/db/singlestore/singlestore.py +5 -12
  13. agno/db/sqlite/async_sqlite.py +2 -26
  14. agno/db/sqlite/sqlite.py +0 -20
  15. agno/eval/__init__.py +10 -0
  16. agno/eval/agent_as_judge.py +860 -0
  17. agno/eval/base.py +29 -0
  18. agno/eval/utils.py +2 -1
  19. agno/exceptions.py +7 -0
  20. agno/knowledge/embedder/openai.py +8 -8
  21. agno/knowledge/knowledge.py +1142 -176
  22. agno/media.py +22 -6
  23. agno/models/aws/claude.py +8 -7
  24. agno/models/base.py +160 -11
  25. agno/models/deepseek/deepseek.py +67 -0
  26. agno/models/google/gemini.py +65 -11
  27. agno/models/google/utils.py +22 -0
  28. agno/models/message.py +2 -0
  29. agno/models/openai/chat.py +4 -0
  30. agno/models/openai/responses.py +3 -2
  31. agno/os/app.py +64 -74
  32. agno/os/interfaces/a2a/router.py +3 -4
  33. agno/os/interfaces/a2a/utils.py +1 -1
  34. agno/os/interfaces/agui/router.py +2 -0
  35. agno/os/middleware/jwt.py +8 -6
  36. agno/os/router.py +3 -1607
  37. agno/os/routers/agents/__init__.py +3 -0
  38. agno/os/routers/agents/router.py +581 -0
  39. agno/os/routers/agents/schema.py +261 -0
  40. agno/os/routers/evals/evals.py +26 -6
  41. agno/os/routers/evals/schemas.py +34 -2
  42. agno/os/routers/evals/utils.py +101 -20
  43. agno/os/routers/knowledge/knowledge.py +1 -1
  44. agno/os/routers/teams/__init__.py +3 -0
  45. agno/os/routers/teams/router.py +496 -0
  46. agno/os/routers/teams/schema.py +257 -0
  47. agno/os/routers/workflows/__init__.py +3 -0
  48. agno/os/routers/workflows/router.py +545 -0
  49. agno/os/routers/workflows/schema.py +75 -0
  50. agno/os/schema.py +1 -559
  51. agno/os/utils.py +139 -2
  52. agno/team/team.py +159 -100
  53. agno/tools/file_generation.py +12 -6
  54. agno/tools/firecrawl.py +15 -7
  55. agno/tools/workflow.py +8 -1
  56. agno/utils/hooks.py +64 -5
  57. agno/utils/http.py +2 -2
  58. agno/utils/media.py +11 -1
  59. agno/utils/print_response/agent.py +8 -0
  60. agno/utils/print_response/team.py +8 -0
  61. agno/vectordb/pgvector/pgvector.py +88 -51
  62. agno/workflow/parallel.py +11 -5
  63. agno/workflow/step.py +17 -5
  64. agno/workflow/types.py +38 -2
  65. agno/workflow/workflow.py +12 -4
  66. {agno-2.3.7.dist-info → agno-2.3.9.dist-info}/METADATA +8 -3
  67. {agno-2.3.7.dist-info → agno-2.3.9.dist-info}/RECORD +70 -58
  68. agno/tools/memori.py +0 -339
  69. {agno-2.3.7.dist-info → agno-2.3.9.dist-info}/WHEEL +0 -0
  70. {agno-2.3.7.dist-info → agno-2.3.9.dist-info}/licenses/LICENSE +0 -0
  71. {agno-2.3.7.dist-info → agno-2.3.9.dist-info}/top_level.txt +0 -0
agno/vectordb/pgvector/pgvector.py CHANGED

@@ -28,7 +28,7 @@ from agno.filters import FilterExpr
  from agno.knowledge.document import Document
  from agno.knowledge.embedder import Embedder
  from agno.knowledge.reranker.base import Reranker
- from agno.utils.log import log_debug, log_info, logger
+ from agno.utils.log import log_debug, log_info, log_error, log_warning
  from agno.vectordb.base import VectorDb
  from agno.vectordb.distance import Distance
  from agno.vectordb.pgvector.index import HNSW, Ivfflat
@@ -62,6 +62,7 @@ class PgVector(VectorDb):
          schema_version: int = 1,
          auto_upgrade_schema: bool = False,
          reranker: Optional[Reranker] = None,
+         create_schema: bool = True,
      ):
          """
          Initialize the PgVector instance.
@@ -82,6 +83,8 @@ class PgVector(VectorDb):
              content_language (str): Language for full-text search.
              schema_version (int): Version of the database schema.
              auto_upgrade_schema (bool): Automatically upgrade schema if True.
+             create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                 Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
          """
          if not table_name:
              raise ValueError("Table name must be provided.")
@@ -104,7 +107,7 @@ class PgVector(VectorDb):
          try:
              db_engine = create_engine(db_url)
          except Exception as e:
-             logger.error(f"Failed to create engine from 'db_url': {e}")
+             log_error(f"Failed to create engine from 'db_url': {e}")
              raise

          # Database settings
@@ -147,6 +150,9 @@ class PgVector(VectorDb):
          # Reranker instance
          self.reranker: Optional[Reranker] = reranker

+         # Schema creation flag
+         self.create_schema: bool = create_schema
+
          # Database session
          self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
          # Database table
@@ -209,7 +215,7 @@ class PgVector(VectorDb):
          try:
              return inspect(self.db_engine).has_table(self.table_name, schema=self.schema)
          except Exception as e:
-             logger.error(f"Error checking if table exists: {e}")
+             log_error(f"Error checking if table exists: {e}")
              return False

      def create(self) -> None:
@@ -220,7 +226,7 @@ class PgVector(VectorDb):
          with self.Session() as sess, sess.begin():
              log_debug("Creating extension: vector")
              sess.execute(text("CREATE EXTENSION IF NOT EXISTS vector;"))
-             if self.schema is not None:
+             if self.create_schema and self.schema is not None:
                  log_debug(f"Creating schema: {self.schema}")
                  sess.execute(text(f"CREATE SCHEMA IF NOT EXISTS {self.schema};"))
              log_debug(f"Creating table: {self.table_name}")
@@ -247,7 +253,7 @@ class PgVector(VectorDb):
              result = sess.execute(stmt).first()
              return result is not None
          except Exception as e:
-             logger.error(f"Error checking if record exists: {e}")
+             log_error(f"Error checking if record exists: {e}")
              return False

      def name_exists(self, name: str) -> bool:
@@ -324,7 +330,7 @@ class PgVector(VectorDb):
                  try:
                      batch_records.append(self._get_document_record(doc, filters, content_hash))
                  except Exception as e:
-                     logger.error(f"Error processing document '{doc.name}': {e}")
+                     log_error(f"Error processing document '{doc.name}': {e}")

                  # Insert the batch of records
                  insert_stmt = postgresql.insert(self.table)
@@ -332,11 +338,11 @@ class PgVector(VectorDb):
                      sess.commit()  # Commit batch independently
                      log_info(f"Inserted batch of {len(batch_records)} documents.")
                  except Exception as e:
-                     logger.error(f"Error with batch starting at index {i}: {e}")
+                     log_error(f"Error with batch starting at index {i}: {e}")
                      sess.rollback()  # Rollback the current batch if there's an error
                      raise
          except Exception as e:
-             logger.error(f"Error inserting documents: {e}")
+             log_error(f"Error inserting documents: {e}")
              raise

      async def async_insert(
@@ -380,7 +386,7 @@ class PgVector(VectorDb):
                      }
                      batch_records.append(record)
                  except Exception as e:
-                     logger.error(f"Error processing document '{doc.name}': {e}")
+                     log_error(f"Error processing document '{doc.name}': {e}")

                  # Insert the batch of records
                  if batch_records:
@@ -389,11 +395,11 @@ class PgVector(VectorDb):
                          sess.commit()  # Commit batch independently
                          log_info(f"Inserted batch of {len(batch_records)} documents.")
                      except Exception as e:
-                         logger.error(f"Error with batch starting at index {i}: {e}")
+                         log_error(f"Error with batch starting at index {i}: {e}")
                          sess.rollback()  # Rollback the current batch if there's an error
                          raise
          except Exception as e:
-             logger.error(f"Error inserting documents: {e}")
+             log_error(f"Error inserting documents: {e}")
              raise

      def upsert_available(self) -> bool:
@@ -422,7 +428,7 @@ class PgVector(VectorDb):
              self._delete_by_content_hash(content_hash)
              self._upsert(content_hash, documents, filters, batch_size)
          except Exception as e:
-             logger.error(f"Error upserting documents by content hash: {e}")
+             log_error(f"Error upserting documents by content hash: {e}")
              raise

      def _upsert(
@@ -452,7 +458,7 @@ class PgVector(VectorDb):
                  try:
                      batch_records_dict[doc.id] = self._get_document_record(doc, filters, content_hash)  # type: ignore
                  except Exception as e:
-                     logger.error(f"Error processing document '{doc.name}': {e}")
+                     log_error(f"Error processing document '{doc.name}': {e}")

                  # Convert dict to list for upsert
                  batch_records = list(batch_records_dict.values())
@@ -479,11 +485,11 @@ class PgVector(VectorDb):
                      sess.commit()  # Commit batch independently
                      log_info(f"Upserted batch of {len(batch_records)} documents.")
                  except Exception as e:
-                     logger.error(f"Error with batch starting at index {i}: {e}")
+                     log_error(f"Error with batch starting at index {i}: {e}")
                      sess.rollback()  # Rollback the current batch if there's an error
                      raise
          except Exception as e:
-             logger.error(f"Error upserting documents: {e}")
+             log_error(f"Error upserting documents: {e}")
              raise

      def _get_document_record(
@@ -532,7 +538,7 @@ class PgVector(VectorDb):
                          doc.embedding = embeddings[j]
                          doc.usage = usages[j] if j < len(usages) else None
                  except Exception as e:
-                     logger.error(f"Error assigning batch embedding to document '{doc.name}': {e}")
+                     log_error(f"Error assigning batch embedding to document '{doc.name}': {e}")

              except Exception as e:
                  # Check if this is a rate limit error - don't fall back as it would make things worse
@@ -543,17 +549,41 @@ class PgVector(VectorDb):
                  )

                  if is_rate_limit:
-                     logger.error(f"Rate limit detected during batch embedding. {e}")
+                     log_error(f"Rate limit detected during batch embedding. {e}")
                      raise e
                  else:
-                     logger.warning(f"Async batch embedding failed, falling back to individual embeddings: {e}")
+                     log_warning(f"Async batch embedding failed, falling back to individual embeddings: {e}")
                      # Fall back to individual embedding
                      embed_tasks = [doc.async_embed(embedder=self.embedder) for doc in batch_docs]
-                     await asyncio.gather(*embed_tasks, return_exceptions=True)
+                     results = await asyncio.gather(*embed_tasks, return_exceptions=True)
+
+                     # Check for exceptions and handle them
+                     for i, result in enumerate(results):
+                         if isinstance(result, Exception):
+                             error_msg = str(result)
+                             # If it's an event loop closure error, log it but don't fail
+                             if "Event loop is closed" in error_msg or "RuntimeError" in type(result).__name__:
+                                 log_warning(
+                                     f"Event loop closure during embedding for document {i}, but operation may have succeeded: {result}"
+                                 )
+                             else:
+                                 log_error(f"Error embedding document {i}: {result}")
          else:
              # Use individual embedding
              embed_tasks = [doc.async_embed(embedder=self.embedder) for doc in batch_docs]
-             await asyncio.gather(*embed_tasks, return_exceptions=True)
+             results = await asyncio.gather(*embed_tasks, return_exceptions=True)
+
+             # Check for exceptions and handle them
+             for i, result in enumerate(results):
+                 if isinstance(result, Exception):
+                     error_msg = str(result)
+                     # If it's an event loop closure error, log it but don't fail
+                     if "Event loop is closed" in error_msg or "RuntimeError" in type(result).__name__:
+                         log_warning(
+                             f"Event loop closure during embedding for document {i}, but operation may have succeeded: {result}"
+                         )
+                     else:
+                         log_error(f"Error embedding document {i}: {result}")

      async def async_upsert(
          self,
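
The fallback path now keeps the results of asyncio.gather(..., return_exceptions=True) and inspects them instead of silently discarding failures. A self-contained sketch of that pattern, using a stand-in coroutine rather than agno's Document.async_embed:

# Standalone sketch of the gather-and-inspect pattern used above.
import asyncio

async def embed(i: int) -> str:
    if i == 1:
        raise RuntimeError("Event loop is closed")  # simulated failure
    return f"embedding-{i}"

async def main() -> None:
    tasks = [embed(i) for i in range(3)]
    # return_exceptions=True keeps one failure from cancelling the rest
    results = await asyncio.gather(*tasks, return_exceptions=True)
    for i, result in enumerate(results):
        if isinstance(result, Exception):
            print(f"document {i} failed: {result!r}")
        else:
            print(f"document {i} ok: {result}")

asyncio.run(main())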
@@ -568,7 +598,7 @@ class PgVector(VectorDb):
              self._delete_by_content_hash(content_hash)
              await self._async_upsert(content_hash, documents, filters, batch_size)
          except Exception as e:
-             logger.error(f"Error upserting documents by content hash: {e}")
+             log_error(f"Error upserting documents by content hash: {e}")
              raise

      async def _async_upsert(
@@ -597,11 +627,18 @@ class PgVector(VectorDb):

              # Prepare documents for upserting
              batch_records_dict = {}  # Use dict to deduplicate by ID
-             for doc in batch_docs:
+             for idx, doc in enumerate(batch_docs):
                  try:
                      cleaned_content = self._clean_content(doc.content)
                      record_id = md5(cleaned_content.encode()).hexdigest()

+                     if (
+                         doc.embedding is not None
+                         and isinstance(doc.embedding, list)
+                         and len(doc.embedding) == 0
+                     ):
+                         log_warning(f"Document {idx} '{doc.name}' has empty embedding (length 0)")
+
                      meta_data = doc.meta_data or {}
                      if filters:
                          meta_data.update(filters)
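
Record IDs in _async_upsert come from an MD5 hash of the cleaned content, so documents with identical content collapse to a single dict entry before the upsert. A minimal illustration of that dedup step (the whitespace-collapsing clean function is a simplified stand-in for _clean_content):

# Sketch of the content-hash dedup performed in _async_upsert.
from hashlib import md5

docs = ["hello  world", "hello world", "goodbye"]

records: dict[str, str] = {}
for content in docs:
    cleaned = " ".join(content.split())  # simplified stand-in for _clean_content
    record_id = md5(cleaned.encode()).hexdigest()
    records[record_id] = cleaned  # identical content deduplicates by ID

print(len(records))  # 2 - both "hello world" variants collapsed into one record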
@@ -619,7 +656,7 @@ class PgVector(VectorDb):
                      }
                      batch_records_dict[record_id] = record  # This deduplicates by ID
                  except Exception as e:
-                     logger.error(f"Error processing document '{doc.name}': {e}")
+                     log_error(f"Error processing document '{doc.name}': {e}")

              # Convert dict to list for upsert
              batch_records = list(batch_records_dict.values())
@@ -646,11 +683,11 @@ class PgVector(VectorDb):
                      sess.commit()  # Commit batch independently
                      log_info(f"Upserted batch of {len(batch_records)} documents.")
                  except Exception as e:
-                     logger.error(f"Error with batch starting at index {i}: {e}")
+                     log_error(f"Error with batch starting at index {i}: {e}")
                      sess.rollback()  # Rollback the current batch if there's an error
                      raise
          except Exception as e:
-             logger.error(f"Error upserting documents: {e}")
+             log_error(f"Error upserting documents: {e}")
              raise

      def update_metadata(self, content_id: str, metadata: Dict[str, Any]) -> None:
@@ -679,7 +716,7 @@ class PgVector(VectorDb):
              sess.execute(stmt)
              sess.commit()
          except Exception as e:
-             logger.error(f"Error updating metadata for document {content_id}: {e}")
+             log_error(f"Error updating metadata for document {content_id}: {e}")
              raise

      def search(
@@ -703,7 +740,7 @@ class PgVector(VectorDb):
          elif self.search_type == SearchType.hybrid:
              return self.hybrid_search(query=query, limit=limit, filters=filters)
          else:
-             logger.error(f"Invalid search type '{self.search_type}'.")
+             log_error(f"Invalid search type '{self.search_type}'.")
              return []

      async def async_search(
@@ -751,7 +788,7 @@ class PgVector(VectorDb):
          # Get the embedding for the query string
          query_embedding = self.embedder.get_embedding(query)
          if query_embedding is None:
-             logger.error(f"Error getting embedding for Query: {query}")
+             log_error(f"Error getting embedding for Query: {query}")
              return []

          # Define the columns to select
@@ -789,7 +826,7 @@ class PgVector(VectorDb):
          elif self.distance == Distance.max_inner_product:
              stmt = stmt.order_by(self.table.c.embedding.max_inner_product(query_embedding))
          else:
-             logger.error(f"Unknown distance metric: {self.distance}")
+             log_error(f"Unknown distance metric: {self.distance}")
              return []

          # Limit the number of results
@@ -808,8 +845,8 @@ class PgVector(VectorDb):
                      sess.execute(text(f"SET LOCAL hnsw.ef_search = {self.vector_index.ef_search}"))
                  results = sess.execute(stmt).fetchall()
          except Exception as e:
-             logger.error(f"Error performing semantic search: {e}")
-             logger.error("Table might not exist, creating for future use")
+             log_error(f"Error performing semantic search: {e}")
+             log_error("Table might not exist, creating for future use")
              self.create()
              return []

@@ -834,7 +871,7 @@ class PgVector(VectorDb):
              log_info(f"Found {len(search_results)} documents")
              return search_results
          except Exception as e:
-             logger.error(f"Error during vector search: {e}")
+             log_error(f"Error during vector search: {e}")
              return []

      def enable_prefix_matching(self, query: str) -> str:
@@ -916,8 +953,8 @@ class PgVector(VectorDb):
              with self.Session() as sess, sess.begin():
                  results = sess.execute(stmt).fetchall()
          except Exception as e:
-             logger.error(f"Error performing keyword search: {e}")
-             logger.error("Table might not exist, creating for future use")
+             log_error(f"Error performing keyword search: {e}")
+             log_error("Table might not exist, creating for future use")
              self.create()
              return []

@@ -939,7 +976,7 @@ class PgVector(VectorDb):
              log_info(f"Found {len(search_results)} documents")
              return search_results
          except Exception as e:
-             logger.error(f"Error during keyword search: {e}")
+             log_error(f"Error during keyword search: {e}")
              return []

      def hybrid_search(
@@ -963,7 +1000,7 @@ class PgVector(VectorDb):
          # Get the embedding for the query string
          query_embedding = self.embedder.get_embedding(query)
          if query_embedding is None:
-             logger.error(f"Error getting embedding for Query: {query}")
+             log_error(f"Error getting embedding for Query: {query}")
              return []

          # Define the columns to select
@@ -1001,7 +1038,7 @@ class PgVector(VectorDb):
              # Normalize to range [0, 1]
              vector_score = (raw_vector_score + 1) / 2
          else:
-             logger.error(f"Unknown distance metric: {self.distance}")
+             log_error(f"Unknown distance metric: {self.distance}")
              return []

          # Apply weights to control the influence of each score
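
For context on the normalization kept in this hunk: cosine similarity lies in [-1, 1], so (raw + 1) / 2 maps it onto [0, 1] before it is weighted against the keyword score. A quick worked check:

# Worked check of the [-1, 1] -> [0, 1] score normalization shown above.
for raw_vector_score in (-1.0, 0.0, 0.5, 1.0):
    vector_score = (raw_vector_score + 1) / 2
    print(raw_vector_score, "->", vector_score)  # -1.0 -> 0.0 ... 1.0 -> 1.0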
@@ -1052,7 +1089,7 @@ class PgVector(VectorDb):
                      sess.execute(text(f"SET LOCAL hnsw.ef_search = {self.vector_index.ef_search}"))
                  results = sess.execute(stmt).fetchall()
          except Exception as e:
-             logger.error(f"Error performing hybrid search: {e}")
+             log_error(f"Error performing hybrid search: {e}")
              return []

          # Process the results and convert to Document objects
@@ -1076,7 +1113,7 @@ class PgVector(VectorDb):
              log_info(f"Found {len(search_results)} documents")
              return search_results
          except Exception as e:
-             logger.error(f"Error during hybrid search: {e}")
+             log_error(f"Error during hybrid search: {e}")
              return []

      def drop(self) -> None:
@@ -1089,7 +1126,7 @@ class PgVector(VectorDb):
                  self.table.drop(self.db_engine)
                  log_info(f"Table '{self.table.fullname}' dropped successfully.")
              except Exception as e:
-                 logger.error(f"Error dropping table '{self.table.fullname}': {e}")
+                 log_error(f"Error dropping table '{self.table.fullname}': {e}")
                  raise
          else:
              log_info(f"Table '{self.table.fullname}' does not exist.")
@@ -1124,7 +1161,7 @@ class PgVector(VectorDb):
              result = sess.execute(stmt).scalar()
              return int(result) if result is not None else 0
          except Exception as e:
-             logger.error(f"Error getting count from table '{self.table.fullname}': {e}")
+             log_error(f"Error getting count from table '{self.table.fullname}': {e}")
              return 0

      def optimize(self, force_recreate: bool = False) -> None:
@@ -1165,7 +1202,7 @@ class PgVector(VectorDb):
              drop_index_sql = f'DROP INDEX IF EXISTS "{self.schema}"."{index_name}";'
              sess.execute(text(drop_index_sql))
          except Exception as e:
-             logger.error(f"Error dropping index '{index_name}': {e}")
+             log_error(f"Error dropping index '{index_name}': {e}")
              raise

      def _create_vector_index(self, force_recreate: bool = False) -> None:
@@ -1220,10 +1257,10 @@ class PgVector(VectorDb):
                  elif isinstance(self.vector_index, HNSW):
                      self._create_hnsw_index(sess, table_fullname, index_distance)
                  else:
-                     logger.error(f"Unknown index type: {type(self.vector_index)}")
+                     log_error(f"Unknown index type: {type(self.vector_index)}")
                      return
          except Exception as e:
-             logger.error(f"Error creating vector index '{self.vector_index.name}': {e}")
+             log_error(f"Error creating vector index '{self.vector_index.name}': {e}")
              raise

      def _create_ivfflat_index(self, sess: Session, table_fullname: str, index_distance: str) -> None:
@@ -1322,7 +1359,7 @@ class PgVector(VectorDb):
              )
              sess.execute(create_gin_index_sql)
          except Exception as e:
-             logger.error(f"Error creating GIN index '{gin_index_name}': {e}")
+             log_error(f"Error creating GIN index '{gin_index_name}': {e}")
              raise

      def delete(self) -> bool:
@@ -1341,7 +1378,7 @@ class PgVector(VectorDb):
                  log_info(f"Deleted all records from table '{self.table.fullname}'.")
                  return True
              except Exception as e:
-                 logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                 log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                  sess.rollback()
                  return False

@@ -1357,7 +1394,7 @@ class PgVector(VectorDb):
                  log_info(f"Deleted records with id '{id}' from table '{self.table.fullname}'.")
                  return True
              except Exception as e:
-                 logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                 log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                  sess.rollback()
                  return False

@@ -1373,7 +1410,7 @@ class PgVector(VectorDb):
                  log_info(f"Deleted records with name '{name}' from table '{self.table.fullname}'.")
                  return True
              except Exception as e:
-                 logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                 log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                  sess.rollback()
                  return False

@@ -1389,7 +1426,7 @@ class PgVector(VectorDb):
                  log_info(f"Deleted records with metadata '{metadata}' from table '{self.table.fullname}'.")
                  return True
              except Exception as e:
-                 logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                 log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                  sess.rollback()
                  return False

@@ -1405,7 +1442,7 @@ class PgVector(VectorDb):
                  log_info(f"Deleted records with content ID '{content_id}' from table '{self.table.fullname}'.")
                  return True
              except Exception as e:
-                 logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                 log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                  sess.rollback()
                  return False

@@ -1421,7 +1458,7 @@ class PgVector(VectorDb):
                  log_info(f"Deleted records with content hash '{content_hash}' from table '{self.table.fullname}'.")
                  return True
              except Exception as e:
-                 logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+                 log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
                  sess.rollback()
                  return False

agno/workflow/parallel.py CHANGED
@@ -1,6 +1,7 @@
  import asyncio
  import warnings
  from concurrent.futures import ThreadPoolExecutor, as_completed
+ from contextvars import copy_context
  from copy import deepcopy
  from dataclasses import dataclass
  from typing import Any, AsyncIterator, Awaitable, Callable, Dict, Iterator, List, Optional, Union
@@ -101,7 +102,7 @@ class Parallel:
                  step_name=self.name or "Parallel",
                  step_id=str(uuid4()),
                  step_type=StepType.PARALLEL,
-                 content=f"Parallel {self.name or 'execution'} completed with 1 result",
+                 content=self._build_aggregated_content(step_outputs),
                  executor_name=self.name or "Parallel",
                  images=single_result.images,
                  videos=single_result.videos,
@@ -115,8 +116,8 @@ class Parallel:

          early_termination_requested = any(output.stop for output in step_outputs if hasattr(output, "stop"))

-         # Multiple results - aggregate them
-         aggregated_content = f"Parallel {self.name or 'execution'} completed with {len(step_outputs)} results"
+         # Multiple results - aggregate them with actual content from all steps
+         aggregated_content = self._build_aggregated_content(step_outputs)

          # Combine all media from parallel steps
          all_images = []
@@ -267,8 +268,9 @@ class Parallel:

          with ThreadPoolExecutor(max_workers=len(self.steps)) as executor:
              # Submit all tasks with their original indices
+             # Use copy_context().run to propagate context variables to child threads
              future_to_index = {
-                 executor.submit(execute_step_with_index, indexed_step): indexed_step[0]
+                 executor.submit(copy_context().run, execute_step_with_index, indexed_step): indexed_step[0]
                  for indexed_step in indexed_steps
              }

@@ -449,7 +451,11 @@ class Parallel:

          with ThreadPoolExecutor(max_workers=len(self.steps)) as executor:
              # Submit all tasks
-             futures = [executor.submit(execute_step_stream_with_index, indexed_step) for indexed_step in indexed_steps]
+             # Use copy_context().run to propagate context variables to child threads
+             futures = [
+                 executor.submit(copy_context().run, execute_step_stream_with_index, indexed_step)
+                 for indexed_step in indexed_steps
+             ]

              # Process events from queue as they arrive
              completed_steps = 0
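
ThreadPoolExecutor workers start with a fresh context, so contextvars.ContextVar values set in the submitting thread are not visible to them; wrapping the callable in copy_context().run is what carries them across. A self-contained sketch of the difference (the variable name is illustrative, not an agno name):

# Why copy_context().run is needed with ThreadPoolExecutor.
from concurrent.futures import ThreadPoolExecutor
from contextvars import ContextVar, copy_context

current_run_id: ContextVar[str] = ContextVar("current_run_id", default="unset")

def read_var() -> str:
    return current_run_id.get()

current_run_id.set("run-42")
with ThreadPoolExecutor(max_workers=1) as executor:
    # Plain submit: the worker thread sees only the default value.
    print(executor.submit(read_var).result())  # "unset"
    # copy_context().run: the worker sees the caller's value.
    print(executor.submit(copy_context().run, read_var).result())  # "run-42"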
agno/workflow/step.py CHANGED
@@ -644,7 +644,7 @@ class Step:
                  session_state=session_state_copy,  # Send a copy to the executor
                  stream=True,
                  stream_events=stream_events,
-                 yield_run_response=True,
+                 yield_run_output=True,
                  run_context=run_context,
                  **kwargs,
              )
@@ -653,7 +653,7 @@ class Step:
              for event in response_stream:
                  if isinstance(event, RunOutput) or isinstance(event, TeamRunOutput):
                      active_executor_run_response = event
-                     break
+                     continue
                  # Only yield executor events if stream_executor_events is True
                  if stream_executor_events:
                      enriched_event = self._enrich_event_with_context(
@@ -1174,7 +1174,7 @@ class Step:
                  stream=True,
                  stream_events=stream_events,
                  run_context=run_context,
-                 yield_run_response=True,
+                 yield_run_output=True,
                  **kwargs,
              )

@@ -1363,10 +1363,22 @@ class Step:

          For container steps (Steps, Router, Loop, etc.), this will recursively find the content from the
          last actual step rather than using the generic container message.
+
+         For Parallel steps, aggregates content from ALL inner steps (not just the last one).
          """
-         # If this step has nested steps (like Steps, Condition, Router, Loop, etc.)
+         # If this step has nested steps (like Steps, Condition, Router, Loop, Parallel, etc.)
          if hasattr(step_output, "steps") and step_output.steps and len(step_output.steps) > 0:
-             # Recursively get content from the last nested step
+             # For Parallel steps, aggregate content from ALL inner steps
+             if step_output.step_type == StepType.PARALLEL:
+                 aggregated_parts = []
+                 for i, inner_step in enumerate(step_output.steps):
+                     inner_content = self._get_deepest_content_from_step_output(inner_step)
+                     if inner_content:
+                         step_name = inner_step.step_name or f"Step {i + 1}"
+                         aggregated_parts.append(f"=== {step_name} ===\n{inner_content}")
+                 return "\n\n".join(aggregated_parts) if aggregated_parts else step_output.content  # type: ignore
+
+             # For other nested step types, recursively get content from the last nested step
              return self._get_deepest_content_from_step_output(step_output.steps[-1])

          # For regular steps, return their content
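
The aggregated content for a Parallel step is simply the inner step contents joined under "=== name ===" headers. A standalone sketch of the resulting shape (step names and contents are invented for illustration):

# Sketch of the "=== step name ===" aggregation format built above.
inner_steps = [
    ("research", "Found 3 relevant papers."),
    ("summarize", "Key finding: X improves Y."),
]

aggregated_parts = []
for i, (step_name, content) in enumerate(inner_steps):
    name = step_name or f"Step {i + 1}"
    aggregated_parts.append(f"=== {name} ===\n{content}")

print("\n\n".join(aggregated_parts))
# === research ===
# Found 3 relevant papers.
#
# === summarize ===
# Key finding: X improves Y.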
agno/workflow/types.py CHANGED
@@ -107,10 +107,46 @@ class StepInput:
          return str(self.input)

      def get_step_output(self, step_name: str) -> Optional["StepOutput"]:
-         """Get output from a specific previous step by name"""
+         """Get output from a specific previous step by name
+
+         Searches recursively through nested steps (Parallel, Condition, Router, Loop, Steps)
+         to find step outputs at any depth.
+         """
          if not self.previous_step_outputs:
              return None
-         return self.previous_step_outputs.get(step_name)
+
+         # First try direct lookup
+         direct = self.previous_step_outputs.get(step_name)
+         if direct:
+             return direct
+
+         # Search recursively in nested steps
+         return self._search_nested_steps(step_name)
+
+     def _search_nested_steps(self, step_name: str) -> Optional["StepOutput"]:
+         """Recursively search for a step output in nested steps (Parallel, Condition, etc.)"""
+         if not self.previous_step_outputs:
+             return None
+
+         for step_output in self.previous_step_outputs.values():
+             result = self._search_in_step_output(step_output, step_name)
+             if result:
+                 return result
+         return None
+
+     def _search_in_step_output(self, step_output: "StepOutput", step_name: str) -> Optional["StepOutput"]:
+         """Helper to recursively search within a single StepOutput"""
+         if not step_output.steps:
+             return None
+
+         for nested_step in step_output.steps:
+             if nested_step.step_name == step_name:
+                 return nested_step
+             # Recursively search deeper
+             result = self._search_in_step_output(nested_step, step_name)
+             if result:
+                 return result
+         return None

      def get_step_content(self, step_name: str) -> Optional[Union[str, Dict[str, str]]]:
          """Get content from a specific previous step by name