agno 2.3.8__py3-none-any.whl → 2.3.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. agno/agent/agent.py +134 -94
  2. agno/db/mysql/__init__.py +2 -1
  3. agno/db/mysql/async_mysql.py +2888 -0
  4. agno/db/mysql/mysql.py +17 -8
  5. agno/db/mysql/utils.py +139 -6
  6. agno/db/postgres/async_postgres.py +10 -5
  7. agno/db/postgres/postgres.py +7 -2
  8. agno/db/schemas/evals.py +1 -0
  9. agno/db/singlestore/singlestore.py +5 -1
  10. agno/db/sqlite/async_sqlite.py +3 -3
  11. agno/eval/__init__.py +10 -0
  12. agno/eval/accuracy.py +11 -8
  13. agno/eval/agent_as_judge.py +861 -0
  14. agno/eval/base.py +29 -0
  15. agno/eval/utils.py +2 -1
  16. agno/exceptions.py +7 -0
  17. agno/knowledge/embedder/openai.py +8 -8
  18. agno/knowledge/knowledge.py +1142 -176
  19. agno/media.py +22 -6
  20. agno/models/aws/claude.py +8 -7
  21. agno/models/base.py +61 -2
  22. agno/models/deepseek/deepseek.py +67 -0
  23. agno/models/google/gemini.py +134 -51
  24. agno/models/google/utils.py +22 -0
  25. agno/models/message.py +5 -0
  26. agno/models/openai/chat.py +4 -0
  27. agno/os/app.py +64 -74
  28. agno/os/interfaces/a2a/router.py +3 -4
  29. agno/os/interfaces/agui/router.py +2 -0
  30. agno/os/router.py +3 -1607
  31. agno/os/routers/agents/__init__.py +3 -0
  32. agno/os/routers/agents/router.py +581 -0
  33. agno/os/routers/agents/schema.py +261 -0
  34. agno/os/routers/evals/evals.py +26 -6
  35. agno/os/routers/evals/schemas.py +34 -2
  36. agno/os/routers/evals/utils.py +77 -18
  37. agno/os/routers/knowledge/knowledge.py +1 -1
  38. agno/os/routers/teams/__init__.py +3 -0
  39. agno/os/routers/teams/router.py +496 -0
  40. agno/os/routers/teams/schema.py +257 -0
  41. agno/os/routers/workflows/__init__.py +3 -0
  42. agno/os/routers/workflows/router.py +545 -0
  43. agno/os/routers/workflows/schema.py +75 -0
  44. agno/os/schema.py +1 -559
  45. agno/os/utils.py +139 -2
  46. agno/team/team.py +87 -24
  47. agno/tools/file_generation.py +12 -6
  48. agno/tools/firecrawl.py +15 -7
  49. agno/tools/function.py +37 -23
  50. agno/tools/shopify.py +1519 -0
  51. agno/tools/spotify.py +2 -5
  52. agno/utils/hooks.py +64 -5
  53. agno/utils/http.py +2 -2
  54. agno/utils/media.py +11 -1
  55. agno/utils/print_response/agent.py +8 -0
  56. agno/utils/print_response/team.py +8 -0
  57. agno/vectordb/pgvector/pgvector.py +88 -51
  58. agno/workflow/parallel.py +5 -3
  59. agno/workflow/step.py +14 -2
  60. agno/workflow/types.py +38 -2
  61. agno/workflow/workflow.py +12 -4
  62. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/METADATA +7 -2
  63. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/RECORD +66 -52
  64. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/WHEEL +0 -0
  65. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/licenses/LICENSE +0 -0
  66. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/top_level.txt +0 -0
agno/vectordb/pgvector/pgvector.py CHANGED
@@ -28,7 +28,7 @@ from agno.filters import FilterExpr
  from agno.knowledge.document import Document
  from agno.knowledge.embedder import Embedder
  from agno.knowledge.reranker.base import Reranker
- from agno.utils.log import log_debug, log_info, logger
+ from agno.utils.log import log_debug, log_error, log_info, log_warning
  from agno.vectordb.base import VectorDb
  from agno.vectordb.distance import Distance
  from agno.vectordb.pgvector.index import HNSW, Ivfflat
@@ -62,6 +62,7 @@ class PgVector(VectorDb):
  schema_version: int = 1,
  auto_upgrade_schema: bool = False,
  reranker: Optional[Reranker] = None,
+ create_schema: bool = True,
  ):
  """
  Initialize the PgVector instance.
@@ -82,6 +83,8 @@ class PgVector(VectorDb):
  content_language (str): Language for full-text search.
  schema_version (int): Version of the database schema.
  auto_upgrade_schema (bool): Automatically upgrade schema if True.
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
  """
  if not table_name:
  raise ValueError("Table name must be provided.")
@@ -104,7 +107,7 @@ class PgVector(VectorDb):
  try:
  db_engine = create_engine(db_url)
  except Exception as e:
- logger.error(f"Failed to create engine from 'db_url': {e}")
+ log_error(f"Failed to create engine from 'db_url': {e}")
  raise

  # Database settings
@@ -147,6 +150,9 @@ class PgVector(VectorDb):
  # Reranker instance
  self.reranker: Optional[Reranker] = reranker

+ # Schema creation flag
+ self.create_schema: bool = create_schema
+
  # Database session
  self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
  # Database table
@@ -209,7 +215,7 @@ class PgVector(VectorDb):
  try:
  return inspect(self.db_engine).has_table(self.table_name, schema=self.schema)
  except Exception as e:
- logger.error(f"Error checking if table exists: {e}")
+ log_error(f"Error checking if table exists: {e}")
  return False

  def create(self) -> None:
@@ -220,7 +226,7 @@ class PgVector(VectorDb):
  with self.Session() as sess, sess.begin():
  log_debug("Creating extension: vector")
  sess.execute(text("CREATE EXTENSION IF NOT EXISTS vector;"))
- if self.schema is not None:
+ if self.create_schema and self.schema is not None:
  log_debug(f"Creating schema: {self.schema}")
  sess.execute(text(f"CREATE SCHEMA IF NOT EXISTS {self.schema};"))
  log_debug(f"Creating table: {self.table_name}")
@@ -247,7 +253,7 @@ class PgVector(VectorDb):
  result = sess.execute(stmt).first()
  return result is not None
  except Exception as e:
- logger.error(f"Error checking if record exists: {e}")
+ log_error(f"Error checking if record exists: {e}")
  return False

  def name_exists(self, name: str) -> bool:
@@ -324,7 +330,7 @@ class PgVector(VectorDb):
  try:
  batch_records.append(self._get_document_record(doc, filters, content_hash))
  except Exception as e:
- logger.error(f"Error processing document '{doc.name}': {e}")
+ log_error(f"Error processing document '{doc.name}': {e}")

  # Insert the batch of records
  insert_stmt = postgresql.insert(self.table)
@@ -332,11 +338,11 @@ class PgVector(VectorDb):
  sess.commit() # Commit batch independently
  log_info(f"Inserted batch of {len(batch_records)} documents.")
  except Exception as e:
- logger.error(f"Error with batch starting at index {i}: {e}")
+ log_error(f"Error with batch starting at index {i}: {e}")
  sess.rollback() # Rollback the current batch if there's an error
  raise
  except Exception as e:
- logger.error(f"Error inserting documents: {e}")
+ log_error(f"Error inserting documents: {e}")
  raise

  async def async_insert(
@@ -380,7 +386,7 @@ class PgVector(VectorDb):
  }
  batch_records.append(record)
  except Exception as e:
- logger.error(f"Error processing document '{doc.name}': {e}")
+ log_error(f"Error processing document '{doc.name}': {e}")

  # Insert the batch of records
  if batch_records:
@@ -389,11 +395,11 @@ class PgVector(VectorDb):
  sess.commit() # Commit batch independently
  log_info(f"Inserted batch of {len(batch_records)} documents.")
  except Exception as e:
- logger.error(f"Error with batch starting at index {i}: {e}")
+ log_error(f"Error with batch starting at index {i}: {e}")
  sess.rollback() # Rollback the current batch if there's an error
  raise
  except Exception as e:
- logger.error(f"Error inserting documents: {e}")
+ log_error(f"Error inserting documents: {e}")
  raise

  def upsert_available(self) -> bool:
@@ -422,7 +428,7 @@ class PgVector(VectorDb):
  self._delete_by_content_hash(content_hash)
  self._upsert(content_hash, documents, filters, batch_size)
  except Exception as e:
- logger.error(f"Error upserting documents by content hash: {e}")
+ log_error(f"Error upserting documents by content hash: {e}")
  raise

  def _upsert(
@@ -452,7 +458,7 @@ class PgVector(VectorDb):
  try:
  batch_records_dict[doc.id] = self._get_document_record(doc, filters, content_hash) # type: ignore
  except Exception as e:
- logger.error(f"Error processing document '{doc.name}': {e}")
+ log_error(f"Error processing document '{doc.name}': {e}")

  # Convert dict to list for upsert
  batch_records = list(batch_records_dict.values())
@@ -479,11 +485,11 @@ class PgVector(VectorDb):
  sess.commit() # Commit batch independently
  log_info(f"Upserted batch of {len(batch_records)} documents.")
  except Exception as e:
- logger.error(f"Error with batch starting at index {i}: {e}")
+ log_error(f"Error with batch starting at index {i}: {e}")
  sess.rollback() # Rollback the current batch if there's an error
  raise
  except Exception as e:
- logger.error(f"Error upserting documents: {e}")
+ log_error(f"Error upserting documents: {e}")
  raise

  def _get_document_record(
@@ -532,7 +538,7 @@ class PgVector(VectorDb):
  doc.embedding = embeddings[j]
  doc.usage = usages[j] if j < len(usages) else None
  except Exception as e:
- logger.error(f"Error assigning batch embedding to document '{doc.name}': {e}")
+ log_error(f"Error assigning batch embedding to document '{doc.name}': {e}")

  except Exception as e:
  # Check if this is a rate limit error - don't fall back as it would make things worse
@@ -543,17 +549,41 @@ class PgVector(VectorDb):
  )

  if is_rate_limit:
- logger.error(f"Rate limit detected during batch embedding. {e}")
+ log_error(f"Rate limit detected during batch embedding. {e}")
  raise e
  else:
- logger.warning(f"Async batch embedding failed, falling back to individual embeddings: {e}")
+ log_warning(f"Async batch embedding failed, falling back to individual embeddings: {e}")
  # Fall back to individual embedding
  embed_tasks = [doc.async_embed(embedder=self.embedder) for doc in batch_docs]
- await asyncio.gather(*embed_tasks, return_exceptions=True)
+ results = await asyncio.gather(*embed_tasks, return_exceptions=True)
+
+ # Check for exceptions and handle them
+ for i, result in enumerate(results):
+ if isinstance(result, Exception):
+ error_msg = str(result)
+ # If it's an event loop closure error, log it but don't fail
+ if "Event loop is closed" in error_msg or "RuntimeError" in type(result).__name__:
+ log_warning(
+ f"Event loop closure during embedding for document {i}, but operation may have succeeded: {result}"
+ )
+ else:
+ log_error(f"Error embedding document {i}: {result}")
  else:
  # Use individual embedding
  embed_tasks = [doc.async_embed(embedder=self.embedder) for doc in batch_docs]
- await asyncio.gather(*embed_tasks, return_exceptions=True)
+ results = await asyncio.gather(*embed_tasks, return_exceptions=True)
+
+ # Check for exceptions and handle them
+ for i, result in enumerate(results):
+ if isinstance(result, Exception):
+ error_msg = str(result)
+ # If it's an event loop closure error, log it but don't fail
+ if "Event loop is closed" in error_msg or "RuntimeError" in type(result).__name__:
+ log_warning(
+ f"Event loop closure during embedding for document {i}, but operation may have succeeded: {result}"
+ )
+ else:
+ log_error(f"Error embedding document {i}: {result}")

  async def async_upsert(
  self,
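Both embedding paths now inspect the results of asyncio.gather instead of discarding them. A self-contained sketch of that pattern (task bodies are illustrative):

import asyncio

async def embed(i: int) -> int:
    if i == 2:
        raise RuntimeError("Event loop is closed")
    return i * i

async def main() -> None:
    # return_exceptions=True keeps one failing task from cancelling the rest;
    # failures come back as exception objects in the results list.
    results = await asyncio.gather(*(embed(i) for i in range(4)), return_exceptions=True)
    for i, result in enumerate(results):
        if isinstance(result, Exception):
            print(f"task {i} failed: {result}")
        else:
            print(f"task {i} -> {result}")

asyncio.run(main())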
@@ -568,7 +598,7 @@ class PgVector(VectorDb):
  self._delete_by_content_hash(content_hash)
  await self._async_upsert(content_hash, documents, filters, batch_size)
  except Exception as e:
- logger.error(f"Error upserting documents by content hash: {e}")
+ log_error(f"Error upserting documents by content hash: {e}")
  raise

  async def _async_upsert(
@@ -597,11 +627,18 @@ class PgVector(VectorDb):

  # Prepare documents for upserting
  batch_records_dict = {} # Use dict to deduplicate by ID
- for doc in batch_docs:
+ for idx, doc in enumerate(batch_docs):
  try:
  cleaned_content = self._clean_content(doc.content)
  record_id = md5(cleaned_content.encode()).hexdigest()

+ if (
+ doc.embedding is not None
+ and isinstance(doc.embedding, list)
+ and len(doc.embedding) == 0
+ ):
+ log_warning(f"Document {idx} '{doc.name}' has empty embedding (length 0)")
+
  meta_data = doc.meta_data or {}
  if filters:
  meta_data.update(filters)
@@ -619,7 +656,7 @@ class PgVector(VectorDb):
  }
  batch_records_dict[record_id] = record # This deduplicates by ID
  except Exception as e:
- logger.error(f"Error processing document '{doc.name}': {e}")
+ log_error(f"Error processing document '{doc.name}': {e}")

  # Convert dict to list for upsert
  batch_records = list(batch_records_dict.values())
@@ -646,11 +683,11 @@ class PgVector(VectorDb):
  sess.commit() # Commit batch independently
  log_info(f"Upserted batch of {len(batch_records)} documents.")
  except Exception as e:
- logger.error(f"Error with batch starting at index {i}: {e}")
+ log_error(f"Error with batch starting at index {i}: {e}")
  sess.rollback() # Rollback the current batch if there's an error
  raise
  except Exception as e:
- logger.error(f"Error upserting documents: {e}")
+ log_error(f"Error upserting documents: {e}")
  raise

  def update_metadata(self, content_id: str, metadata: Dict[str, Any]) -> None:
@@ -679,7 +716,7 @@ class PgVector(VectorDb):
  sess.execute(stmt)
  sess.commit()
  except Exception as e:
- logger.error(f"Error updating metadata for document {content_id}: {e}")
+ log_error(f"Error updating metadata for document {content_id}: {e}")
  raise

  def search(
@@ -703,7 +740,7 @@ class PgVector(VectorDb):
  elif self.search_type == SearchType.hybrid:
  return self.hybrid_search(query=query, limit=limit, filters=filters)
  else:
- logger.error(f"Invalid search type '{self.search_type}'.")
+ log_error(f"Invalid search type '{self.search_type}'.")
  return []

  async def async_search(
@@ -751,7 +788,7 @@ class PgVector(VectorDb):
  # Get the embedding for the query string
  query_embedding = self.embedder.get_embedding(query)
  if query_embedding is None:
- logger.error(f"Error getting embedding for Query: {query}")
+ log_error(f"Error getting embedding for Query: {query}")
  return []

  # Define the columns to select
@@ -789,7 +826,7 @@ class PgVector(VectorDb):
  elif self.distance == Distance.max_inner_product:
  stmt = stmt.order_by(self.table.c.embedding.max_inner_product(query_embedding))
  else:
- logger.error(f"Unknown distance metric: {self.distance}")
+ log_error(f"Unknown distance metric: {self.distance}")
  return []

  # Limit the number of results
@@ -808,8 +845,8 @@ class PgVector(VectorDb):
  sess.execute(text(f"SET LOCAL hnsw.ef_search = {self.vector_index.ef_search}"))
  results = sess.execute(stmt).fetchall()
  except Exception as e:
- logger.error(f"Error performing semantic search: {e}")
- logger.error("Table might not exist, creating for future use")
+ log_error(f"Error performing semantic search: {e}")
+ log_error("Table might not exist, creating for future use")
  self.create()
  return []

@@ -834,7 +871,7 @@ class PgVector(VectorDb):
  log_info(f"Found {len(search_results)} documents")
  return search_results
  except Exception as e:
- logger.error(f"Error during vector search: {e}")
+ log_error(f"Error during vector search: {e}")
  return []

  def enable_prefix_matching(self, query: str) -> str:
@@ -916,8 +953,8 @@ class PgVector(VectorDb):
  with self.Session() as sess, sess.begin():
  results = sess.execute(stmt).fetchall()
  except Exception as e:
- logger.error(f"Error performing keyword search: {e}")
- logger.error("Table might not exist, creating for future use")
+ log_error(f"Error performing keyword search: {e}")
+ log_error("Table might not exist, creating for future use")
  self.create()
  return []

@@ -939,7 +976,7 @@ class PgVector(VectorDb):
  log_info(f"Found {len(search_results)} documents")
  return search_results
  except Exception as e:
- logger.error(f"Error during keyword search: {e}")
+ log_error(f"Error during keyword search: {e}")
  return []

  def hybrid_search(
@@ -963,7 +1000,7 @@ class PgVector(VectorDb):
  # Get the embedding for the query string
  query_embedding = self.embedder.get_embedding(query)
  if query_embedding is None:
- logger.error(f"Error getting embedding for Query: {query}")
+ log_error(f"Error getting embedding for Query: {query}")
  return []

  # Define the columns to select
@@ -1001,7 +1038,7 @@ class PgVector(VectorDb):
  # Normalize to range [0, 1]
  vector_score = (raw_vector_score + 1) / 2
  else:
- logger.error(f"Unknown distance metric: {self.distance}")
+ log_error(f"Unknown distance metric: {self.distance}")
  return []

  # Apply weights to control the influence of each score
@@ -1052,7 +1089,7 @@ class PgVector(VectorDb):
  sess.execute(text(f"SET LOCAL hnsw.ef_search = {self.vector_index.ef_search}"))
  results = sess.execute(stmt).fetchall()
  except Exception as e:
- logger.error(f"Error performing hybrid search: {e}")
+ log_error(f"Error performing hybrid search: {e}")
  return []

  # Process the results and convert to Document objects
@@ -1076,7 +1113,7 @@ class PgVector(VectorDb):
  log_info(f"Found {len(search_results)} documents")
  return search_results
  except Exception as e:
- logger.error(f"Error during hybrid search: {e}")
+ log_error(f"Error during hybrid search: {e}")
  return []

  def drop(self) -> None:
@@ -1089,7 +1126,7 @@ class PgVector(VectorDb):
  self.table.drop(self.db_engine)
  log_info(f"Table '{self.table.fullname}' dropped successfully.")
  except Exception as e:
- logger.error(f"Error dropping table '{self.table.fullname}': {e}")
+ log_error(f"Error dropping table '{self.table.fullname}': {e}")
  raise
  else:
  log_info(f"Table '{self.table.fullname}' does not exist.")
@@ -1124,7 +1161,7 @@ class PgVector(VectorDb):
  result = sess.execute(stmt).scalar()
  return int(result) if result is not None else 0
  except Exception as e:
- logger.error(f"Error getting count from table '{self.table.fullname}': {e}")
+ log_error(f"Error getting count from table '{self.table.fullname}': {e}")
  return 0

  def optimize(self, force_recreate: bool = False) -> None:
@@ -1165,7 +1202,7 @@ class PgVector(VectorDb):
  drop_index_sql = f'DROP INDEX IF EXISTS "{self.schema}"."{index_name}";'
  sess.execute(text(drop_index_sql))
  except Exception as e:
- logger.error(f"Error dropping index '{index_name}': {e}")
+ log_error(f"Error dropping index '{index_name}': {e}")
  raise

  def _create_vector_index(self, force_recreate: bool = False) -> None:
@@ -1220,10 +1257,10 @@ class PgVector(VectorDb):
  elif isinstance(self.vector_index, HNSW):
  self._create_hnsw_index(sess, table_fullname, index_distance)
  else:
- logger.error(f"Unknown index type: {type(self.vector_index)}")
+ log_error(f"Unknown index type: {type(self.vector_index)}")
  return
  except Exception as e:
- logger.error(f"Error creating vector index '{self.vector_index.name}': {e}")
+ log_error(f"Error creating vector index '{self.vector_index.name}': {e}")
  raise

  def _create_ivfflat_index(self, sess: Session, table_fullname: str, index_distance: str) -> None:
@@ -1322,7 +1359,7 @@ class PgVector(VectorDb):
  )
  sess.execute(create_gin_index_sql)
  except Exception as e:
- logger.error(f"Error creating GIN index '{gin_index_name}': {e}")
+ log_error(f"Error creating GIN index '{gin_index_name}': {e}")
  raise

  def delete(self) -> bool:
@@ -1341,7 +1378,7 @@ class PgVector(VectorDb):
  log_info(f"Deleted all records from table '{self.table.fullname}'.")
  return True
  except Exception as e:
- logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+ log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
  sess.rollback()
  return False

@@ -1357,7 +1394,7 @@ class PgVector(VectorDb):
  log_info(f"Deleted records with id '{id}' from table '{self.table.fullname}'.")
  return True
  except Exception as e:
- logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+ log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
  sess.rollback()
  return False

@@ -1373,7 +1410,7 @@ class PgVector(VectorDb):
  log_info(f"Deleted records with name '{name}' from table '{self.table.fullname}'.")
  return True
  except Exception as e:
- logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+ log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
  sess.rollback()
  return False

@@ -1389,7 +1426,7 @@ class PgVector(VectorDb):
  log_info(f"Deleted records with metadata '{metadata}' from table '{self.table.fullname}'.")
  return True
  except Exception as e:
- logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+ log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
  sess.rollback()
  return False

@@ -1405,7 +1442,7 @@ class PgVector(VectorDb):
  log_info(f"Deleted records with content ID '{content_id}' from table '{self.table.fullname}'.")
  return True
  except Exception as e:
- logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+ log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
  sess.rollback()
  return False

@@ -1421,7 +1458,7 @@ class PgVector(VectorDb):
  log_info(f"Deleted records with content hash '{content_hash}' from table '{self.table.fullname}'.")
  return True
  except Exception as e:
- logger.error(f"Error deleting rows from table '{self.table.fullname}': {e}")
+ log_error(f"Error deleting rows from table '{self.table.fullname}': {e}")
  sess.rollback()
  return False
agno/workflow/parallel.py CHANGED
@@ -102,7 +102,7 @@ class Parallel:
  step_name=self.name or "Parallel",
  step_id=str(uuid4()),
  step_type=StepType.PARALLEL,
- content=f"Parallel {self.name or 'execution'} completed with 1 result",
+ content=self._build_aggregated_content(step_outputs),
  executor_name=self.name or "Parallel",
  images=single_result.images,
  videos=single_result.videos,
@@ -116,8 +116,8 @@ class Parallel:

  early_termination_requested = any(output.stop for output in step_outputs if hasattr(output, "stop"))

- # Multiple results - aggregate them
- aggregated_content = f"Parallel {self.name or 'execution'} completed with {len(step_outputs)} results"
+ # Multiple results - aggregate them with actual content from all steps
+ aggregated_content = self._build_aggregated_content(step_outputs)

  # Combine all media from parallel steps
  all_images = []
@@ -419,6 +419,7 @@ class Parallel:
  step_index=sub_step_index,
  store_executor_outputs=store_executor_outputs,
  session_state=step_session_state,
+ run_context=run_context,
  parent_step_id=parallel_step_id,
  workflow_session=workflow_session,
  add_workflow_history_to_steps=add_workflow_history_to_steps,
@@ -580,6 +581,7 @@ class Parallel:
  add_workflow_history_to_steps=add_workflow_history_to_steps,
  num_history_runs=num_history_runs,
  session_state=step_session_state,
+ run_context=run_context,
  background_tasks=background_tasks,
  ) # type: ignore[union-attr]
  return idx, inner_step_result, step_session_state
agno/workflow/step.py CHANGED
@@ -1363,10 +1363,22 @@ class Step:

  For container steps (Steps, Router, Loop, etc.), this will recursively find the content from the
  last actual step rather than using the generic container message.
+
+ For Parallel steps, aggregates content from ALL inner steps (not just the last one).
  """
- # If this step has nested steps (like Steps, Condition, Router, Loop, etc.)
+ # If this step has nested steps (like Steps, Condition, Router, Loop, Parallel, etc.)
  if hasattr(step_output, "steps") and step_output.steps and len(step_output.steps) > 0:
- # Recursively get content from the last nested step
+ # For Parallel steps, aggregate content from ALL inner steps
+ if step_output.step_type == StepType.PARALLEL:
+ aggregated_parts = []
+ for i, inner_step in enumerate(step_output.steps):
+ inner_content = self._get_deepest_content_from_step_output(inner_step)
+ if inner_content:
+ step_name = inner_step.step_name or f"Step {i + 1}"
+ aggregated_parts.append(f"=== {step_name} ===\n{inner_content}")
+ return "\n\n".join(aggregated_parts) if aggregated_parts else step_output.content  # type: ignore
+
+ # For other nested step types, recursively get content from the last nested step
  return self._get_deepest_content_from_step_output(step_output.steps[-1])

  # For regular steps, return their content
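Taken together with the Parallel change above, the deepest content of a Parallel output is now a concatenation of every inner step's content rather than a generic "completed with N results" line. For two hypothetical inner steps named "research" and "analysis", the aggregated string looks roughly like:

=== research ===
<content returned by the research step>

=== analysis ===
<content returned by the analysis step>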
agno/workflow/types.py CHANGED
@@ -107,10 +107,46 @@ class StepInput:
  return str(self.input)

  def get_step_output(self, step_name: str) -> Optional["StepOutput"]:
- """Get output from a specific previous step by name"""
+ """Get output from a specific previous step by name
+
+ Searches recursively through nested steps (Parallel, Condition, Router, Loop, Steps)
+ to find step outputs at any depth.
+ """
  if not self.previous_step_outputs:
  return None
- return self.previous_step_outputs.get(step_name)
+
+ # First try direct lookup
+ direct = self.previous_step_outputs.get(step_name)
+ if direct:
+ return direct
+
+ # Search recursively in nested steps
+ return self._search_nested_steps(step_name)
+
+ def _search_nested_steps(self, step_name: str) -> Optional["StepOutput"]:
+ """Recursively search for a step output in nested steps (Parallel, Condition, etc.)"""
+ if not self.previous_step_outputs:
+ return None
+
+ for step_output in self.previous_step_outputs.values():
+ result = self._search_in_step_output(step_output, step_name)
+ if result:
+ return result
+ return None
+
+ def _search_in_step_output(self, step_output: "StepOutput", step_name: str) -> Optional["StepOutput"]:
+ """Helper to recursively search within a single StepOutput"""
+ if not step_output.steps:
+ return None
+
+ for nested_step in step_output.steps:
+ if nested_step.step_name == step_name:
+ return nested_step
+ # Recursively search deeper
+ result = self._search_in_step_output(nested_step, step_name)
+ if result:
+ return result
+ return None

  def get_step_content(self, step_name: str) -> Optional[Union[str, Dict[str, str]]]:
  """Get content from a specific previous step by name
agno/workflow/workflow.py CHANGED
@@ -2569,6 +2569,8 @@ class Workflow:
  else:
  # Update status to RUNNING and save
  workflow_run_response.status = RunStatus.running
+
+ workflow_session.upsert_run(run=workflow_run_response)
  if self._has_async_db():
  await self.asave_session(session=workflow_session)
  else:
@@ -3449,6 +3451,7 @@ class Workflow:
  input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
  additional_data: Optional[Dict[str, Any]] = None,
  user_id: Optional[str] = None,
+ run_id: Optional[str] = None,
  session_id: Optional[str] = None,
  session_state: Optional[Dict[str, Any]] = None,
  audio: Optional[List[Audio]] = None,
@@ -3468,6 +3471,7 @@ class Workflow:
  input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
  additional_data: Optional[Dict[str, Any]] = None,
  user_id: Optional[str] = None,
+ run_id: Optional[str] = None,
  session_id: Optional[str] = None,
  session_state: Optional[Dict[str, Any]] = None,
  audio: Optional[List[Audio]] = None,
@@ -3486,6 +3490,7 @@ class Workflow:
  input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
  additional_data: Optional[Dict[str, Any]] = None,
  user_id: Optional[str] = None,
+ run_id: Optional[str] = None,
  session_id: Optional[str] = None,
  session_state: Optional[Dict[str, Any]] = None,
  audio: Optional[List[Audio]] = None,
@@ -3503,8 +3508,8 @@ class Workflow:
  if self._has_async_db():
  raise Exception("`run()` is not supported with an async DB. Please use `arun()`.")

- # Create a run_id for this specific run and register immediately for cancellation tracking
- run_id = str(uuid4())
+ # Set the id for the run and register it immediately for cancellation tracking
+ run_id = run_id or str(uuid4())
  register_run(run_id)

  input = self._validate_input(input)
@@ -3621,6 +3626,7 @@ class Workflow:
  input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]] = None,
  additional_data: Optional[Dict[str, Any]] = None,
  user_id: Optional[str] = None,
+ run_id: Optional[str] = None,
  session_id: Optional[str] = None,
  session_state: Optional[Dict[str, Any]] = None,
  audio: Optional[List[Audio]] = None,
@@ -3641,6 +3647,7 @@ class Workflow:
  input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]] = None,
  additional_data: Optional[Dict[str, Any]] = None,
  user_id: Optional[str] = None,
+ run_id: Optional[str] = None,
  session_id: Optional[str] = None,
  session_state: Optional[Dict[str, Any]] = None,
  audio: Optional[List[Audio]] = None,
@@ -3660,6 +3667,7 @@ class Workflow:
  input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]] = None,
  additional_data: Optional[Dict[str, Any]] = None,
  user_id: Optional[str] = None,
+ run_id: Optional[str] = None,
  session_id: Optional[str] = None,
  session_state: Optional[Dict[str, Any]] = None,
  audio: Optional[List[Audio]] = None,
@@ -3730,8 +3738,8 @@ class Workflow:

  self._set_debug()

- # Create a run_id for this specific run and register immediately for cancellation tracking
- run_id = str(uuid4())
+ # Set the id for the run and register it immediately for cancellation tracking
+ run_id = run_id or str(uuid4())
  register_run(run_id)

  self.initialize_workflow()
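With the new run_id parameter on run() and arun(), a caller can pre-generate the id and reference the run (e.g. for cancellation tracking) before the call returns. A minimal sketch, assuming workflow is an already-constructed Workflow instance:

from uuid import uuid4

run_id = str(uuid4())

# The workflow registers this id via register_run() instead of
# generating its own, so the caller knows the run's id up front.
response = workflow.run(input="Summarize the latest reports", run_id=run_id)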