agno-2.1.3-py3-none-any.whl → agno-2.1.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. agno/agent/agent.py +1779 -577
  2. agno/db/async_postgres/__init__.py +3 -0
  3. agno/db/async_postgres/async_postgres.py +1668 -0
  4. agno/db/async_postgres/schemas.py +124 -0
  5. agno/db/async_postgres/utils.py +289 -0
  6. agno/db/base.py +237 -2
  7. agno/db/dynamo/dynamo.py +10 -8
  8. agno/db/dynamo/schemas.py +1 -10
  9. agno/db/dynamo/utils.py +2 -2
  10. agno/db/firestore/firestore.py +2 -2
  11. agno/db/firestore/utils.py +4 -2
  12. agno/db/gcs_json/gcs_json_db.py +2 -2
  13. agno/db/in_memory/in_memory_db.py +2 -2
  14. agno/db/json/json_db.py +2 -2
  15. agno/db/migrations/v1_to_v2.py +30 -13
  16. agno/db/mongo/mongo.py +18 -6
  17. agno/db/mysql/mysql.py +35 -13
  18. agno/db/postgres/postgres.py +29 -6
  19. agno/db/redis/redis.py +2 -2
  20. agno/db/singlestore/singlestore.py +2 -2
  21. agno/db/sqlite/sqlite.py +34 -12
  22. agno/db/sqlite/utils.py +8 -3
  23. agno/eval/accuracy.py +50 -43
  24. agno/eval/performance.py +6 -3
  25. agno/eval/reliability.py +6 -3
  26. agno/eval/utils.py +33 -16
  27. agno/exceptions.py +8 -2
  28. agno/knowledge/embedder/fastembed.py +1 -1
  29. agno/knowledge/knowledge.py +260 -46
  30. agno/knowledge/reader/pdf_reader.py +4 -6
  31. agno/knowledge/reader/reader_factory.py +2 -3
  32. agno/memory/manager.py +241 -33
  33. agno/models/anthropic/claude.py +37 -0
  34. agno/os/app.py +15 -10
  35. agno/os/interfaces/a2a/router.py +3 -5
  36. agno/os/interfaces/agui/router.py +4 -1
  37. agno/os/interfaces/agui/utils.py +33 -6
  38. agno/os/interfaces/slack/router.py +2 -4
  39. agno/os/mcp.py +98 -41
  40. agno/os/router.py +23 -0
  41. agno/os/routers/evals/evals.py +52 -20
  42. agno/os/routers/evals/utils.py +14 -14
  43. agno/os/routers/knowledge/knowledge.py +130 -9
  44. agno/os/routers/knowledge/schemas.py +57 -0
  45. agno/os/routers/memory/memory.py +116 -44
  46. agno/os/routers/metrics/metrics.py +16 -6
  47. agno/os/routers/session/session.py +65 -22
  48. agno/os/schema.py +38 -0
  49. agno/os/utils.py +69 -13
  50. agno/reasoning/anthropic.py +80 -0
  51. agno/reasoning/gemini.py +73 -0
  52. agno/reasoning/openai.py +5 -0
  53. agno/reasoning/vertexai.py +76 -0
  54. agno/session/workflow.py +69 -1
  55. agno/team/team.py +934 -241
  56. agno/tools/function.py +36 -18
  57. agno/tools/google_drive.py +270 -0
  58. agno/tools/googlesheets.py +20 -5
  59. agno/tools/mcp_toolbox.py +3 -3
  60. agno/tools/scrapegraph.py +1 -1
  61. agno/utils/models/claude.py +3 -1
  62. agno/utils/print_response/workflow.py +112 -12
  63. agno/utils/streamlit.py +1 -1
  64. agno/vectordb/base.py +22 -1
  65. agno/vectordb/cassandra/cassandra.py +9 -0
  66. agno/vectordb/chroma/chromadb.py +26 -6
  67. agno/vectordb/clickhouse/clickhousedb.py +9 -1
  68. agno/vectordb/couchbase/couchbase.py +11 -0
  69. agno/vectordb/lancedb/lance_db.py +20 -0
  70. agno/vectordb/langchaindb/langchaindb.py +11 -0
  71. agno/vectordb/lightrag/lightrag.py +9 -0
  72. agno/vectordb/llamaindex/llamaindexdb.py +15 -1
  73. agno/vectordb/milvus/milvus.py +23 -0
  74. agno/vectordb/mongodb/mongodb.py +22 -0
  75. agno/vectordb/pgvector/pgvector.py +19 -0
  76. agno/vectordb/pineconedb/pineconedb.py +35 -4
  77. agno/vectordb/qdrant/qdrant.py +24 -0
  78. agno/vectordb/singlestore/singlestore.py +25 -17
  79. agno/vectordb/surrealdb/surrealdb.py +18 -1
  80. agno/vectordb/upstashdb/upstashdb.py +26 -1
  81. agno/vectordb/weaviate/weaviate.py +18 -0
  82. agno/workflow/condition.py +29 -0
  83. agno/workflow/loop.py +29 -0
  84. agno/workflow/parallel.py +141 -113
  85. agno/workflow/router.py +29 -0
  86. agno/workflow/step.py +146 -25
  87. agno/workflow/steps.py +29 -0
  88. agno/workflow/types.py +26 -1
  89. agno/workflow/workflow.py +507 -22
  90. {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/METADATA +100 -41
  91. {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/RECORD +94 -86
  92. {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/WHEEL +0 -0
  93. {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/licenses/LICENSE +0 -0
  94. {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/top_level.txt +0 -0
agno/vectordb/mongodb/mongodb.py

@@ -33,6 +33,9 @@ class MongoDb(VectorDb):
     def __init__(
         self,
         collection_name: str,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         db_url: Optional[str] = "mongodb://localhost:27017/",
         database: str = "agno",
         embedder: Optional[Embedder] = None,
@@ -56,6 +59,8 @@ class MongoDb(VectorDb):
 
         Args:
             collection_name (str): Name of the MongoDB collection.
+            name (Optional[str]): Name of the vector database.
+            description (Optional[str]): Description of the vector database.
             db_url (Optional[str]): MongoDB connection string.
             database (str): Database name.
             embedder (Embedder): Embedder instance for generating embeddings.
@@ -74,11 +79,24 @@ class MongoDb(VectorDb):
             hybrid_rank_constant (int): Default rank constant (k) for Reciprocal Rank Fusion in hybrid search. This constant is added to the rank before taking the reciprocal, helping to smooth scores. A common value is 60.
             **kwargs: Additional arguments for MongoClient.
         """
+        # Validate required parameters
         if not collection_name:
             raise ValueError("Collection name must not be empty.")
         if not database:
             raise ValueError("Database name must not be empty.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            connection_identifier = db_url or "mongodb://localhost:27017/"
+            seed = f"{connection_identifier}#{database}#{collection_name}"
+            id = generate_id(seed)
+
         self.collection_name = collection_name
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         self.database = database
         self.search_index_name = search_index_name
         self.cosmos_compatibility = cosmos_compatibility
@@ -1382,3 +1400,7 @@ class MongoDb(VectorDb):
         except Exception as e:
             logger.error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return [SearchType.vector, SearchType.hybrid]
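
The new `id` is derived deterministically from the connection details, so the same collection on the same server always maps to the same database id. A minimal sketch of the pattern, assuming `generate_id` deterministically hashes its seed string (the hash scheme itself is not shown in this diff):

# Sketch of the seed-based ID generation introduced above.
from agno.utils.string import generate_id

seed = "mongodb://localhost:27017/#agno#my_collection"
id_a = generate_id(seed)
id_b = generate_id(seed)
assert id_a == id_b  # same connection + database + collection -> same id

The same seed-then-hash pattern recurs in the PgVector, Pinecone, Qdrant, SurrealDB, Upstash, and Weaviate diffs below, with each backend picking its own unique identifiers for the seed.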
agno/vectordb/pgvector/pgvector.py

@@ -3,6 +3,8 @@ from hashlib import md5
 from math import sqrt
 from typing import Any, Dict, List, Optional, Union, cast
 
+from agno.utils.string import generate_id
+
 try:
     from sqlalchemy import update
     from sqlalchemy.dialects import postgresql
@@ -43,6 +45,9 @@ class PgVector(VectorDb):
         self,
         table_name: str,
         schema: str = "ai",
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         db_url: Optional[str] = None,
         db_engine: Optional[Engine] = None,
         embedder: Optional[Embedder] = None,
@@ -62,6 +67,8 @@
         Args:
             table_name (str): Name of the table to store vector data.
             schema (str): Database schema name.
+            name (Optional[str]): Name of the vector database.
+            description (Optional[str]): Description of the vector database.
             db_url (Optional[str]): Database connection URL.
             db_engine (Optional[Engine]): SQLAlchemy database engine.
             embedder (Optional[Embedder]): Embedder instance for creating embeddings.
@@ -80,6 +87,15 @@
         if db_engine is None and db_url is None:
             raise ValueError("Either 'db_url' or 'db_engine' must be provided.")
 
+        if id is None:
+            base_seed = db_url or str(db_engine.url)  # type: ignore
+            schema_suffix = table_name if table_name is not None else "ai"
+            seed = f"{base_seed}#{schema_suffix}"
+            id = generate_id(seed)
+
+        # Initialize base class with name and description
+        super().__init__(id=id, name=name, description=description)
+
         if db_engine is None:
             if db_url is None:
                 raise ValueError("Must provide 'db_url' if 'db_engine' is None.")
@@ -1383,3 +1399,6 @@ class PgVector(VectorDb):
         copied_obj.table = copied_obj.get_table()
 
         return copied_obj
+
+    def get_supported_search_types(self) -> List[str]:
+        return [SearchType.vector, SearchType.keyword, SearchType.hybrid]
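
The `get_supported_search_types` hook added to each backend lets callers check a database's capabilities before choosing a search mode. A hypothetical capability check (the table name and connection URL are placeholders; the `SearchType` values mirror those returned above):

# Hypothetical usage of the new capability hook; only pick hybrid search
# when the backend reports support for it.
from agno.vectordb.pgvector import PgVector, SearchType

vector_db = PgVector(table_name="docs", db_url="postgresql+psycopg://ai:ai@localhost:5432/ai")
if SearchType.hybrid in vector_db.get_supported_search_types():
    vector_db.search_type = SearchType.hybrid  # otherwise stay on the vector default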
agno/vectordb/pineconedb/pineconedb.py

@@ -66,9 +66,11 @@ class PineconeDb(VectorDb):
 
     def __init__(
         self,
-        name: str,
         dimension: int,
         spec: Union[Dict, ServerlessSpec, PodSpec],
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         embedder: Optional[Embedder] = None,
         metric: Optional[str] = "cosine",
         additional_headers: Optional[Dict[str, str]] = None,
@@ -84,6 +86,23 @@
         reranker: Optional[Reranker] = None,
         **kwargs,
     ):
+        # Validate required parameters
+        if dimension is None or dimension <= 0:
+            raise ValueError("Dimension must be provided and greater than 0.")
+        if spec is None:
+            raise ValueError("Spec must be provided for Pinecone index.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            index_name = name or "default_index"
+            seed = f"{host or 'pinecone'}#{index_name}#{dimension}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         self._client = None
         self._index = None
         self.api_key: Optional[str] = api_key
@@ -93,7 +112,6 @@
         self.pool_threads: Optional[int] = pool_threads
         self.namespace: Optional[str] = namespace
         self.index_api: Optional[Any] = index_api
-        self.name: str = name
         self.dimension: Optional[int] = dimension
         self.spec: Union[Dict, ServerlessSpec, PodSpec] = spec
         self.metric: Optional[str] = metric
@@ -307,6 +325,8 @@
         show_progress: bool = False,
     ) -> None:
         """Upsert documents into the index asynchronously with batching."""
+        if self.content_hash_exists(content_hash):
+            await asyncio.to_thread(self._delete_by_content_hash, content_hash)
         if not documents:
             return
 
@@ -320,7 +340,7 @@
 
         # Process each batch in parallel
        async def process_batch(batch_docs):
-            return await self._prepare_vectors(batch_docs)
+            return await self._prepare_vectors(batch_docs, content_hash, filters)
 
         # Run all batches in parallel
         batch_vectors = await asyncio.gather(*[process_batch(batch) for batch in batches])
@@ -335,7 +355,9 @@
 
         log_debug(f"Finished async upsert of {len(documents)} documents")
 
-    async def _prepare_vectors(self, documents: List[Document]) -> List[Dict[str, Any]]:
+    async def _prepare_vectors(
+        self, documents: List[Document], content_hash: str, filters: Optional[Dict[str, Any]] = None
+    ) -> List[Dict[str, Any]]:
         """Prepare vectors for upsert."""
         vectors = []
 
@@ -382,11 +404,16 @@
                 doc.meta_data["text"] = doc.content
             # Include name and content_id in metadata
             metadata = doc.meta_data.copy()
+            if filters:
+                metadata.update(filters)
+
             if doc.name:
                 metadata["name"] = doc.name
             if doc.content_id:
                 metadata["content_id"] = doc.content_id
 
+            metadata["content_hash"] = content_hash
+
             data_to_upsert = {
                 "id": doc.id,
                 "values": doc.embedding,
@@ -710,3 +737,7 @@
         except Exception as e:
             logger.error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # PineconeDb doesn't use SearchType enum
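
The async upsert now deletes any existing vectors sharing the incoming `content_hash` before writing, and `_prepare_vectors` stamps that hash (plus any filter keys) into every vector's metadata, which is what makes the replace-by-hash lookup possible on the next upsert. A minimal illustrative stand-in for the metadata stamping, not the agno implementation itself:

# Sketch of the metadata stamping added above: filters become searchable
# metadata keys, and content_hash is recorded for replace-by-hash upserts.
from typing import Any, Dict, Optional

def stamp_metadata(
    meta_data: Dict[str, Any], content_hash: str, filters: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    metadata = meta_data.copy()
    if filters:
        metadata.update(filters)  # e.g. {"user_id": "u1"} becomes queryable
    metadata["content_hash"] = content_hash  # enables delete-then-reinsert dedup
    return metadata

print(stamp_metadata({"name": "doc1"}, "abc123", {"user_id": "u1"}))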
agno/vectordb/qdrant/qdrant.py

@@ -28,6 +28,9 @@ class Qdrant(VectorDb):
     def __init__(
         self,
         collection: str,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         embedder: Optional[Embedder] = None,
         distance: Distance = Distance.cosine,
         location: Optional[str] = None,
@@ -52,6 +55,8 @@
         """
         Args:
             collection (str): Name of the Qdrant collection.
+            name (Optional[str]): Name of the vector database.
+            description (Optional[str]): Description of the vector database.
             embedder (Optional[Embedder]): Optional embedder for automatic vector generation.
             distance (Distance): Distance metric to use (default: cosine).
             location (Optional[str]): `":memory:"` for in-memory, or str used as `url`. If `None`, use default host/port.
@@ -73,6 +78,21 @@
             fastembed_kwargs (Optional[dict]): Keyword args for `fastembed.SparseTextEmbedding.__init__()`.
             **kwargs: Keyword args for `qdrant_client.QdrantClient.__init__()`.
         """
+        # Validate required parameters
+        if not collection:
+            raise ValueError("Collection name must be provided.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            host_identifier = host or location or url or "localhost"
+            seed = f"{host_identifier}#{collection}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         # Collection attributes
         self.collection: str = collection
 
@@ -1096,3 +1116,7 @@
                log_debug(f"Error closing async Qdrant client: {e}")
            finally:
                self._async_client = None
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return [SearchType.vector, SearchType.keyword, SearchType.hybrid]
agno/vectordb/singlestore/singlestore.py

@@ -32,6 +32,8 @@ class SingleStore(VectorDb):
         embedder: Optional[Embedder] = None,
         distance: Distance = Distance.cosine,
         reranker: Optional[Reranker] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
         # index: Optional[Union[Ivfflat, HNSW]] = HNSW(),
     ):
         _engine: Optional[Engine] = db_engine
@@ -44,9 +46,11 @@
         self.collection: str = collection
         self.schema: Optional[str] = schema
         self.db_url: Optional[str] = db_url
+        # Initialize base class with name and description
+        super().__init__(name=name, description=description)
+
         self.db_engine: Engine = _engine
         self.metadata: MetaData = MetaData(schema=self.schema)
-
         if embedder is None:
             from agno.knowledge.embedder.openai import OpenAIEmbedder
 
@@ -428,9 +432,9 @@
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.id == id)
-                result = sess.execute(stmt)
-                log_info(f"Deleted {result.rowcount} records with ID {id} from table '{self.table.name}'.")
-                return result.rowcount > 0
+                result = sess.execute(stmt)  # type: ignore
+                log_info(f"Deleted {result.rowcount} records with ID {id} from table '{self.table.name}'.")  # type: ignore
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting document with ID {id}: {e}")
             return False
@@ -444,11 +448,11 @@
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.content_id == content_id)
-                result = sess.execute(stmt)
+                result = sess.execute(stmt)  # type: ignore
                 log_info(
-                    f"Deleted {result.rowcount} records with content_id {content_id} from table '{self.table.name}'."
+                    f"Deleted {result.rowcount} records with content_id {content_id} from table '{self.table.name}'."  # type: ignore
                 )
-                return result.rowcount > 0
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting document with content_id {content_id}: {e}")
             return False
@@ -462,9 +466,9 @@
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.name == name)
-                result = sess.execute(stmt)
-                log_info(f"Deleted {result.rowcount} records with name '{name}' from table '{self.table.name}'.")
-                return result.rowcount > 0
+                result = sess.execute(stmt)  # type: ignore
+                log_info(f"Deleted {result.rowcount} records with name '{name}' from table '{self.table.name}'.")  # type: ignore
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting document with name {name}: {e}")
             return False
@@ -480,9 +484,9 @@
                 # Convert metadata to JSON string for comparison
                 metadata_json = json.dumps(metadata, sort_keys=True)
                 stmt = delete(self.table).where(self.table.c.meta_data == metadata_json)
-                result = sess.execute(stmt)
-                log_info(f"Deleted {result.rowcount} records with metadata {metadata} from table '{self.table.name}'.")
-                return result.rowcount > 0
+                result = sess.execute(stmt)  # type: ignore
+                log_info(f"Deleted {result.rowcount} records with metadata {metadata} from table '{self.table.name}'.")  # type: ignore
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting documents with metadata {metadata}: {e}")
             return False
@@ -689,11 +693,11 @@
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.content_hash == content_hash)
-                result = sess.execute(stmt)
+                result = sess.execute(stmt)  # type: ignore
                 log_info(
-                    f"Deleted {result.rowcount} records with content_hash '{content_hash}' from table '{self.table.name}'."
+                    f"Deleted {result.rowcount} records with content_hash '{content_hash}' from table '{self.table.name}'."  # type: ignore
                 )
-                return result.rowcount > 0
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting documents with content_hash {content_hash}: {e}")
             return False
@@ -712,7 +716,7 @@
             with self.Session.begin() as sess:
                 # Find documents with the given content_id
                 stmt = select(self.table).where(self.table.c.content_id == content_id)
-                result = sess.execute(stmt)
+                result = sess.execute(stmt)  # type: ignore
 
                 updated_count = 0
                 for row in result:
@@ -748,3 +752,7 @@
         except Exception as e:
             log_error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # SingleStore doesn't use SearchType enum
agno/vectordb/surrealdb/surrealdb.py

@@ -107,6 +107,9 @@ class SurrealDb(VectorDb):
         m: int = 12,
         search_ef: int = 40,
         embedder: Optional[Embedder] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
     ):
         """Initialize SurrealDB connection.
 
@@ -122,6 +125,17 @@
             embedder: Embedder instance for creating embeddings (default: OpenAIEmbedder)
 
         """
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            client_info = str(client) if client else str(async_client) if async_client else "default"
+            seed = f"{client_info}#{collection}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         # Embedder for embedding the document contents
         if embedder is None:
             from agno.knowledge.embedder.openai import OpenAIEmbedder
@@ -131,7 +145,6 @@
         self.embedder: Embedder = embedder
         self.dimensions = self.embedder.dimensions
         self.collection = collection
-
         # Convert Distance enum to SurrealDB distance type
         self.distance = {Distance.cosine: "COSINE", Distance.l2: "EUCLIDEAN", Distance.max_inner_product: "DOT"}[
             distance
@@ -671,3 +684,7 @@
         except Exception as e:
             log_error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # SurrealDb doesn't use SearchType enum
agno/vectordb/upstashdb/upstashdb.py

@@ -32,6 +32,8 @@ class UpstashVectorDb(VectorDb):
         embedder (Optional[Embedder], optional): The embedder to use. If None, uses Upstash hosted embedding models.
         namespace (Optional[str], optional): The namespace to use. Defaults to DEFAULT_NAMESPACE.
         reranker (Optional[Reranker], optional): The reranker to use. Defaults to None.
+        name (Optional[str], optional): The name of the vector database. Defaults to None.
+        description (Optional[str], optional): The description of the vector database. Defaults to None.
         **kwargs: Additional keyword arguments.
     """
 
@@ -45,8 +47,28 @@
         embedder: Optional[Embedder] = None,
         namespace: Optional[str] = DEFAULT_NAMESPACE,
         reranker: Optional[Reranker] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         **kwargs: Any,
     ) -> None:
+        # Validate required parameters
+        if not url:
+            raise ValueError("URL must be provided.")
+        if not token:
+            raise ValueError("Token must be provided.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            namespace_identifier = namespace or DEFAULT_NAMESPACE
+            seed = f"{url}#{namespace_identifier}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         self._index: Optional[Index] = None
         self.url: str = url
         self.token: str = token
@@ -56,7 +78,6 @@
         self.namespace: str = namespace if namespace is not None else DEFAULT_NAMESPACE
         self.kwargs: Dict[str, Any] = kwargs
         self.use_upstash_embeddings: bool = embedder is None
-
         if embedder is None:
             logger.warning(
                 "You have not provided an embedder, using Upstash hosted embedding models. "
@@ -688,3 +709,7 @@
         except Exception as e:
             logger.error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # UpstashVectorDb doesn't use SearchType enum
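
With this validation, a misconfigured client now fails at construction time rather than on the first network call. A quick illustration (credentials are placeholders; the import path follows the file layout in this diff):

# Construction now fails fast when url/token are missing.
from agno.vectordb.upstashdb.upstashdb import UpstashVectorDb

try:
    UpstashVectorDb(url="", token="")
except ValueError as e:
    print(e)  # -> URL must be provided.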
agno/vectordb/weaviate/weaviate.py

@@ -41,6 +41,9 @@ class Weaviate(VectorDb):
         local: bool = False,
         # Collection params
         collection: str = "default",
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         vector_index: VectorIndex = VectorIndex.HNSW,
         distance: Distance = Distance.COSINE,
         # Search/Embedding params
@@ -49,6 +52,17 @@
         reranker: Optional[Reranker] = None,
         hybrid_search_alpha: float = 0.5,
     ):
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            connection_identifier = wcd_url or "local" if local else "default"
+            seed = f"{connection_identifier}#{collection}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         # Connection setup
         self.wcd_url = wcd_url or getenv("WCD_URL")
         self.wcd_api_key = wcd_api_key or getenv("WCD_API_KEY")
@@ -968,3 +982,7 @@
         except Exception as e:
             logger.error(f"Error deleting documents by content_hash '{content_hash}': {e}")
             return False
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return [SearchType.vector, SearchType.keyword, SearchType.hybrid]
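
One detail worth noting when reading the Weaviate seed expression: in Python, `or` binds tighter than the conditional expression, so `wcd_url or "local" if local else "default"` parses as `(wcd_url or "local") if local else "default"`. That means a configured `wcd_url` only reaches the seed when `local=True`; otherwise the seed uses `"default"`. A quick demonstration of how the expression evaluates:

# `or` binds tighter than `if/else`, so wcd_url is only consulted
# when local is True.
wcd_url, local = "https://my-cluster.weaviate.network", False
assert (wcd_url or "local" if local else "default") == "default"

local = True
assert (wcd_url or "local" if local else "default") == wcd_url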
agno/workflow/condition.py

@@ -11,6 +11,7 @@ from agno.run.workflow import (
     WorkflowRunOutput,
     WorkflowRunOutputEvent,
 )
+from agno.session.workflow import WorkflowSession
 from agno.utils.log import log_debug, logger
 from agno.workflow.step import Step
 from agno.workflow.types import StepInput, StepOutput, StepType
@@ -153,6 +154,9 @@ class Condition:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Execute the condition and its steps with sequential chaining if condition is true"""
         log_debug(f"Condition Start: {self.name}", center=True, symbol="-")
@@ -189,6 +193,9 @@
                 workflow_run_response=workflow_run_response,
                 store_executor_outputs=store_executor_outputs,
                 session_state=session_state,
+                workflow_session=workflow_session,
+                add_workflow_history_to_steps=add_workflow_history_to_steps,
+                num_history_runs=num_history_runs,
             )
 
             # Handle both single StepOutput and List[StepOutput] (from Loop/Condition/Router steps)
@@ -250,11 +257,15 @@
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> Iterator[Union[WorkflowRunOutputEvent, StepOutput]]:
         """Execute the condition with streaming support - mirrors Loop logic"""
         log_debug(f"Condition Start: {self.name}", center=True, symbol="-")
@@ -322,11 +333,15 @@
                     session_id=session_id,
                     user_id=user_id,
                     stream_intermediate_steps=stream_intermediate_steps,
+                    stream_executor_events=stream_executor_events,
                     workflow_run_response=workflow_run_response,
                     step_index=child_step_index,
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
                     parent_step_id=conditional_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):
                     if isinstance(event, StepOutput):
                         step_outputs_for_step.append(event)
@@ -407,6 +422,9 @@
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Async execute the condition and its steps with sequential chaining"""
         log_debug(f"Condition Start: {self.name}", center=True, symbol="-")
@@ -445,6 +463,9 @@
                 workflow_run_response=workflow_run_response,
                 store_executor_outputs=store_executor_outputs,
                 session_state=session_state,
+                workflow_session=workflow_session,
+                add_workflow_history_to_steps=add_workflow_history_to_steps,
+                num_history_runs=num_history_runs,
             )
 
             # Handle both single StepOutput and List[StepOutput]
@@ -504,11 +525,15 @@
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> AsyncIterator[Union[WorkflowRunOutputEvent, TeamRunOutputEvent, RunOutputEvent, StepOutput]]:
         """Async execute the condition with streaming support - mirrors Loop logic"""
         log_debug(f"Condition Start: {self.name}", center=True, symbol="-")
@@ -578,11 +603,15 @@
                     session_id=session_id,
                     user_id=user_id,
                     stream_intermediate_steps=stream_intermediate_steps,
+                    stream_executor_events=stream_executor_events,
                     workflow_run_response=workflow_run_response,
                     step_index=child_step_index,
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
                     parent_step_id=conditional_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):
                     if isinstance(event, StepOutput):
                         step_outputs_for_step.append(event)
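
The same three parameters (`workflow_session`, `add_workflow_history_to_steps`, `num_history_runs`) are threaded through every Condition execute variant and forwarded to child steps, matching the parallel changes to loop.py, router.py, steps.py, and step.py in the file list above. A hypothetical sketch of enabling the feature at the Workflow level (parameter spelling taken from this diff; the exact public Workflow API is not shown here):

# Hypothetical usage, assuming Workflow exposes the history flags that this
# diff threads through Condition and the other step types.
from agno.workflow.workflow import Workflow

workflow = Workflow(
    name="support-triage",
    steps=[],  # Step / Condition / Loop instances go here
    add_workflow_history_to_steps=True,  # give each step prior-run context
    num_history_runs=3,  # include the last 3 runs (the default seen above)
)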