agno 2.1.4__py3-none-any.whl → 2.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +1775 -538
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/async_postgres/async_postgres.py +1668 -0
- agno/db/async_postgres/schemas.py +124 -0
- agno/db/async_postgres/utils.py +289 -0
- agno/db/base.py +237 -2
- agno/db/dynamo/dynamo.py +2 -2
- agno/db/firestore/firestore.py +2 -2
- agno/db/firestore/utils.py +4 -2
- agno/db/gcs_json/gcs_json_db.py +2 -2
- agno/db/in_memory/in_memory_db.py +2 -2
- agno/db/json/json_db.py +2 -2
- agno/db/migrations/v1_to_v2.py +43 -13
- agno/db/mongo/mongo.py +14 -6
- agno/db/mongo/utils.py +0 -4
- agno/db/mysql/mysql.py +23 -13
- agno/db/postgres/postgres.py +17 -6
- agno/db/redis/redis.py +2 -2
- agno/db/singlestore/singlestore.py +19 -10
- agno/db/sqlite/sqlite.py +22 -12
- agno/db/sqlite/utils.py +8 -3
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +259 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1193 -0
- agno/db/surrealdb/utils.py +87 -0
- agno/eval/accuracy.py +50 -43
- agno/eval/performance.py +6 -3
- agno/eval/reliability.py +6 -3
- agno/eval/utils.py +33 -16
- agno/exceptions.py +8 -2
- agno/knowledge/knowledge.py +260 -46
- agno/knowledge/reader/pdf_reader.py +4 -6
- agno/knowledge/reader/reader_factory.py +2 -3
- agno/memory/manager.py +254 -46
- agno/models/anthropic/claude.py +37 -0
- agno/os/app.py +8 -7
- agno/os/interfaces/a2a/router.py +3 -5
- agno/os/interfaces/agui/router.py +4 -1
- agno/os/interfaces/agui/utils.py +27 -6
- agno/os/interfaces/slack/router.py +2 -4
- agno/os/mcp.py +98 -41
- agno/os/router.py +23 -0
- agno/os/routers/evals/evals.py +52 -20
- agno/os/routers/evals/utils.py +14 -14
- agno/os/routers/knowledge/knowledge.py +130 -9
- agno/os/routers/knowledge/schemas.py +57 -0
- agno/os/routers/memory/memory.py +116 -44
- agno/os/routers/metrics/metrics.py +16 -6
- agno/os/routers/session/session.py +65 -22
- agno/os/schema.py +36 -0
- agno/os/utils.py +64 -11
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/openai.py +5 -0
- agno/reasoning/vertexai.py +76 -0
- agno/session/workflow.py +3 -3
- agno/team/team.py +968 -179
- agno/tools/googlesheets.py +20 -5
- agno/tools/mcp_toolbox.py +3 -3
- agno/tools/scrapegraph.py +1 -1
- agno/utils/models/claude.py +3 -1
- agno/utils/streamlit.py +1 -1
- agno/vectordb/base.py +22 -1
- agno/vectordb/cassandra/cassandra.py +9 -0
- agno/vectordb/chroma/chromadb.py +26 -6
- agno/vectordb/clickhouse/clickhousedb.py +9 -1
- agno/vectordb/couchbase/couchbase.py +11 -0
- agno/vectordb/lancedb/lance_db.py +20 -0
- agno/vectordb/langchaindb/langchaindb.py +11 -0
- agno/vectordb/lightrag/lightrag.py +9 -0
- agno/vectordb/llamaindex/llamaindexdb.py +15 -1
- agno/vectordb/milvus/milvus.py +23 -0
- agno/vectordb/mongodb/mongodb.py +22 -0
- agno/vectordb/pgvector/pgvector.py +19 -0
- agno/vectordb/pineconedb/pineconedb.py +35 -4
- agno/vectordb/qdrant/qdrant.py +24 -0
- agno/vectordb/singlestore/singlestore.py +25 -17
- agno/vectordb/surrealdb/surrealdb.py +18 -2
- agno/vectordb/upstashdb/upstashdb.py +26 -1
- agno/vectordb/weaviate/weaviate.py +18 -0
- agno/workflow/condition.py +4 -0
- agno/workflow/loop.py +4 -0
- agno/workflow/parallel.py +4 -0
- agno/workflow/router.py +4 -0
- agno/workflow/step.py +30 -14
- agno/workflow/steps.py +4 -0
- agno/workflow/types.py +2 -2
- agno/workflow/workflow.py +328 -61
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/METADATA +100 -41
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/RECORD +95 -82
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/WHEEL +0 -0
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/licenses/LICENSE +0 -0
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/top_level.txt +0 -0
agno/vectordb/pineconedb/pineconedb.py
CHANGED

@@ -66,9 +66,11 @@ class PineconeDb(VectorDb):
 
     def __init__(
         self,
-        name: str,
         dimension: int,
         spec: Union[Dict, ServerlessSpec, PodSpec],
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         embedder: Optional[Embedder] = None,
         metric: Optional[str] = "cosine",
         additional_headers: Optional[Dict[str, str]] = None,
@@ -84,6 +86,23 @@ class PineconeDb(VectorDb):
         reranker: Optional[Reranker] = None,
         **kwargs,
     ):
+        # Validate required parameters
+        if dimension is None or dimension <= 0:
+            raise ValueError("Dimension must be provided and greater than 0.")
+        if spec is None:
+            raise ValueError("Spec must be provided for Pinecone index.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            index_name = name or "default_index"
+            seed = f"{host or 'pinecone'}#{index_name}#{dimension}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         self._client = None
         self._index = None
         self.api_key: Optional[str] = api_key
@@ -93,7 +112,6 @@ class PineconeDb(VectorDb):
         self.pool_threads: Optional[int] = pool_threads
         self.namespace: Optional[str] = namespace
         self.index_api: Optional[Any] = index_api
-        self.name: str = name
         self.dimension: Optional[int] = dimension
         self.spec: Union[Dict, ServerlessSpec, PodSpec] = spec
         self.metric: Optional[str] = metric
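Taken together, the three hunks above change the PineconeDb constructor contract: `name` moves from a required positional argument to an optional keyword, `description` and `id` are new, and when no `id` is passed one is derived deterministically from the host, index name, and dimension. A minimal sketch of the new construction pattern (the spec values are placeholders; `generate_id` is assumed to hash its seed string deterministically, and the import path is inferred from the file list):

```python
from pinecone import ServerlessSpec

from agno.vectordb.pineconedb import PineconeDb  # import path assumed from the file list

# `name` is optional now; with no explicit `id`, the seed
# "{host or 'pinecone'}#{name or 'default_index'}#{dimension}" is hashed.
db = PineconeDb(
    dimension=1536,
    spec=ServerlessSpec(cloud="aws", region="us-east-1"),  # placeholder spec
    name="my-index",
)

# Rebuilding the same configuration should therefore yield the same id.
print(db.id)
```

Because the seed is deterministic, reconnecting to the same index from a fresh process keeps a stable database identity without persisting the generated id anywhere.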
@@ -307,6 +325,8 @@ class PineconeDb(VectorDb):
         show_progress: bool = False,
     ) -> None:
         """Upsert documents into the index asynchronously with batching."""
+        if self.content_hash_exists(content_hash):
+            await asyncio.to_thread(self._delete_by_content_hash, content_hash)
         if not documents:
             return
 
@@ -320,7 +340,7 @@ class PineconeDb(VectorDb):
 
         # Process each batch in parallel
         async def process_batch(batch_docs):
-            return await self._prepare_vectors(batch_docs)
+            return await self._prepare_vectors(batch_docs, content_hash, filters)
 
         # Run all batches in parallel
         batch_vectors = await asyncio.gather(*[process_batch(batch) for batch in batches])
@@ -335,7 +355,9 @@ class PineconeDb(VectorDb):
 
         log_debug(f"Finished async upsert of {len(documents)} documents")
 
-    async def _prepare_vectors(
+    async def _prepare_vectors(
+        self, documents: List[Document], content_hash: str, filters: Optional[Dict[str, Any]] = None
+    ) -> List[Dict[str, Any]]:
         """Prepare vectors for upsert."""
         vectors = []
 
@@ -382,11 +404,16 @@ class PineconeDb(VectorDb):
             doc.meta_data["text"] = doc.content
             # Include name and content_id in metadata
             metadata = doc.meta_data.copy()
+            if filters:
+                metadata.update(filters)
+
             if doc.name:
                 metadata["name"] = doc.name
             if doc.content_id:
                 metadata["content_id"] = doc.content_id
 
+            metadata["content_hash"] = content_hash
+
             data_to_upsert = {
                 "id": doc.id,
                 "values": doc.embedding,
@@ -710,3 +737,7 @@ class PineconeDb(VectorDb):
         except Exception as e:
             logger.error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # PineconeDb doesn't use SearchType enum
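The upsert hunks above give the async upsert replace-then-write semantics: any vectors already stored under the incoming `content_hash` are deleted first, and every prepared vector now carries `content_hash` (plus any `filters`) in its metadata, which is what makes the delete-by-hash lookup possible on the next run. A condensed sketch of the flow, using the method names from the hunks (`db` stands in for a PineconeDb instance):

```python
import asyncio
from typing import Any, Dict, List, Optional


async def upsert_with_dedup(db, documents: List[Any], content_hash: str,
                            filters: Optional[Dict[str, Any]] = None) -> None:
    # 1. Drop any previously stored copy of this content.
    if db.content_hash_exists(content_hash):
        await asyncio.to_thread(db._delete_by_content_hash, content_hash)
    if not documents:
        return
    # 2. Each prepared vector is stamped with the hash (and filters) in its
    #    metadata, so step 1 can find it again when the content changes.
    vectors = await db._prepare_vectors(documents, content_hash, filters)
    # ... batched upsert of `vectors` continues as before ...
```
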
agno/vectordb/qdrant/qdrant.py
CHANGED

@@ -28,6 +28,9 @@ class Qdrant(VectorDb):
     def __init__(
         self,
         collection: str,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         embedder: Optional[Embedder] = None,
         distance: Distance = Distance.cosine,
         location: Optional[str] = None,
@@ -52,6 +55,8 @@ class Qdrant(VectorDb):
         """
         Args:
             collection (str): Name of the Qdrant collection.
+            name (Optional[str]): Name of the vector database.
+            description (Optional[str]): Description of the vector database.
             embedder (Optional[Embedder]): Optional embedder for automatic vector generation.
             distance (Distance): Distance metric to use (default: cosine).
             location (Optional[str]): `":memory:"` for in-memory, or str used as `url`. If `None`, use default host/port.
@@ -73,6 +78,21 @@ class Qdrant(VectorDb):
             fastembed_kwargs (Optional[dict]): Keyword args for `fastembed.SparseTextEmbedding.__init__()`.
             **kwargs: Keyword args for `qdrant_client.QdrantClient.__init__()`.
         """
+        # Validate required parameters
+        if not collection:
+            raise ValueError("Collection name must be provided.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            host_identifier = host or location or url or "localhost"
+            seed = f"{host_identifier}#{collection}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         # Collection attributes
         self.collection: str = collection
 
@@ -1096,3 +1116,7 @@ class Qdrant(VectorDb):
             log_debug(f"Error closing async Qdrant client: {e}")
         finally:
             self._async_client = None
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return [SearchType.vector, SearchType.keyword, SearchType.hybrid]
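The new `get_supported_search_types()` hook gives callers a uniform way to probe backend capabilities: Qdrant (above) and Weaviate advertise vector, keyword, and hybrid search, while PineconeDb, SingleStore, SurrealDb, and UpstashVectorDb return an empty list because they don't use the `SearchType` enum. A hedged usage sketch (the `SearchType` import path is an assumption):

```python
from agno.vectordb.search import SearchType  # assumed location of the enum


def pick_search_type(db, preferred: SearchType = SearchType.hybrid) -> SearchType:
    """Fall back to plain vector search when the backend can't do hybrid."""
    supported = db.get_supported_search_types()
    # Backends that return [] don't expose SearchType at all, so this
    # fallback is only meaningful for enum-aware backends.
    return preferred if preferred in supported else SearchType.vector
```
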
agno/vectordb/singlestore/singlestore.py
CHANGED

@@ -32,6 +32,8 @@ class SingleStore(VectorDb):
         embedder: Optional[Embedder] = None,
         distance: Distance = Distance.cosine,
         reranker: Optional[Reranker] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
         # index: Optional[Union[Ivfflat, HNSW]] = HNSW(),
     ):
         _engine: Optional[Engine] = db_engine
@@ -44,9 +46,11 @@ class SingleStore(VectorDb):
         self.collection: str = collection
         self.schema: Optional[str] = schema
         self.db_url: Optional[str] = db_url
+        # Initialize base class with name and description
+        super().__init__(name=name, description=description)
+
         self.db_engine: Engine = _engine
         self.metadata: MetaData = MetaData(schema=self.schema)
-
         if embedder is None:
             from agno.knowledge.embedder.openai import OpenAIEmbedder
 
@@ -428,9 +432,9 @@ class SingleStore(VectorDb):
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.id == id)
-                result = sess.execute(stmt)
-                log_info(f"Deleted {result.rowcount} records with ID {id} from table '{self.table.name}'.")
-                return result.rowcount > 0
+                result = sess.execute(stmt)  # type: ignore
+                log_info(f"Deleted {result.rowcount} records with ID {id} from table '{self.table.name}'.")  # type: ignore
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting document with ID {id}: {e}")
             return False
@@ -444,11 +448,11 @@ class SingleStore(VectorDb):
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.content_id == content_id)
-                result = sess.execute(stmt)
+                result = sess.execute(stmt)  # type: ignore
                 log_info(
-                    f"Deleted {result.rowcount} records with content_id {content_id} from table '{self.table.name}'."
+                    f"Deleted {result.rowcount} records with content_id {content_id} from table '{self.table.name}'."  # type: ignore
                 )
-                return result.rowcount > 0
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting document with content_id {content_id}: {e}")
             return False
@@ -462,9 +466,9 @@ class SingleStore(VectorDb):
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.name == name)
-                result = sess.execute(stmt)
-                log_info(f"Deleted {result.rowcount} records with name '{name}' from table '{self.table.name}'.")
-                return result.rowcount > 0
+                result = sess.execute(stmt)  # type: ignore
+                log_info(f"Deleted {result.rowcount} records with name '{name}' from table '{self.table.name}'.")  # type: ignore
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting document with name {name}: {e}")
             return False
@@ -480,9 +484,9 @@ class SingleStore(VectorDb):
                 # Convert metadata to JSON string for comparison
                 metadata_json = json.dumps(metadata, sort_keys=True)
                 stmt = delete(self.table).where(self.table.c.meta_data == metadata_json)
-                result = sess.execute(stmt)
-                log_info(f"Deleted {result.rowcount} records with metadata {metadata} from table '{self.table.name}'.")
-                return result.rowcount > 0
+                result = sess.execute(stmt)  # type: ignore
+                log_info(f"Deleted {result.rowcount} records with metadata {metadata} from table '{self.table.name}'.")  # type: ignore
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting documents with metadata {metadata}: {e}")
             return False
@@ -689,11 +693,11 @@ class SingleStore(VectorDb):
         try:
             with self.Session.begin() as sess:
                 stmt = delete(self.table).where(self.table.c.content_hash == content_hash)
-                result = sess.execute(stmt)
+                result = sess.execute(stmt)  # type: ignore
                 log_info(
-                    f"Deleted {result.rowcount} records with content_hash '{content_hash}' from table '{self.table.name}'."
+                    f"Deleted {result.rowcount} records with content_hash '{content_hash}' from table '{self.table.name}'."  # type: ignore
                 )
-                return result.rowcount > 0
+                return result.rowcount > 0  # type: ignore
         except Exception as e:
             log_error(f"Error deleting documents with content_hash {content_hash}: {e}")
             return False
@@ -712,7 +716,7 @@ class SingleStore(VectorDb):
             with self.Session.begin() as sess:
                 # Find documents with the given content_id
                 stmt = select(self.table).where(self.table.c.content_id == content_id)
-                result = sess.execute(stmt)
+                result = sess.execute(stmt)  # type: ignore
 
                 updated_count = 0
                 for row in result:
@@ -748,3 +752,7 @@ class SingleStore(VectorDb):
         except Exception as e:
             log_error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # SingleStore doesn't use SearchType enum
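The repeated `# type: ignore` comments in the SingleStore hunks change nothing at runtime; they appear to silence the type checker, since `Session.execute()` is typed as returning a generic `Result`, which doesn't declare `rowcount`, even though the `CursorResult` produced by a DML statement has it. The pattern in isolation (table and engine are illustrative):

```python
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, delete
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")  # illustrative engine
metadata = MetaData()
docs = Table("docs", metadata, Column("id", Integer, primary_key=True), Column("name", String))
metadata.create_all(engine)
Session = sessionmaker(bind=engine)


def delete_by_name(name: str) -> bool:
    with Session.begin() as sess:
        result = sess.execute(delete(docs).where(docs.c.name == name))
        # rowcount is only declared on CursorResult, hence the suppressions above.
        return result.rowcount > 0  # type: ignore[attr-defined]
```
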
agno/vectordb/surrealdb/surrealdb.py
CHANGED

@@ -107,11 +107,13 @@ class SurrealDb(VectorDb):
         m: int = 12,
         search_ef: int = 40,
         embedder: Optional[Embedder] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
     ):
         """Initialize SurrealDB connection.
 
         Args:
-            url: SurrealDB server URL (e.g. ws://localhost:8000/rpc)
             client: A blocking connection, either HTTP or WS
             async_client: An async connection, either HTTP or WS (default: None)
             collection: Collection name to store documents (default: documents)
@@ -122,6 +124,17 @@ class SurrealDb(VectorDb):
             embedder: Embedder instance for creating embeddings (default: OpenAIEmbedder)
 
         """
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            client_info = str(client) if client else str(async_client) if async_client else "default"
+            seed = f"{client_info}#{collection}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         # Embedder for embedding the document contents
         if embedder is None:
             from agno.knowledge.embedder.openai import OpenAIEmbedder
@@ -131,7 +144,6 @@ class SurrealDb(VectorDb):
         self.embedder: Embedder = embedder
         self.dimensions = self.embedder.dimensions
         self.collection = collection
-
         # Convert Distance enum to SurrealDB distance type
         self.distance = {Distance.cosine: "COSINE", Distance.l2: "EUCLIDEAN", Distance.max_inner_product: "DOT"}[
             distance
@@ -671,3 +683,7 @@ class SurrealDb(VectorDb):
         except Exception as e:
             log_error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # SurrealDb doesn't use SearchType enum
agno/vectordb/upstashdb/upstashdb.py
CHANGED

@@ -32,6 +32,8 @@ class UpstashVectorDb(VectorDb):
         embedder (Optional[Embedder], optional): The embedder to use. If None, uses Upstash hosted embedding models.
         namespace (Optional[str], optional): The namespace to use. Defaults to DEFAULT_NAMESPACE.
         reranker (Optional[Reranker], optional): The reranker to use. Defaults to None.
+        name (Optional[str], optional): The name of the vector database. Defaults to None.
+        description (Optional[str], optional): The description of the vector database. Defaults to None.
         **kwargs: Additional keyword arguments.
     """
 
@@ -45,8 +47,28 @@ class UpstashVectorDb(VectorDb):
         embedder: Optional[Embedder] = None,
         namespace: Optional[str] = DEFAULT_NAMESPACE,
         reranker: Optional[Reranker] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         **kwargs: Any,
     ) -> None:
+        # Validate required parameters
+        if not url:
+            raise ValueError("URL must be provided.")
+        if not token:
+            raise ValueError("Token must be provided.")
+
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            namespace_identifier = namespace or DEFAULT_NAMESPACE
+            seed = f"{url}#{namespace_identifier}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         self._index: Optional[Index] = None
         self.url: str = url
         self.token: str = token
@@ -56,7 +78,6 @@ class UpstashVectorDb(VectorDb):
         self.namespace: str = namespace if namespace is not None else DEFAULT_NAMESPACE
         self.kwargs: Dict[str, Any] = kwargs
         self.use_upstash_embeddings: bool = embedder is None
-
         if embedder is None:
             logger.warning(
                 "You have not provided an embedder, using Upstash hosted embedding models. "
@@ -688,3 +709,7 @@ class UpstashVectorDb(VectorDb):
         except Exception as e:
             logger.error(f"Error updating metadata for content_id '{content_id}': {e}")
             raise
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return []  # UpstashVectorDb doesn't use SearchType enum
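UpstashVectorDb now fails fast: a missing `url` or `token` raises `ValueError` at construction time instead of surfacing later on the first network call. What that looks like for a caller (credentials are placeholders; the import path is assumed from the file list):

```python
from agno.vectordb.upstashdb import UpstashVectorDb  # assumed import path

try:
    db = UpstashVectorDb(url="", token="")  # empty on purpose
except ValueError as e:
    # Raised immediately, before any request to Upstash is made.
    print(f"Configuration error: {e}")
```
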
agno/vectordb/weaviate/weaviate.py
CHANGED

@@ -41,6 +41,9 @@ class Weaviate(VectorDb):
         local: bool = False,
         # Collection params
         collection: str = "default",
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        id: Optional[str] = None,
         vector_index: VectorIndex = VectorIndex.HNSW,
         distance: Distance = Distance.COSINE,
         # Search/Embedding params
@@ -49,6 +52,17 @@ class Weaviate(VectorDb):
         reranker: Optional[Reranker] = None,
         hybrid_search_alpha: float = 0.5,
     ):
+        # Dynamic ID generation based on unique identifiers
+        if id is None:
+            from agno.utils.string import generate_id
+
+            connection_identifier = wcd_url or "local" if local else "default"
+            seed = f"{connection_identifier}#{collection}"
+            id = generate_id(seed)
+
+        # Initialize base class with name, description, and generated ID
+        super().__init__(id=id, name=name, description=description)
+
         # Connection setup
         self.wcd_url = wcd_url or getenv("WCD_URL")
         self.wcd_api_key = wcd_api_key or getenv("WCD_API_KEY")
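One subtlety in the Weaviate hunk above: a conditional expression binds more loosely than `or`, so `wcd_url or "local" if local else "default"` parses as `(wcd_url or "local") if local else "default"`. That means a non-local instance always seeds with `"default"`, even when `wcd_url` is set; whether that is intended or an operator-precedence slip isn't clear from the diff. A quick demonstration of the parse:

```python
wcd_url = "https://my-cluster.example.net"  # illustrative URL

local = True
print(wcd_url or "local" if local else "default")   # -> the URL
local = False
print(wcd_url or "local" if local else "default")   # -> 'default', not the URL
```
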
@@ -968,3 +982,7 @@ class Weaviate(VectorDb):
         except Exception as e:
             logger.error(f"Error deleting documents by content_hash '{content_hash}': {e}")
             return False
+
+    def get_supported_search_types(self) -> List[str]:
+        """Get the supported search types for this vector database."""
+        return [SearchType.vector, SearchType.keyword, SearchType.hybrid]

agno/workflow/condition.py
CHANGED

@@ -257,6 +257,7 @@ class Condition:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -332,6 +333,7 @@ class Condition:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=child_step_index,
             store_executor_outputs=store_executor_outputs,
@@ -523,6 +525,7 @@ class Condition:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -600,6 +603,7 @@ class Condition:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=child_step_index,
             store_executor_outputs=store_executor_outputs,
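This `stream_executor_events` flag (defaulting to `True`, and threaded identically through Loop, Parallel, Router, Step, and Steps below) lets a workflow suppress the events emitted by the agents or teams inside each step while still streaming the workflow's own step events. A hedged sketch of the caller's side, assuming `Workflow.run` in the reworked `agno/workflow/workflow.py` forwards the flag the way these internal signatures do:

```python
from agno.workflow import Workflow  # assumed public export

workflow = Workflow(name="demo", steps=[...])  # hypothetical steps

# stream_intermediate_steps=True keeps workflow-level step events;
# stream_executor_events=False (assumed to be exposed at this level)
# would drop the per-agent/team events yielded while a step runs.
for event in workflow.run(
    "input",
    stream=True,
    stream_intermediate_steps=True,
    stream_executor_events=False,
):
    print(type(event).__name__)
```
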
agno/workflow/loop.py
CHANGED

@@ -227,6 +227,7 @@ class Loop:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -302,6 +303,7 @@ class Loop:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=composite_step_index,
             store_executor_outputs=store_executor_outputs,
@@ -522,6 +524,7 @@ class Loop:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -597,6 +600,7 @@ class Loop:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=composite_step_index,
             store_executor_outputs=store_executor_outputs,

agno/workflow/parallel.py
CHANGED

@@ -317,6 +317,7 @@ class Parallel:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -385,6 +386,7 @@ class Parallel:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=sub_step_index,
             store_executor_outputs=store_executor_outputs,
@@ -621,6 +623,7 @@ class Parallel:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -689,6 +692,7 @@ class Parallel:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=sub_step_index,
             store_executor_outputs=store_executor_outputs,

agno/workflow/router.py
CHANGED

@@ -246,6 +246,7 @@ class Router:
         user_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -310,6 +311,7 @@ class Router:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=step_index,
             store_executor_outputs=store_executor_outputs,
@@ -497,6 +499,7 @@ class Router:
         user_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
@@ -563,6 +566,7 @@ class Router:
             session_id=session_id,
             user_id=user_id,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=step_index,
             store_executor_outputs=store_executor_outputs,

agno/workflow/step.py
CHANGED

@@ -384,10 +384,10 @@ class Step:
         """Enrich event with step and workflow context information"""
         if workflow_run_response is None:
             return event
-
-        if hasattr(event,
+
+        if hasattr(event, "workflow_id"):
             event.workflow_id = workflow_run_response.workflow_id
-        if hasattr(event,
+        if hasattr(event, "workflow_run_id"):
             event.workflow_run_id = workflow_run_response.run_id
         if hasattr(event, "step_id"):
             event.step_id = self.step_id
@@ -407,6 +407,7 @@ class Step:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional["WorkflowRunOutput"] = None,
         session_state: Optional[Dict[str, Any]] = None,
         step_index: Optional[Union[int, tuple]] = None,
@@ -475,7 +476,9 @@ class Step:
                 enriched_event = self._enrich_event_with_context(
                     event, workflow_run_response, step_index
                 )
-                yield enriched_event
+                # Only yield executor events if stream_executor_events is True
+                if stream_executor_events:
+                    yield enriched_event  # type: ignore[misc]
 
         # Merge session_state changes back
         if session_state is not None:
@@ -559,10 +562,10 @@ class Step:
             if isinstance(event, RunOutput) or isinstance(event, TeamRunOutput):
                 active_executor_run_response = event
                 break
-            enriched_event = self._enrich_event_with_context(
-                event, workflow_run_response, step_index
-            )
-            yield enriched_event
+            enriched_event = self._enrich_event_with_context(event, workflow_run_response, step_index)
+            # Only yield executor events if stream_executor_events is True
+            if stream_executor_events:
+                yield enriched_event  # type: ignore[misc]
 
         if session_state is not None:
             # Update workflow session state
@@ -809,6 +812,7 @@ class Step:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         workflow_run_response: Optional["WorkflowRunOutput"] = None,
         session_state: Optional[Dict[str, Any]] = None,
         step_index: Optional[Union[int, tuple]] = None,
@@ -876,7 +880,9 @@ class Step:
                 enriched_event = self._enrich_event_with_context(
                     event, workflow_run_response, step_index
                 )
-                yield enriched_event
+                # Only yield executor events if stream_executor_events is True
+                if stream_executor_events:
+                    yield enriched_event  # type: ignore[misc]
             if not final_response:
                 final_response = StepOutput(content=content)
         elif inspect.iscoroutinefunction(self.active_executor):
@@ -907,7 +913,9 @@ class Step:
                 enriched_event = self._enrich_event_with_context(
                     event, workflow_run_response, step_index
                 )
-                yield enriched_event
+                # Only yield executor events if stream_executor_events is True
+                if stream_executor_events:
+                    yield enriched_event  # type: ignore[misc]
             if not final_response:
                 final_response = StepOutput(content=content)
         else:
@@ -981,10 +989,10 @@ class Step:
             if isinstance(event, RunOutput) or isinstance(event, TeamRunOutput):
                 active_executor_run_response = event
                 break
-            enriched_event = self._enrich_event_with_context(
-                event, workflow_run_response, step_index
-            )
-            yield enriched_event
+            enriched_event = self._enrich_event_with_context(event, workflow_run_response, step_index)
+            # Only yield executor events if stream_executor_events is True
+            if stream_executor_events:
+                yield enriched_event  # type: ignore[misc]
 
         if session_state is not None:
             # Update workflow session state
@@ -1050,6 +1058,14 @@ class Step:
             executor_run_response.parent_run_id = workflow_run_response.run_id
             executor_run_response.workflow_step_id = self.step_id
 
+            # Scrub the executor response based on the executor's storage flags before storing
+            if (
+                not self.active_executor.store_media
+                or not self.active_executor.store_tool_messages
+                or not self.active_executor.store_history_messages
+            ):  # type: ignore
+                self.active_executor._scrub_run_output_for_storage(executor_run_response)  # type: ignore
+
             # Get the raw response from the step's active executor
             raw_response = executor_run_response
             if raw_response and isinstance(raw_response, (RunOutput, TeamRunOutput)):
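The scrub hunk gates on the executor's three storage flags; `not a or not b or not c` is just De Morgan's equivalent of `not (a and b and c)`, i.e. scrub whenever at least one flag is off. A stand-in sketch of the gate (the executor class and `_scrub_run_output_for_storage` body here are fakes; the real method is assumed to strip whatever the disabled flags cover):

```python
class FakeExecutor:
    store_media = True
    store_tool_messages = False   # e.g. keep tool payloads out of storage
    store_history_messages = True

    def _scrub_run_output_for_storage(self, run_output: dict) -> None:
        run_output["scrubbed"] = True  # placeholder for real stripping


executor = FakeExecutor()
run_output = {"content": "..."}

# Equivalent to the diff's `not a or not b or not c` gate:
if not (executor.store_media and executor.store_tool_messages and executor.store_history_messages):
    executor._scrub_run_output_for_storage(run_output)

print(run_output)  # {'content': '...', 'scrubbed': True}
```
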
agno/workflow/steps.py
CHANGED

@@ -208,6 +208,7 @@ class Steps:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
         parent_step_id: Optional[str] = None,
@@ -266,6 +267,7 @@ class Steps:
             user_id=user_id,
             session_state=session_state,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=child_step_index,
             store_executor_outputs=store_executor_outputs,
@@ -437,6 +439,7 @@ class Steps:
         session_id: Optional[str] = None,
         user_id: Optional[str] = None,
         stream_intermediate_steps: bool = False,
+        stream_executor_events: bool = True,
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
         parent_step_id: Optional[str] = None,
@@ -495,6 +498,7 @@ class Steps:
             user_id=user_id,
             session_state=session_state,
             stream_intermediate_steps=stream_intermediate_steps,
+            stream_executor_events=stream_executor_events,
             workflow_run_response=workflow_run_response,
             step_index=child_step_index,
             store_executor_outputs=store_executor_outputs,