agno 2.2.10__py3-none-any.whl → 2.2.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +75 -48
- agno/db/dynamo/utils.py +1 -1
- agno/db/firestore/utils.py +1 -1
- agno/db/gcs_json/utils.py +1 -1
- agno/db/in_memory/utils.py +1 -1
- agno/db/json/utils.py +1 -1
- agno/db/mongo/utils.py +3 -3
- agno/db/mysql/mysql.py +1 -1
- agno/db/mysql/utils.py +1 -1
- agno/db/postgres/utils.py +1 -1
- agno/db/redis/utils.py +1 -1
- agno/db/singlestore/singlestore.py +1 -1
- agno/db/singlestore/utils.py +1 -1
- agno/db/sqlite/async_sqlite.py +1 -1
- agno/db/sqlite/sqlite.py +1 -1
- agno/db/sqlite/utils.py +1 -1
- agno/filters.py +354 -0
- agno/knowledge/chunking/agentic.py +8 -9
- agno/knowledge/chunking/strategy.py +59 -15
- agno/knowledge/embedder/sentence_transformer.py +6 -2
- agno/knowledge/knowledge.py +43 -22
- agno/knowledge/reader/base.py +6 -2
- agno/knowledge/utils.py +20 -0
- agno/models/anthropic/claude.py +45 -9
- agno/models/base.py +4 -0
- agno/os/app.py +23 -7
- agno/os/interfaces/slack/router.py +53 -33
- agno/os/interfaces/slack/slack.py +9 -1
- agno/os/router.py +25 -1
- agno/os/routers/health.py +5 -3
- agno/os/routers/knowledge/knowledge.py +43 -17
- agno/os/routers/knowledge/schemas.py +4 -3
- agno/run/agent.py +11 -1
- agno/run/base.py +3 -2
- agno/session/agent.py +10 -5
- agno/team/team.py +57 -18
- agno/tools/file_generation.py +4 -4
- agno/tools/gmail.py +179 -0
- agno/tools/parallel.py +314 -0
- agno/utils/agent.py +22 -17
- agno/utils/gemini.py +15 -5
- agno/utils/knowledge.py +12 -5
- agno/utils/log.py +1 -0
- agno/utils/models/claude.py +2 -1
- agno/utils/print_response/agent.py +5 -4
- agno/utils/print_response/team.py +5 -4
- agno/vectordb/base.py +2 -4
- agno/vectordb/cassandra/cassandra.py +12 -5
- agno/vectordb/chroma/chromadb.py +10 -4
- agno/vectordb/clickhouse/clickhousedb.py +12 -4
- agno/vectordb/couchbase/couchbase.py +12 -3
- agno/vectordb/lancedb/lance_db.py +69 -144
- agno/vectordb/langchaindb/langchaindb.py +13 -4
- agno/vectordb/lightrag/lightrag.py +8 -3
- agno/vectordb/llamaindex/llamaindexdb.py +10 -4
- agno/vectordb/milvus/milvus.py +16 -5
- agno/vectordb/mongodb/mongodb.py +14 -3
- agno/vectordb/pgvector/pgvector.py +73 -15
- agno/vectordb/pineconedb/pineconedb.py +6 -2
- agno/vectordb/qdrant/qdrant.py +25 -13
- agno/vectordb/redis/redisdb.py +37 -30
- agno/vectordb/singlestore/singlestore.py +9 -4
- agno/vectordb/surrealdb/surrealdb.py +13 -3
- agno/vectordb/upstashdb/upstashdb.py +8 -5
- agno/vectordb/weaviate/weaviate.py +29 -12
- agno/workflow/step.py +3 -2
- agno/workflow/types.py +20 -1
- agno/workflow/workflow.py +103 -14
- {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/METADATA +4 -1
- {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/RECORD +73 -71
- {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/WHEEL +0 -0
- {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,8 @@
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional, Union

+from agno.filters import FilterExpr
 from agno.knowledge.document import Document
-from agno.utils.log import logger
+from agno.utils.log import log_warning, logger
 from agno.vectordb.base import VectorDb

 try:

@@ -68,7 +69,9 @@ class LlamaIndexVectorDb(VectorDb):
         logger.warning("LlamaIndexVectorDb.async_upsert() not supported - please check the vectorstore manually.")
         raise NotImplementedError

-    def search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Returns relevant documents matching the query.

@@ -82,6 +85,9 @@ class LlamaIndexVectorDb(VectorDb):
         Raises:
             ValueError: If the knowledge retriever is not of type BaseRetriever.
         """
+        if filters is not None:
+            log_warning("Filters are not supported in LlamaIndex. No filters will be applied.")
+
         if not isinstance(self.knowledge_retriever, BaseRetriever):
             raise ValueError(f"Knowledge retriever is not of type BaseRetriever: {self.knowledge_retriever}")

@@ -99,7 +105,7 @@ class LlamaIndexVectorDb(VectorDb):
         return documents

     async def async_search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
     ) -> List[Document]:
         return self.search(query, limit, filters)
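The signature change above is the pattern repeated across every vector store in this release: `filters` widens from a metadata dict to `Union[Dict[str, Any], List[FilterExpr]]`, and backends that cannot execute the expression DSL log a warning instead of failing. A minimal usage sketch, assuming an already configured `LlamaIndexVectorDb` instance named `vector_db`; the dict-shaped expressions mirror what the DSL serializes to, since the concrete `FilterExpr` constructors in `agno/filters.py` are not shown in this diff:

```python
# Existing behaviour: plain metadata dict (still accepted everywhere).
docs = vector_db.search("quarterly revenue", limit=5, filters={"user_id": "u_42"})

# New in 2.2.12: a list of filter expressions. The field names below are
# illustrative; the dict shape ("op"/"key"/"value") matches what PgVector's
# converter consumes later in this diff.
expr_filters = [
    {"op": "EQ", "key": "doc_type", "value": "report"},
    {"op": "IN", "key": "year", "values": [2023, 2024]},
]
docs = vector_db.search("quarterly revenue", limit=5, filters=expr_filters)
# LlamaIndexVectorDb logs "Filters are not supported in LlamaIndex. No filters
# will be applied." and runs the search unfiltered.
```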
agno/vectordb/milvus/milvus.py  CHANGED

@@ -9,10 +9,11 @@ try:
 except ImportError:
     raise ImportError("The `pymilvus` package is not installed. Please install it via `pip install pymilvus`.")

+from agno.filters import FilterExpr
 from agno.knowledge.document import Document
 from agno.knowledge.embedder import Embedder
 from agno.knowledge.reranker.base import Reranker
-from agno.utils.log import log_debug, log_error, log_info
+from agno.utils.log import log_debug, log_error, log_info, log_warning
 from agno.vectordb.base import VectorDb
 from agno.vectordb.distance import Distance
 from agno.vectordb.search import SearchType

@@ -675,7 +676,9 @@ class Milvus(VectorDb):
         """
         return MILVUS_DISTANCE_MAP.get(self.distance, "COSINE")

-    def search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Search for documents matching the query.

@@ -687,6 +690,9 @@ class Milvus(VectorDb):
         Returns:
             List[Document]: List of matching documents
         """
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in Milvus. No filters will be applied.")
+            filters = None
         if self.search_type == SearchType.hybrid:
             return self.hybrid_search(query, limit)

@@ -728,8 +734,11 @@ class Milvus(VectorDb):
         return search_results

     async def async_search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
     ) -> List[Document]:
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in Milvus. No filters will be applied.")
+            filters = None
         if self.search_type == SearchType.hybrid:
             return await self.async_hybrid_search(query, limit, filters)

@@ -765,7 +774,9 @@ class Milvus(VectorDb):
         log_info(f"Found {len(search_results)} documents")
         return search_results

-    def hybrid_search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def hybrid_search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Perform a hybrid search combining dense and sparse vector similarity.

@@ -857,7 +868,7 @@ class Milvus(VectorDb):
         return []

     async def async_hybrid_search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
     ) -> List[Document]:
         """
         Perform an asynchronous hybrid search combining dense and sparse vector similarity.
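MongoDB, Pinecone, and Qdrant (below) take the same route as Milvus: expression lists are detected with `isinstance(filters, List)`, a warning is logged, and the search proceeds unfiltered, while dict filters keep their previous behaviour. A condensed sketch of that guard as a standalone helper (the helper name is hypothetical; in the actual diff the check is inlined into each `search`/`async_search` method):

```python
from typing import Any, Dict, List, Optional, Union

from agno.filters import FilterExpr
from agno.utils.log import log_warning


def drop_unsupported_filter_expressions(
    filters: Optional[Union[Dict[str, Any], List[FilterExpr]]], backend: str
) -> Optional[Dict[str, Any]]:
    """Warn and drop list-based filter expressions on backends without DSL support."""
    if isinstance(filters, list):
        log_warning(f"Filters Expressions are not supported in {backend}. No filters will be applied.")
        return None
    return filters  # dict filters (or None) pass through unchanged
```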
agno/vectordb/mongodb/mongodb.py  CHANGED

@@ -1,9 +1,10 @@
 import asyncio
 import time
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union

 from bson import ObjectId

+from agno.filters import FilterExpr
 from agno.knowledge.document import Document
 from agno.knowledge.embedder import Embedder
 from agno.utils.log import log_debug, log_info, log_warning, logger

@@ -585,9 +586,16 @@ class MongoDb(VectorDb):
         return True

     def search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None, min_score: float = 0.0
+        self,
+        query: str,
+        limit: int = 5,
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
+        min_score: float = 0.0,
     ) -> List[Document]:
         """Search for documents using vector similarity."""
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in MongoDB. No filters will be applied.")
+            filters = None
         if self.search_type == SearchType.hybrid:
             return self.hybrid_search(query, limit=limit, filters=filters)

@@ -1153,9 +1161,12 @@ class MongoDb(VectorDb):
             logger.error(f"Error upserting document '{document.name}' asynchronously: {e}")

     async def async_search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
     ) -> List[Document]:
         """Search for documents asynchronously."""
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in MongoDB. No filters will be applied.")
+            filters = None
         query_embedding = self.embedder.get_embedding(query)
         if query_embedding is None:
             logger.error(f"Failed to generate embedding for query: {query}")
agno/vectordb/pgvector/pgvector.py  CHANGED

@@ -6,14 +6,15 @@ from typing import Any, Dict, List, Optional, Union, cast
 from agno.utils.string import generate_id

 try:
-    from sqlalchemy import update
+    from sqlalchemy import and_, not_, or_, update
     from sqlalchemy.dialects import postgresql
    from sqlalchemy.engine import Engine, create_engine
     from sqlalchemy.inspection import inspect
     from sqlalchemy.orm import Session, scoped_session, sessionmaker
     from sqlalchemy.schema import Column, Index, MetaData, Table
+    from sqlalchemy.sql.elements import ColumnElement
     from sqlalchemy.sql.expression import bindparam, desc, func, select, text
-    from sqlalchemy.types import DateTime, String
+    from sqlalchemy.types import DateTime, Integer, String

 except ImportError:
     raise ImportError("`sqlalchemy` not installed. Please install using `pip install sqlalchemy psycopg`")

@@ -23,6 +24,7 @@ try:
 except ImportError:
     raise ImportError("`pgvector` not installed. Please install using `pip install pgvector`")

+from agno.filters import FilterExpr
 from agno.knowledge.document import Document
 from agno.knowledge.embedder import Embedder
 from agno.knowledge.reranker.base import Reranker

@@ -680,14 +682,16 @@ class PgVector(VectorDb):
             logger.error(f"Error updating metadata for document {content_id}: {e}")
             raise

-    def search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Perform a search based on the configured search type.

         Args:
             query (str): The search query.
             limit (int): Maximum number of results to return.
-            filters (Optional[Dict[str, Any]]): Filters to apply to the search.
+            filters (Optional[Union[Dict[str, Any], List[FilterExpr]]]): Filters to apply to the search.

         Returns:
             List[Document]: List of matching documents.

@@ -703,19 +707,42 @@ class PgVector(VectorDb):
             return []

     async def async_search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
     ) -> List[Document]:
         """Search asynchronously by running in a thread."""
         return await asyncio.to_thread(self.search, query, limit, filters)

-    def vector_search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def _dsl_to_sqlalchemy(self, filter_expr, table) -> ColumnElement[bool]:
+        op = filter_expr["op"]
+
+        if op == "EQ":
+            return table.c.meta_data[filter_expr["key"]].astext == str(filter_expr["value"])
+        elif op == "IN":
+            # Postgres JSONB array containment
+            return table.c.meta_data[filter_expr["key"]].astext.in_([str(v) for v in filter_expr["values"]])
+        elif op == "GT":
+            return table.c.meta_data[filter_expr["key"]].astext.cast(Integer) > filter_expr["value"]
+        elif op == "LT":
+            return table.c.meta_data[filter_expr["key"]].astext.cast(Integer) < filter_expr["value"]
+        elif op == "NOT":
+            return not_(self._dsl_to_sqlalchemy(filter_expr["condition"], table))
+        elif op == "AND":
+            return and_(*[self._dsl_to_sqlalchemy(cond, table) for cond in filter_expr["conditions"]])
+        elif op == "OR":
+            return or_(*[self._dsl_to_sqlalchemy(cond, table) for cond in filter_expr["conditions"]])
+        else:
+            raise ValueError(f"Unknown filter operator: {op}")
+
+    def vector_search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Perform a vector similarity search.

         Args:
             query (str): The search query.
             limit (int): Maximum number of results to return.
-            filters (Optional[Dict[str, Any]]): Filters to apply to the search.
+            filters (Optional[Union[Dict[str, Any], List[FilterExpr]]]): Filters to apply to the search.

         Returns:
             List[Document]: List of matching documents.

@@ -742,7 +769,17 @@ class PgVector(VectorDb):

             # Apply filters if provided
             if filters is not None:
-                stmt = stmt.where(self.table.c.meta_data.contains(filters))
+                # Handle dict filters
+                if isinstance(filters, dict):
+                    stmt = stmt.where(self.table.c.meta_data.contains(filters))
+                # Handle FilterExpr DSL
+                else:
+                    # Convert each DSL expression to SQLAlchemy and AND them together
+                    sqlalchemy_conditions = [
+                        self._dsl_to_sqlalchemy(f.to_dict() if hasattr(f, "to_dict") else f, self.table)
+                        for f in filters
+                    ]
+                    stmt = stmt.where(and_(*sqlalchemy_conditions))

             # Order the results based on the distance metric
             if self.distance == Distance.l2:

@@ -815,14 +852,16 @@ class PgVector(VectorDb):
         processed_words = [word + "*" for word in words]
         return " ".join(processed_words)

-    def keyword_search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def keyword_search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Perform a keyword search on the 'content' column.

         Args:
             query (str): The search query.
             limit (int): Maximum number of results to return.
-            filters (Optional[Dict[str, Any]]): Filters to apply to the search.
+            filters (Optional[Union[Dict[str, Any], List[FilterExpr]]]): Filters to apply to the search.

         Returns:
             List[Document]: List of matching documents.

@@ -851,8 +890,17 @@ class PgVector(VectorDb):

             # Apply filters if provided
             if filters is not None:
-                #
-                stmt = stmt.where(self.table.c.meta_data.contains(filters))
+                # Handle dict filters
+                if isinstance(filters, dict):
+                    stmt = stmt.where(self.table.c.meta_data.contains(filters))
+                # Handle FilterExpr DSL
+                else:
+                    # Convert each DSL expression to SQLAlchemy and AND them together
+                    sqlalchemy_conditions = [
+                        self._dsl_to_sqlalchemy(f.to_dict() if hasattr(f, "to_dict") else f, self.table)
+                        for f in filters
+                    ]
+                    stmt = stmt.where(and_(*sqlalchemy_conditions))

             # Order by the relevance rank
             stmt = stmt.order_by(text_rank.desc())

@@ -898,7 +946,7 @@ class PgVector(VectorDb):
         self,
         query: str,
         limit: int = 5,
-        filters: Optional[Dict[str, Any]] = None,
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
     ) -> List[Document]:
         """
         Perform a hybrid search combining vector similarity and full-text search.

@@ -906,7 +954,7 @@ class PgVector(VectorDb):
         Args:
             query (str): The search query.
             limit (int): Maximum number of results to return.
-            filters (Optional[Dict[str, Any]]): Filters to apply to the search.
+            filters (Optional[Union[Dict[str, Any], List[FilterExpr]]]): Filters to apply to the search.

         Returns:
             List[Document]: List of matching documents.

@@ -973,7 +1021,17 @@ class PgVector(VectorDb):

             # Apply filters if provided
             if filters is not None:
-                stmt = stmt.where(self.table.c.meta_data.contains(filters))
+                # Handle dict filters
+                if isinstance(filters, dict):
+                    stmt = stmt.where(self.table.c.meta_data.contains(filters))
+                # Handle FilterExpr DSL
+                else:
+                    # Convert each DSL expression to SQLAlchemy and AND them together
+                    sqlalchemy_conditions = [
+                        self._dsl_to_sqlalchemy(f.to_dict() if hasattr(f, "to_dict") else f, self.table)
+                        for f in filters
+                    ]
+                    stmt = stmt.where(and_(*sqlalchemy_conditions))

             # Order the results by the hybrid score in descending order
             stmt = stmt.order_by(desc("hybrid_score"))
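PgVector is the one store in this section that actually executes the new DSL: `_dsl_to_sqlalchemy` walks each expression and builds a SQLAlchemy condition against the `meta_data` JSONB column, and the vector, keyword, and hybrid paths AND the resulting conditions into their `WHERE` clause. A minimal sketch of the serialized shapes the converter accepts (plain dicts work because of the `hasattr(f, "to_dict")` fallback; the metadata keys and the `pg_vector` instance are illustrative):

```python
# Dict-shaped filter expressions understood by PgVector._dsl_to_sqlalchemy.
# Leaf operators compare meta_data->>'<key>' (GT/LT cast the text to Integer);
# AND/OR/NOT nest recursively through "conditions"/"condition".
filters = [
    {"op": "EQ", "key": "doc_type", "value": "invoice"},      # meta_data->>'doc_type' = 'invoice'
    {"op": "GT", "key": "year", "value": 2022},                # CAST(meta_data->>'year' AS INTEGER) > 2022
    {
        "op": "OR",
        "conditions": [
            {"op": "IN", "key": "region", "values": ["emea", "apac"]},
            {"op": "NOT", "condition": {"op": "EQ", "key": "status", "value": "draft"}},
        ],
    },
]

# Each entry is converted and the results are combined with and_(), so this is roughly:
# doc_type = 'invoice' AND year > 2022 AND (region IN (...) OR NOT status = 'draft').
results = pg_vector.search("overdue invoices", limit=5, filters=filters)
```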
agno/vectordb/pineconedb/pineconedb.py  CHANGED

@@ -22,6 +22,7 @@ except ImportError:
     raise ImportError("The `pinecone` package is not installed, please install using `pip install pinecone`.")


+from agno.filters import FilterExpr
 from agno.knowledge.document import Document
 from agno.knowledge.embedder import Embedder
 from agno.knowledge.reranker.base import Reranker

@@ -474,7 +475,7 @@ class PineconeDb(VectorDb):
         self,
         query: str,
         limit: int = 5,
-        filters: Optional[Dict[str, Any]] = None,
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
         namespace: Optional[str] = None,
         include_values: Optional[bool] = None,
     ) -> List[Document]:

@@ -492,6 +493,9 @@ class PineconeDb(VectorDb):
             List[Document]: The list of matching documents.

         """
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in PineconeDB. No filters will be applied.")
+            filters = None
         dense_embedding = self.embedder.get_embedding(query)

         if self.use_hybrid_search:

@@ -540,7 +544,7 @@ class PineconeDb(VectorDb):
         self,
         query: str,
         limit: int = 5,
-        filters: Optional[Dict[str, Any]] = None,
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
         namespace: Optional[str] = None,
         include_values: Optional[bool] = None,
     ) -> List[Document]:
agno/vectordb/qdrant/qdrant.py  CHANGED

@@ -1,5 +1,5 @@
 from hashlib import md5
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union

 try:
     from qdrant_client import AsyncQdrantClient, QdrantClient  # noqa: F401

@@ -9,6 +9,7 @@ except ImportError:
         "The `qdrant-client` package is not installed. Please install it via `pip install qdrant-client`."
     )

+from agno.filters import FilterExpr
 from agno.knowledge.document import Document
 from agno.knowledge.embedder import Embedder
 from agno.knowledge.reranker.base import Reranker

@@ -528,7 +529,9 @@ class Qdrant(VectorDb):
         log_debug("Redirecting the async request to async_insert")
         await self.async_insert(content_hash=content_hash, documents=documents, filters=filters)

-    def search(self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None) -> List[Document]:
+    def search(
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
+    ) -> List[Document]:
         """
         Search for documents in the collection.

@@ -537,28 +540,37 @@ class Qdrant(VectorDb):
             limit (int): Number of search results to return
             filters (Optional[Dict[str, Any]]): Filters to apply while searching
         """
+
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in Qdrant. No filters will be applied.")
+            filters = None
+
         filters = self._format_filters(filters or {})  # type: ignore
         if self.search_type == SearchType.vector:
-            results = self._run_vector_search_sync(query, limit, filters)
+            results = self._run_vector_search_sync(query, limit, filters)  # type: ignore
         elif self.search_type == SearchType.keyword:
-            results = self._run_keyword_search_sync(query, limit, filters)
+            results = self._run_keyword_search_sync(query, limit, filters)  # type: ignore
         elif self.search_type == SearchType.hybrid:
-            results = self._run_hybrid_search_sync(query, limit, filters)
+            results = self._run_hybrid_search_sync(query, limit, filters)  # type: ignore
         else:
             raise ValueError(f"Unsupported search type: {self.search_type}")

         return self._build_search_results(results, query)

     async def async_search(
-        self, query: str, limit: int = 5, filters: Optional[Dict[str, Any]] = None
+        self, query: str, limit: int = 5, filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None
     ) -> List[Document]:
+        if isinstance(filters, List):
+            log_warning("Filters Expressions are not supported in Qdrant. No filters will be applied.")
+            filters = None
+
         filters = self._format_filters(filters or {})  # type: ignore
         if self.search_type == SearchType.vector:
-            results = await self._run_vector_search_async(query, limit, filters)
+            results = await self._run_vector_search_async(query, limit, filters)  # type: ignore
         elif self.search_type == SearchType.keyword:
-            results = await self._run_keyword_search_async(query, limit, filters)
+            results = await self._run_keyword_search_async(query, limit, filters)  # type: ignore
         elif self.search_type == SearchType.hybrid:
-            results = await self._run_hybrid_search_async(query, limit, filters)
+            results = await self._run_hybrid_search_async(query, limit, filters)  # type: ignore
         else:
             raise ValueError(f"Unsupported search type: {self.search_type}")

@@ -568,7 +580,7 @@ class Qdrant(VectorDb):
         self,
         query: str,
         limit: int,
-        filters: Optional[Dict[str, Any]],
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]],
     ) -> List[models.ScoredPoint]:
         dense_embedding = self.embedder.get_embedding(query)
         sparse_embedding = next(iter(self.sparse_encoder.embed([query]))).as_object()

@@ -594,7 +606,7 @@ class Qdrant(VectorDb):
         self,
         query: str,
         limit: int,
-        filters: Optional[Dict[str, Any]],
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]],
     ) -> List[models.ScoredPoint]:
         dense_embedding = self.embedder.get_embedding(query)

@@ -625,7 +637,7 @@ class Qdrant(VectorDb):
         self,
         query: str,
         limit: int,
-        filters: Optional[Dict[str, Any]],
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]],
     ) -> List[models.ScoredPoint]:
         sparse_embedding = next(iter(self.sparse_encoder.embed([query]))).as_object()
         call = self.client.query_points(

@@ -692,7 +704,7 @@ class Qdrant(VectorDb):
         self,
         query: str,
         limit: int,
-        filters: Optional[Dict[str, Any]],
+        filters: Optional[Union[Dict[str, Any], List[FilterExpr]]],
     ) -> List[models.ScoredPoint]:
         dense_embedding = self.embedder.get_embedding(query)
         sparse_embedding = next(iter(self.sparse_encoder.embed([query]))).as_object()