endee-llamaindex 0.1.2__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- endee_llamaindex/base.py +652 -304
- endee_llamaindex/constants.py +70 -0
- endee_llamaindex/utils.py +160 -0
- endee_llamaindex-0.1.5.dist-info/METADATA +615 -0
- endee_llamaindex-0.1.5.dist-info/RECORD +8 -0
- {endee_llamaindex-0.1.2.dist-info → endee_llamaindex-0.1.5.dist-info}/WHEEL +1 -1
- endee_llamaindex-0.1.2.dist-info/METADATA +0 -140
- endee_llamaindex-0.1.2.dist-info/RECORD +0 -6
- {endee_llamaindex-0.1.2.dist-info → endee_llamaindex-0.1.5.dist-info}/top_level.txt +0 -0
endee_llamaindex/base.py
CHANGED
|
@@ -1,16 +1,32 @@
|
|
|
1
|
+
"""
|
|
2
|
+
EndeeVectorStore: LlamaIndex vector store backed by the Endee API.
|
|
3
|
+
|
|
4
|
+
Aligned with the local endee package (./endee). API contract:
|
|
5
|
+
|
|
6
|
+
Endee (endee.endee):
|
|
7
|
+
- __init__(token, http_library)
|
|
8
|
+
- create_index(name, dimension, space_type, M, ef_con, precision, version, sparse_dim)
|
|
9
|
+
Validates: index name (alphanumeric + underscores, max length), dimension <= MAX_DIMENSION_ALLOWED,
|
|
10
|
+
space_type in SPACE_TYPES_SUPPORTED ('cosine','l2','ip'), precision in PRECISION_TYPES_SUPPORTED,
|
|
11
|
+
sparse_dim >= 0. Map 'euclidean'->'l2', 'inner_product'->'ip' before calling.
|
|
12
|
+
- get_index(name) -> Index
|
|
13
|
+
|
|
14
|
+
Index (endee.index):
|
|
15
|
+
- upsert(input_array): list of {id, vector, meta?, filter?, sparse_indices?, sparse_values?}; max MAX_VECTORS_PER_BATCH per batch; duplicate IDs in batch raise
|
|
16
|
+
- query(vector, top_k, filter, ef, include_vectors, sparse_indices, sparse_values)
|
|
17
|
+
- delete_vector(id), get_vector(id), describe()
|
|
18
|
+
|
|
19
|
+
No list_ids or batch fetch in endee; filter for query is JSON-serializable (e.g. [{"field":{"$op":value}}] or dict).
|
|
20
|
+
"""
|
|
21
|
+
|
|
1
22
|
import logging
|
|
2
|
-
from collections import Counter
|
|
3
|
-
from functools import partial
|
|
4
23
|
import json
|
|
5
24
|
from typing import Any, Callable, Dict, List, Optional, cast
|
|
6
|
-
|
|
7
25
|
from llama_index.core.bridge.pydantic import PrivateAttr
|
|
8
|
-
from llama_index.core.schema import BaseNode,
|
|
26
|
+
from llama_index.core.schema import BaseNode, TextNode
|
|
9
27
|
from llama_index.core.vector_stores.types import (
|
|
10
28
|
BasePydanticVectorStore,
|
|
11
|
-
MetadataFilters,
|
|
12
29
|
VectorStoreQuery,
|
|
13
|
-
VectorStoreQueryMode,
|
|
14
30
|
VectorStoreQueryResult,
|
|
15
31
|
)
|
|
16
32
|
from llama_index.core.vector_stores.utils import (
|
|
@@ -19,83 +35,34 @@ from llama_index.core.vector_stores.utils import (
|
|
|
19
35
|
metadata_dict_to_node,
|
|
20
36
|
node_to_metadata_dict,
|
|
21
37
|
)
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
ID_KEY = "id"
|
|
40
|
-
VECTOR_KEY = "values"
|
|
41
|
-
SPARSE_VECTOR_KEY = "sparse_values"
|
|
42
|
-
METADATA_KEY = "metadata"
|
|
43
|
-
|
|
44
|
-
DEFAULT_BATCH_SIZE = 100
|
|
45
|
-
|
|
38
|
+
from .constants import (
|
|
39
|
+
DEFAULT_BATCH_SIZE,
|
|
40
|
+
DEFAULT_EF_SEARCH,
|
|
41
|
+
MAX_DIMENSION_ALLOWED,
|
|
42
|
+
MAX_EF_SEARCH_ALLOWED,
|
|
43
|
+
MAX_INDEX_NAME_LENGTH_ALLOWED,
|
|
44
|
+
MAX_TOP_K_ALLOWED,
|
|
45
|
+
MAX_VECTORS_PER_BATCH,
|
|
46
|
+
PRECISION_VALID,
|
|
47
|
+
REVERSE_OPERATOR_MAP,
|
|
48
|
+
SPACE_TYPE_MAP,
|
|
49
|
+
SPACE_TYPES_VALID,
|
|
50
|
+
SUPPORTED_FILTER_OPERATORS,
|
|
51
|
+
)
|
|
52
|
+
from .utils import get_sparse_encoder
|
|
53
|
+
from endee import Endee
|
|
46
54
|
_logger = logging.getLogger(__name__)
|
|
47
55
|
|
|
48
|
-
from llama_index.core.vector_stores.types import MetadataFilter, FilterOperator
|
|
49
|
-
|
|
50
|
-
reverse_operator_map = {
|
|
51
|
-
FilterOperator.EQ: "$eq",
|
|
52
|
-
FilterOperator.NE: "$ne",
|
|
53
|
-
FilterOperator.GT: "$gt",
|
|
54
|
-
FilterOperator.GTE: "$gte",
|
|
55
|
-
FilterOperator.LT: "$lt",
|
|
56
|
-
FilterOperator.LTE: "$lte",
|
|
57
|
-
FilterOperator.IN: "$in",
|
|
58
|
-
FilterOperator.NIN: "$nin",
|
|
59
|
-
}
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
def build_dict(input_batch: List[List[int]]) -> List[Dict[str, Any]]:
|
|
64
|
-
"""
|
|
65
|
-
Build a list of sparse dictionaries from a batch of input_ids.
|
|
66
56
|
|
|
67
|
-
NOTE: taken from https://www.pinecone.io/learn/hybrid-search-intro/.
|
|
68
57
|
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
indices = []
|
|
75
|
-
values = []
|
|
76
|
-
# convert the input_ids list to a dictionary of key to frequency values
|
|
77
|
-
d = dict(Counter(token_ids))
|
|
78
|
-
for idx in d:
|
|
79
|
-
indices.append(idx)
|
|
80
|
-
values.append(float(d[idx]))
|
|
81
|
-
sparse_emb.append({"indices": indices, "values": values})
|
|
82
|
-
# return sparse_emb list
|
|
83
|
-
return sparse_emb
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
def generate_sparse_vectors(
|
|
87
|
-
context_batch: List[str], tokenizer: Callable
|
|
88
|
-
) -> List[Dict[str, Any]]:
|
|
89
|
-
"""
|
|
90
|
-
Generate sparse vectors from a batch of contexts.
|
|
58
|
+
# Supported sparse embedding models
|
|
59
|
+
SUPPORTED_SPARSE_MODELS = {
|
|
60
|
+
"splade_pp": "prithivida/Splade_PP_en_v1",
|
|
61
|
+
"splade_cocondenser": "naver/splade-cocondenser-ensembledistil",
|
|
62
|
+
}
|
|
91
63
|
|
|
92
|
-
NOTE: taken from https://www.pinecone.io/learn/hybrid-search-intro/.
|
|
93
64
|
|
|
94
|
-
|
|
95
|
-
# create batch of input_ids
|
|
96
|
-
inputs = tokenizer(context_batch)["input_ids"]
|
|
97
|
-
# create sparse dictionaries
|
|
98
|
-
return build_dict(inputs)
|
|
65
|
+
# Import sparse encoder utilities from utils module
|
|
99
66
|
|
|
100
67
|
|
|
101
68
|
import_err_msg = (
|
|
@@ -107,18 +74,20 @@ class EndeeVectorStore(BasePydanticVectorStore):
|
|
|
107
74
|
|
|
108
75
|
stores_text: bool = True
|
|
109
76
|
flat_metadata: bool = False
|
|
110
|
-
|
|
111
77
|
api_token: Optional[str]
|
|
112
78
|
index_name: Optional[str]
|
|
113
79
|
space_type: Optional[str]
|
|
114
80
|
dimension: Optional[int]
|
|
115
|
-
insert_kwargs: Optional[Dict]
|
|
116
81
|
add_sparse_vector: bool
|
|
117
82
|
text_key: str
|
|
118
83
|
batch_size: int
|
|
119
84
|
remove_text_from_metadata: bool
|
|
120
|
-
|
|
85
|
+
hybrid: bool
|
|
86
|
+
sparse_dim: Optional[int]
|
|
87
|
+
model_name: Optional[str]
|
|
88
|
+
precision: Optional[str]
|
|
121
89
|
_endee_index: Any = PrivateAttr()
|
|
90
|
+
_sparse_encoder: Optional[Callable] = PrivateAttr(default=None)
|
|
122
91
|
|
|
123
92
|
def __init__(
|
|
124
93
|
self,
|
|
@@ -127,31 +96,76 @@ class EndeeVectorStore(BasePydanticVectorStore):
|
|
|
127
96
|
index_name: Optional[str] = None,
|
|
128
97
|
space_type: Optional[str] = "cosine",
|
|
129
98
|
dimension: Optional[int] = None,
|
|
130
|
-
insert_kwargs: Optional[Dict] = None,
|
|
131
99
|
add_sparse_vector: bool = False,
|
|
132
100
|
text_key: str = DEFAULT_TEXT_KEY,
|
|
133
101
|
batch_size: int = DEFAULT_BATCH_SIZE,
|
|
134
102
|
remove_text_from_metadata: bool = False,
|
|
103
|
+
hybrid: bool = False,
|
|
104
|
+
sparse_dim: Optional[int] = None,
|
|
105
|
+
model_name: Optional[str] = None,
|
|
106
|
+
precision: Optional[str] = "float16",
|
|
107
|
+
M: Optional[int] = None,
|
|
108
|
+
ef_con: Optional[int] = None,
|
|
135
109
|
**kwargs: Any,
|
|
136
110
|
) -> None:
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
111
|
+
try:
|
|
112
|
+
super().__init__(
|
|
113
|
+
index_name=index_name,
|
|
114
|
+
api_token=api_token,
|
|
115
|
+
space_type=space_type,
|
|
116
|
+
dimension=dimension,
|
|
117
|
+
add_sparse_vector=add_sparse_vector,
|
|
118
|
+
text_key=text_key,
|
|
119
|
+
batch_size=batch_size,
|
|
120
|
+
remove_text_from_metadata=remove_text_from_metadata,
|
|
121
|
+
sparse_dim=sparse_dim,
|
|
122
|
+
hybrid=hybrid,
|
|
123
|
+
model_name=model_name,
|
|
124
|
+
precision=precision,
|
|
125
|
+
)
|
|
126
|
+
|
|
127
|
+
# Initialize index (handles both dense and hybrid)
|
|
128
|
+
if endee_index is not None:
|
|
129
|
+
self._endee_index = endee_index
|
|
130
|
+
else:
|
|
131
|
+
# sparse_dim=None creates dense index, sparse_dim>0 creates hybrid index
|
|
132
|
+
self._endee_index = self._initialize_endee_index(
|
|
133
|
+
api_token,
|
|
134
|
+
index_name,
|
|
135
|
+
dimension,
|
|
136
|
+
space_type,
|
|
137
|
+
precision,
|
|
138
|
+
sparse_dim=sparse_dim if hybrid else None,
|
|
139
|
+
M=M,
|
|
140
|
+
ef_con=ef_con,
|
|
141
|
+
)
|
|
142
|
+
|
|
143
|
+
# Initialize sparse encoder if hybrid mode is enabled
|
|
144
|
+
if hybrid:
|
|
145
|
+
# Use default model if none provided
|
|
146
|
+
if model_name is None:
|
|
147
|
+
model_name = "splade_pp" # Default sparse model
|
|
148
|
+
_logger.info(f"Using default sparse model: {model_name}")
|
|
149
|
+
# If user provided an unsupported model, fall back to one of the supported models
|
|
150
|
+
elif model_name not in SUPPORTED_SPARSE_MODELS:
|
|
151
|
+
_logger.warning(
|
|
152
|
+
f"Unsupported sparse model_name {model_name!r} provided. "
|
|
153
|
+
"Falling back to default 'splade_pp'. "
|
|
154
|
+
f"Supported sparse models: {list(SUPPORTED_SPARSE_MODELS.keys())}"
|
|
155
|
+
)
|
|
156
|
+
model_name = "splade_pp"
|
|
157
|
+
|
|
158
|
+
_logger.info(f"Initializing sparse encoder with model: {model_name}")
|
|
159
|
+
self._sparse_encoder = get_sparse_encoder(
|
|
160
|
+
model_name=model_name,
|
|
161
|
+
use_fastembed=True, # Default to FastEmbed
|
|
162
|
+
batch_size=batch_size,
|
|
163
|
+
)
|
|
164
|
+
else:
|
|
165
|
+
self._sparse_encoder = None
|
|
166
|
+
except Exception as e:
|
|
167
|
+
_logger.error(f"Error initializing EndeeVectorStore: {e}")
|
|
168
|
+
raise
|
|
155
169
|
|
|
156
170
|
@classmethod
|
|
157
171
|
def _initialize_endee_index(
|
|
@@ -160,33 +174,139 @@ class EndeeVectorStore(BasePydanticVectorStore):
|
|
|
160
174
|
index_name: Optional[str],
|
|
161
175
|
dimension: Optional[int] = None,
|
|
162
176
|
space_type: Optional[str] = "cosine",
|
|
177
|
+
precision: Optional[str] = "float16",
|
|
178
|
+
sparse_dim: Optional[int] = None,
|
|
179
|
+
M: Optional[int] = None,
|
|
180
|
+
ef_con: Optional[int] = None,
|
|
163
181
|
) -> Any:
|
|
164
|
-
"""
|
|
165
|
-
|
|
166
|
-
from endee.endee_client import Endee
|
|
167
|
-
|
|
168
|
-
# Initialize Endee client
|
|
169
|
-
nd = Endee(token=api_token)
|
|
182
|
+
"""
|
|
183
|
+
Initialize Endee index (dense or hybrid).
|
|
170
184
|
|
|
185
|
+
Args:
|
|
186
|
+
api_token: Endee API token
|
|
187
|
+
index_name: Name of the index
|
|
188
|
+
dimension: Dense vector dimension
|
|
189
|
+
space_type: Distance metric (cosine, l2, ip)
|
|
190
|
+
precision: Vector precision type
|
|
191
|
+
sparse_dim: Sparse vector dimension. If None or 0, creates dense-only index.
|
|
192
|
+
If > 0, creates hybrid index with both dense and sparse vectors.
|
|
193
|
+
M: HNSW graph connectivity parameter (optional)
|
|
194
|
+
ef_con: HNSW construction parameter (optional)
|
|
195
|
+
|
|
196
|
+
Returns:
|
|
197
|
+
Endee Index object
|
|
198
|
+
"""
|
|
171
199
|
try:
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
_logger.info("Connecting to Endee service...")
|
|
204
|
+
nd = Endee(token=api_token)
|
|
205
|
+
prec = precision if precision is not None else "float16"
|
|
206
|
+
is_hybrid = sparse_dim is not None and sparse_dim > 0
|
|
207
|
+
dim_sparse = sparse_dim if is_hybrid else 0
|
|
208
|
+
|
|
209
|
+
try:
|
|
210
|
+
_logger.info(f"Checking if index '{index_name}' exists...")
|
|
211
|
+
index = nd.get_index(name=index_name)
|
|
212
|
+
# Check if existing index matches expected type
|
|
213
|
+
existing_sparse_dim = getattr(index, "sparse_dim", 0)
|
|
214
|
+
if is_hybrid and existing_sparse_dim > 0:
|
|
215
|
+
_logger.info(f"✓ Retrieved existing hybrid index: {index_name}")
|
|
216
|
+
elif not is_hybrid and existing_sparse_dim == 0:
|
|
217
|
+
_logger.info(f"✓ Retrieved existing dense index: {index_name}")
|
|
218
|
+
elif is_hybrid and existing_sparse_dim == 0:
|
|
219
|
+
_logger.warning(
|
|
220
|
+
f"Index '{index_name}' exists as dense-only (sparse_dim=0) but hybrid was requested. "
|
|
221
|
+
f"Using existing dense index."
|
|
222
|
+
)
|
|
223
|
+
else:
|
|
224
|
+
_logger.warning(
|
|
225
|
+
f"Index '{index_name}' exists as hybrid (sparse_dim={existing_sparse_dim}) "
|
|
226
|
+
f"but dense-only was requested. Using existing hybrid index."
|
|
227
|
+
)
|
|
228
|
+
return index
|
|
229
|
+
|
|
230
|
+
except Exception as e:
|
|
231
|
+
# Index doesn't exist, create new one
|
|
232
|
+
if dimension is None:
|
|
233
|
+
raise ValueError(
|
|
234
|
+
f"Must provide dimension when creating a new {'hybrid' if is_hybrid else 'dense'} index"
|
|
235
|
+
) from e
|
|
236
|
+
if is_hybrid and sparse_dim is None:
|
|
237
|
+
raise ValueError(
|
|
238
|
+
"Must provide sparse_dim when creating a new hybrid index"
|
|
239
|
+
) from e
|
|
240
|
+
|
|
241
|
+
# Validate index name
|
|
242
|
+
try:
|
|
243
|
+
from endee.utils import is_valid_index_name
|
|
244
|
+
|
|
245
|
+
if not is_valid_index_name(index_name):
|
|
246
|
+
raise ValueError(
|
|
247
|
+
f"Invalid index name. Index name must be alphanumeric and can "
|
|
248
|
+
f"contain underscores and should be less than "
|
|
249
|
+
f"{MAX_INDEX_NAME_LENGTH_ALLOWED} characters"
|
|
250
|
+
)
|
|
251
|
+
except ImportError:
|
|
252
|
+
pass
|
|
253
|
+
|
|
254
|
+
# Validate dimension
|
|
255
|
+
if dimension > MAX_DIMENSION_ALLOWED:
|
|
256
|
+
raise ValueError(
|
|
257
|
+
f"Dimension cannot be greater than {MAX_DIMENSION_ALLOWED}"
|
|
258
|
+
)
|
|
259
|
+
|
|
260
|
+
# Validate sparse_dim
|
|
261
|
+
if dim_sparse < 0:
|
|
262
|
+
raise ValueError("sparse_dim cannot be negative")
|
|
263
|
+
|
|
264
|
+
# Validate and map space_type
|
|
265
|
+
space = SPACE_TYPE_MAP.get(
|
|
266
|
+
(space_type or "cosine").lower(), (space_type or "cosine").lower()
|
|
267
|
+
)
|
|
268
|
+
if space not in SPACE_TYPES_VALID:
|
|
269
|
+
raise ValueError(f"Invalid space type: {space}")
|
|
270
|
+
|
|
271
|
+
# Validate precision
|
|
272
|
+
if prec not in PRECISION_VALID:
|
|
273
|
+
raise ValueError(
|
|
274
|
+
f"Invalid precision: {prec}. Use one of {PRECISION_VALID}"
|
|
275
|
+
)
|
|
276
|
+
|
|
277
|
+
# Build create_index kwargs
|
|
278
|
+
create_kwargs = {
|
|
279
|
+
"name": index_name,
|
|
280
|
+
"dimension": dimension,
|
|
281
|
+
"space_type": space,
|
|
282
|
+
"precision": prec,
|
|
283
|
+
"sparse_dim": dim_sparse,
|
|
284
|
+
}
|
|
285
|
+
# Only add M and ef_con if provided
|
|
286
|
+
if M is not None:
|
|
287
|
+
create_kwargs["M"] = M
|
|
288
|
+
if ef_con is not None:
|
|
289
|
+
create_kwargs["ef_con"] = ef_con
|
|
290
|
+
# Build log message
|
|
291
|
+
index_type = "hybrid" if is_hybrid else "dense"
|
|
292
|
+
log_msg = f"Creating new {index_type} index '{index_name}' (dimension={dimension}"
|
|
293
|
+
if is_hybrid:
|
|
294
|
+
log_msg += f", sparse_dim={dim_sparse}"
|
|
295
|
+
if M is not None:
|
|
296
|
+
log_msg += f", M={M}"
|
|
297
|
+
if ef_con is not None:
|
|
298
|
+
log_msg += f", ef_con={ef_con}"
|
|
299
|
+
log_msg += ")..."
|
|
300
|
+
|
|
301
|
+
_logger.info(log_msg)
|
|
302
|
+
|
|
303
|
+
nd.create_index(**create_kwargs)
|
|
304
|
+
_logger.info("✓ Index created successfully")
|
|
305
|
+
return nd.get_index(name=index_name)
|
|
306
|
+
|
|
176
307
|
except Exception as e:
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
"Must provide dimension when creating a new index"
|
|
180
|
-
) from e
|
|
181
|
-
|
|
182
|
-
# Create a new index if it doesn't exist
|
|
183
|
-
_logger.info(f"Creating new index: {index_name}")
|
|
184
|
-
nd.create_index(
|
|
185
|
-
name=index_name,
|
|
186
|
-
dimension=dimension,
|
|
187
|
-
space_type=space_type,
|
|
188
|
-
)
|
|
189
|
-
return nd.get_index(name=index_name)
|
|
308
|
+
_logger.error(f"Error initializing Endee index: {e}")
|
|
309
|
+
raise
|
|
190
310
|
|
|
191
311
|
@classmethod
|
|
192
312
|
def from_params(
|
|
@@ -196,24 +316,109 @@ class EndeeVectorStore(BasePydanticVectorStore):
|
|
|
196
316
|
dimension: Optional[int] = None,
|
|
197
317
|
space_type: str = "cosine",
|
|
198
318
|
batch_size: int = DEFAULT_BATCH_SIZE,
|
|
319
|
+
hybrid: bool = False,
|
|
320
|
+
sparse_dim: Optional[int] = None,
|
|
321
|
+
model_name: Optional[str] = None,
|
|
322
|
+
precision: Optional[str] = "float16",
|
|
323
|
+
M: Optional[int] = None,
|
|
324
|
+
ef_con: Optional[int] = None,
|
|
199
325
|
) -> "EndeeVectorStore":
|
|
200
|
-
"""Create EndeeVectorStore from parameters.
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
dimension
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
326
|
+
"""Create EndeeVectorStore from parameters.
|
|
327
|
+
|
|
328
|
+
Args:
|
|
329
|
+
api_token: API token for Endee service
|
|
330
|
+
index_name: Name of the index
|
|
331
|
+
dimension: Vector dimension
|
|
332
|
+
space_type: Distance metric ("cosine", "l2", or "ip")
|
|
333
|
+
batch_size: Batch size for operations
|
|
334
|
+
hybrid: If True, create/use a hybrid index. Auto-set to True if sparse_dim > 0.
|
|
335
|
+
sparse_dim: Sparse dimension for hybrid index. If > 0, hybrid is automatically enabled.
|
|
336
|
+
model_name: Model name or alias for sparse embeddings. Defaults to 'splade_pp' if not provided.
|
|
337
|
+
Available: 'splade_pp', 'splade_cocondenser', 'bert_base', 'distilbert', etc.
|
|
338
|
+
precision: Precision for index. Use one of: "binary", "float16", "float32", "int16d", "int8d". Default "float16".
|
|
339
|
+
M: Optional HNSW M parameter (bi-directional links per node). If not provided, backend uses default.
|
|
340
|
+
ef_con: Optional HNSW ef_construction parameter. If not provided, backend uses default.
|
|
341
|
+
"""
|
|
342
|
+
# Auto-enable hybrid if sparse_dim is provided and > 0
|
|
343
|
+
try:
|
|
344
|
+
if sparse_dim is not None and sparse_dim > 0:
|
|
345
|
+
hybrid = True
|
|
346
|
+
_logger.info(f"Auto-enabling hybrid mode (sparse_dim={sparse_dim} > 0)")
|
|
347
|
+
if sparse_dim>0:
|
|
348
|
+
sparse_dim=30522
|
|
349
|
+
|
|
350
|
+
# Initialize index (unified method handles both dense and hybrid)
|
|
351
|
+
endee_index = cls._initialize_endee_index(
|
|
352
|
+
api_token,
|
|
353
|
+
index_name,
|
|
354
|
+
dimension,
|
|
355
|
+
space_type,
|
|
356
|
+
precision,
|
|
357
|
+
sparse_dim=sparse_dim if hybrid else None,
|
|
358
|
+
M=M,
|
|
359
|
+
ef_con=ef_con,
|
|
360
|
+
)
|
|
361
|
+
|
|
362
|
+
# Get actual index configuration from the backend
|
|
363
|
+
try:
|
|
364
|
+
index_info = endee_index.describe()
|
|
365
|
+
actual_index_name = index_info.get("name", index_name)
|
|
366
|
+
actual_dimension = index_info.get("dimension", dimension)
|
|
367
|
+
actual_space_type = index_info.get("space_type", space_type)
|
|
368
|
+
actual_precision = index_info.get("precision", precision)
|
|
369
|
+
actual_sparse_dim = index_info.get("sparse_dim", sparse_dim)
|
|
370
|
+
except Exception as e:
|
|
371
|
+
_logger.warning(
|
|
372
|
+
f"Could not get index info, using provided parameters: {e}"
|
|
373
|
+
)
|
|
374
|
+
# Fallback to provided parameters
|
|
375
|
+
actual_index_name = index_name
|
|
376
|
+
actual_dimension = dimension
|
|
377
|
+
actual_space_type = space_type
|
|
378
|
+
actual_precision = precision
|
|
379
|
+
actual_sparse_dim = sparse_dim
|
|
380
|
+
|
|
381
|
+
# Determine if index is hybrid based on sparse_dim
|
|
382
|
+
actual_hybrid = actual_sparse_dim is not None and actual_sparse_dim > 0
|
|
383
|
+
|
|
384
|
+
return cls(
|
|
385
|
+
endee_index=endee_index,
|
|
386
|
+
api_token=api_token,
|
|
387
|
+
index_name=actual_index_name,
|
|
388
|
+
dimension=actual_dimension,
|
|
389
|
+
space_type=actual_space_type,
|
|
390
|
+
batch_size=batch_size,
|
|
391
|
+
sparse_dim=actual_sparse_dim,
|
|
392
|
+
hybrid=actual_hybrid,
|
|
393
|
+
model_name=model_name,
|
|
394
|
+
precision=actual_precision,
|
|
395
|
+
M=M,
|
|
396
|
+
ef_con=ef_con,
|
|
397
|
+
)
|
|
398
|
+
except Exception as e:
|
|
399
|
+
_logger.error(f"Error creating EndeeVectorStore from params: {e}")
|
|
400
|
+
raise
|
|
213
401
|
|
|
214
402
|
@classmethod
|
|
215
403
|
def class_name(cls) -> str:
|
|
216
|
-
|
|
404
|
+
try:
|
|
405
|
+
return "EndeeVectorStore"
|
|
406
|
+
except Exception as e:
|
|
407
|
+
_logger.error(f"Error getting class name: {e}")
|
|
408
|
+
raise
|
|
409
|
+
|
|
410
|
+
def _compute_sparse_vectors(self, texts: List[str]) -> tuple:
|
|
411
|
+
"""Compute sparse vectors for a list of texts."""
|
|
412
|
+
try:
|
|
413
|
+
if self._sparse_encoder is None:
|
|
414
|
+
raise ValueError(
|
|
415
|
+
"Sparse encoder not initialized. "
|
|
416
|
+
"Please provide model_name when creating the store with hybrid=True."
|
|
417
|
+
)
|
|
418
|
+
return self._sparse_encoder(texts)
|
|
419
|
+
except Exception as e:
|
|
420
|
+
_logger.error(f"Error computing sparse vectors: {e}")
|
|
421
|
+
raise
|
|
217
422
|
|
|
218
423
|
def add(
|
|
219
424
|
self,
|
|
@@ -224,193 +429,336 @@ class EndeeVectorStore(BasePydanticVectorStore):
|
|
|
224
429
|
Add nodes to index.
|
|
225
430
|
|
|
226
431
|
Args:
|
|
227
|
-
nodes: List
|
|
432
|
+
nodes: List of nodes with embeddings to add to the index.
|
|
433
|
+
If index is configured for hybrid search (self.hybrid=True),
|
|
434
|
+
sparse vectors will be automatically computed from node text.
|
|
228
435
|
"""
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
if
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
436
|
+
try:
|
|
437
|
+
# Use instance hybrid setting
|
|
438
|
+
use_hybrid = self.hybrid
|
|
439
|
+
|
|
440
|
+
# Endee Index.upsert rejects duplicate IDs in a batch; dedupe by node_id (keep last)
|
|
441
|
+
seen: Dict[str, int] = {}
|
|
442
|
+
for idx, node in enumerate(nodes):
|
|
443
|
+
seen[node.node_id] = idx
|
|
444
|
+
deduped_indices = sorted(seen.values())
|
|
445
|
+
nodes = [nodes[i] for i in deduped_indices]
|
|
446
|
+
|
|
447
|
+
ids = []
|
|
448
|
+
entries = []
|
|
449
|
+
texts = []
|
|
450
|
+
|
|
451
|
+
# Collect texts for sparse encoding if hybrid mode
|
|
452
|
+
if use_hybrid:
|
|
453
|
+
for node in nodes:
|
|
454
|
+
text = node.get_content()
|
|
455
|
+
texts.append(text)
|
|
456
|
+
|
|
457
|
+
# Compute sparse vectors in batch
|
|
458
|
+
if self._sparse_encoder is not None and texts:
|
|
459
|
+
sparse_indices, sparse_values = self._compute_sparse_vectors(texts)
|
|
460
|
+
else:
|
|
461
|
+
sparse_indices = [[] for _ in texts]
|
|
462
|
+
sparse_values = [[] for _ in texts]
|
|
463
|
+
|
|
464
|
+
for i, node in enumerate(nodes):
|
|
465
|
+
node_id = node.node_id
|
|
466
|
+
metadata = node_to_metadata_dict(node)
|
|
467
|
+
|
|
468
|
+
# Filter values must be simple key-value pairs
|
|
469
|
+
filter_data = {}
|
|
470
|
+
ref_id = getattr(node, "ref_doc_id", None) or metadata.get("ref_doc_id")
|
|
471
|
+
if ref_id is not None:
|
|
472
|
+
filter_data["ref_doc_id"] = ref_id
|
|
473
|
+
if "file_name" in metadata:
|
|
474
|
+
filter_data["file_name"] = metadata["file_name"]
|
|
475
|
+
if "doc_id" in metadata:
|
|
476
|
+
filter_data["doc_id"] = metadata["doc_id"]
|
|
477
|
+
if "category" in metadata:
|
|
478
|
+
filter_data["category"] = metadata["category"]
|
|
479
|
+
if "difficulty" in metadata:
|
|
480
|
+
filter_data["difficulty"] = metadata["difficulty"]
|
|
481
|
+
if "language" in metadata:
|
|
482
|
+
filter_data["language"] = metadata["language"]
|
|
483
|
+
if "field" in metadata:
|
|
484
|
+
filter_data["field"] = metadata["field"]
|
|
485
|
+
if "type" in metadata:
|
|
486
|
+
filter_data["type"] = metadata["type"]
|
|
487
|
+
if "feature" in metadata:
|
|
488
|
+
filter_data["feature"] = metadata["feature"]
|
|
489
|
+
|
|
490
|
+
# Build entry for endee Index.upsert
|
|
491
|
+
if use_hybrid:
|
|
492
|
+
entry = {
|
|
493
|
+
"id": node_id,
|
|
494
|
+
"vector": node.get_embedding(),
|
|
495
|
+
"sparse_indices": sparse_indices[i],
|
|
496
|
+
"sparse_values": sparse_values[i],
|
|
497
|
+
"meta": metadata,
|
|
498
|
+
"filter": filter_data,
|
|
499
|
+
}
|
|
500
|
+
else:
|
|
501
|
+
entry = {
|
|
502
|
+
"id": node_id,
|
|
503
|
+
"vector": node.get_embedding(),
|
|
504
|
+
"meta": metadata,
|
|
505
|
+
"filter": filter_data,
|
|
506
|
+
}
|
|
507
|
+
|
|
508
|
+
ids.append(node_id)
|
|
509
|
+
entries.append(entry)
|
|
510
|
+
|
|
511
|
+
# Batch insert; endee Index.upsert allows max MAX_VECTORS_PER_BATCH per batch
|
|
512
|
+
batch_size = min(self.batch_size, MAX_VECTORS_PER_BATCH)
|
|
513
|
+
for i in range(0, len(entries), batch_size):
|
|
514
|
+
batch = entries[i : i + batch_size]
|
|
515
|
+
self._endee_index.upsert(batch)
|
|
516
|
+
|
|
517
|
+
return ids
|
|
518
|
+
except Exception as e:
|
|
519
|
+
_logger.error(f"Error adding nodes to index: {e}")
|
|
520
|
+
raise
|
|
273
521
|
|
|
274
522
|
def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:
|
|
275
523
|
"""
|
|
276
|
-
Delete nodes using
|
|
277
|
-
|
|
278
|
-
Args:
|
|
279
|
-
ref_doc_id (str): The id of the document to delete.
|
|
524
|
+
Delete nodes by ref_doc_id using endee Index.delete_with_filter.
|
|
525
|
+
Only deletes vectors that were stored with ref_doc_id in their filter (see add()).
|
|
280
526
|
"""
|
|
281
527
|
try:
|
|
282
|
-
|
|
528
|
+
# Filter format consistent with query: list of {field: {$op: value}}
|
|
529
|
+
filter_dict = [{"ref_doc_id": {"$eq": ref_doc_id}}]
|
|
530
|
+
self._endee_index.delete_with_filter(filter_dict)
|
|
283
531
|
except Exception as e:
|
|
284
|
-
_logger.error(f"Error deleting
|
|
532
|
+
_logger.error(f"Error deleting by ref_doc_id {ref_doc_id!r}: {e}")
|
|
533
|
+
raise
|
|
285
534
|
|
|
286
535
|
@property
|
|
287
536
|
def client(self) -> Any:
|
|
288
537
|
"""Return Endee index client."""
|
|
289
|
-
|
|
538
|
+
try:
|
|
539
|
+
return self._endee_index
|
|
540
|
+
except Exception as e:
|
|
541
|
+
_logger.error(f"Error getting client: {e}")
|
|
542
|
+
raise
|
|
543
|
+
|
|
544
|
+
def describe(self) -> Dict[str, Any]:
|
|
545
|
+
"""Get index metadata (endee Index.describe())."""
|
|
546
|
+
try:
|
|
547
|
+
return self._endee_index.describe()
|
|
548
|
+
except Exception as e:
|
|
549
|
+
_logger.error(f"Error describing index: {e}")
|
|
550
|
+
return {}
|
|
290
551
|
|
|
291
|
-
def
|
|
552
|
+
def fetch(self, ids: List[str]) -> List[Dict[str, Any]]:
|
|
553
|
+
"""Fetch vectors by IDs (uses endee Index.get_vector per id)."""
|
|
554
|
+
out: List[Dict[str, Any]] = []
|
|
555
|
+
for id_ in ids:
|
|
556
|
+
try:
|
|
557
|
+
out.append(self._endee_index.get_vector(id_))
|
|
558
|
+
except Exception as e:
|
|
559
|
+
_logger.error(f"Error fetching vector id {id_}: {e}")
|
|
560
|
+
return out
|
|
561
|
+
|
|
562
|
+
def query(
    self,
    query: VectorStoreQuery,
    ef: int = DEFAULT_EF_SEARCH,
    **kwargs: Any,
) -> VectorStoreQueryResult:
    """
    Query index for top k most similar nodes.

    Args:
        query: VectorStoreQuery object containing query parameters:
            - query_embedding: Dense vector for search
            - query_str: Text query for sparse search (used if index is hybrid)
            - similarity_top_k: Number of results to return
            - filters: Optional metadata filters
            - alpha: Optional weighting for hybrid search (0=sparse, 1=dense)
        ef: HNSW ef_search parameter (default 128, max 1024).
            Controls search quality vs speed tradeoff.

    Returns:
        VectorStoreQueryResult with nodes, similarities, and ids. If the
        backend query call itself fails, an empty result is returned (the
        error is logged); any other failure in this method is logged and
        re-raised.
    """
    # Use index configuration to determine hybrid mode
    try:
        use_hybrid = self.hybrid

        # Log the mode being used
        _logger.info(
            f"Using {'hybrid' if use_hybrid else 'dense-only'} search (index configured with hybrid={self.hybrid})"
        )

        if not hasattr(self._endee_index, "dimension"):
            # Get dimension from index if available, otherwise try to infer from query
            try:
                dimension = self._endee_index.describe()["dimension"]
            except Exception as e:
                _logger.warning(f"Could not get dimension from index: {e}")
                if query.query_embedding is not None:
                    dimension = len(query.query_embedding)
                else:
                    raise ValueError("Could not determine vector dimension")
        else:
            dimension = self._endee_index.dimension

        query_embedding = [0.0] * dimension  # Default empty vector
        filters = {}
        # Apply any metadata filters if provided.
        # Two accepted shapes: LlamaIndex MetadataFilter objects, or raw
        # {field: {"$op": value}} dicts; both are folded into `filters`.
        if query.filters is not None:
            for filter_item in query.filters.filters:
                # Case 1: MetadataFilter object
                if (
                    hasattr(filter_item, "key")
                    and hasattr(filter_item, "value")
                    and hasattr(filter_item, "operator")
                ):
                    if filter_item.operator not in SUPPORTED_FILTER_OPERATORS:
                        raise ValueError(
                            f"Unsupported filter operator: {filter_item.operator}. "
                            "Supported filter operations: EQ ($eq), IN ($in)."
                        )
                    op_symbol = REVERSE_OPERATOR_MAP[filter_item.operator]
                    if filter_item.key not in filters:
                        filters[filter_item.key] = {}
                    filters[filter_item.key][op_symbol] = filter_item.value

                # Case 2: Raw dict, e.g. {"category": {"$eq": "programming"}}
                elif isinstance(filter_item, dict):
                    for key, op_dict in filter_item.items():
                        if isinstance(op_dict, dict):
                            for op, val in op_dict.items():
                                if key not in filters:
                                    filters[key] = {}
                                filters[key][op] = val
                        else:
                            raise ValueError(f"Unsupported filter format: {filter_item}")

        _logger.info(f"Final structured filters: {filters}")

        # Endee API expects filter as array: [{"field": {"$op": value}}, ...]
        filter_for_api: Optional[List[Dict[str, Any]]] = None
        if filters:
            filter_for_api = [{field: ops} for field, ops in filters.items()]
            _logger.info(f"Filter sent to backend API: {filter_for_api}")

        # Use the query embedding if provided
        if query.query_embedding is not None:
            query_embedding = cast(List[float], query.query_embedding)
            if query.alpha is not None and use_hybrid:
                # Apply alpha scaling in hybrid mode
                # NOTE(review): this scales only the dense side by alpha;
                # presumably the backend weights sparse vs dense from the
                # relative magnitudes — confirm against endee query docs.
                query_embedding = [v * query.alpha for v in query_embedding]
        # Sparse query components for hybrid (endee Index.query uses sparse_indices, sparse_values)
        sparse_indices_q: Optional[List[int]] = None
        sparse_values_q: Optional[List[float]] = None
        if use_hybrid:
            # Get query text from query.query_str
            query_text = getattr(query, "query_str", None)
            if query_text and self._sparse_encoder is not None:
                _logger.info(
                    f"Processing sparse vectors for hybrid search with query_str: '{query_text[:100]}...'"
                )
                si, sv = self._compute_sparse_vectors([query_text])
                sparse_indices_q = si[0]
                sparse_values_q = [float(v) for v in sv[0]]
                _logger.info(f"Generated {len(sparse_indices_q)} sparse features")
            elif query_text:
                # Hybrid index but no encoder: fall through to dense-only.
                _logger.warning(
                    "Hybrid mode enabled but no sparse encoder available"
                )
            else:
                _logger.warning(
                    "Hybrid mode enabled but no query_str provided in VectorStoreQuery"
                )
        else:
            _logger.info("Using dense-only search (not hybrid mode)")

        # Cap to endee limits (MAX_TOP_K_ALLOWED=512, MAX_EF_SEARCH_ALLOWED=1024)
        requested_top_k = (
            query.similarity_top_k if query.similarity_top_k is not None else 10
        )
        top_k = min(requested_top_k, MAX_TOP_K_ALLOWED)
        ef_capped = min(ef, MAX_EF_SEARCH_ALLOWED)

        # Build query kwargs - only include optional parameters if they have values
        query_kwargs = {
            "vector": query_embedding,
            "top_k": top_k,
            "ef": ef_capped,
            "include_vectors": True,
        }

        # Only add filter if provided
        if filter_for_api is not None:
            query_kwargs["filter"] = filter_for_api
        # Only add sparse vectors if provided (for hybrid search)
        if sparse_indices_q is not None:
            query_kwargs["sparse_indices"] = sparse_indices_q
        if sparse_values_q is not None:
            query_kwargs["sparse_values"] = sparse_values_q
        # Use endee Index.query. A backend failure here is deliberately
        # swallowed: log and return an empty result instead of raising.
        try:
            results = self._endee_index.query(**query_kwargs)
        except Exception as e:
            _logger.error(f"Error querying Endee: {e}")
            return VectorStoreQueryResult(nodes=[], similarities=[], ids=[])

        # Process results
        nodes = []
        similarities = []
        ids = []

        for result in results:
            node_id = result["id"]
            # Backend may report the score under either key; default 0.0.
            score = result.get("similarity", result.get("score", 0.0))
            metadata = result.get("meta", {})

            # Create node from metadata
            if self.flat_metadata:
                # pop() removes the text from metadata so it isn't duplicated
                # on the reconstructed node.
                node = metadata_dict_to_node(
                    metadata=metadata,
                    text=metadata.pop(self.text_key, None),
                    id_=node_id,
                )
            else:
                metadata_dict, node_info, relationships = (
                    legacy_metadata_dict_to_node(
                        metadata=metadata,
                        text_key=self.text_key,
                    )
                )

                # Create TextNode with the extracted metadata
                # Step 1: Get the JSON string from "_node_content"
                _node_content_str = metadata.get("_node_content", "{}")

                # Step 2: Convert JSON string to Python dict
                try:
                    node_content = json.loads(_node_content_str)
                except json.JSONDecodeError:
                    node_content = {}

                # Step 3: Get the text
                text = node_content.get(self.text_key, "")
                node = TextNode(
                    text=text,
                    metadata=metadata_dict,
                    relationships=relationships,
                    node_id=node_id,
                )

                # Add any node_info properties to the node
                for key, val in node_info.items():
                    if hasattr(node, key):
                        setattr(node, key, val)

            # If embedding was returned in the results, add it to the node
            if "vector" in result:
                node.embedding = result["vector"]

            nodes.append(node)
            similarities.append(score)
            ids.append(node_id)

        return VectorStoreQueryResult(
            nodes=nodes, similarities=similarities, ids=ids
        )
    except Exception as e:
        _logger.error(f"Error querying index: {e}")
        raise
|