llama-index-vector-stores-opensearch 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -20,6 +20,7 @@ from llama_index.core.vector_stores.utils import (
     node_to_metadata_dict,
 )
 from opensearchpy import AsyncOpenSearch
+from opensearchpy.client import Client as OSClient
 from opensearchpy.exceptions import NotFoundError
 from opensearchpy.helpers import async_bulk
 
@@ -64,8 +65,10 @@ class OpensearchVectorClient:
         embedding_field: str = "embedding",
         text_field: str = "content",
         method: Optional[dict] = None,
+        engine: Optional[str] = "nmslib",
         max_chunk_bytes: int = 1 * 1024 * 1024,
         search_pipeline: Optional[str] = None,
+        os_client: Optional[OSClient] = None,
         **kwargs: Any,
     ):
         """Init params."""
@@ -73,7 +76,7 @@ class OpensearchVectorClient:
             method = {
                 "name": "hnsw",
                 "space_type": "l2",
-                "engine": "nmslib",
+                "engine": engine,
                 "parameters": {"ef_construction": 256, "m": 48},
             }
         if embedding_field is None:
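The two hunks above thread a new `engine` keyword through `OpensearchVectorClient.__init__` instead of hard-coding "nmslib" in the default `method` mapping. Below is a minimal sketch of how the option might be used, assuming the pre-existing `endpoint`/`index`/`dim` positional parameters and a running OpenSearch cluster on localhost; which engine names are accepted ("nmslib", "faiss", "lucene") depends on your cluster's k-NN plugin.

# Sketch only: selecting the k-NN engine via the new `engine` argument
# (the default stays "nmslib", so existing callers are unaffected).
from llama_index.vector_stores.opensearch import OpensearchVectorClient

client = OpensearchVectorClient(
    "http://localhost:9200",  # assumed local cluster endpoint
    "llama-index-demo",       # index name (assumed)
    1536,                     # embedding dimension (assumed)
    engine="faiss",           # was always "nmslib" in 0.1.11
)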
@@ -102,7 +105,9 @@ class OpensearchVectorClient:
                 }
             },
         }
-        self._os_client = self._get_async_opensearch_client(self._endpoint, **kwargs)
+        self._os_client = os_client or self._get_async_opensearch_client(
+            self._endpoint, **kwargs
+        )
         not_found_error = self._import_not_found_error()
 
         event_loop = asyncio.get_event_loop()
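The constructor also accepts an optional `os_client`: when supplied it is used directly, and only when it is omitted does the client fall back to building an `AsyncOpenSearch` from `endpoint` and `**kwargs`. A hedged sketch of injecting a pre-configured client follows; the host, credentials, and TLS settings are placeholders.

# Sketch only: passing a pre-built async client so connection details
# (auth, TLS, pooling) are controlled by the caller rather than derived
# from the endpoint string.
from opensearchpy import AsyncOpenSearch
from llama_index.vector_stores.opensearch import OpensearchVectorClient

os_client = AsyncOpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    http_auth=("admin", "admin"),  # placeholder credentials
    use_ssl=False,
)

client = OpensearchVectorClient(
    "http://localhost:9200",  # endpoint is still a required argument
    "llama-index-demo",
    1536,
    os_client=os_client,      # new in 0.1.13; bypasses the internal client factory
)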
@@ -209,7 +214,10 @@ class OpensearchVectorClient:
         pre_filter = []
         if filters is not None:
             for f in filters.legacy_filters():
-                pre_filter.append({f.key: json.loads(str(f.value))})
+                if isinstance(f.value, str):
+                    pre_filter.append({f.key: f.value})
+                else:
+                    pre_filter.append({f.key: json.loads(str(f.value))})
 
         return pre_filter
 
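The filter change fixes `_parse_filters`: 0.1.11 pushed every filter value through `json.loads(str(value))`, so a plain string such as "draft" (which is not valid JSON) raised a `JSONDecodeError`; 0.1.13 forwards string values untouched and only JSON-decodes non-string values. Roughly, using the usual core filter classes (import path assumed):

# Sketch only: an exact-match filter with a plain string value, which
# failed to parse under 0.1.11 and is now forwarded as-is.
from llama_index.core.vector_stores import ExactMatchFilter, MetadataFilters

filters = MetadataFilters(
    filters=[ExactMatchFilter(key="status", value="draft")]
)
# filters can then be passed through a VectorStoreQuery / query engine as before.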
@@ -384,6 +392,41 @@ class OpensearchVectorClient:
         }
         await self._os_client.delete_by_query(index=self._index, body=search_query)
 
+    async def delete_nodes(
+        self,
+        node_ids: Optional[List[str]] = None,
+        filters: Optional[MetadataFilters] = None,
+        **delete_kwargs: Any,
+    ) -> None:
+        """Deletes nodes.
+
+        Args:
+            node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None.
+            filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None.
+        """
+        if not node_ids and not filters:
+            return
+
+        query = {"query": {"bool": {"filter": []}}}
+        if node_ids:
+            query["query"]["bool"]["filter"].append({"terms": {"_id": node_ids or []}})
+
+        if filters:
+            for filter in self._parse_filters(filters):
+                newfilter = {}
+
+                for key in filter:
+                    newfilter[f"metadata.{key}.keyword"] = filter[key]
+
+                query["query"]["bool"]["filter"].append({"term": newfilter})
+
+        await self._os_client.delete_by_query(index=self._index, body=query)
+
+    async def clear(self) -> None:
+        """Clears index."""
+        query = {"query": {"bool": {"filter": []}}}
+        await self._os_client.delete_by_query(index=self._index, body=query)
+
     async def aquery(
         self,
         query_mode: VectorStoreQueryMode,
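`OpensearchVectorClient` gains `delete_nodes` and `clear`, both implemented as `delete_by_query` calls against a boolean filter: node IDs become a `terms` clause on `_id`, metadata filters become `term` clauses on `metadata.<key>.keyword`, and `clear` sends an empty filter list, which matches every document. For illustration, the request body `delete_nodes` would build for two IDs plus a hypothetical `status` filter looks like this:

# Illustration of the delete_by_query body assembled by delete_nodes();
# "status"/"draft" and the node IDs are example values.
body = {
    "query": {
        "bool": {
            "filter": [
                {"terms": {"_id": ["node-1", "node-2"]}},
                {"term": {"metadata.status.keyword": "draft"}},
            ]
        }
    }
}
# clear() sends the same shape with an empty "filter" list, deleting
# every document in the index.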
@@ -569,6 +612,44 @@ class OpensearchVectorStore(BasePydanticVectorStore):
         """
         await self._client.delete_by_doc_id(ref_doc_id)
 
+    async def adelete_nodes(
+        self,
+        node_ids: Optional[List[str]] = None,
+        filters: Optional[MetadataFilters] = None,
+        **delete_kwargs: Any,
+    ) -> None:
+        """Deletes nodes async.
+
+        Args:
+            node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None.
+            filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None.
+        """
+        await self._client.delete_nodes(node_ids, filters, **delete_kwargs)
+
+    def delete_nodes(
+        self,
+        node_ids: Optional[List[str]] = None,
+        filters: Optional[MetadataFilters] = None,
+        **delete_kwargs: Any,
+    ) -> None:
+        """Deletes nodes.
+
+        Args:
+            node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None.
+            filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None.
+        """
+        asyncio.get_event_loop().run_until_complete(
+            self.adelete_nodes(node_ids, filters, **delete_kwargs)
+        )
+
+    async def aclear(self) -> None:
+        """Clears index."""
+        await self._client.clear()
+
+    def clear(self) -> None:
+        """Clears index."""
+        asyncio.get_event_loop().run_until_complete(self.aclear())
+
     def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult:
         """
         Query index for top k most similar nodes.
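At the vector-store level these surface as `adelete_nodes`/`aclear` plus synchronous `delete_nodes`/`clear` wrappers that run the coroutines via `asyncio.get_event_loop().run_until_complete(...)`, so the sync variants cannot be called from inside an already-running event loop (use the async versions there). A usage sketch, assuming `vector_store` is an existing `OpensearchVectorStore` and the same filter imports as above:

# Sketch only: deleting by node ID, by metadata filter, or clearing the index.
from llama_index.core.vector_stores import ExactMatchFilter, MetadataFilters

vector_store.delete_nodes(node_ids=["node-1", "node-2"])

vector_store.delete_nodes(
    filters=MetadataFilters(filters=[ExactMatchFilter(key="status", value="draft")])
)

vector_store.clear()  # removes all documents from the backing index

# Async equivalents inside a running loop:
#     await vector_store.adelete_nodes(node_ids=["node-1"])
#     await vector_store.aclear()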
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-vector-stores-opensearch
-Version: 0.1.11
+Version: 0.1.13
 Summary: llama-index vector_stores opensearch integration
 License: MIT
 Author: Your Name
@@ -0,0 +1,5 @@
+llama_index/vector_stores/opensearch/__init__.py,sha256=U1_XAkZb6zcskOk4s10NB8Tjs9AZRGdRQLzOGpbWdBA,176
+llama_index/vector_stores/opensearch/base.py,sha256=YShOE7dzy3L5G8N9iobA6aenZLF_dJACs1aCBrQIPyg,22584
+llama_index_vector_stores_opensearch-0.1.13.dist-info/METADATA,sha256=X2CxNuyUSEoChYA5Rss4QThW0HarjyC3UQpWE24reOU,729
+llama_index_vector_stores_opensearch-0.1.13.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_index_vector_stores_opensearch-0.1.13.dist-info/RECORD,,
@@ -1,5 +0,0 @@
-llama_index/vector_stores/opensearch/__init__.py,sha256=U1_XAkZb6zcskOk4s10NB8Tjs9AZRGdRQLzOGpbWdBA,176
-llama_index/vector_stores/opensearch/base.py,sha256=bHE7FV-CLU-x_ChG9CdoE4WzT1YkkZyIg23PYB7Eg30,19780
-llama_index_vector_stores_opensearch-0.1.11.dist-info/METADATA,sha256=JrqjHpIkrncELU0t2pNIKbbYaAahVnFP8r9Tz0LeYrE,729
-llama_index_vector_stores_opensearch-0.1.11.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-llama_index_vector_stores_opensearch-0.1.11.dist-info/RECORD,,