projectdavid 1.31.0__py3-none-any.whl → 1.38.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- projectdavid/clients/assistants_client.py +7 -13
- projectdavid/clients/file_processor.py +102 -107
- projectdavid/clients/messages_client.py +24 -39
- projectdavid/clients/runs.py +156 -211
- projectdavid/clients/synchronous_inference_wrapper.py +52 -24
- projectdavid/clients/threads_client.py +32 -12
- projectdavid/clients/vector_store_manager.py +110 -21
- projectdavid/clients/vectors.py +47 -30
- projectdavid/clients/vision-file_processor.py +462 -0
- projectdavid/clients/vision_vectors.py +1058 -0
- projectdavid/decorators.py +64 -0
- projectdavid/entity.py +24 -5
- projectdavid/synthesis/reranker.py +4 -2
- projectdavid/utils/function_call_suppressor.py +40 -0
- {projectdavid-1.31.0.dist-info → projectdavid-1.38.1.dist-info}/METADATA +6 -7
- {projectdavid-1.31.0.dist-info → projectdavid-1.38.1.dist-info}/RECORD +19 -15
- {projectdavid-1.31.0.dist-info → projectdavid-1.38.1.dist-info}/WHEEL +1 -1
- {projectdavid-1.31.0.dist-info → projectdavid-1.38.1.dist-info}/licenses/LICENSE +0 -0
- {projectdavid-1.31.0.dist-info → projectdavid-1.38.1.dist-info}/top_level.txt +0 -0
projectdavid/clients/threads_client.py
CHANGED
@@ -1,4 +1,4 @@
-
+#! projectdavid/clients/threads_client.py
 from typing import Any, Dict, List, Optional
 
 import httpx
@@ -104,16 +104,30 @@ class ThreadsClient(BaseAPIClient):
             logging_utility.error("Unexpected error retrieving thread: %s", str(e))
             raise
 
-    def update_thread(
+    def update_thread(
+        self,
+        thread_id: str,
+        *,
+        participant_ids: Optional[List[str]] = None,
+        meta_data: Optional[Dict[str, Any]] = None,
+        tool_resources: Optional[Dict[str, Any]] = None,
+    ) -> validator.ThreadReadDetailed:
         logging_utility.info("Updating thread with id: %s", thread_id)
         try:
-            validated_updates = validator.ThreadUpdate(
-
-
+            validated_updates = validator.ThreadUpdate(
+                participant_ids=participant_ids,
+                meta_data=meta_data,
+                tool_resources=tool_resources,
+            )
+
+            response = self.client.put(
+                f"/v1/threads/{thread_id}",
+                json=validated_updates.model_dump(),
             )
             response.raise_for_status()
             updated_thread = response.json()
             return validator.ThreadReadDetailed(**updated_thread)
+
         except httpx.HTTPStatusError as e:
             logging_utility.error("HTTP error updating thread: %s", str(e))
             raise
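For context, the new keyword-only signature would be called like this (sketch only; the `threads` instance, ids, and values are placeholders, not taken from the diff):

# `threads` is an already-constructed ThreadsClient; ids/values are placeholders.
updated = threads.update_thread(
    "thread_abc123",                       # thread_id stays positional
    participant_ids=["user_1", "user_2"],  # update fields are keyword-only
    meta_data={"topic": "billing"},
)
# `updated` is a validator.ThreadReadDetailed parsed from the PUT response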
@@ -154,17 +168,23 @@ class ThreadsClient(BaseAPIClient):
             logging_utility.error("Unexpected error listing threads: %s", str(e))
             raise
 
-    def delete_thread(self, thread_id: str) ->
+    def delete_thread(self, thread_id: str) -> validator.ThreadDeleted | None:
+        """
+        Delete a thread.
+        ▶ On success: returns ThreadDeleted(id=…, object='thread.deleted', deleted=True)
+        ▶ If not found: returns None
+        """
         logging_utility.info("Deleting thread with id: %s", thread_id)
+
         try:
-
-
-            return
+            resp = self.client.delete(f"/v1/threads/{thread_id}")
+            resp.raise_for_status()  # 2xx → OK
+            return validator.ThreadDeleted(**resp.json())  # ← parse envelope
         except httpx.HTTPStatusError as e:
             if e.response.status_code == 404:
-                return
-            logging_utility.error("HTTP error deleting thread: %s",
+                return None  # thread not found
+            logging_utility.error("HTTP error deleting thread: %s", e)
             raise
         except Exception as e:
-            logging_utility.error("Unexpected error deleting thread: %s",
+            logging_utility.error("Unexpected error deleting thread: %s", e)
             raise
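A sketch of the new return contract (again assuming an existing `threads` client; the thread id is a placeholder):

result = threads.delete_thread("thread_abc123")
if result is None:
    print("thread not found")              # 404 is swallowed and returned as None
else:
    print(result.deleted, result.object)   # True, "thread.deleted"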
projectdavid/clients/vector_store_manager.py
CHANGED
@@ -9,7 +9,6 @@ from __future__ import annotations
 
 import time
 import uuid
-from pathlib import Path
 from typing import Any, Dict, List, Optional
 
 from dotenv import load_dotenv
@@ -50,11 +49,20 @@ class VectorStoreManager(BaseVectorStore):
     def create_store(
         self,
         collection_name: str,
+        *,
         vector_size: int = 384,
         distance: str = "COSINE",
+        vectors_config: Optional[Dict[str, qdrant.VectorParams]] = None,
     ) -> dict:
+        """
+        Create or recreate a Qdrant collection.
+
+        • If *vectors_config* is provided → use it verbatim (multi-vector schema).
+        • Otherwise create a classic single-vector collection *without* naming the
+          vector field – so upserts can omit ``vector_name``.
+        """
         try:
-            #
+            # ── pre-existence check ────────────────────────────────────────────
             if any(
                 col.name == collection_name
                 for col in self.client.get_collections().collections
@@ -65,16 +73,32 @@ class VectorStoreManager(BaseVectorStore):
             if dist not in qdrant.Distance.__members__:
                 raise ValueError(f"Invalid distance metric '{distance}'")
 
+            # ── choose schema ──────────────────────────────────────────────────
+            if vectors_config:  # caller supplied full mapping
+                config = vectors_config  # e.g. {"text_vec": ..., "img_vec": ...}
+            else:  # default = single unnamed vector
+                config = qdrant.VectorParams(
+                    size=vector_size,
+                    distance=qdrant.Distance[dist],
+                )
+
+            # ── (re)create collection ─────────────────────────────────────────
             self.client.recreate_collection(
                 collection_name=collection_name,
-                vectors_config=qdrant.VectorParams(
-                    size=vector_size, distance=qdrant.Distance[dist]
-                ),
+                vectors_config=config,
             )
+
+            # ── bookkeeping ───────────────────────────────────────────────────
+            if isinstance(config, dict):
+                fields = list(config.keys())
+            else:  # unnamed field
+                fields = [None]
+
             self.active_stores[collection_name] = {
                 "created_at": int(time.time()),
                 "vector_size": vector_size,
                 "distance": dist,
+                "fields": fields,
             }
             log.info("Created Qdrant collection %s", collection_name)
             return {"collection_name": collection_name, "status": "created"}
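A sketch of both schema paths enabled by the new `vectors_config` parameter (the `qdrant` models import and the collection/field names below are illustrative assumptions; the module's own alias may differ):

# Assumes the standard qdrant-client models module.
from qdrant_client import models as qdrant

manager = VectorStoreManager(vector_store_host="localhost")

# Classic single-vector collection: later upserts may omit vector_name.
manager.create_store("docs_text", vector_size=384, distance="COSINE")

# Multi-vector schema passed through verbatim; the field names are illustrative.
manager.create_store(
    "docs_multimodal",
    vectors_config={
        "text_vec": qdrant.VectorParams(size=384, distance=qdrant.Distance.COSINE),
        "img_vec": qdrant.VectorParams(size=512, distance=qdrant.Distance.COSINE),
    },
)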
@@ -103,30 +127,68 @@ class VectorStoreManager(BaseVectorStore):
                 "name": store_name,
                 "status": "active",
                 "vectors_count": info.points_count,
-                "configuration": info.config.params
+                "configuration": info.config.params,
                 "created_at": self.active_stores[store_name]["created_at"],
+                "fields": self.active_stores[store_name].get("fields"),
             }
         except Exception as e:
             log.error("Store info failed: %s", e)
             raise VectorStoreError(f"Info retrieval failed: {e}") from e
 
-    # ------------------------------------------------------------------ #
-    # ingestion helpers
-    # ------------------------------------------------------------------ #
     def add_to_store(
         self,
+        *,
         store_name: str,
         texts: List[str],
         vectors: List[List[float]],
         metadata: List[dict],
-
+        vector_name: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """
+        Upsert vectors + payloads into *store_name*.
+
+        If *vector_name* is omitted the manager:
+
+        • auto-detects the single vector field for classic (unnamed) collections
+        • auto-detects the sole key for named-vector collections with exactly one field
+        • raises if multiple named fields exist.
+        """
+
+        # ─── input validation ───────────────────────────────────────────────
         if not vectors:
             raise ValueError("Empty vectors list")
         expected = len(vectors[0])
         for i, vec in enumerate(vectors):
             if len(vec) != expected or not all(isinstance(v, float) for v in vec):
-                raise ValueError(f"Vector {i} malformed")
+                raise ValueError(f"Vector {i} malformed: expected {expected} floats")
 
+        # ─── auto-detect vector field ───────────────────────────────────────
+        if vector_name is None:
+            coll_info = self.client.get_collection(collection_name=store_name)
+            v_cfg = coll_info.config.params.vectors
+
+            if isinstance(v_cfg, dict):  # modern named-vector schema
+                vector_fields = list(v_cfg.keys())
+                if len(vector_fields) == 1:  # exactly one → safe default
+                    vector_name = vector_fields[0]
+                    log.debug(
+                        "Auto-detected vector_name=%r for store=%s",
+                        vector_name,
+                        store_name,
+                    )
+                else:  # >1 named fields → ambiguous
+                    raise ValueError(
+                        f"Multiple vector fields {vector_fields}; please specify vector_name"
+                    )
+            else:
+                # legacy single-vector schema → leave vector_name as None
+                log.debug(
+                    "Collection %s uses legacy single-vector schema; "
+                    "upserting without vector_name",
+                    store_name,
+                )
+
+        # ─── build points payload ───────────────────────────────────────────
         points = [
             qdrant.PointStruct(
                 id=self._generate_vector_id(),
@@ -135,12 +197,27 @@ class VectorStoreManager(BaseVectorStore):
             )
             for txt, vec, meta in zip(texts, vectors, metadata)
         ]
+
+        # ─── upsert with backward-compat for old qdrant-client builds ───────
+        import inspect  # keep local to avoid top-level dependency if absent elsewhere
+
+        upsert_sig = inspect.signature(self.client.upsert)
+        supports_vector_name = "vector_name" in upsert_sig.parameters
+
+        upsert_kwargs: Dict[str, Any] = {
+            "collection_name": store_name,
+            "points": points,
+            "wait": True,
+        }
+        if supports_vector_name and vector_name is not None:
+            upsert_kwargs["vector_name"] = vector_name
+
         try:
-            self.client.upsert(
+            self.client.upsert(**upsert_kwargs)
             return {"status": "success", "points_inserted": len(points)}
-        except Exception as
-            log.error("Add
-            raise VectorStoreError(f"Insertion failed: {
+        except Exception as exc:  # noqa: BLE001
+            log.error("Add-to-store failed: %s", exc, exc_info=True)
+            raise VectorStoreError(f"Insertion failed: {exc}") from exc
 
     # ------------------------------------------------------------------ #
     # search / query
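A minimal upsert sketch against the new keyword-only signature, leaving `vector_name` to the auto-detection added above (store name, text, and metadata are placeholders):

manager.add_to_store(
    store_name="docs_text",
    texts=["hello world"],
    vectors=[[0.01] * 384],            # each vector must be floats of equal length
    metadata=[{"source": "demo.txt"}],
    # vector_name omitted: the manager reads the collection config and either
    # auto-detects the single named field or upserts without a name.
)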
@@ -189,15 +266,25 @@ class VectorStoreManager(BaseVectorStore):
         query_vector: List[float],
         top_k: int = 5,
         filters: Optional[dict] = None,
+        *,
+        vector_field: Optional[str] = None,  # ← NEW
         score_threshold: float = 0.0,
         offset: int = 0,
         limit: Optional[int] = None,
     ) -> List[dict]:
-        """
+        """
+        Run a similarity search against *store_name*.
+
+        • Works with any Qdrant-client ≥ 1.0
+        • `vector_field` lets you target a non-default vector column
+          (e.g. ``"caption_vector"`` for image stores). Pass **None**
+          to use the collection’s default vector.
+        """
 
         limit = limit or top_k
         flt = self._dict_to_filter(filters) if filters else None
 
+        # ── shared kwargs ----------------------------------------------------
         common: Dict[str, Any] = dict(
             collection_name=store_name,
             query_vector=query_vector,
@@ -208,19 +295,21 @@ class VectorStoreManager(BaseVectorStore):
             with_vectors=False,
         )
 
+        # if vector_field:                        # ← inject when requested
+        #     common["vector_name"] = vector_field
+
+        # ── call search (new client first, fallback to old) ------------------
         try:
-
-            res = self.client.search(**common, filter=flt)  # type: ignore[arg-type]
+            res = self.client.search(**common, filter=flt)  # ≥ 1.6
         except AssertionError as ae:
             if "Unknown arguments" not in str(ae):
                 raise
-
-            res = self.client.search(**common, query_filter=flt)  # type: ignore[arg-type]
-
+            res = self.client.search(**common, query_filter=flt)  # < 1.6
         except Exception as e:
             log.error("Query failed: %s", e)
             raise VectorStoreError(f"Query failed: {e}") from e
 
+        # ── normalise result -------------------------------------------------
         return [
             {
                 "id": p.id,
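A query sketch using the new `vector_field` parameter (store name, filter key, and the query vector are placeholders; note that in this version the field is accepted but its injection into the search kwargs is still commented out above, so it does not yet alter the search):

hits = manager.query_store(
    store_name="docs_text",
    query_vector=[0.01] * 384,
    top_k=5,
    filters={"file_id": "vsf_demo"},   # translated via _dict_to_filter
    vector_field=None,                 # or e.g. "caption_vector" for image stores
)
for hit in hits:
    print(hit["id"])                   # each hit is returned as a plain dict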
projectdavid/clients/vectors.py
CHANGED
@@ -61,13 +61,16 @@ class VectorStoreClient:
     • create_vector_store() no longer takes user_id; ownership from token.
     """
 
-    #
+    # ------------------------------------------------------------------ #
+    # Construction / cleanup
+    # ------------------------------------------------------------------ #
     def __init__(
         self,
         base_url: Optional[str] = None,
         api_key: Optional[str] = None,
         *,
         vector_store_host: str = "localhost",
+        file_processor_kwargs: Optional[dict] = None,  # 🔶 add arg
     ):
         self.base_url = (base_url or os.getenv("BASE_URL", "")).rstrip("/")
         self.api_key = api_key or os.getenv("API_KEY")
@@ -84,9 +87,13 @@ class VectorStoreClient:
             base_url=self.base_url, headers=self._base_headers, timeout=30.0
         )
 
-        # Local helpers
+        # Local helpers ---------------------------------------------------
         self.vector_manager = VectorStoreManager(vector_store_host=vector_store_host)
         self.identifier_service = UtilsInterface.IdentifierService()
+
+        # 🔶 forward kwargs into the upgraded FileProcessor
+        # self.file_processor = FileProcessor(**(file_processor_kwargs or {}))
+        # Using Stripped down version for now until we move forward with multi-modal stores
         self.file_processor = FileProcessor()
 
         log.info("VectorStoreClient → %s", self.base_url)
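A construction sketch showing the new argument (URL, key, and the `chunk_size` option are hypothetical; per the commented-out line above, `file_processor_kwargs` is accepted but not yet forwarded to `FileProcessor` in this release):

vs_client = VectorStoreClient(
    base_url="http://localhost:9000",
    api_key="pd-...",
    vector_store_host="localhost",
    file_processor_kwargs={"chunk_size": 512},  # accepted, not yet forwarded
)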
@@ -240,32 +247,31 @@ class VectorStoreClient:
     ) -> ValidationInterface.VectorStoreFileRead:
         processed = await self.file_processor.process_file(p)
         texts, vectors = processed["chunks"], processed["vectors"]
-        line_data = processed.get("line_data") or []
-
-        base_md = meta or {}
-        base_md.update({"source": str(p), "file_name": p.name})
+        line_data = processed.get("line_data") or []
 
+        base_md = (meta or {}) | {"source": str(p), "file_name": p.name}
         file_record_id = f"vsf_{uuid.uuid4()}"
 
-        # Build per‑chunk payload, now including page/lines if present
         chunk_md = []
-        for i in
-            payload = {
-
-
-                "file_id": file_record_id,
-            }
-            if i < len(line_data):  # ← NEW
-                payload.update(line_data[i])  # {'page': …, 'lines': …}
+        for i, txt in enumerate(texts):
+            payload = {**base_md, "chunk_index": i, "file_id": file_record_id}
+            if i < len(line_data):
+                payload.update(line_data[i])  # {'page':…, 'lines':…}
             chunk_md.append(payload)
 
+        # 🔑 1. look up the backend store to get its *collection* name
+        store = self.retrieve_vector_store_sync(vector_store_id)
+        collection_name = store.collection_name
+
+        # 🔑 2. upsert via VectorStoreManager (auto-detects vector field)
         self.vector_manager.add_to_store(
-            store_name=
+            store_name=collection_name,
             texts=texts,
            vectors=vectors,
             metadata=chunk_md,
         )
 
+        # 3. register the file with the API
         resp = await self._request(
             "POST",
             f"/v1/vector-stores/{vector_store_id}/files",
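Roughly, one iteration of the rewritten payload loop produces a dict like this (all values are illustrative):

meta = {"project": "demo"}
base_md = (meta or {}) | {"source": "/tmp/report.pdf", "file_name": "report.pdf"}
payload = {**base_md, "chunk_index": 0, "file_id": "vsf_<uuid>"}
payload.update({"page": 1, "lines": "1-12"})   # merged only when line_data[0] exists
# -> {'project': 'demo', 'source': '/tmp/report.pdf', 'file_name': 'report.pdf',
#     'chunk_index': 0, 'file_id': 'vsf_<uuid>', 'page': 1, 'lines': '1-12'}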
@@ -287,26 +293,36 @@ class VectorStoreClient:
         filters: Optional[Dict] = None,
         vector_store_host: Optional[str] = None,
     ) -> List[Dict[str, Any]]:
-
-
-
-
-
+
+        # pick local vs. override host
+        vector_manager = (
+            VectorStoreManager(vector_store_host=vector_store_host)
+            if vector_store_host
+            else self.vector_manager
+        )
 
         store = self.retrieve_vector_store_sync(vector_store_id)
-
+
+        # 🔶 choose encoder by vector_size
+        if store.vector_size == 1024:  # images collection
+            vec = self.file_processor.encode_clip_text(query_text).tolist()
+            vector_field = "caption_vector"  # field name in Qdrant
+        else:  # 384-D text collection
+            vec = self.file_processor.encode_text(query_text).tolist()
+            vector_field = None  # default field
 
         return vector_manager.query_store(
             store_name=store.collection_name,
             query_vector=vec,
             top_k=top_k,
             filters=filters,
+            vector_field=vector_field,
         )
 
-    async def _delete_vs_async(
-
-
-        qres = self.vector_manager.delete_store(
+    async def _delete_vs_async(self, vector_store_id: str, permanent: bool):
+        # collection deletion must use the *collection* name
+        store = self.retrieve_vector_store_sync(vector_store_id)
+        qres = self.vector_manager.delete_store(store.collection_name)
         await self._request(
             "DELETE",
             f"/v1/vector-stores/{vector_store_id}",
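The encoder dispatch added here can be restated outside the client as follows (sketch only; the store id and query text are placeholders, and `encode_clip_text` presumes the upgraded multi-modal `FileProcessor` rather than the stripped-down one constructed in `__init__`):

store = vs_client.retrieve_vector_store_sync("vs_abc123")
if store.vector_size == 1024:                       # image / caption collection
    vec = vs_client.file_processor.encode_clip_text("red bicycle").tolist()
    vector_field = "caption_vector"
else:                                               # 384-D text collection
    vec = vs_client.file_processor.encode_text("red bicycle").tolist()
    vector_field = None
hits = vs_client.vector_manager.query_store(
    store_name=store.collection_name,
    query_vector=vec,
    top_k=5,
    vector_field=vector_field,
)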
@@ -319,10 +335,11 @@ class VectorStoreClient:
             "qdrant_result": qres,
         }
 
-    async def _delete_file_async(
-        self
-
-
+    async def _delete_file_async(self, vector_store_id: str, file_path: str):
+        store = self.retrieve_vector_store_sync(vector_store_id)
+        fres = self.vector_manager.delete_file_from_store(
+            store.collection_name, file_path
+        )
         await self._request(
             "DELETE",
             f"/v1/vector-stores/{vector_store_id}/files",