vectordb-bench 1.0.2__py3-none-any.whl → 1.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,11 +3,9 @@ import logging
  import time
  from contextlib import contextmanager

- from alibabacloud_ha3engine_vector import client, models
+ from alibabacloud_ha3engine_vector import models
+ from alibabacloud_ha3engine_vector.client import Client
  from alibabacloud_ha3engine_vector.models import QueryRequest
- from alibabacloud_searchengine20211025 import models as searchengine_models
- from alibabacloud_searchengine20211025.client import Client as searchengineClient
- from alibabacloud_tea_openapi import models as open_api_models

  from ..api import MetricType, VectorDB
  from .config import AliyunOpenSearchIndexConfig
@@ -28,18 +26,25 @@ class AliyunOpenSearch(VectorDB):
  drop_old: bool = False,
  **kwargs,
  ):
- self.control_client = None
  self.dim = dim
  self.db_config = db_config
  self.case_config = db_case_config
  self.collection_name = collection_name
  self.instance_id = db_config["host"].split(".")[0].replace("http://", "").replace("https://", "")
+ self.config = models.Config(
+ endpoint=self.db_config["host"],
+ protocol="http",
+ access_user_name=self.db_config["user"],
+ access_pass_word=self.db_config["password"],
+ )

  self._primary_field = "id"
  self._scalar_field = "int_id"
  self._vector_field = "vector"
  self._index_name = "vector_idx"

+ client = Client(self.config)
+
  self.batch_size = int(
  min(
  ALIYUN_OPENSEARCH_MAX_SIZE_PER_BATCH / (dim * 25),
@@ -48,22 +53,16 @@ class AliyunOpenSearch(VectorDB):
  )

  log.info(f"Aliyun_OpenSearch client config: {self.db_config}")
- control_config = open_api_models.Config(
- access_key_id=self.db_config["ak"],
- access_key_secret=self.db_config["sk"],
- endpoint=self.db_config["control_host"],
- )
- self.control_client = searchengineClient(control_config)

  if drop_old:
  log.info(f"aliyun_OpenSearch client drop old index: {self.collection_name}")
- if self._index_exists(self.control_client):
- self._modify_index(self.control_client)
+ if self._index_exists(client):
+ self._modify_index(client)
  else:
- self._create_index(self.control_client)
+ self._create_index(client)

- def _create_index(self, client: searchengineClient):
- create_table_request = searchengine_models.CreateTableRequest()
+ def _create_index(self, client: Client):
+ create_table_request = models.CreateTableRequest()
  create_table_request.name = self.collection_name
  create_table_request.primary_key = self._primary_field
  create_table_request.partition_count = 1
@@ -72,14 +71,14 @@ class AliyunOpenSearch(VectorDB):
  self._vector_field: "MULTI_FLOAT",
  self._scalar_field: "INT64",
  }
- vector_index = searchengine_models.ModifyTableRequestVectorIndex()
+ vector_index = models.ModifyTableRequestVectorIndex()
  vector_index.index_name = self._index_name
  vector_index.dimension = self.dim
  vector_index.distance_type = self.case_config.distance_type()
  vector_index.vector_field = self._vector_field
  vector_index.vector_index_type = "HNSW"

- advance_params = searchengine_models.ModifyTableRequestVectorIndexAdvanceParams()
+ advance_params = models.ModifyTableRequestVectorIndexAdvanceParams()
  str_max_neighbor_count = f'"proxima.hnsw.builder.max_neighbor_count":{self.case_config.M}'
  str_efc = f'"proxima.hnsw.builder.efconstruction":{self.case_config.ef_construction}'
  str_enable_adsampling = '"proxima.hnsw.builder.enable_adsampling":true'
@@ -95,7 +94,7 @@ class AliyunOpenSearch(VectorDB):
  str_thread_count,
  ],
  )
- advance_params.build_index_params = params
+ advance_params.build_index_params = "{" + params + "}"
  advance_params.search_index_params = (
  '{"proxima.hnsw.searcher.ef":400,"proxima.hnsw.searcher.dynamic_termination.prob_threshold":0.7}'
  )
@@ -103,7 +102,7 @@ class AliyunOpenSearch(VectorDB):
  create_table_request.vector_index = [vector_index]

  try:
- response = client.create_table(self.instance_id, create_table_request)
+ response = client.create_table(create_table_request)
  log.info(f"create table success: {response.body}")
  except Exception as error:
  log.info(error.message)
@@ -115,20 +114,20 @@
  self._active_index(client)

  # check if index create success
- def _active_index(self, client: searchengineClient) -> None:
+ def _active_index(self, client: Client) -> None:
  retry_times = 0
  while True:
  time.sleep(10)
  log.info(f"begin to {retry_times} times get table")
  retry_times += 1
- response = client.get_table(self.instance_id, self.collection_name)
+ response = client.get_table(self.collection_name)
  if response.body.result.status == "IN_USE":
  log.info(f"{self.collection_name} table begin to use.")
  return

- def _index_exists(self, client: searchengineClient) -> bool:
+ def _index_exists(self, client: Client) -> bool:
  try:
- client.get_table(self.instance_id, self.collection_name)
+ client.get_table(self.collection_name)
  except Exception as err:
  log.warning(f"get table from searchengine error, err={err}")
  return False
@@ -136,7 +135,7 @@ class AliyunOpenSearch(VectorDB):
  return True

  # check if index build success, Insert the embeddings to the vector database after index build success
- def _index_build_success(self, client: searchengineClient) -> None:
+ def _index_build_success(self, client: Client) -> None:
  log.info("begin to check if table build success.")
  time.sleep(50)

@@ -145,10 +144,10 @@
  time.sleep(10)
  log.info(f"begin to {retry_times} times get table fsm")
  retry_times += 1
- request = searchengine_models.ListTasksRequest()
- request.start = (int(time.time()) - 3600) * 1000
- request.end = int(time.time()) * 1000
- response = client.list_tasks(self.instance_id, request)
+ request = models.ListTasksRequest()
+ request.start = int(time.time()) - 3600
+ request.end = int(time.time())
+ response = client.list_tasks(request)
  fsms = response.body.result
  cur_fsm = None
  for fsm in fsms:
@@ -164,11 +163,11 @@ class AliyunOpenSearch(VectorDB):
  if cur_fsm["status"] == "success":
  return

- def _modify_index(self, client: searchengineClient) -> None:
+ def _modify_index(self, client: Client) -> None:
  # check if index create success
  self._active_index(client)

- modify_table_request = searchengine_models.ModifyTableRequest()
+ modify_table_request = models.ModifyTableRequest()
  modify_table_request.partition_count = 1
  modify_table_request.primary_key = self._primary_field
  modify_table_request.field_schema = {
@@ -176,13 +175,13 @@ class AliyunOpenSearch(VectorDB):
  self._vector_field: "MULTI_FLOAT",
  self._scalar_field: "INT64",
  }
- vector_index = searchengine_models.ModifyTableRequestVectorIndex()
+ vector_index = models.ModifyTableRequestVectorIndex()
  vector_index.index_name = self._index_name
  vector_index.dimension = self.dim
  vector_index.distance_type = self.case_config.distance_type()
  vector_index.vector_field = self._vector_field
  vector_index.vector_index_type = "HNSW"
- advance_params = searchengine_models.ModifyTableRequestVectorIndexAdvanceParams()
+ advance_params = models.ModifyTableRequestVectorIndexAdvanceParams()

  str_max_neighbor_count = f'"proxima.hnsw.builder.max_neighbor_count":{self.case_config.M}'
  str_efc = f'"proxima.hnsw.builder.efconstruction":{self.case_config.ef_construction}'
@@ -199,7 +198,7 @@ class AliyunOpenSearch(VectorDB):
  str_thread_count,
  ],
  )
- advance_params.build_index_params = params
+ advance_params.build_index_params = "{" + params + "}"
  advance_params.search_index_params = (
  '{"proxima.hnsw.searcher.ef":400,"proxima.hnsw.searcher.dynamic_termination.prob_threshold":0.7}'
  )
@@ -209,7 +208,6 @@ class AliyunOpenSearch(VectorDB):

  try:
  response = client.modify_table(
- self.instance_id,
  self.collection_name,
  modify_table_request,
  )
@@ -240,14 +238,8 @@ class AliyunOpenSearch(VectorDB):
  @contextmanager
  def init(self) -> None:
  """connect to aliyun opensearch"""
- config = models.Config(
- endpoint=self.db_config["host"],
- protocol="http",
- access_user_name=self.db_config["user"],
- access_pass_word=self.db_config["password"],
- )

- self.client = client.Client(config)
+ self.client = Client(self.config)

  yield
  self.client = None
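
The hunks above (the Aliyun OpenSearch client, aliyun_opensearch.py) drop the separate alibabacloud_searchengine20211025 control-plane client: table management (create_table, modify_table, get_table, list_tasks) now goes through the same alibabacloud_ha3engine_vector Client that serves queries, with the connection settings built once in __init__ and reused by init(). A minimal sketch of that setup, using only the calls visible in the diff; the endpoint, user, password, and table name are placeholders:

    from alibabacloud_ha3engine_vector import models
    from alibabacloud_ha3engine_vector.client import Client

    # Connection settings correspond to the benchmark's db_config keys
    # ("host", "user", "password") used in the diff above; values are placeholders.
    config = models.Config(
        endpoint="ha-cn-xxxxxx.public.ha.aliyuncs.com",
        protocol="http",
        access_user_name="benchmark-user",
        access_pass_word="benchmark-password",
    )
    client = Client(config)

    # One client now covers both table management and querying,
    # e.g. checking whether the benchmark table already exists:
    try:
        client.get_table("vdb_bench_table")  # placeholder table name
        table_exists = True
    except Exception:
        table_exists = False

The same refactor wraps build_index_params in braces ("{" + params + "}") and passes the list_tasks start/end timestamps in seconds rather than milliseconds, presumably to match what the ha3engine endpoints expect.
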
@@ -12,18 +12,11 @@ class AliyunOpenSearchConfig(DBConfig, BaseModel):
  user: str = ""
  password: SecretStr = ""

- ak: str = ""
- sk: SecretStr = ""
- control_host: str = "searchengine.cn-hangzhou.aliyuncs.com"
-
  def to_dict(self) -> dict:
  return {
  "host": self.host,
  "user": self.user,
  "password": self.password.get_secret_value(),
- "ak": self.ak,
- "sk": self.sk.get_secret_value(),
- "control_host": self.control_host,
  }

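
With the control-plane client gone, AliyunOpenSearchConfig (the client's config.py) no longer carries the ak/sk credentials or a control_host; only the instance endpoint plus the table user and password remain. A rough sketch of the slimmed-down config, with placeholder values:

    from vectordb_bench.backend.clients.aliyun_opensearch.config import AliyunOpenSearchConfig

    config = AliyunOpenSearchConfig(
        host="http://ha-cn-xxxxxx.public.ha.aliyuncs.com",
        user="benchmark-user",
        password="benchmark-password",
    )
    # to_dict() now exposes only the three connection fields:
    # {"host": "http://...", "user": "benchmark-user", "password": "benchmark-password"}
    print(config.to_dict())
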
@@ -109,6 +109,162 @@ def MilvusHNSW(**parameters: Unpack[MilvusHNSWTypedDict]):
  )


+ class MilvusRefineTypedDict(TypedDict):
+ refine: Annotated[
+ bool,
+ click.option(
+ "--refine",
+ type=bool,
+ required=True,
+ help="Whether refined data is reserved during index building.",
+ ),
+ ]
+ refine_type: Annotated[
+ str | None,
+ click.option(
+ "--refine-type",
+ type=click.Choice(["SQ6", "SQ8", "BF16", "FP16", "FP32"], case_sensitive=False),
+ help="The data type of the refine index to use. Supported values: SQ6,SQ8,BF16,FP16,FP32",
+ required=True,
+ ),
+ ]
+ refine_k: Annotated[
+ float,
+ click.option(
+ "--refine-k",
+ type=float,
+ help="The magnification factor of refine compared to k.",
+ required=True,
+ ),
+ ]
+
+
+ class MilvusHNSWPQTypedDict(
+ CommonTypedDict,
+ MilvusTypedDict,
+ MilvusHNSWTypedDict,
+ MilvusRefineTypedDict
+ ):
+ nbits: Annotated[
+ int,
+ click.option(
+ "--nbits",
+ type=int,
+ required=True,
+ )
+ ]
+
+
+ @cli.command()
+ @click_parameter_decorators_from_typed_dict(MilvusHNSWPQTypedDict)
+ def MilvusHNSWPQ(**parameters: Unpack[MilvusHNSWPQTypedDict]):
+ from .config import HNSWPQConfig, MilvusConfig
+
+ run(
+ db=DBTYPE,
+ db_config=MilvusConfig(
+ db_label=parameters["db_label"],
+ uri=SecretStr(parameters["uri"]),
+ user=parameters["user_name"],
+ password=SecretStr(parameters["password"]) if parameters["password"] else None,
+ num_shards=int(parameters["num_shards"]),
+ ),
+ db_case_config=HNSWPQConfig(
+ M=parameters["m"],
+ efConstruction=parameters["ef_construction"],
+ ef=parameters["ef_search"],
+ nbits=parameters["nbits"],
+ refine=parameters["refine"],
+ refine_type=parameters["refine_type"],
+ refine_k=parameters["refine_k"],
+ ),
+ **parameters,
+ )
+
+
+ class MilvusHNSWPRQTypedDict(
+ CommonTypedDict,
+ MilvusTypedDict,
+ MilvusHNSWPQTypedDict,
+ ):
+ nrq: Annotated[
+ int,
+ click.option(
+ "--nrq",
+ type=int,
+ help="The number of residual subquantizers.",
+ required=True,
+ )
+ ]
+
+
+ @cli.command()
+ @click_parameter_decorators_from_typed_dict(MilvusHNSWPRQTypedDict)
+ def MilvusHNSWPRQ(**parameters: Unpack[MilvusHNSWPRQTypedDict]):
+ from .config import HNSWPRQConfig, MilvusConfig
+
+ run(
+ db=DBTYPE,
+ db_config=MilvusConfig(
+ db_label=parameters["db_label"],
+ uri=SecretStr(parameters["uri"]),
+ user=parameters["user_name"],
+ password=SecretStr(parameters["password"]) if parameters["password"] else None,
+ num_shards=int(parameters["num_shards"]),
+ ),
+ db_case_config=HNSWPRQConfig(
+ M=parameters["m"],
+ efConstruction=parameters["ef_construction"],
+ ef=parameters["ef_search"],
+ nbits=parameters["nbits"],
+ refine=parameters["refine"],
+ refine_type=parameters["refine_type"],
+ refine_k=parameters["refine_k"],
+ nrq=parameters["nrq"]
+ ),
+ **parameters,
+ )
+
+
+ class MilvusHNSWSQTypedDict(CommonTypedDict, MilvusTypedDict, MilvusHNSWTypedDict, MilvusRefineTypedDict):
+ sq_type: Annotated[
+ str | None,
+ click.option(
+ "--sq-type",
+ type=click.Choice(["SQ6", "SQ8", "BF16", "FP16", "FP32"], case_sensitive=False),
+ help="Scalar quantizer type. Supported values: SQ6,SQ8,BF16,FP16,FP32",
+ required=True,
+ ),
+ ]
+
+
+ @cli.command()
+ @click_parameter_decorators_from_typed_dict(MilvusHNSWSQTypedDict)
+ def MilvusHNSWSQ(**parameters: Unpack[MilvusHNSWSQTypedDict]):
+ from .config import HNSWSQConfig, MilvusConfig
+
+ run(
+ db=DBTYPE,
+ db_config=MilvusConfig(
+ db_label=parameters["db_label"],
+ uri=SecretStr(parameters["uri"]),
+ user=parameters["user_name"],
+ password=SecretStr(parameters["password"]) if parameters["password"] else None,
+ num_shards=int(parameters["num_shards"]),
+ ),
+ db_case_config=HNSWSQConfig(
+ M=parameters["m"],
+ efConstruction=parameters["ef_construction"],
+ ef=parameters["ef_search"],
+ sq_type=parameters["sq_type"],
+ refine=parameters["refine"],
+ refine_type=parameters["refine_type"],
+ refine_k=parameters["refine_k"],
+ ),
+ **parameters,
+ )
+
+
  class MilvusIVFFlatTypedDict(CommonTypedDict, MilvusTypedDict, IVFFlatTypedDictN): ...


@@ -156,6 +312,71 @@ def MilvusIVFSQ8(**parameters: Unpack[MilvusIVFFlatTypedDict]):
  )


+ class MilvusIVFRABITQTypedDict(CommonTypedDict, MilvusTypedDict, MilvusIVFFlatTypedDict):
+ rbq_bits_query: Annotated[
+ int,
+ click.option(
+ "--rbq-bits-query",
+ type=int,
+ help="The magnification factor of refine compared to k.",
+ required=True,
+ ),
+ ]
+ refine: Annotated[
+ bool,
+ click.option(
+ "--refine",
+ type=bool,
+ required=True,
+ help="Whether refined data is reserved during index building.",
+ ),
+ ]
+ refine_type: Annotated[
+ str | None,
+ click.option(
+ "--refine-type",
+ type=click.Choice(["SQ6", "SQ8", "BF16", "FP16", "FP32"], case_sensitive=False),
+ help="The data type of the refine index to use. Supported values: SQ6,SQ8,BF16,FP16,FP32",
+ required=True,
+ ),
+ ]
+ refine_k: Annotated[
+ float,
+ click.option(
+ "--refine-k",
+ type=float,
+ help="The magnification factor of refine compared to k.",
+ required=True,
+ ),
+ ]
+
+
+ @cli.command()
+ @click_parameter_decorators_from_typed_dict(MilvusIVFRABITQTypedDict)
+ def MilvusIVFRabitQ(**parameters: Unpack[MilvusIVFRABITQTypedDict]):
+ from .config import IVFRABITQConfig, MilvusConfig
+
+ run(
+ db=DBTYPE,
+ db_config=MilvusConfig(
+ db_label=parameters["db_label"],
+ uri=SecretStr(parameters["uri"]),
+ user=parameters["user_name"],
+ password=SecretStr(parameters["password"]) if parameters["password"] else None,
+ num_shards=int(parameters["num_shards"]),
+ ),
+ db_case_config=IVFRABITQConfig(
+ nlist=parameters["nlist"],
+ nprobe=parameters["nprobe"],
+ rbq_bits_query=parameters["rbq_bits_query"],
+ refine=parameters["refine"],
+ refine_type=parameters["refine_type"],
+ refine_k=parameters["refine_k"],
+ ),
+ **parameters,
+ )
+
+
  class MilvusDISKANNTypedDict(CommonTypedDict, MilvusTypedDict):
  search_list: Annotated[str, click.option("--search-list", type=int, required=True)]

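
The two hunks above extend the Milvus CLI (milvus/cli.py) with commands for the HNSW_PQ, HNSW_PRQ, HNSW_SQ, and IVF_RABITQ index types, sharing a MilvusRefineTypedDict for the --refine / --refine-type / --refine-k options. Under click's default command naming these should surface as lower-cased subcommands of the vectordbbench entry point (e.g. milvushnswpq, milvusivfrabitq). Each command forwards its options into the matching case config imported from .config; a sketch of building one directly, with illustrative parameter values:

    from vectordb_bench.backend.clients.milvus.config import HNSWPQConfig

    # Same keyword arguments the new MilvusHNSWPQ command passes through;
    # the numeric values here are placeholders, not recommendations.
    case_config = HNSWPQConfig(
        M=30,
        efConstruction=360,
        ef=100,
        nbits=8,
        refine=True,
        refine_type="SQ8",
        refine_k=2.0,
    )
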
@@ -294,6 +294,7 @@ class DatasetManager(BaseModel):
  download_files.extend([gt_file, test_file])
  if self.data.with_scalar_labels and self.data.scalar_labels_file_separated:
  download_files.append(self.data.scalar_labels_file)
+ download_files = [file for file in download_files if file is not None]
  source.reader().read(
  dataset=self.data.dir_name.lower(),
  files=download_files,
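
The one-line addition to DatasetManager (backend/dataset.py) filters None entries out of the download list before it reaches source.reader().read(); presumably an optional entry such as a separated scalar-labels file can be unset, and the guard keeps a missing filename from being requested. In isolation (placeholder file names):

    download_files = ["test.parquet", None, "neighbors.parquet"]
    download_files = [file for file in download_files if file is not None]
    # -> ["test.parquet", "neighbors.parquet"]
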
@@ -4139,7 +4139,7 @@
  "task_config": {
  "db": "ElasticCloud",
  "db_config": {
- "db_label": "8c60g",
+ "db_label": "8c60g-force_merge",
  "version": "8.17",
  "note": "",
  "cloud_id": "**********",
@@ -4250,7 +4250,7 @@
  "task_config": {
  "db": "ElasticCloud",
  "db_config": {
- "db_label": "8c60g",
+ "db_label": "8c60g-force_merge",
  "version": "8.17",
  "note": "",
  "cloud_id": "**********",
@@ -4361,7 +4361,7 @@
  "task_config": {
  "db": "ElasticCloud",
  "db_config": {
- "db_label": "8c60g",
+ "db_label": "8c60g-force_merge",
  "version": "8.17",
  "note": "",
  "cloud_id": "**********",
@@ -4472,7 +4472,7 @@
  "task_config": {
  "db": "ElasticCloud",
  "db_config": {
- "db_label": "8c60g",
+ "db_label": "8c60g-force_merge",
  "version": "8.17",
  "note": "",
  "cloud_id": "**********",
@@ -4583,7 +4583,7 @@
  "task_config": {
  "db": "ElasticCloud",
  "db_config": {
- "db_label": "8c60g",
+ "db_label": "8c60g-force_merge",
  "version": "8.17",
  "note": "",
  "cloud_id": "**********",
@@ -4694,7 +4694,7 @@
  "task_config": {
  "db": "ElasticCloud",
  "db_config": {
- "db_label": "8c60g",
+ "db_label": "8c60g-force_merge",
  "version": "8.17",
  "note": "",
  "cloud_id": "**********",
@@ -49,7 +49,7 @@ def main():
  "qps": round(qps, 4),
  "latency": round(latency, 4),
  "recall": round(recall, 4),
- "filter_ratio": round(filter_ratio, 2),
+ "filter_ratio": round(filter_ratio, 3),
  }
  )
  save_to_json(data, config.RESULTS_LOCAL_DIR / "leaderboard_v2.json")
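
Rounding filter_ratio to three decimals instead of two in getLeaderboardDataV2.py keeps the near-1.0 filter cases distinguishable: with two decimals, ratios of 0.999 and 0.998 both collapse to 1.0, which is exactly what the leaderboard_v2.json entries below correct:

    >>> [round(r, 2) for r in (0.999, 0.998, 0.995)]
    [1.0, 1.0, 0.99]
    >>> [round(r, 3) for r in (0.999, 0.998, 0.995)]
    [0.999, 0.998, 0.995]
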
@@ -77,7 +77,7 @@
  "qps": 3033.786,
  "latency": 8.7,
  "recall": 0.9934,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -87,7 +87,7 @@
  "qps": 3019.2416,
  "latency": 9.5,
  "recall": 0.9765,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -97,7 +97,7 @@
  "qps": 2890.9523,
  "latency": 9.4,
  "recall": 0.9625,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -207,7 +207,7 @@
  "qps": 350.0132,
  "latency": 29.7,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -217,7 +217,7 @@
  "qps": 179.5204,
  "latency": 51.4,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -227,7 +227,7 @@
  "qps": 72.99,
  "latency": 111.4,
  "recall": 1.0,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -382,8 +382,8 @@
  {
  "dataset": "Cohere (Medium)",
  "db": "ElasticCloud",
- "label": "8c60g",
- "db_name": "ElasticCloud-8c60g",
+ "label": "8c60g-force_merge",
+ "db_name": "ElasticCloud-8c60g-force_merge",
  "qps": 2095.7067,
  "latency": 12.4,
  "recall": 0.8961,
@@ -392,8 +392,8 @@
  {
  "dataset": "Cohere (Medium)",
  "db": "ElasticCloud",
- "label": "8c60g",
- "db_name": "ElasticCloud-8c60g",
+ "label": "8c60g-force_merge",
+ "db_name": "ElasticCloud-8c60g-force_merge",
  "qps": 1925.3019,
  "latency": 11.3,
  "recall": 0.9141,
@@ -402,8 +402,8 @@
  {
  "dataset": "Cohere (Medium)",
  "db": "ElasticCloud",
- "label": "8c60g",
- "db_name": "ElasticCloud-8c60g",
+ "label": "8c60g-force_merge",
+ "db_name": "ElasticCloud-8c60g-force_merge",
  "qps": 1707.8841,
  "latency": 10.0,
  "recall": 0.9314,
@@ -412,8 +412,8 @@
  {
  "dataset": "Cohere (Medium)",
  "db": "ElasticCloud",
- "label": "8c60g",
- "db_name": "ElasticCloud-8c60g",
+ "label": "8c60g-force_merge",
+ "db_name": "ElasticCloud-8c60g-force_merge",
  "qps": 1442.0638,
  "latency": 10.1,
  "recall": 0.9482,
@@ -422,8 +422,8 @@
  {
  "dataset": "Cohere (Medium)",
  "db": "ElasticCloud",
- "label": "8c60g",
- "db_name": "ElasticCloud-8c60g",
+ "label": "8c60g-force_merge",
+ "db_name": "ElasticCloud-8c60g-force_merge",
  "qps": 1115.106,
  "latency": 13.1,
  "recall": 0.9662,
@@ -432,8 +432,8 @@
  {
  "dataset": "Cohere (Medium)",
  "db": "ElasticCloud",
- "label": "8c60g",
- "db_name": "ElasticCloud-8c60g",
+ "label": "8c60g-force_merge",
+ "db_name": "ElasticCloud-8c60g-force_merge",
  "qps": 910.4322,
  "latency": 14.2,
  "recall": 0.9748,
@@ -447,7 +447,7 @@
  "qps": 2175.2694,
  "latency": 9.8,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -457,7 +457,7 @@
  "qps": 1430.0244,
  "latency": 12.6,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -467,7 +467,7 @@
  "qps": 692.5751,
  "latency": 18.7,
  "recall": 1.0,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -617,7 +617,7 @@
  "qps": 3680.6045,
  "latency": 2.2,
  "recall": 0.9954,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -627,7 +627,7 @@
  "qps": 3407.9972,
  "latency": 2.2,
  "recall": 0.994,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -637,7 +637,7 @@
  "qps": 3062.6755,
  "latency": 2.4,
  "recall": 0.9932,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -707,7 +707,7 @@
  "qps": 11763.5538,
  "latency": 1.5,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -717,7 +717,7 @@
  "qps": 11803.1944,
  "latency": 1.5,
  "recall": 0.9778,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -727,7 +727,7 @@
  "qps": 11520.9234,
  "latency": 1.5,
  "recall": 0.9634,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -877,7 +877,7 @@
  "qps": 432.8374,
  "latency": 5.0,
  "recall": 0.9865,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -887,7 +887,7 @@
  "qps": 368.6042,
  "latency": 5.4,
  "recall": 0.9843,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -897,7 +897,7 @@
  "qps": 318.8159,
  "latency": 7.1,
  "recall": 0.9836,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -967,7 +967,7 @@
  "qps": 11397.7043,
  "latency": 1.6,
  "recall": 0.9597,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -977,7 +977,7 @@
  "qps": 10891.7531,
  "latency": 1.7,
  "recall": 0.9408,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -987,7 +987,7 @@
  "qps": 10276.7451,
  "latency": 1.7,
  "recall": 0.9159,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -1067,7 +1067,7 @@
  "qps": 1148.1735,
  "latency": 8.9,
  "recall": 0.9801,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1077,7 +1077,7 @@
  "qps": 1149.1219,
  "latency": 10.3,
  "recall": 0.9764,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1087,7 +1087,7 @@
  "qps": 1140.4099,
  "latency": 13.5,
  "recall": 0.9716,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1167,7 +1167,7 @@
  "qps": 1114.952,
  "latency": 12.7,
  "recall": 0.97,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -1177,7 +1177,7 @@
  "qps": 583.5009,
  "latency": 23.0,
  "recall": 0.9668,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -1187,7 +1187,7 @@
  "qps": 31.4779,
  "latency": 351.0,
  "recall": 0.9414,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -1257,7 +1257,7 @@
  "qps": 4318.9697,
  "latency": 4.3,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1267,7 +1267,7 @@
  "qps": 4250.2894,
  "latency": 4.6,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1277,7 +1277,7 @@
  "qps": 2997.4391,
  "latency": 6.1,
  "recall": 1.0,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1347,7 +1347,7 @@
  "qps": 1202.8677,
  "latency": 7.0,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -1357,7 +1357,7 @@
  "qps": 639.3991,
  "latency": 7.3,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -1367,7 +1367,7 @@
  "qps": 274.8559,
  "latency": 9.9,
  "recall": 1.0,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -1727,7 +1727,7 @@
  "qps": 3099.4124,
  "latency": 6.2,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1737,7 +1737,7 @@
  "qps": 3014.2483,
  "latency": 7.0,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1747,7 +1747,7 @@
  "qps": 2073.2153,
  "latency": 11.0,
  "recall": 1.0,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1827,7 +1827,7 @@
  "qps": 3103.0539,
  "latency": 5.6,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1837,7 +1837,7 @@
  "qps": 3086.1957,
  "latency": 6.7,
  "recall": 1.0,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1847,7 +1847,7 @@
  "qps": 3090.0478,
  "latency": 6.4,
  "recall": 0.9628,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -1947,7 +1947,7 @@
  "qps": 1022.2696,
  "latency": 17.9,
  "recall": 0.936,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -1957,7 +1957,7 @@
  "qps": 696.9777,
  "latency": 24.6,
  "recall": 0.997,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -1967,7 +1967,7 @@
  "qps": 353.7862,
  "latency": 45.2,
  "recall": 1.0,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -2037,7 +2037,7 @@
  "qps": 3033.5491,
  "latency": 6.4,
  "recall": 0.9844,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -2047,7 +2047,7 @@
  "qps": 2988.4205,
  "latency": 7.6,
  "recall": 0.9741,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -2057,7 +2057,7 @@
  "qps": 2950.717,
  "latency": 6.9,
  "recall": 0.9558,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -2217,7 +2217,7 @@
  "qps": 9773.6593,
  "latency": 3.7,
  "recall": 0.9955,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -2227,7 +2227,7 @@
  "qps": 9081.1518,
  "latency": 3.0,
  "recall": 0.9943,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -2237,7 +2237,7 @@
  "qps": 8455.2896,
  "latency": 4.0,
  "recall": 0.9921,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -2307,7 +2307,7 @@
  "qps": 10089.4308,
  "latency": 2.6,
  "recall": 0.9934,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Medium)",
@@ -2317,7 +2317,7 @@
  "qps": 10557.4373,
  "latency": 2.7,
  "recall": 0.9393,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Medium)",
@@ -2327,7 +2327,7 @@
  "qps": 9805.0401,
  "latency": 2.6,
  "recall": 0.9257,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Medium)",
@@ -2397,7 +2397,7 @@
  "qps": 8695.2765,
  "latency": 4.3,
  "recall": 0.9603,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -2407,7 +2407,7 @@
  "qps": 9244.1135,
  "latency": 4.2,
  "recall": 0.9724,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -2417,7 +2417,7 @@
  "qps": 9289.0118,
  "latency": 4.2,
  "recall": 0.9574,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -2577,7 +2577,7 @@
  "qps": 3411.0934,
  "latency": 3.3,
  "recall": 0.995,
- "filter_ratio": 1.0
+ "filter_ratio": 0.999
  },
  {
  "dataset": "Cohere (Large)",
@@ -2587,7 +2587,7 @@
  "qps": 2838.356,
  "latency": 3.8,
  "recall": 0.9946,
- "filter_ratio": 1.0
+ "filter_ratio": 0.998
  },
  {
  "dataset": "Cohere (Large)",
@@ -2597,7 +2597,7 @@
  "qps": 1826.0672,
  "latency": 5.3,
  "recall": 0.9938,
- "filter_ratio": 0.99
+ "filter_ratio": 0.995
  },
  {
  "dataset": "Cohere (Large)",
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: vectordb-bench
- Version: 1.0.2
+ Version: 1.0.4
  Summary: VectorDBBench is not just an offering of benchmark results for mainstream vector databases and cloud services, it's your go-to tool for the ultimate performance and cost-effectiveness comparison. Designed with ease-of-use in mind, VectorDBBench is devised to help users, even non-professionals, reproduce results or test new systems, making the hunt for the optimal choice amongst a plethora of cloud services and open-source vector databases a breeze.
  Author-email: XuanYang-cn <xuan.yang@zilliz.com>
  Project-URL: repository, https://github.com/zilliztech/VectorDBBench
@@ -47,7 +47,6 @@ Requires-Dist: opensearch-dsl; extra == "all"
  Requires-Dist: opensearch-py; extra == "all"
  Requires-Dist: memorydb; extra == "all"
  Requires-Dist: alibabacloud_ha3engine_vector; extra == "all"
- Requires-Dist: alibabacloud_searchengine20211025; extra == "all"
  Requires-Dist: mariadb; extra == "all"
  Requires-Dist: PyMySQL; extra == "all"
  Requires-Dist: clickhouse-connect; extra == "all"
@@ -78,7 +77,6 @@ Provides-Extra: opensearch
  Requires-Dist: opensearch-py; extra == "opensearch"
  Provides-Extra: aliyun-opensearch
  Requires-Dist: alibabacloud_ha3engine_vector; extra == "aliyun-opensearch"
- Requires-Dist: alibabacloud_searchengine20211025; extra == "aliyun-opensearch"
  Provides-Extra: mongodb
  Requires-Dist: pymongo; extra == "mongodb"
  Provides-Extra: mariadb
@@ -110,7 +108,7 @@ Closely mimicking real-world production environments, we've set up diverse testi

  Prepare to delve into the world of VDBBench, and let it guide you in uncovering your perfect vector database match.

- VDBBench is sponsered by Zilliz,the leading opensource vectorDB company behind Milvus. Choose smarter with VDBBench - start your free test on [zilliz cloud](https://zilliz.com/) today!
+ VDBBench is sponsored by Zilliz,the leading opensource vectorDB company behind Milvus. Choose smarter with VDBBench - start your free test on [zilliz cloud](https://zilliz.com/) today!

  **Leaderboard:** https://zilliz.com/benchmark
  ## Quick Start
@@ -818,4 +816,4 @@ This multi-tiered timeout approach allows our benchmark to be more representativ
  </tr>
  </table>

- **Note:** Some datapoints in the standard benchmark results that voilate this timeout will be kept for now for reference. We will remove them in the future.
+ **Note:** Some datapoints in the standard benchmark results that violate this timeout will be kept for now for reference. We will remove them in the future.
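
Besides the version bump and the two typo fixes (sponsered → sponsored, voilate → violate), the metadata drops alibabacloud_searchengine20211025 from both the "all" and "aliyun-opensearch" extras, so an install such as pip install "vectordb-bench[aliyun-opensearch]" should now pull in only alibabacloud_ha3engine_vector for the Aliyun OpenSearch client.
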
@@ -9,7 +9,7 @@ vectordb_bench/backend/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
  vectordb_bench/backend/assembler.py,sha256=MdAOXVhCrRGT76Q21xBusCmDc4mXS7yMrhSYAlKPQVA,2785
  vectordb_bench/backend/cases.py,sha256=TdILp-UBrogsx0xYTkq6P4NLo-zx1SsdbE_hud1k9-A,23368
  vectordb_bench/backend/data_source.py,sha256=bfa_Zg4O9fRP2ENmVZ_2-NISKozoFN-TocyxOlw1JtE,5524
- vectordb_bench/backend/dataset.py,sha256=jFPV6wuQwfLi61EAgbTkT-VtXT04y9PffEd1atYH31A,13978
+ vectordb_bench/backend/dataset.py,sha256=22FIoT2YjvIJI9zCd4RFssjMrbnQX28nEIq-es7rdJ0,14061
  vectordb_bench/backend/filter.py,sha256=fDaq8SUab6KfwfGlkIQNGjE0k1gRjyXovQaTKzsUo4U,1922
  vectordb_bench/backend/result_collector.py,sha256=mpROVdZ-HChKBVyMV5TZ5v7YGRb69bvfT7Gezn5F5sY,819
  vectordb_bench/backend/task_runner.py,sha256=9idq-tS-wOL3XuN0Cdz_6LMw9MpuVAH_Hc8r9OtPTbI,13039
@@ -18,8 +18,8 @@ vectordb_bench/backend/clients/__init__.py,sha256=VTbkRw9HpLowKDRp_FxRwobZmIe6uR
  vectordb_bench/backend/clients/api.py,sha256=BnVzFdvlj3cQYUx6DCbQZ08Tae2cJA5ZKuep9H-5X80,7368
  vectordb_bench/backend/clients/aliyun_elasticsearch/aliyun_elasticsearch.py,sha256=7yPYaWoHeHNxDMtpReGXsdEPFD1e4vQblFor7TmLq5o,770
  vectordb_bench/backend/clients/aliyun_elasticsearch/config.py,sha256=d9RCgfCgauKvy6z9ig_wBormgwiGtkh8POyoHloHnJA,505
- vectordb_bench/backend/clients/aliyun_opensearch/aliyun_opensearch.py,sha256=rwa4rtbbP2Kaczh7Bf0bc_lE_sGG5w9PhtfdFu7rQNs,13237
- vectordb_bench/backend/clients/aliyun_opensearch/config.py,sha256=KSiuRu-p7oL2PEukfD6SvYCKg1jTVvro9lMcUnQSN6I,1214
+ vectordb_bench/backend/clients/aliyun_opensearch/aliyun_opensearch.py,sha256=YoQr9Gw8yulE9bM3QOSYyK-qIQyoXzl9nvCdDCwN1H4,12505
+ vectordb_bench/backend/clients/aliyun_opensearch/config.py,sha256=tJonjx703YRqySaF8jl7hPlk7SmicVp9DwNqLO56hYI,989
  vectordb_bench/backend/clients/alloydb/alloydb.py,sha256=E24hxCUgpBCRiScdcS_iBk8n0wngUgVg8qujOWiUhw0,13009
  vectordb_bench/backend/clients/alloydb/cli.py,sha256=G6Q0WApoDXDG_pqmK2lEKFIvKB8qAsZFPM8TfsURydE,5086
  vectordb_bench/backend/clients/alloydb/config.py,sha256=PJs2wIJqwcG6UJ3T8R7Pi3xTMBfxTZiNkcWyhtHv5dc,5313
@@ -43,7 +43,7 @@ vectordb_bench/backend/clients/mariadb/mariadb.py,sha256=O2PY7pP3dYdp-aTOQLDVckd
  vectordb_bench/backend/clients/memorydb/cli.py,sha256=mUpBN0VoE6M55AAEwyd20uEtPkOpckJzmcP2XXpue30,2659
  vectordb_bench/backend/clients/memorydb/config.py,sha256=D2Q-HkDwnmz98ek1e_iNu4o9CIRB14pOQWSZgRvd6oY,1500
  vectordb_bench/backend/clients/memorydb/memorydb.py,sha256=5PPOSdFLQes6Gq5H3Yfi_q2m32eErMfNVO86qIjlnoc,10219
- vectordb_bench/backend/clients/milvus/cli.py,sha256=Mtrp8mQF6z0PCnBV8hndkO2Rfj5n9qTGbUL1BoVoems,11043
+ vectordb_bench/backend/clients/milvus/cli.py,sha256=1a14Y2uFnhjKTXNFhCOUvgjAUeIfqZ0H8nP8z-_gog0,17736
  vectordb_bench/backend/clients/milvus/config.py,sha256=072nqR0EdlGWAM5e_TqyNCBrQXLcGNWpPMcUBz9mCus,12852
  vectordb_bench/backend/clients/milvus/milvus.py,sha256=06G53iQZDo6Sfp0VRenAzGvxTQ5c41Dati-Cx6KZPIw,9278
  vectordb_bench/backend/clients/mongodb/config.py,sha256=7DZCh0bjPiqJW2luPypfpNeGfvKxVC4mdHLqgcjF1hA,1745
@@ -149,12 +149,12 @@ vectordb_bench/frontend/pages/streaming.py,sha256=dndTQhje1RpV8FOXpE4MVnht_-4rzP
  vectordb_bench/frontend/pages/tables.py,sha256=ANJhrykG94ec3Vs7HJiymvzH5NbjLCei78Sf8nTTG_I,677
  vectordb_bench/results/dbPrices.json,sha256=50y-RrqDN3oAdwiUW4irMKV1IRgzR1iFOQcl8lG7950,749
  vectordb_bench/results/getLeaderboardData.py,sha256=fuNQmFuWEdm60McaQrXSGLApNOHRnfmvzn1soT3iGHE,2323
- vectordb_bench/results/getLeaderboardDataV2.py,sha256=qNdk_Zcr4xn3qSou16HRA4xNP6P-ZDTcIvBRkD6kbgM,1939
+ vectordb_bench/results/getLeaderboardDataV2.py,sha256=y8HUVTI8-mqVSWt5Qb8YjqhhMCPR4DXadTPBh8AZ1fQ,1939
  vectordb_bench/results/leaderboard.json,sha256=OooOar8Pj0hG-HlpOU8N_hNjJS53PaMMRSoSUtqLJ-k,69276
- vectordb_bench/results/leaderboard_v2.json,sha256=dDtPd8hoZAbc8rIbIvLUkg00jurloFegNeoGec8yM_Y,71042
+ vectordb_bench/results/leaderboard_v2.json,sha256=9XdPdqKsTzDP7FY_-wkXRcR4CLpnTuynlJWpaNJf98g,71281
  vectordb_bench/results/ElasticCloud/result_20230727_standard_elasticcloud.json,sha256=IyJKjHGwTCcqKJAaBgfI_hhvMIGrXMl8S9Z2-19BvEE,5807
  vectordb_bench/results/ElasticCloud/result_20230808_standard_elasticcloud.json,sha256=sx_B3lbWICcMrePiYqeoJ179pwHD2l78bMf2B880QI0,4431
- vectordb_bench/results/ElasticCloud/result_20250318_standard_elasticcloud.json,sha256=d5cfP-4oEy1VsWFoaNLQ2iUXxqv27zQDEBG80b0QPw0,204853
+ vectordb_bench/results/ElasticCloud/result_20250318_standard_elasticcloud.json,sha256=MhOGcXwbuuRt3w1ItWiCsRI4tfY81G_JmC6-SW_-dsk,204925
  vectordb_bench/results/Milvus/result_20230727_standard_milvus.json,sha256=UqwfO78qx2zRRWp29J-Egw8K8R5j-FFj9vX0iUrNRm8,17709
  vectordb_bench/results/Milvus/result_20230808_standard_milvus.json,sha256=ck9loRNVUGHnjSyuHWN76lGNRRPtWMW7Cj0zf3uwXEo,16303
  vectordb_bench/results/Milvus/result_20250509_standard_milvus.json,sha256=0v9-x488ikM2c_StMOOHCqqIh0iPxi1HJIUWTIQGSEQ,209904
@@ -170,9 +170,9 @@ vectordb_bench/results/QdrantCloud/result_20250602_standard_qdrantcloud.json,sha
  vectordb_bench/results/WeaviateCloud/result_20230727_standard_weaviatecloud.json,sha256=WBlfjmbO3R4G6F4lDuneEigffUyTU7ti1SyWoff3oNI,15497
  vectordb_bench/results/WeaviateCloud/result_20230808_standard_weaviatecloud.json,sha256=lXjudo-l-6H0EOIemoB5n4GddOOHJnwndrGwCJIH-EY,7865
  vectordb_bench/results/ZillizCloud/result_20250613_standard_zillizcloud.json,sha256=gZCnDanS5Yb6Uzvb0Q6wDxMl81UAoGzsZRHU8JwqNds,215610
- vectordb_bench-1.0.2.dist-info/licenses/LICENSE,sha256=HXbxhrb5u5SegVzeLNF_voVgRsJMavcLaOmD1N0lZkM,1067
- vectordb_bench-1.0.2.dist-info/METADATA,sha256=4Ql8Uf4-oa5lqmvgskkBx1OwTMxaC1_72oUqEPO_q5Y,40753
- vectordb_bench-1.0.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- vectordb_bench-1.0.2.dist-info/entry_points.txt,sha256=Qzw6gVx96ui8esG21H6yHsI6nboEohRmV424TYhQNrA,113
- vectordb_bench-1.0.2.dist-info/top_level.txt,sha256=jnhZFZAuKX1J60yt-XOeBZ__ctiZMvoC_s0RFq29lpM,15
- vectordb_bench-1.0.2.dist-info/RECORD,,
+ vectordb_bench-1.0.4.dist-info/licenses/LICENSE,sha256=HXbxhrb5u5SegVzeLNF_voVgRsJMavcLaOmD1N0lZkM,1067
+ vectordb_bench-1.0.4.dist-info/METADATA,sha256=ugyBSZhCJ-h0zCeJBrE8vBSa34078P6nS7o7F2JXwXw,40609
+ vectordb_bench-1.0.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ vectordb_bench-1.0.4.dist-info/entry_points.txt,sha256=Qzw6gVx96ui8esG21H6yHsI6nboEohRmV424TYhQNrA,113
+ vectordb_bench-1.0.4.dist-info/top_level.txt,sha256=jnhZFZAuKX1J60yt-XOeBZ__ctiZMvoC_s0RFq29lpM,15
+ vectordb_bench-1.0.4.dist-info/RECORD,,