mteb 2.3.10__py3-none-any.whl → 2.3.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
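Most of the source hunks below make the same change: the module-level from PIL import Image (or from PIL.Image import Image) is removed, the name is instead imported only under typing.TYPE_CHECKING, from __future__ import annotations is added so the Image annotations are never evaluated at runtime, and the few code paths that actually touch an image import PIL locally at the call site. A minimal sketch of the pattern, with a hypothetical load_image helper standing in for the real call sites:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers; Pillow is not imported at runtime here.
    from PIL import Image


def load_image(source: str | Image.Image) -> Image.Image:
    """Open source if it is a path; pass an existing Image through."""
    # Deferred import: Pillow is only required when this function is called,
    # so the module itself imports cleanly without the optional dependency.
    from PIL import Image

    if isinstance(source, Image.Image):
        return source
    return Image.open(source)

The effect is that Pillow becomes a runtime-optional dependency: modules that merely annotate with Image no longer fail to import when Pillow is absent.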
@@ -1,10 +1,11 @@
+ from __future__ import annotations
+
  import logging
- from typing import Any
+ from typing import TYPE_CHECKING, Any

  import torch
  import torch.nn.functional as F
  from datasets import Dataset
- from PIL.Image import Image
  from torch.utils.data import DataLoader

  from mteb._create_dataloaders import (
@@ -15,6 +16,10 @@ from mteb._requires_package import requires_image_dependencies
  from mteb.abstasks.task_metadata import TaskMetadata
  from mteb.models.models_protocols import EncoderProtocol

+ if TYPE_CHECKING:
+     from PIL.Image import Image
+
+
  logger = logging.getLogger(__name__)

@@ -1,7 +1,8 @@
+ from __future__ import annotations
+
  import hashlib
  from collections import Counter
-
- from PIL import Image
+ from typing import TYPE_CHECKING

  from mteb.types import TopRankedDocumentsType
  from mteb.types.statistics import (
@@ -13,6 +14,9 @@ from mteb.types.statistics import (
      TopRankedStatistics,
  )

+ if TYPE_CHECKING:
+     from PIL import Image
+

  def calculate_text_statistics(texts: list[str]) -> TextStatistics:
      """Calculate descriptive statistics for a list of texts.
@@ -5,7 +5,6 @@ from typing import Any, TypedDict

  import numpy as np
  from datasets import Dataset, DatasetDict
- from PIL import ImageFile
  from sklearn.linear_model import LogisticRegression
  from sklearn.metrics import (
      accuracy_score,
@@ -32,7 +31,6 @@ from ._statistics_calculation import (
  )
  from .abstask import AbsTask

- ImageFile.LOAD_TRUNCATED_IMAGES = True
  logger = logging.getLogger(__name__)

@@ -1,7 +1,5 @@
  import hashlib

- from PIL import Image
-
  from mteb.types import BatchedInput


@@ -11,6 +9,8 @@ def _hash_item(item: BatchedInput) -> str:
        item_hash = hashlib.sha256(item["text"].encode()).hexdigest()

    if "image" in item:
+       from PIL import Image
+
        image: Image.Image = item["image"]
        item_hash += hashlib.sha256(image.tobytes()).hexdigest()

@@ -1,8 +1,9 @@
+ from __future__ import annotations
+
  import logging
- from typing import Any
+ from typing import TYPE_CHECKING, Any

  import torch
- from PIL import Image
  from torch.utils.data import DataLoader
  from tqdm.auto import tqdm

@@ -15,6 +16,9 @@ from mteb.models.abs_encoder import AbsEncoder
  from mteb.models.model_meta import ModelMeta, ScoringFunction
  from mteb.types import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     from PIL import Image
+
  logger = logging.getLogger(__name__)


@@ -89,6 +93,7 @@ class ColPaliEngineWrapper(AbsEncoder):
          **kwargs,
      ):
          import torchvision.transforms.functional as F
+         from PIL import Image

          all_embeds = []

@@ -2,7 +2,6 @@ import logging
  from typing import Any

  import torch
- from PIL import Image
  from torch.utils.data import DataLoader
  from tqdm.auto import tqdm

@@ -154,6 +153,7 @@ class ColQwen3Wrapper(AbsEncoder):
          **kwargs: Any,
      ):
          import torchvision.transforms.functional as F
+         from PIL import Image

          contains_image = "image" in image_texts_pairs.dataset.features
          contains_text = "text" in image_texts_pairs.dataset.features
@@ -1,9 +1,10 @@
+ from __future__ import annotations
+
  import logging
  import math
- from typing import Any
+ from typing import TYPE_CHECKING, Any

  import torch
- from PIL import Image
  from torch.utils.data import DataLoader
  from tqdm.autonotebook import tqdm

@@ -12,6 +13,9 @@ from mteb.models.abs_encoder import AbsEncoder
  from mteb.models.model_meta import ModelMeta, ScoringFunction
  from mteb.types import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     from PIL import Image
+
  logger = logging.getLogger(__name__)

  GME_CITATION = """@misc{zhang2024gme,
@@ -267,9 +271,9 @@ def smart_resize(
      return h_bar, w_bar


- def fetch_image(
-     image: str | Image.Image, size_factor: int = IMAGE_FACTOR
- ) -> Image.Image:
+ def fetch_image(image: Image.Image, size_factor: int = IMAGE_FACTOR) -> Image.Image:
+     from PIL import Image
+
      image_obj = None
      if isinstance(image, Image.Image):
          image_obj = image
@@ -1,8 +1,9 @@
+ from __future__ import annotations
+
  import logging
- from typing import Any
+ from typing import TYPE_CHECKING, Any

  import torch
- from PIL import Image
  from torch.utils.data import DataLoader
  from tqdm.auto import tqdm

@@ -15,6 +16,9 @@ from mteb.types import Array, BatchedInput, PromptType

  logger = logging.getLogger(__name__)

+ if TYPE_CHECKING:
+     from PIL import Image
+

  class GraniteVisionEmbeddingWrapper:
      def __init__(
@@ -355,13 +355,13 @@ Jasper_Token_Compression_600M = ModelMeta(
  | qzhou_training_data,
  citation="""
  @misc{zhang2025jaspertokencompression600mtechnicalreport,
- title={Jasper-Token-Compression-600M Technical Report},
+ title={Jasper-Token-Compression-600M Technical Report},
  author={Dun Zhang and Ziyang Zeng and Yudong Zhou and Shuyang Lu},
  year={2025},
  eprint={2511.14405},
  archivePrefix={arXiv},
  primaryClass={cs.IR},
- url={https://arxiv.org/abs/2511.14405},
+ url={https://arxiv.org/abs/2511.14405},
  }
  """,
  )
@@ -740,7 +740,7 @@ jina_reranker_v3 = ModelMeta(
  training_datasets=JINARerankerV3_TRAINING_DATA,
  adapted_from="Qwen/Qwen3-0.6B",
  citation="""@misc{wang2025jinarerankerv3lateinteractionlistwise,
- title={jina-reranker-v3: Last but Not Late Interaction for Listwise Document Reranking},
+ title={jina-reranker-v3: Last but Not Late Interaction for Listwise Document Reranking},
  author={Feng Wang and Yuqing Li and Han Xiao},
  year={2025},
  eprint={2509.25085},
@@ -1,8 +1,9 @@
- from typing import Any
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any

  import torch
  import torch.nn.functional as F
- from PIL import Image
  from torch.utils.data import DataLoader
  from tqdm.auto import tqdm

@@ -12,6 +13,9 @@ from mteb.models.abs_encoder import AbsEncoder
  from mteb.models.model_meta import ModelMeta, ScoringFunction
  from mteb.types import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     from PIL import Image
+
  NOMIC_EMBED_VISION_CITATION = """@article{nussbaum2024nomicembedvision,
  title={Nomic Embed Vision: Expanding the Latent Space},
  author={Nussbaum, Zach and Duderstadt, Brandon and Mulyar, Andriy},
@@ -1,7 +1,6 @@
- from typing import Any
+ from typing import TYPE_CHECKING, Any

  import torch
- from PIL import Image
  from torch.utils.data import DataLoader

  from mteb.abstasks.task_metadata import TaskMetadata
@@ -9,6 +8,10 @@ from mteb.models.abs_encoder import AbsEncoder
  from mteb.models.model_meta import ModelMeta
  from mteb.types import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     pass
+
+
  LLAMA_NEMORETRIEVER_CITATION = """@misc{xu2025llamanemoretrievercolembedtopperforming,
  title={Llama Nemoretriever Colembed: Top-Performing Text-Image Retrieval Model},
  author={Mengyao Xu and Gabriel Moreira and Ronay Ak and Radek Osmulski and Yauhen Babakhin and Zhiding Yu and Benedikt Schifferer and Even Oldridge},
@@ -53,6 +56,7 @@ class LlamaNemoretrieverColembed(AbsEncoder):
          **kwargs,
      ):
          import torchvision.transforms.functional as F
+         from PIL import Image

          all_images = []
          if isinstance(images, DataLoader):
@@ -328,13 +328,10 @@ class MultiVectorModel(AbsEncoder, PylateSearchEncoder):
            inputs,
            prompt_name=prompt_name,
            is_query=prompt_type == PromptType.query,
-           convert_to_tensor=True,
            **kwargs,
        )

-       # encode returns a list of tensors shaped (x, token_dim), pad to uniform length
-       pred = torch.nn.utils.rnn.pad_sequence(pred, batch_first=True, padding_value=0)
-       return pred.cpu().numpy()
+       return pred


  colbert_v2 = ModelMeta(
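The comment removed above describes the post-processing that was dropped: encode returns a list of per-input token-embedding tensors of differing lengths, which the old code padded to a common length and converted to a NumPy array; 2.3.11 now returns the list unchanged. A small self-contained illustration of that padding step (the shapes here are invented for the example):

import torch

# Three inputs with 2, 4 and 3 tokens, each token a 4-dimensional vector.
pred = [torch.randn(2, 4), torch.randn(4, 4), torch.randn(3, 4)]

# Pad along the token dimension so every item has the same number of rows.
padded = torch.nn.utils.rnn.pad_sequence(pred, batch_first=True, padding_value=0)
print(padded.shape)  # torch.Size([3, 4, 4])
as_numpy = padded.cpu().numpy()  # what the removed code used to return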
@@ -1,9 +1,10 @@
+ from __future__ import annotations
+
  import hashlib
- from typing import Any, Literal
+ from typing import TYPE_CHECKING, Any, Literal

  import numpy as np
  import torch
- from PIL import Image
  from torch.utils.data import DataLoader

  from mteb.abstasks.task_metadata import TaskMetadata
@@ -14,6 +15,9 @@ from mteb.similarity_functions import (
  )
  from mteb.types._encoder_io import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     from PIL import Image
+

  def _string_to_vector(text: str | None, size: int) -> np.ndarray:
      """Generate a deterministic random vector based on a string.
@@ -1,14 +1,15 @@
+ from __future__ import annotations
+
  import base64
  import logging
  import os
  import time
  from concurrent.futures import ThreadPoolExecutor, as_completed
  from io import BytesIO
- from typing import Any
+ from typing import TYPE_CHECKING, Any

  import requests
  import torch
- from PIL import Image
  from torch.utils.data import DataLoader

  from mteb._requires_package import requires_package
@@ -19,6 +20,10 @@ from mteb.models.model_implementations.nvidia_models import nvidia_training_data
  from mteb.models.model_meta import ModelMeta
  from mteb.types import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     from PIL import Image
+
+
  logger = logging.getLogger(__name__)

@@ -1,8 +1,9 @@
+ from __future__ import annotations
+
  import logging
- from typing import Any, Literal
+ from typing import TYPE_CHECKING, Any, Literal

  import torch
- from PIL import Image
  from torch.utils.data import DataLoader
  from tqdm.auto import tqdm

@@ -12,6 +13,9 @@ from mteb.models.abs_encoder import AbsEncoder
  from mteb.models.model_meta import ModelMeta, ScoringFunction
  from mteb.types import Array, BatchedInput, PromptType

+ if TYPE_CHECKING:
+     from PIL import Image
+

  def _downsample_image(
      image: Image.Image, max_pixels: int = 16000000, target_longest_side: int = 4000
mteb/types/_encoder_io.py CHANGED
@@ -1,13 +1,18 @@
+ from __future__ import annotations
+
  from collections.abc import Mapping
  from enum import Enum
- from typing import TypedDict
+ from typing import TYPE_CHECKING, TypedDict

  import numpy as np
  import torch
  from datasets import Dataset
- from PIL import Image
  from typing_extensions import NotRequired

+ if TYPE_CHECKING:
+     from PIL import Image
+
+
  # --- Output types ---
  Array = np.ndarray | torch.Tensor
  """General array type, can be a numpy array or a torch tensor."""
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mteb
- Version: 2.3.10
+ Version: 2.3.11
  Summary: Massive Text Embedding Benchmark
  Author-email: MTEB Contributors <niklas@huggingface.co>, Kenneth Enevoldsen <kenneth.enevoldsen@cas.au.dk>, Nouamane Tazi <nouamane@huggingface.co>, Nils Reimers <info@nils-reimers.de>
  Maintainer-email: Kenneth Enevoldsen <kenneth.enevoldsen@cas.au.dk>, Roman Solomatin <risolomatin@gmail.com>, Isaac Chung <chungisaac1217@gmail.com>
@@ -34,6 +34,7 @@ Requires-Dist: pydantic>=2.0.0
  Requires-Dist: polars>=0.20.22
  Provides-Extra: image
  Requires-Dist: torchvision>0.2.1; extra == "image"
+ Requires-Dist: transformers[torch-vision,vision]; extra == "image"
  Provides-Extra: codecarbon
  Requires-Dist: codecarbon<3.0.0,>=2.0.0; extra == "codecarbon"
  Provides-Extra: leaderboard
@@ -24,17 +24,17 @@ mteb/_evaluators/retrieval_metrics.py,sha256=we0damQCJrdaRUD6JlU2MM7Ls9xERP_OBS5
  mteb/_evaluators/sklearn_evaluator.py,sha256=f9SgBbvgCrkltdTebQTixT7KmIagGkjQ_cNnKuHTb3w,3772
  mteb/_evaluators/zeroshot_classification_evaluator.py,sha256=dQq6g9my-0xn_0fLJXSnhN9Qu6PuJtWCKGIDrlkeyJk,2282
  mteb/_evaluators/image/__init__.py,sha256=CsQd7OMkeV2Phun7paPWjayZ5qRnvj8H0TYBFeqMxag,148
- mteb/_evaluators/image/imagetext_pairclassification_evaluator.py,sha256=RSJxEIVvOl_Za07fk2jfXE-7QOwKsLOhIyGhZtLVABs,4808
+ mteb/_evaluators/image/imagetext_pairclassification_evaluator.py,sha256=lVizL_11s0yFAZzuGqv-wtkBbMaK7cArD1eUkxwG4uU,4883
  mteb/_evaluators/text/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  mteb/_evaluators/text/bitext_mining_evaluator.py,sha256=XS7AVml5-BpQWtG1XFHf6fx8VMVPRwibg-9si4b-A_U,6308
  mteb/_evaluators/text/summarization_evaluator.py,sha256=l0AwjVO594mtzPV9Kcqf_xtHHpkx6uhDJ61KnolcVAo,10461
  mteb/abstasks/__init__.py,sha256=1iAwpYTWX7U-goak2KMmacPFCzxPchLQAmZ_uI0t-p0,1130
- mteb/abstasks/_statistics_calculation.py,sha256=gf_BhkXbCdwT1w4xza3op6FmjFsZIsNjvyUMCgRyTrg,5578
+ mteb/abstasks/_statistics_calculation.py,sha256=UP2H2Cy8yqwtqeimTWfe4unmZ4iyyr5qiBNZzzFjy9o,5669
  mteb/abstasks/_stratification.py,sha256=zfwkIVmD7Aq7mR2Yt8jTeW1j5ZVV7CIweW842VzcfXc,14364
  mteb/abstasks/abstask.py,sha256=nZwiY_5d0VVtUrlCATngpFLG3JAovO5AvmD0nkkWsLE,25118
  mteb/abstasks/aggregate_task_metadata.py,sha256=vzt1z2wDl0sXD7ErZFwKojYwmFUBPAnGlXLuqLA_-6Q,5992
  mteb/abstasks/aggregated_task.py,sha256=puY6-EAqbL5ehKvFHTMriIdy3rAuqqYHF3ezog1eYxw,6671
- mteb/abstasks/classification.py,sha256=Es9pmRdjv6xbc-KnGqVdO6dR1cc7yAHhekCZES7n5o8,13441
+ mteb/abstasks/classification.py,sha256=k_wrM1rq2XcVEK97RpU_uEcqhiWWbV7sm3B0dtvP5yY,13376
  mteb/abstasks/clustering.py,sha256=4KcaU8_sNLmLvMhwDpNmcY2nD3BNyx_LcM-ddSv-wtY,14410
  mteb/abstasks/clustering_legacy.py,sha256=HZY8zgBgqqs5urF_to9wzqm3MnjFivs59hU6P3NrzcI,8684
  mteb/abstasks/dataset_card_template.md,sha256=aD6l8qc3_jxwoIGJNYLzse-jpRa8hu92AxpnUtNgges,5122
@@ -1447,7 +1447,7 @@ mteb/models/cache_wrappers/__init__.py,sha256=1w1TnMwulWJSzNkLXjbh5MY3sqgHWc6vUn
  mteb/models/cache_wrappers/cache_backend_protocol.py,sha256=TR7kD7KbN1J4piszIecpegtLZYGy7sRHZt3SDWlImKk,1665
  mteb/models/cache_wrappers/cache_wrapper.py,sha256=KLDeOCe_ndQshbZa5ep2u3jovsl--tfpQzvt9EXyxCA,6589
  mteb/models/cache_wrappers/cache_backends/__init__.py,sha256=hN2Tq7cpTxoOYSCJ1Wnpvb8dEm-kQLfCCahT1N9Bacw,123
- mteb/models/cache_wrappers/cache_backends/_hash_utils.py,sha256=WeQjXlNUpGpb-CbaWpu_EtezML7zoDYfbwIo0Jswvsc,480
+ mteb/models/cache_wrappers/cache_backends/_hash_utils.py,sha256=zAp7BDuYyGETn2kX58uk8_tn1G2B7bgcsItDDxgyn-w,488
  mteb/models/cache_wrappers/cache_backends/faiss_cache.py,sha256=i9IfaCv1-_BvVokXFW1UZ9hMLCuM6rZ0tI-ZesoBkt4,3734
  mteb/models/cache_wrappers/cache_backends/numpy_cache.py,sha256=GyTVC5DLph3EeRnDMO1EEQzBDoOgk2J1hPqpl07lefM,7442
  mteb/models/model_implementations/__init__.py,sha256=BZDdde6ajKv-yroy9mqE2YS3Hw1KBdKoxBPg8aPTZEs,1164
@@ -1471,8 +1471,8 @@ mteb/models/model_implementations/codefuse_models.py,sha256=19Y-d_qetVU64quzEvuU
  mteb/models/model_implementations/codesage_models.py,sha256=D4CdISGyv5f2GMYq4_efgm5qNq80SWAX5R2u5mjEiXM,2998
  mteb/models/model_implementations/cohere_models.py,sha256=OWFClVAN4phjBoxfGGDyGDmzMu-t2VrjCGFyAIWmz4w,13832
  mteb/models/model_implementations/cohere_v.py,sha256=K6VEw1NkyM2PuMd18kHE6aqPrcByYSwEmAKjvLods_w,15760
- mteb/models/model_implementations/colpali_models.py,sha256=7PJ0SshVXasyncTfZRFIf_ZWzbqxJhhzNKAoGLhNktw,9004
- mteb/models/model_implementations/colqwen_models.py,sha256=6upaxe19V8j5Ayu03Dgj5jPtC8SJBCITK_RionJRMSE,15545
+ mteb/models/model_implementations/colpali_models.py,sha256=l-0A3J5rt1bhhTKFPQ3Ti0qvWf2qXYkiv3j1si04R8I,9108
+ mteb/models/model_implementations/colqwen_models.py,sha256=wxR3sqyzObuXMlm1QLoFopJK3ZpQTzd3ZB5IrkzPfZk,15553
  mteb/models/model_implementations/colsmol_models.py,sha256=O2M7Ksydh94M_Iax4KytHb-wOL18N0BIYLKSsLF8BFs,2967
  mteb/models/model_implementations/conan_models.py,sha256=G-s7xo9VtNX-f7lWKtYVGHHiMMN0Xp44PlNIp7E0LAo,6502
  mteb/models/model_implementations/dino_models.py,sha256=QFgaFHR5YKrylqJGSljXCBn2W7qHhmF6KdXkvHrQNEI,16380
@@ -1487,18 +1487,18 @@ mteb/models/model_implementations/evaclip_models.py,sha256=cPMGYLDIq4s8zJxb4vPXq
  mteb/models/model_implementations/fa_models.py,sha256=WGal70_ezITWoNdjcMdbOCTSCtoaXzuPadYstLVXxhg,7478
  mteb/models/model_implementations/facebookai.py,sha256=uhE6rB1YgxE0SIc7u8heE1U62qRFFA23IMgpjxBq_Ok,3116
  mteb/models/model_implementations/geogpt_models.py,sha256=Juv86SwhgQX80lVLjAFtim2aSiJT1AcgjniyyiKyk1Q,1923
- mteb/models/model_implementations/gme_v_models.py,sha256=NkfgR3_UdZzoBt1NnalVou6LOR-F7qXM4by9EbAVrys,13568
+ mteb/models/model_implementations/gme_v_models.py,sha256=GEu1wl5q77RMM3BwtKMjkMwm38KX_r0qWxD_IEMVC2U,13657
  mteb/models/model_implementations/google_models.py,sha256=7QfsaJ5JNDRQxFl7Zh2AtiR2PR7PZcfeCBgviuOFBCo,9130
- mteb/models/model_implementations/granite_vision_embedding_models.py,sha256=uqQ5-e_a-ADv3gf3sR9Drk0S4x8Gy8mZkpL-E4X16TM,7241
+ mteb/models/model_implementations/granite_vision_embedding_models.py,sha256=cvG5NliPwDVMvGuJTo8rk5yL3m6cuJZ_fMLEc0ESNfc,7315
  mteb/models/model_implementations/gritlm_models.py,sha256=aS_CuioL95JAQMYiaKlGuAWU9wZjabn268Xut3bD8-w,3005
  mteb/models/model_implementations/gte_models.py,sha256=o26Xyu_tucUlP435Q_jB4-bl0xckgj4wtbutTwhYgIo,10073
  mteb/models/model_implementations/hinvec_models.py,sha256=I_d_dSNVaGIwMIwyvTlaPAzGMpwh_PzvsfE4y47GFyg,1575
  mteb/models/model_implementations/human.py,sha256=klMpuMAtYH92EIEwNMEhne_Baf9fNiTg1DNWYD11P44,532
  mteb/models/model_implementations/ibm_granite_models.py,sha256=YCT0jbgawy19ps5l8QlxpQoJLjq8Nh-3R-e6yxS0DRM,7902
  mteb/models/model_implementations/inf_models.py,sha256=lvXUFhAYDltq2_Xa9MHcwfhh1V20rbJLSgON76tkj6w,2906
- mteb/models/model_implementations/jasper_models.py,sha256=ZY7qRRpBpD3eVryQb4rLs5E3KDXlgFBvyelataqLIWs,16213
+ mteb/models/model_implementations/jasper_models.py,sha256=onX_ipI-UZbaZrjcHpZtk34tpy6DcT6Yvq6X3RMSmYA,16211
  mteb/models/model_implementations/jina_clip.py,sha256=CfiIxbhKspjQajNtObCfGPHOWPk6uLn4cuwydQHFTMo,5118
- mteb/models/model_implementations/jina_models.py,sha256=HrHm2Io3g9gHwxU5icAaudy_E8rAVkAAIFSzVYWF-dM,34859
+ mteb/models/model_implementations/jina_models.py,sha256=1bkGwIaRNIun2ghkWb4FG-7js4lJ39s97Q9KAW3wkXo,34858
  mteb/models/model_implementations/kalm_models.py,sha256=FmW7Z5Qs6WYBLuKvql3u4IJW36kj4k-Ypah8qTBEBkg,59837
  mteb/models/model_implementations/kblab.py,sha256=DDh8gDEI6YPjS4_yGYWC4HatE0mFf7vhGDU83zzV7V0,866
  mteb/models/model_implementations/kennethenevoldsen_models.py,sha256=DF-9nmsewYO9ikZ0kV81ujKGr7Ot36-9iPoxN7KX2mY,2993
@@ -1521,8 +1521,8 @@ mteb/models/model_implementations/mxbai_models.py,sha256=33ta2BnhvKYBUgE89wFgPNf
  mteb/models/model_implementations/nbailab.py,sha256=bqqR0qs10IH2g5HC6K962tDMBciw8qFsNVHADNS72jk,2396
  mteb/models/model_implementations/no_instruct_sentence_models.py,sha256=6i-xbLRRNKuDpU-hwklwdQjgu1wnz5CecLSoc6kyd7Q,3976
  mteb/models/model_implementations/nomic_models.py,sha256=mT-v5Gs5-sRH8-ziCw_CtxB9ox3C6FtwWJjNghNrunw,11334
- mteb/models/model_implementations/nomic_models_vision.py,sha256=gEEieMThvw4p-QhRH0G_9-WWTvj-jqOlgFsh6O07dbc,6731
- mteb/models/model_implementations/nvidia_llama_nemoretriever_colemb.py,sha256=14XSv7wGsitu0cF8P3A951gel_Py7PrKlRixkLS4qG4,6203
+ mteb/models/model_implementations/nomic_models_vision.py,sha256=6aca0XVLXnkGk6GW8jVCIbbjPGq98lKq4c9Az4jbEkE,6805
+ mteb/models/model_implementations/nvidia_llama_nemoretriever_colemb.py,sha256=OEhVrvA-zfX2PSm76VcCDPkRyAArSFkVeweyLyzpqPI,6255
  mteb/models/model_implementations/nvidia_models.py,sha256=acVverAt77lURkILCVkCdXsWgY1BJoG1-ugB7yIhlIM,21555
  mteb/models/model_implementations/openai_models.py,sha256=loU6JByNUwRidq7lmcu8iGOtUQvzejw6HVLaF_IKCR0,9352
  mteb/models/model_implementations/openclip_models.py,sha256=W8XcokgLU1nSmMaWpYXkWWizVd3sQezcP02YtF2fXpo,11436
@@ -1532,12 +1532,12 @@ mteb/models/model_implementations/ordalietech_solon_embeddings_mini_beta_1_1.py,
  mteb/models/model_implementations/pawan_models.py,sha256=rV2ePGIuYroocvwqDXm4VU369Y_Vr67CyAE-08K5B9c,1151
  mteb/models/model_implementations/piccolo_models.py,sha256=d8Dtkv_ZTUOCmJLLOuwquq-gX-2UfKvAtl_LvAS0Xi0,2113
  mteb/models/model_implementations/promptriever_models.py,sha256=S7uWes_P74p3OZR_KBJHJN_ezlvvRx2__46DMCWqV5M,6328
- mteb/models/model_implementations/pylate_models.py,sha256=yINGQL97S4xjj74-FTWpO4KHX-E9NDOEeyQWyRmmnaE,14772
+ mteb/models/model_implementations/pylate_models.py,sha256=oNoPndZuiJahSd-ikR4dE4vL9261btXYiJbF3bk3Dco,14546
  mteb/models/model_implementations/qodo_models.py,sha256=JDqffDlQiOEariyheybOIf3iNkqot2gTkEIHWDnRbUE,2037
  mteb/models/model_implementations/qtack_models.py,sha256=biZLH5E3UWIcMZXIZNGgBZFEUvovPpAo6vUyL776W1w,1224
  mteb/models/model_implementations/qwen3_models.py,sha256=F_o6ciD-6gLFfIlQYD9MsNvcbkmGzJ39eKpFlEog1rM,5132
  mteb/models/model_implementations/qzhou_models.py,sha256=7KaZpHdap-YyK0QxOMHxU0W2aGismx7GZv_bNXkEOcI,3536
- mteb/models/model_implementations/random_baseline.py,sha256=1VNnWBSi0Ph_RLON6clOuQI-Kli5BRtiiDFZMrTj7PM,7489
+ mteb/models/model_implementations/random_baseline.py,sha256=z4xNs5fbH1HUZhtf3Ry5AKa264SWk2Y4eobRu8rmPKM,7563
  mteb/models/model_implementations/rasgaard_models.py,sha256=a8F3kDSBWHH0UR7wRioOrWGQUxtloD5mU7EG27iM-68,1260
  mteb/models/model_implementations/reasonir_model.py,sha256=wSCcJpUgZ0pG2g3vTEzYNmPlPG_CVn_rR0ENVCines0,2218
  mteb/models/model_implementations/repllama_models.py,sha256=89HoqEpzkNysHeuf_-YhU8WETamHTogSRztGIRo6G1s,7321
@@ -1550,7 +1550,7 @@ mteb/models/model_implementations/salesforce_models.py,sha256=KslTK-IKeLvNG-vQir
  mteb/models/model_implementations/samilpwc_models.py,sha256=oMwKNwCxoH1jZgCy04oo2oVlBZWu253QMpnEEC6emz8,2021
  mteb/models/model_implementations/sarashina_embedding_models.py,sha256=TSmr2FEX79mJTA9mbEV3meEZYSelGv58Veiw__TTGFM,8415
  mteb/models/model_implementations/searchmap_models.py,sha256=XvVl99emIgnNUCxkTuFQXW6py2R8vgsArfpyHveCugw,1904
- mteb/models/model_implementations/seed_1_6_embedding_models.py,sha256=8J3htEddltyGTydIbnMUudgAV97FdD43-SQKaSA_Iuc,18534
+ mteb/models/model_implementations/seed_1_6_embedding_models.py,sha256=Q8JTW2fjePR9dq4spuwK2lyVeL3mn1bl-H5wkQuEV_E,18609
  mteb/models/model_implementations/seed_models.py,sha256=SgK4kPVO6V33G3F1zSq06zSkWarPLEwBt1SWp4TUoVw,14142
  mteb/models/model_implementations/sentence_transformers_models.py,sha256=EtEaXg1yFFp3DQEOxu6am8bcVQR-ypcHj6DCqJGHOVU,21160
  mteb/models/model_implementations/shuu_model.py,sha256=KkcuVYjIzoha3Fvxh8ppqHQ9BfNMWeqDqn9dGCRKUjg,1167
@@ -1567,7 +1567,7 @@ mteb/models/model_implementations/vi_vn_models.py,sha256=quWmd3JT2J6SlAsFrV2gcnc
  mteb/models/model_implementations/vista_models.py,sha256=Q3I01kRtIPaoke0iMIcH4CLcCDTnMSIBFNCof7LPTX4,10832
  mteb/models/model_implementations/vlm2vec_models.py,sha256=HGGy_-z9Wc99xOKum71rBNipCPqWcM1efmmXgy5Rvxc,11724
  mteb/models/model_implementations/voyage_models.py,sha256=dOCccOQlloGrg0q44PxMQzx8dHuQ8VgkDUD01EydpJ0,19824
- mteb/models/model_implementations/voyage_v.py,sha256=WnvwYNVv3c5K0ChzGA3v2iTQX2ekXYn_3402H721c6M,8088
+ mteb/models/model_implementations/voyage_v.py,sha256=vT1MXCt6-_PWA9U7lNz-Qj2zyGHwm_79WqxH4elMm90,8162
  mteb/models/model_implementations/xyz_models.py,sha256=TePlrH6EHwRPO87U_J3Yce9-XHCn_X7I2cJ_6BZ2fUY,1296
  mteb/models/model_implementations/youtu_models.py,sha256=NB74E6z-_36HyXb8GXKn8CrmRLN68uX9eH4xcS57zl0,5938
  mteb/models/model_implementations/yuan_models.py,sha256=yZ6ki6YFaoVrJ_2pPSRQaMKOsIOUo3GtmhPx1qeUl2w,939
@@ -2577,14 +2577,14 @@ mteb/tasks/zeroshot_classification/eng/sun397.py,sha256=Nls7tXM2Svu008MmAUjt-o_N
  mteb/tasks/zeroshot_classification/eng/ucf101.py,sha256=kwNRYks-_Oe4VE3GyoHIvN-2OJ6zhkwFr76WDNL9ymU,1884
  mteb/tasks/zeroshot_classification/eng/templates/__init__.py,sha256=da1PTClDMl-IBkrSvq6JC1lnS-K_BASzCvxVhNxN5Ls,13
  mteb/types/__init__.py,sha256=7_q6_84RvMuHeZK51GbLc5gbpTb3C1WmnqDHm6bnCzw,1104
- mteb/types/_encoder_io.py,sha256=YlIthHwi0o-ObANsWwVPcQc4pA1wpiRwIewDjNTMAdQ,5461
+ mteb/types/_encoder_io.py,sha256=Q7llxv3FfiExFKiQGHtATvbSk4_DwdJolLMPTnAPrrI,5536
  mteb/types/_metadata.py,sha256=NN-W0S6a5TDV7UkpRx1pyWtGF4TyyCyoPUfHOwdeci8,2290
  mteb/types/_result.py,sha256=CRAUc5IvqI3_9SyXDwv-PWLCXwXdZem9RePeYESRtuw,996
  mteb/types/_string_validators.py,sha256=PY-dYq4E8O50VS3bLYdldPWp400fl_WzUjfVSkNWe8U,523
  mteb/types/statistics.py,sha256=YwJsxTf1eaCI_RE-J37a-gK5wDeGAsmkeZKoZCFihSo,3755
- mteb-2.3.10.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- mteb-2.3.10.dist-info/METADATA,sha256=IPpkXC-YeiZU0BtiAnv-e9aS8X99_uAsGYxCCIz7nr4,13924
- mteb-2.3.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- mteb-2.3.10.dist-info/entry_points.txt,sha256=8IJoEJFKoDHmVnNev-qJ9pp4Ln7_1-ma9QsXnzVCzGU,39
- mteb-2.3.10.dist-info/top_level.txt,sha256=OLVIjcQAlWBz0bdmutKlWHLF42FF0hp4uVAg3ZyiG4U,5
- mteb-2.3.10.dist-info/RECORD,,
+ mteb-2.3.11.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ mteb-2.3.11.dist-info/METADATA,sha256=zK0XHgO0btF1XS2eXGROlNeh8jCSj6dQV4NAT3N_Hn8,13991
+ mteb-2.3.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ mteb-2.3.11.dist-info/entry_points.txt,sha256=8IJoEJFKoDHmVnNev-qJ9pp4Ln7_1-ma9QsXnzVCzGU,39
+ mteb-2.3.11.dist-info/top_level.txt,sha256=OLVIjcQAlWBz0bdmutKlWHLF42FF0hp4uVAg3ZyiG4U,5
+ mteb-2.3.11.dist-info/RECORD,,