kaiko-eva 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kaiko-eva might be problematic.
Files changed (90)
  1. eva/core/data/dataloaders/__init__.py +2 -1
  2. eva/core/data/dataloaders/collate_fn/__init__.py +5 -0
  3. eva/core/data/dataloaders/collate_fn/collate.py +24 -0
  4. eva/core/data/dataloaders/dataloader.py +4 -0
  5. eva/core/interface/interface.py +34 -1
  6. eva/core/metrics/defaults/classification/multiclass.py +45 -35
  7. eva/core/models/modules/__init__.py +2 -1
  8. eva/core/models/modules/scheduler.py +51 -0
  9. eva/core/models/transforms/extract_cls_features.py +1 -1
  10. eva/core/models/transforms/extract_patch_features.py +1 -1
  11. eva/core/models/wrappers/base.py +17 -14
  12. eva/core/models/wrappers/from_function.py +5 -4
  13. eva/core/models/wrappers/from_torchhub.py +5 -6
  14. eva/core/models/wrappers/huggingface.py +8 -5
  15. eva/core/models/wrappers/onnx.py +4 -4
  16. eva/core/trainers/functional.py +40 -43
  17. eva/core/utils/factory.py +66 -0
  18. eva/core/utils/registry.py +42 -0
  19. eva/core/utils/requirements.py +26 -0
  20. eva/language/__init__.py +13 -0
  21. eva/language/data/__init__.py +5 -0
  22. eva/language/data/datasets/__init__.py +9 -0
  23. eva/language/data/datasets/classification/__init__.py +7 -0
  24. eva/language/data/datasets/classification/base.py +63 -0
  25. eva/language/data/datasets/classification/pubmedqa.py +149 -0
  26. eva/language/data/datasets/language.py +13 -0
  27. eva/language/models/__init__.py +25 -0
  28. eva/language/models/modules/__init__.py +5 -0
  29. eva/language/models/modules/text.py +85 -0
  30. eva/language/models/modules/typings.py +16 -0
  31. eva/language/models/wrappers/__init__.py +11 -0
  32. eva/language/models/wrappers/huggingface.py +69 -0
  33. eva/language/models/wrappers/litellm.py +77 -0
  34. eva/language/models/wrappers/vllm.py +149 -0
  35. eva/language/utils/__init__.py +5 -0
  36. eva/language/utils/str_to_int_tensor.py +95 -0
  37. eva/vision/data/dataloaders/__init__.py +2 -1
  38. eva/vision/data/dataloaders/worker_init.py +35 -0
  39. eva/vision/data/datasets/__init__.py +5 -5
  40. eva/vision/data/datasets/segmentation/__init__.py +4 -4
  41. eva/vision/data/datasets/segmentation/btcv.py +3 -0
  42. eva/vision/data/datasets/segmentation/consep.py +5 -4
  43. eva/vision/data/datasets/segmentation/lits17.py +231 -0
  44. eva/vision/data/datasets/segmentation/metadata/__init__.py +1 -0
  45. eva/vision/data/datasets/segmentation/metadata/_msd_task7_pancreas.py +287 -0
  46. eva/vision/data/datasets/segmentation/msd_task7_pancreas.py +243 -0
  47. eva/vision/data/datasets/segmentation/total_segmentator_2d.py +1 -1
  48. eva/vision/data/transforms/__init__.py +11 -2
  49. eva/vision/data/transforms/base/__init__.py +5 -0
  50. eva/vision/data/transforms/base/monai.py +27 -0
  51. eva/vision/data/transforms/common/__init__.py +2 -1
  52. eva/vision/data/transforms/common/squeeze.py +24 -0
  53. eva/vision/data/transforms/croppad/__init__.py +4 -0
  54. eva/vision/data/transforms/croppad/rand_crop_by_label_classes.py +74 -0
  55. eva/vision/data/transforms/croppad/rand_crop_by_pos_neg_label.py +6 -2
  56. eva/vision/data/transforms/croppad/rand_spatial_crop.py +89 -0
  57. eva/vision/data/transforms/intensity/rand_scale_intensity.py +6 -2
  58. eva/vision/data/transforms/intensity/rand_shift_intensity.py +8 -4
  59. eva/vision/models/modules/semantic_segmentation.py +18 -7
  60. eva/vision/models/networks/backbones/__init__.py +2 -3
  61. eva/vision/models/networks/backbones/_utils.py +1 -1
  62. eva/vision/models/networks/backbones/pathology/bioptimus.py +4 -4
  63. eva/vision/models/networks/backbones/pathology/gigapath.py +2 -2
  64. eva/vision/models/networks/backbones/pathology/histai.py +3 -3
  65. eva/vision/models/networks/backbones/pathology/hkust.py +2 -2
  66. eva/vision/models/networks/backbones/pathology/kaiko.py +7 -7
  67. eva/vision/models/networks/backbones/pathology/lunit.py +3 -3
  68. eva/vision/models/networks/backbones/pathology/mahmood.py +3 -3
  69. eva/vision/models/networks/backbones/pathology/owkin.py +3 -3
  70. eva/vision/models/networks/backbones/pathology/paige.py +3 -3
  71. eva/vision/models/networks/backbones/radiology/swin_unetr.py +2 -2
  72. eva/vision/models/networks/backbones/radiology/voco.py +5 -5
  73. eva/vision/models/networks/backbones/registry.py +2 -44
  74. eva/vision/models/networks/backbones/timm/backbones.py +2 -2
  75. eva/vision/models/networks/backbones/universal/__init__.py +8 -1
  76. eva/vision/models/networks/backbones/universal/vit.py +53 -3
  77. eva/vision/models/networks/decoders/segmentation/decoder2d.py +1 -1
  78. eva/vision/models/networks/decoders/segmentation/linear.py +1 -1
  79. eva/vision/models/networks/decoders/segmentation/semantic/common.py +2 -2
  80. eva/vision/models/networks/decoders/segmentation/typings.py +1 -1
  81. eva/vision/models/wrappers/from_registry.py +14 -9
  82. eva/vision/models/wrappers/from_timm.py +6 -5
  83. {kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/METADATA +10 -2
  84. {kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/RECORD +88 -57
  85. {kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/WHEEL +1 -1
  86. eva/vision/data/datasets/segmentation/lits.py +0 -199
  87. eva/vision/data/datasets/segmentation/lits_balanced.py +0 -94
  88. /eva/vision/data/datasets/segmentation/{_total_segmentator.py → metadata/_total_segmentator.py} +0 -0
  89. {kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/entry_points.txt +0 -0
  90. {kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/licenses/LICENSE +0 -0

eva/vision/models/wrappers/from_timm.py

@@ -4,12 +4,13 @@ from typing import Any, Callable, Dict, Tuple
 from urllib import parse
 
 import timm
+import torch
 from typing_extensions import override
 
-from eva.core.models import wrappers
+from eva.core.models.wrappers import base
 
 
-class TimmModel(wrappers.BaseModel):
+class TimmModel(base.BaseModel[torch.Tensor, torch.Tensor]):
     """Model wrapper for `timm` models.
 
     Note that only models with `forward_intermediates`
@@ -23,7 +24,7 @@ class TimmModel(wrappers.BaseModel):
         checkpoint_path: str = "",
         out_indices: int | Tuple[int, ...] | None = None,
         model_kwargs: Dict[str, Any] | None = None,
-        tensor_transforms: Callable | None = None,
+        transforms: Callable | None = None,
     ) -> None:
         """Initializes the encoder.
 
@@ -34,10 +35,10 @@ class TimmModel(wrappers.BaseModel):
             out_indices: Returns last n blocks if `int`, all if `None`, select
                 matching indices if sequence.
            model_kwargs: Extra model arguments.
-            tensor_transforms: The transforms to apply to the output tensor
+            transforms: The transforms to apply to the output tensor
                 produced by the model.
        """
-        super().__init__(tensor_transforms=tensor_transforms)
+        super().__init__(transforms=transforms)
 
         self._model_name = model_name
         self._pretrained = pretrained
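
The user-visible change in this file is the keyword rename from `tensor_transforms` to `transforms` (plus the generic `base.BaseModel[torch.Tensor, torch.Tensor]` parent). Below is a minimal, hedged usage sketch of the new keyword, assuming only the constructor arguments visible in the hunks above; the model identifier and the no-op callable are placeholders, not values taken from the package.

# Hedged sketch: constructor arguments inferred from the hunks above only.
from eva.vision.models.wrappers.from_timm import TimmModel

model = TimmModel(
    model_name="vit_small_patch16_224",   # hypothetical timm identifier
    pretrained=False,                     # assumed keyword (set in __init__ body above)
    transforms=lambda features: features, # no-op stand-in; was `tensor_transforms=` in 0.2.2
)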

{kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: kaiko-eva
-Version: 0.2.2
+Version: 0.3.1
 Summary: Evaluation Framework for oncology foundation models.
 Keywords: machine-learning,evaluation-framework,oncology,foundation-models
 Author-Email: Ioannis Gatopoulos <ioannis@kaiko.ai>, =?utf-8?q?Nicolas_K=C3=A4nzig?= <nicolas@kaiko.ai>, Roman Moser <roman@kaiko.ai>
@@ -242,6 +242,9 @@ Requires-Dist: imagesize>=1.4.1; extra == "vision"
 Requires-Dist: scipy>=1.14.0; extra == "vision"
 Requires-Dist: monai>=1.3.2; extra == "vision"
 Requires-Dist: einops>=0.8.1; extra == "vision"
+Provides-Extra: language
+Requires-Dist: datasets<4.0.0,>=2.19.0; extra == "language"
+Requires-Dist: litellm>=1.61.8; extra == "language"
 Provides-Extra: all
 Requires-Dist: h5py>=3.10.0; extra == "all"
 Requires-Dist: nibabel>=4.0.1; extra == "all"
@@ -255,6 +258,8 @@ Requires-Dist: imagesize>=1.4.1; extra == "all"
 Requires-Dist: scipy>=1.14.0; extra == "all"
 Requires-Dist: monai>=1.3.2; extra == "all"
 Requires-Dist: einops>=0.8.1; extra == "all"
+Requires-Dist: datasets<4.0.0,>=2.19.0; extra == "all"
+Requires-Dist: litellm>=1.61.8; extra == "all"
 Description-Content-Type: text/markdown
 
 <div align="center">
@@ -293,7 +298,7 @@ Check out the [documentation](https://kaiko-ai.github.io/eva/) for more informat
 
 ### Highlights:
 - Easy and reliable benchmark of Oncology FMs
-- Supports patch-level classification, slide-level classification and semantic segmentation downstream tasks
+- Supports patch-level classification, slide-level classification, semantic segmentation, and text classification downstream tasks
 - Automatic embedding inference and evaluation of a downstream task
 - Native support of popular medical [datasets](https://kaiko-ai.github.io/eva/dev/datasets/) and models
 - Produce statistics over multiple evaluation fits and multiple metrics
@@ -308,6 +313,9 @@ pip install kaiko-eva
 # to install the expanded `vision` version
 pip install 'kaiko-eva[vision]'
 
+# to install the expanded `language` version
+pip install 'kaiko-eva[language]'
+
 # to install everything
 pip install 'kaiko-eva[all]'
 ```
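
The metadata above adds a `language` extra that pulls in `datasets` and `litellm`, and the README gains the matching install command. A quick, hedged sanity check that the new subpackage is importable; the module path comes from the RECORD diff below, everything beyond the import is an assumption.

# Hedged check after `pip install 'kaiko-eva[language]'`:
# the RECORD below lists eva/language/__init__.py, so the subpackage should
# import once the extra's dependencies (datasets, litellm) are installed.
import eva.language

print(eva.language.__name__)  # expected: "eva.language"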

{kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/RECORD

@@ -16,8 +16,10 @@ eva/core/cli/cli.py,sha256=AZ4B4OP3D2af9H2RYBd5nxoy5I9DlaClZaadSWpPEPI,422
 eva/core/cli/logo.py,sha256=x6-vGWI0s9gza-xxQrBDi2wneb2wFU_mQGHgpAiq2MQ,786
 eva/core/cli/setup.py,sha256=kR-7l4X5Hu8kSLoQZGYGIeLXtn9S_EU52dauDy6fm0w,2663
 eva/core/data/__init__.py,sha256=yG3BeOWhp1EjVYMFqx8M_TBWFDyfIwwksQGQmMdSPaI,340
-eva/core/data/dataloaders/__init__.py,sha256=fbNClVZ8J3QoGi4qiPq635ig1j9GdI7six3RhfwDbjY,110
-eva/core/data/dataloaders/dataloader.py,sha256=Ek_OqlOuQSiipFjBJ39DFyWvW3CdqAB2bufOcEs0ChU,2525
+eva/core/data/dataloaders/__init__.py,sha256=0AvpsPOdh4NX5rwkX9Th1M_rzxZVVzTPTdC5oTGFd5w,194
+eva/core/data/dataloaders/collate_fn/__init__.py,sha256=CfSAVrPD36shpyYAkob2ny05VWymb95MutawQcZkbuo,134
+eva/core/data/dataloaders/collate_fn/collate.py,sha256=oBdxaDCIaXBB6H8LB90Qsi2Inw1tyHGF4kAFBINPOeQ,689
+eva/core/data/dataloaders/dataloader.py,sha256=amRIwbmI6Hi2qS-ciKkv7ehgkBbMCHx7XUaEMlttWkw,2688
 eva/core/data/datamodules/__init__.py,sha256=qZchYbgxo9lxYnGoqdk0C6MfS2IbF0WItO0kCdP9Mqc,229
 eva/core/data/datamodules/call.py,sha256=jjj9w3UXYuQB-qyCcw1EZpRJW10OC1I3dvgvsuQWLck,940
 eva/core/data/datamodules/datamodule.py,sha256=_pK59oXDe53oDkmv6eoJUvfl44WlFkrbC8KXSRMs_20,5533
@@ -46,7 +48,7 @@ eva/core/data/transforms/padding/pad_2d_tensor.py,sha256=J4maGFmeQf9IHRxt5kU-6eI
 eva/core/data/transforms/sampling/__init__.py,sha256=BFKbvRjlZrwS0GcNrM54ZSWt6PrQARfFlXM1jJ-wpvo,149
 eva/core/data/transforms/sampling/sample_from_axis.py,sha256=Zbhp94lVa70WQKmSOKMTsOMe2c7wLqNZto7JqWhSdtI,1229
 eva/core/interface/__init__.py,sha256=chdpKXipxe1NP-Fgr_d9r6X1gMna0XiEa38waJ6FzTM,98
-eva/core/interface/interface.py,sha256=GzjneNHhTIEuLbydUG9cSmpHjJ4_IENGM-glN8RaRxY,2741
+eva/core/interface/interface.py,sha256=VNagHKsr2T7Ufm1rpA0NCnMi6F2qpKEmMBq_3eGZIRE,3826
 eva/core/loggers/__init__.py,sha256=4YMLNlN9LnuKqhBI1R1keh69dmMD-2lcH3HKwwyn380,266
 eva/core/loggers/dummy.py,sha256=Y7ypH0ecSAIkkZ5LzTmNNEzlKkqeaHfUNMCDKVOg6D4,1204
 eva/core/loggers/experimental_loggers.py,sha256=p5uCK_9QCYufRhE-LZQUJWbhGElyobX_zRM78yX4p2o,230
@@ -64,7 +66,7 @@ eva/core/metrics/binary_balanced_accuracy.py,sha256=MabsXAtVfLqSaSIIpE0HIM6bo8uR
 eva/core/metrics/defaults/__init__.py,sha256=IeqLcoxLNddtuuFao5r85ZVxTyFP6iFsj1K8iXCTSes,255
 eva/core/metrics/defaults/classification/__init__.py,sha256=xMzE4yV8NoUdcmk2FCKohEUav1GJcxYn60S1KNgXbJY,316
 eva/core/metrics/defaults/classification/binary.py,sha256=9ll6ZOcNGQdsg7ln9DAQ0u-OzsXSzEbueXe-dVJkJZ8,2322
-eva/core/metrics/defaults/classification/multiclass.py,sha256=8Aesy_rKtp4KxfXJtDCmk6FsGxIFS4Ywu2CH1VIRL7M,2518
+eva/core/metrics/defaults/classification/multiclass.py,sha256=i5Oimxps3kjIwGAHKIdylOPJUUkzo6_wfG5JAw6vyAc,2796
 eva/core/metrics/structs/__init__.py,sha256=cvn7E4k5vJmpwJj_zezmtZa_Nl_RddDM1G-MO8TP0po,422
 eva/core/metrics/structs/collection.py,sha256=bNfCekHN8pzD49-YTqVxrmxFtiQfNxnv-RwkxCL6rbc,149
 eva/core/metrics/structs/metric.py,sha256=zdnE0ZVTSYAMl7rW_OL6e1XiZDvLTirYqV0lgJCleXY,109
@@ -72,10 +74,11 @@ eva/core/metrics/structs/module.py,sha256=pHpIAt5HQDoYWvyFXxYTZleTKMW1iaTCgwAkty
 eva/core/metrics/structs/schemas.py,sha256=ZaSrx0j_NfIwT7joMUD1LyrKdAXTLaeSzWYTHDsc6h0,1641
 eva/core/metrics/structs/typings.py,sha256=qJd-FiD2IhJgBeo8FyP0vpVUIH4RKb1k6zYvHtjUA04,388
 eva/core/models/__init__.py,sha256=T6Fo886LxMj-Y58_ylzkPkFSnFR2aISiMIbuO_weC4s,430
-eva/core/models/modules/__init__.py,sha256=QJWJ42BceXZBzDGgk5FHBcCaRrB9egTFKVF6gDsBYfM,255
+eva/core/models/modules/__init__.py,sha256=_4VjN9Qs4_mftEVgt0KqVxCRAtDcKYbUnBA3Ox1Pzis,350
 eva/core/models/modules/head.py,sha256=bZ45RBPi3N8sjvKyt2_TeKWI1eB6GyBeGzV6J11ERO8,5225
 eva/core/models/modules/inference.py,sha256=ih-0Rr2oNf2N6maiXPOW7XH5KVwUT1_MOxnJKOhJ1uQ,978
 eva/core/models/modules/module.py,sha256=LtjYxTZb7UY0owonmt_yQ5EySw3sX-xD9HLN2io8EK4,6697
+eva/core/models/modules/scheduler.py,sha256=orv5TDq9vEItxE7rTj0sQlzdChpx5jHx-3ypnHjUyr4,1650
 eva/core/models/modules/typings.py,sha256=LPR8JdIid2gJZpjMG1FcH5OZ60JlFOj_LupIh__2k_8,803
 eva/core/models/modules/utils/__init__.py,sha256=ScLCHwQfzlg_UsHVi5sf_SavUkh9secwtRn_umC_qA8,325
 eva/core/models/modules/utils/batch_postprocess.py,sha256=RwnDcjJy3uvVirpgx_80Q2CUYKfJKipVwjyX7AF2CKw,3088
@@ -85,23 +88,24 @@ eva/core/models/networks/__init__.py,sha256=yqx6UmG1Eg3vb1O_tnK_axnJWabEl9ULkDWi
 eva/core/models/networks/mlp.py,sha256=thk-x4pviE3fCaMW9k3I2Oe5_DxfC-CqUrtolvVdXug,2418
 eva/core/models/transforms/__init__.py,sha256=AOy_2VY3ITLRk2PMqe6xfErvV7V2_XsnPQwEMhovxOU,333
 eva/core/models/transforms/as_discrete.py,sha256=1w2NmcLzEuyPhaoVXl6jZTdblk7DPf6W6gQ_qi96hQM,1901
-eva/core/models/transforms/extract_cls_features.py,sha256=tFRd4H-eGFIGCfZt6wuZGibDmAoNXKSsn15bBw0IDdc,1482
-eva/core/models/transforms/extract_patch_features.py,sha256=k50jTLPWxbfvciH9QZSzTAGqWwDSVpXAteme_Qg2d6E,2202
+eva/core/models/transforms/extract_cls_features.py,sha256=A-oNNiGYBkMLK_E9DrzJUJuTZETERX75w0lg35-rj0g,1498
+eva/core/models/transforms/extract_patch_features.py,sha256=5UsHjZnh3Uk2jAfi3GLtxoJ14ALTrfVoWUy8nTspRDY,2218
 eva/core/models/wrappers/__init__.py,sha256=jaiANQdbO-IPgH8U-Y0ftFsuuCAM5i5KuYRHauKw5k8,450
 eva/core/models/wrappers/_utils.py,sha256=ZWe9Ih_0kH5Wg_AQAtAn77LZ_CODAve5u3G12ifLNsc,4902
-eva/core/models/wrappers/base.py,sha256=xKMUSXk93wI67p_wmh7jujK-bxvIefO1noYaAJN_5Ak,1359
-eva/core/models/wrappers/from_function.py,sha256=_vKBwtfilCNCnOaJTofE6l5bM2K3qJ8GyBT-0CM5FXY,1831
-eva/core/models/wrappers/from_torchhub.py,sha256=OAImGKRG4pfDXHsoriykC_iiO8QvK3nAWnQCE0mIGuk,3285
-eva/core/models/wrappers/huggingface.py,sha256=5CoNieivdjwvoawo7dZtWfYZkW-Mey1j0EjazuxDaqU,1302
-eva/core/models/wrappers/onnx.py,sha256=-iV-IlczTvTTEQuJycZeSVWdSl2kVJXc1eeRLgQQZ7Q,1834
+eva/core/models/wrappers/base.py,sha256=S3LGbb_wo2ZTeDvoLvSlaymBsRN6UAuKrCmVAGasHWY,1452
+eva/core/models/wrappers/from_function.py,sha256=LVnfW3bL1mRtO8kL1D0MLJOiAiyiHkM7RX6-JIJzI0E,1844
+eva/core/models/wrappers/from_torchhub.py,sha256=-saKMxYq4KvVpaJL6BdHmUcEYHuaNvm2-0O_eQMg3GA,3250
+eva/core/models/wrappers/huggingface.py,sha256=-_fA81YRnoMc7O7SbrnCEj1dM_xArpQ8WdsZk9z_f1E,1449
+eva/core/models/wrappers/onnx.py,sha256=34li_xSwPryN8nJDrFyif_Hve1AEH7Ry9E_lZmf7JJM,1834
 eva/core/trainers/__init__.py,sha256=jhsKJF7HAae7EOiG3gKIAHH_h3dZlTE2JRcCHJmOzJc,208
 eva/core/trainers/_logging.py,sha256=gi4FqPy2GuVmh0WZY6mYwF7zMPvnoFA050B0XdCP6PU,2571
 eva/core/trainers/_recorder.py,sha256=uD17l_WVveFuWuann59VU9iJ-Jumdh9F6vnAcL3M_FU,7855
 eva/core/trainers/_utils.py,sha256=M3h8lVhUmkeSiEXpX9hRdMvThGFCnTP15gv-hd1CZkc,321
-eva/core/trainers/functional.py,sha256=rLtQZw8TcAa4NYIf901TmoQiJDNm4RGVLN-64nku3Jo,4445
+eva/core/trainers/functional.py,sha256=tsBfpXjEQ8BiBJ9wZWp0AUUOOxy7UUrLX4GSjQZTeCs,4510
 eva/core/trainers/trainer.py,sha256=a3OwLWOZKDqxayrd0ugUmxJKyQx6XDb4GHtdL8-AEV0,4826
 eva/core/utils/__init__.py,sha256=cndVBvtYxEW7hykH39GCNVI86zkXNn8Lw2A0sUJHS04,237
 eva/core/utils/clone.py,sha256=qcThZOuAs1cs0uV3BL5eKeM2VIBjuRPBe1t-NiUFM5Y,569
+eva/core/utils/factory.py,sha256=upWPWkWW7p3_ZoePAzbkkZvEPiAY4OaOQkwNvh0oa-E,2397
 eva/core/utils/io/__init__.py,sha256=Py03AmoxhmTHkro6CzNps27uXKkXPzdA18mG97xHhWI,172
 eva/core/utils/io/dataframe.py,sha256=CIHFowljH17waDkJ9YJVEVXAIcxMwoLjUgoBttiNk8w,509
 eva/core/utils/io/gz.py,sha256=xxDkOUV2TFEK8pT7j6S_6iSzUUUmXN-sTum-gRuhij0,919
@@ -110,8 +114,27 @@ eva/core/utils/multiprocessing.py,sha256=BWX8AW_KPLgIIlbsPG1kYdtbHPx6Dklw13bu4u8
 eva/core/utils/operations.py,sha256=eoC_ScuHUMDCuk08j1bosiQZdPrgiIODqqheR9MtJHQ,641
 eva/core/utils/parser.py,sha256=2czmwEGJJ6PtmaD86s9I14P-_sek4DmDCkEatRGT5sI,725
 eva/core/utils/progress_bar.py,sha256=KvvsM_v3_Fhb4JvbEEPHb4PJMokg6mNLj-o6dkfzcMc,499
+eva/core/utils/registry.py,sha256=iEDVsQ7DdPXhzfW32z3lWNgwjagR_jh_g4VkvajjRK4,1348
+eva/core/utils/requirements.py,sha256=u01QPnBDu0YApncxgvg7xEANubyGSptqja12cycg3k0,909
 eva/core/utils/suppress_logs.py,sha256=pOk1076J0mKWn2lgDqEVC1g65FXhA_2IkC4LBEWhnwQ,902
 eva/core/utils/workers.py,sha256=hfx63M82qNg0Dwhre2tl53MnhtRsV7APaDONM9nhVB8,634
+eva/language/__init__.py,sha256=EQxtGM6zT3y-6EGecGAo2IXKS6J8dyWNOGP2mvD7fZ0,360
+eva/language/data/__init__.py,sha256=ONF-CLBK2HO2p1bMCAbdvaHfSsYjdSE9-O3lWgwQQGk,89
+eva/language/data/datasets/__init__.py,sha256=AeGw3EVyz5w8Ja2m8Hd3FoKkCeG68M0BwtF64VCvOk4,211
+eva/language/data/datasets/classification/__init__.py,sha256=HXtgJpMt9CgqvdeWc147bOn-9YeUMcL8v-0UHHkY_A4,144
+eva/language/data/datasets/classification/base.py,sha256=53Tbu8dRSMuNfRB_xEBk_3wSe0jRqd_S-IwE9WIpgW0,1669
+eva/language/data/datasets/classification/pubmedqa.py,sha256=AxqW5OQAtDBLVX-Sr0WIt3E7GBbkGMUPOvhOlKLTLh8,5439
+eva/language/data/datasets/language.py,sha256=Kjy1X9xZBIfjjEYJKMikgKhLaquugRSjXTYM_HWiHmw,305
+eva/language/models/__init__.py,sha256=ggL8uDv_J8vQTr5fc5j8UENoPA24h2Bo3uNSdd_vRU0,606
+eva/language/models/modules/__init__.py,sha256=UWO_yq7kPzTrptl6iWTFy07E_Cyzf-sTu-zAK-sN3CI,112
+eva/language/models/modules/text.py,sha256=wknEnS_t8IToLTo_F9AG4EXkt0_6o4n6dYm-8AVxgOY,2906
+eva/language/models/modules/typings.py,sha256=dWq3FTbvGZGoIMTEIX1yWBk_VFKIHsvxaTvBXlbJfCA,379
+eva/language/models/wrappers/__init__.py,sha256=syKHBqX3MRD2oq_MjDjvoNC6W0F8TdmGYqhwrGLBahI,402
+eva/language/models/wrappers/huggingface.py,sha256=l3wVOB4QOe2_yeG4xwNm0imP_kWbjM8RG0ISE4ANAyE,2396
+eva/language/models/wrappers/litellm.py,sha256=0GSo3gxUv1Ah6FQ6VqTiiu_0TVx-h4xUkZqdOAGbKTM,2619
+eva/language/models/wrappers/vllm.py,sha256=LXadfveGSHmjWAlOBrJm1QHkKzB_O-s3M8U7bnKfW-E,5684
+eva/language/utils/__init__.py,sha256=jqrU3o80NAwt6HbNDWSM3e3B1-65vw811DqiUWT0Bys,150
+eva/language/utils/str_to_int_tensor.py,sha256=YlNppLlAoX0ZRVfcDX4BStCQPbS1NNParcwptxH0_wE,3143
 eva/vision/__init__.py,sha256=oUZXFYjwtkWzi8An0uS5Xc84pLKintlXe2iti8zW6BQ,480
 eva/vision/callbacks/__init__.py,sha256=su1V73L0dDVYWSyvV_lnWbszDi2KikRraF7OsgeaKl4,139
 eva/vision/callbacks/loggers/__init__.py,sha256=td1JRJbE08nsGIZdO64_yLC3FUuMDp0kma0HjpUdXT4,161
@@ -119,10 +142,11 @@ eva/vision/callbacks/loggers/batch/__init__.py,sha256=DVYP7Aonbi4wg_ERHRj_8kb87E
 eva/vision/callbacks/loggers/batch/base.py,sha256=hcAd5iiHvjZ0DIf4Qt4ENT54D6ky_1OO4rKQZqeo-1k,3628
 eva/vision/callbacks/loggers/batch/segmentation.py,sha256=GYh2kfexW5pUZ0BdApYJI3e8xsuNkjIzkj5jnuKtHR4,6886
 eva/vision/data/__init__.py,sha256=zuLOC8ExyeQGlwib1LB70RedrTC9w0siOhFTQIRb0V8,137
-eva/vision/data/dataloaders/__init__.py,sha256=7AOD_UF3hMokrGxJ2tbawH44ujQaesDsaW-3HWorYv8,128
+eva/vision/data/dataloaders/__init__.py,sha256=9ykBD4vyZ-Yv3IEnqvVcSMURS-gXWjOunA43brQK6Hg,207
 eva/vision/data/dataloaders/collate_fn/__init__.py,sha256=GCvJaeILmAc_-lhGw8yzj2cC2KG4i1PvSWAyVzPKvVo,146
 eva/vision/data/dataloaders/collate_fn/collection.py,sha256=45s9fKjVBnqfnuGWmJZMtt_DDGnfuf7qkWe0QmxXMKo,611
-eva/vision/data/datasets/__init__.py,sha256=NW034jrOnpDwYBQfsTjWG6jDJY_dPWKV-cq37HkBm10,1014
+eva/vision/data/dataloaders/worker_init.py,sha256=lFWywHGCC4QxHeDXrneF8DQ45XG3WmVltEELJrPyLz0,1182
+eva/vision/data/datasets/__init__.py,sha256=s3h4w71LiM6dT6AYWzCG2-nexkSuuTWixw4KrCGAhS8,1026
 eva/vision/data/datasets/_utils.py,sha256=epPcaYE4w2_LtUKLLQJh6qQxUNVBe22JA06k4WUerYQ,1430
 eva/vision/data/datasets/_validators.py,sha256=77WZj8ewsuxUjW5WegJ-7zDuR6WdF5JbaOYdywhKIK4,2594
 eva/vision/data/datasets/classification/__init__.py,sha256=5fOGZxKGPeMCf3Jd9qAOYADPrkZnYg97_QE4DC79AMI,1074
@@ -137,30 +161,37 @@ eva/vision/data/datasets/classification/panda.py,sha256=HVfCvByyajdo5o_waqTpzZWC
 eva/vision/data/datasets/classification/patch_camelyon.py,sha256=1yXkfP680qxkQUFAPKRFbZv0cHAFx23s2vvT9th2nKM,7149
 eva/vision/data/datasets/classification/unitopatho.py,sha256=IO3msEsuOnmdcYZxF-eBpo0K97y54rWFmCb_KxuF4bk,5129
 eva/vision/data/datasets/classification/wsi.py,sha256=YMGxU8ECjudizt_uXUevuPS8k66HxtEQ7M2IZJmL6kE,4079
-eva/vision/data/datasets/segmentation/__init__.py,sha256=YA7qx4B-pfsILfONa2AfIQHKzDnv8l0sHwjsSXa5-vQ,765
-eva/vision/data/datasets/segmentation/_total_segmentator.py,sha256=DTaQaAisY7j1h0-zYk1_81Sr4b3D9PTMieYX0PMPtIc,3127
+eva/vision/data/datasets/segmentation/__init__.py,sha256=y_BjUj6kF-WeouSz0CCpPdOdX7n5hUrqsZGF68Xu9Hw,784
 eva/vision/data/datasets/segmentation/_utils.py,sha256=aXUHrnbefP6-OgSvDQHqssFKhUwETul_8aosqYiOfm8,3065
 eva/vision/data/datasets/segmentation/bcss.py,sha256=rqk6VqK0QCHLFnMnDuHd1JPJVK5_C6WnsmnNSKBw6Uo,8230
-eva/vision/data/datasets/segmentation/btcv.py,sha256=GNgr8pLx7uvZ2pxnYZ8N9SfB9luduMTM9IQ1OHPgBxI,8257
-eva/vision/data/datasets/segmentation/consep.py,sha256=SBH1vD3RjFNRMVeo07d2PqSAInZsWHR2d3xCpCoDVpM,6014
+eva/vision/data/datasets/segmentation/btcv.py,sha256=9rlEqGyb2SGJBY6Oj42FlHajQF8csf1Jq6jeuPSsfXI,8396
+eva/vision/data/datasets/segmentation/consep.py,sha256=VgP69sjwPVfIY8-5MbsNDu1tdDrTac2DQI2e5vukk9c,6065
 eva/vision/data/datasets/segmentation/embeddings.py,sha256=RsTuAwGEJPnWPY7q3pwcjmqtEj0wtRBNRBD4a0RcGtA,1218
-eva/vision/data/datasets/segmentation/lits.py,sha256=AsKsTQZBNXlYU_UllBrdr04rS8K4TDkG_vqR-aVr5ik,7267
-eva/vision/data/datasets/segmentation/lits_balanced.py,sha256=OQ2AK6-wLE0uMvgQJtfBJTUJqS_WBfmsJXgBfe4gU8A,3451
+eva/vision/data/datasets/segmentation/lits17.py,sha256=kcSCKxsgtUuCD1YEYvrb_L_BgOtZC8xDq1lX8ldSZc4,7635
+eva/vision/data/datasets/segmentation/metadata/__init__.py,sha256=o9Od0v6N9dNdf8hfefn2QaNNCD2sZMvc2K58zHA_Nrg,24
+eva/vision/data/datasets/segmentation/metadata/_msd_task7_pancreas.py,sha256=O2-ye0A7wIjcI_D857uvpYw-jckTqfhBUrhinqSNWq0,2553
+eva/vision/data/datasets/segmentation/metadata/_total_segmentator.py,sha256=DTaQaAisY7j1h0-zYk1_81Sr4b3D9PTMieYX0PMPtIc,3127
 eva/vision/data/datasets/segmentation/monusac.py,sha256=iv9-MFaTsGfGV1u6_lQNcSEeSpmVBDQC1Oa123iEtu0,8410
-eva/vision/data/datasets/segmentation/total_segmentator_2d.py,sha256=3cWpJkZmJ7IUJhigw69YLFOg2_-yzXSLGXqWVPUsn8Y,16978
+eva/vision/data/datasets/segmentation/msd_task7_pancreas.py,sha256=dTsPD73PAP15VOXdHnX4eQqbpz2jGpCB31YISzinUd4,8964
+eva/vision/data/datasets/segmentation/total_segmentator_2d.py,sha256=TGz67AGuv8_Bm5DM5TyCtzRTuGXOuctZZNxdQtBxF1g,16987
 eva/vision/data/datasets/structs.py,sha256=RaTDW-B36PumcR5gymhCiX-r8GiKqIFcjqoEEjjFyUE,389
 eva/vision/data/datasets/vision.py,sha256=-_WRiyICMgqABR6Ay_RKBMfsPGwgx9MQfCA7WChHo24,3219
 eva/vision/data/datasets/wsi.py,sha256=dEAT_Si_Qb3qdSovUPeoiWeoPb7m-NGYqq44e3UXHk8,8384
-eva/vision/data/transforms/__init__.py,sha256=Bv1aPvjahteAZzVGSuxzHz2LRwa63NV7IcoPzKUt_fY,720
-eva/vision/data/transforms/common/__init__.py,sha256=ZHzpdr-THc9CgFFbAVMWUiZrUNUiHnCDM8GYhM7tMfU,138
+eva/vision/data/transforms/__init__.py,sha256=39KkoDaIILjgbCEi_WD48a0p6TKV7Woe6744a-CLeOU,863
+eva/vision/data/transforms/base/__init__.py,sha256=d3gAnEHx8WZHuUpKLr0YAC1JR60qhRY02yeoADrx1Yw,143
+eva/vision/data/transforms/base/monai.py,sha256=g2U4eNxKiPm6TXjHjKZ4L8-1zxa1tEf-c73kKI_7WwQ,1081
+eva/vision/data/transforms/common/__init__.py,sha256=LWA5u0VsWzEEugdKpjpePr7fgV_XIGdASGwoaOU29Ac,211
 eva/vision/data/transforms/common/resize_and_crop.py,sha256=GI1HTkbJ9qg4p8c6vk_XkXO0Qi6mBeUeiZIA0jVtmAw,1360
-eva/vision/data/transforms/croppad/__init__.py,sha256=d36WGe9x39p-d7VymRM29qdquv8YEa0RfsTfwt7Cou4,375
+eva/vision/data/transforms/common/squeeze.py,sha256=N9G2XRKOCuqaaLc27jWy8a-x1kgxez0hM2fDPH4w5Ak,726
+eva/vision/data/transforms/croppad/__init__.py,sha256=sa-M4JMdZGB39JqWOLAJvoun4YK3N4hBFuM--DZ8SnE,606
 eva/vision/data/transforms/croppad/crop_foreground.py,sha256=3o27nOgxfRo8ap45lpmnaiAIZ08kdyp14vYpr4BC8zc,4865
-eva/vision/data/transforms/croppad/rand_crop_by_pos_neg_label.py,sha256=8CwMYAOQgOIb1Uw1jc219aqY3s2tCWd6r-2nU7kqOLc,5538
+eva/vision/data/transforms/croppad/rand_crop_by_label_classes.py,sha256=9oRY1ZuvUGuJNRkAT7JEquWBrYmNAD82FebrbvwVEtc,2587
+eva/vision/data/transforms/croppad/rand_crop_by_pos_neg_label.py,sha256=5omKDSECPdnWEt-AXlu8oeN7N-qa1MXekEggZ08KeIQ,5670
+eva/vision/data/transforms/croppad/rand_spatial_crop.py,sha256=yKAW5GN1B1vac74REYpBbrRhKxI2BhGA_LDEJ_AcO5w,3769
 eva/vision/data/transforms/croppad/spatial_pad.py,sha256=j5V2vvgGcf75GzGyAT7mGgpvlEOS2BnAcThRdt7Und4,2857
 eva/vision/data/transforms/intensity/__init__.py,sha256=mNp6pi0pnHcA24kQuiGHzMb4XLRaR0Lgi-Vb7Nl-Aoo,408
-eva/vision/data/transforms/intensity/rand_scale_intensity.py,sha256=DDcFWTmq5UbwISO9qGIPOQJ72rx7JQWtVi2OxggLzyE,2041
-eva/vision/data/transforms/intensity/rand_shift_intensity.py,sha256=9YNREhRoCzLOt2C21daum62cbB53ZRcYOSuSW_Jz7eQ,1974
+eva/vision/data/transforms/intensity/rand_scale_intensity.py,sha256=x3gYRRAFbZXFWgqvoUjT0sDFWT6zbhja3PJLXbOBtTg,2184
+eva/vision/data/transforms/intensity/rand_shift_intensity.py,sha256=5ngSHfHgJOR9KQykNKJhQMWhQYj781Mj_JKJ_ADchbM,2117
 eva/vision/data/transforms/intensity/scale_intensity_ranged.py,sha256=VLvYZYG6jQCuR5poJsAlhIFjw6VjPEpcDPKBlJTjYBM,1873
 eva/vision/data/transforms/spatial/__init__.py,sha256=k7C_p4fMZd7A00ikldAMsprYDedKrlMjKQB6BLA5InA,284
 eva/vision/data/transforms/spatial/flip.py,sha256=jfRc-wPBvG58OtCNU3GrOkb57kcRddRqpwcAdCB0_No,2553
@@ -201,42 +232,42 @@ eva/vision/metrics/wrappers/__init__.py,sha256=V4z3hradMa6CQgTkk1bc2cbZzCgcoIYw7
 eva/vision/metrics/wrappers/monai.py,sha256=FNa1yHN2U3vO6BGqS0BFm8uJAL6DCzSE4XOFCV4aBjg,885
 eva/vision/models/__init__.py,sha256=a-P6JL73A3miHQnqgqUz07XtVmQB_o4DqPImk5rEATo,275
 eva/vision/models/modules/__init__.py,sha256=vaM_V6OF2s0lYjralP8dzv8mAtv_xIMZItfXgz0NZg8,156
-eva/vision/models/modules/semantic_segmentation.py,sha256=f04QwxSt8x9oVHf5JMeN5b_PMPmfLcso_icDBma1ToE,7930
+eva/vision/models/modules/semantic_segmentation.py,sha256=57sfKU6AAaWp8qTTYwcDFNwrTCMpVVbJtOdbGUmXb7Y,8308
 eva/vision/models/networks/__init__.py,sha256=j43IurizNlAyKPH2jwDHaeq49L2QvwbHWqUaptA1mG4,100
 eva/vision/models/networks/abmil.py,sha256=N1eH4fn1nXmgXurSQyQIxxonv7nsqeeuPWaQSHeltfs,6796
-eva/vision/models/networks/backbones/__init__.py,sha256=mvYVtmJOvYLCXDX52hP6dzQxj9cQikwSeBZvEDNyNmU,347
-eva/vision/models/networks/backbones/_utils.py,sha256=V7xeod4mElEuuO1TRW0xJE051cUyS1Saraw3-KcK1Mw,1667
+eva/vision/models/networks/backbones/__init__.py,sha256=oczI7-IyKWyrkM-tIDYIrw01sCVJoWvxrDjvCcVU_vg,301
+eva/vision/models/networks/backbones/_utils.py,sha256=sF9y7vRBc1nFLLGmxramzMQmVwf3oiMMI7XcpKDiBss,1660
 eva/vision/models/networks/backbones/pathology/__init__.py,sha256=JZ1mhKm4w89JTrXDfTM02OyFWtDuxRhhvpytDk_t500,1386
-eva/vision/models/networks/backbones/pathology/bioptimus.py,sha256=NrS0WJqiJKjDYT3odQGLPgnzMuCbJfWoW1Dal-L9F50,2626
-eva/vision/models/networks/backbones/pathology/gigapath.py,sha256=mfGXtKhY7XLpKQQAFNVZYsM-aeHCEbOVUrxpAEOr-l8,955
-eva/vision/models/networks/backbones/pathology/histai.py,sha256=X_we3U7GK91RrXyOX2PJB-YFDF2ozdL2fzZhNxm9SVU,1914
-eva/vision/models/networks/backbones/pathology/hkust.py,sha256=bZpzx7EvK4CVefNnJmyz-2Ta-WdYDwEDzf-zWoZkoCQ,2308
-eva/vision/models/networks/backbones/pathology/kaiko.py,sha256=lVzgWhgFn1iOlfSSxsX2cH16rrFQFjzdaF6_HS1y-6c,4517
-eva/vision/models/networks/backbones/pathology/lunit.py,sha256=ku4lr9pWeeHatHN4x4OVgwlve9sVqiRqIbgI0PXLiqg,2160
-eva/vision/models/networks/backbones/pathology/mahmood.py,sha256=VYoVWrMNkoaEqa0och-GbwGd0VISQmbtzk1dSBZ1M0I,2464
-eva/vision/models/networks/backbones/pathology/owkin.py,sha256=uWJV5fgY7UZX6ilgGzkPY9fnlOiF03W7E8rc9TmlHGg,1231
-eva/vision/models/networks/backbones/pathology/paige.py,sha256=MjOLgdEKk8tdAIpCiHelasGwPE7xgzaooW6EE7IsuEE,1642
+eva/vision/models/networks/backbones/pathology/bioptimus.py,sha256=N14sD2LMHKO_V8frN8FpXfPc2p6k8tnZAiAQhEw1dOs,2646
+eva/vision/models/networks/backbones/pathology/gigapath.py,sha256=YTBYSb1KduwYsFOTFzNkzI9-GtjU7UjPLzqPzwIJEvs,970
+eva/vision/models/networks/backbones/pathology/histai.py,sha256=vq-sBh63z1dkXa8i0nJeVJM4mnVNvuv0wRG911JjeXI,1941
+eva/vision/models/networks/backbones/pathology/hkust.py,sha256=vTyvCwwQrh9GWtJXfY7GIOz5DWHM6zGRKx-gPpHWncw,2323
+eva/vision/models/networks/backbones/pathology/kaiko.py,sha256=JNu3s3WOgguP3ccishGU1yCCvikgqhIniFdfHSkwEMA,4592
+eva/vision/models/networks/backbones/pathology/lunit.py,sha256=_0ERN18W2hOO7qnzC-HpRA5N-3tw31BK1zlnvj5sqXc,2187
+eva/vision/models/networks/backbones/pathology/mahmood.py,sha256=aN6lfrpIsLLODDWFZDS39hJ9iiAVbI49d26S9C8iNiI,2491
+eva/vision/models/networks/backbones/pathology/owkin.py,sha256=tJ6ZMbtLKfvS__B9Dtb__MT03ZbR3zKLTrCv_Uk1y4c,1258
+eva/vision/models/networks/backbones/pathology/paige.py,sha256=MxwHtjn72G6nvNWiE05rQ_qXub9caC8rp72BbqtVrZA,1650
 eva/vision/models/networks/backbones/radiology/__init__.py,sha256=pD8ijQZRaX_Lu3tPBV73qUVaAURDrB_2pEyyBdRZmis,294
-eva/vision/models/networks/backbones/radiology/swin_unetr.py,sha256=n5lJkoKjxKogs5Q_XuKh7Q5J96Bgln5W4ShL-VwSZXs,7976
-eva/vision/models/networks/backbones/radiology/voco.py,sha256=sICZnsxQYnqYEmauhB6CBmaqpzBoAB6CpXJjNm5FesI,2464
-eva/vision/models/networks/backbones/registry.py,sha256=anjILtEHHB6Ltwiw22h1bsgWtIjh_l5_fkPh87K7-d0,1631
+eva/vision/models/networks/backbones/radiology/swin_unetr.py,sha256=mqoqgc6Zq4tS-wswSe4lKuSEfcPYtpelrqbFqWekOl4,7991
+eva/vision/models/networks/backbones/radiology/voco.py,sha256=Rzwbk6nMrjnd71KDLf8uyXntGY0YMTLDkHhzP56p_7Q,2519
+eva/vision/models/networks/backbones/registry.py,sha256=ubLPktCXZu_CLTfPk3sO-JzOHSkD6kTYDuyTD12JpeI,109
 eva/vision/models/networks/backbones/timm/__init__.py,sha256=cZH3av9gIZcvEVD0rwKsI-MEq7zPqaW4dQ0E05CksvQ,128
-eva/vision/models/networks/backbones/timm/backbones.py,sha256=fCTiwqU6NhQ-ccAMzmpPDddXkFzRAB3mw4lcQ9um_PU,1646
-eva/vision/models/networks/backbones/universal/__init__.py,sha256=MAlkALSJ2_w6spSbB7NmKlL0Jsk1YKEycatdI0xO0_I,252
-eva/vision/models/networks/backbones/universal/vit.py,sha256=kpUCoXpefR34hRNlQDFK9lGr4oqS8Mn5vTLKWZ-gaOs,1820
+eva/vision/models/networks/backbones/timm/backbones.py,sha256=ZbF9MMiL4Ylyy79XLew61QxmnH9MPXUHPounzzqOKVc,1638
+eva/vision/models/networks/backbones/universal/__init__.py,sha256=xgn3crSqlmUPYz-t2CR1zDKxhlyAEeApA-a6Y_eWQvc,417
+eva/vision/models/networks/backbones/universal/vit.py,sha256=To0OzwpuX5Y5PwjGidwV0Ssq3xa81dve081buwG_Ofg,3658
 eva/vision/models/networks/decoders/__init__.py,sha256=RXFWmoYw2i6E9VOUCJmU8c72icHannVuo-cUKy6fnLM,200
 eva/vision/models/networks/decoders/segmentation/__init__.py,sha256=SqmxtzxwBRF8g2hsiqe0o3Nr0HFK97azTnWLyqsYigY,652
 eva/vision/models/networks/decoders/segmentation/base.py,sha256=b2TIJKiJR9vejVRpNyedMJLPTrpHhAEXvco8atb9TPU,411
-eva/vision/models/networks/decoders/segmentation/decoder2d.py,sha256=A7vz0LJ_YweftpKeEBJm0Y3N7hbVLDSIkAajaQv1UgE,4456
-eva/vision/models/networks/decoders/segmentation/linear.py,sha256=PZeEIH0ybgxgIKtmcflh8jsARo5NQqkgoGbpAZd7yj4,4650
+eva/vision/models/networks/decoders/segmentation/decoder2d.py,sha256=HRonYTSriiq13aZCSNiYUc484qfOhkVT0yFiMW06CDc,4472
+eva/vision/models/networks/decoders/segmentation/linear.py,sha256=ui3-Y0rl4VEF75-sUghaF29P9wpxCVlp5iR_Ym-utUE,4666
 eva/vision/models/networks/decoders/segmentation/semantic/__init__.py,sha256=2yol7W1ARXL-Ge7gYxjUzaGTjH6nfMBlNqQJHprEWGg,539
-eva/vision/models/networks/decoders/segmentation/semantic/common.py,sha256=fPTb0T-2FiOU-jT81ynASKaW7fJiRk6vQjuPkzHOluc,2530
+eva/vision/models/networks/decoders/segmentation/semantic/common.py,sha256=FSf_eI-FaBroxPRJd4TiV97RCreauJh1IznIVzBT2eg,2528
 eva/vision/models/networks/decoders/segmentation/semantic/swin_unetr.py,sha256=ODUpnJrpDQl0m8CC2SPnE_lpFflzS0GSiCZOmrjL6uQ,3373
 eva/vision/models/networks/decoders/segmentation/semantic/with_image.py,sha256=I5PyGKKo8DcXYcw4xlCFzuavRJNRrzGT-szpDidMPXI,3516
-eva/vision/models/networks/decoders/segmentation/typings.py,sha256=8zAqIJLlQdCjsx-Dl4lnF4BB1VxTg_AyIquBVwpZlHg,537
+eva/vision/models/networks/decoders/segmentation/typings.py,sha256=rY4CXp0MNF16SHnx9TgGjXI_r8bVGSqAWdR835hXndg,537
 eva/vision/models/wrappers/__init__.py,sha256=ogmr-eeVuGaOCcsuxSp6PGyauP2QqWTb8dGTtbC7lRU,210
-eva/vision/models/wrappers/from_registry.py,sha256=gdnxyg9drqlxfTNuS3aLbWGbZIwX1VNl0uudfjzVsXM,1614
-eva/vision/models/wrappers/from_timm.py,sha256=Z38Nb1i6OPKkgvFZOvGx-O3AZQuscf1zRVyrEBXQdJg,2320
+eva/vision/models/wrappers/from_registry.py,sha256=2vpKTkoa_CGjVLbhnUclstFzDfkXCqMxjdtxTk212HQ,1729
+eva/vision/models/wrappers/from_timm.py,sha256=Lkdibly4H_XNjl863YFBj10ZbSTN1onT2Bc6yV072m0,2334
 eva/vision/utils/__init__.py,sha256=vaUovprE743SmyFH8l6uk4pYSWpI4zxn7lN0EwePTJI,96
 eva/vision/utils/colormap.py,sha256=sP1F0JCX3abZfFgdxEjLJO-LhNYKjXZvXxs03ZgrEvI,2876
 eva/vision/utils/convert.py,sha256=fqGmKrg5-JJLrTkTXB4YDcWTudXPrO1gGjsckVRUesU,1881
@@ -246,8 +277,8 @@ eva/vision/utils/io/image.py,sha256=IdOkr5MYqhYHz8U9drZ7wULTM3YHwCWSjZlu_Qdl4GQ,
 eva/vision/utils/io/mat.py,sha256=qpGifyjmpE0Xhv567Si7-zxKrgkgE0sywP70cHiLFGU,808
 eva/vision/utils/io/nifti.py,sha256=TFMgNhLqIK3sl3RjIRXEABM7FmSQjqVOwk1vXkuvX2w,4983
 eva/vision/utils/io/text.py,sha256=qYgfo_ZaDZWfG02NkVVYzo5QFySqdCCz5uLA9d-zXtI,701
-kaiko_eva-0.2.2.dist-info/METADATA,sha256=hiFFWrNu2fMZd7VLI08q4EDOc0IU6X4T00RGkHC0QT8,25363
-kaiko_eva-0.2.2.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-kaiko_eva-0.2.2.dist-info/entry_points.txt,sha256=6CSLu9bmQYJSXEg8gbOzRhxH0AGs75BB-vPm3VvfcNE,88
-kaiko_eva-0.2.2.dist-info/licenses/LICENSE,sha256=e6AEzr7j_R-PYr2qLO-JwLn8y70jbVD3U2mxbRmwcI4,11338
-kaiko_eva-0.2.2.dist-info/RECORD,,
+kaiko_eva-0.3.1.dist-info/METADATA,sha256=gXYGvp6Ap95944atE7L9Dxk8AnmuVhn22sHAC2iIl_g,25704
+kaiko_eva-0.3.1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+kaiko_eva-0.3.1.dist-info/entry_points.txt,sha256=6CSLu9bmQYJSXEg8gbOzRhxH0AGs75BB-vPm3VvfcNE,88
+kaiko_eva-0.3.1.dist-info/licenses/LICENSE,sha256=e6AEzr7j_R-PYr2qLO-JwLn8y70jbVD3U2mxbRmwcI4,11338
+kaiko_eva-0.3.1.dist-info/RECORD,,

{kaiko_eva-0.2.2.dist-info → kaiko_eva-0.3.1.dist-info}/WHEEL

@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: pdm-backend (2.4.4)
+Generator: pdm-backend (2.4.5)
 Root-Is-Purelib: true
 Tag: py3-none-any

eva/vision/data/datasets/segmentation/lits.py

@@ -1,199 +0,0 @@
-"""LiTS dataset."""
-
-import functools
-import glob
-import os
-from typing import Any, Callable, Dict, List, Literal, Tuple
-
-import numpy as np
-import numpy.typing as npt
-import torch
-from torchvision import tv_tensors
-from typing_extensions import override
-
-from eva.core import utils
-from eva.core.data import splitting
-from eva.vision.data.datasets import _validators, vision
-from eva.vision.utils import io
-
-
-class LiTS(vision.VisionDataset[tv_tensors.Image, tv_tensors.Mask]):
-    """LiTS - Liver Tumor Segmentation Challenge.
-
-    Webpage: https://competitions.codalab.org/competitions/17094
-    """
-
-    _train_ratio: float = 0.7
-    _val_ratio: float = 0.15
-    _test_ratio: float = 0.15
-    """Index ranges per split."""
-
-    _fix_orientation: bool = True
-    """Whether to fix the orientation of the images to match the default for radiologists."""
-
-    _sample_every_n_slices: int | None = None
-    """The amount of slices to sub-sample per 3D CT scan image."""
-
-    _expected_dataset_lengths: Dict[str | None, int] = {
-        "train": 38686,
-        "val": 11192,
-        "test": 8760,
-        None: 58638,
-    }
-    """Dataset version and split to the expected size."""
-
-    _license: str = (
-        "Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License "
-        "(https://creativecommons.org/licenses/by-nc-nd/4.0/deed.en)"
-    )
-    """Dataset license."""
-
-    def __init__(
-        self,
-        root: str,
-        split: Literal["train", "val", "test"] | None = None,
-        transforms: Callable | None = None,
-        seed: int = 8,
-    ) -> None:
-        """Initialize dataset.
-
-        Args:
-            root: Path to the root directory of the dataset. The dataset will
-                be downloaded and extracted here, if it does not already exist.
-            split: Dataset split to use.
-            transforms: A function/transforms that takes in an image and a target
-                mask and returns the transformed versions of both.
-            seed: Seed used for generating the dataset splits.
-        """
-        super().__init__(transforms=transforms)
-
-        self._root = root
-        self._split = split
-        self._seed = seed
-        self._indices: List[Tuple[int, int]] = []
-
-    @property
-    @override
-    def classes(self) -> List[str]:
-        return ["background", "liver", "tumor"]
-
-    @functools.cached_property
-    @override
-    def class_to_idx(self) -> Dict[str, int]:
-        return {label: index for index, label in enumerate(self.classes)}
-
-    @override
-    def filename(self, index: int) -> str:
-        sample_index, _ = self._indices[index]
-        volume_file_path = self._volume_files[sample_index]
-        return os.path.relpath(volume_file_path, self._root)
-
-    @override
-    def configure(self) -> None:
-        self._indices = self._create_indices()
-
-    @override
-    def validate(self) -> None:
-        for i in range(len(self._volume_files)):
-            seg_path = self._segmentation_file(i)
-            if not os.path.exists(seg_path):
-                raise FileNotFoundError(
-                    f"Segmentation file {seg_path} not found for volume {self._volume_files[i]}."
-                )
-
-        _validators.check_dataset_integrity(
-            self,
-            length=self._expected_dataset_lengths.get(self._split, 0),
-            n_classes=3,
-            first_and_last_labels=("background", "tumor"),
-        )
-
-    @override
-    def load_data(self, index: int) -> tv_tensors.Image:
-        sample_index, slice_index = self._indices[index]
-        volume_path = self._volume_files[sample_index]
-        image_nii = io.read_nifti(volume_path, slice_index)
-        image_array = io.nifti_to_array(image_nii)
-        if self._fix_orientation:
-            image_array = self._orientation(image_array, sample_index)
-        return tv_tensors.Image(image_array.transpose(2, 0, 1))
-
-    @override
-    def load_target(self, index: int) -> tv_tensors.Mask:
-        sample_index, slice_index = self._indices[index]
-        segmentation_path = self._segmentation_file(sample_index)
-        mask_nii = io.read_nifti(segmentation_path, slice_index)
-        mask_array = io.nifti_to_array(mask_nii)
-        if self._fix_orientation:
-            semantic_labels = self._orientation(mask_array, sample_index)
-        return tv_tensors.Mask(semantic_labels.squeeze(), dtype=torch.int64)  # type: ignore[reportCallIssue]
-
-    def _orientation(self, array: npt.NDArray, sample_index: int) -> npt.NDArray:
-        volume_path = self._volume_files[sample_index]
-        orientation = io.fetch_nifti_axis_direction_code(volume_path)
-        array = np.rot90(array, axes=(0, 1))
-        if orientation == "LPS":
-            array = np.flip(array, axis=0)
-        return array.copy()
-
-    @override
-    def load_metadata(self, index: int) -> Dict[str, Any]:
-        _, slice_index = self._indices[index]
-        return {"slice_index": slice_index}
-
-    @override
-    def __len__(self) -> int:
-        return len(self._indices)
-
-    def _get_number_of_slices_per_volume(self, sample_index: int) -> int:
-        """Returns the total amount of slices of a volume."""
-        file_path = self._volume_files[sample_index]
-        volume_shape = io.fetch_nifti_shape(file_path)
-        return volume_shape[-1]
-
-    @functools.cached_property
-    def _volume_files(self) -> List[str]:
-        files_pattern = os.path.join(self._root, "**", "volume-*.nii")
-        files = glob.glob(files_pattern, recursive=True)
-        return utils.numeric_sort(files)
-
-    def _segmentation_file(self, index: int) -> str:
-        volume_file_path = self._volume_files[index]
-        segmentation_file = os.path.basename(volume_file_path).replace("volume", "segmentation")
-        return os.path.join(os.path.dirname(volume_file_path), segmentation_file)
-
-    def _create_indices(self) -> List[Tuple[int, int]]:
-        """Builds the dataset indices for the specified split.
-
-        Returns:
-            A list of tuples, where the first value indicates the
-            sample index which the second its corresponding slice
-            index.
-        """
-        indices = [
-            (sample_idx, slide_idx)
-            for sample_idx in self._get_split_indices()
-            for slide_idx in range(self._get_number_of_slices_per_volume(sample_idx))
-            if slide_idx % (self._sample_every_n_slices or 1) == 0
-        ]
-        return indices
-
-    def _get_split_indices(self) -> List[int]:
-        """Returns the sample indices for the specified dataset split."""
-        indices = list(range(len(self._volume_files)))
-        train_indices, val_indices, test_indices = splitting.random_split(
-            indices, self._train_ratio, self._val_ratio, self._test_ratio, seed=self._seed
-        )
-        split_indices_dict = {
-            "train": train_indices,
-            "val": val_indices,
-            "test": test_indices,
-            None: indices,
-        }
-        if self._split not in split_indices_dict:
-            raise ValueError("Invalid data split. Use 'train', 'val', 'test' or `None`.")
-        return list(split_indices_dict[self._split])
-
-    def _print_license(self) -> None:
-        """Prints the dataset license."""
-        print(f"Dataset license: {self._license}")