kaiko-eva 0.1.8__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. eva/core/data/datasets/base.py +7 -2
  2. eva/core/data/datasets/classification/embeddings.py +2 -2
  3. eva/core/data/datasets/classification/multi_embeddings.py +2 -2
  4. eva/core/data/datasets/embeddings.py +4 -4
  5. eva/core/data/samplers/classification/balanced.py +19 -18
  6. eva/core/loggers/utils/wandb.py +33 -0
  7. eva/core/models/modules/head.py +5 -3
  8. eva/core/models/modules/typings.py +2 -2
  9. eva/core/models/transforms/__init__.py +2 -1
  10. eva/core/models/transforms/as_discrete.py +57 -0
  11. eva/core/models/wrappers/_utils.py +121 -1
  12. eva/core/trainers/functional.py +8 -5
  13. eva/core/trainers/trainer.py +32 -17
  14. eva/core/utils/suppress_logs.py +28 -0
  15. eva/vision/data/__init__.py +2 -2
  16. eva/vision/data/dataloaders/__init__.py +5 -0
  17. eva/vision/data/dataloaders/collate_fn/__init__.py +5 -0
  18. eva/vision/data/dataloaders/collate_fn/collection.py +22 -0
  19. eva/vision/data/datasets/__init__.py +10 -2
  20. eva/vision/data/datasets/classification/__init__.py +9 -0
  21. eva/vision/data/datasets/classification/bach.py +3 -4
  22. eva/vision/data/datasets/classification/bracs.py +111 -0
  23. eva/vision/data/datasets/classification/breakhis.py +209 -0
  24. eva/vision/data/datasets/classification/camelyon16.py +4 -5
  25. eva/vision/data/datasets/classification/crc.py +3 -4
  26. eva/vision/data/datasets/classification/gleason_arvaniti.py +171 -0
  27. eva/vision/data/datasets/classification/mhist.py +3 -4
  28. eva/vision/data/datasets/classification/panda.py +4 -5
  29. eva/vision/data/datasets/classification/patch_camelyon.py +3 -4
  30. eva/vision/data/datasets/classification/unitopatho.py +158 -0
  31. eva/vision/data/datasets/classification/wsi.py +6 -5
  32. eva/vision/data/datasets/segmentation/__init__.py +2 -2
  33. eva/vision/data/datasets/segmentation/_utils.py +47 -0
  34. eva/vision/data/datasets/segmentation/bcss.py +7 -8
  35. eva/vision/data/datasets/segmentation/btcv.py +236 -0
  36. eva/vision/data/datasets/segmentation/consep.py +6 -7
  37. eva/vision/data/datasets/segmentation/embeddings.py +2 -2
  38. eva/vision/data/datasets/segmentation/lits.py +9 -8
  39. eva/vision/data/datasets/segmentation/lits_balanced.py +2 -1
  40. eva/vision/data/datasets/segmentation/monusac.py +4 -5
  41. eva/vision/data/datasets/segmentation/total_segmentator_2d.py +12 -10
  42. eva/vision/data/datasets/vision.py +95 -4
  43. eva/vision/data/datasets/wsi.py +5 -5
  44. eva/vision/data/transforms/__init__.py +22 -3
  45. eva/vision/data/transforms/common/__init__.py +1 -2
  46. eva/vision/data/transforms/croppad/__init__.py +11 -0
  47. eva/vision/data/transforms/croppad/crop_foreground.py +110 -0
  48. eva/vision/data/transforms/croppad/rand_crop_by_pos_neg_label.py +109 -0
  49. eva/vision/data/transforms/croppad/spatial_pad.py +67 -0
  50. eva/vision/data/transforms/intensity/__init__.py +11 -0
  51. eva/vision/data/transforms/intensity/rand_scale_intensity.py +59 -0
  52. eva/vision/data/transforms/intensity/rand_shift_intensity.py +55 -0
  53. eva/vision/data/transforms/intensity/scale_intensity_ranged.py +56 -0
  54. eva/vision/data/transforms/spatial/__init__.py +7 -0
  55. eva/vision/data/transforms/spatial/flip.py +72 -0
  56. eva/vision/data/transforms/spatial/rotate.py +53 -0
  57. eva/vision/data/transforms/spatial/spacing.py +69 -0
  58. eva/vision/data/transforms/utility/__init__.py +5 -0
  59. eva/vision/data/transforms/utility/ensure_channel_first.py +51 -0
  60. eva/vision/data/tv_tensors/__init__.py +5 -0
  61. eva/vision/data/tv_tensors/volume.py +61 -0
  62. eva/vision/metrics/segmentation/monai_dice.py +9 -2
  63. eva/vision/models/modules/semantic_segmentation.py +28 -20
  64. eva/vision/models/networks/backbones/__init__.py +9 -2
  65. eva/vision/models/networks/backbones/pathology/__init__.py +11 -2
  66. eva/vision/models/networks/backbones/pathology/bioptimus.py +47 -1
  67. eva/vision/models/networks/backbones/pathology/hkust.py +69 -0
  68. eva/vision/models/networks/backbones/pathology/kaiko.py +18 -0
  69. eva/vision/models/networks/backbones/pathology/mahmood.py +46 -19
  70. eva/vision/models/networks/backbones/radiology/__init__.py +11 -0
  71. eva/vision/models/networks/backbones/radiology/swin_unetr.py +231 -0
  72. eva/vision/models/networks/backbones/radiology/voco.py +75 -0
  73. eva/vision/models/networks/decoders/segmentation/__init__.py +6 -2
  74. eva/vision/models/networks/decoders/segmentation/linear.py +5 -10
  75. eva/vision/models/networks/decoders/segmentation/semantic/__init__.py +8 -1
  76. eva/vision/models/networks/decoders/segmentation/semantic/swin_unetr.py +104 -0
  77. eva/vision/utils/io/__init__.py +2 -0
  78. eva/vision/utils/io/nifti.py +91 -11
  79. {kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/METADATA +3 -1
  80. {kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/RECORD +83 -62
  81. {kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/WHEEL +1 -1
  82. eva/vision/data/datasets/classification/base.py +0 -96
  83. eva/vision/data/datasets/segmentation/base.py +0 -96
  84. eva/vision/data/transforms/common/resize_and_clamp.py +0 -51
  85. eva/vision/data/transforms/normalization/__init__.py +0 -6
  86. eva/vision/data/transforms/normalization/clamp.py +0 -43
  87. eva/vision/data/transforms/normalization/functional/__init__.py +0 -5
  88. eva/vision/data/transforms/normalization/functional/rescale_intensity.py +0 -28
  89. eva/vision/data/transforms/normalization/rescale_intensity.py +0 -53
  90. eva/vision/metrics/segmentation/BUILD +0 -1
  91. eva/vision/models/networks/backbones/torchhub/__init__.py +0 -5
  92. eva/vision/models/networks/backbones/torchhub/backbones.py +0 -61
  93. {kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/entry_points.txt +0 -0
  94. {kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/licenses/LICENSE +0 -0
eva/vision/models/networks/decoders/segmentation/semantic/swin_unetr.py
@@ -0,0 +1,104 @@
+ """Decoder based on Swin UNETR."""
+
+ from typing import List
+
+ import torch
+ from monai.networks.blocks import dynunet_block, unetr_block
+ from torch import nn
+
+
+ class SwinUNETRDecoder(nn.Module):
+     """Swin transformer decoder based on UNETR [0].
+
+     - [0] UNETR: Transformers for 3D Medical Image Segmentation
+       https://arxiv.org/pdf/2103.10504
+     """
+
+     def __init__(
+         self,
+         out_channels: int,
+         feature_size: int = 48,
+         spatial_dims: int = 3,
+     ) -> None:
+         """Builds the decoder.
+
+         Args:
+             out_channels: Number of output channels.
+             feature_size: Dimension of network feature size.
+             spatial_dims: Number of spatial dimensions.
+         """
+         super().__init__()
+
+         self.decoder5 = unetr_block.UnetrUpBlock(
+             spatial_dims=spatial_dims,
+             in_channels=16 * feature_size,
+             out_channels=8 * feature_size,
+             kernel_size=3,
+             upsample_kernel_size=2,
+             norm_name="instance",
+             res_block=True,
+         )
+         self.decoder4 = unetr_block.UnetrUpBlock(
+             spatial_dims=spatial_dims,
+             in_channels=feature_size * 8,
+             out_channels=feature_size * 4,
+             kernel_size=3,
+             upsample_kernel_size=2,
+             norm_name="instance",
+             res_block=True,
+         )
+         self.decoder3 = unetr_block.UnetrUpBlock(
+             spatial_dims=spatial_dims,
+             in_channels=feature_size * 4,
+             out_channels=feature_size * 2,
+             kernel_size=3,
+             upsample_kernel_size=2,
+             norm_name="instance",
+             res_block=True,
+         )
+         self.decoder2 = unetr_block.UnetrUpBlock(
+             spatial_dims=spatial_dims,
+             in_channels=feature_size * 2,
+             out_channels=feature_size,
+             kernel_size=3,
+             upsample_kernel_size=2,
+             norm_name="instance",
+             res_block=True,
+         )
+         self.decoder1 = unetr_block.UnetrUpBlock(
+             spatial_dims=spatial_dims,
+             in_channels=feature_size,
+             out_channels=feature_size,
+             kernel_size=3,
+             upsample_kernel_size=2,
+             norm_name="instance",
+             res_block=True,
+         )
+         self.out = dynunet_block.UnetOutBlock(
+             spatial_dims=spatial_dims,
+             in_channels=feature_size,
+             out_channels=out_channels,
+         )
+
+     def _forward_features(self, features: List[torch.Tensor]) -> torch.Tensor:
+         """Forward function for multi-level feature maps to a single one."""
+         enc0, enc1, enc2, enc3, hid3, dec4 = features
+         dec3 = self.decoder5(dec4, hid3)
+         dec2 = self.decoder4(dec3, enc3)
+         dec1 = self.decoder3(dec2, enc2)
+         dec0 = self.decoder2(dec1, enc1)
+         out = self.decoder1(dec0, enc0)
+         return self.out(out)
+
+     def forward(self, features: List[torch.Tensor]) -> torch.Tensor:
+         """Maps the patch embeddings to a segmentation mask.
+
+         Args:
+             features: List of multi-level intermediate features from
+                 :class:`SwinUNETREncoder`.
+
+         Returns:
+             Tensor containing scores for all of the classes with shape
+             (batch_size, n_classes, image_height, image_width).
+         """
+         return self._forward_features(features)
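
Note: the new decoder consumes the six multi-scale feature maps of a Swin UNETR style encoder, ordered from the full-resolution stem down to the 1/32 bottleneck. The following is a minimal, hypothetical usage sketch, not taken from the package documentation: the import path follows the wheel layout shown above, the dummy feature shapes are assumptions derived from the decoder's channel layout (feature_size=48, a 96^3 input volume, batch size 1), and the class count is arbitrary.

import torch
from eva.vision.models.networks.decoders.segmentation.semantic.swin_unetr import SwinUNETRDecoder

# Assumed encoder outputs: channels 48/48/96/192/384/768, spatial strides 1 to 32.
decoder = SwinUNETRDecoder(out_channels=14, feature_size=48, spatial_dims=3)
features = [
    torch.randn(1, 48, 96, 96, 96),   # enc0: full resolution
    torch.randn(1, 48, 48, 48, 48),   # enc1: 1/2 resolution
    torch.randn(1, 96, 24, 24, 24),   # enc2: 1/4 resolution
    torch.randn(1, 192, 12, 12, 12),  # enc3: 1/8 resolution
    torch.randn(1, 384, 6, 6, 6),     # hid3: 1/16 resolution
    torch.randn(1, 768, 3, 3, 3),     # dec4: 1/32 resolution bottleneck
]
logits = decoder(features)  # expected shape: (1, 14, 96, 96, 96)
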
eva/vision/utils/io/__init__.py
@@ -5,6 +5,7 @@ from eva.vision.utils.io.mat import read_mat, save_mat
  from eva.vision.utils.io.nifti import (
      fetch_nifti_axis_direction_code,
      fetch_nifti_shape,
+     nifti_to_array,
      read_nifti,
      save_array_as_nifti,
  )
@@ -16,6 +17,7 @@ __all__ = [
      "read_image_as_tensor",
      "fetch_nifti_shape",
      "fetch_nifti_axis_direction_code",
+     "nifti_to_array",
      "read_nifti",
      "save_array_as_nifti",
      "read_csv",
eva/vision/utils/io/nifti.py
@@ -1,3 +1,4 @@
+ # type: ignore
  """NIfTI I/O related functions."""

  from typing import Any, Tuple
@@ -7,36 +8,63 @@ import numpy as np
  import numpy.typing as npt
  from nibabel import orientations

+ from eva.core.utils.suppress_logs import SuppressLogs
  from eva.vision.utils.io import _utils


  def read_nifti(
-     path: str, slice_index: int | None = None, *, use_storage_dtype: bool = True
- ) -> npt.NDArray[Any]:
+     path: str,
+     slice_index: int | None = None,
+     *,
+     orientation: str | None = None,
+     orientation_reference: str | None = None,
+ ) -> nib.nifti1.Nifti1Image:
      """Reads and loads a NIfTI image from a file path.

      Args:
          path: The path to the NIfTI file.
          slice_index: Whether to read only a slice from the file.
+         orientation: The orientation code to reorient the nifti image.
+         orientation_reference: Path to a NIfTI file which
+             will be used as a reference for the orientation
+             transform in case the file missing the pixdim array
+             in the NIfTI header.
          use_storage_dtype: Whether to cast the raw image
              array to the inferred type.

      Returns:
-         The image as a numpy array (height, width, channels).
+         The NIfTI image class instance.

      Raises:
          FileExistsError: If the path does not exist or it is unreachable.
          ValueError: If the input channel is invalid for the image.
      """
      _utils.check_file(path)
-     image_data: nib.Nifti1Image = nib.load(path) # type: ignore
+     image_data = _load_nifti_silently(path)
      if slice_index is not None:
          image_data = image_data.slicer[:, :, slice_index : slice_index + 1]
+     if orientation:
+         image_data = _reorient(
+             image_data, orientation=orientation, reference_file=orientation_reference
+         )

-     image_array = image_data.get_fdata()
-     if use_storage_dtype:
-         image_array = image_array.astype(image_data.get_data_dtype())
+     return image_data
+
+
+ def nifti_to_array(nii: nib.Nifti1Image, use_storage_dtype: bool = True) -> npt.NDArray[Any]:
+     """Converts a NIfTI image to a numpy array.
+
+     Args:
+         nii: The input NIfTI image.
+         use_storage_dtype: Whether to cast the raw image
+             array to the inferred type.

+     Returns:
+         The image as a numpy array (height, width, channels).
+     """
+     image_array = nii.get_fdata()
+     if use_storage_dtype:
+         image_array = image_array.astype(nii.get_data_dtype())
      return image_array


@@ -53,7 +81,7 @@ def save_array_as_nifti(
          filename: The name to save the image like.
          dtype: The data type to save the image.
      """
-     nifti_image = nib.Nifti1Image(array, affine=np.eye(4), dtype=dtype) # type: ignore
+     nifti_image = nib.Nifti1Image(array, affine=np.eye(4), dtype=dtype)
      nifti_image.to_filename(filename)


@@ -71,8 +99,22 @@ def fetch_nifti_shape(path: str) -> Tuple[int]:
          ValueError: If the input channel is invalid for the image.
      """
      _utils.check_file(path)
-     image = nib.load(path) # type: ignore
-     return image.header.get_data_shape() # type: ignore
+     nii = _load_nifti_silently(path)
+     return nii.header.get_data_shape() # type: ignore
+
+
+ def fetch_nifti_orientation(path: str) -> npt.NDArray[Any]:
+     """Fetches the NIfTI image orientation.
+
+     Args:
+         path: The path to the NIfTI file.
+
+     Returns:
+         The array orientation.
+     """
+     _utils.check_file(path)
+     nii = _load_nifti_silently(path)
+     return nib.io_orientation(nii.affine)


  def fetch_nifti_axis_direction_code(path: str) -> str:
@@ -85,5 +127,43 @@ def fetch_nifti_axis_direction_code(path: str) -> str:
          The axis direction codes as string (e.g. "LAS").
      """
      _utils.check_file(path)
-     image_data: nib.Nifti1Image = nib.load(path) # type: ignore
+     image_data: nib.Nifti1Image = nib.load(path)
      return "".join(orientations.aff2axcodes(image_data.affine))
+
+
+ def _load_nifti_silently(path: str) -> nib.Nifti1Image:
+     """Reads a NIfTI image in silent mode."""
+     with SuppressLogs():
+         return nib.load(path)
+     raise ValueError(f"Failed to load NIfTI file: {path}")
+
+
+ def _reorient(
+     nii: nib.Nifti1Image,
+     /,
+     orientation: str | tuple[str, str, str] = "RAS",
+     reference_file: str | None = None,
+ ) -> nib.Nifti1Image:
+     """Reorients a NIfTI image to a specified orientation.
+
+     Args:
+         nii: The input NIfTI image.
+         orientation: Desired orientation expressed as a
+             three-character string (e.g., "RAS") or a tuple
+             (e.g., ("R", "A", "S")).
+         reference_file: Path to a reference NIfTI file whose
+             orientation should be used if the input image lacks
+             a valid affine transformation.
+
+     Returns:
+         The reoriented NIfTI image.
+     """
+     affine_matrix, _ = nii.get_qform(coded=True)
+     orig_ornt = (
+         fetch_nifti_orientation(reference_file)
+         if reference_file and affine_matrix is None
+         else nib.io_orientation(nii.affine)
+     )
+     targ_ornt = orientations.axcodes2ornt(orientation)
+     transform = orientations.ornt_transform(orig_ornt, targ_ornt)
+     return nii.as_reoriented(transform)
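
With these changes, read_nifti returns the nibabel image object (optionally reoriented via the new keyword arguments), and the array conversion moves into the new nifti_to_array helper; both are re-exported from eva.vision.utils.io, as the __init__.py hunk above shows. A minimal usage sketch follows, with a hypothetical file path:

from eva.vision.utils.io import nifti_to_array, read_nifti

# Hypothetical path: reorient to RAS while loading, then convert to a numpy
# array cast to the on-disk storage dtype.
nii = read_nifti("/data/scans/case_0001.nii.gz", orientation="RAS")
volume = nifti_to_array(nii, use_storage_dtype=True)
print(volume.shape, volume.dtype)
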
{kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: kaiko-eva
- Version: 0.1.8
+ Version: 0.2.1
  Summary: Evaluation Framework for oncology foundation models.
  Keywords: machine-learning,evaluation-framework,oncology,foundation-models
  Author-Email: Ioannis Gatopoulos <ioannis@kaiko.ai>, =?utf-8?q?Nicolas_K=C3=A4nzig?= <nicolas@kaiko.ai>, Roman Moser <roman@kaiko.ai>
@@ -241,6 +241,7 @@ Requires-Dist: scikit-image>=0.24.0; extra == "vision"
  Requires-Dist: imagesize>=1.4.1; extra == "vision"
  Requires-Dist: scipy>=1.14.0; extra == "vision"
  Requires-Dist: monai>=1.3.2; extra == "vision"
+ Requires-Dist: einops>=0.8.1; extra == "vision"
  Provides-Extra: all
  Requires-Dist: h5py>=3.10.0; extra == "all"
  Requires-Dist: nibabel>=4.0.1; extra == "all"
@@ -253,6 +254,7 @@ Requires-Dist: scikit-image>=0.24.0; extra == "all"
  Requires-Dist: imagesize>=1.4.1; extra == "all"
  Requires-Dist: scipy>=1.14.0; extra == "all"
  Requires-Dist: monai>=1.3.2; extra == "all"
+ Requires-Dist: einops>=0.8.1; extra == "all"
  Description-Content-Type: text/markdown

  <div align="center">
{kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/RECORD
@@ -23,16 +23,16 @@ eva/core/data/datamodules/call.py,sha256=jjj9w3UXYuQB-qyCcw1EZpRJW10OC1I3dvgvsuQ
  eva/core/data/datamodules/datamodule.py,sha256=_pK59oXDe53oDkmv6eoJUvfl44WlFkrbC8KXSRMs_20,5533
  eva/core/data/datamodules/schemas.py,sha256=rzcf3uow6T6slVSwxEGDVmpi3QUvkiDoT_gCF3aMAEE,2262
  eva/core/data/datasets/__init__.py,sha256=jWPxT3gjQjwS6HqVZAb7KhMgzgklPgHeH51iPxDh_Tg,493
- eva/core/data/datasets/base.py,sha256=BLzlRFuByhrGmI7NFwn7-Tw0vpSYSRhl2Y65iX4KaMw,2526
+ eva/core/data/datasets/base.py,sha256=w8c9Jh3DiXVfBEdLfWpdE190lPAeOFqCQlXl7RqlDOU,2671
  eva/core/data/datasets/classification/__init__.py,sha256=wJ2jD9YODftt-dMcMf0TbCjJt47qXYBKkD4-XXajvRQ,340
- eva/core/data/datasets/classification/embeddings.py,sha256=bgBVQyGxlxVCvGjmwNB52E360QwzrhGZQ44rPNFR4k8,1110
- eva/core/data/datasets/classification/multi_embeddings.py,sha256=j_o0MH2gwn_y3rNFXEUzNg6WErlG3Rq_vn5Og1Yk7J0,4603
+ eva/core/data/datasets/classification/embeddings.py,sha256=hBO6dIRHAhoCaYb3ANc9fgvdBjyQNKPTrIhjc9y8-Ys,1108
+ eva/core/data/datasets/classification/multi_embeddings.py,sha256=4hQy4741NDKqWCpm3kGq7aC28DF5gcwUuIpYhnbTyeM,4601
  eva/core/data/datasets/dataset.py,sha256=tA6Wd_7vqOE9GsukSWrgN9zaZKtKCHaE58SqIfWxWdg,124
- eva/core/data/datasets/embeddings.py,sha256=zNEO8KxqiOopcN_lTjwtEAm9xbnYDSjOE8X2-iZQIhU,5545
+ eva/core/data/datasets/embeddings.py,sha256=0y7Fa4zHr4Y0mcB9pyP26YaeTPtetwVf_n6fnkTcgp0,5541
  eva/core/data/datasets/typings.py,sha256=KSmckjsU64pGV-8uSLkD1HmvPKYlyypngiRx9yy4RDs,383
  eva/core/data/samplers/__init__.py,sha256=rRrKtg4l6YoziD3M0MkctQvX1NdRxaQa5sm6RHH_jXc,315
  eva/core/data/samplers/classification/__init__.py,sha256=gvv7BH4lG9JlkMaTOnaL0f4k1ghiVBgrH64bh1-rreQ,147
- eva/core/data/samplers/classification/balanced.py,sha256=YE6InKu12Jnu7AObi_gjKLzeHAFlQsbJVrggeA8X4DU,3517
+ eva/core/data/samplers/classification/balanced.py,sha256=MGTHt-WQaQKiJ5A1D_P6HJ6YzPTD-ERhc0R7rNMFqfg,3788
  eva/core/data/samplers/random.py,sha256=znl0Z9a-X-3attP-EH9jwwo83n40FXW_JzOLNZAml_c,1252
  eva/core/data/samplers/sampler.py,sha256=0DOLUzFoweqEubuO1A4bZBRU0AWFoWGWrO3pawRT-eI,877
  eva/core/data/splitting/__init__.py,sha256=VQJ8lfakbv6y2kAk3VDtITAvh7kcZo3H1JwJBc5jT08,198
@@ -55,6 +55,7 @@ eva/core/loggers/log/image.py,sha256=iUwntQCdRNLtkSdqu8CvV34l06zPYVo4NAW2gUeiJIM
  eva/core/loggers/log/parameters.py,sha256=7Xi-I5gQvEVv71d58bwdZ-Hb4287NXxaUyMfriq_KDU,1634
  eva/core/loggers/log/utils.py,sha256=k4Q7uKpAQctfDv0EEYPnPv6wt9LnckEeqGvbYSLfKO0,415
  eva/core/loggers/loggers.py,sha256=igHxdxJSotWSg6nEOKnfFuBszzblHgi8T7sBrE00FEs,166
+ eva/core/loggers/utils/wandb.py,sha256=GdwzEeFTAng5kl_kIVRxKL7rvwqyicQHSaZS8VSMXvU,747
  eva/core/losses/__init__.py,sha256=D-Mp9fUFFFoH9YYWntVH3B839zHS3GjFJzkbQThzj6Y,118
  eva/core/losses/cross_entropy.py,sha256=Sunz7ogDAJpGvZtuk9cAxKZJBO08CKIEvbCoewEvees,862
  eva/core/metrics/__init__.py,sha256=-9Qch4npEQpy3oF6NUhh9WinCmFBFe0D2eEYCR0S0xU,558
@@ -72,21 +73,22 @@ eva/core/metrics/structs/schemas.py,sha256=ZaSrx0j_NfIwT7joMUD1LyrKdAXTLaeSzWYTH
  eva/core/metrics/structs/typings.py,sha256=qJd-FiD2IhJgBeo8FyP0vpVUIH4RKb1k6zYvHtjUA04,388
  eva/core/models/__init__.py,sha256=T6Fo886LxMj-Y58_ylzkPkFSnFR2aISiMIbuO_weC4s,430
  eva/core/models/modules/__init__.py,sha256=QJWJ42BceXZBzDGgk5FHBcCaRrB9egTFKVF6gDsBYfM,255
- eva/core/models/modules/head.py,sha256=Wza8IFAXFl_DwVnNqYKproI06iS-oIuUlGjRE6jAKXw,5185
+ eva/core/models/modules/head.py,sha256=bZ45RBPi3N8sjvKyt2_TeKWI1eB6GyBeGzV6J11ERO8,5225
  eva/core/models/modules/inference.py,sha256=ih-0Rr2oNf2N6maiXPOW7XH5KVwUT1_MOxnJKOhJ1uQ,978
  eva/core/models/modules/module.py,sha256=LtjYxTZb7UY0owonmt_yQ5EySw3sX-xD9HLN2io8EK4,6697
- eva/core/models/modules/typings.py,sha256=yFMJCE4Nrfd8VEXU1zk8p6Sz5M7UslwitYPVC2OPLSY,776
+ eva/core/models/modules/typings.py,sha256=LPR8JdIid2gJZpjMG1FcH5OZ60JlFOj_LupIh__2k_8,803
  eva/core/models/modules/utils/__init__.py,sha256=ScLCHwQfzlg_UsHVi5sf_SavUkh9secwtRn_umC_qA8,325
  eva/core/models/modules/utils/batch_postprocess.py,sha256=RwnDcjJy3uvVirpgx_80Q2CUYKfJKipVwjyX7AF2CKw,3088
  eva/core/models/modules/utils/checkpoint.py,sha256=Zp42rtmjgUC4VUMwFyG5aar-E0Hc5i7qUsxkV7AVKkE,700
  eva/core/models/modules/utils/grad.py,sha256=bl8qb8g4Nhg1KAGfbEV_9HTKkoT0azRwfs9KGX9swGs,706
  eva/core/models/networks/__init__.py,sha256=yqx6UmG1Eg3vb1O_tnK_axnJWabEl9ULkDWiPN440Xc,85
  eva/core/models/networks/mlp.py,sha256=thk-x4pviE3fCaMW9k3I2Oe5_DxfC-CqUrtolvVdXug,2418
- eva/core/models/transforms/__init__.py,sha256=oYL3gNUUKZFViTu6GT1jVE2Kv1xFYPuyiYp-sErtVVg,257
+ eva/core/models/transforms/__init__.py,sha256=AOy_2VY3ITLRk2PMqe6xfErvV7V2_XsnPQwEMhovxOU,333
+ eva/core/models/transforms/as_discrete.py,sha256=1w2NmcLzEuyPhaoVXl6jZTdblk7DPf6W6gQ_qi96hQM,1901
  eva/core/models/transforms/extract_cls_features.py,sha256=tFRd4H-eGFIGCfZt6wuZGibDmAoNXKSsn15bBw0IDdc,1482
  eva/core/models/transforms/extract_patch_features.py,sha256=k50jTLPWxbfvciH9QZSzTAGqWwDSVpXAteme_Qg2d6E,2202
  eva/core/models/wrappers/__init__.py,sha256=jaiANQdbO-IPgH8U-Y0ftFsuuCAM5i5KuYRHauKw5k8,450
- eva/core/models/wrappers/_utils.py,sha256=HXUyGcILaa8GK31ViIHCKRU4f9kbjAPYQmhvN2N7jSc,957
+ eva/core/models/wrappers/_utils.py,sha256=ZWe9Ih_0kH5Wg_AQAtAn77LZ_CODAve5u3G12ifLNsc,4902
  eva/core/models/wrappers/base.py,sha256=xKMUSXk93wI67p_wmh7jujK-bxvIefO1noYaAJN_5Ak,1359
  eva/core/models/wrappers/from_function.py,sha256=_vKBwtfilCNCnOaJTofE6l5bM2K3qJ8GyBT-0CM5FXY,1831
  eva/core/models/wrappers/from_torchhub.py,sha256=OAImGKRG4pfDXHsoriykC_iiO8QvK3nAWnQCE0mIGuk,3285
@@ -96,8 +98,8 @@ eva/core/trainers/__init__.py,sha256=jhsKJF7HAae7EOiG3gKIAHH_h3dZlTE2JRcCHJmOzJc
  eva/core/trainers/_logging.py,sha256=gi4FqPy2GuVmh0WZY6mYwF7zMPvnoFA050B0XdCP6PU,2571
  eva/core/trainers/_recorder.py,sha256=y6i5hfXftWjeV3eQHmMjUOkWumnZ2QNv_u275LLmvPA,7702
  eva/core/trainers/_utils.py,sha256=M3h8lVhUmkeSiEXpX9hRdMvThGFCnTP15gv-hd1CZkc,321
- eva/core/trainers/functional.py,sha256=7OK2BNfX4_amHsyucr1ZNQRG3RgVKoagzd1zNN4nU3U,4472
- eva/core/trainers/trainer.py,sha256=HJNSfTG0k4j2ShqZzuUUSxnSu8NrwJ4karhvAto2Zn0,4229
+ eva/core/trainers/functional.py,sha256=rLtQZw8TcAa4NYIf901TmoQiJDNm4RGVLN-64nku3Jo,4445
+ eva/core/trainers/trainer.py,sha256=a3OwLWOZKDqxayrd0ugUmxJKyQx6XDb4GHtdL8-AEV0,4826
  eva/core/utils/__init__.py,sha256=cndVBvtYxEW7hykH39GCNVI86zkXNn8Lw2A0sUJHS04,237
  eva/core/utils/clone.py,sha256=qcThZOuAs1cs0uV3BL5eKeM2VIBjuRPBe1t-NiUFM5Y,569
  eva/core/utils/io/__init__.py,sha256=Py03AmoxhmTHkro6CzNps27uXKkXPzdA18mG97xHhWI,172
@@ -108,6 +110,7 @@ eva/core/utils/multiprocessing.py,sha256=BWX8AW_KPLgIIlbsPG1kYdtbHPx6Dklw13bu4u8
  eva/core/utils/operations.py,sha256=eoC_ScuHUMDCuk08j1bosiQZdPrgiIODqqheR9MtJHQ,641
  eva/core/utils/parser.py,sha256=2czmwEGJJ6PtmaD86s9I14P-_sek4DmDCkEatRGT5sI,725
  eva/core/utils/progress_bar.py,sha256=KvvsM_v3_Fhb4JvbEEPHb4PJMokg6mNLj-o6dkfzcMc,499
+ eva/core/utils/suppress_logs.py,sha256=pOk1076J0mKWn2lgDqEVC1g65FXhA_2IkC4LBEWhnwQ,902
  eva/core/utils/workers.py,sha256=hfx63M82qNg0Dwhre2tl53MnhtRsV7APaDONM9nhVB8,634
  eva/vision/__init__.py,sha256=oUZXFYjwtkWzi8An0uS5Xc84pLKintlXe2iti8zW6BQ,480
  eva/vision/callbacks/__init__.py,sha256=su1V73L0dDVYWSyvV_lnWbszDi2KikRraF7OsgeaKl4,139
@@ -115,42 +118,58 @@ eva/vision/callbacks/loggers/__init__.py,sha256=td1JRJbE08nsGIZdO64_yLC3FUuMDp0k
  eva/vision/callbacks/loggers/batch/__init__.py,sha256=DVYP7Aonbi4wg_ERHRj_8kb87Ee_75wRZzdduJ_icQk,173
  eva/vision/callbacks/loggers/batch/base.py,sha256=hcAd5iiHvjZ0DIf4Qt4ENT54D6ky_1OO4rKQZqeo-1k,3628
  eva/vision/callbacks/loggers/batch/segmentation.py,sha256=GYh2kfexW5pUZ0BdApYJI3e8xsuNkjIzkj5jnuKtHR4,6886
- eva/vision/data/__init__.py,sha256=aoKPmX8P2Q2k2W3nlq8vFU41FV6Sze-0SDuWtU-ETh4,111
- eva/vision/data/datasets/__init__.py,sha256=COhMRB9QJcjfbmfpRcYEztDwN9pl7IJNiH29pCZo4CA,908
+ eva/vision/data/__init__.py,sha256=zuLOC8ExyeQGlwib1LB70RedrTC9w0siOhFTQIRb0V8,137
+ eva/vision/data/dataloaders/__init__.py,sha256=7AOD_UF3hMokrGxJ2tbawH44ujQaesDsaW-3HWorYv8,128
+ eva/vision/data/dataloaders/collate_fn/__init__.py,sha256=GCvJaeILmAc_-lhGw8yzj2cC2KG4i1PvSWAyVzPKvVo,146
+ eva/vision/data/dataloaders/collate_fn/collection.py,sha256=45s9fKjVBnqfnuGWmJZMtt_DDGnfuf7qkWe0QmxXMKo,611
+ eva/vision/data/datasets/__init__.py,sha256=NW034jrOnpDwYBQfsTjWG6jDJY_dPWKV-cq37HkBm10,1014
  eva/vision/data/datasets/_utils.py,sha256=epPcaYE4w2_LtUKLLQJh6qQxUNVBe22JA06k4WUerYQ,1430
  eva/vision/data/datasets/_validators.py,sha256=77WZj8ewsuxUjW5WegJ-7zDuR6WdF5JbaOYdywhKIK4,2594
- eva/vision/data/datasets/classification/__init__.py,sha256=T2eg8k3xxd_Pdbrr7TGYICSo7BVOTMOs1bL-rLnMmro,693
- eva/vision/data/datasets/classification/bach.py,sha256=kZba1dQlJWZAmA03akJ4fVUU-y9W8ezOwlgs2zL-QrE,5432
- eva/vision/data/datasets/classification/base.py,sha256=Ci0HoOhOuHwICTi1TUGA1PwZe642RywolTVfMhKrFHk,2772
- eva/vision/data/datasets/classification/camelyon16.py,sha256=sChvRo0jbOVUMJvfpsFxgFOsYgci3v9wjeMBEjUysJU,8287
- eva/vision/data/datasets/classification/crc.py,sha256=8qjz9OklLg1gAr46RKZdlClmlO9awwfp0dkTs8v5jTE,5670
- eva/vision/data/datasets/classification/mhist.py,sha256=xzShPncSfAV6Q5ojfimeq748MfA0n77fGWa9EpdRzYU,3055
- eva/vision/data/datasets/classification/panda.py,sha256=BU_gDoX3ZSDUugwaO2n0XSZhzseK1rkPoHMRoJLGL84,7303
- eva/vision/data/datasets/classification/patch_camelyon.py,sha256=fElKteZKx4M6AjylnhhgNH1jewHegWc1K8h4FFKp0gE,7171
- eva/vision/data/datasets/classification/wsi.py,sha256=x3mQ8iwyiSdfQOjJuV7_cd8-LRjjhY9tjtzuD8O87Lg,4099
- eva/vision/data/datasets/segmentation/__init__.py,sha256=hGNr7BM_StxvmlOKWWfHp615qgsrB6BB3qMOiYhE0Og,791
+ eva/vision/data/datasets/classification/__init__.py,sha256=5fOGZxKGPeMCf3Jd9qAOYADPrkZnYg97_QE4DC79AMI,1074
+ eva/vision/data/datasets/classification/bach.py,sha256=Qzkb0aUNR8yMXwjDx8RmIfvPYVmlUW3dippMKrIVqzU,5410
+ eva/vision/data/datasets/classification/bracs.py,sha256=FYe33SmdlFOAl1Ef10uXN7Y8kvlbKuWBqIycFPHtMMU,3325
+ eva/vision/data/datasets/classification/breakhis.py,sha256=e01gguDCHvp6U18exCm5svWr8EpM_HLbLAq9KJifkpo,6883
+ eva/vision/data/datasets/classification/camelyon16.py,sha256=7E0ju4cctUyprBK063TVXLBN4Fp6cKMICoquv3e5JhQ,8261
+ eva/vision/data/datasets/classification/crc.py,sha256=sv18Lw4iUqGkYXEQN-kyZV_Foc_X15praVR4nN_klkg,5648
+ eva/vision/data/datasets/classification/gleason_arvaniti.py,sha256=z1OQlxZYx-n3S2wcfu-GuchySRw0E70PURJDsvejFjE,5871
+ eva/vision/data/datasets/classification/mhist.py,sha256=I-guWIucQZBHdSx-TWP24NXHf9IA9lU4hyfPZbJop0g,3033
+ eva/vision/data/datasets/classification/panda.py,sha256=HVfCvByyajdo5o_waqTpzZWCbQXQqPjvvyS5I0NAvns,7277
+ eva/vision/data/datasets/classification/patch_camelyon.py,sha256=1yXkfP680qxkQUFAPKRFbZv0cHAFx23s2vvT9th2nKM,7149
+ eva/vision/data/datasets/classification/unitopatho.py,sha256=IO3msEsuOnmdcYZxF-eBpo0K97y54rWFmCb_KxuF4bk,5129
+ eva/vision/data/datasets/classification/wsi.py,sha256=YMGxU8ECjudizt_uXUevuPS8k66HxtEQ7M2IZJmL6kE,4079
+ eva/vision/data/datasets/segmentation/__init__.py,sha256=YA7qx4B-pfsILfONa2AfIQHKzDnv8l0sHwjsSXa5-vQ,765
  eva/vision/data/datasets/segmentation/_total_segmentator.py,sha256=DTaQaAisY7j1h0-zYk1_81Sr4b3D9PTMieYX0PMPtIc,3127
- eva/vision/data/datasets/segmentation/_utils.py,sha256=ps1qpuEkPgvwUw6H-KKaLaYqDBGmN7dNGk3bnS1l6sI,1261
- eva/vision/data/datasets/segmentation/base.py,sha256=11IMODMB7KJ8Bs5p7MyOsBXCyPFJXfYcDLAIMitUwEk,3023
- eva/vision/data/datasets/segmentation/bcss.py,sha256=NHjHd1tgIfIw6TxsZTGb63iMEwXFbWX_JAwRT5WVsj4,8274
- eva/vision/data/datasets/segmentation/consep.py,sha256=Pw3LvVIK2scj_ys7rVNRb9B8snP8HlDIAbaI3v6ObQk,6056
- eva/vision/data/datasets/segmentation/embeddings.py,sha256=0KaadzPxN6OrKNnFu3YsGBFkG6XqqvkOZYUhERPwL4A,1220
- eva/vision/data/datasets/segmentation/lits.py,sha256=cBRU5lkiTMAi_ZwyDQUN3ODyXUlLtuMWFLPDajcZnOo,7194
- eva/vision/data/datasets/segmentation/lits_balanced.py,sha256=s5kPfqB41Vkcm5Jh34mLAO0NweMSIlV2fMXJsRjJsF8,3384
- eva/vision/data/datasets/segmentation/monusac.py,sha256=OTWHAD1b48WeT6phVf466w_nJUOGdBCGKWiWw68PAdw,8423
- eva/vision/data/datasets/segmentation/total_segmentator_2d.py,sha256=A6A_lXmGDfV_9Mcp9KSgN6K8Q0T8XXjv6lT4I7iLUcw,16833
+ eva/vision/data/datasets/segmentation/_utils.py,sha256=aXUHrnbefP6-OgSvDQHqssFKhUwETul_8aosqYiOfm8,3065
+ eva/vision/data/datasets/segmentation/bcss.py,sha256=rqk6VqK0QCHLFnMnDuHd1JPJVK5_C6WnsmnNSKBw6Uo,8230
+ eva/vision/data/datasets/segmentation/btcv.py,sha256=GNgr8pLx7uvZ2pxnYZ8N9SfB9luduMTM9IQ1OHPgBxI,8257
+ eva/vision/data/datasets/segmentation/consep.py,sha256=SBH1vD3RjFNRMVeo07d2PqSAInZsWHR2d3xCpCoDVpM,6014
+ eva/vision/data/datasets/segmentation/embeddings.py,sha256=RsTuAwGEJPnWPY7q3pwcjmqtEj0wtRBNRBD4a0RcGtA,1218
+ eva/vision/data/datasets/segmentation/lits.py,sha256=AsKsTQZBNXlYU_UllBrdr04rS8K4TDkG_vqR-aVr5ik,7267
+ eva/vision/data/datasets/segmentation/lits_balanced.py,sha256=OQ2AK6-wLE0uMvgQJtfBJTUJqS_WBfmsJXgBfe4gU8A,3451
+ eva/vision/data/datasets/segmentation/monusac.py,sha256=iv9-MFaTsGfGV1u6_lQNcSEeSpmVBDQC1Oa123iEtu0,8410
+ eva/vision/data/datasets/segmentation/total_segmentator_2d.py,sha256=3cWpJkZmJ7IUJhigw69YLFOg2_-yzXSLGXqWVPUsn8Y,16978
  eva/vision/data/datasets/structs.py,sha256=RaTDW-B36PumcR5gymhCiX-r8GiKqIFcjqoEEjjFyUE,389
- eva/vision/data/datasets/vision.py,sha256=RHcBBNTd5u1OB6l2iA5V8pv8kjZsTehi9At7J-FVqr4,657
- eva/vision/data/datasets/wsi.py,sha256=-rypkcd6CPBM_oPuLszUx9q4zSPzeO1H6JKqvOtLlHw,8282
- eva/vision/data/transforms/__init__.py,sha256=WeFii6JwB0CiOOGLR3tkgAoKgRdmOf2lm0Dadixn8OI,260
- eva/vision/data/transforms/common/__init__.py,sha256=6tvxUgb8wfhgvqejMVulwqssHTJLF7f4_vpf44kxgxY,234
- eva/vision/data/transforms/common/resize_and_clamp.py,sha256=f9-YIX0S9GMAXHP7TWlyRlGfZIVvHgoBHqQ8PzaKbKs,1736
+ eva/vision/data/datasets/vision.py,sha256=-_WRiyICMgqABR6Ay_RKBMfsPGwgx9MQfCA7WChHo24,3219
+ eva/vision/data/datasets/wsi.py,sha256=dEAT_Si_Qb3qdSovUPeoiWeoPb7m-NGYqq44e3UXHk8,8384
+ eva/vision/data/transforms/__init__.py,sha256=Bv1aPvjahteAZzVGSuxzHz2LRwa63NV7IcoPzKUt_fY,720
+ eva/vision/data/transforms/common/__init__.py,sha256=ZHzpdr-THc9CgFFbAVMWUiZrUNUiHnCDM8GYhM7tMfU,138
  eva/vision/data/transforms/common/resize_and_crop.py,sha256=GI1HTkbJ9qg4p8c6vk_XkXO0Qi6mBeUeiZIA0jVtmAw,1360
- eva/vision/data/transforms/normalization/__init__.py,sha256=0MZ1KphOr6LxBCOBn7LZ8H8M6-0CuFqvynTON5pedxg,240
- eva/vision/data/transforms/normalization/clamp.py,sha256=B-QyMCFEJPiJagpPr7JhrzOJMVuUB-D_qrmjvthJTyE,1412
- eva/vision/data/transforms/normalization/functional/__init__.py,sha256=ICg611_heHCiNxTNoteFX2MTav59fv7vLkTM8c4eS3w,194
- eva/vision/data/transforms/normalization/functional/rescale_intensity.py,sha256=ihJdDRogrJbvFpb8LcPdRzCFWdlMcBTpWD5RY2MOPbE,844
- eva/vision/data/transforms/normalization/rescale_intensity.py,sha256=BNzDeyzT0GG_FBtlZauCL-K4E_KVWH9SzTSN1SsFNJw,1756
+ eva/vision/data/transforms/croppad/__init__.py,sha256=d36WGe9x39p-d7VymRM29qdquv8YEa0RfsTfwt7Cou4,375
+ eva/vision/data/transforms/croppad/crop_foreground.py,sha256=3o27nOgxfRo8ap45lpmnaiAIZ08kdyp14vYpr4BC8zc,4865
+ eva/vision/data/transforms/croppad/rand_crop_by_pos_neg_label.py,sha256=8CwMYAOQgOIb1Uw1jc219aqY3s2tCWd6r-2nU7kqOLc,5538
+ eva/vision/data/transforms/croppad/spatial_pad.py,sha256=j5V2vvgGcf75GzGyAT7mGgpvlEOS2BnAcThRdt7Und4,2857
+ eva/vision/data/transforms/intensity/__init__.py,sha256=mNp6pi0pnHcA24kQuiGHzMb4XLRaR0Lgi-Vb7Nl-Aoo,408
+ eva/vision/data/transforms/intensity/rand_scale_intensity.py,sha256=DDcFWTmq5UbwISO9qGIPOQJ72rx7JQWtVi2OxggLzyE,2041
+ eva/vision/data/transforms/intensity/rand_shift_intensity.py,sha256=9YNREhRoCzLOt2C21daum62cbB53ZRcYOSuSW_Jz7eQ,1974
+ eva/vision/data/transforms/intensity/scale_intensity_ranged.py,sha256=VLvYZYG6jQCuR5poJsAlhIFjw6VjPEpcDPKBlJTjYBM,1873
+ eva/vision/data/transforms/spatial/__init__.py,sha256=k7C_p4fMZd7A00ikldAMsprYDedKrlMjKQB6BLA5InA,284
+ eva/vision/data/transforms/spatial/flip.py,sha256=jfRc-wPBvG58OtCNU3GrOkb57kcRddRqpwcAdCB0_No,2553
+ eva/vision/data/transforms/spatial/rotate.py,sha256=FpMTAPWtgrG10yQ3R1_Ii6obPcn3boNWOuLhsblxUbQ,1793
+ eva/vision/data/transforms/spatial/spacing.py,sha256=T1UhqK-OhhbLQxzejMyI8BQzYRF44PNc02Qap4nk1hY,2695
+ eva/vision/data/transforms/utility/__init__.py,sha256=TjncS2aOgRJwjjRuIvmr4eRz2nKVg6b76tThp4UlzII,163
+ eva/vision/data/transforms/utility/ensure_channel_first.py,sha256=jpnV7oWel1ZSL2VUf3wUdbB8xM2OFD8R6xpHcPCJVgw,1945
+ eva/vision/data/tv_tensors/__init__.py,sha256=qla_QYWN52vP0IlTmHlTZF4kLh9xj-Zy-WxQgXakYyk,125
+ eva/vision/data/tv_tensors/volume.py,sha256=VlWTIbswNv-aUqEWd1EJgoqEH60d-gNALPG815TD_W8,2381
  eva/vision/data/wsi/__init__.py,sha256=vfSfyogsj4OS1sGKfsYWyj2O5ZMT9iqkc1lvcuZJVGI,422
  eva/vision/data/wsi/backends/__init__.py,sha256=wX7cjeT7ktX8sH6lRDEEU5cgRKLH6RhPyey16aJthJ4,2251
  eva/vision/data/wsi/backends/base.py,sha256=0oFzMc3zklLyqyD_kzDKekydeFyDReqjBBj1qJLdM9Y,4094
@@ -172,45 +191,47 @@ eva/vision/metrics/__init__.py,sha256=zXOc1Idgfk86CGE5yBHn3B22iD5tRyfl4H-kTSB2dC
  eva/vision/metrics/defaults/__init__.py,sha256=ncQ9uH5q5SpfalyPX6dINPRLk34HLw6z9u8ny_HHbFQ,174
  eva/vision/metrics/defaults/segmentation/__init__.py,sha256=ve6dwyfhJGYBYKS6l6OySCBs32JnEBFnvhAyNvj-Uqo,191
  eva/vision/metrics/defaults/segmentation/multiclass.py,sha256=MUBp-PIyiJB2VVV_NintRrP7Ha2lJ75_3xvqSdeDYwE,2855
- eva/vision/metrics/segmentation/BUILD,sha256=Nf7BYWWe1USoFEIsIiEVZ8sa05J5FPkMJ-UIMDLrU8o,17
  eva/vision/metrics/segmentation/__init__.py,sha256=7iz3fFNd-iBuNyxdeSfsgp6D7oZtmPsbyA0ZKRzzRCw,402
  eva/vision/metrics/segmentation/_utils.py,sha256=_ubv2sP1-f_dLKy8Y4wLkj5ed56fAFLURfv1shQWVcs,2402
  eva/vision/metrics/segmentation/dice.py,sha256=H_U6XSZcieX0xb6aptxxW1s-Jshs8Lp4P1SAwjdwntM,2905
  eva/vision/metrics/segmentation/generalized_dice.py,sha256=T57An-lBVefnlv6dIWVRNghFxy0e0K470xwla0TbCSk,2436
  eva/vision/metrics/segmentation/mean_iou.py,sha256=2PjqTa_VAtnW4nxHzT93uBKgnml7INU-wt_jR68RM54,2104
- eva/vision/metrics/segmentation/monai_dice.py,sha256=febnvA2gtTyydLZMwjQBS1zq2NjZcsXf0EcV0eRn8Aw,2117
+ eva/vision/metrics/segmentation/monai_dice.py,sha256=I_DX6r4y5d9QzxI3WyMV14uwt1uqrKlRqbNHqGMtmy0,2421
  eva/vision/metrics/wrappers/__init__.py,sha256=V4z3hradMa6CQgTkk1bc2cbZzCgcoIYw7-hufMK3D_4,128
  eva/vision/metrics/wrappers/monai.py,sha256=FNa1yHN2U3vO6BGqS0BFm8uJAL6DCzSE4XOFCV4aBjg,885
  eva/vision/models/__init__.py,sha256=a-P6JL73A3miHQnqgqUz07XtVmQB_o4DqPImk5rEATo,275
  eva/vision/models/modules/__init__.py,sha256=vaM_V6OF2s0lYjralP8dzv8mAtv_xIMZItfXgz0NZg8,156
- eva/vision/models/modules/semantic_segmentation.py,sha256=PSeqm5h6YgbzQ0jA9lUexGYUE3ehfWx-LH1NgZ7cGhw,7300
+ eva/vision/models/modules/semantic_segmentation.py,sha256=eXRx7wXKDLqMYHGj9IH_6WxlQNYaYEU6J70soVFedp0,7629
  eva/vision/models/networks/__init__.py,sha256=j43IurizNlAyKPH2jwDHaeq49L2QvwbHWqUaptA1mG4,100
  eva/vision/models/networks/abmil.py,sha256=N1eH4fn1nXmgXurSQyQIxxonv7nsqeeuPWaQSHeltfs,6796
- eva/vision/models/networks/backbones/__init__.py,sha256=CvK0sHKufUq4chwX-p2cFVBZFReMuwmeHFTG5LUA6CM,318
+ eva/vision/models/networks/backbones/__init__.py,sha256=mvYVtmJOvYLCXDX52hP6dzQxj9cQikwSeBZvEDNyNmU,347
  eva/vision/models/networks/backbones/_utils.py,sha256=V7xeod4mElEuuO1TRW0xJE051cUyS1Saraw3-KcK1Mw,1667
- eva/vision/models/networks/backbones/pathology/__init__.py,sha256=goR59h8bfzd-Wa3rxPPdaSlAOH_df8SHBkTSKi08TS8,1147
- eva/vision/models/networks/backbones/pathology/bioptimus.py,sha256=wUSKjYgxcRV3FRHGaPwF1uRAQcGO0rHNHGmK1QDJXk4,991
+ eva/vision/models/networks/backbones/pathology/__init__.py,sha256=JZ1mhKm4w89JTrXDfTM02OyFWtDuxRhhvpytDk_t500,1386
+ eva/vision/models/networks/backbones/pathology/bioptimus.py,sha256=NrS0WJqiJKjDYT3odQGLPgnzMuCbJfWoW1Dal-L9F50,2626
  eva/vision/models/networks/backbones/pathology/gigapath.py,sha256=mfGXtKhY7XLpKQQAFNVZYsM-aeHCEbOVUrxpAEOr-l8,955
  eva/vision/models/networks/backbones/pathology/histai.py,sha256=X_we3U7GK91RrXyOX2PJB-YFDF2ozdL2fzZhNxm9SVU,1914
- eva/vision/models/networks/backbones/pathology/kaiko.py,sha256=GSdBG4WXrs1PWB2hr-sy_dFe2riwpPKwHx71esDoVfE,3952
+ eva/vision/models/networks/backbones/pathology/hkust.py,sha256=bZpzx7EvK4CVefNnJmyz-2Ta-WdYDwEDzf-zWoZkoCQ,2308
+ eva/vision/models/networks/backbones/pathology/kaiko.py,sha256=lVzgWhgFn1iOlfSSxsX2cH16rrFQFjzdaF6_HS1y-6c,4517
  eva/vision/models/networks/backbones/pathology/lunit.py,sha256=ku4lr9pWeeHatHN4x4OVgwlve9sVqiRqIbgI0PXLiqg,2160
- eva/vision/models/networks/backbones/pathology/mahmood.py,sha256=me8DXf9nsEegDmltP8f7ZnG89xYVEKzZLKfVzMZjWDs,1832
+ eva/vision/models/networks/backbones/pathology/mahmood.py,sha256=VYoVWrMNkoaEqa0och-GbwGd0VISQmbtzk1dSBZ1M0I,2464
  eva/vision/models/networks/backbones/pathology/owkin.py,sha256=uWJV5fgY7UZX6ilgGzkPY9fnlOiF03W7E8rc9TmlHGg,1231
  eva/vision/models/networks/backbones/pathology/paige.py,sha256=MjOLgdEKk8tdAIpCiHelasGwPE7xgzaooW6EE7IsuEE,1642
+ eva/vision/models/networks/backbones/radiology/__init__.py,sha256=pD8ijQZRaX_Lu3tPBV73qUVaAURDrB_2pEyyBdRZmis,294
+ eva/vision/models/networks/backbones/radiology/swin_unetr.py,sha256=n5lJkoKjxKogs5Q_XuKh7Q5J96Bgln5W4ShL-VwSZXs,7976
+ eva/vision/models/networks/backbones/radiology/voco.py,sha256=sICZnsxQYnqYEmauhB6CBmaqpzBoAB6CpXJjNm5FesI,2464
  eva/vision/models/networks/backbones/registry.py,sha256=anjILtEHHB6Ltwiw22h1bsgWtIjh_l5_fkPh87K7-d0,1631
  eva/vision/models/networks/backbones/timm/__init__.py,sha256=cZH3av9gIZcvEVD0rwKsI-MEq7zPqaW4dQ0E05CksvQ,128
  eva/vision/models/networks/backbones/timm/backbones.py,sha256=fCTiwqU6NhQ-ccAMzmpPDddXkFzRAB3mw4lcQ9um_PU,1646
- eva/vision/models/networks/backbones/torchhub/__init__.py,sha256=zBLJBvkwKJ1jD7M3Wt5BE6Cx-R8G2YRoyPG7p2V-3nQ,147
- eva/vision/models/networks/backbones/torchhub/backbones.py,sha256=hgCCoP8AdRSsli0w9a_PRNB-UR36-SLLhBIW0BFrkdE,1911
  eva/vision/models/networks/backbones/universal/__init__.py,sha256=MAlkALSJ2_w6spSbB7NmKlL0Jsk1YKEycatdI0xO0_I,252
  eva/vision/models/networks/backbones/universal/vit.py,sha256=kpUCoXpefR34hRNlQDFK9lGr4oqS8Mn5vTLKWZ-gaOs,1820
  eva/vision/models/networks/decoders/__init__.py,sha256=RXFWmoYw2i6E9VOUCJmU8c72icHannVuo-cUKy6fnLM,200
- eva/vision/models/networks/decoders/segmentation/__init__.py,sha256=N6jrhXHj0P7i7RptZbZ-JFehT2BM7meFyNIK0owAkaE,517
+ eva/vision/models/networks/decoders/segmentation/__init__.py,sha256=SqmxtzxwBRF8g2hsiqe0o3Nr0HFK97azTnWLyqsYigY,652
  eva/vision/models/networks/decoders/segmentation/base.py,sha256=b2TIJKiJR9vejVRpNyedMJLPTrpHhAEXvco8atb9TPU,411
  eva/vision/models/networks/decoders/segmentation/decoder2d.py,sha256=A7vz0LJ_YweftpKeEBJm0Y3N7hbVLDSIkAajaQv1UgE,4456
- eva/vision/models/networks/decoders/segmentation/linear.py,sha256=-i9RVaKM1UsB3AXDDKdMmHiD7y2sr5HfF-WvkB47Fhw,4743
- eva/vision/models/networks/decoders/segmentation/semantic/__init__.py,sha256=Ubs8GXyQpEHs26JUeUuiVP3jfn47eiBZM_UVbu749XU,398
+ eva/vision/models/networks/decoders/segmentation/linear.py,sha256=PZeEIH0ybgxgIKtmcflh8jsARo5NQqkgoGbpAZd7yj4,4650
+ eva/vision/models/networks/decoders/segmentation/semantic/__init__.py,sha256=2yol7W1ARXL-Ge7gYxjUzaGTjH6nfMBlNqQJHprEWGg,539
  eva/vision/models/networks/decoders/segmentation/semantic/common.py,sha256=fPTb0T-2FiOU-jT81ynASKaW7fJiRk6vQjuPkzHOluc,2530
+ eva/vision/models/networks/decoders/segmentation/semantic/swin_unetr.py,sha256=ODUpnJrpDQl0m8CC2SPnE_lpFflzS0GSiCZOmrjL6uQ,3373
  eva/vision/models/networks/decoders/segmentation/semantic/with_image.py,sha256=I5PyGKKo8DcXYcw4xlCFzuavRJNRrzGT-szpDidMPXI,3516
  eva/vision/models/networks/decoders/segmentation/typings.py,sha256=8zAqIJLlQdCjsx-Dl4lnF4BB1VxTg_AyIquBVwpZlHg,537
  eva/vision/models/wrappers/__init__.py,sha256=ogmr-eeVuGaOCcsuxSp6PGyauP2QqWTb8dGTtbC7lRU,210
@@ -219,14 +240,14 @@ eva/vision/models/wrappers/from_timm.py,sha256=Z38Nb1i6OPKkgvFZOvGx-O3AZQuscf1zR
  eva/vision/utils/__init__.py,sha256=vaUovprE743SmyFH8l6uk4pYSWpI4zxn7lN0EwePTJI,96
  eva/vision/utils/colormap.py,sha256=sP1F0JCX3abZfFgdxEjLJO-LhNYKjXZvXxs03ZgrEvI,2876
  eva/vision/utils/convert.py,sha256=fqGmKrg5-JJLrTkTXB4YDcWTudXPrO1gGjsckVRUesU,1881
- eva/vision/utils/io/__init__.py,sha256=XGJ_W94DVEYXJ_tVpr_20NMpR5JLWEWHGF3v9Low79A,610
+ eva/vision/utils/io/__init__.py,sha256=Oa4CjmqXN0wzkG1PW79zSsHrN1jlI7_VJ5NSXLKx0eA,652
  eva/vision/utils/io/_utils.py,sha256=JzOt7Frj6ScF_aNjFtfHBn4ROnl6NhUZucmQhLc4Cww,768
  eva/vision/utils/io/image.py,sha256=IdOkr5MYqhYHz8U9drZ7wULTM3YHwCWSjZlu_Qdl4GQ,2053
  eva/vision/utils/io/mat.py,sha256=qpGifyjmpE0Xhv567Si7-zxKrgkgE0sywP70cHiLFGU,808
- eva/vision/utils/io/nifti.py,sha256=4YoKjKuoNdE0qY7tYB_WlnSsYAx2oBzZRZXczc_8HAU,2555
+ eva/vision/utils/io/nifti.py,sha256=TFMgNhLqIK3sl3RjIRXEABM7FmSQjqVOwk1vXkuvX2w,4983
  eva/vision/utils/io/text.py,sha256=qYgfo_ZaDZWfG02NkVVYzo5QFySqdCCz5uLA9d-zXtI,701
- kaiko_eva-0.1.8.dist-info/METADATA,sha256=rjAETMOw3T9oYRqiWvzsedU0hFHFa6ivyZfiS0WCjKA,24899
- kaiko_eva-0.1.8.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
- kaiko_eva-0.1.8.dist-info/entry_points.txt,sha256=6CSLu9bmQYJSXEg8gbOzRhxH0AGs75BB-vPm3VvfcNE,88
- kaiko_eva-0.1.8.dist-info/licenses/LICENSE,sha256=e6AEzr7j_R-PYr2qLO-JwLn8y70jbVD3U2mxbRmwcI4,11338
- kaiko_eva-0.1.8.dist-info/RECORD,,
+ kaiko_eva-0.2.1.dist-info/METADATA,sha256=78-RgtBLumKmrWLlv6Q8iJ6JU-InxPCudfJcuy7pVUQ,24992
+ kaiko_eva-0.2.1.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+ kaiko_eva-0.2.1.dist-info/entry_points.txt,sha256=6CSLu9bmQYJSXEg8gbOzRhxH0AGs75BB-vPm3VvfcNE,88
+ kaiko_eva-0.2.1.dist-info/licenses/LICENSE,sha256=e6AEzr7j_R-PYr2qLO-JwLn8y70jbVD3U2mxbRmwcI4,11338
+ kaiko_eva-0.2.1.dist-info/RECORD,,
{kaiko_eva-0.1.8.dist-info → kaiko_eva-0.2.1.dist-info}/WHEEL
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: pdm-backend (2.4.3)
+ Generator: pdm-backend (2.4.4)
  Root-Is-Purelib: true
  Tag: py3-none-any