kaiko-eva 0.0.1.tar.gz → 0.0.2.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kaiko-eva might be problematic.
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/PKG-INFO +51 -25
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/README.md +42 -18
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/pyproject.toml +6 -4
- kaiko_eva-0.0.2/src/eva/.DS_Store +0 -0
- kaiko_eva-0.0.2/src/eva/core/callbacks/__init__.py +6 -0
- kaiko_eva-0.0.2/src/eva/core/callbacks/config.py +143 -0
- kaiko_eva-0.0.2/src/eva/core/data/datasets/__init__.py +15 -0
- kaiko_eva-0.0.2/src/eva/core/data/datasets/embeddings/__init__.py +13 -0
- kaiko_eva-0.0.1/src/eva/core/data/datasets/classification/embeddings.py → kaiko_eva-0.0.2/src/eva/core/data/datasets/embeddings/base.py +41 -43
- kaiko_eva-0.0.2/src/eva/core/data/datasets/embeddings/classification/__init__.py +10 -0
- kaiko_eva-0.0.2/src/eva/core/data/datasets/embeddings/classification/embeddings.py +66 -0
- kaiko_eva-0.0.2/src/eva/core/data/datasets/embeddings/classification/multi_embeddings.py +106 -0
- kaiko_eva-0.0.2/src/eva/core/data/transforms/__init__.py +7 -0
- kaiko_eva-0.0.2/src/eva/core/data/transforms/padding/__init__.py +5 -0
- kaiko_eva-0.0.2/src/eva/core/data/transforms/padding/pad_2d_tensor.py +38 -0
- kaiko_eva-0.0.2/src/eva/core/data/transforms/sampling/__init__.py +5 -0
- kaiko_eva-0.0.2/src/eva/core/data/transforms/sampling/sample_from_axis.py +40 -0
- kaiko_eva-0.0.2/src/eva/core/loggers/__init__.py +7 -0
- kaiko_eva-0.0.2/src/eva/core/loggers/dummy.py +38 -0
- kaiko_eva-0.0.2/src/eva/core/loggers/experimental_loggers.py +8 -0
- kaiko_eva-0.0.2/src/eva/core/loggers/log/__init__.py +5 -0
- kaiko_eva-0.0.2/src/eva/core/loggers/log/parameters.py +64 -0
- kaiko_eva-0.0.2/src/eva/core/loggers/log/utils.py +13 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/head.py +6 -11
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/module.py +25 -1
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/trainers/_recorder.py +69 -7
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/trainers/functional.py +22 -5
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/trainers/trainer.py +20 -6
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/__init__.py +1 -8
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/_utils.py +3 -3
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/classification/__init__.py +1 -8
- kaiko_eva-0.0.2/src/eva/vision/data/datasets/segmentation/base.py +97 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/segmentation/total_segmentator.py +88 -69
- kaiko_eva-0.0.2/src/eva/vision/models/.DS_Store +0 -0
- kaiko_eva-0.0.2/src/eva/vision/models/networks/.DS_Store +0 -0
- kaiko_eva-0.0.2/src/eva/vision/utils/convert.py +24 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/utils/io/nifti.py +10 -6
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_0_shape_8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_1_shape_8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_2_shape_8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_3_shape_8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_4_shape_1x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_5_shape_1x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_6_shape_1x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_7_shape_1x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/embeddings/manifest.csv +9 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_0_shape_6x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_1_shape_3x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_2_shape_1x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_3_shape_2x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_4_shape_5x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_5_shape_3x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_6_shape_1x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_7_shape_6x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_8_shape_2x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_9_shape_5x8.pt +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/core/datasets/multi-embeddings/manifest.csv +11 -0
- kaiko_eva-0.0.2/tests/eva/assets/images/random_bgr_32x32.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/images/random_grayscale_32x32.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b001.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b002.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b003.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b004.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b005.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b006.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is001.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is002.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is003.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is004.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is005.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is006.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv001.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv002.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv003.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv004.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv005.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv006.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n001.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n002.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n003.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n004.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n005.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n006.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/ADI/ADI-SIHVHHPH.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/ADI/ADI-SIHWWQMY.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/BACK/BACK-YYYHKNMK.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/BACK/BACK-YYYMDTNW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/DEB/DEB-YYYRSHLP.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/DEB/DEB-YYYTCTDR.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/LYM/LYM-YYWRPGDD.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/LYM/LYM-YYYTKMWW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUC/MUC-YYYNWSAM.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUC/MUC-YYYRQDLW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUS/MUS-YYYNVQVQ.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUS/MUS-YYYRWWNH.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/NORM/NORM-YYTTIRVD.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/NORM/NORM-YYVAFTKA.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/STR/STR-YYYHNSSM.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/STR/STR-YYYWVWFG.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/TUM/TUM-YYYSGWYW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/TUM/TUM-YYYYQFVN.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/ADI/ADI-SIHVHHPH.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/ADI/ADI-SIHWWQMY.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/BACK/BACK-YYYHKNMK.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/BACK/BACK-YYYMDTNW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/DEB/DEB-YYYRSHLP.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/DEB/DEB-YYYTCTDR.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/LYM/LYM-YYWRPGDD.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/LYM/LYM-YYYTKMWW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUC/MUC-YYYNWSAM.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUC/MUC-YYYRQDLW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUS/MUS-YYYNVQVQ.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUS/MUS-YYYRWWNH.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/NORM/NORM-YYTTIRVD.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/NORM/NORM-YYVAFTKA.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/STR/STR-YYYHNSSM.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/STR/STR-YYYWVWFG.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/TUM/TUM-YYYSGWYW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/TUM/TUM-YYYYQFVN.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/ADI/ADI-SIHVHHPH.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/ADI/ADI-SIHWWQMY.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/BACK/BACK-YYYHKNMK.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/BACK/BACK-YYYMDTNW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/DEB/DEB-YYYRSHLP.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/DEB/DEB-YYYTCTDR.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/LYM/LYM-YYWRPGDD.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/LYM/LYM-YYYTKMWW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUC/MUC-YYYNWSAM.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUC/MUC-YYYRQDLW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUS/MUS-YYYNVQVQ.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUS/MUS-YYYRWWNH.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/NORM/NORM-YYTTIRVD.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/NORM/NORM-YYVAFTKA.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/STR/STR-YYYHNSSM.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/STR/STR-YYYWVWFG.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/TUM/TUM-YYYSGWYW.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/TUM/TUM-YYYYQFVN.tif +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/annotations.csv +8 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aaa.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aab.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aac.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aae.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aaf.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aag.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/mhist/images/MHIST_aah.png +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_test_x.h5 +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_test_y.h5 +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_train_x.h5 +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_train_y.h5 +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_valid_x.h5 +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_valid_y.h5 +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0011/segmentations/semantic_labels/masks.nii.gz +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0461/segmentations/semantic_labels/masks.nii.gz +0 -0
- kaiko_eva-0.0.2/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0762/segmentations/semantic_labels/masks.nii.gz +0 -0
- kaiko_eva-0.0.2/tests/eva/core/data/datasets/embeddings/__init__.py +1 -0
- kaiko_eva-0.0.2/tests/eva/core/data/datasets/embeddings/classification/__init__.py +1 -0
- kaiko_eva-0.0.1/tests/eva/core/data/datasets/classification/test_embedding_datasets.py → kaiko_eva-0.0.2/tests/eva/core/data/datasets/embeddings/classification/test_embeddings.py +16 -16
- kaiko_eva-0.0.2/tests/eva/core/data/datasets/embeddings/classification/test_multi_embeddings.py +106 -0
- kaiko_eva-0.0.2/tests/eva/core/data/transforms/__init__.py +1 -0
- kaiko_eva-0.0.2/tests/eva/core/data/transforms/padding/__init__.py +1 -0
- kaiko_eva-0.0.2/tests/eva/core/data/transforms/padding/test_pad_2d_tensor.py +49 -0
- kaiko_eva-0.0.2/tests/eva/core/data/transforms/sampling/__init__.py +1 -0
- kaiko_eva-0.0.2/tests/eva/core/data/transforms/sampling/test_sample_from_axis.py +71 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/segmentation/test_total_segmentator.py +9 -7
- kaiko_eva-0.0.2/tests/eva/vision/utils/test_convert.py +50 -0
- kaiko_eva-0.0.1/src/eva/core/callbacks/__init__.py +0 -5
- kaiko_eva-0.0.1/src/eva/core/data/datasets/__init__.py +0 -7
- kaiko_eva-0.0.1/src/eva/core/data/datasets/classification/__init__.py +0 -5
- kaiko_eva-0.0.1/src/eva/core/data/transforms/__init__.py +0 -5
- kaiko_eva-0.0.1/src/eva/vision/data/datasets/classification/total_segmentator.py +0 -213
- kaiko_eva-0.0.1/src/eva/vision/data/datasets/segmentation/base.py +0 -112
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_0_shape_8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_1_shape_8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_2_shape_8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_3_shape_8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_4_shape_1x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_5_shape_1x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_6_shape_1x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/embeddings/tensor_7_shape_1x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/embeddings/manifest.csv +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_0_shape_6x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_1_shape_3x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_2_shape_1x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_3_shape_2x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_4_shape_5x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_5_shape_3x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_6_shape_1x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_7_shape_6x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_8_shape_2x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/embeddings/tensor_9_shape_5x8.pt +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/core/datasets/multi-embeddings/manifest.csv +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/images/random_bgr_32x32.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/images/random_grayscale_32x32.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b001.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b002.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b003.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b004.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b005.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Benign/b006.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is001.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is002.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is003.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is004.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is005.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/InSitu/is006.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv001.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv002.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv003.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv004.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv005.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Invasive/iv006.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n001.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n002.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n003.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n004.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n005.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/bach/ICIAR2018_BACH_Challenge/Photos/Normal/n006.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/ADI/ADI-SIHVHHPH.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/ADI/ADI-SIHWWQMY.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/BACK/BACK-YYYHKNMK.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/BACK/BACK-YYYMDTNW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/DEB/DEB-YYYRSHLP.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/DEB/DEB-YYYTCTDR.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/LYM/LYM-YYWRPGDD.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/LYM/LYM-YYYTKMWW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUC/MUC-YYYNWSAM.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUC/MUC-YYYRQDLW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUS/MUS-YYYNVQVQ.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/MUS/MUS-YYYRWWNH.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/NORM/NORM-YYTTIRVD.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/NORM/NORM-YYVAFTKA.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/STR/STR-YYYHNSSM.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/STR/STR-YYYWVWFG.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/TUM/TUM-YYYSGWYW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/CRC-VAL-HE-7K/TUM/TUM-YYYYQFVN.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/ADI/ADI-SIHVHHPH.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/ADI/ADI-SIHWWQMY.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/BACK/BACK-YYYHKNMK.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/BACK/BACK-YYYMDTNW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/DEB/DEB-YYYRSHLP.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/DEB/DEB-YYYTCTDR.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/LYM/LYM-YYWRPGDD.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/LYM/LYM-YYYTKMWW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUC/MUC-YYYNWSAM.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUC/MUC-YYYRQDLW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUS/MUS-YYYNVQVQ.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/MUS/MUS-YYYRWWNH.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/NORM/NORM-YYTTIRVD.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/NORM/NORM-YYVAFTKA.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/STR/STR-YYYHNSSM.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/STR/STR-YYYWVWFG.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/TUM/TUM-YYYSGWYW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K/TUM/TUM-YYYYQFVN.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/ADI/ADI-SIHVHHPH.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/ADI/ADI-SIHWWQMY.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/BACK/BACK-YYYHKNMK.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/BACK/BACK-YYYMDTNW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/DEB/DEB-YYYRSHLP.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/DEB/DEB-YYYTCTDR.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/LYM/LYM-YYWRPGDD.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/LYM/LYM-YYYTKMWW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUC/MUC-YYYNWSAM.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUC/MUC-YYYRQDLW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUS/MUS-YYYNVQVQ.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/MUS/MUS-YYYRWWNH.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/NORM/NORM-YYTTIRVD.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/NORM/NORM-YYVAFTKA.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/STR/STR-YYYHNSSM.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/STR/STR-YYYWVWFG.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/TUM/TUM-YYYSGWYW.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/crc/NCT-CRC-HE-100K-NONORM/TUM/TUM-YYYYQFVN.tif +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/annotations.csv +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aaa.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aab.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aac.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aae.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aaf.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aag.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/mhist/images/MHIST_aah.png +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_test_x.h5 +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_test_y.h5 +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_train_x.h5 +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_train_y.h5 +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_valid_x.h5 +0 -3
- kaiko_eva-0.0.1/tests/eva/assets/vision/datasets/patch_camelyon/camelyonpatch_level_2_split_valid_y.h5 +0 -3
- kaiko_eva-0.0.1/tests/eva/core/data/datasets/classification/__init__.py +0 -1
- kaiko_eva-0.0.1/tests/eva/vision/data/datasets/classification/test_total_segmentator.py +0 -63
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/LICENSE +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/__main__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/__version__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/callbacks/writers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/callbacks/writers/embeddings.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/callbacks/writers/typings.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/cli/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/cli/cli.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/cli/logo.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/cli/setup.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/dataloaders/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/dataloaders/dataloader.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/datamodules/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/datamodules/call.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/datamodules/datamodule.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/datamodules/schemas.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/datasets/base.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/datasets/dataset.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/samplers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/samplers/sampler.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/transforms/dtype/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/data/transforms/dtype/array.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/interface/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/interface/interface.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/average_loss.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/binary_balanced_accuracy.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/defaults/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/defaults/classification/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/defaults/classification/binary.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/defaults/classification/multiclass.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/structs/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/structs/collection.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/structs/metric.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/structs/module.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/structs/schemas.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/metrics/structs/typings.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/inference.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/typings.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/utils/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/utils/batch_postprocess.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/modules/utils/grad.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/_utils.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/mlp.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/transforms/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/transforms/extract_cls_features.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/wrappers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/wrappers/base.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/wrappers/from_function.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/wrappers/huggingface.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/models/networks/wrappers/onnx.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/trainers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/trainers/_logging.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/trainers/_utils.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/utils/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/utils/io/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/utils/io/dataframe.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/utils/multiprocessing.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/core/utils/workers.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/_validators.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/classification/bach.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/classification/base.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/classification/crc.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/classification/mhist.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/classification/patch_camelyon.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/segmentation/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/structs.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/datasets/vision.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/transforms/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/transforms/common/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/data/transforms/common/resize_and_crop.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/models/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/models/networks/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/models/networks/abmil.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/models/networks/postprocesses/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/models/networks/postprocesses/cls.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/utils/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/utils/io/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/utils/io/_utils.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/utils/io/image.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/src/eva/vision/utils/io/text.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/_cli.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0011/ct.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0011/segmentations/aorta_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0011/segmentations/brain_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0011/segmentations/colon_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0461/ct.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0461/segmentations/aorta_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0461/segmentations/brain_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0461/segmentations/colon_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0762/ct.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0762/segmentations/aorta_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0762/segmentations/brain_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/assets/vision/datasets/total_segmentator/Totalsegmentator_dataset_v201/s0762/segmentations/colon_small.nii.gz +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/conftest.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/callbacks/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/callbacks/conftest.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/callbacks/writers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/callbacks/writers/test_embeddings.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/dataloaders/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/dataloaders/test_dataloader.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/datamodules/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/datamodules/_utils.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/datamodules/test_datamodule.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/datamodules/test_schemas.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/data/datasets/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/core/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/core/test_metric_module.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/core/test_schemas.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/defaults/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/defaults/classification/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/defaults/classification/test_binary.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/defaults/classification/test_multiclass.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/test_average_loss.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/metrics/test_binary_balanced_accuracy.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/modules/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/modules/conftest.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/modules/test_head.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/modules/test_inference.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/modules/utils/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/modules/utils/test_batch_postproces.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/networks/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/networks/test_mlp.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/networks/wrappers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/networks/wrappers/test_from_function.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/networks/wrappers/test_huggingface.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/models/networks/wrappers/test_onnx.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/test_cli.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/trainers/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/core/trainers/test_recorder.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/classification/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/classification/test_bach.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/classification/test_crc.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/classification/test_mhist.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/classification/test_patch_camelyon.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/datasets/segmentation/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/transforms/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/transforms/common/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/data/transforms/common/test_resize_and_crop.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/models/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/models/networks/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/models/networks/test_abmil.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/test_vision_cli.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/utils/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/utils/io/__init__.py +0 -0
- {kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/tests/eva/vision/utils/io/test_image.py +0 -0

{kaiko_eva-0.0.1 → kaiko_eva-0.0.2}/PKG-INFO

````diff
@@ -1,10 +1,10 @@
 Metadata-Version: 2.1
 Name: kaiko-eva
-Version: 0.0.1
+Version: 0.0.2
 Summary: Evaluation Framework for oncology foundation models.
-Keywords: machine-learning
-Author-Email: Ioannis Gatopoulos <ioannis@kaiko.ai>,
-Maintainer-Email: Ioannis Gatopoulos <ioannis@kaiko.ai>,
+Keywords: machine-learning,evaluation-framework,oncology,foundation-models
+Author-Email: Ioannis Gatopoulos <ioannis@kaiko.ai>, =?utf-8?q?Nicolas_K=C3=A4nzig?= <nicolas@kaiko.ai>, Roman Moser <roman@kaiko.ai>
+Maintainer-Email: Ioannis Gatopoulos <ioannis@kaiko.ai>, =?utf-8?q?Nicolas_K=C3=A4nzig?= <nicolas@kaiko.ai>, Roman Moser <roman@kaiko.ai>
 License: Apache License
 Version 2.0, January 2004
 http://www.apache.org/licenses/
@@ -215,15 +215,17 @@ Project-URL: Homepage, https://kaiko-ai.github.io/eva/dev/
 Project-URL: Repository, https://github.com/kaiko-ai/eva
 Project-URL: Documentation, https://kaiko-ai.github.io/eva/dev/
 Requires-Python: >=3.10
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: torch==2.3.0
+Requires-Dist: lightning>=2.2.2
+Requires-Dist: jsonargparse[omegaconf]==4.28
 Requires-Dist: tensorboard>=2.16.2
 Requires-Dist: loguru>=0.7.2
 Requires-Dist: pandas>=2.2.0
 Requires-Dist: transformers>=4.38.2
 Requires-Dist: onnxruntime>=1.17.1
-Requires-Dist: onnx>=1.
+Requires-Dist: onnx>=1.16.0
 Requires-Dist: toolz>=0.12.1
+Requires-Dist: rich>=13.7.1
 Requires-Dist: h5py>=3.10.0; extra == "vision"
 Requires-Dist: nibabel>=5.2.0; extra == "vision"
 Requires-Dist: opencv-python-headless>=4.9.0.80; extra == "vision"
@@ -240,15 +242,19 @@ Description-Content-Type: text/markdown

 <div align="center">

-<
+<br />
+
+<img src="https://github.com/kaiko-ai/eva/blob/main/docs/images/eva-logo.png?raw=true" width="340">

+<br />
 <br />

 _Oncology FM Evaluation Framework by kaiko.ai_

 [](https://pypi.python.org/pypi/kaiko-eva)
-[](https://kaiko-ai.github.io/eva/latest)
+[](https://github.com/kaiko-ai/eva#license)<br>
+[](https://openreview.net/forum?id=FNBQOPj18N&noteId=FNBQOPj18N)

 <p align="center">
 <a href="https://github.com/kaiko-ai/eva#installation">Installation</a> •
@@ -299,18 +305,18 @@ eva --version

 ## How To Use

-
+_`eva`_ can be used directly from the terminal as a CLI tool as follows:
 ```sh
 eva {fit,predict,predict_fit} --config url/or/path/to/the/config.yaml
 ```

-When used as a CLI tool, `
+When used as a CLI tool, _`eva`_ supports configuration files (`.yaml`) as an argument to define its functionality.
 Native supported configs can be found at the [configs](https://github.com/kaiko-ai/eva/tree/main/configs) directory
 of the repo. Apart from cloning the repo, you can download the latest config folder as `.zip` from your browser from
 [here](https://download-directory.github.io/?url=https://github.com/kaiko-ai/eva/tree/main/configs). Alternatively,
 from a specific release the configs can be downloaded from the terminal as follows:
 ```sh
-curl -LO https://github.com/kaiko-ai/eva/releases/download/0.0.1/configs.zip | unzip configs
+curl -LO https://github.com/kaiko-ai/eva/releases/download/0.0.1/configs.zip | unzip configs
 ```

 For example, to perform a downstream evaluation of DINO ViT-S/16 on the BACH dataset with
@@ -338,7 +344,7 @@ and [tutorials](https://kaiko-ai.github.io/eva/dev/user-guide/advanced/replicate

 ## Benchmarks

-In this section you will find model benchmarks which were generated with _eva_.
+In this section you will find model benchmarks which were generated with _`eva`_.

 ### Table I: WSI patch-level benchmark

@@ -351,13 +357,15 @@ In this section you will find model benchmarks which were generated with _eva_.
 | ViT-S/16 _(random)_ <sup>[1]</sup> | 0.410 | 0.617 | 0.501 | 0.753 | 0.728 |
 | ViT-S/16 _(ImageNet)_ <sup>[1]</sup> | 0.695 | 0.935 | 0.831 | 0.864 | 0.849 |
 | ViT-B/8 _(ImageNet)_ <sup>[1]</sup> | 0.710 | 0.939 | 0.814 | 0.870 | 0.856 |
+| ViT-L/14 _(ImageNet)_ <sup>[1]</sup> | 0.707 | 0.916 | 0.832 | 0.873 | 0.888 |
 | DINO<sub>(p=16)</sub> <sup>[2]</sup> | 0.801 | 0.934 | 0.768 | 0.889 | 0.895 |
 | Phikon <sup>[3]</sup> | 0.725 | 0.935 | 0.777 | 0.912 | 0.915 |
-
-| ViT-S/
-| ViT-
-| ViT-B/
-| ViT-
+| UNI <sup>[4]</sup> | 0.814 | 0.950 | 0.837 | 0.936 | 0.938 |
+| ViT-S/16 _(kaiko.ai)_ <sup>[5]</sup> | 0.797 | 0.943 | 0.828 | 0.903 | 0.893 |
+| ViT-S/8 _(kaiko.ai)_ <sup>[5]</sup> | 0.834 | 0.946 | 0.832 | 0.897 | 0.887 |
+| ViT-B/16 _(kaiko.ai)_ <sup>[5]</sup> | 0.810 | 0.960 | 0.826 | 0.900 | 0.898 |
+| ViT-B/8 _(kaiko.ai)_ <sup>[5]</sup> | 0.865 | 0.956 | 0.809 | 0.913 | 0.921 |
+| ViT-L/14 _(kaiko.ai)_ <sup>[5]</sup> | 0.870 | 0.930 | 0.809 | 0.908 | 0.898 |

 _Table I: Linear probing evaluation of FMs on patch-level downstream datasets.<br> We report averaged balanced accuracy
 over 5 runs, with an average standard deviation of ±0.003._
@@ -367,14 +375,15 @@ over 5 runs, with an average standard deviation of ±0.003._
 <br />

 _References_:
-1. _"Emerging properties in self-supervised vision transformers”_
-2. _"Benchmarking self-supervised learning on diverse pathology datasets”_
-3. _"Scaling self-supervised learning for histopathology with masked image modeling”_
-4. _"
+1. _"Emerging properties in self-supervised vision transformers”_, [arXiv](https://arxiv.org/abs/2104.14294)
+2. _"Benchmarking self-supervised learning on diverse pathology datasets”_, [arXiv](https://arxiv.org/abs/2212.04690)
+3. _"Scaling self-supervised learning for histopathology with masked image modeling”_, [medRxiv](https://www.medrxiv.org/content/10.1101/2023.07.21.23292757v1)
+4. _"A General-Purpose Self-Supervised Model for Computational Pathology”_, [arXiv](https://arxiv.org/abs/2308.15474)
+5. _"Towards Training Large-Scale Pathology Foundation Models: from TCGA to Hospital Scale”_, [arXiv](https://arxiv.org/pdf/2404.15217)

 ## Contributing

-
+_`eva`_ is an open source project and welcomes contributions of all kinds. Please checkout the [developer](./docs/DEVELOPER_GUIDE.md)
 and [contributing guide](./docs/CONTRIBUTING.md) for help on how to do so.

 All contributors must follow the [code of conduct](./docs/CODE_OF_CONDUCT.md).
@@ -399,7 +408,24 @@ Our codebase is built using multiple opensource contributions

 </div>

-
+
+## Citation
+
+If you find this repository useful, please consider giving a star ⭐ and adding the following citation:
+
+```
+@inproceedings{
+kaiko.ai2024eva,
+title={eva: Evaluation framework for pathology foundation models},
+author={kaiko.ai and Ioannis Gatopoulos and Nicolas K{\"a}nzig and Roman Moser and Sebastian Ot{\'a}lora},
+booktitle={Medical Imaging with Deep Learning},
+year={2024},
+url={https://openreview.net/forum?id=FNBQOPj18N}
+}
+```
+
+<br />
+
 <div align="center">
 <img src="https://github.com/kaiko-ai/eva/blob/main/docs/images/kaiko-logo.png?raw=true" width="200">
 </div>
````
@@ -1,14 +1,18 @@
|
|
|
1
1
|
<div align="center">
|
|
2
2
|
|
|
3
|
-
<
|
|
3
|
+
<br />
|
|
4
|
+
|
|
5
|
+
<img src="https://github.com/kaiko-ai/eva/blob/main/docs/images/eva-logo.png?raw=true" width="340">
|
|
4
6
|
|
|
7
|
+
<br />
|
|
5
8
|
<br />
|
|
6
9
|
|
|
7
10
|
_Oncology FM Evaluation Framework by kaiko.ai_
|
|
8
11
|
|
|
9
12
|
[](https://pypi.python.org/pypi/kaiko-eva)
|
|
10
|
-
[](https://kaiko-ai.github.io/eva/latest)
|
|
14
|
+
[](https://github.com/kaiko-ai/eva#license)<br>
|
|
15
|
+
[](https://openreview.net/forum?id=FNBQOPj18N¬eId=FNBQOPj18N)
|
|
12
16
|
|
|
13
17
|
<p align="center">
|
|
14
18
|
<a href="https://github.com/kaiko-ai/eva#installation">Installation</a> •
|
|
@@ -59,18 +63,18 @@ eva --version
|
|
|
59
63
|
|
|
60
64
|
## How To Use
|
|
61
65
|
|
|
62
|
-
|
|
66
|
+
_`eva`_ can be used directly from the terminal as a CLI tool as follows:
|
|
63
67
|
```sh
|
|
64
68
|
eva {fit,predict,predict_fit} --config url/or/path/to/the/config.yaml
|
|
65
69
|
```
|
|
66
70
|
|
|
67
|
-
When used as a CLI tool, `
|
|
71
|
+
When used as a CLI tool, _`eva`_ supports configuration files (`.yaml`) as an argument to define its functionality.
|
|
68
72
|
Native supported configs can be found at the [configs](https://github.com/kaiko-ai/eva/tree/main/configs) directory
|
|
69
73
|
of the repo. Apart from cloning the repo, you can download the latest config folder as `.zip` from your browser from
|
|
70
74
|
[here](https://download-directory.github.io/?url=https://github.com/kaiko-ai/eva/tree/main/configs). Alternatively,
|
|
71
75
|
from a specific release the configs can be downloaded from the terminal as follows:
|
|
72
76
|
```sh
|
|
73
|
-
curl -LO https://github.com/kaiko-ai/eva/releases/download/0.0.1/configs.zip | unzip configs
|
|
77
|
+
curl -LO https://github.com/kaiko-ai/eva/releases/download/0.0.1/configs.zip | unzip configs
|
|
74
78
|
```
|
|
75
79
|
|
|
76
80
|
For example, to perform a downstream evaluation of DINO ViT-S/16 on the BACH dataset with
|
|
@@ -98,7 +102,7 @@ and [tutorials](https://kaiko-ai.github.io/eva/dev/user-guide/advanced/replicate
|
|
|
98
102
|
|
|
99
103
|
## Benchmarks
|
|
100
104
|
|
|
101
|
-
In this section you will find model benchmarks which were generated with
|
|
105
|
+
In this section you will find model benchmarks which were generated with _`eva`_.
|
|
102
106
|
|
|
103
107
|
### Table I: WSI patch-level benchmark
|
|
104
108
|
|
|
@@ -111,13 +115,15 @@ In this section you will find model benchmarks which were generated with _eva_.
|
|
|
111
115
|
| ViT-S/16 _(random)_ <sup>[1]</sup> | 0.410 | 0.617 | 0.501 | 0.753 | 0.728 |
|
|
112
116
|
| ViT-S/16 _(ImageNet)_ <sup>[1]</sup> | 0.695 | 0.935 | 0.831 | 0.864 | 0.849 |
|
|
113
117
|
| ViT-B/8 _(ImageNet)_ <sup>[1]</sup> | 0.710 | 0.939 | 0.814 | 0.870 | 0.856 |
|
|
118
|
+
| ViT-L/14 _(ImageNet)_ <sup>[1]</sup> | 0.707 | 0.916 | 0.832 | 0.873 | 0.888 |
|
|
114
119
|
| DINO<sub>(p=16)</sub> <sup>[2]</sup> | 0.801 | 0.934 | 0.768 | 0.889 | 0.895 |
|
|
115
120
|
| Phikon <sup>[3]</sup> | 0.725 | 0.935 | 0.777 | 0.912 | 0.915 |
|
|
116
|
-
|
|
|
117
|
-
| ViT-S/
|
|
118
|
-
| ViT-
|
|
119
|
-
| ViT-B/
|
|
120
|
-
| ViT-
|
|
121
|
+
| UNI <sup>[4]</sup> | 0.814 | 0.950 | 0.837 | 0.936 | 0.938 |
|
|
122
|
+
| ViT-S/16 _(kaiko.ai)_ <sup>[5]</sup> | 0.797 | 0.943 | 0.828 | 0.903 | 0.893 |
|
|
123
|
+
| ViT-S/8 _(kaiko.ai)_ <sup>[5]</sup> | 0.834 | 0.946 | 0.832 | 0.897 | 0.887 |
|
|
124
|
+
| ViT-B/16 _(kaiko.ai)_ <sup>[5]</sup> | 0.810 | 0.960 | 0.826 | 0.900 | 0.898 |
|
|
125
|
+
| ViT-B/8 _(kaiko.ai)_ <sup>[5]</sup> | 0.865 | 0.956 | 0.809 | 0.913 | 0.921 |
|
|
126
|
+
| ViT-L/14 _(kaiko.ai)_ <sup>[5]</sup> | 0.870 | 0.930 | 0.809 | 0.908 | 0.898 |
|
|
121
127
|
|
|
122
128
|
_Table I: Linear probing evaluation of FMs on patch-level downstream datasets.<br> We report averaged balanced accuracy
|
|
123
129
|
over 5 runs, with an average standard deviation of ±0.003._
|
|
@@ -127,14 +133,15 @@ over 5 runs, with an average standard deviation of ±0.003._
 <br />

 _References_:
-1. _"Emerging properties in self-supervised vision transformers”_
-2. _"Benchmarking self-supervised learning on diverse pathology datasets”_
-3. _"Scaling self-supervised learning for histopathology with masked image modeling”_
-4. _"
+1. _"Emerging properties in self-supervised vision transformers”_, [arXiv](https://arxiv.org/abs/2104.14294)
+2. _"Benchmarking self-supervised learning on diverse pathology datasets”_, [arXiv](https://arxiv.org/abs/2212.04690)
+3. _"Scaling self-supervised learning for histopathology with masked image modeling”_, [medRxiv](https://www.medrxiv.org/content/10.1101/2023.07.21.23292757v1)
+4. _"A General-Purpose Self-Supervised Model for Computational Pathology”_, [arXiv](https://arxiv.org/abs/2308.15474)
+5. _"Towards Training Large-Scale Pathology Foundation Models: from TCGA to Hospital Scale”_, [arXiv](https://arxiv.org/pdf/2404.15217)

 ## Contributing

-
+_`eva`_ is an open source project and welcomes contributions of all kinds. Please checkout the [developer](./docs/DEVELOPER_GUIDE.md)
 and [contributing guide](./docs/CONTRIBUTING.md) for help on how to do so.

 All contributors must follow the [code of conduct](./docs/CODE_OF_CONDUCT.md).
@@ -159,7 +166,24 @@ Our codebase is built using multiple opensource contributions

 </div>

-
+
+## Citation
+
+If you find this repository useful, please consider giving a star ⭐ and adding the following citation:
+
+```
+@inproceedings{
+kaiko.ai2024eva,
+title={eva: Evaluation framework for pathology foundation models},
+author={kaiko.ai and Ioannis Gatopoulos and Nicolas K{\"a}nzig and Roman Moser and Sebastian Ot{\'a}lora},
+booktitle={Medical Imaging with Deep Learning},
+year={2024},
+url={https://openreview.net/forum?id=FNBQOPj18N}
+}
+```
+
+<br />
+
 <div align="center">
 <img src="https://github.com/kaiko-ai/eva/blob/main/docs/images/kaiko-logo.png?raw=true" width="200">
 </div>
@@ -6,7 +6,7 @@ build-backend = "pdm.backend"

 [project]
 name = "kaiko-eva"
-version = "0.0.
+version = "0.0.2"
 description = "Evaluation Framework for oncology foundation models."
 keywords = [
     "machine-learning",
@@ -34,15 +34,17 @@ maintainers = [
 ]
 requires-python = ">=3.10"
 dependencies = [
-    "
-    "
+    "torch==2.3.0",
+    "lightning>=2.2.2",
+    "jsonargparse[omegaconf]==4.28",
     "tensorboard>=2.16.2",
     "loguru>=0.7.2",
     "pandas>=2.2.0",
     "transformers>=4.38.2",
     "onnxruntime>=1.17.1",
-    "onnx>=1.
+    "onnx>=1.16.0",
     "toolz>=0.12.1",
+    "rich>=13.7.1",
 ]

 [project.urls]
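As a side note on the updated dependency pins, here is a small sketch for checking which of the newly pinned packages are present in the current environment; it is illustrative only and not part of the project tooling, and the package names are taken from the list above.

```python
"""Report installed versions of the newly pinned dependencies (sketch)."""
from importlib import metadata

for package in ("torch", "lightning", "jsonargparse", "onnx", "rich"):
    try:
        print(f"{package}: {metadata.version(package)}")
    except metadata.PackageNotFoundError:
        print(f"{package}: not installed")
```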
Binary file
@@ -0,0 +1,143 @@
+"""Configuration logger callback."""
+
+import ast
+import os
+import sys
+from types import BuiltinFunctionType
+from typing import Any, Dict, List
+
+import lightning.pytorch as pl
+import yaml
+from lightning_fabric.utilities import cloud_io
+from loguru import logger as cli_logger
+from omegaconf import OmegaConf
+from typing_extensions import TypeGuard, override
+
+from eva.core import loggers
+
+
+class ConfigurationLogger(pl.Callback):
+    """Logs the submitted configuration to the experimental logger."""
+
+    _save_as: str = "config.yaml"
+
+    def __init__(self, verbose: bool = True) -> None:
+        """Initializes the callback.
+
+        Args:
+            verbose: Whether to print the submitted configuration to
+                the terminal.
+        """
+        super().__init__()
+
+        self._verbose = verbose
+
+    @override
+    def setup(
+        self,
+        trainer: pl.Trainer,
+        pl_module: pl.LightningModule,
+        stage: str | None = None,
+    ) -> None:
+        log_dir = trainer.log_dir
+        if not _logdir_exists(log_dir):
+            return
+
+        configuration = _load_submitted_config()
+
+        if self._verbose:
+            config_as_text = yaml.dump(configuration, sort_keys=False)
+            print(f"Configuration:\033[94m\n---\n{config_as_text}\033[0m")
+
+        save_as = os.path.join(log_dir, self._save_as)
+        fs = cloud_io.get_filesystem(log_dir)
+        with fs.open(save_as, "w") as output_file:
+            yaml.dump(configuration, output_file, sort_keys=False)
+
+        loggers.log_parameters(trainer.loggers, tag="configuration", parameters=configuration)
+
+
+def _logdir_exists(logdir: str | None, verbose: bool = True) -> TypeGuard[str]:
+    """Checks if the trainer has a log directory.
+
+    Args:
+        logdir: Trainer's logdir.
+        name: The name to log with.
+        verbose: Whether to log if it does not exist.
+
+    Returns:
+        A bool indicating if the log directory exists or not.
+    """
+    exists = isinstance(logdir, str)
+    if not exists and verbose:
+        print("\n")
+        cli_logger.warning("Log directory is `None`. Configuration file will not be logged.\n")
+    return exists
+
+
+def _load_submitted_config() -> Dict[str, Any]:
+    """Retrieves and loads the submitted configuration.
+
+    Returns:
+        The merged submitted configuration as a dictionary.
+    """
+    config_paths = _fetch_submitted_config_path()
+    return _load_yaml_files(config_paths)
+
+
+def _fetch_submitted_config_path() -> List[str]:
+    """Fetches the config paths from the command line arguments.
+
+    Returns:
+        The paths to the submitted configuration files.
+    """
+    return list(filter(lambda f: f.endswith(".yaml"), sys.argv))
+
+
+def _load_yaml_files(paths: List[str]) -> Dict[str, Any]:
+    """Loads and merges YAML files from multiple paths.
+
+    Args:
+        paths: The paths to the yaml files.
+
+    Returns:
+        The merged configurations as a dictionary.
+    """
+    merged_config = {}
+    for config_path in paths:
+        fs = cloud_io.get_filesystem(config_path)
+        with fs.open(config_path, "r") as file:
+            omegaconf_file = OmegaConf.load(file)  # type: ignore
+            config_dict = OmegaConf.to_object(omegaconf_file)  # type: ignore
+            parsed_config = _type_resolver(config_dict)  # type: ignore
+            merged_config.update(parsed_config)
+    return merged_config
+
+
+def _type_resolver(mapping: Dict[str, Any]) -> Dict[str, Any]:
+    """Parses the string values of a dictionary in-place.
+
+    Args:
+        mapping: A dictionary object.
+
+    Returns:
+        The mapping with the formatted values.
+    """
+    for key, value in mapping.items():
+        if isinstance(value, dict):
+            formatted_value = _type_resolver(value)
+        elif isinstance(value, list) and isinstance(value[0], dict):
+            formatted_value = [_type_resolver(subvalue) for subvalue in value]
+        else:
+            try:
+                parsed_value = ast.literal_eval(value)  # type: ignore
+                formatted_value = (
+                    value if isinstance(parsed_value, BuiltinFunctionType) else parsed_value
+                )
+
+            except Exception:
+                formatted_value = value
+
+        mapping[key] = formatted_value
+
+    return mapping
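To show how this callback is meant to be used, here is a minimal sketch that attaches it to a plain Lightning `Trainer`; the import path is an assumption inferred from the module above, and the model and datamodule are left out.

```python
"""Usage sketch for the configuration logger callback.

Assumes the module above is importable as `eva.core.callbacks.config` and
that the submitted YAML config paths appear in `sys.argv`, which is where
`_load_submitted_config` reads them from.
"""
import lightning.pytorch as pl

from eva.core.callbacks.config import ConfigurationLogger  # assumed import path

trainer = pl.Trainer(
    max_epochs=1,
    default_root_dir="logs/example",  # gives the trainer a log_dir for config.yaml
    callbacks=[ConfigurationLogger(verbose=True)],
)
# trainer.fit(model, datamodule=datamodule)  # the config is dumped during setup
```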
@@ -0,0 +1,15 @@
+"""Datasets API."""
+
+from eva.core.data.datasets.base import Dataset
+from eva.core.data.datasets.dataset import TorchDataset
+from eva.core.data.datasets.embeddings import (
+    EmbeddingsClassificationDataset,
+    MultiEmbeddingsClassificationDataset,
+)
+
+__all__ = [
+    "Dataset",
+    "EmbeddingsClassificationDataset",
+    "MultiEmbeddingsClassificationDataset",
+    "TorchDataset",
+]
@@ -0,0 +1,13 @@
+"""Datasets API."""
+
+from eva.core.data.datasets.embeddings.base import EmbeddingsDataset
+from eva.core.data.datasets.embeddings.classification import (
+    EmbeddingsClassificationDataset,
+    MultiEmbeddingsClassificationDataset,
+)
+
+__all__ = [
+    "EmbeddingsDataset",
+    "EmbeddingsClassificationDataset",
+    "MultiEmbeddingsClassificationDataset",
+]
@@ -1,7 +1,8 @@
-"""
+"""Base dataset class for Embeddings."""

+import abc
 import os
-from typing import Callable, Dict, Tuple
+from typing import Callable, Dict, Literal, Tuple

 import numpy as np
 import pandas as pd
@@ -11,22 +12,23 @@ from typing_extensions import override
 from eva.core.data.datasets import base
 from eva.core.utils import io

+default_column_mapping: Dict[str, str] = {
+    "path": "embeddings",
+    "target": "target",
+    "split": "split",
+    "multi_id": "slide_id",
+}
+"""The default column mapping of the variables to the manifest columns."""

-class EmbeddingsClassificationDataset(base.Dataset):
-    """Embeddings classification dataset."""

-
-
-        "target": "target",
-        "split": "split",
-    }
-    """The default column mapping of the variables to the manifest columns."""
+class EmbeddingsDataset(base.Dataset):
+    """Abstract base class for embedding datasets."""

     def __init__(
         self,
         root: str,
         manifest_file: str,
-        split:
+        split: Literal["train", "val", "test"] | None = None,
         column_mapping: Dict[str, str] = default_column_mapping,
         embeddings_transforms: Callable | None = None,
         target_transforms: Callable | None = None,
@@ -54,12 +56,38 @@ class EmbeddingsClassificationDataset(base.Dataset):
         self._root = root
         self._manifest_file = manifest_file
         self._split = split
-        self._column_mapping =
+        self._column_mapping = default_column_mapping | column_mapping
         self._embeddings_transforms = embeddings_transforms
         self._target_transforms = target_transforms

         self._data: pd.DataFrame

+    @abc.abstractmethod
+    def _load_embeddings(self, index: int) -> torch.Tensor:
+        """Returns the `index`'th embedding sample.
+
+        Args:
+            index: The index of the data sample to load.
+
+        Returns:
+            The embedding sample as a tensor.
+        """
+
+    @abc.abstractmethod
+    def _load_target(self, index: int) -> np.ndarray:
+        """Returns the `index`'th target sample.
+
+        Args:
+            index: The index of the data sample to load.
+
+        Returns:
+            The sample target as an array.
+        """
+
+    @abc.abstractmethod
+    def __len__(self) -> int:
+        """Returns the total length of the data."""
+
     def filename(self, index: int) -> str:
         """Returns the filename of the `index`'th data sample.

@@ -71,7 +99,7 @@ class EmbeddingsClassificationDataset(base.Dataset):
         Returns:
             The filename of the `index`'th data sample.
         """
-        return self._data.at[index, self._column_mapping["
+        return self._data.at[index, self._column_mapping["path"]]

     @override
     def setup(self):
@@ -90,36 +118,6 @@ class EmbeddingsClassificationDataset(base.Dataset):
         target = self._load_target(index)
         return self._apply_transforms(embeddings, target)

-    def __len__(self) -> int:
-        """Returns the total length of the data."""
-        return len(self._data)
-
-    def _load_embeddings(self, index: int) -> torch.Tensor:
-        """Returns the `index`'th embedding sample.
-
-        Args:
-            index: The index of the data sample to load.
-
-        Returns:
-            The sample embedding as an array.
-        """
-        filename = self.filename(index)
-        embeddings_path = os.path.join(self._root, filename)
-        tensor = torch.load(embeddings_path, map_location="cpu")
-        return tensor.squeeze(0)
-
-    def _load_target(self, index: int) -> np.ndarray:
-        """Returns the `index`'th target sample.
-
-        Args:
-            index: The index of the data sample to load.
-
-        Returns:
-            The sample target as an array.
-        """
-        target = self._data.at[index, self._column_mapping["target"]]
-        return np.asarray(target, dtype=np.int64)
-
     def _load_manifest(self) -> pd.DataFrame:
         """Loads manifest file and filters the data based on the split column.

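Since `EmbeddingsDataset` is now abstract, a concrete subclass has to provide `_load_embeddings`, `_load_target` and `__len__`. The sketch below mirrors the loading logic of the block removed above (one `.pt` tensor per manifest row); the class name and import path are assumptions for illustration only.

```python
"""Minimal concrete subclass of the abstract `EmbeddingsDataset` (sketch)."""
import os

import numpy as np
import torch

from eva.core.data.datasets.embeddings.base import EmbeddingsDataset  # assumed module path


class SingleEmbeddingDataset(EmbeddingsDataset):
    """Loads one pre-computed embedding tensor and an integer target per sample."""

    def _load_embeddings(self, index: int) -> torch.Tensor:
        embeddings_path = os.path.join(self._root, self.filename(index))
        return torch.load(embeddings_path, map_location="cpu").squeeze(0)

    def _load_target(self, index: int) -> np.ndarray:
        target = self._data.at[index, self._column_mapping["target"]]
        return np.asarray(target, dtype=np.int64)

    def __len__(self) -> int:
        return len(self._data)
```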
@@ -0,0 +1,10 @@
+"""Embedding classification datasets API."""
+
+from eva.core.data.datasets.embeddings.classification.embeddings import (
+    EmbeddingsClassificationDataset,
+)
+from eva.core.data.datasets.embeddings.classification.multi_embeddings import (
+    MultiEmbeddingsClassificationDataset,
+)
+
+__all__ = ["EmbeddingsClassificationDataset", "MultiEmbeddingsClassificationDataset"]
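For completeness, a short usage sketch of the exported classification dataset; the root directory, manifest file name and the custom column name are hypothetical, while the constructor arguments follow the `EmbeddingsDataset` signature shown earlier in this diff.

```python
"""Instantiate an embeddings classification dataset (sketch with hypothetical paths)."""
from eva.core.data.datasets import EmbeddingsClassificationDataset

dataset = EmbeddingsClassificationDataset(
    root="data/embeddings",               # hypothetical directory containing the .pt tensors
    manifest_file="manifest.csv",         # hypothetical manifest with path/target/split columns
    split="train",
    column_mapping={"target": "label"},   # merged with the defaults via `default_column_mapping | column_mapping`
)
dataset.setup()
embedding, target = dataset[0]
```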