brainscore-vision 2.1 (py3-none-any.whl)
- brainscore_vision/__init__.py +105 -0
- brainscore_vision/__main__.py +20 -0
- brainscore_vision/benchmark_helpers/__init__.py +67 -0
- brainscore_vision/benchmark_helpers/neural_common.py +70 -0
- brainscore_vision/benchmark_helpers/properties_common.py +424 -0
- brainscore_vision/benchmark_helpers/screen.py +126 -0
- brainscore_vision/benchmark_helpers/test_helper.py +160 -0
- brainscore_vision/benchmarks/README.md +7 -0
- brainscore_vision/benchmarks/__init__.py +122 -0
- brainscore_vision/benchmarks/baker2022/__init__.py +9 -0
- brainscore_vision/benchmarks/baker2022/benchmark.py +125 -0
- brainscore_vision/benchmarks/baker2022/requirements.txt +1 -0
- brainscore_vision/benchmarks/baker2022/test.py +90 -0
- brainscore_vision/benchmarks/bmd2024/__init__.py +8 -0
- brainscore_vision/benchmarks/bmd2024/benchmark.py +51 -0
- brainscore_vision/benchmarks/bmd2024/test.py +29 -0
- brainscore_vision/benchmarks/bracci2019/__init__.py +8 -0
- brainscore_vision/benchmarks/bracci2019/benchmark.py +286 -0
- brainscore_vision/benchmarks/bracci2019/requirements.txt +3 -0
- brainscore_vision/benchmarks/cadena2017/__init__.py +5 -0
- brainscore_vision/benchmarks/cadena2017/benchmark.py +91 -0
- brainscore_vision/benchmarks/cadena2017/test.py +35 -0
- brainscore_vision/benchmarks/coggan2024_behavior/__init__.py +8 -0
- brainscore_vision/benchmarks/coggan2024_behavior/benchmark.py +133 -0
- brainscore_vision/benchmarks/coggan2024_behavior/test.py +21 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/__init__.py +15 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/benchmark.py +201 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/test.py +25 -0
- brainscore_vision/benchmarks/ferguson2024/__init__.py +24 -0
- brainscore_vision/benchmarks/ferguson2024/benchmark.py +210 -0
- brainscore_vision/benchmarks/ferguson2024/helpers/helpers.py +251 -0
- brainscore_vision/benchmarks/ferguson2024/requirements.txt +5 -0
- brainscore_vision/benchmarks/ferguson2024/test.py +114 -0
- brainscore_vision/benchmarks/freemanziemba2013/__init__.py +10 -0
- brainscore_vision/benchmarks/freemanziemba2013/benchmarks/benchmark.py +53 -0
- brainscore_vision/benchmarks/freemanziemba2013/benchmarks/public_benchmarks.py +37 -0
- brainscore_vision/benchmarks/freemanziemba2013/test.py +98 -0
- brainscore_vision/benchmarks/geirhos2021/__init__.py +59 -0
- brainscore_vision/benchmarks/geirhos2021/benchmark.py +132 -0
- brainscore_vision/benchmarks/geirhos2021/test.py +189 -0
- brainscore_vision/benchmarks/hebart2023/__init__.py +4 -0
- brainscore_vision/benchmarks/hebart2023/benchmark.py +72 -0
- brainscore_vision/benchmarks/hebart2023/test.py +19 -0
- brainscore_vision/benchmarks/hermann2020/__init__.py +6 -0
- brainscore_vision/benchmarks/hermann2020/benchmark.py +63 -0
- brainscore_vision/benchmarks/hermann2020/test.py +28 -0
- brainscore_vision/benchmarks/igustibagus2024/__init__.py +11 -0
- brainscore_vision/benchmarks/igustibagus2024/domain_transfer_analysis.py +306 -0
- brainscore_vision/benchmarks/igustibagus2024/domain_transfer_neural.py +134 -0
- brainscore_vision/benchmarks/igustibagus2024/test.py +45 -0
- brainscore_vision/benchmarks/imagenet/__init__.py +4 -0
- brainscore_vision/benchmarks/imagenet/benchmark.py +50 -0
- brainscore_vision/benchmarks/imagenet/imagenet2012.csv +50001 -0
- brainscore_vision/benchmarks/imagenet/test.py +32 -0
- brainscore_vision/benchmarks/imagenet_c/__init__.py +7 -0
- brainscore_vision/benchmarks/imagenet_c/benchmark.py +204 -0
- brainscore_vision/benchmarks/imagenet_c/test.py +57 -0
- brainscore_vision/benchmarks/islam2021/__init__.py +11 -0
- brainscore_vision/benchmarks/islam2021/benchmark.py +107 -0
- brainscore_vision/benchmarks/islam2021/test.py +47 -0
- brainscore_vision/benchmarks/kar2019/__init__.py +4 -0
- brainscore_vision/benchmarks/kar2019/benchmark.py +88 -0
- brainscore_vision/benchmarks/kar2019/test.py +93 -0
- brainscore_vision/benchmarks/majajhong2015/__init__.py +18 -0
- brainscore_vision/benchmarks/majajhong2015/benchmark.py +96 -0
- brainscore_vision/benchmarks/majajhong2015/test.py +103 -0
- brainscore_vision/benchmarks/malania2007/__init__.py +13 -0
- brainscore_vision/benchmarks/malania2007/benchmark.py +235 -0
- brainscore_vision/benchmarks/malania2007/test.py +64 -0
- brainscore_vision/benchmarks/maniquet2024/__init__.py +6 -0
- brainscore_vision/benchmarks/maniquet2024/benchmark.py +199 -0
- brainscore_vision/benchmarks/maniquet2024/test.py +17 -0
- brainscore_vision/benchmarks/marques2020/__init__.py +76 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/cavanaugh2002a_benchmark.py +119 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982a_benchmark.py +84 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982b_benchmark.py +88 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/freemanZiemba2013_benchmark.py +138 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/ringach2002_benchmark.py +167 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/schiller1976_benchmark.py +100 -0
- brainscore_vision/benchmarks/marques2020/test.py +135 -0
- brainscore_vision/benchmarks/objectnet/__init__.py +4 -0
- brainscore_vision/benchmarks/objectnet/benchmark.py +52 -0
- brainscore_vision/benchmarks/objectnet/test.py +33 -0
- brainscore_vision/benchmarks/rajalingham2018/__init__.py +10 -0
- brainscore_vision/benchmarks/rajalingham2018/benchmarks/benchmark.py +74 -0
- brainscore_vision/benchmarks/rajalingham2018/benchmarks/public_benchmark.py +10 -0
- brainscore_vision/benchmarks/rajalingham2018/test.py +125 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/alexnet-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=alexnet,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet18,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet34,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet18-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet34-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2020/__init__.py +4 -0
- brainscore_vision/benchmarks/rajalingham2020/benchmark.py +52 -0
- brainscore_vision/benchmarks/rajalingham2020/test.py +39 -0
- brainscore_vision/benchmarks/sanghavi2020/__init__.py +17 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavi2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavijozwik2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavimurty2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/test.py +83 -0
- brainscore_vision/benchmarks/scialom2024/__init__.py +52 -0
- brainscore_vision/benchmarks/scialom2024/benchmark.py +97 -0
- brainscore_vision/benchmarks/scialom2024/test.py +162 -0
- brainscore_vision/data/__init__.py +0 -0
- brainscore_vision/data/baker2022/__init__.py +40 -0
- brainscore_vision/data/baker2022/data_packaging/inverted_distortion_data_assembly.py +43 -0
- brainscore_vision/data/baker2022/data_packaging/inverted_distortion_stimulus_set.py +81 -0
- brainscore_vision/data/baker2022/data_packaging/mapping.py +60 -0
- brainscore_vision/data/baker2022/data_packaging/normal_distortion_data_assembly.py +46 -0
- brainscore_vision/data/baker2022/data_packaging/normal_distortion_stimulus_set.py +94 -0
- brainscore_vision/data/baker2022/test.py +135 -0
- brainscore_vision/data/barbumayo2019/BarbuMayo2019.py +26 -0
- brainscore_vision/data/barbumayo2019/__init__.py +23 -0
- brainscore_vision/data/barbumayo2019/test.py +10 -0
- brainscore_vision/data/bashivankar2019/__init__.py +52 -0
- brainscore_vision/data/bashivankar2019/data_packaging/2020-08-17_npc_v4_data.h5.png +0 -0
- brainscore_vision/data/bashivankar2019/data_packaging/requirements.txt +4 -0
- brainscore_vision/data/bashivankar2019/data_packaging/synthetic.py +162 -0
- brainscore_vision/data/bashivankar2019/test.py +15 -0
- brainscore_vision/data/bmd2024/__init__.py +69 -0
- brainscore_vision/data/bmd2024/data_packaging/BMD_2024_data_assembly.py +91 -0
- brainscore_vision/data/bmd2024/data_packaging/BMD_2024_simulus_set.py +48 -0
- brainscore_vision/data/bmd2024/data_packaging/stim_meta.csv +401 -0
- brainscore_vision/data/bmd2024/test.py +130 -0
- brainscore_vision/data/bracci2019/__init__.py +36 -0
- brainscore_vision/data/bracci2019/data_packaging.py +221 -0
- brainscore_vision/data/bracci2019/test.py +16 -0
- brainscore_vision/data/cadena2017/__init__.py +52 -0
- brainscore_vision/data/cadena2017/data_packaging/2018-08-07_tolias_v1.ipynb +25880 -0
- brainscore_vision/data/cadena2017/data_packaging/analysis.py +26 -0
- brainscore_vision/data/cadena2017/test.py +24 -0
- brainscore_vision/data/cichy2019/__init__.py +38 -0
- brainscore_vision/data/cichy2019/test.py +8 -0
- brainscore_vision/data/coggan2024_behavior/__init__.py +36 -0
- brainscore_vision/data/coggan2024_behavior/data_packaging.py +166 -0
- brainscore_vision/data/coggan2024_behavior/test.py +32 -0
- brainscore_vision/data/coggan2024_fMRI/__init__.py +27 -0
- brainscore_vision/data/coggan2024_fMRI/data_packaging.py +123 -0
- brainscore_vision/data/coggan2024_fMRI/test.py +25 -0
- brainscore_vision/data/david2004/__init__.py +34 -0
- brainscore_vision/data/david2004/data_packaging/2018-05-10_gallant_data.ipynb +3647 -0
- brainscore_vision/data/david2004/data_packaging/2018-05-23_gallant_data.ipynb +3149 -0
- brainscore_vision/data/david2004/data_packaging/2018-06-05_gallant_data.ipynb +3628 -0
- brainscore_vision/data/david2004/data_packaging/__init__.py +61 -0
- brainscore_vision/data/david2004/data_packaging/convertGallant.m +100 -0
- brainscore_vision/data/david2004/data_packaging/convertGallantV1Aligned.m +58 -0
- brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/DataHash.m +484 -0
- brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/license.txt +24 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.c +895 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.m +107 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.mexw64 +0 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5_helper.m +91 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/InstallMex.m +307 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/license.txt +24 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/uTest_GetMD5.m +290 -0
- brainscore_vision/data/david2004/data_packaging/lib/glob/glob.m +472 -0
- brainscore_vision/data/david2004/data_packaging/lib/glob/license.txt +27 -0
- brainscore_vision/data/david2004/data_packaging/xr_align_debug.py +137 -0
- brainscore_vision/data/david2004/test.py +8 -0
- brainscore_vision/data/deng2009/__init__.py +22 -0
- brainscore_vision/data/deng2009/deng2009imagenet.py +33 -0
- brainscore_vision/data/deng2009/test.py +9 -0
- brainscore_vision/data/ferguson2024/__init__.py +401 -0
- brainscore_vision/data/ferguson2024/data_packaging/data_packaging.py +164 -0
- brainscore_vision/data/ferguson2024/data_packaging/fitting_stimuli.py +20 -0
- brainscore_vision/data/ferguson2024/requirements.txt +2 -0
- brainscore_vision/data/ferguson2024/test.py +155 -0
- brainscore_vision/data/freemanziemba2013/__init__.py +133 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2018-10-05_movshon.ipynb +2002 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-21_movshon_aperture.ipynb +4730 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-26_movshon_aperture_test.ipynb +2228 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/aperture_correct.py +160 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/data_packaging.py +57 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/movshon.py +202 -0
- brainscore_vision/data/freemanziemba2013/test.py +97 -0
- brainscore_vision/data/geirhos2021/__init__.py +358 -0
- brainscore_vision/data/geirhos2021/creating_geirhos_ids.ipynb +468 -0
- brainscore_vision/data/geirhos2021/data_packaging/colour/colour_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/colour/colour_stimulus_set.py +81 -0
- brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_data_assembly.py +83 -0
- brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_data_assembly.py +100 -0
- brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_stimulus_set.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/edge/edge_data_assembly.py +96 -0
- brainscore_vision/data/geirhos2021/data_packaging/edge/edge_stimulus_set.py +69 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_data_assembly.py +83 -0
- brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_stimulus_set.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_stimulus_set.py +81 -0
- brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_data_assembly.py +88 -0
- brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_data_assembly.py +100 -0
- brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_stimulus_set.py +71 -0
- brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_data_assembly.py +88 -0
- brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_stimulus_set.py +75 -0
- brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_stimulus_set.py +75 -0
- brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_data_assembly.py +86 -0
- brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/geirhos_hashes.csv +52 -0
- brainscore_vision/data/geirhos2021/test.py +330 -0
- brainscore_vision/data/hebart2023/__init__.py +23 -0
- brainscore_vision/data/hebart2023/packaging/data_assembly.py +40 -0
- brainscore_vision/data/hebart2023/packaging/stimulus_set.py +72 -0
- brainscore_vision/data/hebart2023/test.py +42 -0
- brainscore_vision/data/hendrycks2019/__init__.py +45 -0
- brainscore_vision/data/hendrycks2019/test.py +26 -0
- brainscore_vision/data/igustibagus2024/__init__.py +23 -0
- brainscore_vision/data/igustibagus2024/dependencies/data_pico/stimulus_dicarlo_domain_transfer.csv +3139 -0
- brainscore_vision/data/igustibagus2024/investigation_consistency.ipynb +346 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/__init__.py +0 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly.ipynb +649 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly_and_stim.py +152 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_stimulus_set_with_background-id.py +45 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/helpers_background_id.py +849 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/merged_stimulus_set.csv +3139 -0
- brainscore_vision/data/igustibagus2024/oleo_pico_exploration.ipynb +410 -0
- brainscore_vision/data/igustibagus2024/test.py +26 -0
- brainscore_vision/data/imagenetslim15000/ImageNetSlim15000.py +30 -0
- brainscore_vision/data/imagenetslim15000/__init__.py +11 -0
- brainscore_vision/data/imagenetslim15000/test.py +8 -0
- brainscore_vision/data/islam2021/__init__.py +18 -0
- brainscore_vision/data/islam2021/data_packaging.py +64 -0
- brainscore_vision/data/islam2021/test.py +11 -0
- brainscore_vision/data/kar2018/__init__.py +58 -0
- brainscore_vision/data/kar2018/data_packaging/kar_coco.py +97 -0
- brainscore_vision/data/kar2018/data_packaging/kar_hvm.py +77 -0
- brainscore_vision/data/kar2018/data_packaging/requirements.txt +1 -0
- brainscore_vision/data/kar2018/test.py +10 -0
- brainscore_vision/data/kar2019/__init__.py +43 -0
- brainscore_vision/data/kar2019/data_packaging.py +116 -0
- brainscore_vision/data/kar2019/test.py +8 -0
- brainscore_vision/data/kuzovkin2018/__init__.py +36 -0
- brainscore_vision/data/kuzovkin2018/createAssembliesBrainScore.py +103 -0
- brainscore_vision/data/kuzovkin2018/test.py +8 -0
- brainscore_vision/data/majajhong2015/__init__.py +113 -0
- brainscore_vision/data/majajhong2015/data_packaging/darren10ms.py +32 -0
- brainscore_vision/data/majajhong2015/data_packaging/data_packaging.py +65 -0
- brainscore_vision/data/majajhong2015/test.py +38 -0
- brainscore_vision/data/malania2007/__init__.py +254 -0
- brainscore_vision/data/malania2007/data_packaging/malania_data_assembly.py +79 -0
- brainscore_vision/data/malania2007/data_packaging/malania_stimulus_set.py +79 -0
- brainscore_vision/data/malania2007/test.py +147 -0
- brainscore_vision/data/maniquet2024/__init__.py +57 -0
- brainscore_vision/data/maniquet2024/data_packaging.py +151 -0
- brainscore_vision/data/maniquet2024/test.py +16 -0
- brainscore_vision/data/marques2020/__init__.py +123 -0
- brainscore_vision/data/marques2020/data_packaging/marques_cavanaugh2002a.py +84 -0
- brainscore_vision/data/marques2020/data_packaging/marques_devalois1982a.py +44 -0
- brainscore_vision/data/marques2020/data_packaging/marques_devalois1982b.py +54 -0
- brainscore_vision/data/marques2020/data_packaging/marques_freemanZiemba2013.py +252 -0
- brainscore_vision/data/marques2020/data_packaging/marques_gen_stim.py +95 -0
- brainscore_vision/data/marques2020/data_packaging/marques_ringach2002.py +95 -0
- brainscore_vision/data/marques2020/data_packaging/marques_schiller1976c.py +60 -0
- brainscore_vision/data/marques2020/data_packaging/marques_stim_common.py +389 -0
- brainscore_vision/data/marques2020/data_packaging/marques_utils.py +21 -0
- brainscore_vision/data/marques2020/data_packaging/setup.py +13 -0
- brainscore_vision/data/marques2020/test.py +54 -0
- brainscore_vision/data/rajalingham2018/__init__.py +56 -0
- brainscore_vision/data/rajalingham2018/rajalingham2018objectome.py +193 -0
- brainscore_vision/data/rajalingham2018/test.py +10 -0
- brainscore_vision/data/rajalingham2020/__init__.py +39 -0
- brainscore_vision/data/rajalingham2020/rajalingham2020orthographic_IT.py +97 -0
- brainscore_vision/data/rajalingham2020/test.py +8 -0
- brainscore_vision/data/rust2012/2020-12-28_rust.ipynb +3301 -0
- brainscore_vision/data/rust2012/__init__.py +45 -0
- brainscore_vision/data/rust2012/rust305.py +35 -0
- brainscore_vision/data/rust2012/test.py +47 -0
- brainscore_vision/data/sanghavi2020/__init__.py +119 -0
- brainscore_vision/data/sanghavi2020/data_packaging/environment.yml +36 -0
- brainscore_vision/data/sanghavi2020/data_packaging/requirements.txt +4 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavi2020.py +101 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavijozwik2020.py +148 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavikar2020.py +131 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020.py +120 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things.py +138 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things1.py +118 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things2.py +118 -0
- brainscore_vision/data/sanghavi2020/test.py +13 -0
- brainscore_vision/data/scialom2024/__init__.py +386 -0
- brainscore_vision/data/scialom2024/data_packaging/scialom_data_assembly.py +164 -0
- brainscore_vision/data/scialom2024/data_packaging/scialom_stimulus_set.py +117 -0
- brainscore_vision/data/scialom2024/test.py +301 -0
- brainscore_vision/data/seibert2019/__init__.py +25 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-10-13_juvenile.ipynb +35703 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-11-18_juvenile_scratch.txt +556 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-11-22_juvenile_dldata.ipynb +3614 -0
- brainscore_vision/data/seibert2019/data_packaging/juvenile.py +103 -0
- brainscore_vision/data/seibert2019/test.py +35 -0
- brainscore_vision/data/zhang2018/__init__.py +38 -0
- brainscore_vision/data/zhang2018/test.py +29 -0
- brainscore_vision/data_helpers/__init__.py +0 -0
- brainscore_vision/data_helpers/lookup_legacy.py +15 -0
- brainscore_vision/data_helpers/s3.py +79 -0
- brainscore_vision/metric_helpers/__init__.py +5 -0
- brainscore_vision/metric_helpers/temporal.py +119 -0
- brainscore_vision/metric_helpers/transformations.py +379 -0
- brainscore_vision/metric_helpers/utils.py +71 -0
- brainscore_vision/metric_helpers/xarray_utils.py +151 -0
- brainscore_vision/metrics/__init__.py +7 -0
- brainscore_vision/metrics/accuracy/__init__.py +4 -0
- brainscore_vision/metrics/accuracy/metric.py +16 -0
- brainscore_vision/metrics/accuracy/test.py +11 -0
- brainscore_vision/metrics/accuracy_distance/__init__.py +4 -0
- brainscore_vision/metrics/accuracy_distance/metric.py +109 -0
- brainscore_vision/metrics/accuracy_distance/test.py +57 -0
- brainscore_vision/metrics/baker_accuracy_delta/__init__.py +4 -0
- brainscore_vision/metrics/baker_accuracy_delta/metric.py +94 -0
- brainscore_vision/metrics/baker_accuracy_delta/requirements.txt +1 -0
- brainscore_vision/metrics/baker_accuracy_delta/test.py +1 -0
- brainscore_vision/metrics/cka/__init__.py +14 -0
- brainscore_vision/metrics/cka/metric.py +105 -0
- brainscore_vision/metrics/cka/test.py +28 -0
- brainscore_vision/metrics/dimensionality/__init__.py +13 -0
- brainscore_vision/metrics/dimensionality/metric.py +45 -0
- brainscore_vision/metrics/distribution_similarity/__init__.py +14 -0
- brainscore_vision/metrics/distribution_similarity/metric.py +84 -0
- brainscore_vision/metrics/distribution_similarity/test.py +10 -0
- brainscore_vision/metrics/error_consistency/__init__.py +13 -0
- brainscore_vision/metrics/error_consistency/metric.py +93 -0
- brainscore_vision/metrics/error_consistency/test.py +39 -0
- brainscore_vision/metrics/i1i2/__init__.py +16 -0
- brainscore_vision/metrics/i1i2/metric.py +299 -0
- brainscore_vision/metrics/i1i2/requirements.txt +2 -0
- brainscore_vision/metrics/i1i2/test.py +36 -0
- brainscore_vision/metrics/i1i2/test_resources/alexnet-probabilities.nc +0 -0
- brainscore_vision/metrics/i1i2/test_resources/resnet18-probabilities.nc +0 -0
- brainscore_vision/metrics/i1i2/test_resources/resnet34-probabilities.nc +0 -0
- brainscore_vision/metrics/internal_consistency/__init__.py +8 -0
- brainscore_vision/metrics/internal_consistency/ceiling.py +127 -0
- brainscore_vision/metrics/internal_consistency/requirements.txt +1 -0
- brainscore_vision/metrics/internal_consistency/test.py +39 -0
- brainscore_vision/metrics/maniquet2024_metrics/__init__.py +19 -0
- brainscore_vision/metrics/maniquet2024_metrics/metric.py +416 -0
- brainscore_vision/metrics/maniquet2024_metrics/test.py +8 -0
- brainscore_vision/metrics/mask_regression/__init__.py +16 -0
- brainscore_vision/metrics/mask_regression/metric.py +242 -0
- brainscore_vision/metrics/mask_regression/requirements.txt +1 -0
- brainscore_vision/metrics/mask_regression/test.py +0 -0
- brainscore_vision/metrics/ost/__init__.py +23 -0
- brainscore_vision/metrics/ost/metric.py +350 -0
- brainscore_vision/metrics/ost/requirements.txt +2 -0
- brainscore_vision/metrics/ost/test.py +0 -0
- brainscore_vision/metrics/rdm/__init__.py +14 -0
- brainscore_vision/metrics/rdm/metric.py +101 -0
- brainscore_vision/metrics/rdm/requirements.txt +2 -0
- brainscore_vision/metrics/rdm/test.py +63 -0
- brainscore_vision/metrics/regression_correlation/__init__.py +48 -0
- brainscore_vision/metrics/regression_correlation/mask_regression.py +232 -0
- brainscore_vision/metrics/regression_correlation/metric.py +125 -0
- brainscore_vision/metrics/regression_correlation/requirements.txt +3 -0
- brainscore_vision/metrics/regression_correlation/test.py +36 -0
- brainscore_vision/metrics/threshold/__init__.py +5 -0
- brainscore_vision/metrics/threshold/metric.py +481 -0
- brainscore_vision/metrics/threshold/test.py +71 -0
- brainscore_vision/metrics/value_delta/__init__.py +4 -0
- brainscore_vision/metrics/value_delta/metric.py +30 -0
- brainscore_vision/metrics/value_delta/requirements.txt +1 -0
- brainscore_vision/metrics/value_delta/test.py +40 -0
- brainscore_vision/model_helpers/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/__init__.py +1 -0
- brainscore_vision/model_helpers/activations/core.py +635 -0
- brainscore_vision/model_helpers/activations/pca.py +117 -0
- brainscore_vision/model_helpers/activations/pytorch.py +152 -0
- brainscore_vision/model_helpers/activations/temporal/__init__.py +0 -0
- brainscore_vision/model_helpers/activations/temporal/core/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/temporal/core/executor.py +219 -0
- brainscore_vision/model_helpers/activations/temporal/core/extractor.py +282 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/base.py +274 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/base.py +134 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/base.py +99 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/block.py +77 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/causal.py +86 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/time_aligner.py +73 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/base.py +17 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/image.py +50 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/video.py +186 -0
- brainscore_vision/model_helpers/activations/temporal/model/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/model/base.py +33 -0
- brainscore_vision/model_helpers/activations/temporal/model/pytorch.py +107 -0
- brainscore_vision/model_helpers/activations/temporal/utils.py +228 -0
- brainscore_vision/model_helpers/brain_transformation/__init__.py +97 -0
- brainscore_vision/model_helpers/brain_transformation/behavior.py +348 -0
- brainscore_vision/model_helpers/brain_transformation/imagenet_classes.txt +1000 -0
- brainscore_vision/model_helpers/brain_transformation/neural.py +159 -0
- brainscore_vision/model_helpers/brain_transformation/temporal.py +199 -0
- brainscore_vision/model_helpers/check_submission/__init__.py +0 -0
- brainscore_vision/model_helpers/check_submission/check_models.py +87 -0
- brainscore_vision/model_helpers/check_submission/images/1.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/10.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/11.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/12.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/13.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/14.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/15.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/16.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/17.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/18.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/19.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/2.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/20.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/3.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/4.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/5.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/6.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/7.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/8.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/9.png +0 -0
- brainscore_vision/model_helpers/conftest.py +3 -0
- brainscore_vision/model_helpers/generic_plugin_tests.py +119 -0
- brainscore_vision/model_helpers/s3.py +62 -0
- brainscore_vision/model_helpers/utils/__init__.py +15 -0
- brainscore_vision/model_helpers/utils/s3.py +42 -0
- brainscore_vision/model_interface.py +214 -0
- brainscore_vision/models/AdvProp_efficientne_b6/__init__.py +5 -0
- brainscore_vision/models/AdvProp_efficientne_b6/model.py +75 -0
- brainscore_vision/models/AdvProp_efficientne_b6/requirements.txt +1 -0
- brainscore_vision/models/AdvProp_efficientne_b6/test.py +9 -0
- brainscore_vision/models/AlexNet_SIN/__init__.py +8 -0
- brainscore_vision/models/AlexNet_SIN/model.py +29 -0
- brainscore_vision/models/AlexNet_SIN/requirements.txt +2 -0
- brainscore_vision/models/AlexNet_SIN/test.py +1 -0
- brainscore_vision/models/Soumyadeep_inf_1/__init__.py +5 -0
- brainscore_vision/models/Soumyadeep_inf_1/model.py +60 -0
- brainscore_vision/models/Soumyadeep_inf_1/setup.py +26 -0
- brainscore_vision/models/Soumyadeep_inf_1/test.py +1 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/__init__.py +8 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/model.py +43 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/requirements.txt +4 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/test.py +8 -0
- brainscore_vision/models/__init__.py +0 -0
- brainscore_vision/models/alexnet/__init__.py +8 -0
- brainscore_vision/models/alexnet/model.py +28 -0
- brainscore_vision/models/alexnet/requirements.txt +2 -0
- brainscore_vision/models/alexnet/test.py +15 -0
- brainscore_vision/models/alexnet_7be5be79/__init__.py +7 -0
- brainscore_vision/models/alexnet_7be5be79/model.py +44 -0
- brainscore_vision/models/alexnet_7be5be79/setup.py +26 -0
- brainscore_vision/models/alexnet_7be5be79/test.py +1 -0
- brainscore_vision/models/alexnet_7be5be79_convs/__init__.py +5 -0
- brainscore_vision/models/alexnet_7be5be79_convs/model.py +42 -0
- brainscore_vision/models/alexnet_7be5be79_convs/setup.py +25 -0
- brainscore_vision/models/alexnet_7be5be79_convs/test.py +1 -0
- brainscore_vision/models/alexnet_ks_torevert/__init__.py +8 -0
- brainscore_vision/models/alexnet_ks_torevert/model.py +28 -0
- brainscore_vision/models/alexnet_ks_torevert/requirements.txt +2 -0
- brainscore_vision/models/alexnet_ks_torevert/test.py +15 -0
- brainscore_vision/models/alexnet_simclr_run1/__init__.py +7 -0
- brainscore_vision/models/alexnet_simclr_run1/model.py +267 -0
- brainscore_vision/models/alexnet_simclr_run1/requirements.txt +2 -0
- brainscore_vision/models/alexnet_simclr_run1/test.py +1 -0
- brainscore_vision/models/alexnet_testing/__init__.py +8 -0
- brainscore_vision/models/alexnet_testing/model.py +28 -0
- brainscore_vision/models/alexnet_testing/requirements.txt +2 -0
- brainscore_vision/models/alexnet_testing/setup.py +24 -0
- brainscore_vision/models/alexnet_testing/test.py +15 -0
- brainscore_vision/models/antialias_resnet152/__init__.py +7 -0
- brainscore_vision/models/antialias_resnet152/model.py +35 -0
- brainscore_vision/models/antialias_resnet152/requirements.txt +3 -0
- brainscore_vision/models/antialias_resnet152/test.py +8 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/__init__.py +7 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/model.py +35 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/requirements.txt +1 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/test.py +8 -0
- brainscore_vision/models/bp_resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/bp_resnet50_julios/model.py +52 -0
- brainscore_vision/models/bp_resnet50_julios/setup.py +24 -0
- brainscore_vision/models/bp_resnet50_julios/test.py +1 -0
- brainscore_vision/models/clip/__init__.py +5 -0
- brainscore_vision/models/clip/model.py +179 -0
- brainscore_vision/models/clip/requirements.txt +4 -0
- brainscore_vision/models/clip/test.py +1 -0
- brainscore_vision/models/clipvision/__init__.py +5 -0
- brainscore_vision/models/clipvision/model.py +179 -0
- brainscore_vision/models/clipvision/requirements.txt +4 -0
- brainscore_vision/models/clipvision/test.py +1 -0
- brainscore_vision/models/cornet_s/__init__.py +8 -0
- brainscore_vision/models/cornet_s/helpers/helpers.py +215 -0
- brainscore_vision/models/cornet_s/model.py +77 -0
- brainscore_vision/models/cornet_s/requirements.txt +7 -0
- brainscore_vision/models/cornet_s/test.py +8 -0
- brainscore_vision/models/cornet_s_ynshah/__init__.py +388 -0
- brainscore_vision/models/cornet_s_ynshah/model.py +192 -0
- brainscore_vision/models/cornet_s_ynshah/setup.py +24 -0
- brainscore_vision/models/cornet_s_ynshah/test.py +0 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/__init__.py +7 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/model.py +75 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/requirements.txt +4 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/test.py +8 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/__init__.py +8 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/model.py +57 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/requirements.txt +3 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/test.py +25 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/__init__.py +9 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/model.py +134 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/requirements.txt +4 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/test.py +8 -0
- brainscore_vision/models/dbp_resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/dbp_resnet50_julios/model.py +52 -0
- brainscore_vision/models/dbp_resnet50_julios/setup.py +24 -0
- brainscore_vision/models/dbp_resnet50_julios/test.py +1 -0
- brainscore_vision/models/densenet_201_pytorch/__init__.py +7 -0
- brainscore_vision/models/densenet_201_pytorch/model.py +59 -0
- brainscore_vision/models/densenet_201_pytorch/requirements.txt +3 -0
- brainscore_vision/models/densenet_201_pytorch/test.py +8 -0
- brainscore_vision/models/eBarlow_Vanilla/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla/model.py +50 -0
- brainscore_vision/models/eBarlow_Vanilla/requirements.txt +2 -0
- brainscore_vision/models/eBarlow_Vanilla/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eBarlow_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/model.py +84 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eBarlow_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eBarlow_augself_linear_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_augself_linear_1/model.py +65 -0
- brainscore_vision/models/eBarlow_augself_linear_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_augself_linear_1/test.py +1 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/model.py +65 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_2/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_2/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_3/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_3/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_3/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_3/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01/model.py +50 -0
- brainscore_vision/models/eBarlow_lmda_01/requirements.txt +2 -0
- brainscore_vision/models/eBarlow_lmda_01/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/model.py +84 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/model.py +85 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/model.py +85 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_05_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla/model.py +50 -0
- brainscore_vision/models/eMMCR_Vanilla/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla/test.py +1 -0
- brainscore_vision/models/eMMCR_VanillaV2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_VanillaV2/model.py +50 -0
- brainscore_vision/models/eMMCR_VanillaV2/setup.py +24 -0
- brainscore_vision/models/eMMCR_VanillaV2/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eMMCR_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eMMCR_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01/model.py +50 -0
- brainscore_vision/models/eMMCR_lmda_01/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01V2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01V2/model.py +50 -0
- brainscore_vision/models/eMMCR_lmda_01V2/requirements.txt +2 -0
- brainscore_vision/models/eMMCR_lmda_01V2/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01V2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_3/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_3/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_3/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_3/test.py +1 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/test.py +1 -0
- brainscore_vision/models/effnetb1_272x240/__init__.py +5 -0
- brainscore_vision/models/effnetb1_272x240/model.py +126 -0
- brainscore_vision/models/effnetb1_272x240/requirements.txt +3 -0
- brainscore_vision/models/effnetb1_272x240/test.py +9 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/__init__.py +9 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/model.py +111 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/requirements.txt +6 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/test.py +8 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/__init__.py +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/model.py +142 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/requirements.txt +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/test.py +8 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/__init__.py +9 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/model.py +140 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/requirements.txt +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/test.py +8 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/__init__.py +5 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/model.py +62 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/requirements.txt +3 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/test.py +8 -0
- brainscore_vision/models/hmax/__init__.py +7 -0
- brainscore_vision/models/hmax/helpers/hmax.py +438 -0
- brainscore_vision/models/hmax/helpers/pytorch.py +216 -0
- brainscore_vision/models/hmax/model.py +69 -0
- brainscore_vision/models/hmax/requirements.txt +5 -0
- brainscore_vision/models/hmax/test.py +8 -0
- brainscore_vision/models/inception_v3_pytorch/__init__.py +7 -0
- brainscore_vision/models/inception_v3_pytorch/model.py +68 -0
- brainscore_vision/models/inception_v3_pytorch/requirements.txt +3 -0
- brainscore_vision/models/inception_v3_pytorch/test.py +8 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/__init__.py +7 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/model.py +60 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/requirements.txt +3 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/test.py +8 -0
- brainscore_vision/models/mobilevit_small/__init__.py +7 -0
- brainscore_vision/models/mobilevit_small/model.py +49 -0
- brainscore_vision/models/mobilevit_small/requirements.txt +3 -0
- brainscore_vision/models/mobilevit_small/test.py +8 -0
- brainscore_vision/models/pixels/__init__.py +8 -0
- brainscore_vision/models/pixels/model.py +35 -0
- brainscore_vision/models/pixels/test.py +15 -0
- brainscore_vision/models/pnasnet_large_pytorch/__init__.py +7 -0
- brainscore_vision/models/pnasnet_large_pytorch/model.py +59 -0
- brainscore_vision/models/pnasnet_large_pytorch/requirements.txt +3 -0
- brainscore_vision/models/pnasnet_large_pytorch/test.py +8 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/model.py +64 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/model.py +65 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/model.py +65 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/model.py +67 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/test.py +1 -0
- brainscore_vision/models/r50_tvpt/__init__.py +9 -0
- brainscore_vision/models/r50_tvpt/model.py +47 -0
- brainscore_vision/models/r50_tvpt/setup.py +24 -0
- brainscore_vision/models/r50_tvpt/test.py +1 -0
- brainscore_vision/models/regnet/__init__.py +14 -0
- brainscore_vision/models/regnet/model.py +17 -0
- brainscore_vision/models/regnet/requirements.txt +2 -0
- brainscore_vision/models/regnet/test.py +17 -0
- brainscore_vision/models/resnet18_imagenet21kP/__init__.py +6 -0
- brainscore_vision/models/resnet18_imagenet21kP/model.py +119 -0
- brainscore_vision/models/resnet18_imagenet21kP/setup.py +18 -0
- brainscore_vision/models/resnet18_imagenet21kP/test.py +0 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/__init__.py +5 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/model.py +59 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/test.py +1 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/__init__.py +9 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/model.py +72 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/test.py +1 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/__init__.py +9 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/model.py +72 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/test.py +1 -0
- brainscore_vision/models/resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/resnet50_julios/model.py +54 -0
- brainscore_vision/models/resnet50_julios/setup.py +24 -0
- brainscore_vision/models/resnet50_julios/test.py +1 -0
- brainscore_vision/models/resnet50_tutorial/__init__.py +5 -0
- brainscore_vision/models/resnet50_tutorial/model.py +34 -0
- brainscore_vision/models/resnet50_tutorial/requirements.txt +2 -0
- brainscore_vision/models/resnet50_tutorial/test.py +8 -0
- brainscore_vision/models/resnet_152_v2_pytorch/__init__.py +7 -0
- brainscore_vision/models/resnet_152_v2_pytorch/model.py +59 -0
- brainscore_vision/models/resnet_152_v2_pytorch/requirements.txt +2 -0
- brainscore_vision/models/resnet_152_v2_pytorch/test.py +8 -0
- brainscore_vision/models/resnet_50_robust/__init__.py +7 -0
- brainscore_vision/models/resnet_50_robust/model.py +55 -0
- brainscore_vision/models/resnet_50_robust/requirements.txt +3 -0
- brainscore_vision/models/resnet_50_robust/test.py +8 -0
- brainscore_vision/models/resnext101_32x16d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x16d_wsl/model.py +38 -0
- brainscore_vision/models/resnext101_32x16d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x16d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x32d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x32d_wsl/model.py +40 -0
- brainscore_vision/models/resnext101_32x32d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x32d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x48d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x48d_wsl/model.py +38 -0
- brainscore_vision/models/resnext101_32x48d_wsl/requirements.txt +3 -0
- brainscore_vision/models/resnext101_32x48d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x8d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x8d_wsl/model.py +44 -0
- brainscore_vision/models/resnext101_32x8d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x8d_wsl/test.py +8 -0
- brainscore_vision/models/temporal_model_AVID_CMA/__init__.py +17 -0
- brainscore_vision/models/temporal_model_AVID_CMA/model.py +92 -0
- brainscore_vision/models/temporal_model_AVID_CMA/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_AVID_CMA/test.py +18 -0
- brainscore_vision/models/temporal_model_GDT/__init__.py +16 -0
- brainscore_vision/models/temporal_model_GDT/model.py +72 -0
- brainscore_vision/models/temporal_model_GDT/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_GDT/test.py +17 -0
- brainscore_vision/models/temporal_model_S3D_text_video/__init__.py +14 -0
- brainscore_vision/models/temporal_model_S3D_text_video/model.py +65 -0
- brainscore_vision/models/temporal_model_S3D_text_video/requirements.txt +1 -0
- brainscore_vision/models/temporal_model_S3D_text_video/test.py +15 -0
- brainscore_vision/models/temporal_model_SeLaVi/__init__.py +17 -0
- brainscore_vision/models/temporal_model_SeLaVi/model.py +68 -0
- brainscore_vision/models/temporal_model_SeLaVi/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_SeLaVi/test.py +18 -0
- brainscore_vision/models/temporal_model_VideoMAE/__init__.py +15 -0
- brainscore_vision/models/temporal_model_VideoMAE/model.py +100 -0
- brainscore_vision/models/temporal_model_VideoMAE/requirements.txt +6 -0
- brainscore_vision/models/temporal_model_VideoMAE/test.py +16 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/__init__.py +14 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/model.py +109 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/requirements.txt +4 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/test.py +16 -0
- brainscore_vision/models/temporal_model_mae_st/__init__.py +15 -0
- brainscore_vision/models/temporal_model_mae_st/model.py +120 -0
- brainscore_vision/models/temporal_model_mae_st/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_mae_st/test.py +16 -0
- brainscore_vision/models/temporal_model_mmaction2/__init__.py +23 -0
- brainscore_vision/models/temporal_model_mmaction2/mmaction2.csv +24 -0
- brainscore_vision/models/temporal_model_mmaction2/model.py +226 -0
- brainscore_vision/models/temporal_model_mmaction2/requirements.txt +5 -0
- brainscore_vision/models/temporal_model_mmaction2/test.py +24 -0
- brainscore_vision/models/temporal_model_openstl/__init__.py +18 -0
- brainscore_vision/models/temporal_model_openstl/model.py +206 -0
- brainscore_vision/models/temporal_model_openstl/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_openstl/test.py +19 -0
- brainscore_vision/models/temporal_model_torchvision/__init__.py +19 -0
- brainscore_vision/models/temporal_model_torchvision/model.py +92 -0
- brainscore_vision/models/temporal_model_torchvision/requirements.txt +2 -0
- brainscore_vision/models/temporal_model_torchvision/test.py +20 -0
- brainscore_vision/models/tv_efficientnet_b1/__init__.py +5 -0
- brainscore_vision/models/tv_efficientnet_b1/model.py +54 -0
- brainscore_vision/models/tv_efficientnet_b1/setup.py +24 -0
- brainscore_vision/models/tv_efficientnet_b1/test.py +1 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/__init__.py +7 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/model.py +104 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/requirements.txt +8 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/test.py +8 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/LICENSE +674 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/README.md +105 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/run.py +136 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/setup.py +41 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/train.py +383 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/__init__.py +71 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/back_ends.py +337 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/modules.py +126 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/params.py +100 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/utils.py +32 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/vonenet.py +68 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet_tutorial-activations.ipynb +352 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/test.py +1 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/__init__.py +7 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/model.py +66 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/setup.py +24 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/test.py +1 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/__init__.py +7 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/model.py +68 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/setup.py +24 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/test.py +1 -0
- brainscore_vision/submission/__init__.py +0 -0
- brainscore_vision/submission/actions_helpers.py +153 -0
- brainscore_vision/submission/config.py +7 -0
- brainscore_vision/submission/endpoints.py +58 -0
- brainscore_vision/utils/__init__.py +91 -0
- brainscore_vision-2.1.dist-info/LICENSE +11 -0
- brainscore_vision-2.1.dist-info/METADATA +152 -0
- brainscore_vision-2.1.dist-info/RECORD +1009 -0
- brainscore_vision-2.1.dist-info/WHEEL +5 -0
- brainscore_vision-2.1.dist-info/top_level.txt +4 -0
- docs/Makefile +20 -0
- docs/source/conf.py +78 -0
- docs/source/index.rst +21 -0
- docs/source/modules/api_reference.rst +10 -0
- docs/source/modules/benchmarks.rst +8 -0
- docs/source/modules/brainscore_submission.png +0 -0
- docs/source/modules/developer_clarifications.rst +36 -0
- docs/source/modules/metrics.rst +8 -0
- docs/source/modules/model_interface.rst +8 -0
- docs/source/modules/submission.rst +112 -0
- docs/source/modules/tutorial_screenshots/brain-score_logo.png +0 -0
- docs/source/modules/tutorial_screenshots/final_submit.png +0 -0
- docs/source/modules/tutorial_screenshots/init_py.png +0 -0
- docs/source/modules/tutorial_screenshots/mms.png +0 -0
- docs/source/modules/tutorial_screenshots/setup.png +0 -0
- docs/source/modules/tutorial_screenshots/sms.png +0 -0
- docs/source/modules/tutorial_screenshots/subfolders.png +0 -0
- docs/source/modules/utils.rst +22 -0
- migrations/2020-12-20_pkl_to_nc.py +90 -0
- tests/__init__.py +6 -0
- tests/conftest.py +26 -0
- tests/test_benchmark_helpers/__init__.py +0 -0
- tests/test_benchmark_helpers/test_screen.py +75 -0
- tests/test_examples.py +41 -0
- tests/test_integration.py +43 -0
- tests/test_metric_helpers/__init__.py +0 -0
- tests/test_metric_helpers/test_temporal.py +80 -0
- tests/test_metric_helpers/test_transformations.py +171 -0
- tests/test_metric_helpers/test_xarray_utils.py +85 -0
- tests/test_model_helpers/__init__.py +6 -0
- tests/test_model_helpers/activations/__init__.py +0 -0
- tests/test_model_helpers/activations/test___init__.py +404 -0
- tests/test_model_helpers/brain_transformation/__init__.py +0 -0
- tests/test_model_helpers/brain_transformation/test___init__.py +18 -0
- tests/test_model_helpers/brain_transformation/test_behavior.py +181 -0
- tests/test_model_helpers/brain_transformation/test_neural.py +70 -0
- tests/test_model_helpers/brain_transformation/test_temporal.py +66 -0
- tests/test_model_helpers/temporal/__init__.py +0 -0
- tests/test_model_helpers/temporal/activations/__init__.py +0 -0
- tests/test_model_helpers/temporal/activations/test_extractor.py +96 -0
- tests/test_model_helpers/temporal/activations/test_inferencer.py +189 -0
- tests/test_model_helpers/temporal/activations/test_inputs.py +103 -0
- tests/test_model_helpers/temporal/brain_transformation/__init__.py +0 -0
- tests/test_model_helpers/temporal/brain_transformation/test_temporal_ops.py +122 -0
- tests/test_model_helpers/temporal/test_utils.py +61 -0
- tests/test_model_helpers/test_generic_plugin_tests.py +310 -0
- tests/test_model_helpers/test_imports.py +10 -0
- tests/test_model_helpers/test_s3.py +38 -0
- tests/test_models.py +15 -0
- tests/test_stimuli.py +0 -0
- tests/test_submission/__init__.py +0 -0
- tests/test_submission/mock_config.py +3 -0
- tests/test_submission/test_actions_helpers.py +67 -0
- tests/test_submission/test_db.py +54 -0
- tests/test_submission/test_endpoints.py +125 -0
- tests/test_utils.py +21 -0
@@ -0,0 +1,75 @@
+from pathlib import Path
+from brainio.stimuli import StimulusSet
+from brainio.packaging import package_stimulus_set
+
+stimuli = []
+image_paths = {}
+stimuli_directory = '../datasets/stylized/dnn/session-1'
+
+
+'''
+Dataset Meta Info (from https://github.com/rgeirhos/generalisation-humans-DNNs)
+
+Sample image from dataset:
+0779_sty_s01_0_chair_00_chair-0053-ILSVRC2012-val-00001855.png
+
+This is a concatenation of the following information (separated by '_'):
+
+1) a four-digit number starting with 0000 for the first image in an experiment;
+   the last image therefore has the number n-1 if n is the number of images in a certain experiment
+2) short code for experiment name, e.g. 'eid' for eidolon-experiment
+3) subject: either e.g. 's01' for 'subject-01', or 'dnn' for DNNs
+4) condition
+5) category (ground truth)
+6) a number (just ignore it)
+7) image lookup ID: the exact image shown to the subject
+
+'''
+
+for filepath in Path(stimuli_directory).glob('*.png'):
+
+    # entire name of image file:
+    image_id = filepath.stem
+    image_id_long = image_id
+    split_name = filepath.stem.split('_')
+
+    # ensure proper metadata length per image in set
+    assert len(split_name) == 7
+
+    # Dataset image data, fields 1-6 from above:
+    image_number = split_name[0]
+    experiment_code = split_name[1]
+    subject = split_name[2]
+    condition = split_name[3]
+    category_ground_truth = split_name[4]
+    random_number = split_name[5]
+
+    # this is the same as the data assembly's image_lookup_id
+    image_lookup_id = f"{condition}_{category_ground_truth}_{random_number}_{split_name[6]}"
+
+    image_paths[image_id] = filepath
+    stimuli.append({
+        'image_id': image_lookup_id,
+        'image_id_long': image_id_long,
+        'experiment_code': experiment_code,
+        'condition': condition,
+        'truth': category_ground_truth,
+        'category_ground_truth': category_ground_truth,
+        'random_number': random_number,
+        'image_number': image_number,
+        'image_lookup_id': image_lookup_id,
+    })
+
+stimuli = StimulusSet(stimuli)
+image_id_to_lookup = dict(zip(stimuli['image_id_long'], stimuli['image_id']))
+stimuli.image_paths = image_paths
+stimuli.image_paths = {image_id_to_lookup[image_id]: path
+                       for image_id, path in stimuli.image_paths.items()}
+stimuli.name = 'Geirhos2021_stylized'
+
+# Ensure 800 images in dataset
+assert len(stimuli) == 800
+
+# upload to S3
+package_stimulus_set("brainio_brainscore", stimuli, stimulus_set_identifier=stimuli.name,
+                     bucket_name="brainio-brainscore")
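
Applied to the sample filename from the docstring above, the parsing yields the following; this is a standalone illustration, not part of the packaged script:

# Decode the sample stylized filename; the indices mirror split_name above.
name = '0779_sty_s01_0_chair_00_chair-0053-ILSVRC2012-val-00001855'  # filepath.stem, i.e. without '.png'
parts = name.split('_')
assert len(parts) == 7
# parts -> ['0779', 'sty', 's01', '0', 'chair', '00', 'chair-0053-ILSVRC2012-val-00001855']
#           number  expmt  subj  cond  category ignored  lookup suffix
image_lookup_id = f"{parts[3]}_{parts[4]}_{parts[5]}_{parts[6]}"
print(image_lookup_id)  # 0_chair_00_chair-0053-ILSVRC2012-val-00001855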
brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_data_assembly.py
ADDED
@@ -0,0 +1,86 @@
+import numpy as np
+
+from brainio.assemblies import BehavioralAssembly
+from brainio.packaging import package_data_assembly
+import pandas as pd
+
+'''
+Experiment Information:
+https://arxiv.org/pdf/1706.06969.pdf
+
+- 4 subjects
+- 1280 images each
+- 5120 total images shown
+- match-to-sample task, 16AFC
+- 16 image categories
+- for this benchmark (uniform-noise), subjects saw the EXACT image indicated by the variable/column name
+  image_lookup_id, and not a variation of it (no distortions, editing, etc). Condition is:
+
+  noise-experiment: '0', '0.03', ... '0.9' for noise width;
+
+'''
+
+# initial csv to dataframe processing:
+subject_1 = pd.read_csv('data_assemblies/uniform-noise_subject-01_session_1.csv')
+subject_2 = pd.read_csv('data_assemblies/uniform-noise_subject-02_session_1.csv')
+subject_3 = pd.read_csv('data_assemblies/uniform-noise_subject-03_session_1.csv')
+subject_4 = pd.read_csv('data_assemblies/uniform-noise_subject-04_session_1.csv')
+
+all_subjects = pd.concat([subject_1, subject_2, subject_3, subject_4])
+
+# parse df for the image lookup id. This relates the data assembly to the stimulus set.
+split_cols = all_subjects['imagename'].str.split("_", expand=True)
+drop_cols = split_cols.drop(split_cols.columns[[0, 1, 2]], axis=1)
+all_subjects['image_lookup_id'] = drop_cols.agg("_".join, axis=1).str.replace(".png", "")
+
+
+# construct the assembly
+assembly = BehavioralAssembly(all_subjects['object_response'],
+                              coords={
+                                  'image_id': ('presentation', all_subjects['image_lookup_id']),
+                                  'image_id_long': ('presentation', all_subjects['imagename']),
+                                  'truth': ('presentation', all_subjects['category']),
+                                  'choice': ('presentation', all_subjects['object_response']),
+                                  'category': ('presentation', all_subjects['category']),
+                                  'condition': ('presentation', all_subjects['condition']),
+                                  'response_time': ('presentation', all_subjects['rt']),
+                                  'trial': ('presentation', all_subjects['trial']),
+                                  'subject': ('presentation', all_subjects['subj']),
+                                  'session': ('presentation', all_subjects['Session']),
+                              },
+                              dims=['presentation']
+                              )
+
+# give the assembly an identifier name
+assembly.name = 'brendel.Geirhos2021_uniform-noise'
+
+# make sure the assembly dim is the correct length
+assert len(assembly['presentation']) == 5120
+
+# make sure the assembly coords are the correct length
+assert len(assembly['image_id']) == 5120
+assert len(assembly['image_id_long']) == 5120
+assert len(assembly['truth']) == 5120
+assert len(assembly['category']) == 5120
+assert len(assembly['condition']) == 5120
+assert len(assembly['response_time']) == 5120
+assert len(assembly['trial']) == 5120
+assert len(assembly['subject']) == 5120
+assert len(assembly['session']) == 5120
+
+
+# make sure there are 1280 unique images (each shown once to each of 4 subjects, for 4 * 1280 = 5120 presentations)
+assert len(np.unique(assembly['image_id'].values)) == 1280
+
+# make sure there are 4 unique subjects:
+assert len(np.unique(assembly['subject'].values)) == 4
+
+# make sure there are 16 unique object categories (ground truths):
+assert len(np.unique(assembly['truth'].values)) == 16
+assert len(np.unique(assembly['category'].values)) == 16
+
+
+# upload to S3
+package_data_assembly('brainio_brainscore', assembly, assembly_identifier=assembly.name,
+                      stimulus_set_identifier="Geirhos2021_uniform-noise",
+                      assembly_class="BehavioralAssembly", bucket_name="brainio-brainscore")
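
For illustration, here is the imagename parsing above applied to a single made-up row in the same raw-CSV format. This is a standalone sketch; regex=False is added so the literal '.png' suffix is stripped regardless of pandas version, whereas the script above relies on the older default:

import pandas as pd

# Hypothetical single-row frame in the raw 'imagename' format.
df = pd.DataFrame({'imagename': ['0001_nse_s01_0.03_elephant_10_n02504458_7209.png']})
split_cols = df['imagename'].str.split('_', expand=True)
drop_cols = split_cols.drop(split_cols.columns[[0, 1, 2]], axis=1)  # drop counter, experiment code, subject
df['image_lookup_id'] = drop_cols.agg('_'.join, axis=1).str.replace('.png', '', regex=False)
print(df['image_lookup_id'][0])  # 0.03_elephant_10_n02504458_7209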
@@ -0,0 +1,82 @@
+from pathlib import Path
+from brainio.stimuli import StimulusSet
+from brainio.packaging import package_stimulus_set
+
+stimuli = []
+image_paths = {}
+stimuli_directory = '../datasets/uniform-noise/dnn/session-1'
+
+
+'''
+Dataset Meta Info (from https://github.com/rgeirhos/generalisation-humans-DNNs)
+
+Sample image from dataset:
+0001_nse_s01_0.03_elephant_10_n02504458_7209.png
+
+This is a concatenation of the following information (separated by '_'):
+
+1) a four-digit number starting with 0000 for the first image in an experiment;
+   the last image therefore has the number n-1 if n is the number of images in a certain experiment
+2) short code for experiment name, e.g. 'eid' for eidolon-experiment
+3) subject: either e.g. 's01' for 'subject-01', or 'dnn' for DNNs
+4) condition
+5) category (ground truth)
+6) a number (just ignore it)
+7) image identifier in the form a_b.JPEG (or a_b.png), with a being the
+   WNID (WordNet ID) of the corresponding synset and b being an integer.
+'''
+
+for filepath in Path(stimuli_directory).glob('*.png'):
+
+    # entire name of image file:
+    image_id = filepath.stem
+    image_id_long = image_id
+    split_name = filepath.stem.split('_')
+
+    # ensure proper metadata length per image in set
+    assert len(split_name) == 8
+
+    # Dataset image data, fields 1-7 from above:
+    image_number = split_name[0]
+    experiment_code = split_name[1]
+    subject = split_name[2]
+    condition = split_name[3]
+    category_ground_truth = split_name[4]
+    random_number = split_name[5]
+
+    # field 7 splits into its WNID and integer parts, for a total of 8 metadata fields:
+    wordnet_a = split_name[6]
+    wordnet_b = split_name[7]
+
+    # image lookup ID, same as the data assembly's. This is the exact image shown to the participant.
+    # This is needed because the raw images have "dnn" in the subject field even when a human subject
+    # was used. Otherwise, the image names in the raw data table and the image files themselves are the same.
+    image_lookup_id = "_".join(split_name[3:])
+
+    image_paths[image_id] = filepath
+    stimuli.append({
+        'image_id': image_lookup_id,
+        'image_id_long': image_id_long,
+        'image_number': image_number,
+        'experiment_code': experiment_code,
+        'condition': condition,
+        'truth': category_ground_truth,
+        'category_ground_truth': category_ground_truth,
+        'random_number': random_number,
+        'wordnet_a': wordnet_a,
+        'wordnet_b': wordnet_b,
+    })
+
+stimuli = StimulusSet(stimuli)
+image_id_to_lookup = dict(zip(stimuli['image_id_long'], stimuli['image_id']))
+stimuli.image_paths = image_paths
+stimuli.image_paths = {image_id_to_lookup[image_id]: path
+                       for image_id, path in stimuli.image_paths.items()}
+stimuli.name = 'Geirhos2021_uniform-noise'  # give the StimulusSet an identifier name
+
+# Ensure 1280 images in dataset
+assert len(stimuli) == 1280
+
+# upload to S3
+package_stimulus_set("brainio_brainscore", stimuli, stimulus_set_identifier=stimuli.name,
+                     bucket_name="brainio-brainscore")
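
Note that this script arrives at the same key from the filename side: Path(...).stem already drops the '.png' suffix, so joining fields 3 onward reproduces the image_lookup_id that the assembly script derives from the raw CSV, and this shared key is what aligns the two. A one-line check, using the sample filename from the docstring:

from pathlib import Path

stem = Path('0001_nse_s01_0.03_elephant_10_n02504458_7209.png').stem  # stem drops the suffix
print('_'.join(stem.split('_')[3:]))  # 0.03_elephant_10_n02504458_7209, matching the assembly's lookup id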
@@ -0,0 +1,52 @@
+identifier,lookup_type,class,location_type,location,sha1,stimulus_set_identifier,lookup_source
+brendel.Geirhos2021_colour,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_colour.nc,258862d82467614e45cc1e488a5ac909eb6e122d,brendel.Geirhos2021_colour,brainio_brainscore
+brendel.Geirhos2021_colour,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_colour.csv,9c97c155fd6039a95978be89eb604c6894c5fa16,,brainio_brainscore
+brendel.Geirhos2021_colour,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_colour.zip,d166f1d3dc3d00c4f51a489e6fcf96dbbe778d2c,,brainio_brainscore
+brendel.Geirhos2021_contrast,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_contrast.nc,1e114c987dc035ccca43781ff8cee9689acd3c3f,brendel.Geirhos2021_contrast,brainio_brainscore
+brendel.Geirhos2021_contrast,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_contrast.csv,f6ef69a2e8937e1d0d83c8a21b325b4273494cb5,,brainio_brainscore
+brendel.Geirhos2021_contrast,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_contrast.zip,ebeeef2f9c6a7282e20ef2026dc77eefa026957b,,brainio_brainscore
+brendel.Geirhos2021_cue-conflict,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_cue-conflict.nc,cc214e3595d34565b13963c5f56049769a39a5c9,brendel.Geirhos2021_cue-conflict,brainio_brainscore
+brendel.Geirhos2021_cue-conflict,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_cue-conflict.csv,8d3ae89d8870fb7d7c5d5ff387085b1f0116e2b7,,brainio_brainscore
+brendel.Geirhos2021_cue-conflict,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_cue-conflict.zip,7e601186b181102939cd0b43a4e8a3ca95c18259,,brainio_brainscore
+brendel.Geirhos2021_edge,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_edge.nc,ab1dc9e188e248da07215b375eb3dbcc58fde7fb,brendel.Geirhos2021_edge,brainio_brainscore
+brendel.Geirhos2021_edge,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_edge.csv,fb57005ecb80e4e37b01e084f1f176fe7f59ff7f,,brainio_brainscore
+brendel.Geirhos2021_edge,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_edge.zip,6c1199d90836a26be454aa799864a63c5efacaa1,,brainio_brainscore
+brendel.Geirhos2021_eidolonI,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_eidolonI.nc,0f01f351ae19eafc2cb5e504d98e5cd01b4c07b4,brendel.Geirhos2021_eidolonI,brainio_brainscore
+brendel.Geirhos2021_eidolonI,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_eidolonI.csv,049adbed36fed52c609d38d5230e6084336df6b9,,brainio_brainscore
+brendel.Geirhos2021_eidolonI,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_eidolonI.zip,abaa45a225628dd66e424ec7d8e2b10a0c88bc0d,,brainio_brainscore
+brendel.Geirhos2021_eidolonII,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_eidolonII.nc,499eea0f0c0817d02b5b97d2ebab89bc4c40a153,brendel.Geirhos2021_eidolonII,brainio_brainscore
+brendel.Geirhos2021_eidolonII,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_eidolonII.csv,1806ada128c260ab54db570d2e73aea71d679754,,brainio_brainscore
+brendel.Geirhos2021_eidolonII,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_eidolonII.zip,2654ba55291f8ab972f18b36565f9ead80a45339,,brainio_brainscore
+brendel.Geirhos2021_eidolonIII,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_eidolonIII.nc,e7c9a49e729f8666f8aedc6e47c746fbbe2ebe36,brendel.Geirhos2021_eidolonIII,brainio_brainscore
+brendel.Geirhos2021_eidolonIII,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_eidolonIII.csv,ba0173b315f02df16d418dc3ff1df7dc498b4893,,brainio_brainscore
+brendel.Geirhos2021_eidolonIII,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_eidolonIII.zip,d0304c0c0024d0f493ea9c0c47ae0221da391016,,brainio_brainscore
+brendel.Geirhos2021_false-colour,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_false-colour.nc,4dc072264651c81575564ba4818a12b8e8039c65,brendel.Geirhos2021_false-colour,brainio_brainscore
+brendel.Geirhos2021_false-colour,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_false-colour.csv,8a09a7af8ec44339bcae5500ae5900d9c4309042,,brainio_brainscore
+brendel.Geirhos2021_false-colour,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_false-colour.zip,ec0ba347fc14d0c0587d38bfa96e4ab5d2f7979a,,brainio_brainscore
+brendel.Geirhos2021_high-pass,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_high-pass.nc,5df45c69127758f1ba3391671c521711050e3b4d,brendel.Geirhos2021_high-pass,brainio_brainscore
+brendel.Geirhos2021_high-pass,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_high-pass.csv,ddf523dcf43398cc15894c7b51c436d526e6c992,,brainio_brainscore
+brendel.Geirhos2021_high-pass,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_high-pass.zip,12322bb17270a5dde206314fcdc125c4bb235e3b,,brainio_brainscore
+brendel.Geirhos2021_low-pass,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_low-pass.nc,75ab628d9e6d0d634290567b1cb261d7f8dc61e2,brendel.Geirhos2021_low-pass,brainio_brainscore
+brendel.Geirhos2021_low-pass,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_low-pass.csv,1264f9be407c7d428cf3d62a7bb1b1bb45a821bc,,brainio_brainscore
+brendel.Geirhos2021_low-pass,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_low-pass.zip,ad087676e04e51adadea7c7c5c1fa28e4dd6360c,,brainio_brainscore
+brendel.Geirhos2021_phase-scrambling,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_phase-scrambling.nc,4124f9f5b86fb6ed82c98197d292eef50b608aba,brendel.Geirhos2021_phase-scrambling,brainio_brainscore
+brendel.Geirhos2021_phase-scrambling,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_phase-scrambling.csv,0cc87f7ac42c2266f98d3a08783f7173499ec2fc,,brainio_brainscore
+brendel.Geirhos2021_phase-scrambling,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_phase-scrambling.zip,462e77ab9533072b7036118f1f697e8c9bf30ae4,,brainio_brainscore
+brendel.Geirhos2021_power-equalisation,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_power-equalisation.nc,0aba1b50a7e0802d76c41d332a121a3f96ef4f7d,brendel.Geirhos2021_power-equalisation,brainio_brainscore
+brendel.Geirhos2021_power-equalisation,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_power-equalisation.csv,743935476b1fe4b7bd4d01a4eed24cd9ed5b3a22,,brainio_brainscore
+brendel.Geirhos2021_power-equalisation,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_power-equalisation.zip,a0d5307525bccf8da8f3d293e7e324b9b20248c6,,brainio_brainscore
+brendel.Geirhos2021_rotation,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_rotation.nc,e51a5c3bc95ade159e71aa602232063730bcd57b,brendel.Geirhos2021_rotation,brainio_brainscore
+brendel.Geirhos2021_rotation,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_rotation.csv,2577831e9ead905669613fa17cb2651d0c48a455,,brainio_brainscore
+brendel.Geirhos2021_rotation,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_rotation.zip,8f5d9cb217807e96ace61337144e429d0d4ba04c,,brainio_brainscore
+brendel.Geirhos2021_silhouette,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_silhouette.nc,7dc94991465fe8009244e0d6fb8283419a1f9885,brendel.Geirhos2021_silhouette,brainio_brainscore
+brendel.Geirhos2021_silhouette,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_silhouette.csv,fb57005ecb80e4e37b01e084f1f176fe7f59ff7f,,brainio_brainscore
+brendel.Geirhos2021_silhouette,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_silhouette.zip,36c8a481f7876a2af2ad4fe80890b302fe3ae91e,,brainio_brainscore
+brendel.Geirhos2021_sketch,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_sketch.nc,6709850864cea16d99a29fb31ae3c4a489983562,brendel.Geirhos2021_sketch,brainio_brainscore
+brendel.Geirhos2021_sketch,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_sketch.csv,a5741b2f7bd08541a0dbefd7fb2d6a3845ca800b,,brainio_brainscore
+brendel.Geirhos2021_sketch,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_sketch.zip,8e8712f08a5ad3655ea2bd8cd675db8cdf65129a,,brainio_brainscore
+brendel.Geirhos2021_stylized,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_stylized.nc,dcf15f292e787a88e1e0f271e6b2838d6bdadfd3,brendel.Geirhos2021_stylized,brainio_brainscore
+brendel.Geirhos2021_stylized,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_stylized.csv,2265a540dee6915150bf7c61143eaf788f603866,,brainio_brainscore
+brendel.Geirhos2021_stylized,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_stylized.zip,75d273e8de643b0d814fbe60cd237c31ebe19c44,,brainio_brainscore
+brendel.Geirhos2021_uniform-noise,assembly,BehavioralAssembly,S3,https://brainio-brainscore.s3.amazonaws.com/assy_brendel_Geirhos2021_uniform-noise.nc,f5e8b2636738f978c71591b8df6f8a21a66b72d1,brendel.Geirhos2021_uniform-noise,brainio_brainscore
+brendel.Geirhos2021_uniform-noise,stimulus_set,StimulusSet,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_uniform-noise.csv,89b62a6af878974d388278ed0e23e8ed1c2fd855,,brainio_brainscore
+brendel.Geirhos2021_uniform-noise,stimulus_set,,S3,https://brainio-brainscore.s3.amazonaws.com/image_brendel_Geirhos2021_uniform-noise.zip,ff4566542d65056028660293e2409b532e887714,,brainio_brainscore
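
Each row of this lookup table pairs an identifier with an S3 location and a SHA1 for integrity checking. A minimal sketch of how one row could be consumed (hypothetical local filename, and access to the bucket permitting; the packaged loaders presumably do the equivalent internally):

import csv
import hashlib
import urllib.request

with open('lookup.csv') as f:  # hypothetical local copy of the table above
    row = next(csv.DictReader(f))  # first data row: the Geirhos2021_colour assembly

data = urllib.request.urlopen(row['location']).read()
assert hashlib.sha1(data).hexdigest() == row['sha1']  # verify the download against the recorded SHA1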
@@ -0,0 +1,330 @@
+import numpy as np
+import pytest
+
+from brainscore_vision import load_stimulus_set, load_dataset
+from brainscore_vision.benchmarks.geirhos2021.benchmark import DATASETS
+
+
+def test_count():
+    assert len(DATASETS) == 12 + 5
+
+
+@pytest.mark.parametrize('assembly_identifier', [
+    pytest.param('Geirhos2021_colour', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_contrast', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_cue-conflict', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_edge', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_eidolonI', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_eidolonII', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_eidolonIII', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_false-colour', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_high-pass', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_low-pass', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_phase-scrambling', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_power-equalisation', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_rotation', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_silhouette', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_stylized', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_sketch', marks=[pytest.mark.private_access]),
+    pytest.param('Geirhos2021_uniform-noise', marks=[pytest.mark.private_access]),
+])
+def test_existence(assembly_identifier):
+    assert load_dataset(assembly_identifier) is not None
+
+
+class TestAssemblies:
+    # test stimulus_set data alignment with the assembly:
+    @pytest.mark.parametrize('identifier', [
+        'colour',
+        'contrast',
+        'cue-conflict',
+        'edge',
+        'eidolonI',
+        'eidolonII',
+        'eidolonIII',
+        'false-colour',
+        'high-pass',
+        'low-pass',
+        'phase-scrambling',
+        'power-equalisation',
+        'rotation',
+        'silhouette',
+        'stylized',
+        'sketch',
+        'uniform-noise',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'condition',
+        'truth',
+    ])
+    def test_stimulus_set_assembly_alignment(self, identifier, field):
+        full_name = f"Geirhos2021_{identifier}"
+        assembly = load_dataset(full_name)
+        assert assembly.stimulus_set is not None
+        assert assembly.stimulus_set.identifier == full_name
+        assert set(assembly.stimulus_set[field]) == set(assembly[field].values)
+
+    # test the number of subjects:
+    @pytest.mark.parametrize('identifier, num_subjects', [
+        ('colour', 4),
+        ('contrast', 4),
+        ('cue-conflict', 10),
+        ('edge', 10),
+        ('eidolonI', 4),
+        ('eidolonII', 4),
+        ('eidolonIII', 4),
+        ('false-colour', 4),
+        ('high-pass', 4),
+        ('low-pass', 4),
+        ('phase-scrambling', 4),
+        ('power-equalisation', 4),
+        ('rotation', 4),
+        ('silhouette', 10),
+        ('stylized', 5),
+        ('sketch', 7),
+        ('uniform-noise', 4),
+    ])
+    def test_num_subjects(self, identifier, num_subjects):
+        assembly = load_dataset(f"Geirhos2021_{identifier}")
+        assert len(np.unique(assembly['subject'].values)) == num_subjects
+
+    # test the number of unique images:
+    @pytest.mark.parametrize('identifier, num_images', [
+        ('colour', 1280),
+        ('contrast', 1280),
+        ('cue-conflict', 1280),
+        ('edge', 160),
+        ('eidolonI', 1280),
+        ('eidolonII', 1280),
+        ('eidolonIII', 1280),
+        ('false-colour', 1120),
+        ('high-pass', 1280),
+        ('low-pass', 1280),
+        ('phase-scrambling', 1120),
+        ('power-equalisation', 1120),
+        ('rotation', 1280),
+        ('silhouette', 160),
+        ('stylized', 800),
+        ('sketch', 800),
+        ('uniform-noise', 1280),
+    ])
+    def test_num_images(self, identifier, num_images):
+        assembly = load_dataset(f"Geirhos2021_{identifier}")
+        assert len(np.unique(assembly['image_id'].values)) == num_images
+
+    # tests the assembly dim for ALL 17 sets:
+    @pytest.mark.parametrize('identifier, length', [
+        ('colour', 5120),
+        ('contrast', 5120),
+        ('cue-conflict', 12800),
+        ('edge', 1600),
+        ('eidolonI', 5120),
+        ('eidolonII', 5120),
+        ('eidolonIII', 5120),
+        ('false-colour', 4480),
+        ('high-pass', 5120),
+        ('low-pass', 5120),
+        ('phase-scrambling', 4480),
+        ('power-equalisation', 4480),
+        ('rotation', 5120),
+        ('silhouette', 1600),
+        ('stylized', 4000),
+        ('sketch', 5600),
+        ('uniform-noise', 5120),
+    ])
+    def test_length(self, identifier, length):
+        assembly = load_dataset(f"Geirhos2021_{identifier}")
+        assert len(assembly['presentation']) == length
+
+    # test assembly coords present in ALL 17 sets:
+    @pytest.mark.parametrize('identifier', [
+        'colour',
+        'contrast',
+        'cue-conflict',
+        'edge',
+        'eidolonI',
+        'eidolonII',
+        'eidolonIII',
+        'false-colour',
+        'high-pass',
+        'low-pass',
+        'phase-scrambling',
+        'power-equalisation',
+        'rotation',
+        'silhouette',
+        'stylized',
+        'sketch',
+        'uniform-noise',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'image_id_long',
+        'choice',
+        'truth',
+        'condition',
+        'response_time',
+        'trial',
+        'subject',
+        'session',
+    ])
+    def test_fields_present(self, identifier, field):
+        assembly = load_dataset(f"Geirhos2021_{identifier}")
+        assert hasattr(assembly, field)
+
+    # tests assembly coords for the 2 "abnormal" sets:
+    @pytest.mark.parametrize('identifier', [
+        'edge',
+        'silhouette',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'image_category',
+        'truth',
+        'image_variation',
+        'condition',
+    ])
+    def test_fields_present_abnormal_sets(self, identifier, field):
+        assembly = load_dataset(f"Geirhos2021_{identifier}")
+        assert hasattr(assembly, field)
+
+    # tests assembly coords for the cue-conflict set, which differs from the rest:
+    @pytest.mark.parametrize('identifier', [
+        'cue-conflict',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'original_image',
+        'truth',
+        'category',
+        'conflict_image',
+        'original_image_category',
+        'original_image_variation',
+        'conflict_image_category',
+        'conflict_image_variation',
+        'condition',
+    ])
+    def test_fields_present_cue_conflict(self, identifier, field):
+        assembly = load_dataset(f"Geirhos2021_{identifier}")
+        assert hasattr(assembly, field)
+
+
+# testing stimulus sets
+@pytest.mark.slow
+class TestStimulusSets:
+    # test stimulus_set existence:
+    @pytest.mark.parametrize('identifier', [
+        'colour',
+        'contrast',
+        'cue-conflict',
+        'edge',
+        'eidolonI',
+        'eidolonII',
+        'eidolonIII',
+        'false-colour',
+        'high-pass',
+        'low-pass',
+        'phase-scrambling',
+        'power-equalisation',
+        'rotation',
+        'silhouette',
+        'stylized',
+        'sketch',
+        'uniform-noise',
+    ])
+    def test_stimulus_set_exist(self, identifier):
+        full_name = f"Geirhos2021_{identifier}"
+        stimulus_set = load_stimulus_set(full_name)
+        assert stimulus_set is not None
+        assert stimulus_set.identifier == full_name
+
+    # test the number of images:
+    @pytest.mark.parametrize('identifier, num_images', [
+        ('colour', 1280),
+        ('contrast', 1280),
+        ('cue-conflict', 1280),
+        ('edge', 160),
+        ('eidolonI', 1280),
+        ('eidolonII', 1280),
+        ('eidolonIII', 1280),
+        ('false-colour', 1120),
+        ('high-pass', 1280),
+        ('low-pass', 1280),
+        ('phase-scrambling', 1120),
+        ('power-equalisation', 1120),
+        ('rotation', 1280),
+        ('silhouette', 160),
+        ('stylized', 800),
+        ('sketch', 800),
+        ('uniform-noise', 1280),
+    ])
+    def test_num_images(self, identifier, num_images):
+        stimulus_set = load_stimulus_set(f"Geirhos2021_{identifier}")
+        assert len(np.unique(stimulus_set['image_id'].values)) == num_images
+
+    # tests stimulus_set coords for the 14 "normal" sets:
+    @pytest.mark.parametrize('identifier', [
+        'colour',
+        'contrast',
+        'eidolonI',
+        'eidolonII',
+        'eidolonIII',
+        'false-colour',
+        'high-pass',
+        'low-pass',
+        'phase-scrambling',
+        'power-equalisation',
+        'rotation',
+        'stylized',
+        'sketch',
+        'uniform-noise',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'image_id_long',
+        'image_number',
+        'experiment_code',
+        'condition',
+        'truth',
+        'category_ground_truth',
+        'random_number',
+    ])
+    def test_fields_present(self, identifier, field):
+        stimulus_set = load_stimulus_set(f"Geirhos2021_{identifier}")
+        assert hasattr(stimulus_set, field)
+
+    # tests stimulus_set coords for the 2 "abnormal" sets:
+    @pytest.mark.parametrize('identifier', [
+        'edge',
+        'silhouette',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'image_category',
+        'truth',
+        'image_variation',
+        'condition',
+    ])
+    def test_fields_present2(self, identifier, field):
+        stimulus_set = load_stimulus_set(f"Geirhos2021_{identifier}")
+        assert hasattr(stimulus_set, field)
+
+    # tests stimulus_set fields for cue-conflict's odd stimulus_set:
+    @pytest.mark.parametrize('identifier', [
+        'cue-conflict',
+    ])
+    @pytest.mark.parametrize('field', [
+        'image_id',
+        'original_image',
+        'truth',
+        'category',
+        'conflict_image',
+        'original_image_category',
+        'original_image_variation',
+        'conflict_image_category',
+        'conflict_image_variation',
+        'condition',
+    ])
+    def test_fields_present3(self, identifier, field):
+        stimulus_set = load_stimulus_set(f"Geirhos2021_{identifier}")
+        assert hasattr(stimulus_set, field)
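
These tests rely on two custom pytest marks, private_access (data behind S3 credentials) and slow. Assuming the marks are registered in the project's pytest configuration, a local run that deselects both could look like this (the test-file path is illustrative):

import pytest

# Deselect tests that require private S3 access or are marked slow.
pytest.main(['brainscore_vision/benchmarks/geirhos2021/test.py',
             '-m', 'not private_access and not slow'])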
@@ -0,0 +1,23 @@
+from brainio.assemblies import NeuronRecordingAssembly
+from brainscore_vision import load_stimulus_set
+from brainscore_vision import stimulus_set_registry, data_registry
+from brainscore_vision.data_helpers.s3 import load_assembly_from_s3, load_stimulus_set_from_s3
+
+stimulus_set_registry['Hebart2023'] = lambda: load_stimulus_set_from_s3(
+    identifier="Hebart2023",
+    bucket="brainio-brainscore",
+    csv_version_id="rHfHstbIZesJuXguqTpO3kAsDdRJ8FY_",
+    csv_sha1="8fd5a8d4d68cc206000878dd835829fd14a426e8",
+    zip_version_id="Vo4af.yuEzGR8d.tHtKzeQNAigThiiyE",
+    zip_sha1="f244ebfe07c98470885026188c801857ba2ec0ea",
+    filename_prefix='stimulus_',
+)
+
+data_registry['Hebart2023'] = lambda: load_assembly_from_s3(
+    identifier="Hebart2023",
+    version_id="nO4rlBtVj1agBGyrguZfLsAxJ9qDIzfy",
+    sha1="90f66d37c202dcd1c74d82854efd4a1e8c5fe82e",
+    bucket="brainio-brainscore",
+    cls=NeuronRecordingAssembly,
+    stimulus_set_loader=lambda: load_stimulus_set('Hebart2023'),
+)
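
With these registry entries in place, the data is resolved lazily by identifier. A minimal usage sketch, assuming the package is installed and the S3 bucket is reachable:

from brainscore_vision import load_dataset, load_stimulus_set

stimulus_set = load_stimulus_set('Hebart2023')  # fetches the csv + zip recorded above
assembly = load_dataset('Hebart2023')           # NeuronRecordingAssembly, with the stimulus set attached by the loader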