brainscore-vision 2.1__py3-none-any.whl
- brainscore_vision/__init__.py +105 -0
- brainscore_vision/__main__.py +20 -0
- brainscore_vision/benchmark_helpers/__init__.py +67 -0
- brainscore_vision/benchmark_helpers/neural_common.py +70 -0
- brainscore_vision/benchmark_helpers/properties_common.py +424 -0
- brainscore_vision/benchmark_helpers/screen.py +126 -0
- brainscore_vision/benchmark_helpers/test_helper.py +160 -0
- brainscore_vision/benchmarks/README.md +7 -0
- brainscore_vision/benchmarks/__init__.py +122 -0
- brainscore_vision/benchmarks/baker2022/__init__.py +9 -0
- brainscore_vision/benchmarks/baker2022/benchmark.py +125 -0
- brainscore_vision/benchmarks/baker2022/requirements.txt +1 -0
- brainscore_vision/benchmarks/baker2022/test.py +90 -0
- brainscore_vision/benchmarks/bmd2024/__init__.py +8 -0
- brainscore_vision/benchmarks/bmd2024/benchmark.py +51 -0
- brainscore_vision/benchmarks/bmd2024/test.py +29 -0
- brainscore_vision/benchmarks/bracci2019/__init__.py +8 -0
- brainscore_vision/benchmarks/bracci2019/benchmark.py +286 -0
- brainscore_vision/benchmarks/bracci2019/requirements.txt +3 -0
- brainscore_vision/benchmarks/cadena2017/__init__.py +5 -0
- brainscore_vision/benchmarks/cadena2017/benchmark.py +91 -0
- brainscore_vision/benchmarks/cadena2017/test.py +35 -0
- brainscore_vision/benchmarks/coggan2024_behavior/__init__.py +8 -0
- brainscore_vision/benchmarks/coggan2024_behavior/benchmark.py +133 -0
- brainscore_vision/benchmarks/coggan2024_behavior/test.py +21 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/__init__.py +15 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/benchmark.py +201 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/test.py +25 -0
- brainscore_vision/benchmarks/ferguson2024/__init__.py +24 -0
- brainscore_vision/benchmarks/ferguson2024/benchmark.py +210 -0
- brainscore_vision/benchmarks/ferguson2024/helpers/helpers.py +251 -0
- brainscore_vision/benchmarks/ferguson2024/requirements.txt +5 -0
- brainscore_vision/benchmarks/ferguson2024/test.py +114 -0
- brainscore_vision/benchmarks/freemanziemba2013/__init__.py +10 -0
- brainscore_vision/benchmarks/freemanziemba2013/benchmarks/benchmark.py +53 -0
- brainscore_vision/benchmarks/freemanziemba2013/benchmarks/public_benchmarks.py +37 -0
- brainscore_vision/benchmarks/freemanziemba2013/test.py +98 -0
- brainscore_vision/benchmarks/geirhos2021/__init__.py +59 -0
- brainscore_vision/benchmarks/geirhos2021/benchmark.py +132 -0
- brainscore_vision/benchmarks/geirhos2021/test.py +189 -0
- brainscore_vision/benchmarks/hebart2023/__init__.py +4 -0
- brainscore_vision/benchmarks/hebart2023/benchmark.py +72 -0
- brainscore_vision/benchmarks/hebart2023/test.py +19 -0
- brainscore_vision/benchmarks/hermann2020/__init__.py +6 -0
- brainscore_vision/benchmarks/hermann2020/benchmark.py +63 -0
- brainscore_vision/benchmarks/hermann2020/test.py +28 -0
- brainscore_vision/benchmarks/igustibagus2024/__init__.py +11 -0
- brainscore_vision/benchmarks/igustibagus2024/domain_transfer_analysis.py +306 -0
- brainscore_vision/benchmarks/igustibagus2024/domain_transfer_neural.py +134 -0
- brainscore_vision/benchmarks/igustibagus2024/test.py +45 -0
- brainscore_vision/benchmarks/imagenet/__init__.py +4 -0
- brainscore_vision/benchmarks/imagenet/benchmark.py +50 -0
- brainscore_vision/benchmarks/imagenet/imagenet2012.csv +50001 -0
- brainscore_vision/benchmarks/imagenet/test.py +32 -0
- brainscore_vision/benchmarks/imagenet_c/__init__.py +7 -0
- brainscore_vision/benchmarks/imagenet_c/benchmark.py +204 -0
- brainscore_vision/benchmarks/imagenet_c/test.py +57 -0
- brainscore_vision/benchmarks/islam2021/__init__.py +11 -0
- brainscore_vision/benchmarks/islam2021/benchmark.py +107 -0
- brainscore_vision/benchmarks/islam2021/test.py +47 -0
- brainscore_vision/benchmarks/kar2019/__init__.py +4 -0
- brainscore_vision/benchmarks/kar2019/benchmark.py +88 -0
- brainscore_vision/benchmarks/kar2019/test.py +93 -0
- brainscore_vision/benchmarks/majajhong2015/__init__.py +18 -0
- brainscore_vision/benchmarks/majajhong2015/benchmark.py +96 -0
- brainscore_vision/benchmarks/majajhong2015/test.py +103 -0
- brainscore_vision/benchmarks/malania2007/__init__.py +13 -0
- brainscore_vision/benchmarks/malania2007/benchmark.py +235 -0
- brainscore_vision/benchmarks/malania2007/test.py +64 -0
- brainscore_vision/benchmarks/maniquet2024/__init__.py +6 -0
- brainscore_vision/benchmarks/maniquet2024/benchmark.py +199 -0
- brainscore_vision/benchmarks/maniquet2024/test.py +17 -0
- brainscore_vision/benchmarks/marques2020/__init__.py +76 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/cavanaugh2002a_benchmark.py +119 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982a_benchmark.py +84 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982b_benchmark.py +88 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/freemanZiemba2013_benchmark.py +138 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/ringach2002_benchmark.py +167 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/schiller1976_benchmark.py +100 -0
- brainscore_vision/benchmarks/marques2020/test.py +135 -0
- brainscore_vision/benchmarks/objectnet/__init__.py +4 -0
- brainscore_vision/benchmarks/objectnet/benchmark.py +52 -0
- brainscore_vision/benchmarks/objectnet/test.py +33 -0
- brainscore_vision/benchmarks/rajalingham2018/__init__.py +10 -0
- brainscore_vision/benchmarks/rajalingham2018/benchmarks/benchmark.py +74 -0
- brainscore_vision/benchmarks/rajalingham2018/benchmarks/public_benchmark.py +10 -0
- brainscore_vision/benchmarks/rajalingham2018/test.py +125 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/alexnet-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=alexnet,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet18,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet34,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet18-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet34-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2020/__init__.py +4 -0
- brainscore_vision/benchmarks/rajalingham2020/benchmark.py +52 -0
- brainscore_vision/benchmarks/rajalingham2020/test.py +39 -0
- brainscore_vision/benchmarks/sanghavi2020/__init__.py +17 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavi2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavijozwik2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavimurty2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/test.py +83 -0
- brainscore_vision/benchmarks/scialom2024/__init__.py +52 -0
- brainscore_vision/benchmarks/scialom2024/benchmark.py +97 -0
- brainscore_vision/benchmarks/scialom2024/test.py +162 -0
- brainscore_vision/data/__init__.py +0 -0
- brainscore_vision/data/baker2022/__init__.py +40 -0
- brainscore_vision/data/baker2022/data_packaging/inverted_distortion_data_assembly.py +43 -0
- brainscore_vision/data/baker2022/data_packaging/inverted_distortion_stimulus_set.py +81 -0
- brainscore_vision/data/baker2022/data_packaging/mapping.py +60 -0
- brainscore_vision/data/baker2022/data_packaging/normal_distortion_data_assembly.py +46 -0
- brainscore_vision/data/baker2022/data_packaging/normal_distortion_stimulus_set.py +94 -0
- brainscore_vision/data/baker2022/test.py +135 -0
- brainscore_vision/data/barbumayo2019/BarbuMayo2019.py +26 -0
- brainscore_vision/data/barbumayo2019/__init__.py +23 -0
- brainscore_vision/data/barbumayo2019/test.py +10 -0
- brainscore_vision/data/bashivankar2019/__init__.py +52 -0
- brainscore_vision/data/bashivankar2019/data_packaging/2020-08-17_npc_v4_data.h5.png +0 -0
- brainscore_vision/data/bashivankar2019/data_packaging/requirements.txt +4 -0
- brainscore_vision/data/bashivankar2019/data_packaging/synthetic.py +162 -0
- brainscore_vision/data/bashivankar2019/test.py +15 -0
- brainscore_vision/data/bmd2024/__init__.py +69 -0
- brainscore_vision/data/bmd2024/data_packaging/BMD_2024_data_assembly.py +91 -0
- brainscore_vision/data/bmd2024/data_packaging/BMD_2024_simulus_set.py +48 -0
- brainscore_vision/data/bmd2024/data_packaging/stim_meta.csv +401 -0
- brainscore_vision/data/bmd2024/test.py +130 -0
- brainscore_vision/data/bracci2019/__init__.py +36 -0
- brainscore_vision/data/bracci2019/data_packaging.py +221 -0
- brainscore_vision/data/bracci2019/test.py +16 -0
- brainscore_vision/data/cadena2017/__init__.py +52 -0
- brainscore_vision/data/cadena2017/data_packaging/2018-08-07_tolias_v1.ipynb +25880 -0
- brainscore_vision/data/cadena2017/data_packaging/analysis.py +26 -0
- brainscore_vision/data/cadena2017/test.py +24 -0
- brainscore_vision/data/cichy2019/__init__.py +38 -0
- brainscore_vision/data/cichy2019/test.py +8 -0
- brainscore_vision/data/coggan2024_behavior/__init__.py +36 -0
- brainscore_vision/data/coggan2024_behavior/data_packaging.py +166 -0
- brainscore_vision/data/coggan2024_behavior/test.py +32 -0
- brainscore_vision/data/coggan2024_fMRI/__init__.py +27 -0
- brainscore_vision/data/coggan2024_fMRI/data_packaging.py +123 -0
- brainscore_vision/data/coggan2024_fMRI/test.py +25 -0
- brainscore_vision/data/david2004/__init__.py +34 -0
- brainscore_vision/data/david2004/data_packaging/2018-05-10_gallant_data.ipynb +3647 -0
- brainscore_vision/data/david2004/data_packaging/2018-05-23_gallant_data.ipynb +3149 -0
- brainscore_vision/data/david2004/data_packaging/2018-06-05_gallant_data.ipynb +3628 -0
- brainscore_vision/data/david2004/data_packaging/__init__.py +61 -0
- brainscore_vision/data/david2004/data_packaging/convertGallant.m +100 -0
- brainscore_vision/data/david2004/data_packaging/convertGallantV1Aligned.m +58 -0
- brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/DataHash.m +484 -0
- brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/license.txt +24 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.c +895 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.m +107 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.mexw64 +0 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5_helper.m +91 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/InstallMex.m +307 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/license.txt +24 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/uTest_GetMD5.m +290 -0
- brainscore_vision/data/david2004/data_packaging/lib/glob/glob.m +472 -0
- brainscore_vision/data/david2004/data_packaging/lib/glob/license.txt +27 -0
- brainscore_vision/data/david2004/data_packaging/xr_align_debug.py +137 -0
- brainscore_vision/data/david2004/test.py +8 -0
- brainscore_vision/data/deng2009/__init__.py +22 -0
- brainscore_vision/data/deng2009/deng2009imagenet.py +33 -0
- brainscore_vision/data/deng2009/test.py +9 -0
- brainscore_vision/data/ferguson2024/__init__.py +401 -0
- brainscore_vision/data/ferguson2024/data_packaging/data_packaging.py +164 -0
- brainscore_vision/data/ferguson2024/data_packaging/fitting_stimuli.py +20 -0
- brainscore_vision/data/ferguson2024/requirements.txt +2 -0
- brainscore_vision/data/ferguson2024/test.py +155 -0
- brainscore_vision/data/freemanziemba2013/__init__.py +133 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2018-10-05_movshon.ipynb +2002 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-21_movshon_aperture.ipynb +4730 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-26_movshon_aperture_test.ipynb +2228 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/aperture_correct.py +160 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/data_packaging.py +57 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/movshon.py +202 -0
- brainscore_vision/data/freemanziemba2013/test.py +97 -0
- brainscore_vision/data/geirhos2021/__init__.py +358 -0
- brainscore_vision/data/geirhos2021/creating_geirhos_ids.ipynb +468 -0
- brainscore_vision/data/geirhos2021/data_packaging/colour/colour_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/colour/colour_stimulus_set.py +81 -0
- brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_data_assembly.py +83 -0
- brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_data_assembly.py +100 -0
- brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_stimulus_set.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/edge/edge_data_assembly.py +96 -0
- brainscore_vision/data/geirhos2021/data_packaging/edge/edge_stimulus_set.py +69 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_data_assembly.py +83 -0
- brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_stimulus_set.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_stimulus_set.py +81 -0
- brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_data_assembly.py +88 -0
- brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_data_assembly.py +100 -0
- brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_stimulus_set.py +71 -0
- brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_data_assembly.py +88 -0
- brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_stimulus_set.py +75 -0
- brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_stimulus_set.py +75 -0
- brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_data_assembly.py +86 -0
- brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/geirhos_hashes.csv +52 -0
- brainscore_vision/data/geirhos2021/test.py +330 -0
- brainscore_vision/data/hebart2023/__init__.py +23 -0
- brainscore_vision/data/hebart2023/packaging/data_assembly.py +40 -0
- brainscore_vision/data/hebart2023/packaging/stimulus_set.py +72 -0
- brainscore_vision/data/hebart2023/test.py +42 -0
- brainscore_vision/data/hendrycks2019/__init__.py +45 -0
- brainscore_vision/data/hendrycks2019/test.py +26 -0
- brainscore_vision/data/igustibagus2024/__init__.py +23 -0
- brainscore_vision/data/igustibagus2024/dependencies/data_pico/stimulus_dicarlo_domain_transfer.csv +3139 -0
- brainscore_vision/data/igustibagus2024/investigation_consistency.ipynb +346 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/__init__.py +0 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly.ipynb +649 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly_and_stim.py +152 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_stimulus_set_with_background-id.py +45 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/helpers_background_id.py +849 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/merged_stimulus_set.csv +3139 -0
- brainscore_vision/data/igustibagus2024/oleo_pico_exploration.ipynb +410 -0
- brainscore_vision/data/igustibagus2024/test.py +26 -0
- brainscore_vision/data/imagenetslim15000/ImageNetSlim15000.py +30 -0
- brainscore_vision/data/imagenetslim15000/__init__.py +11 -0
- brainscore_vision/data/imagenetslim15000/test.py +8 -0
- brainscore_vision/data/islam2021/__init__.py +18 -0
- brainscore_vision/data/islam2021/data_packaging.py +64 -0
- brainscore_vision/data/islam2021/test.py +11 -0
- brainscore_vision/data/kar2018/__init__.py +58 -0
- brainscore_vision/data/kar2018/data_packaging/kar_coco.py +97 -0
- brainscore_vision/data/kar2018/data_packaging/kar_hvm.py +77 -0
- brainscore_vision/data/kar2018/data_packaging/requirements.txt +1 -0
- brainscore_vision/data/kar2018/test.py +10 -0
- brainscore_vision/data/kar2019/__init__.py +43 -0
- brainscore_vision/data/kar2019/data_packaging.py +116 -0
- brainscore_vision/data/kar2019/test.py +8 -0
- brainscore_vision/data/kuzovkin2018/__init__.py +36 -0
- brainscore_vision/data/kuzovkin2018/createAssembliesBrainScore.py +103 -0
- brainscore_vision/data/kuzovkin2018/test.py +8 -0
- brainscore_vision/data/majajhong2015/__init__.py +113 -0
- brainscore_vision/data/majajhong2015/data_packaging/darren10ms.py +32 -0
- brainscore_vision/data/majajhong2015/data_packaging/data_packaging.py +65 -0
- brainscore_vision/data/majajhong2015/test.py +38 -0
- brainscore_vision/data/malania2007/__init__.py +254 -0
- brainscore_vision/data/malania2007/data_packaging/malania_data_assembly.py +79 -0
- brainscore_vision/data/malania2007/data_packaging/malania_stimulus_set.py +79 -0
- brainscore_vision/data/malania2007/test.py +147 -0
- brainscore_vision/data/maniquet2024/__init__.py +57 -0
- brainscore_vision/data/maniquet2024/data_packaging.py +151 -0
- brainscore_vision/data/maniquet2024/test.py +16 -0
- brainscore_vision/data/marques2020/__init__.py +123 -0
- brainscore_vision/data/marques2020/data_packaging/marques_cavanaugh2002a.py +84 -0
- brainscore_vision/data/marques2020/data_packaging/marques_devalois1982a.py +44 -0
- brainscore_vision/data/marques2020/data_packaging/marques_devalois1982b.py +54 -0
- brainscore_vision/data/marques2020/data_packaging/marques_freemanZiemba2013.py +252 -0
- brainscore_vision/data/marques2020/data_packaging/marques_gen_stim.py +95 -0
- brainscore_vision/data/marques2020/data_packaging/marques_ringach2002.py +95 -0
- brainscore_vision/data/marques2020/data_packaging/marques_schiller1976c.py +60 -0
- brainscore_vision/data/marques2020/data_packaging/marques_stim_common.py +389 -0
- brainscore_vision/data/marques2020/data_packaging/marques_utils.py +21 -0
- brainscore_vision/data/marques2020/data_packaging/setup.py +13 -0
- brainscore_vision/data/marques2020/test.py +54 -0
- brainscore_vision/data/rajalingham2018/__init__.py +56 -0
- brainscore_vision/data/rajalingham2018/rajalingham2018objectome.py +193 -0
- brainscore_vision/data/rajalingham2018/test.py +10 -0
- brainscore_vision/data/rajalingham2020/__init__.py +39 -0
- brainscore_vision/data/rajalingham2020/rajalingham2020orthographic_IT.py +97 -0
- brainscore_vision/data/rajalingham2020/test.py +8 -0
- brainscore_vision/data/rust2012/2020-12-28_rust.ipynb +3301 -0
- brainscore_vision/data/rust2012/__init__.py +45 -0
- brainscore_vision/data/rust2012/rust305.py +35 -0
- brainscore_vision/data/rust2012/test.py +47 -0
- brainscore_vision/data/sanghavi2020/__init__.py +119 -0
- brainscore_vision/data/sanghavi2020/data_packaging/environment.yml +36 -0
- brainscore_vision/data/sanghavi2020/data_packaging/requirements.txt +4 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavi2020.py +101 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavijozwik2020.py +148 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavikar2020.py +131 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020.py +120 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things.py +138 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things1.py +118 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things2.py +118 -0
- brainscore_vision/data/sanghavi2020/test.py +13 -0
- brainscore_vision/data/scialom2024/__init__.py +386 -0
- brainscore_vision/data/scialom2024/data_packaging/scialom_data_assembly.py +164 -0
- brainscore_vision/data/scialom2024/data_packaging/scialom_stimulus_set.py +117 -0
- brainscore_vision/data/scialom2024/test.py +301 -0
- brainscore_vision/data/seibert2019/__init__.py +25 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-10-13_juvenile.ipynb +35703 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-11-18_juvenile_scratch.txt +556 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-11-22_juvenile_dldata.ipynb +3614 -0
- brainscore_vision/data/seibert2019/data_packaging/juvenile.py +103 -0
- brainscore_vision/data/seibert2019/test.py +35 -0
- brainscore_vision/data/zhang2018/__init__.py +38 -0
- brainscore_vision/data/zhang2018/test.py +29 -0
- brainscore_vision/data_helpers/__init__.py +0 -0
- brainscore_vision/data_helpers/lookup_legacy.py +15 -0
- brainscore_vision/data_helpers/s3.py +79 -0
- brainscore_vision/metric_helpers/__init__.py +5 -0
- brainscore_vision/metric_helpers/temporal.py +119 -0
- brainscore_vision/metric_helpers/transformations.py +379 -0
- brainscore_vision/metric_helpers/utils.py +71 -0
- brainscore_vision/metric_helpers/xarray_utils.py +151 -0
- brainscore_vision/metrics/__init__.py +7 -0
- brainscore_vision/metrics/accuracy/__init__.py +4 -0
- brainscore_vision/metrics/accuracy/metric.py +16 -0
- brainscore_vision/metrics/accuracy/test.py +11 -0
- brainscore_vision/metrics/accuracy_distance/__init__.py +4 -0
- brainscore_vision/metrics/accuracy_distance/metric.py +109 -0
- brainscore_vision/metrics/accuracy_distance/test.py +57 -0
- brainscore_vision/metrics/baker_accuracy_delta/__init__.py +4 -0
- brainscore_vision/metrics/baker_accuracy_delta/metric.py +94 -0
- brainscore_vision/metrics/baker_accuracy_delta/requirements.txt +1 -0
- brainscore_vision/metrics/baker_accuracy_delta/test.py +1 -0
- brainscore_vision/metrics/cka/__init__.py +14 -0
- brainscore_vision/metrics/cka/metric.py +105 -0
- brainscore_vision/metrics/cka/test.py +28 -0
- brainscore_vision/metrics/dimensionality/__init__.py +13 -0
- brainscore_vision/metrics/dimensionality/metric.py +45 -0
- brainscore_vision/metrics/distribution_similarity/__init__.py +14 -0
- brainscore_vision/metrics/distribution_similarity/metric.py +84 -0
- brainscore_vision/metrics/distribution_similarity/test.py +10 -0
- brainscore_vision/metrics/error_consistency/__init__.py +13 -0
- brainscore_vision/metrics/error_consistency/metric.py +93 -0
- brainscore_vision/metrics/error_consistency/test.py +39 -0
- brainscore_vision/metrics/i1i2/__init__.py +16 -0
- brainscore_vision/metrics/i1i2/metric.py +299 -0
- brainscore_vision/metrics/i1i2/requirements.txt +2 -0
- brainscore_vision/metrics/i1i2/test.py +36 -0
- brainscore_vision/metrics/i1i2/test_resources/alexnet-probabilities.nc +0 -0
- brainscore_vision/metrics/i1i2/test_resources/resnet18-probabilities.nc +0 -0
- brainscore_vision/metrics/i1i2/test_resources/resnet34-probabilities.nc +0 -0
- brainscore_vision/metrics/internal_consistency/__init__.py +8 -0
- brainscore_vision/metrics/internal_consistency/ceiling.py +127 -0
- brainscore_vision/metrics/internal_consistency/requirements.txt +1 -0
- brainscore_vision/metrics/internal_consistency/test.py +39 -0
- brainscore_vision/metrics/maniquet2024_metrics/__init__.py +19 -0
- brainscore_vision/metrics/maniquet2024_metrics/metric.py +416 -0
- brainscore_vision/metrics/maniquet2024_metrics/test.py +8 -0
- brainscore_vision/metrics/mask_regression/__init__.py +16 -0
- brainscore_vision/metrics/mask_regression/metric.py +242 -0
- brainscore_vision/metrics/mask_regression/requirements.txt +1 -0
- brainscore_vision/metrics/mask_regression/test.py +0 -0
- brainscore_vision/metrics/ost/__init__.py +23 -0
- brainscore_vision/metrics/ost/metric.py +350 -0
- brainscore_vision/metrics/ost/requirements.txt +2 -0
- brainscore_vision/metrics/ost/test.py +0 -0
- brainscore_vision/metrics/rdm/__init__.py +14 -0
- brainscore_vision/metrics/rdm/metric.py +101 -0
- brainscore_vision/metrics/rdm/requirements.txt +2 -0
- brainscore_vision/metrics/rdm/test.py +63 -0
- brainscore_vision/metrics/regression_correlation/__init__.py +48 -0
- brainscore_vision/metrics/regression_correlation/mask_regression.py +232 -0
- brainscore_vision/metrics/regression_correlation/metric.py +125 -0
- brainscore_vision/metrics/regression_correlation/requirements.txt +3 -0
- brainscore_vision/metrics/regression_correlation/test.py +36 -0
- brainscore_vision/metrics/threshold/__init__.py +5 -0
- brainscore_vision/metrics/threshold/metric.py +481 -0
- brainscore_vision/metrics/threshold/test.py +71 -0
- brainscore_vision/metrics/value_delta/__init__.py +4 -0
- brainscore_vision/metrics/value_delta/metric.py +30 -0
- brainscore_vision/metrics/value_delta/requirements.txt +1 -0
- brainscore_vision/metrics/value_delta/test.py +40 -0
- brainscore_vision/model_helpers/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/__init__.py +1 -0
- brainscore_vision/model_helpers/activations/core.py +635 -0
- brainscore_vision/model_helpers/activations/pca.py +117 -0
- brainscore_vision/model_helpers/activations/pytorch.py +152 -0
- brainscore_vision/model_helpers/activations/temporal/__init__.py +0 -0
- brainscore_vision/model_helpers/activations/temporal/core/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/temporal/core/executor.py +219 -0
- brainscore_vision/model_helpers/activations/temporal/core/extractor.py +282 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/base.py +274 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/base.py +134 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/base.py +99 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/block.py +77 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/causal.py +86 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/time_aligner.py +73 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/base.py +17 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/image.py +50 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/video.py +186 -0
- brainscore_vision/model_helpers/activations/temporal/model/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/model/base.py +33 -0
- brainscore_vision/model_helpers/activations/temporal/model/pytorch.py +107 -0
- brainscore_vision/model_helpers/activations/temporal/utils.py +228 -0
- brainscore_vision/model_helpers/brain_transformation/__init__.py +97 -0
- brainscore_vision/model_helpers/brain_transformation/behavior.py +348 -0
- brainscore_vision/model_helpers/brain_transformation/imagenet_classes.txt +1000 -0
- brainscore_vision/model_helpers/brain_transformation/neural.py +159 -0
- brainscore_vision/model_helpers/brain_transformation/temporal.py +199 -0
- brainscore_vision/model_helpers/check_submission/__init__.py +0 -0
- brainscore_vision/model_helpers/check_submission/check_models.py +87 -0
- brainscore_vision/model_helpers/check_submission/images/1.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/10.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/11.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/12.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/13.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/14.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/15.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/16.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/17.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/18.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/19.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/2.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/20.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/3.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/4.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/5.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/6.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/7.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/8.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/9.png +0 -0
- brainscore_vision/model_helpers/conftest.py +3 -0
- brainscore_vision/model_helpers/generic_plugin_tests.py +119 -0
- brainscore_vision/model_helpers/s3.py +62 -0
- brainscore_vision/model_helpers/utils/__init__.py +15 -0
- brainscore_vision/model_helpers/utils/s3.py +42 -0
- brainscore_vision/model_interface.py +214 -0
- brainscore_vision/models/AdvProp_efficientne_b6/__init__.py +5 -0
- brainscore_vision/models/AdvProp_efficientne_b6/model.py +75 -0
- brainscore_vision/models/AdvProp_efficientne_b6/requirements.txt +1 -0
- brainscore_vision/models/AdvProp_efficientne_b6/test.py +9 -0
- brainscore_vision/models/AlexNet_SIN/__init__.py +8 -0
- brainscore_vision/models/AlexNet_SIN/model.py +29 -0
- brainscore_vision/models/AlexNet_SIN/requirements.txt +2 -0
- brainscore_vision/models/AlexNet_SIN/test.py +1 -0
- brainscore_vision/models/Soumyadeep_inf_1/__init__.py +5 -0
- brainscore_vision/models/Soumyadeep_inf_1/model.py +60 -0
- brainscore_vision/models/Soumyadeep_inf_1/setup.py +26 -0
- brainscore_vision/models/Soumyadeep_inf_1/test.py +1 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/__init__.py +8 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/model.py +43 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/requirements.txt +4 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/test.py +8 -0
- brainscore_vision/models/__init__.py +0 -0
- brainscore_vision/models/alexnet/__init__.py +8 -0
- brainscore_vision/models/alexnet/model.py +28 -0
- brainscore_vision/models/alexnet/requirements.txt +2 -0
- brainscore_vision/models/alexnet/test.py +15 -0
- brainscore_vision/models/alexnet_7be5be79/__init__.py +7 -0
- brainscore_vision/models/alexnet_7be5be79/model.py +44 -0
- brainscore_vision/models/alexnet_7be5be79/setup.py +26 -0
- brainscore_vision/models/alexnet_7be5be79/test.py +1 -0
- brainscore_vision/models/alexnet_7be5be79_convs/__init__.py +5 -0
- brainscore_vision/models/alexnet_7be5be79_convs/model.py +42 -0
- brainscore_vision/models/alexnet_7be5be79_convs/setup.py +25 -0
- brainscore_vision/models/alexnet_7be5be79_convs/test.py +1 -0
- brainscore_vision/models/alexnet_ks_torevert/__init__.py +8 -0
- brainscore_vision/models/alexnet_ks_torevert/model.py +28 -0
- brainscore_vision/models/alexnet_ks_torevert/requirements.txt +2 -0
- brainscore_vision/models/alexnet_ks_torevert/test.py +15 -0
- brainscore_vision/models/alexnet_simclr_run1/__init__.py +7 -0
- brainscore_vision/models/alexnet_simclr_run1/model.py +267 -0
- brainscore_vision/models/alexnet_simclr_run1/requirements.txt +2 -0
- brainscore_vision/models/alexnet_simclr_run1/test.py +1 -0
- brainscore_vision/models/alexnet_testing/__init__.py +8 -0
- brainscore_vision/models/alexnet_testing/model.py +28 -0
- brainscore_vision/models/alexnet_testing/requirements.txt +2 -0
- brainscore_vision/models/alexnet_testing/setup.py +24 -0
- brainscore_vision/models/alexnet_testing/test.py +15 -0
- brainscore_vision/models/antialias_resnet152/__init__.py +7 -0
- brainscore_vision/models/antialias_resnet152/model.py +35 -0
- brainscore_vision/models/antialias_resnet152/requirements.txt +3 -0
- brainscore_vision/models/antialias_resnet152/test.py +8 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/__init__.py +7 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/model.py +35 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/requirements.txt +1 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/test.py +8 -0
- brainscore_vision/models/bp_resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/bp_resnet50_julios/model.py +52 -0
- brainscore_vision/models/bp_resnet50_julios/setup.py +24 -0
- brainscore_vision/models/bp_resnet50_julios/test.py +1 -0
- brainscore_vision/models/clip/__init__.py +5 -0
- brainscore_vision/models/clip/model.py +179 -0
- brainscore_vision/models/clip/requirements.txt +4 -0
- brainscore_vision/models/clip/test.py +1 -0
- brainscore_vision/models/clipvision/__init__.py +5 -0
- brainscore_vision/models/clipvision/model.py +179 -0
- brainscore_vision/models/clipvision/requirements.txt +4 -0
- brainscore_vision/models/clipvision/test.py +1 -0
- brainscore_vision/models/cornet_s/__init__.py +8 -0
- brainscore_vision/models/cornet_s/helpers/helpers.py +215 -0
- brainscore_vision/models/cornet_s/model.py +77 -0
- brainscore_vision/models/cornet_s/requirements.txt +7 -0
- brainscore_vision/models/cornet_s/test.py +8 -0
- brainscore_vision/models/cornet_s_ynshah/__init__.py +388 -0
- brainscore_vision/models/cornet_s_ynshah/model.py +192 -0
- brainscore_vision/models/cornet_s_ynshah/setup.py +24 -0
- brainscore_vision/models/cornet_s_ynshah/test.py +0 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/__init__.py +7 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/model.py +75 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/requirements.txt +4 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/test.py +8 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/__init__.py +8 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/model.py +57 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/requirements.txt +3 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/test.py +25 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/__init__.py +9 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/model.py +134 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/requirements.txt +4 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/test.py +8 -0
- brainscore_vision/models/dbp_resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/dbp_resnet50_julios/model.py +52 -0
- brainscore_vision/models/dbp_resnet50_julios/setup.py +24 -0
- brainscore_vision/models/dbp_resnet50_julios/test.py +1 -0
- brainscore_vision/models/densenet_201_pytorch/__init__.py +7 -0
- brainscore_vision/models/densenet_201_pytorch/model.py +59 -0
- brainscore_vision/models/densenet_201_pytorch/requirements.txt +3 -0
- brainscore_vision/models/densenet_201_pytorch/test.py +8 -0
- brainscore_vision/models/eBarlow_Vanilla/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla/model.py +50 -0
- brainscore_vision/models/eBarlow_Vanilla/requirements.txt +2 -0
- brainscore_vision/models/eBarlow_Vanilla/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eBarlow_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/model.py +84 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eBarlow_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eBarlow_augself_linear_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_augself_linear_1/model.py +65 -0
- brainscore_vision/models/eBarlow_augself_linear_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_augself_linear_1/test.py +1 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/model.py +65 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_2/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_2/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_3/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_3/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_3/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_3/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01/model.py +50 -0
- brainscore_vision/models/eBarlow_lmda_01/requirements.txt +2 -0
- brainscore_vision/models/eBarlow_lmda_01/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/model.py +84 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/model.py +85 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/model.py +85 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_05_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla/model.py +50 -0
- brainscore_vision/models/eMMCR_Vanilla/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla/test.py +1 -0
- brainscore_vision/models/eMMCR_VanillaV2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_VanillaV2/model.py +50 -0
- brainscore_vision/models/eMMCR_VanillaV2/setup.py +24 -0
- brainscore_vision/models/eMMCR_VanillaV2/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eMMCR_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eMMCR_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01/model.py +50 -0
- brainscore_vision/models/eMMCR_lmda_01/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01V2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01V2/model.py +50 -0
- brainscore_vision/models/eMMCR_lmda_01V2/requirements.txt +2 -0
- brainscore_vision/models/eMMCR_lmda_01V2/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01V2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_3/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_3/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_3/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_3/test.py +1 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/test.py +1 -0
- brainscore_vision/models/effnetb1_272x240/__init__.py +5 -0
- brainscore_vision/models/effnetb1_272x240/model.py +126 -0
- brainscore_vision/models/effnetb1_272x240/requirements.txt +3 -0
- brainscore_vision/models/effnetb1_272x240/test.py +9 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/__init__.py +9 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/model.py +111 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/requirements.txt +6 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/test.py +8 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/__init__.py +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/model.py +142 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/requirements.txt +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/test.py +8 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/__init__.py +9 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/model.py +140 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/requirements.txt +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/test.py +8 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/__init__.py +5 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/model.py +62 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/requirements.txt +3 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/test.py +8 -0
- brainscore_vision/models/hmax/__init__.py +7 -0
- brainscore_vision/models/hmax/helpers/hmax.py +438 -0
- brainscore_vision/models/hmax/helpers/pytorch.py +216 -0
- brainscore_vision/models/hmax/model.py +69 -0
- brainscore_vision/models/hmax/requirements.txt +5 -0
- brainscore_vision/models/hmax/test.py +8 -0
- brainscore_vision/models/inception_v3_pytorch/__init__.py +7 -0
- brainscore_vision/models/inception_v3_pytorch/model.py +68 -0
- brainscore_vision/models/inception_v3_pytorch/requirements.txt +3 -0
- brainscore_vision/models/inception_v3_pytorch/test.py +8 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/__init__.py +7 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/model.py +60 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/requirements.txt +3 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/test.py +8 -0
- brainscore_vision/models/mobilevit_small/__init__.py +7 -0
- brainscore_vision/models/mobilevit_small/model.py +49 -0
- brainscore_vision/models/mobilevit_small/requirements.txt +3 -0
- brainscore_vision/models/mobilevit_small/test.py +8 -0
- brainscore_vision/models/pixels/__init__.py +8 -0
- brainscore_vision/models/pixels/model.py +35 -0
- brainscore_vision/models/pixels/test.py +15 -0
- brainscore_vision/models/pnasnet_large_pytorch/__init__.py +7 -0
- brainscore_vision/models/pnasnet_large_pytorch/model.py +59 -0
- brainscore_vision/models/pnasnet_large_pytorch/requirements.txt +3 -0
- brainscore_vision/models/pnasnet_large_pytorch/test.py +8 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/model.py +64 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/model.py +65 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/model.py +65 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/model.py +67 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/test.py +1 -0
- brainscore_vision/models/r50_tvpt/__init__.py +9 -0
- brainscore_vision/models/r50_tvpt/model.py +47 -0
- brainscore_vision/models/r50_tvpt/setup.py +24 -0
- brainscore_vision/models/r50_tvpt/test.py +1 -0
- brainscore_vision/models/regnet/__init__.py +14 -0
- brainscore_vision/models/regnet/model.py +17 -0
- brainscore_vision/models/regnet/requirements.txt +2 -0
- brainscore_vision/models/regnet/test.py +17 -0
- brainscore_vision/models/resnet18_imagenet21kP/__init__.py +6 -0
- brainscore_vision/models/resnet18_imagenet21kP/model.py +119 -0
- brainscore_vision/models/resnet18_imagenet21kP/setup.py +18 -0
- brainscore_vision/models/resnet18_imagenet21kP/test.py +0 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/__init__.py +5 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/model.py +59 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/test.py +1 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/__init__.py +9 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/model.py +72 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/test.py +1 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/__init__.py +9 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/model.py +72 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/test.py +1 -0
- brainscore_vision/models/resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/resnet50_julios/model.py +54 -0
- brainscore_vision/models/resnet50_julios/setup.py +24 -0
- brainscore_vision/models/resnet50_julios/test.py +1 -0
- brainscore_vision/models/resnet50_tutorial/__init__.py +5 -0
- brainscore_vision/models/resnet50_tutorial/model.py +34 -0
- brainscore_vision/models/resnet50_tutorial/requirements.txt +2 -0
- brainscore_vision/models/resnet50_tutorial/test.py +8 -0
- brainscore_vision/models/resnet_152_v2_pytorch/__init__.py +7 -0
- brainscore_vision/models/resnet_152_v2_pytorch/model.py +59 -0
- brainscore_vision/models/resnet_152_v2_pytorch/requirements.txt +2 -0
- brainscore_vision/models/resnet_152_v2_pytorch/test.py +8 -0
- brainscore_vision/models/resnet_50_robust/__init__.py +7 -0
- brainscore_vision/models/resnet_50_robust/model.py +55 -0
- brainscore_vision/models/resnet_50_robust/requirements.txt +3 -0
- brainscore_vision/models/resnet_50_robust/test.py +8 -0
- brainscore_vision/models/resnext101_32x16d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x16d_wsl/model.py +38 -0
- brainscore_vision/models/resnext101_32x16d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x16d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x32d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x32d_wsl/model.py +40 -0
- brainscore_vision/models/resnext101_32x32d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x32d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x48d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x48d_wsl/model.py +38 -0
- brainscore_vision/models/resnext101_32x48d_wsl/requirements.txt +3 -0
- brainscore_vision/models/resnext101_32x48d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x8d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x8d_wsl/model.py +44 -0
- brainscore_vision/models/resnext101_32x8d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x8d_wsl/test.py +8 -0
- brainscore_vision/models/temporal_model_AVID_CMA/__init__.py +17 -0
- brainscore_vision/models/temporal_model_AVID_CMA/model.py +92 -0
- brainscore_vision/models/temporal_model_AVID_CMA/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_AVID_CMA/test.py +18 -0
- brainscore_vision/models/temporal_model_GDT/__init__.py +16 -0
- brainscore_vision/models/temporal_model_GDT/model.py +72 -0
- brainscore_vision/models/temporal_model_GDT/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_GDT/test.py +17 -0
- brainscore_vision/models/temporal_model_S3D_text_video/__init__.py +14 -0
- brainscore_vision/models/temporal_model_S3D_text_video/model.py +65 -0
- brainscore_vision/models/temporal_model_S3D_text_video/requirements.txt +1 -0
- brainscore_vision/models/temporal_model_S3D_text_video/test.py +15 -0
- brainscore_vision/models/temporal_model_SeLaVi/__init__.py +17 -0
- brainscore_vision/models/temporal_model_SeLaVi/model.py +68 -0
- brainscore_vision/models/temporal_model_SeLaVi/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_SeLaVi/test.py +18 -0
- brainscore_vision/models/temporal_model_VideoMAE/__init__.py +15 -0
- brainscore_vision/models/temporal_model_VideoMAE/model.py +100 -0
- brainscore_vision/models/temporal_model_VideoMAE/requirements.txt +6 -0
- brainscore_vision/models/temporal_model_VideoMAE/test.py +16 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/__init__.py +14 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/model.py +109 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/requirements.txt +4 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/test.py +16 -0
- brainscore_vision/models/temporal_model_mae_st/__init__.py +15 -0
- brainscore_vision/models/temporal_model_mae_st/model.py +120 -0
- brainscore_vision/models/temporal_model_mae_st/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_mae_st/test.py +16 -0
- brainscore_vision/models/temporal_model_mmaction2/__init__.py +23 -0
- brainscore_vision/models/temporal_model_mmaction2/mmaction2.csv +24 -0
- brainscore_vision/models/temporal_model_mmaction2/model.py +226 -0
- brainscore_vision/models/temporal_model_mmaction2/requirements.txt +5 -0
- brainscore_vision/models/temporal_model_mmaction2/test.py +24 -0
- brainscore_vision/models/temporal_model_openstl/__init__.py +18 -0
- brainscore_vision/models/temporal_model_openstl/model.py +206 -0
- brainscore_vision/models/temporal_model_openstl/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_openstl/test.py +19 -0
- brainscore_vision/models/temporal_model_torchvision/__init__.py +19 -0
- brainscore_vision/models/temporal_model_torchvision/model.py +92 -0
- brainscore_vision/models/temporal_model_torchvision/requirements.txt +2 -0
- brainscore_vision/models/temporal_model_torchvision/test.py +20 -0
- brainscore_vision/models/tv_efficientnet_b1/__init__.py +5 -0
- brainscore_vision/models/tv_efficientnet_b1/model.py +54 -0
- brainscore_vision/models/tv_efficientnet_b1/setup.py +24 -0
- brainscore_vision/models/tv_efficientnet_b1/test.py +1 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/__init__.py +7 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/model.py +104 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/requirements.txt +8 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/test.py +8 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/LICENSE +674 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/README.md +105 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/run.py +136 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/setup.py +41 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/train.py +383 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/__init__.py +71 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/back_ends.py +337 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/modules.py +126 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/params.py +100 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/utils.py +32 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/vonenet.py +68 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet_tutorial-activations.ipynb +352 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/test.py +1 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/__init__.py +7 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/model.py +66 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/setup.py +24 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/test.py +1 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/__init__.py +7 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/model.py +68 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/setup.py +24 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/test.py +1 -0
- brainscore_vision/submission/__init__.py +0 -0
- brainscore_vision/submission/actions_helpers.py +153 -0
- brainscore_vision/submission/config.py +7 -0
- brainscore_vision/submission/endpoints.py +58 -0
- brainscore_vision/utils/__init__.py +91 -0
- brainscore_vision-2.1.dist-info/LICENSE +11 -0
- brainscore_vision-2.1.dist-info/METADATA +152 -0
- brainscore_vision-2.1.dist-info/RECORD +1009 -0
- brainscore_vision-2.1.dist-info/WHEEL +5 -0
- brainscore_vision-2.1.dist-info/top_level.txt +4 -0
- docs/Makefile +20 -0
- docs/source/conf.py +78 -0
- docs/source/index.rst +21 -0
- docs/source/modules/api_reference.rst +10 -0
- docs/source/modules/benchmarks.rst +8 -0
- docs/source/modules/brainscore_submission.png +0 -0
- docs/source/modules/developer_clarifications.rst +36 -0
- docs/source/modules/metrics.rst +8 -0
- docs/source/modules/model_interface.rst +8 -0
- docs/source/modules/submission.rst +112 -0
- docs/source/modules/tutorial_screenshots/brain-score_logo.png +0 -0
- docs/source/modules/tutorial_screenshots/final_submit.png +0 -0
- docs/source/modules/tutorial_screenshots/init_py.png +0 -0
- docs/source/modules/tutorial_screenshots/mms.png +0 -0
- docs/source/modules/tutorial_screenshots/setup.png +0 -0
- docs/source/modules/tutorial_screenshots/sms.png +0 -0
- docs/source/modules/tutorial_screenshots/subfolders.png +0 -0
- docs/source/modules/utils.rst +22 -0
- migrations/2020-12-20_pkl_to_nc.py +90 -0
- tests/__init__.py +6 -0
- tests/conftest.py +26 -0
- tests/test_benchmark_helpers/__init__.py +0 -0
- tests/test_benchmark_helpers/test_screen.py +75 -0
- tests/test_examples.py +41 -0
- tests/test_integration.py +43 -0
- tests/test_metric_helpers/__init__.py +0 -0
- tests/test_metric_helpers/test_temporal.py +80 -0
- tests/test_metric_helpers/test_transformations.py +171 -0
- tests/test_metric_helpers/test_xarray_utils.py +85 -0
- tests/test_model_helpers/__init__.py +6 -0
- tests/test_model_helpers/activations/__init__.py +0 -0
- tests/test_model_helpers/activations/test___init__.py +404 -0
- tests/test_model_helpers/brain_transformation/__init__.py +0 -0
- tests/test_model_helpers/brain_transformation/test___init__.py +18 -0
- tests/test_model_helpers/brain_transformation/test_behavior.py +181 -0
- tests/test_model_helpers/brain_transformation/test_neural.py +70 -0
- tests/test_model_helpers/brain_transformation/test_temporal.py +66 -0
- tests/test_model_helpers/temporal/__init__.py +0 -0
- tests/test_model_helpers/temporal/activations/__init__.py +0 -0
- tests/test_model_helpers/temporal/activations/test_extractor.py +96 -0
- tests/test_model_helpers/temporal/activations/test_inferencer.py +189 -0
- tests/test_model_helpers/temporal/activations/test_inputs.py +103 -0
- tests/test_model_helpers/temporal/brain_transformation/__init__.py +0 -0
- tests/test_model_helpers/temporal/brain_transformation/test_temporal_ops.py +122 -0
- tests/test_model_helpers/temporal/test_utils.py +61 -0
- tests/test_model_helpers/test_generic_plugin_tests.py +310 -0
- tests/test_model_helpers/test_imports.py +10 -0
- tests/test_model_helpers/test_s3.py +38 -0
- tests/test_models.py +15 -0
- tests/test_stimuli.py +0 -0
- tests/test_submission/__init__.py +0 -0
- tests/test_submission/mock_config.py +3 -0
- tests/test_submission/test_actions_helpers.py +67 -0
- tests/test_submission/test_db.py +54 -0
- tests/test_submission/test_endpoints.py +125 -0
- tests/test_utils.py +21 -0
@@ -0,0 +1,242 @@
import logging
from typing import Union

import numpy as np
from tqdm import tqdm

from brainscore_vision.metric_helpers.xarray_utils import XarrayRegression
from brainscore_vision.utils import fullname


# do not import tensorflow at top level to avoid forcing users to install it if they don't use this metric
def mask_regression():
    regression = MaskRegression()
    regression = XarrayRegression(regression)
    return regression


class MaskRegression:
    """
    Klindt et al., NIPS 2017
    https://papers.nips.cc/paper/6942-neural-system-identification-for-large-populations-separating-what-and-where

    TF implementation of receptive-field-factorized regression
    """

    def __init__(self, init_lr=0.01,
                 max_epochs=40, tol=0.1, batch_size=50, ls=.1, ld=.1, decay_rate=25,
                 inits: Union[None, dict] = None, log_rate=10, gpu_options=None):
        """
        Mapping function class.
        :param batch_size: batch size
        :param init_lr: initial learning rate
        :param ls: regularization coefficient for spatial parameters (spatial convolution)
        :param ld: regularization coefficient for depth parameters (depth convolution)
        :param tol: tolerance - stops the optimization once the loss falls below tol
        :param max_epochs: maximum number of epochs to train
        :param inits: initial values for the mapping function parameters. A dictionary containing
                      any of the following keys ['s_w', 'd_w', 'bias']
        :param log_rate: rate of logging the loss values
        :param decay_rate: rate of decay for learning rate (#epochs)
        """
        self._ld = ld
        self._ls = ls
        self._tol = tol
        self._batch_size = batch_size
        self._lr = init_lr
        self._max_epochs = max_epochs
        self._inits = inits
        self._log_rate = log_rate
        self._decay_rate = decay_rate
        self._gpu_options = gpu_options

        self._graph = None
        self._lr_ph = None
        self._opt = None
        self._logger = logging.getLogger(fullname(self))

    def _iterate_minibatches(self, inputs, targets=None, batchsize=128, shuffle=False):
        """
        Iterates over inputs with minibatches
        :param inputs: input dataset, first dimension should be examples
        :param targets: [n_examples, n_neurons] response values, first dimension should be examples
        :param batchsize: batch size
        :param shuffle: flag indicating whether to shuffle the data while making minibatches
        :return: minibatch of (X, Y)
        """
        input_len = inputs.shape[0]
        if shuffle:
            indices = np.arange(input_len)
            np.random.shuffle(indices)
        for start_idx in range(0, input_len, batchsize):
            if shuffle:
                excerpt = indices[start_idx:start_idx + batchsize]
            else:
                excerpt = slice(start_idx, start_idx + batchsize)
            if targets is None:
                yield inputs[excerpt]
            else:
                yield inputs[excerpt], targets[excerpt]

    def fit(self, X, Y):
        """
        Fits the parameters to the data
        :param X: Source data, first dimension is examples
        :param Y: Target data, first dimension is examples
        """
        assert not np.isnan(X).any() and not np.isnan(Y).any()
        self.setup()
        X = self.reindex(X)
        assert X.ndim == 4, 'Input matrix rank should be 4.'
        with self._graph.as_default():
            self._init_mapper(X, Y)
            lr = self._lr
            for epoch in tqdm(range(self._max_epochs), desc='mask epochs'):
                for counter, batch in enumerate(
                        self._iterate_minibatches(X, Y, batchsize=self._batch_size, shuffle=True)):
                    feed_dict = {self._input_placeholder: batch[0],
                                 self._target_placeholder: batch[1],
                                 self._lr_ph: lr}
                    _, loss_value, reg_loss_value = self._sess.run([self.train_op, self.l2_error, self.reg_loss],
                                                                   feed_dict=feed_dict)
                if epoch % self._log_rate == 0:
                    self._logger.debug(f'Epoch: {epoch}, Err Loss: {loss_value:.2f}, Reg Loss: {reg_loss_value:.2f}')
                if epoch % self._decay_rate == 0 and epoch != 0:
                    lr /= 10.
                if loss_value < self._tol:
                    self._logger.debug('Converged.')
                    break

    def predict(self, X):
        """
        Predicts the responses to the given input X
        :param X: Input data, first dimension is examples
        :return: predictions
        """
        assert not np.isnan(X).any()
        X = self.reindex(X)
        with self._graph.as_default():
            preds = []
            for batch in self._iterate_minibatches(X, batchsize=self._batch_size, shuffle=False):
                feed_dict = {self._input_placeholder: batch}
                preds.append(np.squeeze(self._sess.run([self._predictions], feed_dict=feed_dict)))
            return np.concatenate(preds, axis=0)

    def setup(self):
        import tensorflow as tf
        self._graph = tf.Graph()
        with self._graph.as_default():
            self._lr_ph = tf.compat.v1.placeholder(dtype=tf.float32)
            self._opt = tf.compat.v1.train.AdamOptimizer(learning_rate=self._lr_ph)

    def reindex(self, X):
        channel_names = ['channel', 'channel_x', 'channel_y']
        assert all(hasattr(X, coord) for coord in channel_names)
        shapes = [len(set(X[channel].values)) for channel in channel_names]
        X = np.reshape(X.values, [X.shape[0]] + shapes)
        X = np.transpose(X, axes=[0, 2, 3, 1])
        return X

    def _make_separable_map(self):
        """
        Makes the mapping function computational graph
        """
        import tensorflow as tf
        with self._graph.as_default():
            with tf.compat.v1.variable_scope('mapping'):
                input_shape = self._input_placeholder.shape
                preds = []
                for n in range(self._target_placeholder.shape[1]):
                    with tf.compat.v1.variable_scope('N_{}'.format(n)):
                        # for all variables, either use pre-defined initial value or initialize randomly
                        if self._inits is not None and 's_w' in self._inits:
                            s_w = tf.Variable(initial_value=
                                              self._inits['s_w'][n].reshape((1, input_shape[1], input_shape[2], 1)),
                                              dtype=tf.float32)
                        else:
                            s_w = tf.Variable(initial_value=np.random.randn(1, input_shape[1], input_shape[2], 1),
                                              dtype=tf.float32)
                        if self._inits is not None and 'd_w' in self._inits:
                            d_w = tf.Variable(initial_value=self._inits['d_w'][n].reshape(1, 1, input_shape[-1], 1),
                                              dtype=tf.float32)
                        else:
                            d_w = tf.Variable(initial_value=np.random.randn(1, 1, input_shape[-1], 1),
                                              dtype=tf.float32)
                        if self._inits is not None and 'bias' in self._inits:
                            bias = tf.Variable(initial_value=self._inits['bias'][n].reshape(1, 1, 1, 1),
                                               dtype=tf.float32)
                        else:
                            bias = tf.Variable(initial_value=np.zeros((1, 1, 1, 1)), dtype=tf.float32)

                        tf.compat.v1.add_to_collection('s_w', s_w)
                        out = s_w * self._input_placeholder

                        tf.compat.v1.add_to_collection('d_w', d_w)
                        out = tf.reduce_sum(input_tensor=out, axis=[1, 2], keepdims=True)
                        out = tf.nn.conv2d(input=out, filters=d_w, strides=[1, 1, 1, 1], padding='SAME')

                        tf.compat.v1.add_to_collection('bias', bias)
                        preds.append(tf.squeeze(out, axis=[1, 2]) + bias)

                self._predictions = tf.concat(preds, -1)

    def _make_loss(self):
        """
        Makes the loss computational graph
        """
        import tensorflow as tf
        with self._graph.as_default():
            with tf.compat.v1.variable_scope('loss'):
                self.l2_error = tf.norm(tensor=self._predictions - self._target_placeholder, ord=2)
                # For separable mapping
                self._s_vars = tf.compat.v1.get_collection('s_w')
                self._d_vars = tf.compat.v1.get_collection('d_w')
                self._biases = tf.compat.v1.get_collection('bias')

                # Laplacian loss
                laplace_filter = tf.constant(np.array([0, -1, 0, -1, 4, -1, 0, -1, 0]).reshape((3, 3, 1, 1)),
                                             dtype=tf.float32)
                laplace_loss = tf.reduce_sum(
                    input_tensor=[tf.norm(
                        tensor=tf.nn.conv2d(input=t, filters=laplace_filter, strides=[1, 1, 1, 1], padding='SAME'))
                        for t in self._s_vars])
                l2_loss = tf.reduce_sum(input_tensor=[tf.reduce_sum(input_tensor=tf.pow(t, 2)) for t in self._s_vars])
                self.reg_loss = self._ls * (l2_loss + laplace_loss) + \
                                self._ld * tf.reduce_sum(
                                    input_tensor=[tf.reduce_sum(input_tensor=tf.pow(t, 2)) for t in self._d_vars])

                self.total_loss = self.l2_error + self.reg_loss
                self.tvars = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
                self.train_op = self._opt.minimize(self.total_loss, var_list=self.tvars,
                                                   global_step=tf.compat.v1.train.get_or_create_global_step())

    def _init_mapper(self, X, Y):
        """
        Initializes the mapping function graph
        :param X: input data
        :param Y: target data
        """
        import tensorflow as tf
        assert len(Y.shape) == 2
        with self._graph.as_default():
            self._input_placeholder = tf.compat.v1.placeholder(dtype=tf.float32, shape=[None] + list(X.shape[1:]))
            self._target_placeholder = tf.compat.v1.placeholder(dtype=tf.float32, shape=[None, Y.shape[1]])
            # Build the model graph
            self._make_separable_map()
            self._make_loss()

            # initialize graph
            self._logger.debug('Initializing mapper')
            init_op = tf.compat.v1.variables_initializer(
                tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.GLOBAL_VARIABLES))
            self._sess = tf.compat.v1.Session(
                config=tf.compat.v1.ConfigProto(
                    gpu_options=self._gpu_options) if self._gpu_options is not None else None)
            self._sess.run(init_op)

    def close(self):
        """
        Closes occupied resources
        """
        import tensorflow as tf
        tf.compat.v1.reset_default_graph()
        self._sess.close()
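A minimal usage sketch of the factorized regression above, assuming the definitions are in scope. The assembly variables here are hypothetical placeholders, not part of the package; `mask_regression()` returns the `MaskRegression` wrapped in `XarrayRegression`, and `reindex` requires the source assembly to carry `channel`, `channel_x`, and `channel_y` coordinates:

# Illustrative sketch (hypothetical assembly variables):
regression = mask_regression()                        # MaskRegression wrapped in XarrayRegression
regression.fit(train_activations, train_recordings)   # source must have channel/channel_x/channel_y coords
predictions = regression.predict(test_activations)    # predicted responses per target neuroid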
@@ -0,0 +1 @@
tensorflow
File without changes
@@ -0,0 +1,23 @@
from brainscore_vision import metric_registry
from .metric import OSTCorrelation

metric_registry['ost'] = OSTCorrelation

BIBTEX = """@article{kubiliusschrimpf2019brain,
  title={Brain-like object recognition with high-performing shallow recurrent ANNs},
  author={Kubilius, Jonas and Schrimpf, Martin and Kar, Kohitij and Rajalingham, Rishi and Hong, Ha and Majaj, Najib and Issa, Elias and Bashivan, Pouya and Prescott-Roy, Jonathan and Schmidt, Kailyn and others},
  journal={Advances in neural information processing systems},
  volume={32},
  year={2019}
}"""

BIBTEX_CHARACTERIZATION = """@article{kar2019evidence,
  title={Evidence that recurrent circuits are critical to the ventral stream’s execution of core object recognition behavior},
  author={Kar, Kohitij and Kubilius, Jonas and Schmidt, Kailyn and Issa, Elias B and DiCarlo, James J},
  journal={Nature neuroscience},
  volume={22},
  number={6},
  pages={974--983},
  year={2019},
  publisher={Nature Publishing Group US New York}
}"""
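The `metric_registry` assignment is what exposes the metric by name to the plugin system. Consistent with how the metric file below obtains `load_metric('i1')`, the registered metric can then be constructed through the loader:

# Constructing the registered metric by name:
from brainscore_vision import load_metric

ost_metric = load_metric('ost')  # instantiates OSTCorrelation via the registry
# score = ost_metric(source_recordings, target_osts)  # DataAssembly inputs, per OSTCorrelation.__call__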
@@ -0,0 +1,350 @@
import logging
from collections import OrderedDict

import numpy as np
from numpy.random.mtrand import RandomState
from scipy.interpolate import interp1d
from scipy.optimize import fsolve
from scipy.stats import spearmanr
from tqdm import tqdm

from brainio.assemblies import walk_coords, array_is_element, BehavioralAssembly, DataAssembly
from brainscore_core.metrics import Metric, Score
from brainscore_vision import load_metric
from brainscore_vision.metric_helpers.transformations import CrossValidation
from brainscore_vision.utils import fullname


class OSTCorrelation(Metric):
    """
    Object Solution Times (OST) metric, Kubilius & Schrimpf et al., NeurIPS 2019
    https://papers.nips.cc/paper/9441-brain-like-object-recognition-with-high-performing-shallow-recurrent-anns
    OST characterization, Kar et al., Nature Neuroscience 2019 https://www.nature.com/articles/s41593-019-0392-5.
    """

    def __init__(self):
        self._cross_validation = CrossValidation(stratification_coord=None, splits=10, test_size=0.1)
        self._i1 = load_metric('i1')
        self._predicted_osts, self._target_osts = [], []

    def __call__(self, source_recordings: DataAssembly, target_osts: DataAssembly) -> Score:
        score = self._cross_validation(source_recordings, target_osts, apply=self.apply)
        return score

    def apply(self, train_source, train_osts, test_source, test_osts):
        train_source, test_source = self._align(train_source, train_osts), self._align(test_source, test_osts)
        predicted_osts = self.compute_osts(train_source, test_source, test_osts)
        score = self.correlate(predicted_osts, test_osts.values)
        return score

    def _align(self, source, target, on='stimulus_id'):
        return source.isel(presentation=[np.where(source[on].values == key)[0][0] for key in target[on].values])

    def compute_osts(self, train_source, test_source, test_osts):
        last_osts, hit_osts = [None] * len(test_osts), [None] * len(test_osts)
        for time_bin_start in tqdm(sorted(set(train_source['time_bin_start'].values)), desc='time_bins'):
            time_train_source = train_source.sel(time_bin_start=time_bin_start).squeeze('time_bin_end')
            time_test_source = test_source.sel(time_bin_start=time_bin_start).squeeze('time_bin_end')
            time_train_source = time_train_source.transpose('presentation', 'neuroid')
            time_test_source = time_test_source.transpose('presentation', 'neuroid')

            classifier = TFProbabilitiesClassifier()
            classifier.fit(time_train_source, time_train_source['image_label'])
            prediction_probabilities = classifier.predict_proba(time_test_source)
            classifier.close()
            source_i1 = self.i1(prediction_probabilities)
            assert all(source_i1['stimulus_id'].values == test_osts['stimulus_id'].values)
            for i, (image_source_i1, threshold_i1) in enumerate(zip(
                    source_i1.values, test_osts['i1'].values)):
                if hit_osts[i] is None:
                    if image_source_i1 < threshold_i1 and (last_osts[i] is None or last_osts[i][1] < image_source_i1):
                        last_osts[i] = time_bin_start, image_source_i1
                    if image_source_i1 >= threshold_i1:
                        hit_osts[i] = time_bin_start, image_source_i1
            if not any(hit_ost is None for hit_ost in hit_osts):
                break  # stop early if threshold is already hit for every image

        # interpolate
        predicted_osts = np.empty(len(test_osts), dtype=np.float64)
        predicted_osts[:] = np.nan
        for i, (last_ost, hit_ost) in enumerate(zip(last_osts, hit_osts)):
            if hit_ost is None:
                predicted_osts[i] = np.nan
                continue
            (hit_ost_time, hit_ost_i1) = hit_ost
            if last_ost is None:
                predicted_osts[i] = hit_ost_time
                continue
            (last_ost_time, last_ost_i1) = last_ost
            fit = interp1d([last_ost_time, hit_ost_time], [last_ost_i1, hit_ost_i1], fill_value='extrapolate')
            ost = fsolve(lambda xs: [fit(x) - test_osts['i1'].values[i] for x in xs], x0=hit_ost_time)[0]
            predicted_osts[i] = ost
        return predicted_osts

    def correlate(self, predicted_osts, target_osts):
        non_nan = np.logical_and(~np.isnan(predicted_osts), ~np.isnan(target_osts))
        predicted_osts, target_osts = predicted_osts[non_nan], target_osts[non_nan]
        # use Spearman over Pearson since it tests whether the rank orders are the same,
        # which allows for nonlinear correlates whereas Pearson assumes linearity.
        correlation, p = spearmanr(predicted_osts, target_osts)
        return Score(correlation)

    def i1(self, prediction_probabilities):
        response_matrix = self._i1.target_distractor_scores(prediction_probabilities)
        response_matrix = self._i1.dprimes(response_matrix)
        response_matrix = self._i1.collapse_distractors(response_matrix)
        return response_matrix


class TFProbabilitiesClassifier:
    def __init__(self,
                 init_lr=1e-4,
                 max_epochs=40,
                 zscore_feats=True,
                 train_batch_size=64,
                 eval_batch_size=240,
                 activation=None,
                 fc_weight_decay=0.463,  # 1/(C_svc * num_objectome_imgs) = 1/(1e-3 * 2160),
                 # based on https://stats.stackexchange.com/questions/216095/how-does-alpha-relate-to-c-in-scikit-learns-sgdclassifier
                 fc_dropout=1.0,
                 tol=1e-4,
                 log_rate=5, gpu_options=None):
        """
        Mapping function class.
        :param train_batch_size: train batch size
        :param eval_batch_size: prediction batch size
        :param activation: what activation to use if any
        :param init_lr: initial learning rate
        :param zscore_feats: whether to zscore model features
        :param tol: tolerance - stops the optimization once the loss falls below tol
        :param fc_weight_decay: regularization coefficient for fully connected layers (inverse of sklearn C)
        :param fc_dropout: dropout parameter for fc layers
        :param log_rate: rate of logging the loss values (in epochs)
        """
        self._train_batch_size = train_batch_size
        self._eval_batch_size = eval_batch_size
        self._zscore_feats = zscore_feats
        self._lr = init_lr
        self._tol = tol
        self._activation = activation
        self._fc_weight_decay = fc_weight_decay
        self._fc_dropout = fc_dropout
        self._max_epochs = max_epochs
        self._log_rate = log_rate
        self._gpu_options = gpu_options

        self._graph = None
        self._lr_ph = None
        self._opt = None
        self._scaler = None
        self._logger = logging.getLogger(fullname(self))

    def _iterate_minibatches(self, inputs, targets=None, batchsize=240, shuffle=False, random_state=None):
        """
        Iterates over inputs with minibatches
        :param inputs: input dataset, first dimension should be examples
        :param targets: [n_examples, ...] response values, first dimension should be examples
        :param batchsize: batch size
        :param shuffle: flag indicating whether to shuffle the data while making minibatches
        :param random_state: RandomState used for shuffling
        :return: minibatch of (X, Y)
        """
        input_len = inputs.shape[0]
        if shuffle:
            indices = np.arange(input_len)
            random_state.shuffle(indices)
        for start_idx in range(0, input_len, batchsize):
            if shuffle:
                excerpt = indices[start_idx:start_idx + batchsize]
            else:
                excerpt = slice(start_idx, start_idx + batchsize)
            if targets is None:
                yield inputs[excerpt]
            else:
                yield inputs[excerpt], targets[excerpt]

    def setup(self):
        import tensorflow as tf
        self._graph = tf.Graph()
        with self._graph.as_default():
            self._lr_ph = tf.compat.v1.placeholder(dtype=tf.float32)
            self._opt = tf.compat.v1.train.AdamOptimizer(learning_rate=self._lr_ph)

    def initializer(self, kind='xavier', *args, **kwargs):
        import tensorflow as tf
        if kind == 'xavier':
            init = tf.compat.v1.keras.initializers.VarianceScaling(*args, **kwargs, seed=0)
        else:
            init = getattr(tf, kind + '_initializer')(*args, **kwargs)
        return init

    def fc(self,
           inp,
           out_depth,
           kernel_init='xavier',
           kernel_init_kwargs=None,
           bias=1,
           weight_decay=None,
           activation=None,
           name='fc'):

        import tensorflow as tf
        if weight_decay is None:
            weight_decay = 0.
        if kernel_init_kwargs is None:
            kernel_init_kwargs = {}
        resh = inp
        assert (len(resh.get_shape().as_list()) == 2)
        in_depth = resh.get_shape().as_list()[-1]

        # weights
        init = self.initializer(kernel_init, **kernel_init_kwargs)
        kernel = tf.compat.v1.get_variable(initializer=init,
                                           shape=[in_depth, out_depth],
                                           dtype=tf.float32,
                                           regularizer=tf.keras.regularizers.l2(0.5 * weight_decay),
                                           name='weights')
        init = self.initializer(kind='constant', value=bias)
        biases = tf.compat.v1.get_variable(initializer=init,
                                           shape=[out_depth],
                                           dtype=tf.float32,
                                           regularizer=tf.keras.regularizers.l2(0.5 * weight_decay),
                                           name='bias')

        # ops
        fcm = tf.matmul(resh, kernel)
        output = tf.nn.bias_add(fcm, biases, name=name)

        if activation is not None:
            output = getattr(tf.nn, activation)(output, name=activation)
        return output

    def _make_behavioral_map(self):
        """
        Makes the behavioral mapping function computational graph
        """
        import tensorflow as tf
        num_classes = len(self._label_mapping.keys())
        with self._graph.as_default():
            with tf.compat.v1.variable_scope('behavioral_mapping'):
                out = self._input_placeholder
                out = tf.nn.dropout(out, rate=1 - self._fc_keep_prob, name="dropout_out")
                pred = self.fc(out,
                               out_depth=num_classes,
                               activation=self._activation,
                               weight_decay=self._fc_weight_decay, name="out")

                self._predictions = pred

    def _make_loss(self):
        """
        Makes the loss computational graph
        """
        import tensorflow as tf
        with self._graph.as_default():
            with tf.compat.v1.variable_scope('loss'):
                logits = self._predictions

                self.classification_error = tf.reduce_mean(
                    input_tensor=tf.nn.sparse_softmax_cross_entropy_with_logits(
                        logits=logits, labels=self._target_placeholder))
                self.reg_loss = tf.add_n(tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.REGULARIZATION_LOSSES))

                self.total_loss = self.classification_error + self.reg_loss
                self.tvars = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
                self.train_op = self._opt.minimize(self.total_loss, var_list=self.tvars,
                                                   global_step=tf.compat.v1.train.get_or_create_global_step())

    def _init_mapper(self, X, Y):
        """
        Initializes the mapping function graph
        :param X: input data
        :param Y: target data
        """
        import tensorflow as tf
        assert len(Y.shape) == 1
        with self._graph.as_default():
            self._input_placeholder = tf.compat.v1.placeholder(dtype=tf.float32, shape=[None] + list(X.shape[1:]))
            self._target_placeholder = tf.compat.v1.placeholder(dtype=tf.int32, shape=[None])
            self._fc_keep_prob = tf.compat.v1.placeholder(dtype=tf.float32, shape=[], name='dropout_keep_prob')
            # Build the model graph
            self._make_behavioral_map()
            self._make_loss()

            # initialize graph
            self._logger.debug('Initializing mapper')
            init_op = tf.compat.v1.variables_initializer(
                tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.GLOBAL_VARIABLES))
            self._sess = tf.compat.v1.Session(
                config=tf.compat.v1.ConfigProto(gpu_options=self._gpu_options) if self._gpu_options is not None else None)
            self._sess.run(init_op)

    def labels_to_indices(self, labels):
        label2index = OrderedDict()
        indices = []
        for label in labels:
            if label not in label2index:
                label2index[label] = (max(label2index.values()) + 1) if len(label2index) > 0 else 0
            indices.append(label2index[label])
        index2label = OrderedDict((index, label) for label, index in label2index.items())
        return np.array(indices), index2label

    def fit(self, X, Y):
        """
        Fits the parameters to the data
        :param X: Source data, first dimension is examples
        :param Y: Target data, first dimension is examples
        """
        if self._zscore_feats:
            import sklearn.preprocessing
            self._scaler = sklearn.preprocessing.StandardScaler().fit(X)
            X = self._scaler.transform(X)
        Y, self._label_mapping = self.labels_to_indices(Y.values)
        self.setup()
        assert X.ndim == 2, 'Input matrix rank should be 2.'
        random_state = RandomState(0)
        with self._graph.as_default():
            self._init_mapper(X, Y)
            lr = self._lr
            for epoch in tqdm(range(self._max_epochs), desc='epochs'):
                for counter, batch in enumerate(
                        self._iterate_minibatches(X, Y, batchsize=self._train_batch_size, shuffle=True,
                                                  random_state=random_state)):
                    feed_dict = {self._input_placeholder: batch[0],
                                 self._target_placeholder: batch[1],
                                 self._lr_ph: lr,
                                 self._fc_keep_prob: self._fc_dropout}
                    _, loss_value, reg_loss_value = self._sess.run(
                        [self.train_op, self.classification_error, self.reg_loss],
                        feed_dict=feed_dict)
                if epoch % self._log_rate == 0:
                    self._logger.debug(f'Epoch: {epoch}, Err Loss: {loss_value:.2f}, Reg Loss: {reg_loss_value:.2f}')

                if loss_value < self._tol:
                    self._logger.debug('Converged.')
                    break

    def predict_proba(self, X):
        import tensorflow as tf
        assert len(X.shape) == 2, "expected 2-dimensional input"
        if self._zscore_feats:
            scaled_X = self._scaler.transform(X)
        else:
            scaled_X = X
        with self._graph.as_default():
            preds = []
            for batch in self._iterate_minibatches(scaled_X, batchsize=self._eval_batch_size, shuffle=False):
                feed_dict = {self._input_placeholder: batch, self._fc_keep_prob: 1.0}
                softmax = self._sess.run([tf.nn.softmax(self._predictions)], feed_dict=feed_dict)
                preds.append(np.squeeze(softmax))
            proba = np.concatenate(preds, axis=0)
        # we take only the 0th dimension because the 1st dimension is just the features
        X_coords = {coord: (dims, value) for coord, dims, value in walk_coords(X)
                    if array_is_element(dims, X.dims[0])}
        proba = BehavioralAssembly(proba,
                                   coords={**X_coords, **{'choice': list(self._label_mapping.values())}},
                                   dims=[X.dims[0], 'choice'])
        return proba

    def close(self):
        """
        Closes occupied resources
        """
        import tensorflow as tf
        tf.compat.v1.reset_default_graph()
        self._sess.close()
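The core of `compute_osts` is the final interpolation: the decoder's i1 at the last sub-threshold time bin and at the first supra-threshold time bin define a line, and the predicted OST is where that line crosses the target i1. A self-contained sketch with made-up values (not from any benchmark data):

# Illustrative sketch of the interpolation step, with made-up numbers:
from scipy.interpolate import interp1d
from scipy.optimize import fsolve

last_ost_time, last_ost_i1 = 90.0, 0.8  # i1 still below the target at time bin 90
hit_ost_time, hit_ost_i1 = 110.0, 1.3   # i1 first exceeds the target at time bin 110
threshold_i1 = 1.0                      # target i1 for this image

fit = interp1d([last_ost_time, hit_ost_time], [last_ost_i1, hit_ost_i1], fill_value='extrapolate')
ost = fsolve(lambda xs: [fit(x) - threshold_i1 for x in xs], x0=hit_ost_time)[0]
print(ost)  # 98.0: the line from (90, 0.8) to (110, 1.3) crosses 1.0 at 98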
File without changes
@@ -0,0 +1,14 @@
from brainscore_vision import metric_registry
from .metric import RDMMetric, RDMCrossValidated

metric_registry['rdm'] = RDMMetric
metric_registry['rdm_cv'] = RDMCrossValidated

BIBTEX = """@article{kriegeskorte2008representational,
  title={Representational similarity analysis-connecting the branches of systems neuroscience},
  author={Kriegeskorte, Nikolaus and Mur, Marieke and Bandettini, Peter A},
  journal={Frontiers in systems neuroscience},
  pages={4},
  year={2008},
  publisher={Frontiers}
}"""
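For intuition about what these registered metrics compare, per Kriegeskorte et al. (2008): a representational dissimilarity matrix (RDM) holds pairwise dissimilarities between stimulus representations, and two systems are scored by correlating their RDMs' off-diagonal entries. A minimal sketch on synthetic data, illustrating the idea rather than the package's own implementation:

# Illustrative RDM comparison on synthetic data (not the package implementation):
import numpy as np
from scipy.stats import spearmanr

rng = np.random.default_rng(0)
model_features = rng.standard_normal((10, 50))    # 10 stimuli x 50 model units
neural_responses = rng.standard_normal((10, 30))  # 10 stimuli x 30 neurons

def rdm(responses):
    return 1 - np.corrcoef(responses)  # dissimilarity = 1 - Pearson r between stimulus patterns

i, j = np.triu_indices(10, k=1)  # compare only off-diagonal entries
score, _ = spearmanr(rdm(model_features)[i, j], rdm(neural_responses)[i, j])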