brainscore-vision 2.1__py3-none-any.whl
Sign up to get free protection for your applications and to get access to all the features.
- brainscore_vision/__init__.py +105 -0
- brainscore_vision/__main__.py +20 -0
- brainscore_vision/benchmark_helpers/__init__.py +67 -0
- brainscore_vision/benchmark_helpers/neural_common.py +70 -0
- brainscore_vision/benchmark_helpers/properties_common.py +424 -0
- brainscore_vision/benchmark_helpers/screen.py +126 -0
- brainscore_vision/benchmark_helpers/test_helper.py +160 -0
- brainscore_vision/benchmarks/README.md +7 -0
- brainscore_vision/benchmarks/__init__.py +122 -0
- brainscore_vision/benchmarks/baker2022/__init__.py +9 -0
- brainscore_vision/benchmarks/baker2022/benchmark.py +125 -0
- brainscore_vision/benchmarks/baker2022/requirements.txt +1 -0
- brainscore_vision/benchmarks/baker2022/test.py +90 -0
- brainscore_vision/benchmarks/bmd2024/__init__.py +8 -0
- brainscore_vision/benchmarks/bmd2024/benchmark.py +51 -0
- brainscore_vision/benchmarks/bmd2024/test.py +29 -0
- brainscore_vision/benchmarks/bracci2019/__init__.py +8 -0
- brainscore_vision/benchmarks/bracci2019/benchmark.py +286 -0
- brainscore_vision/benchmarks/bracci2019/requirements.txt +3 -0
- brainscore_vision/benchmarks/cadena2017/__init__.py +5 -0
- brainscore_vision/benchmarks/cadena2017/benchmark.py +91 -0
- brainscore_vision/benchmarks/cadena2017/test.py +35 -0
- brainscore_vision/benchmarks/coggan2024_behavior/__init__.py +8 -0
- brainscore_vision/benchmarks/coggan2024_behavior/benchmark.py +133 -0
- brainscore_vision/benchmarks/coggan2024_behavior/test.py +21 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/__init__.py +15 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/benchmark.py +201 -0
- brainscore_vision/benchmarks/coggan2024_fMRI/test.py +25 -0
- brainscore_vision/benchmarks/ferguson2024/__init__.py +24 -0
- brainscore_vision/benchmarks/ferguson2024/benchmark.py +210 -0
- brainscore_vision/benchmarks/ferguson2024/helpers/helpers.py +251 -0
- brainscore_vision/benchmarks/ferguson2024/requirements.txt +5 -0
- brainscore_vision/benchmarks/ferguson2024/test.py +114 -0
- brainscore_vision/benchmarks/freemanziemba2013/__init__.py +10 -0
- brainscore_vision/benchmarks/freemanziemba2013/benchmarks/benchmark.py +53 -0
- brainscore_vision/benchmarks/freemanziemba2013/benchmarks/public_benchmarks.py +37 -0
- brainscore_vision/benchmarks/freemanziemba2013/test.py +98 -0
- brainscore_vision/benchmarks/geirhos2021/__init__.py +59 -0
- brainscore_vision/benchmarks/geirhos2021/benchmark.py +132 -0
- brainscore_vision/benchmarks/geirhos2021/test.py +189 -0
- brainscore_vision/benchmarks/hebart2023/__init__.py +4 -0
- brainscore_vision/benchmarks/hebart2023/benchmark.py +72 -0
- brainscore_vision/benchmarks/hebart2023/test.py +19 -0
- brainscore_vision/benchmarks/hermann2020/__init__.py +6 -0
- brainscore_vision/benchmarks/hermann2020/benchmark.py +63 -0
- brainscore_vision/benchmarks/hermann2020/test.py +28 -0
- brainscore_vision/benchmarks/igustibagus2024/__init__.py +11 -0
- brainscore_vision/benchmarks/igustibagus2024/domain_transfer_analysis.py +306 -0
- brainscore_vision/benchmarks/igustibagus2024/domain_transfer_neural.py +134 -0
- brainscore_vision/benchmarks/igustibagus2024/test.py +45 -0
- brainscore_vision/benchmarks/imagenet/__init__.py +4 -0
- brainscore_vision/benchmarks/imagenet/benchmark.py +50 -0
- brainscore_vision/benchmarks/imagenet/imagenet2012.csv +50001 -0
- brainscore_vision/benchmarks/imagenet/test.py +32 -0
- brainscore_vision/benchmarks/imagenet_c/__init__.py +7 -0
- brainscore_vision/benchmarks/imagenet_c/benchmark.py +204 -0
- brainscore_vision/benchmarks/imagenet_c/test.py +57 -0
- brainscore_vision/benchmarks/islam2021/__init__.py +11 -0
- brainscore_vision/benchmarks/islam2021/benchmark.py +107 -0
- brainscore_vision/benchmarks/islam2021/test.py +47 -0
- brainscore_vision/benchmarks/kar2019/__init__.py +4 -0
- brainscore_vision/benchmarks/kar2019/benchmark.py +88 -0
- brainscore_vision/benchmarks/kar2019/test.py +93 -0
- brainscore_vision/benchmarks/majajhong2015/__init__.py +18 -0
- brainscore_vision/benchmarks/majajhong2015/benchmark.py +96 -0
- brainscore_vision/benchmarks/majajhong2015/test.py +103 -0
- brainscore_vision/benchmarks/malania2007/__init__.py +13 -0
- brainscore_vision/benchmarks/malania2007/benchmark.py +235 -0
- brainscore_vision/benchmarks/malania2007/test.py +64 -0
- brainscore_vision/benchmarks/maniquet2024/__init__.py +6 -0
- brainscore_vision/benchmarks/maniquet2024/benchmark.py +199 -0
- brainscore_vision/benchmarks/maniquet2024/test.py +17 -0
- brainscore_vision/benchmarks/marques2020/__init__.py +76 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/cavanaugh2002a_benchmark.py +119 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982a_benchmark.py +84 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982b_benchmark.py +88 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/freemanZiemba2013_benchmark.py +138 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/ringach2002_benchmark.py +167 -0
- brainscore_vision/benchmarks/marques2020/benchmarks/schiller1976_benchmark.py +100 -0
- brainscore_vision/benchmarks/marques2020/test.py +135 -0
- brainscore_vision/benchmarks/objectnet/__init__.py +4 -0
- brainscore_vision/benchmarks/objectnet/benchmark.py +52 -0
- brainscore_vision/benchmarks/objectnet/test.py +33 -0
- brainscore_vision/benchmarks/rajalingham2018/__init__.py +10 -0
- brainscore_vision/benchmarks/rajalingham2018/benchmarks/benchmark.py +74 -0
- brainscore_vision/benchmarks/rajalingham2018/benchmarks/public_benchmark.py +10 -0
- brainscore_vision/benchmarks/rajalingham2018/test.py +125 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/alexnet-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=alexnet,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet18,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet34,stimuli_identifier=objectome-240.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet18-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet34-probabilities.nc +0 -0
- brainscore_vision/benchmarks/rajalingham2020/__init__.py +4 -0
- brainscore_vision/benchmarks/rajalingham2020/benchmark.py +52 -0
- brainscore_vision/benchmarks/rajalingham2020/test.py +39 -0
- brainscore_vision/benchmarks/sanghavi2020/__init__.py +17 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavi2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavijozwik2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavimurty2020_benchmark.py +44 -0
- brainscore_vision/benchmarks/sanghavi2020/test.py +83 -0
- brainscore_vision/benchmarks/scialom2024/__init__.py +52 -0
- brainscore_vision/benchmarks/scialom2024/benchmark.py +97 -0
- brainscore_vision/benchmarks/scialom2024/test.py +162 -0
- brainscore_vision/data/__init__.py +0 -0
- brainscore_vision/data/baker2022/__init__.py +40 -0
- brainscore_vision/data/baker2022/data_packaging/inverted_distortion_data_assembly.py +43 -0
- brainscore_vision/data/baker2022/data_packaging/inverted_distortion_stimulus_set.py +81 -0
- brainscore_vision/data/baker2022/data_packaging/mapping.py +60 -0
- brainscore_vision/data/baker2022/data_packaging/normal_distortion_data_assembly.py +46 -0
- brainscore_vision/data/baker2022/data_packaging/normal_distortion_stimulus_set.py +94 -0
- brainscore_vision/data/baker2022/test.py +135 -0
- brainscore_vision/data/barbumayo2019/BarbuMayo2019.py +26 -0
- brainscore_vision/data/barbumayo2019/__init__.py +23 -0
- brainscore_vision/data/barbumayo2019/test.py +10 -0
- brainscore_vision/data/bashivankar2019/__init__.py +52 -0
- brainscore_vision/data/bashivankar2019/data_packaging/2020-08-17_npc_v4_data.h5.png +0 -0
- brainscore_vision/data/bashivankar2019/data_packaging/requirements.txt +4 -0
- brainscore_vision/data/bashivankar2019/data_packaging/synthetic.py +162 -0
- brainscore_vision/data/bashivankar2019/test.py +15 -0
- brainscore_vision/data/bmd2024/__init__.py +69 -0
- brainscore_vision/data/bmd2024/data_packaging/BMD_2024_data_assembly.py +91 -0
- brainscore_vision/data/bmd2024/data_packaging/BMD_2024_simulus_set.py +48 -0
- brainscore_vision/data/bmd2024/data_packaging/stim_meta.csv +401 -0
- brainscore_vision/data/bmd2024/test.py +130 -0
- brainscore_vision/data/bracci2019/__init__.py +36 -0
- brainscore_vision/data/bracci2019/data_packaging.py +221 -0
- brainscore_vision/data/bracci2019/test.py +16 -0
- brainscore_vision/data/cadena2017/__init__.py +52 -0
- brainscore_vision/data/cadena2017/data_packaging/2018-08-07_tolias_v1.ipynb +25880 -0
- brainscore_vision/data/cadena2017/data_packaging/analysis.py +26 -0
- brainscore_vision/data/cadena2017/test.py +24 -0
- brainscore_vision/data/cichy2019/__init__.py +38 -0
- brainscore_vision/data/cichy2019/test.py +8 -0
- brainscore_vision/data/coggan2024_behavior/__init__.py +36 -0
- brainscore_vision/data/coggan2024_behavior/data_packaging.py +166 -0
- brainscore_vision/data/coggan2024_behavior/test.py +32 -0
- brainscore_vision/data/coggan2024_fMRI/__init__.py +27 -0
- brainscore_vision/data/coggan2024_fMRI/data_packaging.py +123 -0
- brainscore_vision/data/coggan2024_fMRI/test.py +25 -0
- brainscore_vision/data/david2004/__init__.py +34 -0
- brainscore_vision/data/david2004/data_packaging/2018-05-10_gallant_data.ipynb +3647 -0
- brainscore_vision/data/david2004/data_packaging/2018-05-23_gallant_data.ipynb +3149 -0
- brainscore_vision/data/david2004/data_packaging/2018-06-05_gallant_data.ipynb +3628 -0
- brainscore_vision/data/david2004/data_packaging/__init__.py +61 -0
- brainscore_vision/data/david2004/data_packaging/convertGallant.m +100 -0
- brainscore_vision/data/david2004/data_packaging/convertGallantV1Aligned.m +58 -0
- brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/DataHash.m +484 -0
- brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/license.txt +24 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.c +895 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.m +107 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.mexw64 +0 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5_helper.m +91 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/InstallMex.m +307 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/license.txt +24 -0
- brainscore_vision/data/david2004/data_packaging/lib/GetMD5/uTest_GetMD5.m +290 -0
- brainscore_vision/data/david2004/data_packaging/lib/glob/glob.m +472 -0
- brainscore_vision/data/david2004/data_packaging/lib/glob/license.txt +27 -0
- brainscore_vision/data/david2004/data_packaging/xr_align_debug.py +137 -0
- brainscore_vision/data/david2004/test.py +8 -0
- brainscore_vision/data/deng2009/__init__.py +22 -0
- brainscore_vision/data/deng2009/deng2009imagenet.py +33 -0
- brainscore_vision/data/deng2009/test.py +9 -0
- brainscore_vision/data/ferguson2024/__init__.py +401 -0
- brainscore_vision/data/ferguson2024/data_packaging/data_packaging.py +164 -0
- brainscore_vision/data/ferguson2024/data_packaging/fitting_stimuli.py +20 -0
- brainscore_vision/data/ferguson2024/requirements.txt +2 -0
- brainscore_vision/data/ferguson2024/test.py +155 -0
- brainscore_vision/data/freemanziemba2013/__init__.py +133 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2018-10-05_movshon.ipynb +2002 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-21_movshon_aperture.ipynb +4730 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-26_movshon_aperture_test.ipynb +2228 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/aperture_correct.py +160 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/data_packaging.py +57 -0
- brainscore_vision/data/freemanziemba2013/data_packaging/movshon.py +202 -0
- brainscore_vision/data/freemanziemba2013/test.py +97 -0
- brainscore_vision/data/geirhos2021/__init__.py +358 -0
- brainscore_vision/data/geirhos2021/creating_geirhos_ids.ipynb +468 -0
- brainscore_vision/data/geirhos2021/data_packaging/colour/colour_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/colour/colour_stimulus_set.py +81 -0
- brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_data_assembly.py +83 -0
- brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_data_assembly.py +100 -0
- brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_stimulus_set.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/edge/edge_data_assembly.py +96 -0
- brainscore_vision/data/geirhos2021/data_packaging/edge/edge_stimulus_set.py +69 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_data_assembly.py +92 -0
- brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_data_assembly.py +83 -0
- brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_stimulus_set.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_stimulus_set.py +81 -0
- brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_data_assembly.py +84 -0
- brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_data_assembly.py +88 -0
- brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_data_assembly.py +100 -0
- brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_stimulus_set.py +71 -0
- brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_data_assembly.py +88 -0
- brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_stimulus_set.py +75 -0
- brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_data_assembly.py +87 -0
- brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_stimulus_set.py +75 -0
- brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_data_assembly.py +86 -0
- brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_stimulus_set.py +82 -0
- brainscore_vision/data/geirhos2021/geirhos_hashes.csv +52 -0
- brainscore_vision/data/geirhos2021/test.py +330 -0
- brainscore_vision/data/hebart2023/__init__.py +23 -0
- brainscore_vision/data/hebart2023/packaging/data_assembly.py +40 -0
- brainscore_vision/data/hebart2023/packaging/stimulus_set.py +72 -0
- brainscore_vision/data/hebart2023/test.py +42 -0
- brainscore_vision/data/hendrycks2019/__init__.py +45 -0
- brainscore_vision/data/hendrycks2019/test.py +26 -0
- brainscore_vision/data/igustibagus2024/__init__.py +23 -0
- brainscore_vision/data/igustibagus2024/dependencies/data_pico/stimulus_dicarlo_domain_transfer.csv +3139 -0
- brainscore_vision/data/igustibagus2024/investigation_consistency.ipynb +346 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/__init__.py +0 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly.ipynb +649 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly_and_stim.py +152 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/create_stimulus_set_with_background-id.py +45 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/helpers_background_id.py +849 -0
- brainscore_vision/data/igustibagus2024/merged_assembly/merged_stimulus_set.csv +3139 -0
- brainscore_vision/data/igustibagus2024/oleo_pico_exploration.ipynb +410 -0
- brainscore_vision/data/igustibagus2024/test.py +26 -0
- brainscore_vision/data/imagenetslim15000/ImageNetSlim15000.py +30 -0
- brainscore_vision/data/imagenetslim15000/__init__.py +11 -0
- brainscore_vision/data/imagenetslim15000/test.py +8 -0
- brainscore_vision/data/islam2021/__init__.py +18 -0
- brainscore_vision/data/islam2021/data_packaging.py +64 -0
- brainscore_vision/data/islam2021/test.py +11 -0
- brainscore_vision/data/kar2018/__init__.py +58 -0
- brainscore_vision/data/kar2018/data_packaging/kar_coco.py +97 -0
- brainscore_vision/data/kar2018/data_packaging/kar_hvm.py +77 -0
- brainscore_vision/data/kar2018/data_packaging/requirements.txt +1 -0
- brainscore_vision/data/kar2018/test.py +10 -0
- brainscore_vision/data/kar2019/__init__.py +43 -0
- brainscore_vision/data/kar2019/data_packaging.py +116 -0
- brainscore_vision/data/kar2019/test.py +8 -0
- brainscore_vision/data/kuzovkin2018/__init__.py +36 -0
- brainscore_vision/data/kuzovkin2018/createAssembliesBrainScore.py +103 -0
- brainscore_vision/data/kuzovkin2018/test.py +8 -0
- brainscore_vision/data/majajhong2015/__init__.py +113 -0
- brainscore_vision/data/majajhong2015/data_packaging/darren10ms.py +32 -0
- brainscore_vision/data/majajhong2015/data_packaging/data_packaging.py +65 -0
- brainscore_vision/data/majajhong2015/test.py +38 -0
- brainscore_vision/data/malania2007/__init__.py +254 -0
- brainscore_vision/data/malania2007/data_packaging/malania_data_assembly.py +79 -0
- brainscore_vision/data/malania2007/data_packaging/malania_stimulus_set.py +79 -0
- brainscore_vision/data/malania2007/test.py +147 -0
- brainscore_vision/data/maniquet2024/__init__.py +57 -0
- brainscore_vision/data/maniquet2024/data_packaging.py +151 -0
- brainscore_vision/data/maniquet2024/test.py +16 -0
- brainscore_vision/data/marques2020/__init__.py +123 -0
- brainscore_vision/data/marques2020/data_packaging/marques_cavanaugh2002a.py +84 -0
- brainscore_vision/data/marques2020/data_packaging/marques_devalois1982a.py +44 -0
- brainscore_vision/data/marques2020/data_packaging/marques_devalois1982b.py +54 -0
- brainscore_vision/data/marques2020/data_packaging/marques_freemanZiemba2013.py +252 -0
- brainscore_vision/data/marques2020/data_packaging/marques_gen_stim.py +95 -0
- brainscore_vision/data/marques2020/data_packaging/marques_ringach2002.py +95 -0
- brainscore_vision/data/marques2020/data_packaging/marques_schiller1976c.py +60 -0
- brainscore_vision/data/marques2020/data_packaging/marques_stim_common.py +389 -0
- brainscore_vision/data/marques2020/data_packaging/marques_utils.py +21 -0
- brainscore_vision/data/marques2020/data_packaging/setup.py +13 -0
- brainscore_vision/data/marques2020/test.py +54 -0
- brainscore_vision/data/rajalingham2018/__init__.py +56 -0
- brainscore_vision/data/rajalingham2018/rajalingham2018objectome.py +193 -0
- brainscore_vision/data/rajalingham2018/test.py +10 -0
- brainscore_vision/data/rajalingham2020/__init__.py +39 -0
- brainscore_vision/data/rajalingham2020/rajalingham2020orthographic_IT.py +97 -0
- brainscore_vision/data/rajalingham2020/test.py +8 -0
- brainscore_vision/data/rust2012/2020-12-28_rust.ipynb +3301 -0
- brainscore_vision/data/rust2012/__init__.py +45 -0
- brainscore_vision/data/rust2012/rust305.py +35 -0
- brainscore_vision/data/rust2012/test.py +47 -0
- brainscore_vision/data/sanghavi2020/__init__.py +119 -0
- brainscore_vision/data/sanghavi2020/data_packaging/environment.yml +36 -0
- brainscore_vision/data/sanghavi2020/data_packaging/requirements.txt +4 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavi2020.py +101 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavijozwik2020.py +148 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavikar2020.py +131 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020.py +120 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things.py +138 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things1.py +118 -0
- brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things2.py +118 -0
- brainscore_vision/data/sanghavi2020/test.py +13 -0
- brainscore_vision/data/scialom2024/__init__.py +386 -0
- brainscore_vision/data/scialom2024/data_packaging/scialom_data_assembly.py +164 -0
- brainscore_vision/data/scialom2024/data_packaging/scialom_stimulus_set.py +117 -0
- brainscore_vision/data/scialom2024/test.py +301 -0
- brainscore_vision/data/seibert2019/__init__.py +25 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-10-13_juvenile.ipynb +35703 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-11-18_juvenile_scratch.txt +556 -0
- brainscore_vision/data/seibert2019/data_packaging/2020-11-22_juvenile_dldata.ipynb +3614 -0
- brainscore_vision/data/seibert2019/data_packaging/juvenile.py +103 -0
- brainscore_vision/data/seibert2019/test.py +35 -0
- brainscore_vision/data/zhang2018/__init__.py +38 -0
- brainscore_vision/data/zhang2018/test.py +29 -0
- brainscore_vision/data_helpers/__init__.py +0 -0
- brainscore_vision/data_helpers/lookup_legacy.py +15 -0
- brainscore_vision/data_helpers/s3.py +79 -0
- brainscore_vision/metric_helpers/__init__.py +5 -0
- brainscore_vision/metric_helpers/temporal.py +119 -0
- brainscore_vision/metric_helpers/transformations.py +379 -0
- brainscore_vision/metric_helpers/utils.py +71 -0
- brainscore_vision/metric_helpers/xarray_utils.py +151 -0
- brainscore_vision/metrics/__init__.py +7 -0
- brainscore_vision/metrics/accuracy/__init__.py +4 -0
- brainscore_vision/metrics/accuracy/metric.py +16 -0
- brainscore_vision/metrics/accuracy/test.py +11 -0
- brainscore_vision/metrics/accuracy_distance/__init__.py +4 -0
- brainscore_vision/metrics/accuracy_distance/metric.py +109 -0
- brainscore_vision/metrics/accuracy_distance/test.py +57 -0
- brainscore_vision/metrics/baker_accuracy_delta/__init__.py +4 -0
- brainscore_vision/metrics/baker_accuracy_delta/metric.py +94 -0
- brainscore_vision/metrics/baker_accuracy_delta/requirements.txt +1 -0
- brainscore_vision/metrics/baker_accuracy_delta/test.py +1 -0
- brainscore_vision/metrics/cka/__init__.py +14 -0
- brainscore_vision/metrics/cka/metric.py +105 -0
- brainscore_vision/metrics/cka/test.py +28 -0
- brainscore_vision/metrics/dimensionality/__init__.py +13 -0
- brainscore_vision/metrics/dimensionality/metric.py +45 -0
- brainscore_vision/metrics/distribution_similarity/__init__.py +14 -0
- brainscore_vision/metrics/distribution_similarity/metric.py +84 -0
- brainscore_vision/metrics/distribution_similarity/test.py +10 -0
- brainscore_vision/metrics/error_consistency/__init__.py +13 -0
- brainscore_vision/metrics/error_consistency/metric.py +93 -0
- brainscore_vision/metrics/error_consistency/test.py +39 -0
- brainscore_vision/metrics/i1i2/__init__.py +16 -0
- brainscore_vision/metrics/i1i2/metric.py +299 -0
- brainscore_vision/metrics/i1i2/requirements.txt +2 -0
- brainscore_vision/metrics/i1i2/test.py +36 -0
- brainscore_vision/metrics/i1i2/test_resources/alexnet-probabilities.nc +0 -0
- brainscore_vision/metrics/i1i2/test_resources/resnet18-probabilities.nc +0 -0
- brainscore_vision/metrics/i1i2/test_resources/resnet34-probabilities.nc +0 -0
- brainscore_vision/metrics/internal_consistency/__init__.py +8 -0
- brainscore_vision/metrics/internal_consistency/ceiling.py +127 -0
- brainscore_vision/metrics/internal_consistency/requirements.txt +1 -0
- brainscore_vision/metrics/internal_consistency/test.py +39 -0
- brainscore_vision/metrics/maniquet2024_metrics/__init__.py +19 -0
- brainscore_vision/metrics/maniquet2024_metrics/metric.py +416 -0
- brainscore_vision/metrics/maniquet2024_metrics/test.py +8 -0
- brainscore_vision/metrics/mask_regression/__init__.py +16 -0
- brainscore_vision/metrics/mask_regression/metric.py +242 -0
- brainscore_vision/metrics/mask_regression/requirements.txt +1 -0
- brainscore_vision/metrics/mask_regression/test.py +0 -0
- brainscore_vision/metrics/ost/__init__.py +23 -0
- brainscore_vision/metrics/ost/metric.py +350 -0
- brainscore_vision/metrics/ost/requirements.txt +2 -0
- brainscore_vision/metrics/ost/test.py +0 -0
- brainscore_vision/metrics/rdm/__init__.py +14 -0
- brainscore_vision/metrics/rdm/metric.py +101 -0
- brainscore_vision/metrics/rdm/requirements.txt +2 -0
- brainscore_vision/metrics/rdm/test.py +63 -0
- brainscore_vision/metrics/regression_correlation/__init__.py +48 -0
- brainscore_vision/metrics/regression_correlation/mask_regression.py +232 -0
- brainscore_vision/metrics/regression_correlation/metric.py +125 -0
- brainscore_vision/metrics/regression_correlation/requirements.txt +3 -0
- brainscore_vision/metrics/regression_correlation/test.py +36 -0
- brainscore_vision/metrics/threshold/__init__.py +5 -0
- brainscore_vision/metrics/threshold/metric.py +481 -0
- brainscore_vision/metrics/threshold/test.py +71 -0
- brainscore_vision/metrics/value_delta/__init__.py +4 -0
- brainscore_vision/metrics/value_delta/metric.py +30 -0
- brainscore_vision/metrics/value_delta/requirements.txt +1 -0
- brainscore_vision/metrics/value_delta/test.py +40 -0
- brainscore_vision/model_helpers/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/__init__.py +1 -0
- brainscore_vision/model_helpers/activations/core.py +635 -0
- brainscore_vision/model_helpers/activations/pca.py +117 -0
- brainscore_vision/model_helpers/activations/pytorch.py +152 -0
- brainscore_vision/model_helpers/activations/temporal/__init__.py +0 -0
- brainscore_vision/model_helpers/activations/temporal/core/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/temporal/core/executor.py +219 -0
- brainscore_vision/model_helpers/activations/temporal/core/extractor.py +282 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/base.py +274 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/base.py +134 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/base.py +99 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/block.py +77 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/causal.py +86 -0
- brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/time_aligner.py +73 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/__init__.py +3 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/base.py +17 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/image.py +50 -0
- brainscore_vision/model_helpers/activations/temporal/inputs/video.py +186 -0
- brainscore_vision/model_helpers/activations/temporal/model/__init__.py +2 -0
- brainscore_vision/model_helpers/activations/temporal/model/base.py +33 -0
- brainscore_vision/model_helpers/activations/temporal/model/pytorch.py +107 -0
- brainscore_vision/model_helpers/activations/temporal/utils.py +228 -0
- brainscore_vision/model_helpers/brain_transformation/__init__.py +97 -0
- brainscore_vision/model_helpers/brain_transformation/behavior.py +348 -0
- brainscore_vision/model_helpers/brain_transformation/imagenet_classes.txt +1000 -0
- brainscore_vision/model_helpers/brain_transformation/neural.py +159 -0
- brainscore_vision/model_helpers/brain_transformation/temporal.py +199 -0
- brainscore_vision/model_helpers/check_submission/__init__.py +0 -0
- brainscore_vision/model_helpers/check_submission/check_models.py +87 -0
- brainscore_vision/model_helpers/check_submission/images/1.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/10.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/11.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/12.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/13.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/14.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/15.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/16.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/17.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/18.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/19.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/2.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/20.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/3.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/4.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/5.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/6.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/7.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/8.png +0 -0
- brainscore_vision/model_helpers/check_submission/images/9.png +0 -0
- brainscore_vision/model_helpers/conftest.py +3 -0
- brainscore_vision/model_helpers/generic_plugin_tests.py +119 -0
- brainscore_vision/model_helpers/s3.py +62 -0
- brainscore_vision/model_helpers/utils/__init__.py +15 -0
- brainscore_vision/model_helpers/utils/s3.py +42 -0
- brainscore_vision/model_interface.py +214 -0
- brainscore_vision/models/AdvProp_efficientne_b6/__init__.py +5 -0
- brainscore_vision/models/AdvProp_efficientne_b6/model.py +75 -0
- brainscore_vision/models/AdvProp_efficientne_b6/requirements.txt +1 -0
- brainscore_vision/models/AdvProp_efficientne_b6/test.py +9 -0
- brainscore_vision/models/AlexNet_SIN/__init__.py +8 -0
- brainscore_vision/models/AlexNet_SIN/model.py +29 -0
- brainscore_vision/models/AlexNet_SIN/requirements.txt +2 -0
- brainscore_vision/models/AlexNet_SIN/test.py +1 -0
- brainscore_vision/models/Soumyadeep_inf_1/__init__.py +5 -0
- brainscore_vision/models/Soumyadeep_inf_1/model.py +60 -0
- brainscore_vision/models/Soumyadeep_inf_1/setup.py +26 -0
- brainscore_vision/models/Soumyadeep_inf_1/test.py +1 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/__init__.py +8 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/model.py +43 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/requirements.txt +4 -0
- brainscore_vision/models/ViT_L_32_imagenet1k/test.py +8 -0
- brainscore_vision/models/__init__.py +0 -0
- brainscore_vision/models/alexnet/__init__.py +8 -0
- brainscore_vision/models/alexnet/model.py +28 -0
- brainscore_vision/models/alexnet/requirements.txt +2 -0
- brainscore_vision/models/alexnet/test.py +15 -0
- brainscore_vision/models/alexnet_7be5be79/__init__.py +7 -0
- brainscore_vision/models/alexnet_7be5be79/model.py +44 -0
- brainscore_vision/models/alexnet_7be5be79/setup.py +26 -0
- brainscore_vision/models/alexnet_7be5be79/test.py +1 -0
- brainscore_vision/models/alexnet_7be5be79_convs/__init__.py +5 -0
- brainscore_vision/models/alexnet_7be5be79_convs/model.py +42 -0
- brainscore_vision/models/alexnet_7be5be79_convs/setup.py +25 -0
- brainscore_vision/models/alexnet_7be5be79_convs/test.py +1 -0
- brainscore_vision/models/alexnet_ks_torevert/__init__.py +8 -0
- brainscore_vision/models/alexnet_ks_torevert/model.py +28 -0
- brainscore_vision/models/alexnet_ks_torevert/requirements.txt +2 -0
- brainscore_vision/models/alexnet_ks_torevert/test.py +15 -0
- brainscore_vision/models/alexnet_simclr_run1/__init__.py +7 -0
- brainscore_vision/models/alexnet_simclr_run1/model.py +267 -0
- brainscore_vision/models/alexnet_simclr_run1/requirements.txt +2 -0
- brainscore_vision/models/alexnet_simclr_run1/test.py +1 -0
- brainscore_vision/models/alexnet_testing/__init__.py +8 -0
- brainscore_vision/models/alexnet_testing/model.py +28 -0
- brainscore_vision/models/alexnet_testing/requirements.txt +2 -0
- brainscore_vision/models/alexnet_testing/setup.py +24 -0
- brainscore_vision/models/alexnet_testing/test.py +15 -0
- brainscore_vision/models/antialias_resnet152/__init__.py +7 -0
- brainscore_vision/models/antialias_resnet152/model.py +35 -0
- brainscore_vision/models/antialias_resnet152/requirements.txt +3 -0
- brainscore_vision/models/antialias_resnet152/test.py +8 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/__init__.py +7 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/model.py +35 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/requirements.txt +1 -0
- brainscore_vision/models/antialiased_rnext101_32x8d/test.py +8 -0
- brainscore_vision/models/bp_resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/bp_resnet50_julios/model.py +52 -0
- brainscore_vision/models/bp_resnet50_julios/setup.py +24 -0
- brainscore_vision/models/bp_resnet50_julios/test.py +1 -0
- brainscore_vision/models/clip/__init__.py +5 -0
- brainscore_vision/models/clip/model.py +179 -0
- brainscore_vision/models/clip/requirements.txt +4 -0
- brainscore_vision/models/clip/test.py +1 -0
- brainscore_vision/models/clipvision/__init__.py +5 -0
- brainscore_vision/models/clipvision/model.py +179 -0
- brainscore_vision/models/clipvision/requirements.txt +4 -0
- brainscore_vision/models/clipvision/test.py +1 -0
- brainscore_vision/models/cornet_s/__init__.py +8 -0
- brainscore_vision/models/cornet_s/helpers/helpers.py +215 -0
- brainscore_vision/models/cornet_s/model.py +77 -0
- brainscore_vision/models/cornet_s/requirements.txt +7 -0
- brainscore_vision/models/cornet_s/test.py +8 -0
- brainscore_vision/models/cornet_s_ynshah/__init__.py +388 -0
- brainscore_vision/models/cornet_s_ynshah/model.py +192 -0
- brainscore_vision/models/cornet_s_ynshah/setup.py +24 -0
- brainscore_vision/models/cornet_s_ynshah/test.py +0 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/__init__.py +7 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/model.py +75 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/requirements.txt +4 -0
- brainscore_vision/models/custom_model_cv_18_dagger_408/test.py +8 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/__init__.py +8 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/model.py +57 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/requirements.txt +3 -0
- brainscore_vision/models/cv_18_dagger_408_pretrained/test.py +25 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/__init__.py +9 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/model.py +134 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/requirements.txt +4 -0
- brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/test.py +8 -0
- brainscore_vision/models/dbp_resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/dbp_resnet50_julios/model.py +52 -0
- brainscore_vision/models/dbp_resnet50_julios/setup.py +24 -0
- brainscore_vision/models/dbp_resnet50_julios/test.py +1 -0
- brainscore_vision/models/densenet_201_pytorch/__init__.py +7 -0
- brainscore_vision/models/densenet_201_pytorch/model.py +59 -0
- brainscore_vision/models/densenet_201_pytorch/requirements.txt +3 -0
- brainscore_vision/models/densenet_201_pytorch/test.py +8 -0
- brainscore_vision/models/eBarlow_Vanilla/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla/model.py +50 -0
- brainscore_vision/models/eBarlow_Vanilla/requirements.txt +2 -0
- brainscore_vision/models/eBarlow_Vanilla/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eBarlow_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/model.py +84 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_Vanilla_1_full/test.py +1 -0
- brainscore_vision/models/eBarlow_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eBarlow_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eBarlow_augself_linear_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_augself_linear_1/model.py +65 -0
- brainscore_vision/models/eBarlow_augself_linear_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_augself_linear_1/test.py +1 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/model.py +65 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_augself_mlp_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_2/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_2/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_001_3/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_001_3/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_001_3/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_001_3/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01/model.py +50 -0
- brainscore_vision/models/eBarlow_lmda_01/requirements.txt +2 -0
- brainscore_vision/models/eBarlow_lmda_01/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/model.py +84 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_1000ep/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/model.py +85 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_1_full/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/model.py +85 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/setup.py +25 -0
- brainscore_vision/models/eBarlow_lmda_02_200_full/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eBarlow_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eBarlow_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eBarlow_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eBarlow_lmda_05_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Mom_lmda_05_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla/model.py +50 -0
- brainscore_vision/models/eMMCR_Vanilla/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla/test.py +1 -0
- brainscore_vision/models/eMMCR_VanillaV2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_VanillaV2/model.py +50 -0
- brainscore_vision/models/eMMCR_VanillaV2/setup.py +24 -0
- brainscore_vision/models/eMMCR_VanillaV2/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eMMCR_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eMMCR_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eMMCR_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01/model.py +50 -0
- brainscore_vision/models/eMMCR_lmda_01/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01V2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01V2/model.py +50 -0
- brainscore_vision/models/eMMCR_lmda_01V2/requirements.txt +2 -0
- brainscore_vision/models/eMMCR_lmda_01V2/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01V2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eMMCR_lmda_01_3/__init__.py +9 -0
- brainscore_vision/models/eMMCR_lmda_01_3/model.py +65 -0
- brainscore_vision/models/eMMCR_lmda_01_3/setup.py +24 -0
- brainscore_vision/models/eMMCR_lmda_01_3/test.py +1 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/model.py +64 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_Vanilla_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/model.py +64 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/setup.py +24 -0
- brainscore_vision/models/eSimCLR_Vanilla_2/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_0001_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_001_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_01_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_01_2/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_02_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_02_1_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_03_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_04_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_04_1_1/test.py +1 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/__init__.py +9 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/model.py +65 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/setup.py +24 -0
- brainscore_vision/models/eSimCLR_lmda_05_1/test.py +1 -0
- brainscore_vision/models/effnetb1_272x240/__init__.py +5 -0
- brainscore_vision/models/effnetb1_272x240/model.py +126 -0
- brainscore_vision/models/effnetb1_272x240/requirements.txt +3 -0
- brainscore_vision/models/effnetb1_272x240/test.py +9 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/__init__.py +9 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/model.py +111 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/requirements.txt +6 -0
- brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/test.py +8 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/__init__.py +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/model.py +142 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/requirements.txt +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/test.py +8 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/__init__.py +9 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/model.py +140 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/requirements.txt +5 -0
- brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/test.py +8 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/__init__.py +5 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/model.py +62 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/requirements.txt +3 -0
- brainscore_vision/models/focalnet_tiny_in1k_submission/test.py +8 -0
- brainscore_vision/models/hmax/__init__.py +7 -0
- brainscore_vision/models/hmax/helpers/hmax.py +438 -0
- brainscore_vision/models/hmax/helpers/pytorch.py +216 -0
- brainscore_vision/models/hmax/model.py +69 -0
- brainscore_vision/models/hmax/requirements.txt +5 -0
- brainscore_vision/models/hmax/test.py +8 -0
- brainscore_vision/models/inception_v3_pytorch/__init__.py +7 -0
- brainscore_vision/models/inception_v3_pytorch/model.py +68 -0
- brainscore_vision/models/inception_v3_pytorch/requirements.txt +3 -0
- brainscore_vision/models/inception_v3_pytorch/test.py +8 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/__init__.py +7 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/model.py +60 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/requirements.txt +3 -0
- brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/test.py +8 -0
- brainscore_vision/models/mobilevit_small/__init__.py +7 -0
- brainscore_vision/models/mobilevit_small/model.py +49 -0
- brainscore_vision/models/mobilevit_small/requirements.txt +3 -0
- brainscore_vision/models/mobilevit_small/test.py +8 -0
- brainscore_vision/models/pixels/__init__.py +8 -0
- brainscore_vision/models/pixels/model.py +35 -0
- brainscore_vision/models/pixels/test.py +15 -0
- brainscore_vision/models/pnasnet_large_pytorch/__init__.py +7 -0
- brainscore_vision/models/pnasnet_large_pytorch/model.py +59 -0
- brainscore_vision/models/pnasnet_large_pytorch/requirements.txt +3 -0
- brainscore_vision/models/pnasnet_large_pytorch/test.py +8 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/model.py +64 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_Vanilla_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/model.py +65 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_01_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/model.py +65 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1/test.py +1 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/__init__.py +9 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/model.py +67 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/setup.py +25 -0
- brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/test.py +1 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/__init__.py +9 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/model.py +66 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/setup.py +25 -0
- brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/test.py +1 -0
- brainscore_vision/models/r50_tvpt/__init__.py +9 -0
- brainscore_vision/models/r50_tvpt/model.py +47 -0
- brainscore_vision/models/r50_tvpt/setup.py +24 -0
- brainscore_vision/models/r50_tvpt/test.py +1 -0
- brainscore_vision/models/regnet/__init__.py +14 -0
- brainscore_vision/models/regnet/model.py +17 -0
- brainscore_vision/models/regnet/requirements.txt +2 -0
- brainscore_vision/models/regnet/test.py +17 -0
- brainscore_vision/models/resnet18_imagenet21kP/__init__.py +6 -0
- brainscore_vision/models/resnet18_imagenet21kP/model.py +119 -0
- brainscore_vision/models/resnet18_imagenet21kP/setup.py +18 -0
- brainscore_vision/models/resnet18_imagenet21kP/test.py +0 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/__init__.py +5 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/model.py +59 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_Vanilla/test.py +1 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/__init__.py +9 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/model.py +72 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_VanillaV2/test.py +1 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/__init__.py +9 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/model.py +72 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/setup.py +24 -0
- brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/test.py +1 -0
- brainscore_vision/models/resnet50_julios/__init__.py +5 -0
- brainscore_vision/models/resnet50_julios/model.py +54 -0
- brainscore_vision/models/resnet50_julios/setup.py +24 -0
- brainscore_vision/models/resnet50_julios/test.py +1 -0
- brainscore_vision/models/resnet50_tutorial/__init__.py +5 -0
- brainscore_vision/models/resnet50_tutorial/model.py +34 -0
- brainscore_vision/models/resnet50_tutorial/requirements.txt +2 -0
- brainscore_vision/models/resnet50_tutorial/test.py +8 -0
- brainscore_vision/models/resnet_152_v2_pytorch/__init__.py +7 -0
- brainscore_vision/models/resnet_152_v2_pytorch/model.py +59 -0
- brainscore_vision/models/resnet_152_v2_pytorch/requirements.txt +2 -0
- brainscore_vision/models/resnet_152_v2_pytorch/test.py +8 -0
- brainscore_vision/models/resnet_50_robust/__init__.py +7 -0
- brainscore_vision/models/resnet_50_robust/model.py +55 -0
- brainscore_vision/models/resnet_50_robust/requirements.txt +3 -0
- brainscore_vision/models/resnet_50_robust/test.py +8 -0
- brainscore_vision/models/resnext101_32x16d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x16d_wsl/model.py +38 -0
- brainscore_vision/models/resnext101_32x16d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x16d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x32d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x32d_wsl/model.py +40 -0
- brainscore_vision/models/resnext101_32x32d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x32d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x48d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x48d_wsl/model.py +38 -0
- brainscore_vision/models/resnext101_32x48d_wsl/requirements.txt +3 -0
- brainscore_vision/models/resnext101_32x48d_wsl/test.py +8 -0
- brainscore_vision/models/resnext101_32x8d_wsl/__init__.py +7 -0
- brainscore_vision/models/resnext101_32x8d_wsl/model.py +44 -0
- brainscore_vision/models/resnext101_32x8d_wsl/requirements.txt +2 -0
- brainscore_vision/models/resnext101_32x8d_wsl/test.py +8 -0
- brainscore_vision/models/temporal_model_AVID_CMA/__init__.py +17 -0
- brainscore_vision/models/temporal_model_AVID_CMA/model.py +92 -0
- brainscore_vision/models/temporal_model_AVID_CMA/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_AVID_CMA/test.py +18 -0
- brainscore_vision/models/temporal_model_GDT/__init__.py +16 -0
- brainscore_vision/models/temporal_model_GDT/model.py +72 -0
- brainscore_vision/models/temporal_model_GDT/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_GDT/test.py +17 -0
- brainscore_vision/models/temporal_model_S3D_text_video/__init__.py +14 -0
- brainscore_vision/models/temporal_model_S3D_text_video/model.py +65 -0
- brainscore_vision/models/temporal_model_S3D_text_video/requirements.txt +1 -0
- brainscore_vision/models/temporal_model_S3D_text_video/test.py +15 -0
- brainscore_vision/models/temporal_model_SeLaVi/__init__.py +17 -0
- brainscore_vision/models/temporal_model_SeLaVi/model.py +68 -0
- brainscore_vision/models/temporal_model_SeLaVi/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_SeLaVi/test.py +18 -0
- brainscore_vision/models/temporal_model_VideoMAE/__init__.py +15 -0
- brainscore_vision/models/temporal_model_VideoMAE/model.py +100 -0
- brainscore_vision/models/temporal_model_VideoMAE/requirements.txt +6 -0
- brainscore_vision/models/temporal_model_VideoMAE/test.py +16 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/__init__.py +14 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/model.py +109 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/requirements.txt +4 -0
- brainscore_vision/models/temporal_model_VideoMAEv2/test.py +16 -0
- brainscore_vision/models/temporal_model_mae_st/__init__.py +15 -0
- brainscore_vision/models/temporal_model_mae_st/model.py +120 -0
- brainscore_vision/models/temporal_model_mae_st/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_mae_st/test.py +16 -0
- brainscore_vision/models/temporal_model_mmaction2/__init__.py +23 -0
- brainscore_vision/models/temporal_model_mmaction2/mmaction2.csv +24 -0
- brainscore_vision/models/temporal_model_mmaction2/model.py +226 -0
- brainscore_vision/models/temporal_model_mmaction2/requirements.txt +5 -0
- brainscore_vision/models/temporal_model_mmaction2/test.py +24 -0
- brainscore_vision/models/temporal_model_openstl/__init__.py +18 -0
- brainscore_vision/models/temporal_model_openstl/model.py +206 -0
- brainscore_vision/models/temporal_model_openstl/requirements.txt +3 -0
- brainscore_vision/models/temporal_model_openstl/test.py +19 -0
- brainscore_vision/models/temporal_model_torchvision/__init__.py +19 -0
- brainscore_vision/models/temporal_model_torchvision/model.py +92 -0
- brainscore_vision/models/temporal_model_torchvision/requirements.txt +2 -0
- brainscore_vision/models/temporal_model_torchvision/test.py +20 -0
- brainscore_vision/models/tv_efficientnet_b1/__init__.py +5 -0
- brainscore_vision/models/tv_efficientnet_b1/model.py +54 -0
- brainscore_vision/models/tv_efficientnet_b1/setup.py +24 -0
- brainscore_vision/models/tv_efficientnet_b1/test.py +1 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/__init__.py +7 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/model.py +104 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/requirements.txt +8 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/test.py +8 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/LICENSE +674 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/README.md +105 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/run.py +136 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/setup.py +41 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/train.py +383 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/__init__.py +71 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/back_ends.py +337 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/modules.py +126 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/params.py +100 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/utils.py +32 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/vonenet.py +68 -0
- brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet_tutorial-activations.ipynb +352 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_0/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_1/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_10/test.py +1 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/__init__.py +11 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/model.py +60 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/setup.py +25 -0
- brainscore_vision/models/yudixie_resnet18_240719_2/test.py +1 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/__init__.py +7 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/model.py +66 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/setup.py +24 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/test.py +1 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/__init__.py +7 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/model.py +68 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/setup.py +24 -0
- brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/test.py +1 -0
- brainscore_vision/submission/__init__.py +0 -0
- brainscore_vision/submission/actions_helpers.py +153 -0
- brainscore_vision/submission/config.py +7 -0
- brainscore_vision/submission/endpoints.py +58 -0
- brainscore_vision/utils/__init__.py +91 -0
- brainscore_vision-2.1.dist-info/LICENSE +11 -0
- brainscore_vision-2.1.dist-info/METADATA +152 -0
- brainscore_vision-2.1.dist-info/RECORD +1009 -0
- brainscore_vision-2.1.dist-info/WHEEL +5 -0
- brainscore_vision-2.1.dist-info/top_level.txt +4 -0
- docs/Makefile +20 -0
- docs/source/conf.py +78 -0
- docs/source/index.rst +21 -0
- docs/source/modules/api_reference.rst +10 -0
- docs/source/modules/benchmarks.rst +8 -0
- docs/source/modules/brainscore_submission.png +0 -0
- docs/source/modules/developer_clarifications.rst +36 -0
- docs/source/modules/metrics.rst +8 -0
- docs/source/modules/model_interface.rst +8 -0
- docs/source/modules/submission.rst +112 -0
- docs/source/modules/tutorial_screenshots/brain-score_logo.png +0 -0
- docs/source/modules/tutorial_screenshots/final_submit.png +0 -0
- docs/source/modules/tutorial_screenshots/init_py.png +0 -0
- docs/source/modules/tutorial_screenshots/mms.png +0 -0
- docs/source/modules/tutorial_screenshots/setup.png +0 -0
- docs/source/modules/tutorial_screenshots/sms.png +0 -0
- docs/source/modules/tutorial_screenshots/subfolders.png +0 -0
- docs/source/modules/utils.rst +22 -0
- migrations/2020-12-20_pkl_to_nc.py +90 -0
- tests/__init__.py +6 -0
- tests/conftest.py +26 -0
- tests/test_benchmark_helpers/__init__.py +0 -0
- tests/test_benchmark_helpers/test_screen.py +75 -0
- tests/test_examples.py +41 -0
- tests/test_integration.py +43 -0
- tests/test_metric_helpers/__init__.py +0 -0
- tests/test_metric_helpers/test_temporal.py +80 -0
- tests/test_metric_helpers/test_transformations.py +171 -0
- tests/test_metric_helpers/test_xarray_utils.py +85 -0
- tests/test_model_helpers/__init__.py +6 -0
- tests/test_model_helpers/activations/__init__.py +0 -0
- tests/test_model_helpers/activations/test___init__.py +404 -0
- tests/test_model_helpers/brain_transformation/__init__.py +0 -0
- tests/test_model_helpers/brain_transformation/test___init__.py +18 -0
- tests/test_model_helpers/brain_transformation/test_behavior.py +181 -0
- tests/test_model_helpers/brain_transformation/test_neural.py +70 -0
- tests/test_model_helpers/brain_transformation/test_temporal.py +66 -0
- tests/test_model_helpers/temporal/__init__.py +0 -0
- tests/test_model_helpers/temporal/activations/__init__.py +0 -0
- tests/test_model_helpers/temporal/activations/test_extractor.py +96 -0
- tests/test_model_helpers/temporal/activations/test_inferencer.py +189 -0
- tests/test_model_helpers/temporal/activations/test_inputs.py +103 -0
- tests/test_model_helpers/temporal/brain_transformation/__init__.py +0 -0
- tests/test_model_helpers/temporal/brain_transformation/test_temporal_ops.py +122 -0
- tests/test_model_helpers/temporal/test_utils.py +61 -0
- tests/test_model_helpers/test_generic_plugin_tests.py +310 -0
- tests/test_model_helpers/test_imports.py +10 -0
- tests/test_model_helpers/test_s3.py +38 -0
- tests/test_models.py +15 -0
- tests/test_stimuli.py +0 -0
- tests/test_submission/__init__.py +0 -0
- tests/test_submission/mock_config.py +3 -0
- tests/test_submission/test_actions_helpers.py +67 -0
- tests/test_submission/test_db.py +54 -0
- tests/test_submission/test_endpoints.py +125 -0
- tests/test_utils.py +21 -0
@@ -0,0 +1,33 @@
|
|
1
|
+
import numpy as np
|
2
|
+
import os
|
3
|
+
import pandas as pd
|
4
|
+
from pathlib import Path
|
5
|
+
|
6
|
+
from brainio_base.stimuli import StimulusSet
|
7
|
+
from brainio_collection.packaging import package_stimulus_set
|
8
|
+
|
9
|
+
|
10
|
+
def collect_stimuli(data_dir):
    """Build the ImageNet-2012 validation StimulusSet from ``imagenet2012.csv``.

    :param data_dir: directory containing ``imagenet2012.csv`` with at least the
        columns image_id, label, synset, image_file_sha1, filename, filepath
    :return: a StimulusSet restricted to the packaging columns, with
        ``image_paths`` mapping image_id -> local filepath and a
        ``image_path_within_store`` column (filename without extension)
    """
    listing = pd.read_csv(data_dir / 'imagenet2012.csv')
    stimulus_set = StimulusSet(listing)
    # remember where each image lives locally so the packager can find it
    stimulus_set.image_paths = {row.image_id: row.filepath for row in stimulus_set.itertuples()}
    # images are stored under their extension-less filename
    stimulus_set['image_path_within_store'] = [
        os.path.splitext(filename)[0] for filename in stimulus_set['filename']]
    columns = ['image_id', 'label', 'synset', 'image_file_sha1', 'image_path_within_store']
    stimulus_set = stimulus_set[columns]
    assert len(np.unique(stimulus_set['image_id'])) == len(stimulus_set), "duplicate entries"
    return stimulus_set
|
19
|
+
|
20
|
+
|
21
|
+
def main():
    """Collect the ImageNet validation stimuli and upload them to BrainIO.

    NOTE(review): the data directory is hard-coded to a lab machine path —
    confirm it exists wherever this script is run.
    """
    data_dir = Path('/braintree/home/msch/brain-score/brainscore/benchmarks')
    assert os.path.isdir(data_dir)

    stimuli = collect_stimuli(data_dir)
    stimuli.identifier = 'imagenet_val'

    print('Packaging stimuli')
    package_stimulus_set(stimuli,
                         stimulus_set_identifier=stimuli.identifier,
                         bucket_name='brainio.contrib')


if __name__ == '__main__':
    main()
|
@@ -0,0 +1,401 @@
|
|
1
|
+
from brainio.assemblies import BehavioralAssembly
from brainscore_vision import data_registry, stimulus_set_registry, load_stimulus_set
from brainscore_vision.data_helpers.s3 import load_assembly_from_s3, load_stimulus_set_from_s3

BIBTEX = """TBD"""


def _register(identifier, csv_sha1, zip_sha1, csv_version_id, zip_version_id,
              assembly_version_id=None, assembly_sha1=None):
    """Register the S3 stimulus-set loader for `identifier`, and — when assembly
    metadata is provided — the matching behavioral-assembly loader.

    Arguments are bound as function parameters (not free closure variables),
    so each registry entry captures its own metadata.
    """
    stimulus_set_registry[identifier] = lambda: load_stimulus_set_from_s3(
        identifier=identifier,
        bucket="brainio-brainscore",
        csv_sha1=csv_sha1,
        zip_sha1=zip_sha1,
        csv_version_id=csv_version_id,
        zip_version_id=zip_version_id)
    if assembly_version_id is not None:
        data_registry[identifier] = lambda: load_assembly_from_s3(
            identifier=identifier,
            version_id=assembly_version_id,
            sha1=assembly_sha1,
            bucket="brainio-brainscore",
            cls=BehavioralAssembly,
            stimulus_set_loader=lambda: load_stimulus_set(identifier),
        )


# S3 metadata per experiment: the stimulus set's csv/zip checksums + version ids,
# followed by the behavioral assembly's version id and checksum.
_EXPERIMENTS = {
    'circle_line': dict(
        csv_sha1="fc59d23ccfb41b4f98cf02865fc335439d2ad222",
        zip_sha1="1f0065910b01a1a0e12611fe61252eafb9c534c3",
        csv_version_id="Dcr1JsAE_bYBQwxYqem9JINE3d_bMLGu",
        zip_version_id="ss4.fqG7b6NaHkbUXO.iH8f32J07_dmo",
        assembly_version_id="2EVlerzlieVA1NbfFiOx2xnhJdVagV4j",
        assembly_sha1="586da7b1c7cb5a60fe72bc148513e3159a27b134"),
    'color': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="6ad04c58de8cc8c28b309572cc41c86470f0c322",
        csv_version_id="jK6ddF6hF_oWmE5ccm4MyprQwjfeti3.",
        zip_version_id="Eku5pHE1CNJBrcaRstb8PCYCjOliHQmY",
        assembly_version_id="VQo0U9ag8r7r9DREexvSlAD_Z326Iumr",
        assembly_sha1="5b5d67fa3189db9984006910d1954586e6a5a9f3"),
    'convergence': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="d65bdf6654e08c9107a20028281ab5e38a0be981",
        csv_version_id="PcRg7fdEJI.Ce3wkd0v6sTC3jSw6xiiq",
        zip_version_id="4EJaobPVM8STsvMKE.hEcePXcLAjB5VG",
        assembly_version_id="l.nJf3IXYqi5euv5xsqS_ip7Bs0ZpZLX",
        assembly_sha1="5165c4b0da30826b89c2c242826bb79a4417b9a5"),
    'eighth': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="6ee1931b316fd4ccf6eeb16788aa42bb7a36aa41",
        csv_version_id="fVbTijqnoE61rcXNCopHMhXdrTavCIjS",
        zip_version_id="ifwG3beZ0ePhQGqbo6S7D9Jj1LPCvwsJ",
        assembly_version_id="VklOC2KrpgLJpD1.kGj6Y5D4kLYSwr3s",
        assembly_sha1="984f9498c42b14cfae6c7272a8707df96fea7ee2"),
    'gray_easy': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="da76bdebf46fe0eb853ea1b877274b1f26f56dfc",
        csv_version_id="WCw44X7HWimdn3qLi2D9DSOm5i2bLyrd",
        zip_version_id="UJp9O0lHnMMPMFmwY29g5v1cHXvF1XpH",
        assembly_version_id="gaK.0mU6IVHjkI6MG9eE5Hz5Jt7_gxc6",
        assembly_sha1="7b09c2f1e8199e680167cfeb124c28dc68c804ab"),
    'gray_hard': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="93f02c051f4d89fe059345c0af7ba6fc83b65b35",
        csv_version_id="bxWCJhmQw9RYxQx8qzGSltZCnSY4UTRI",
        zip_version_id="WLtKonQVU9Og0ZbmVRJKx4Zzxb4INsT8",
        assembly_version_id="KSpwyfIqK6uovFojNd2_w08lKUJvfOWl",
        assembly_sha1="2fa35d41e73053ece6d1f0120ca4dc9bc4a9d4ae"),
    'half': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="6461a1d19e031943d42e97e8b557a97d14b18c55",
        csv_version_id="WGrCxoue4oPYUKz81t30jcScz1dWs5Dv",
        zip_version_id="9pvmNpTauZECPkemXEfLV_wYA9JZT0Iw",
        assembly_version_id="Z2Mpv3qH9foT9qggDIxWVHoEuKb6mC.a",
        assembly_sha1="b65e14c5d62fee715438a613e55fffa5e6f76c40"),
    'juncture': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="832102f1eaa713fdfd03c512df2b8feea422c61d",
        csv_version_id="J3wrdsSM9LMlGFoC3ks5ees_t1sKjvKc",
        zip_version_id="zNu6swQFgclcS8.miCuDBk4AQ4G54KT2",
        assembly_version_id="RstO_IgzeE2UbmHbMw6RN7vV8doFZKBq",
        assembly_sha1="b18148383ef2158aa795b3cff8a8e237e08b5070"),
    'lle': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="27817e955da9e4747d2aeb8757b7f6492bc7767e",
        csv_version_id="y3epQUp6h7zH5h8251G8DlYzwtk6VYxW",
        zip_version_id="RCPB0_kLL0GF3xrR0Nl.c11uAL8yYF8c",
        assembly_version_id="nXWjKJJyGtX.67m.M03oRw7ysfP76e4e",
        assembly_sha1="08e98305657cd374d9ea103df0fe06783a70344a"),
    # NOTE(review): the original author flagged 'llh' as possibly a placeholder name — confirm
    'llh': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="56cdf86ecd0b52349b29b2ab0be89daeed9b0eb6",
        csv_version_id="n3gooGN6lqWT5c.Qa3.kpUGUwogDtQUT",
        zip_version_id="3A2EgFZ9Un_uFl43xqXIudDHUHdF7le1",
        assembly_version_id="prACZ4sm395A5yfJEYDG77MfGMJhXaXv",
        assembly_sha1="864d49c00e777f3d464c6c0c59fee087c1de9037"),
    'quarter': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="c16c5ecf1f38af0d02255a78a6c438074ec8d446",
        csv_version_id="lnk7H5WiGe3oB0i5PMTrXA_q058kZSDz",
        zip_version_id="frHF3zSr4cCUEs7bVYjjaM3c0WQgwiA9",
        assembly_version_id="_q5R_GoANyjQ8DWQsY.2HBtzW8DoSGpm",
        assembly_sha1="921b3b51208cdd5f163eca288ea83be47a2b482f"),
    'round_f': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="7f456e390cb93805187266d05756beb9cf225e1d",
        csv_version_id="jheoU.xYIbSk2hFPhCue2MGmXckqMooe",
        zip_version_id="FtLIcpUQzHA_jPdRl_6iSJoqXZKDCeJn",
        assembly_version_id="0E7lr44ha3rV7xpnWnE1MpDV79seDxCe",
        assembly_sha1="acb19ac865b45199a58609db31d3e885ff272fd4"),
    'round_v': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="cebb84d2363c1539368e3e1b1bfd83305ad9ae13",
        csv_version_id="6_N_s3Cz_g32jncN0bWoDCh.1pWdKCv2",
        zip_version_id="r8e50KhAeIc0mKz1qE_xN2z4rMYaNsJ_",
        assembly_version_id="VS.8.ocCdNugRJNU6ha2Wm3K1lK4vK5k",
        assembly_sha1="ce0361c4386dc7b8866d78023044b3009c84aa4b"),
    'tilted_line': dict(
        csv_sha1="bc351933e1f21eee9704985c1b8231be6955d816",
        zip_sha1="bb3d7bcb60ba586c8552266839187a59c2b3138f",
        csv_version_id="7mcYPI8IYpS9Rz7pLm6QOxBne29.WcWp",
        zip_version_id="5dvzTilCQkDUHG85qCCQZOhB6ZLpfU5_",
        assembly_version_id="ae4Dbo9JU_PDwTqKGD1G4DQNrdh2cVE2",
        assembly_sha1="1806034da0c25e8625255eb94dc0a05c7e9cda1f"),
}

for _experiment, _metadata in _EXPERIMENTS.items():
    _register(f'Ferguson2024_{_experiment}', **_metadata)


# Pretraining (fitting) stimuli: stimulus sets only, no behavioral assemblies.
# Every training csv shares the same checksum.
_TRAINING_CSV_SHA1 = "098eb5999e9c4b723abc35ade862d2dc45899230"

# per experiment: (zip_sha1, csv_version_id, zip_version_id)
_TRAINING_STIMULI = {
    'circle_line': ("ba9088601d8c79ea5ff3d513e1a76b1232491918",
                    "fhYDvXCZNhij.2gnNfbTPlD.yOeiuz9G",
                    "i3lS29oWEn3JMReUaKZerehZKvZqaHq7"),
    'color': ("dcd5044c73e7523afc94f91543adb444a437f911",
              "hfvHFxmWOQUwq0LnSwrhk8xecaa9XhQW",
              "uwMSKXr5yRYqVDXA9aoS66BBXtsu2kcx"),
    'convergence': ("00eb401ddfc075a4bb448ec36b8a1c8f3ec1d6e4",
                    ".ZIqJlEMSgY_U5PeXBU33ifj2KMeMz2e",
                    "2bCuP2jVWc2WIuE9tD6b7TyPkuBbxyn0"),
    'eighth': ("26edaec1d0dd14343a117340283e091a2245f3aa",
               "X7gv5Rztd.VmOIr8rmEd7XYBWtsGDJdR",
               "wVfBxoqcy6YIZnFLLu6rj.8XUXQmOQMg"),
    'gray_easy': ("18211af83c680d5c916ec15b57b5b871494d6b28",
                  "j25..m3F2t7j.47YEiOHxTZxiq7ViPxc",
                  "h6fYQQ.DIWqr09rrqZIupCdUzJXFMTG9"),
    'gray_hard': ("a54d84dbb548706bbfeb239113a1d92205dc3f67",
                  "nNGjK3Mgo2h4WVT0yx_yvJeP1htuWSUl",
                  "MeyqeiOhGSRRLYzG_bLsG.Nj4W2ktRf8"),
    'half': ("0db665619307d5c532a3ccd4311611e5a3830a10",
             "bAg_H4VtFostaowCDqy2htVL9iBWCENh",
             "rEsJ7ZopuRTyxSnA97ifpiHtkXqnWvR5"),
    'juncture': ("eb99fd862bec5e61900d037b6b38abf2a278c9f0",
                 "Uikb_kSDojTsL8LXORmShk_cuW8lFxa.",
                 "wRFpwf_J2kC2WtBUDGiv1Enhrj5Ah5Gh"),
    'lle': ("a1d19d0b77f0eb17ae886a1b7ccc649c5e51d84e",
            "QXbtxFHLywcvLQy2enqL2Lxv9.bMgAwo",
            "3izgx5jOCHDjH1fy_ncHOL7HxZIYt5nr"),
    'llh': ("1550f9f71e6930caa15b96aaf811aa97d48d7267",
            "M3WlC_zVg5m8rYLyJd1KlKo2wQkf36G7",
            "brEvqix1vzPM6mX8Jnx7pOgJEHETpOXM"),
    'quarter': ("22669e4a94718b3cbde3f5b2a493044bc091257e",
                "lP4fsstG0Jfcnistm2H0AUhmPMHqAfTU",
                "zpwv2_fwsmHk1TyR9_DYdmNGuLykgGX_"),
    'round_f': ("e33855c899f78a115cf377a228e07d87baa554b7",
                "csLNw6RL7nen9TFyH552JSahJkKbnNLE",
                "7YYhm.tjysTS2e.IhjBx0ovOxWdAVv1M"),
    'round_v': ("b1555f8a140a12e01a87a2f4e452d5863be43a5b",
                "QeNeoWjAxMZO4AjmB2SZFC4qEzwf1cBw",
                "gj32aM8zE_VXh_N9hNI42g1Uo5AxNDJh"),
    'tilted_line': ("e92533d8aded07ed90ef25650d0cf07c3a458be7",
                    "l.8gS70OruIDfDU9Oj.DAWw6BQNB.LKc",
                    "cAv1IPQkKX8Jey1gFc4VCwItECIiSlLV"),
}

for _experiment, (_zip_sha1, _csv_version_id, _zip_version_id) in _TRAINING_STIMULI.items():
    _register(f'Ferguson2024_{_experiment}_training_stimuli',
              _TRAINING_CSV_SHA1, _zip_sha1, _csv_version_id, _zip_version_id)
|
@@ -0,0 +1,164 @@
|
|
1
|
+
from brainio.packaging import package_data_assembly
|
2
|
+
from pathlib import Path
|
3
|
+
from shutil import copy
|
4
|
+
from brainio.stimuli import StimulusSet
|
5
|
+
from brainio.packaging import package_stimulus_set
|
6
|
+
from brainio.assemblies import BehavioralAssembly
|
7
|
+
import pandas as pd
|
8
|
+
|
9
|
+
# The 14 Ferguson2024 experiments; each has a test stimulus set, a training
# stimulus set, and a behavioral assembly.
DATASETS = [
    'circle_line', 'color', 'convergence', 'eighth',
    'gray_easy', 'gray_hard', 'half', 'juncture',
    'lle', 'llh', 'quarter', 'round_f',
    'round_v', 'tilted_line',
]
|
13
|
+
|
14
|
+
|
15
|
+
# Packages the stimulus_sets for the Ferguson2024 experiment. There are 14 in all.
def create_stimulus_set_and_upload(name: str, experiment: str, upload_to_s3=True) -> StimulusSet:
    """Build the StimulusSet for one Ferguson2024 dataset and optionally upload it.

    Sample image from dataset: ``first_block_0.png``, where

    1) ``first_block`` -> which block the stimulus belongs to (which image is
       target, which is distractor)
    2) ``0`` -> a number, 0-23, indicating which variation the image is

    The raw stimuli live in two subfolders of the dataset directory
    (``/first_block`` and ``/second_block``, 24 images each); they are merged
    into a ``/final`` folder first, so the combined stimulus set has 48 rows.

    :param name: the name of the experiment, usually Ferguson2024
    :param experiment: the dataset, i.e. color
    :param upload_to_s3: True if you want to upload this to BrainIO on S3
    :return: the Stimulus Set
    """
    stimuli_directory = f'{experiment}'
    combine_block_images(stimuli_directory)

    records = []
    paths = {}
    for filepath in Path(f"{stimuli_directory}/final").glob('**/*.png'):
        stimulus_id = filepath.stem
        parts = stimulus_id.split("_")
        # stem looks like "first_block_0": parts[0] is the block half
        # ("first"/"second") and parts[2] the variation number
        # (parts[1] is the literal "block") — assumed intentional, confirm
        paths[stimulus_id] = filepath
        records.append({
            'stimulus_id': stimulus_id,
            'image_number': parts[2],
            'block': parts[0],
        })

    stimulus_set = StimulusSet(records)
    stimulus_set.stimulus_paths = paths
    stimulus_set.name = f'{name}_{experiment}'  # give the StimulusSet an identifier name

    if upload_to_s3:
        init_data = package_stimulus_set(catalog_name=None, proto_stimulus_set=stimulus_set,
                                         stimulus_set_identifier=stimulus_set.name,
                                         bucket_name="brainio-brainscore")
        print(f"{experiment} stimulus_set\n{init_data}")
    return stimulus_set
|
61
|
+
|
62
|
+
|
63
|
+
# Packages the assemblies for the Ferguson2024 experiment. There are 14 in all.
def create_assembly_and_upload(name: str, experiment: str, upload_to_s3=True) -> BehavioralAssembly:
    """Convert a sanity-processed csv into a BehavioralAssembly and optionally upload it.

    :param name: the name of the experiment, usually Ferguson2024
    :param experiment: the dataset, i.e. color
    :param upload_to_s3: True if you want to upload this to BrainIO on S3
    :return: the assembly
    """
    all_subjects = pd.read_csv(f'csvs/{experiment}_sanity_processed.csv')

    # keep only real test trials (drop warmup and sanity trials)
    all_subjects = all_subjects[all_subjects["trial_type"] == "normal"]
    # NetCDF cannot store booleans, so cast them to ints
    all_subjects = bool_to_int(all_subjects, ['correct', 'target_present'])

    # derive an id equal to the stimulus_set's stimulus_id from the stimulus URL
    all_subjects['stimulus_id'] = all_subjects['stimulus'].apply(extract_and_concatenate)

    coords = {
        'stimulus_id': ('presentation', all_subjects['stimulus_id']),
        'stimulus_id_long': ('presentation', all_subjects['stimulus']),
        'participant_id': ('presentation', all_subjects['participant_id']),
        'response_time_ms': ('presentation', all_subjects['response_time_ms']),
        'correct': ('presentation', all_subjects['correct']),
        'target_present': ('presentation', all_subjects['target_present']),
        'distractor_nums': ('presentation', all_subjects['distractor_nums']),
        'block': ('presentation', all_subjects['block']),
        'keypress_response': ('presentation', all_subjects['response']),
        'trial_type': ('presentation', all_subjects['trial_type']),
    }
    assembly = BehavioralAssembly(all_subjects['correct'], coords=coords, dims=['presentation'])
    assembly.name = f"{name}_{experiment}"

    if upload_to_s3:
        init_data = package_data_assembly(None, assembly, assembly_identifier=assembly.name,
                                          stimulus_set_identifier=f"{name}_{experiment}",
                                          assembly_class_name="BehavioralAssembly",
                                          bucket_name="brainio-brainscore")
        print(f"{experiment} assembly\n{init_data}")
    return assembly
|
108
|
+
|
109
|
+
|
110
|
+
# helper function to flatten the block subfolders into a single folder
def combine_block_images(stimuli_directory: str) -> None:
    """Merge ``<dir>/first_block`` and ``<dir>/second_block`` into ``<dir>/final``.

    Each copied file is renamed to ``<block>_<original stem>.png`` (e.g.
    ``final/first_block_0.png``) so the block survives in the file name.
    Missing block subfolders are silently skipped.

    :param stimuli_directory: the path where your stimuli are located; it may
        contain the two subfolders /first_block and /second_block
    """
    root = Path(stimuli_directory)
    destination = root / 'final'
    destination.mkdir(exist_ok=True)
    for block_name in ('first_block', 'second_block'):
        block_folder = root / block_name
        if not block_folder.exists():
            continue
        for source in block_folder.glob('*.png'):
            copy(source, destination / f"{block_name}_{source.stem}.png")
|
130
|
+
|
131
|
+
|
132
|
+
# helper function to get the stimulus_set stimulus_id from the assembly stimulus:
def extract_and_concatenate(url):
    """Derive the stimulus_set stimulus_id from a stimulus URL.

    The third-to-last path segment is the block folder and the last segment the
    image file, e.g. ``.../first_block/img/7.png`` -> ``first_block_7``.
    """
    segments = url.split('/')
    stem = segments[-1].replace(".png", "")
    return f"{segments[-3]}_{stem}"
|
138
|
+
|
139
|
+
|
140
|
+
# Converts boolean values to integers in specified columns of a DataFrame.
def bool_to_int(df, columns):
    """Cast boolean (or 'True'/'False' string) values to 0/1 for NetCDF.

    Values outside the mapping keep their original value; columns that do not
    exist are reported and skipped. The DataFrame is modified in place and
    also returned for convenience.
    """
    mapping = {'True': 1, 'False': 0, True: 1, False: 0}
    for column in columns:
        if column not in df.columns:
            print(f"Column '{column}' not found in DataFrame.")
            continue
        df[column] = df[column].map(mapping).fillna(df[column])
    return df
|
148
|
+
|
149
|
+
|
150
|
+
# wrapper: package and upload the stimulus set of every dataset
def package_all_stimulus_sets(name):
    """Run create_stimulus_set_and_upload for every Ferguson2024 dataset."""
    for dataset in DATASETS:
        create_stimulus_set_and_upload(name, dataset)
|
154
|
+
|
155
|
+
|
156
|
+
# wrapper: package and upload the behavioral assembly of every dataset
def package_all_assemblies(name):
    """Run create_assembly_and_upload for every Ferguson2024 dataset."""
    for dataset in DATASETS:
        create_assembly_and_upload(name, dataset)
|
160
|
+
|
161
|
+
|
162
|
+
if __name__ == '__main__':
    # package everything for Ferguson2024: 14 stimulus sets, then 14 assemblies
    package_all_stimulus_sets(name='Ferguson2024')
    package_all_assemblies(name='Ferguson2024')
|
@@ -0,0 +1,20 @@
|
|
1
|
+
from data_packaging import create_stimulus_set_and_upload, DATASETS

# Package and upload all 14 training (fitting) stimulus sets.
# Each fitting set holds 1920 images: 320 images x 3 distractor levels
# (low, medium, high) x 2 types (target on distractor, distractor on target).
# (The original bare triple-quoted string inside the loop was a no-op evaluated
# every iteration; it is now this comment. The unused `paths` dict was removed.)
all_stimulus_sets = []
for dataset in DATASETS:
    stimulus_set = create_stimulus_set_and_upload("Ferguson2024", f"{dataset}_training_stimuli",
                                                  upload_to_s3=True)
    all_stimulus_sets.append(stimulus_set)

# label each dataset with the name of the dataset
# NOTE(review): this column is added only AFTER upload, so the uploaded sets do
# not contain it — confirm that is intended before reordering.
for df, name in zip(all_stimulus_sets, DATASETS):
    df['experiment'] = name