brainscore_vision-2.1-py3-none-any.whl
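The wheel packages Brain-Score's vision plugins: each subdirectory under benchmarks/, data/, metrics/, and models/ registers itself through its __init__.py, and the manifest below lists every packaged file with its added line count. As a rough illustration only, here is a minimal sketch of scoring a registered model on a registered benchmark. It assumes the top-level helpers load_model and load_benchmark exported by brainscore_vision/__init__.py behave as in the public Brain-Score tutorial; the identifiers resnet50_tutorial and MajajHong2015public.IT-pls are example names (the former corresponds to the models/resnet50_tutorial plugin listed below).

    import brainscore_vision

    # Resolve plugin identifiers registered by models/<name>/__init__.py and
    # benchmarks/<name>/__init__.py (identifiers here are illustrative).
    model = brainscore_vision.load_model('resnet50_tutorial')
    benchmark = brainscore_vision.load_benchmark('MajajHong2015public.IT-pls')

    # Benchmarks are callable on a model candidate; stimuli and recordings
    # are fetched on first use via the packaged data helpers.
    score = benchmark(model)
    print(score)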

Files changed (1009)
  1. brainscore_vision/__init__.py +105 -0
  2. brainscore_vision/__main__.py +20 -0
  3. brainscore_vision/benchmark_helpers/__init__.py +67 -0
  4. brainscore_vision/benchmark_helpers/neural_common.py +70 -0
  5. brainscore_vision/benchmark_helpers/properties_common.py +424 -0
  6. brainscore_vision/benchmark_helpers/screen.py +126 -0
  7. brainscore_vision/benchmark_helpers/test_helper.py +160 -0
  8. brainscore_vision/benchmarks/README.md +7 -0
  9. brainscore_vision/benchmarks/__init__.py +122 -0
  10. brainscore_vision/benchmarks/baker2022/__init__.py +9 -0
  11. brainscore_vision/benchmarks/baker2022/benchmark.py +125 -0
  12. brainscore_vision/benchmarks/baker2022/requirements.txt +1 -0
  13. brainscore_vision/benchmarks/baker2022/test.py +90 -0
  14. brainscore_vision/benchmarks/bmd2024/__init__.py +8 -0
  15. brainscore_vision/benchmarks/bmd2024/benchmark.py +51 -0
  16. brainscore_vision/benchmarks/bmd2024/test.py +29 -0
  17. brainscore_vision/benchmarks/bracci2019/__init__.py +8 -0
  18. brainscore_vision/benchmarks/bracci2019/benchmark.py +286 -0
  19. brainscore_vision/benchmarks/bracci2019/requirements.txt +3 -0
  20. brainscore_vision/benchmarks/cadena2017/__init__.py +5 -0
  21. brainscore_vision/benchmarks/cadena2017/benchmark.py +91 -0
  22. brainscore_vision/benchmarks/cadena2017/test.py +35 -0
  23. brainscore_vision/benchmarks/coggan2024_behavior/__init__.py +8 -0
  24. brainscore_vision/benchmarks/coggan2024_behavior/benchmark.py +133 -0
  25. brainscore_vision/benchmarks/coggan2024_behavior/test.py +21 -0
  26. brainscore_vision/benchmarks/coggan2024_fMRI/__init__.py +15 -0
  27. brainscore_vision/benchmarks/coggan2024_fMRI/benchmark.py +201 -0
  28. brainscore_vision/benchmarks/coggan2024_fMRI/test.py +25 -0
  29. brainscore_vision/benchmarks/ferguson2024/__init__.py +24 -0
  30. brainscore_vision/benchmarks/ferguson2024/benchmark.py +210 -0
  31. brainscore_vision/benchmarks/ferguson2024/helpers/helpers.py +251 -0
  32. brainscore_vision/benchmarks/ferguson2024/requirements.txt +5 -0
  33. brainscore_vision/benchmarks/ferguson2024/test.py +114 -0
  34. brainscore_vision/benchmarks/freemanziemba2013/__init__.py +10 -0
  35. brainscore_vision/benchmarks/freemanziemba2013/benchmarks/benchmark.py +53 -0
  36. brainscore_vision/benchmarks/freemanziemba2013/benchmarks/public_benchmarks.py +37 -0
  37. brainscore_vision/benchmarks/freemanziemba2013/test.py +98 -0
  38. brainscore_vision/benchmarks/geirhos2021/__init__.py +59 -0
  39. brainscore_vision/benchmarks/geirhos2021/benchmark.py +132 -0
  40. brainscore_vision/benchmarks/geirhos2021/test.py +189 -0
  41. brainscore_vision/benchmarks/hebart2023/__init__.py +4 -0
  42. brainscore_vision/benchmarks/hebart2023/benchmark.py +72 -0
  43. brainscore_vision/benchmarks/hebart2023/test.py +19 -0
  44. brainscore_vision/benchmarks/hermann2020/__init__.py +6 -0
  45. brainscore_vision/benchmarks/hermann2020/benchmark.py +63 -0
  46. brainscore_vision/benchmarks/hermann2020/test.py +28 -0
  47. brainscore_vision/benchmarks/igustibagus2024/__init__.py +11 -0
  48. brainscore_vision/benchmarks/igustibagus2024/domain_transfer_analysis.py +306 -0
  49. brainscore_vision/benchmarks/igustibagus2024/domain_transfer_neural.py +134 -0
  50. brainscore_vision/benchmarks/igustibagus2024/test.py +45 -0
  51. brainscore_vision/benchmarks/imagenet/__init__.py +4 -0
  52. brainscore_vision/benchmarks/imagenet/benchmark.py +50 -0
  53. brainscore_vision/benchmarks/imagenet/imagenet2012.csv +50001 -0
  54. brainscore_vision/benchmarks/imagenet/test.py +32 -0
  55. brainscore_vision/benchmarks/imagenet_c/__init__.py +7 -0
  56. brainscore_vision/benchmarks/imagenet_c/benchmark.py +204 -0
  57. brainscore_vision/benchmarks/imagenet_c/test.py +57 -0
  58. brainscore_vision/benchmarks/islam2021/__init__.py +11 -0
  59. brainscore_vision/benchmarks/islam2021/benchmark.py +107 -0
  60. brainscore_vision/benchmarks/islam2021/test.py +47 -0
  61. brainscore_vision/benchmarks/kar2019/__init__.py +4 -0
  62. brainscore_vision/benchmarks/kar2019/benchmark.py +88 -0
  63. brainscore_vision/benchmarks/kar2019/test.py +93 -0
  64. brainscore_vision/benchmarks/majajhong2015/__init__.py +18 -0
  65. brainscore_vision/benchmarks/majajhong2015/benchmark.py +96 -0
  66. brainscore_vision/benchmarks/majajhong2015/test.py +103 -0
  67. brainscore_vision/benchmarks/malania2007/__init__.py +13 -0
  68. brainscore_vision/benchmarks/malania2007/benchmark.py +235 -0
  69. brainscore_vision/benchmarks/malania2007/test.py +64 -0
  70. brainscore_vision/benchmarks/maniquet2024/__init__.py +6 -0
  71. brainscore_vision/benchmarks/maniquet2024/benchmark.py +199 -0
  72. brainscore_vision/benchmarks/maniquet2024/test.py +17 -0
  73. brainscore_vision/benchmarks/marques2020/__init__.py +76 -0
  74. brainscore_vision/benchmarks/marques2020/benchmarks/cavanaugh2002a_benchmark.py +119 -0
  75. brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982a_benchmark.py +84 -0
  76. brainscore_vision/benchmarks/marques2020/benchmarks/devalois1982b_benchmark.py +88 -0
  77. brainscore_vision/benchmarks/marques2020/benchmarks/freemanZiemba2013_benchmark.py +138 -0
  78. brainscore_vision/benchmarks/marques2020/benchmarks/ringach2002_benchmark.py +167 -0
  79. brainscore_vision/benchmarks/marques2020/benchmarks/schiller1976_benchmark.py +100 -0
  80. brainscore_vision/benchmarks/marques2020/test.py +135 -0
  81. brainscore_vision/benchmarks/objectnet/__init__.py +4 -0
  82. brainscore_vision/benchmarks/objectnet/benchmark.py +52 -0
  83. brainscore_vision/benchmarks/objectnet/test.py +33 -0
  84. brainscore_vision/benchmarks/rajalingham2018/__init__.py +10 -0
  85. brainscore_vision/benchmarks/rajalingham2018/benchmarks/benchmark.py +74 -0
  86. brainscore_vision/benchmarks/rajalingham2018/benchmarks/public_benchmark.py +10 -0
  87. brainscore_vision/benchmarks/rajalingham2018/test.py +125 -0
  88. brainscore_vision/benchmarks/rajalingham2018/test_resources/alexnet-probabilities.nc +0 -0
  89. brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=alexnet,stimuli_identifier=objectome-240.nc +0 -0
  90. brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet18,stimuli_identifier=objectome-240.nc +0 -0
  91. brainscore_vision/benchmarks/rajalingham2018/test_resources/identifier=resnet34,stimuli_identifier=objectome-240.nc +0 -0
  92. brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet18-probabilities.nc +0 -0
  93. brainscore_vision/benchmarks/rajalingham2018/test_resources/resnet34-probabilities.nc +0 -0
  94. brainscore_vision/benchmarks/rajalingham2020/__init__.py +4 -0
  95. brainscore_vision/benchmarks/rajalingham2020/benchmark.py +52 -0
  96. brainscore_vision/benchmarks/rajalingham2020/test.py +39 -0
  97. brainscore_vision/benchmarks/sanghavi2020/__init__.py +17 -0
  98. brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavi2020_benchmark.py +44 -0
  99. brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavijozwik2020_benchmark.py +44 -0
  100. brainscore_vision/benchmarks/sanghavi2020/benchmarks/sanghavimurty2020_benchmark.py +44 -0
  101. brainscore_vision/benchmarks/sanghavi2020/test.py +83 -0
  102. brainscore_vision/benchmarks/scialom2024/__init__.py +52 -0
  103. brainscore_vision/benchmarks/scialom2024/benchmark.py +97 -0
  104. brainscore_vision/benchmarks/scialom2024/test.py +162 -0
  105. brainscore_vision/data/__init__.py +0 -0
  106. brainscore_vision/data/baker2022/__init__.py +40 -0
  107. brainscore_vision/data/baker2022/data_packaging/inverted_distortion_data_assembly.py +43 -0
  108. brainscore_vision/data/baker2022/data_packaging/inverted_distortion_stimulus_set.py +81 -0
  109. brainscore_vision/data/baker2022/data_packaging/mapping.py +60 -0
  110. brainscore_vision/data/baker2022/data_packaging/normal_distortion_data_assembly.py +46 -0
  111. brainscore_vision/data/baker2022/data_packaging/normal_distortion_stimulus_set.py +94 -0
  112. brainscore_vision/data/baker2022/test.py +135 -0
  113. brainscore_vision/data/barbumayo2019/BarbuMayo2019.py +26 -0
  114. brainscore_vision/data/barbumayo2019/__init__.py +23 -0
  115. brainscore_vision/data/barbumayo2019/test.py +10 -0
  116. brainscore_vision/data/bashivankar2019/__init__.py +52 -0
  117. brainscore_vision/data/bashivankar2019/data_packaging/2020-08-17_npc_v4_data.h5.png +0 -0
  118. brainscore_vision/data/bashivankar2019/data_packaging/requirements.txt +4 -0
  119. brainscore_vision/data/bashivankar2019/data_packaging/synthetic.py +162 -0
  120. brainscore_vision/data/bashivankar2019/test.py +15 -0
  121. brainscore_vision/data/bmd2024/__init__.py +69 -0
  122. brainscore_vision/data/bmd2024/data_packaging/BMD_2024_data_assembly.py +91 -0
  123. brainscore_vision/data/bmd2024/data_packaging/BMD_2024_simulus_set.py +48 -0
  124. brainscore_vision/data/bmd2024/data_packaging/stim_meta.csv +401 -0
  125. brainscore_vision/data/bmd2024/test.py +130 -0
  126. brainscore_vision/data/bracci2019/__init__.py +36 -0
  127. brainscore_vision/data/bracci2019/data_packaging.py +221 -0
  128. brainscore_vision/data/bracci2019/test.py +16 -0
  129. brainscore_vision/data/cadena2017/__init__.py +52 -0
  130. brainscore_vision/data/cadena2017/data_packaging/2018-08-07_tolias_v1.ipynb +25880 -0
  131. brainscore_vision/data/cadena2017/data_packaging/analysis.py +26 -0
  132. brainscore_vision/data/cadena2017/test.py +24 -0
  133. brainscore_vision/data/cichy2019/__init__.py +38 -0
  134. brainscore_vision/data/cichy2019/test.py +8 -0
  135. brainscore_vision/data/coggan2024_behavior/__init__.py +36 -0
  136. brainscore_vision/data/coggan2024_behavior/data_packaging.py +166 -0
  137. brainscore_vision/data/coggan2024_behavior/test.py +32 -0
  138. brainscore_vision/data/coggan2024_fMRI/__init__.py +27 -0
  139. brainscore_vision/data/coggan2024_fMRI/data_packaging.py +123 -0
  140. brainscore_vision/data/coggan2024_fMRI/test.py +25 -0
  141. brainscore_vision/data/david2004/__init__.py +34 -0
  142. brainscore_vision/data/david2004/data_packaging/2018-05-10_gallant_data.ipynb +3647 -0
  143. brainscore_vision/data/david2004/data_packaging/2018-05-23_gallant_data.ipynb +3149 -0
  144. brainscore_vision/data/david2004/data_packaging/2018-06-05_gallant_data.ipynb +3628 -0
  145. brainscore_vision/data/david2004/data_packaging/__init__.py +61 -0
  146. brainscore_vision/data/david2004/data_packaging/convertGallant.m +100 -0
  147. brainscore_vision/data/david2004/data_packaging/convertGallantV1Aligned.m +58 -0
  148. brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/DataHash.m +484 -0
  149. brainscore_vision/data/david2004/data_packaging/lib/DataHash_20160618/license.txt +24 -0
  150. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.c +895 -0
  151. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.m +107 -0
  152. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5.mexw64 +0 -0
  153. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/GetMD5_helper.m +91 -0
  154. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/InstallMex.m +307 -0
  155. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/license.txt +24 -0
  156. brainscore_vision/data/david2004/data_packaging/lib/GetMD5/uTest_GetMD5.m +290 -0
  157. brainscore_vision/data/david2004/data_packaging/lib/glob/glob.m +472 -0
  158. brainscore_vision/data/david2004/data_packaging/lib/glob/license.txt +27 -0
  159. brainscore_vision/data/david2004/data_packaging/xr_align_debug.py +137 -0
  160. brainscore_vision/data/david2004/test.py +8 -0
  161. brainscore_vision/data/deng2009/__init__.py +22 -0
  162. brainscore_vision/data/deng2009/deng2009imagenet.py +33 -0
  163. brainscore_vision/data/deng2009/test.py +9 -0
  164. brainscore_vision/data/ferguson2024/__init__.py +401 -0
  165. brainscore_vision/data/ferguson2024/data_packaging/data_packaging.py +164 -0
  166. brainscore_vision/data/ferguson2024/data_packaging/fitting_stimuli.py +20 -0
  167. brainscore_vision/data/ferguson2024/requirements.txt +2 -0
  168. brainscore_vision/data/ferguson2024/test.py +155 -0
  169. brainscore_vision/data/freemanziemba2013/__init__.py +133 -0
  170. brainscore_vision/data/freemanziemba2013/data_packaging/2018-10-05_movshon.ipynb +2002 -0
  171. brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-21_movshon_aperture.ipynb +4730 -0
  172. brainscore_vision/data/freemanziemba2013/data_packaging/2020-02-26_movshon_aperture_test.ipynb +2228 -0
  173. brainscore_vision/data/freemanziemba2013/data_packaging/aperture_correct.py +160 -0
  174. brainscore_vision/data/freemanziemba2013/data_packaging/data_packaging.py +57 -0
  175. brainscore_vision/data/freemanziemba2013/data_packaging/movshon.py +202 -0
  176. brainscore_vision/data/freemanziemba2013/test.py +97 -0
  177. brainscore_vision/data/geirhos2021/__init__.py +358 -0
  178. brainscore_vision/data/geirhos2021/creating_geirhos_ids.ipynb +468 -0
  179. brainscore_vision/data/geirhos2021/data_packaging/colour/colour_data_assembly.py +87 -0
  180. brainscore_vision/data/geirhos2021/data_packaging/colour/colour_stimulus_set.py +81 -0
  181. brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_data_assembly.py +83 -0
  182. brainscore_vision/data/geirhos2021/data_packaging/contrast/contrast_stimulus_set.py +82 -0
  183. brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_data_assembly.py +100 -0
  184. brainscore_vision/data/geirhos2021/data_packaging/cue-conflict/cue-conflict_stimulus_set.py +84 -0
  185. brainscore_vision/data/geirhos2021/data_packaging/edge/edge_data_assembly.py +96 -0
  186. brainscore_vision/data/geirhos2021/data_packaging/edge/edge_stimulus_set.py +69 -0
  187. brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_data_assembly.py +92 -0
  188. brainscore_vision/data/geirhos2021/data_packaging/eidolonI/eidolonI_stimulus_set.py +82 -0
  189. brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_data_assembly.py +92 -0
  190. brainscore_vision/data/geirhos2021/data_packaging/eidolonII/eidolonII_stimulus_set.py +82 -0
  191. brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_data_assembly.py +92 -0
  192. brainscore_vision/data/geirhos2021/data_packaging/eidolonIII/eidolonIII_stimulus_set.py +82 -0
  193. brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_data_assembly.py +83 -0
  194. brainscore_vision/data/geirhos2021/data_packaging/false-colour/false-colour_stimulus_set.py +87 -0
  195. brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_data_assembly.py +84 -0
  196. brainscore_vision/data/geirhos2021/data_packaging/high-pass/high-pass_stimulus_set.py +82 -0
  197. brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_data_assembly.py +84 -0
  198. brainscore_vision/data/geirhos2021/data_packaging/low-pass/low-pass_stimulus_set.py +81 -0
  199. brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_data_assembly.py +84 -0
  200. brainscore_vision/data/geirhos2021/data_packaging/phase-scrambling/phase-scrambling_stimulus_set.py +82 -0
  201. brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_data_assembly.py +88 -0
  202. brainscore_vision/data/geirhos2021/data_packaging/power-equalisation/power-equalisation_stimulus_set.py +82 -0
  203. brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_data_assembly.py +87 -0
  204. brainscore_vision/data/geirhos2021/data_packaging/rotation/rotation_stimulus_set.py +82 -0
  205. brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_data_assembly.py +100 -0
  206. brainscore_vision/data/geirhos2021/data_packaging/silhouette/silhouette_stimulus_set.py +71 -0
  207. brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_data_assembly.py +88 -0
  208. brainscore_vision/data/geirhos2021/data_packaging/sketch/sketch_stimulus_set.py +75 -0
  209. brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_data_assembly.py +87 -0
  210. brainscore_vision/data/geirhos2021/data_packaging/stylized/stylized_stimulus_set.py +75 -0
  211. brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_data_assembly.py +86 -0
  212. brainscore_vision/data/geirhos2021/data_packaging/uniform-noise/uniform-noise_stimulus_set.py +82 -0
  213. brainscore_vision/data/geirhos2021/geirhos_hashes.csv +52 -0
  214. brainscore_vision/data/geirhos2021/test.py +330 -0
  215. brainscore_vision/data/hebart2023/__init__.py +23 -0
  216. brainscore_vision/data/hebart2023/packaging/data_assembly.py +40 -0
  217. brainscore_vision/data/hebart2023/packaging/stimulus_set.py +72 -0
  218. brainscore_vision/data/hebart2023/test.py +42 -0
  219. brainscore_vision/data/hendrycks2019/__init__.py +45 -0
  220. brainscore_vision/data/hendrycks2019/test.py +26 -0
  221. brainscore_vision/data/igustibagus2024/__init__.py +23 -0
  222. brainscore_vision/data/igustibagus2024/dependencies/data_pico/stimulus_dicarlo_domain_transfer.csv +3139 -0
  223. brainscore_vision/data/igustibagus2024/investigation_consistency.ipynb +346 -0
  224. brainscore_vision/data/igustibagus2024/merged_assembly/__init__.py +0 -0
  225. brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly.ipynb +649 -0
  226. brainscore_vision/data/igustibagus2024/merged_assembly/create_merged_assembly_and_stim.py +152 -0
  227. brainscore_vision/data/igustibagus2024/merged_assembly/create_stimulus_set_with_background-id.py +45 -0
  228. brainscore_vision/data/igustibagus2024/merged_assembly/helpers_background_id.py +849 -0
  229. brainscore_vision/data/igustibagus2024/merged_assembly/merged_stimulus_set.csv +3139 -0
  230. brainscore_vision/data/igustibagus2024/oleo_pico_exploration.ipynb +410 -0
  231. brainscore_vision/data/igustibagus2024/test.py +26 -0
  232. brainscore_vision/data/imagenetslim15000/ImageNetSlim15000.py +30 -0
  233. brainscore_vision/data/imagenetslim15000/__init__.py +11 -0
  234. brainscore_vision/data/imagenetslim15000/test.py +8 -0
  235. brainscore_vision/data/islam2021/__init__.py +18 -0
  236. brainscore_vision/data/islam2021/data_packaging.py +64 -0
  237. brainscore_vision/data/islam2021/test.py +11 -0
  238. brainscore_vision/data/kar2018/__init__.py +58 -0
  239. brainscore_vision/data/kar2018/data_packaging/kar_coco.py +97 -0
  240. brainscore_vision/data/kar2018/data_packaging/kar_hvm.py +77 -0
  241. brainscore_vision/data/kar2018/data_packaging/requirements.txt +1 -0
  242. brainscore_vision/data/kar2018/test.py +10 -0
  243. brainscore_vision/data/kar2019/__init__.py +43 -0
  244. brainscore_vision/data/kar2019/data_packaging.py +116 -0
  245. brainscore_vision/data/kar2019/test.py +8 -0
  246. brainscore_vision/data/kuzovkin2018/__init__.py +36 -0
  247. brainscore_vision/data/kuzovkin2018/createAssembliesBrainScore.py +103 -0
  248. brainscore_vision/data/kuzovkin2018/test.py +8 -0
  249. brainscore_vision/data/majajhong2015/__init__.py +113 -0
  250. brainscore_vision/data/majajhong2015/data_packaging/darren10ms.py +32 -0
  251. brainscore_vision/data/majajhong2015/data_packaging/data_packaging.py +65 -0
  252. brainscore_vision/data/majajhong2015/test.py +38 -0
  253. brainscore_vision/data/malania2007/__init__.py +254 -0
  254. brainscore_vision/data/malania2007/data_packaging/malania_data_assembly.py +79 -0
  255. brainscore_vision/data/malania2007/data_packaging/malania_stimulus_set.py +79 -0
  256. brainscore_vision/data/malania2007/test.py +147 -0
  257. brainscore_vision/data/maniquet2024/__init__.py +57 -0
  258. brainscore_vision/data/maniquet2024/data_packaging.py +151 -0
  259. brainscore_vision/data/maniquet2024/test.py +16 -0
  260. brainscore_vision/data/marques2020/__init__.py +123 -0
  261. brainscore_vision/data/marques2020/data_packaging/marques_cavanaugh2002a.py +84 -0
  262. brainscore_vision/data/marques2020/data_packaging/marques_devalois1982a.py +44 -0
  263. brainscore_vision/data/marques2020/data_packaging/marques_devalois1982b.py +54 -0
  264. brainscore_vision/data/marques2020/data_packaging/marques_freemanZiemba2013.py +252 -0
  265. brainscore_vision/data/marques2020/data_packaging/marques_gen_stim.py +95 -0
  266. brainscore_vision/data/marques2020/data_packaging/marques_ringach2002.py +95 -0
  267. brainscore_vision/data/marques2020/data_packaging/marques_schiller1976c.py +60 -0
  268. brainscore_vision/data/marques2020/data_packaging/marques_stim_common.py +389 -0
  269. brainscore_vision/data/marques2020/data_packaging/marques_utils.py +21 -0
  270. brainscore_vision/data/marques2020/data_packaging/setup.py +13 -0
  271. brainscore_vision/data/marques2020/test.py +54 -0
  272. brainscore_vision/data/rajalingham2018/__init__.py +56 -0
  273. brainscore_vision/data/rajalingham2018/rajalingham2018objectome.py +193 -0
  274. brainscore_vision/data/rajalingham2018/test.py +10 -0
  275. brainscore_vision/data/rajalingham2020/__init__.py +39 -0
  276. brainscore_vision/data/rajalingham2020/rajalingham2020orthographic_IT.py +97 -0
  277. brainscore_vision/data/rajalingham2020/test.py +8 -0
  278. brainscore_vision/data/rust2012/2020-12-28_rust.ipynb +3301 -0
  279. brainscore_vision/data/rust2012/__init__.py +45 -0
  280. brainscore_vision/data/rust2012/rust305.py +35 -0
  281. brainscore_vision/data/rust2012/test.py +47 -0
  282. brainscore_vision/data/sanghavi2020/__init__.py +119 -0
  283. brainscore_vision/data/sanghavi2020/data_packaging/environment.yml +36 -0
  284. brainscore_vision/data/sanghavi2020/data_packaging/requirements.txt +4 -0
  285. brainscore_vision/data/sanghavi2020/data_packaging/sanghavi2020.py +101 -0
  286. brainscore_vision/data/sanghavi2020/data_packaging/sanghavijozwik2020.py +148 -0
  287. brainscore_vision/data/sanghavi2020/data_packaging/sanghavikar2020.py +131 -0
  288. brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020.py +120 -0
  289. brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things.py +138 -0
  290. brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things1.py +118 -0
  291. brainscore_vision/data/sanghavi2020/data_packaging/sanghavimurty2020things2.py +118 -0
  292. brainscore_vision/data/sanghavi2020/test.py +13 -0
  293. brainscore_vision/data/scialom2024/__init__.py +386 -0
  294. brainscore_vision/data/scialom2024/data_packaging/scialom_data_assembly.py +164 -0
  295. brainscore_vision/data/scialom2024/data_packaging/scialom_stimulus_set.py +117 -0
  296. brainscore_vision/data/scialom2024/test.py +301 -0
  297. brainscore_vision/data/seibert2019/__init__.py +25 -0
  298. brainscore_vision/data/seibert2019/data_packaging/2020-10-13_juvenile.ipynb +35703 -0
  299. brainscore_vision/data/seibert2019/data_packaging/2020-11-18_juvenile_scratch.txt +556 -0
  300. brainscore_vision/data/seibert2019/data_packaging/2020-11-22_juvenile_dldata.ipynb +3614 -0
  301. brainscore_vision/data/seibert2019/data_packaging/juvenile.py +103 -0
  302. brainscore_vision/data/seibert2019/test.py +35 -0
  303. brainscore_vision/data/zhang2018/__init__.py +38 -0
  304. brainscore_vision/data/zhang2018/test.py +29 -0
  305. brainscore_vision/data_helpers/__init__.py +0 -0
  306. brainscore_vision/data_helpers/lookup_legacy.py +15 -0
  307. brainscore_vision/data_helpers/s3.py +79 -0
  308. brainscore_vision/metric_helpers/__init__.py +5 -0
  309. brainscore_vision/metric_helpers/temporal.py +119 -0
  310. brainscore_vision/metric_helpers/transformations.py +379 -0
  311. brainscore_vision/metric_helpers/utils.py +71 -0
  312. brainscore_vision/metric_helpers/xarray_utils.py +151 -0
  313. brainscore_vision/metrics/__init__.py +7 -0
  314. brainscore_vision/metrics/accuracy/__init__.py +4 -0
  315. brainscore_vision/metrics/accuracy/metric.py +16 -0
  316. brainscore_vision/metrics/accuracy/test.py +11 -0
  317. brainscore_vision/metrics/accuracy_distance/__init__.py +4 -0
  318. brainscore_vision/metrics/accuracy_distance/metric.py +109 -0
  319. brainscore_vision/metrics/accuracy_distance/test.py +57 -0
  320. brainscore_vision/metrics/baker_accuracy_delta/__init__.py +4 -0
  321. brainscore_vision/metrics/baker_accuracy_delta/metric.py +94 -0
  322. brainscore_vision/metrics/baker_accuracy_delta/requirements.txt +1 -0
  323. brainscore_vision/metrics/baker_accuracy_delta/test.py +1 -0
  324. brainscore_vision/metrics/cka/__init__.py +14 -0
  325. brainscore_vision/metrics/cka/metric.py +105 -0
  326. brainscore_vision/metrics/cka/test.py +28 -0
  327. brainscore_vision/metrics/dimensionality/__init__.py +13 -0
  328. brainscore_vision/metrics/dimensionality/metric.py +45 -0
  329. brainscore_vision/metrics/distribution_similarity/__init__.py +14 -0
  330. brainscore_vision/metrics/distribution_similarity/metric.py +84 -0
  331. brainscore_vision/metrics/distribution_similarity/test.py +10 -0
  332. brainscore_vision/metrics/error_consistency/__init__.py +13 -0
  333. brainscore_vision/metrics/error_consistency/metric.py +93 -0
  334. brainscore_vision/metrics/error_consistency/test.py +39 -0
  335. brainscore_vision/metrics/i1i2/__init__.py +16 -0
  336. brainscore_vision/metrics/i1i2/metric.py +299 -0
  337. brainscore_vision/metrics/i1i2/requirements.txt +2 -0
  338. brainscore_vision/metrics/i1i2/test.py +36 -0
  339. brainscore_vision/metrics/i1i2/test_resources/alexnet-probabilities.nc +0 -0
  340. brainscore_vision/metrics/i1i2/test_resources/resnet18-probabilities.nc +0 -0
  341. brainscore_vision/metrics/i1i2/test_resources/resnet34-probabilities.nc +0 -0
  342. brainscore_vision/metrics/internal_consistency/__init__.py +8 -0
  343. brainscore_vision/metrics/internal_consistency/ceiling.py +127 -0
  344. brainscore_vision/metrics/internal_consistency/requirements.txt +1 -0
  345. brainscore_vision/metrics/internal_consistency/test.py +39 -0
  346. brainscore_vision/metrics/maniquet2024_metrics/__init__.py +19 -0
  347. brainscore_vision/metrics/maniquet2024_metrics/metric.py +416 -0
  348. brainscore_vision/metrics/maniquet2024_metrics/test.py +8 -0
  349. brainscore_vision/metrics/mask_regression/__init__.py +16 -0
  350. brainscore_vision/metrics/mask_regression/metric.py +242 -0
  351. brainscore_vision/metrics/mask_regression/requirements.txt +1 -0
  352. brainscore_vision/metrics/mask_regression/test.py +0 -0
  353. brainscore_vision/metrics/ost/__init__.py +23 -0
  354. brainscore_vision/metrics/ost/metric.py +350 -0
  355. brainscore_vision/metrics/ost/requirements.txt +2 -0
  356. brainscore_vision/metrics/ost/test.py +0 -0
  357. brainscore_vision/metrics/rdm/__init__.py +14 -0
  358. brainscore_vision/metrics/rdm/metric.py +101 -0
  359. brainscore_vision/metrics/rdm/requirements.txt +2 -0
  360. brainscore_vision/metrics/rdm/test.py +63 -0
  361. brainscore_vision/metrics/regression_correlation/__init__.py +48 -0
  362. brainscore_vision/metrics/regression_correlation/mask_regression.py +232 -0
  363. brainscore_vision/metrics/regression_correlation/metric.py +125 -0
  364. brainscore_vision/metrics/regression_correlation/requirements.txt +3 -0
  365. brainscore_vision/metrics/regression_correlation/test.py +36 -0
  366. brainscore_vision/metrics/threshold/__init__.py +5 -0
  367. brainscore_vision/metrics/threshold/metric.py +481 -0
  368. brainscore_vision/metrics/threshold/test.py +71 -0
  369. brainscore_vision/metrics/value_delta/__init__.py +4 -0
  370. brainscore_vision/metrics/value_delta/metric.py +30 -0
  371. brainscore_vision/metrics/value_delta/requirements.txt +1 -0
  372. brainscore_vision/metrics/value_delta/test.py +40 -0
  373. brainscore_vision/model_helpers/__init__.py +3 -0
  374. brainscore_vision/model_helpers/activations/__init__.py +1 -0
  375. brainscore_vision/model_helpers/activations/core.py +635 -0
  376. brainscore_vision/model_helpers/activations/pca.py +117 -0
  377. brainscore_vision/model_helpers/activations/pytorch.py +152 -0
  378. brainscore_vision/model_helpers/activations/temporal/__init__.py +0 -0
  379. brainscore_vision/model_helpers/activations/temporal/core/__init__.py +3 -0
  380. brainscore_vision/model_helpers/activations/temporal/core/executor.py +219 -0
  381. brainscore_vision/model_helpers/activations/temporal/core/extractor.py +282 -0
  382. brainscore_vision/model_helpers/activations/temporal/core/inferencer/__init__.py +2 -0
  383. brainscore_vision/model_helpers/activations/temporal/core/inferencer/base.py +274 -0
  384. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/__init__.py +2 -0
  385. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/base.py +134 -0
  386. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/__init__.py +2 -0
  387. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/base.py +99 -0
  388. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/block.py +77 -0
  389. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/temporal_context/causal.py +86 -0
  390. brainscore_vision/model_helpers/activations/temporal/core/inferencer/video/time_aligner.py +73 -0
  391. brainscore_vision/model_helpers/activations/temporal/inputs/__init__.py +3 -0
  392. brainscore_vision/model_helpers/activations/temporal/inputs/base.py +17 -0
  393. brainscore_vision/model_helpers/activations/temporal/inputs/image.py +50 -0
  394. brainscore_vision/model_helpers/activations/temporal/inputs/video.py +186 -0
  395. brainscore_vision/model_helpers/activations/temporal/model/__init__.py +2 -0
  396. brainscore_vision/model_helpers/activations/temporal/model/base.py +33 -0
  397. brainscore_vision/model_helpers/activations/temporal/model/pytorch.py +107 -0
  398. brainscore_vision/model_helpers/activations/temporal/utils.py +228 -0
  399. brainscore_vision/model_helpers/brain_transformation/__init__.py +97 -0
  400. brainscore_vision/model_helpers/brain_transformation/behavior.py +348 -0
  401. brainscore_vision/model_helpers/brain_transformation/imagenet_classes.txt +1000 -0
  402. brainscore_vision/model_helpers/brain_transformation/neural.py +159 -0
  403. brainscore_vision/model_helpers/brain_transformation/temporal.py +199 -0
  404. brainscore_vision/model_helpers/check_submission/__init__.py +0 -0
  405. brainscore_vision/model_helpers/check_submission/check_models.py +87 -0
  406. brainscore_vision/model_helpers/check_submission/images/1.png +0 -0
  407. brainscore_vision/model_helpers/check_submission/images/10.png +0 -0
  408. brainscore_vision/model_helpers/check_submission/images/11.png +0 -0
  409. brainscore_vision/model_helpers/check_submission/images/12.png +0 -0
  410. brainscore_vision/model_helpers/check_submission/images/13.png +0 -0
  411. brainscore_vision/model_helpers/check_submission/images/14.png +0 -0
  412. brainscore_vision/model_helpers/check_submission/images/15.png +0 -0
  413. brainscore_vision/model_helpers/check_submission/images/16.png +0 -0
  414. brainscore_vision/model_helpers/check_submission/images/17.png +0 -0
  415. brainscore_vision/model_helpers/check_submission/images/18.png +0 -0
  416. brainscore_vision/model_helpers/check_submission/images/19.png +0 -0
  417. brainscore_vision/model_helpers/check_submission/images/2.png +0 -0
  418. brainscore_vision/model_helpers/check_submission/images/20.png +0 -0
  419. brainscore_vision/model_helpers/check_submission/images/3.png +0 -0
  420. brainscore_vision/model_helpers/check_submission/images/4.png +0 -0
  421. brainscore_vision/model_helpers/check_submission/images/5.png +0 -0
  422. brainscore_vision/model_helpers/check_submission/images/6.png +0 -0
  423. brainscore_vision/model_helpers/check_submission/images/7.png +0 -0
  424. brainscore_vision/model_helpers/check_submission/images/8.png +0 -0
  425. brainscore_vision/model_helpers/check_submission/images/9.png +0 -0
  426. brainscore_vision/model_helpers/conftest.py +3 -0
  427. brainscore_vision/model_helpers/generic_plugin_tests.py +119 -0
  428. brainscore_vision/model_helpers/s3.py +62 -0
  429. brainscore_vision/model_helpers/utils/__init__.py +15 -0
  430. brainscore_vision/model_helpers/utils/s3.py +42 -0
  431. brainscore_vision/model_interface.py +214 -0
  432. brainscore_vision/models/AdvProp_efficientne_b6/__init__.py +5 -0
  433. brainscore_vision/models/AdvProp_efficientne_b6/model.py +75 -0
  434. brainscore_vision/models/AdvProp_efficientne_b6/requirements.txt +1 -0
  435. brainscore_vision/models/AdvProp_efficientne_b6/test.py +9 -0
  436. brainscore_vision/models/AlexNet_SIN/__init__.py +8 -0
  437. brainscore_vision/models/AlexNet_SIN/model.py +29 -0
  438. brainscore_vision/models/AlexNet_SIN/requirements.txt +2 -0
  439. brainscore_vision/models/AlexNet_SIN/test.py +1 -0
  440. brainscore_vision/models/Soumyadeep_inf_1/__init__.py +5 -0
  441. brainscore_vision/models/Soumyadeep_inf_1/model.py +60 -0
  442. brainscore_vision/models/Soumyadeep_inf_1/setup.py +26 -0
  443. brainscore_vision/models/Soumyadeep_inf_1/test.py +1 -0
  444. brainscore_vision/models/ViT_L_32_imagenet1k/__init__.py +8 -0
  445. brainscore_vision/models/ViT_L_32_imagenet1k/model.py +43 -0
  446. brainscore_vision/models/ViT_L_32_imagenet1k/requirements.txt +4 -0
  447. brainscore_vision/models/ViT_L_32_imagenet1k/test.py +8 -0
  448. brainscore_vision/models/__init__.py +0 -0
  449. brainscore_vision/models/alexnet/__init__.py +8 -0
  450. brainscore_vision/models/alexnet/model.py +28 -0
  451. brainscore_vision/models/alexnet/requirements.txt +2 -0
  452. brainscore_vision/models/alexnet/test.py +15 -0
  453. brainscore_vision/models/alexnet_7be5be79/__init__.py +7 -0
  454. brainscore_vision/models/alexnet_7be5be79/model.py +44 -0
  455. brainscore_vision/models/alexnet_7be5be79/setup.py +26 -0
  456. brainscore_vision/models/alexnet_7be5be79/test.py +1 -0
  457. brainscore_vision/models/alexnet_7be5be79_convs/__init__.py +5 -0
  458. brainscore_vision/models/alexnet_7be5be79_convs/model.py +42 -0
  459. brainscore_vision/models/alexnet_7be5be79_convs/setup.py +25 -0
  460. brainscore_vision/models/alexnet_7be5be79_convs/test.py +1 -0
  461. brainscore_vision/models/alexnet_ks_torevert/__init__.py +8 -0
  462. brainscore_vision/models/alexnet_ks_torevert/model.py +28 -0
  463. brainscore_vision/models/alexnet_ks_torevert/requirements.txt +2 -0
  464. brainscore_vision/models/alexnet_ks_torevert/test.py +15 -0
  465. brainscore_vision/models/alexnet_simclr_run1/__init__.py +7 -0
  466. brainscore_vision/models/alexnet_simclr_run1/model.py +267 -0
  467. brainscore_vision/models/alexnet_simclr_run1/requirements.txt +2 -0
  468. brainscore_vision/models/alexnet_simclr_run1/test.py +1 -0
  469. brainscore_vision/models/alexnet_testing/__init__.py +8 -0
  470. brainscore_vision/models/alexnet_testing/model.py +28 -0
  471. brainscore_vision/models/alexnet_testing/requirements.txt +2 -0
  472. brainscore_vision/models/alexnet_testing/setup.py +24 -0
  473. brainscore_vision/models/alexnet_testing/test.py +15 -0
  474. brainscore_vision/models/antialias_resnet152/__init__.py +7 -0
  475. brainscore_vision/models/antialias_resnet152/model.py +35 -0
  476. brainscore_vision/models/antialias_resnet152/requirements.txt +3 -0
  477. brainscore_vision/models/antialias_resnet152/test.py +8 -0
  478. brainscore_vision/models/antialiased_rnext101_32x8d/__init__.py +7 -0
  479. brainscore_vision/models/antialiased_rnext101_32x8d/model.py +35 -0
  480. brainscore_vision/models/antialiased_rnext101_32x8d/requirements.txt +1 -0
  481. brainscore_vision/models/antialiased_rnext101_32x8d/test.py +8 -0
  482. brainscore_vision/models/bp_resnet50_julios/__init__.py +5 -0
  483. brainscore_vision/models/bp_resnet50_julios/model.py +52 -0
  484. brainscore_vision/models/bp_resnet50_julios/setup.py +24 -0
  485. brainscore_vision/models/bp_resnet50_julios/test.py +1 -0
  486. brainscore_vision/models/clip/__init__.py +5 -0
  487. brainscore_vision/models/clip/model.py +179 -0
  488. brainscore_vision/models/clip/requirements.txt +4 -0
  489. brainscore_vision/models/clip/test.py +1 -0
  490. brainscore_vision/models/clipvision/__init__.py +5 -0
  491. brainscore_vision/models/clipvision/model.py +179 -0
  492. brainscore_vision/models/clipvision/requirements.txt +4 -0
  493. brainscore_vision/models/clipvision/test.py +1 -0
  494. brainscore_vision/models/cornet_s/__init__.py +8 -0
  495. brainscore_vision/models/cornet_s/helpers/helpers.py +215 -0
  496. brainscore_vision/models/cornet_s/model.py +77 -0
  497. brainscore_vision/models/cornet_s/requirements.txt +7 -0
  498. brainscore_vision/models/cornet_s/test.py +8 -0
  499. brainscore_vision/models/cornet_s_ynshah/__init__.py +388 -0
  500. brainscore_vision/models/cornet_s_ynshah/model.py +192 -0
  501. brainscore_vision/models/cornet_s_ynshah/setup.py +24 -0
  502. brainscore_vision/models/cornet_s_ynshah/test.py +0 -0
  503. brainscore_vision/models/custom_model_cv_18_dagger_408/__init__.py +7 -0
  504. brainscore_vision/models/custom_model_cv_18_dagger_408/model.py +75 -0
  505. brainscore_vision/models/custom_model_cv_18_dagger_408/requirements.txt +4 -0
  506. brainscore_vision/models/custom_model_cv_18_dagger_408/test.py +8 -0
  507. brainscore_vision/models/cv_18_dagger_408_pretrained/__init__.py +8 -0
  508. brainscore_vision/models/cv_18_dagger_408_pretrained/model.py +57 -0
  509. brainscore_vision/models/cv_18_dagger_408_pretrained/requirements.txt +3 -0
  510. brainscore_vision/models/cv_18_dagger_408_pretrained/test.py +25 -0
  511. brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/__init__.py +9 -0
  512. brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/model.py +134 -0
  513. brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/requirements.txt +4 -0
  514. brainscore_vision/models/cvt_cvt_w24_384_in22k_finetuned_in1k_4/test.py +8 -0
  515. brainscore_vision/models/dbp_resnet50_julios/__init__.py +5 -0
  516. brainscore_vision/models/dbp_resnet50_julios/model.py +52 -0
  517. brainscore_vision/models/dbp_resnet50_julios/setup.py +24 -0
  518. brainscore_vision/models/dbp_resnet50_julios/test.py +1 -0
  519. brainscore_vision/models/densenet_201_pytorch/__init__.py +7 -0
  520. brainscore_vision/models/densenet_201_pytorch/model.py +59 -0
  521. brainscore_vision/models/densenet_201_pytorch/requirements.txt +3 -0
  522. brainscore_vision/models/densenet_201_pytorch/test.py +8 -0
  523. brainscore_vision/models/eBarlow_Vanilla/__init__.py +9 -0
  524. brainscore_vision/models/eBarlow_Vanilla/model.py +50 -0
  525. brainscore_vision/models/eBarlow_Vanilla/requirements.txt +2 -0
  526. brainscore_vision/models/eBarlow_Vanilla/setup.py +24 -0
  527. brainscore_vision/models/eBarlow_Vanilla/test.py +1 -0
  528. brainscore_vision/models/eBarlow_Vanilla_1/__init__.py +9 -0
  529. brainscore_vision/models/eBarlow_Vanilla_1/model.py +64 -0
  530. brainscore_vision/models/eBarlow_Vanilla_1/setup.py +24 -0
  531. brainscore_vision/models/eBarlow_Vanilla_1/test.py +1 -0
  532. brainscore_vision/models/eBarlow_Vanilla_1_full/__init__.py +9 -0
  533. brainscore_vision/models/eBarlow_Vanilla_1_full/model.py +84 -0
  534. brainscore_vision/models/eBarlow_Vanilla_1_full/setup.py +25 -0
  535. brainscore_vision/models/eBarlow_Vanilla_1_full/test.py +1 -0
  536. brainscore_vision/models/eBarlow_Vanilla_2/__init__.py +9 -0
  537. brainscore_vision/models/eBarlow_Vanilla_2/model.py +64 -0
  538. brainscore_vision/models/eBarlow_Vanilla_2/setup.py +24 -0
  539. brainscore_vision/models/eBarlow_Vanilla_2/test.py +1 -0
  540. brainscore_vision/models/eBarlow_augself_linear_1/__init__.py +9 -0
  541. brainscore_vision/models/eBarlow_augself_linear_1/model.py +65 -0
  542. brainscore_vision/models/eBarlow_augself_linear_1/setup.py +24 -0
  543. brainscore_vision/models/eBarlow_augself_linear_1/test.py +1 -0
  544. brainscore_vision/models/eBarlow_augself_mlp_1/__init__.py +9 -0
  545. brainscore_vision/models/eBarlow_augself_mlp_1/model.py +65 -0
  546. brainscore_vision/models/eBarlow_augself_mlp_1/setup.py +24 -0
  547. brainscore_vision/models/eBarlow_augself_mlp_1/test.py +1 -0
  548. brainscore_vision/models/eBarlow_lmda_0001_1/__init__.py +9 -0
  549. brainscore_vision/models/eBarlow_lmda_0001_1/model.py +65 -0
  550. brainscore_vision/models/eBarlow_lmda_0001_1/setup.py +24 -0
  551. brainscore_vision/models/eBarlow_lmda_0001_1/test.py +1 -0
  552. brainscore_vision/models/eBarlow_lmda_001_1/__init__.py +9 -0
  553. brainscore_vision/models/eBarlow_lmda_001_1/model.py +65 -0
  554. brainscore_vision/models/eBarlow_lmda_001_1/setup.py +24 -0
  555. brainscore_vision/models/eBarlow_lmda_001_1/test.py +1 -0
  556. brainscore_vision/models/eBarlow_lmda_001_2/__init__.py +9 -0
  557. brainscore_vision/models/eBarlow_lmda_001_2/model.py +65 -0
  558. brainscore_vision/models/eBarlow_lmda_001_2/setup.py +24 -0
  559. brainscore_vision/models/eBarlow_lmda_001_2/test.py +1 -0
  560. brainscore_vision/models/eBarlow_lmda_001_3/__init__.py +9 -0
  561. brainscore_vision/models/eBarlow_lmda_001_3/model.py +65 -0
  562. brainscore_vision/models/eBarlow_lmda_001_3/setup.py +24 -0
  563. brainscore_vision/models/eBarlow_lmda_001_3/test.py +1 -0
  564. brainscore_vision/models/eBarlow_lmda_01/__init__.py +9 -0
  565. brainscore_vision/models/eBarlow_lmda_01/model.py +50 -0
  566. brainscore_vision/models/eBarlow_lmda_01/requirements.txt +2 -0
  567. brainscore_vision/models/eBarlow_lmda_01/setup.py +24 -0
  568. brainscore_vision/models/eBarlow_lmda_01/test.py +1 -0
  569. brainscore_vision/models/eBarlow_lmda_01_1/__init__.py +9 -0
  570. brainscore_vision/models/eBarlow_lmda_01_1/model.py +65 -0
  571. brainscore_vision/models/eBarlow_lmda_01_1/setup.py +24 -0
  572. brainscore_vision/models/eBarlow_lmda_01_1/test.py +1 -0
  573. brainscore_vision/models/eBarlow_lmda_01_2/__init__.py +9 -0
  574. brainscore_vision/models/eBarlow_lmda_01_2/model.py +65 -0
  575. brainscore_vision/models/eBarlow_lmda_01_2/setup.py +24 -0
  576. brainscore_vision/models/eBarlow_lmda_01_2/test.py +1 -0
  577. brainscore_vision/models/eBarlow_lmda_02_1/__init__.py +9 -0
  578. brainscore_vision/models/eBarlow_lmda_02_1/model.py +65 -0
  579. brainscore_vision/models/eBarlow_lmda_02_1/setup.py +24 -0
  580. brainscore_vision/models/eBarlow_lmda_02_1/test.py +1 -0
  581. brainscore_vision/models/eBarlow_lmda_02_1000ep/__init__.py +9 -0
  582. brainscore_vision/models/eBarlow_lmda_02_1000ep/model.py +84 -0
  583. brainscore_vision/models/eBarlow_lmda_02_1000ep/setup.py +25 -0
  584. brainscore_vision/models/eBarlow_lmda_02_1000ep/test.py +1 -0
  585. brainscore_vision/models/eBarlow_lmda_02_1_full/__init__.py +9 -0
  586. brainscore_vision/models/eBarlow_lmda_02_1_full/model.py +85 -0
  587. brainscore_vision/models/eBarlow_lmda_02_1_full/setup.py +25 -0
  588. brainscore_vision/models/eBarlow_lmda_02_1_full/test.py +1 -0
  589. brainscore_vision/models/eBarlow_lmda_02_200_full/__init__.py +9 -0
  590. brainscore_vision/models/eBarlow_lmda_02_200_full/model.py +85 -0
  591. brainscore_vision/models/eBarlow_lmda_02_200_full/setup.py +25 -0
  592. brainscore_vision/models/eBarlow_lmda_02_200_full/test.py +1 -0
  593. brainscore_vision/models/eBarlow_lmda_03_1/__init__.py +9 -0
  594. brainscore_vision/models/eBarlow_lmda_03_1/model.py +65 -0
  595. brainscore_vision/models/eBarlow_lmda_03_1/setup.py +24 -0
  596. brainscore_vision/models/eBarlow_lmda_03_1/test.py +1 -0
  597. brainscore_vision/models/eBarlow_lmda_04_1/__init__.py +9 -0
  598. brainscore_vision/models/eBarlow_lmda_04_1/model.py +65 -0
  599. brainscore_vision/models/eBarlow_lmda_04_1/setup.py +24 -0
  600. brainscore_vision/models/eBarlow_lmda_04_1/test.py +1 -0
  601. brainscore_vision/models/eBarlow_lmda_05_1/__init__.py +9 -0
  602. brainscore_vision/models/eBarlow_lmda_05_1/model.py +65 -0
  603. brainscore_vision/models/eBarlow_lmda_05_1/setup.py +24 -0
  604. brainscore_vision/models/eBarlow_lmda_05_1/test.py +1 -0
  605. brainscore_vision/models/eMMCR_Mom_Vanilla_1/__init__.py +9 -0
  606. brainscore_vision/models/eMMCR_Mom_Vanilla_1/model.py +64 -0
  607. brainscore_vision/models/eMMCR_Mom_Vanilla_1/setup.py +24 -0
  608. brainscore_vision/models/eMMCR_Mom_Vanilla_1/test.py +1 -0
  609. brainscore_vision/models/eMMCR_Mom_Vanilla_2/__init__.py +9 -0
  610. brainscore_vision/models/eMMCR_Mom_Vanilla_2/model.py +64 -0
  611. brainscore_vision/models/eMMCR_Mom_Vanilla_2/setup.py +24 -0
  612. brainscore_vision/models/eMMCR_Mom_Vanilla_2/test.py +1 -0
  613. brainscore_vision/models/eMMCR_Mom_lmda_0001_1/__init__.py +9 -0
  614. brainscore_vision/models/eMMCR_Mom_lmda_0001_1/model.py +65 -0
  615. brainscore_vision/models/eMMCR_Mom_lmda_0001_1/setup.py +24 -0
  616. brainscore_vision/models/eMMCR_Mom_lmda_0001_1/test.py +1 -0
  617. brainscore_vision/models/eMMCR_Mom_lmda_001_1/__init__.py +9 -0
  618. brainscore_vision/models/eMMCR_Mom_lmda_001_1/model.py +65 -0
  619. brainscore_vision/models/eMMCR_Mom_lmda_001_1/setup.py +24 -0
  620. brainscore_vision/models/eMMCR_Mom_lmda_001_1/test.py +1 -0
  621. brainscore_vision/models/eMMCR_Mom_lmda_01_1/__init__.py +9 -0
  622. brainscore_vision/models/eMMCR_Mom_lmda_01_1/model.py +65 -0
  623. brainscore_vision/models/eMMCR_Mom_lmda_01_1/setup.py +24 -0
  624. brainscore_vision/models/eMMCR_Mom_lmda_01_1/test.py +1 -0
  625. brainscore_vision/models/eMMCR_Mom_lmda_01_2/__init__.py +9 -0
  626. brainscore_vision/models/eMMCR_Mom_lmda_01_2/model.py +65 -0
  627. brainscore_vision/models/eMMCR_Mom_lmda_01_2/setup.py +24 -0
  628. brainscore_vision/models/eMMCR_Mom_lmda_01_2/test.py +1 -0
  629. brainscore_vision/models/eMMCR_Mom_lmda_02_1/__init__.py +9 -0
  630. brainscore_vision/models/eMMCR_Mom_lmda_02_1/model.py +65 -0
  631. brainscore_vision/models/eMMCR_Mom_lmda_02_1/setup.py +24 -0
  632. brainscore_vision/models/eMMCR_Mom_lmda_02_1/test.py +1 -0
  633. brainscore_vision/models/eMMCR_Mom_lmda_03_1/__init__.py +9 -0
  634. brainscore_vision/models/eMMCR_Mom_lmda_03_1/model.py +65 -0
  635. brainscore_vision/models/eMMCR_Mom_lmda_03_1/setup.py +24 -0
  636. brainscore_vision/models/eMMCR_Mom_lmda_03_1/test.py +1 -0
  637. brainscore_vision/models/eMMCR_Mom_lmda_04_1/__init__.py +9 -0
  638. brainscore_vision/models/eMMCR_Mom_lmda_04_1/model.py +65 -0
  639. brainscore_vision/models/eMMCR_Mom_lmda_04_1/setup.py +24 -0
  640. brainscore_vision/models/eMMCR_Mom_lmda_04_1/test.py +1 -0
  641. brainscore_vision/models/eMMCR_Mom_lmda_05_1/__init__.py +9 -0
  642. brainscore_vision/models/eMMCR_Mom_lmda_05_1/model.py +65 -0
  643. brainscore_vision/models/eMMCR_Mom_lmda_05_1/setup.py +24 -0
  644. brainscore_vision/models/eMMCR_Mom_lmda_05_1/test.py +1 -0
  645. brainscore_vision/models/eMMCR_Vanilla/__init__.py +9 -0
  646. brainscore_vision/models/eMMCR_Vanilla/model.py +50 -0
  647. brainscore_vision/models/eMMCR_Vanilla/setup.py +24 -0
  648. brainscore_vision/models/eMMCR_Vanilla/test.py +1 -0
  649. brainscore_vision/models/eMMCR_VanillaV2/__init__.py +9 -0
  650. brainscore_vision/models/eMMCR_VanillaV2/model.py +50 -0
  651. brainscore_vision/models/eMMCR_VanillaV2/setup.py +24 -0
  652. brainscore_vision/models/eMMCR_VanillaV2/test.py +1 -0
  653. brainscore_vision/models/eMMCR_Vanilla_1/__init__.py +9 -0
  654. brainscore_vision/models/eMMCR_Vanilla_1/model.py +64 -0
  655. brainscore_vision/models/eMMCR_Vanilla_1/setup.py +24 -0
  656. brainscore_vision/models/eMMCR_Vanilla_1/test.py +1 -0
  657. brainscore_vision/models/eMMCR_Vanilla_2/__init__.py +9 -0
  658. brainscore_vision/models/eMMCR_Vanilla_2/model.py +64 -0
  659. brainscore_vision/models/eMMCR_Vanilla_2/setup.py +24 -0
  660. brainscore_vision/models/eMMCR_Vanilla_2/test.py +1 -0
  661. brainscore_vision/models/eMMCR_lmda_01/__init__.py +9 -0
  662. brainscore_vision/models/eMMCR_lmda_01/model.py +50 -0
  663. brainscore_vision/models/eMMCR_lmda_01/setup.py +24 -0
  664. brainscore_vision/models/eMMCR_lmda_01/test.py +1 -0
  665. brainscore_vision/models/eMMCR_lmda_01V2/__init__.py +9 -0
  666. brainscore_vision/models/eMMCR_lmda_01V2/model.py +50 -0
  667. brainscore_vision/models/eMMCR_lmda_01V2/requirements.txt +2 -0
  668. brainscore_vision/models/eMMCR_lmda_01V2/setup.py +24 -0
  669. brainscore_vision/models/eMMCR_lmda_01V2/test.py +1 -0
  670. brainscore_vision/models/eMMCR_lmda_01_1/__init__.py +9 -0
  671. brainscore_vision/models/eMMCR_lmda_01_1/model.py +65 -0
  672. brainscore_vision/models/eMMCR_lmda_01_1/setup.py +24 -0
  673. brainscore_vision/models/eMMCR_lmda_01_1/test.py +1 -0
  674. brainscore_vision/models/eMMCR_lmda_01_2/__init__.py +9 -0
  675. brainscore_vision/models/eMMCR_lmda_01_2/model.py +65 -0
  676. brainscore_vision/models/eMMCR_lmda_01_2/setup.py +24 -0
  677. brainscore_vision/models/eMMCR_lmda_01_2/test.py +1 -0
  678. brainscore_vision/models/eMMCR_lmda_01_3/__init__.py +9 -0
  679. brainscore_vision/models/eMMCR_lmda_01_3/model.py +65 -0
  680. brainscore_vision/models/eMMCR_lmda_01_3/setup.py +24 -0
  681. brainscore_vision/models/eMMCR_lmda_01_3/test.py +1 -0
  682. brainscore_vision/models/eSimCLR_Vanilla_1/__init__.py +9 -0
  683. brainscore_vision/models/eSimCLR_Vanilla_1/model.py +64 -0
  684. brainscore_vision/models/eSimCLR_Vanilla_1/setup.py +24 -0
  685. brainscore_vision/models/eSimCLR_Vanilla_1/test.py +1 -0
  686. brainscore_vision/models/eSimCLR_Vanilla_2/__init__.py +9 -0
  687. brainscore_vision/models/eSimCLR_Vanilla_2/model.py +64 -0
  688. brainscore_vision/models/eSimCLR_Vanilla_2/setup.py +24 -0
  689. brainscore_vision/models/eSimCLR_Vanilla_2/test.py +1 -0
  690. brainscore_vision/models/eSimCLR_lmda_0001_1/__init__.py +9 -0
  691. brainscore_vision/models/eSimCLR_lmda_0001_1/model.py +65 -0
  692. brainscore_vision/models/eSimCLR_lmda_0001_1/setup.py +24 -0
  693. brainscore_vision/models/eSimCLR_lmda_0001_1/test.py +1 -0
  694. brainscore_vision/models/eSimCLR_lmda_001_1/__init__.py +9 -0
  695. brainscore_vision/models/eSimCLR_lmda_001_1/model.py +65 -0
  696. brainscore_vision/models/eSimCLR_lmda_001_1/setup.py +24 -0
  697. brainscore_vision/models/eSimCLR_lmda_001_1/test.py +1 -0
  698. brainscore_vision/models/eSimCLR_lmda_01_1/__init__.py +9 -0
  699. brainscore_vision/models/eSimCLR_lmda_01_1/model.py +65 -0
  700. brainscore_vision/models/eSimCLR_lmda_01_1/setup.py +24 -0
  701. brainscore_vision/models/eSimCLR_lmda_01_1/test.py +1 -0
  702. brainscore_vision/models/eSimCLR_lmda_01_2/__init__.py +9 -0
  703. brainscore_vision/models/eSimCLR_lmda_01_2/model.py +65 -0
  704. brainscore_vision/models/eSimCLR_lmda_01_2/setup.py +24 -0
  705. brainscore_vision/models/eSimCLR_lmda_01_2/test.py +1 -0
  706. brainscore_vision/models/eSimCLR_lmda_02_1/__init__.py +9 -0
  707. brainscore_vision/models/eSimCLR_lmda_02_1/model.py +65 -0
  708. brainscore_vision/models/eSimCLR_lmda_02_1/setup.py +24 -0
  709. brainscore_vision/models/eSimCLR_lmda_02_1/test.py +1 -0
  710. brainscore_vision/models/eSimCLR_lmda_02_1_1/__init__.py +9 -0
  711. brainscore_vision/models/eSimCLR_lmda_02_1_1/model.py +65 -0
  712. brainscore_vision/models/eSimCLR_lmda_02_1_1/setup.py +24 -0
  713. brainscore_vision/models/eSimCLR_lmda_02_1_1/test.py +1 -0
  714. brainscore_vision/models/eSimCLR_lmda_03_1/__init__.py +9 -0
  715. brainscore_vision/models/eSimCLR_lmda_03_1/model.py +65 -0
  716. brainscore_vision/models/eSimCLR_lmda_03_1/setup.py +24 -0
  717. brainscore_vision/models/eSimCLR_lmda_03_1/test.py +1 -0
  718. brainscore_vision/models/eSimCLR_lmda_04_1/__init__.py +9 -0
  719. brainscore_vision/models/eSimCLR_lmda_04_1/model.py +65 -0
  720. brainscore_vision/models/eSimCLR_lmda_04_1/setup.py +24 -0
  721. brainscore_vision/models/eSimCLR_lmda_04_1/test.py +1 -0
  722. brainscore_vision/models/eSimCLR_lmda_04_1_1/__init__.py +9 -0
  723. brainscore_vision/models/eSimCLR_lmda_04_1_1/model.py +65 -0
  724. brainscore_vision/models/eSimCLR_lmda_04_1_1/setup.py +24 -0
  725. brainscore_vision/models/eSimCLR_lmda_04_1_1/test.py +1 -0
  726. brainscore_vision/models/eSimCLR_lmda_05_1/__init__.py +9 -0
  727. brainscore_vision/models/eSimCLR_lmda_05_1/model.py +65 -0
  728. brainscore_vision/models/eSimCLR_lmda_05_1/setup.py +24 -0
  729. brainscore_vision/models/eSimCLR_lmda_05_1/test.py +1 -0
  730. brainscore_vision/models/effnetb1_272x240/__init__.py +5 -0
  731. brainscore_vision/models/effnetb1_272x240/model.py +126 -0
  732. brainscore_vision/models/effnetb1_272x240/requirements.txt +3 -0
  733. brainscore_vision/models/effnetb1_272x240/test.py +9 -0
  734. brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/__init__.py +9 -0
  735. brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/model.py +111 -0
  736. brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/requirements.txt +6 -0
  737. brainscore_vision/models/effnetb1_cutmix_augmix_sam_e1_5avg_424x377/test.py +8 -0
  738. brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/__init__.py +5 -0
  739. brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/model.py +142 -0
  740. brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/requirements.txt +5 -0
  741. brainscore_vision/models/effnetb1_cutmixpatch_SAM_robust32_avge6e8e9e10_manylayers_324x288/test.py +8 -0
  742. brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/__init__.py +9 -0
  743. brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/model.py +140 -0
  744. brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/requirements.txt +5 -0
  745. brainscore_vision/models/effnetb1_cutmixpatch_augmix_robust32_avge4e7_manylayers_324x288/test.py +8 -0
  746. brainscore_vision/models/focalnet_tiny_in1k_submission/__init__.py +5 -0
  747. brainscore_vision/models/focalnet_tiny_in1k_submission/model.py +62 -0
  748. brainscore_vision/models/focalnet_tiny_in1k_submission/requirements.txt +3 -0
  749. brainscore_vision/models/focalnet_tiny_in1k_submission/test.py +8 -0
  750. brainscore_vision/models/hmax/__init__.py +7 -0
  751. brainscore_vision/models/hmax/helpers/hmax.py +438 -0
  752. brainscore_vision/models/hmax/helpers/pytorch.py +216 -0
  753. brainscore_vision/models/hmax/model.py +69 -0
  754. brainscore_vision/models/hmax/requirements.txt +5 -0
  755. brainscore_vision/models/hmax/test.py +8 -0
  756. brainscore_vision/models/inception_v3_pytorch/__init__.py +7 -0
  757. brainscore_vision/models/inception_v3_pytorch/model.py +68 -0
  758. brainscore_vision/models/inception_v3_pytorch/requirements.txt +3 -0
  759. brainscore_vision/models/inception_v3_pytorch/test.py +8 -0
  760. brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/__init__.py +7 -0
  761. brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/model.py +60 -0
  762. brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/requirements.txt +3 -0
  763. brainscore_vision/models/mobilenet_v2_1_4_224_pytorch/test.py +8 -0
  764. brainscore_vision/models/mobilevit_small/__init__.py +7 -0
  765. brainscore_vision/models/mobilevit_small/model.py +49 -0
  766. brainscore_vision/models/mobilevit_small/requirements.txt +3 -0
  767. brainscore_vision/models/mobilevit_small/test.py +8 -0
  768. brainscore_vision/models/pixels/__init__.py +8 -0
  769. brainscore_vision/models/pixels/model.py +35 -0
  770. brainscore_vision/models/pixels/test.py +15 -0
  771. brainscore_vision/models/pnasnet_large_pytorch/__init__.py +7 -0
  772. brainscore_vision/models/pnasnet_large_pytorch/model.py +59 -0
  773. brainscore_vision/models/pnasnet_large_pytorch/requirements.txt +3 -0
  774. brainscore_vision/models/pnasnet_large_pytorch/test.py +8 -0
  775. brainscore_vision/models/r101_eBarlow_Vanilla_1/__init__.py +9 -0
  776. brainscore_vision/models/r101_eBarlow_Vanilla_1/model.py +64 -0
  777. brainscore_vision/models/r101_eBarlow_Vanilla_1/setup.py +25 -0
  778. brainscore_vision/models/r101_eBarlow_Vanilla_1/test.py +1 -0
  779. brainscore_vision/models/r101_eBarlow_lmda_01_1/__init__.py +9 -0
  780. brainscore_vision/models/r101_eBarlow_lmda_01_1/model.py +65 -0
  781. brainscore_vision/models/r101_eBarlow_lmda_01_1/setup.py +25 -0
  782. brainscore_vision/models/r101_eBarlow_lmda_01_1/test.py +1 -0
  783. brainscore_vision/models/r101_eBarlow_lmda_02_1/__init__.py +9 -0
  784. brainscore_vision/models/r101_eBarlow_lmda_02_1/model.py +65 -0
  785. brainscore_vision/models/r101_eBarlow_lmda_02_1/setup.py +25 -0
  786. brainscore_vision/models/r101_eBarlow_lmda_02_1/test.py +1 -0
  787. brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/__init__.py +9 -0
  788. brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/model.py +67 -0
  789. brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/setup.py +25 -0
  790. brainscore_vision/models/r101_eBarlow_lmda_02_1_copy/test.py +1 -0
  791. brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/__init__.py +9 -0
  792. brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/model.py +66 -0
  793. brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/setup.py +25 -0
  794. brainscore_vision/models/r34_eMMCR_Mom_Vanilla_1/test.py +1 -0
  795. brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/__init__.py +9 -0
  796. brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/model.py +66 -0
  797. brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/setup.py +25 -0
  798. brainscore_vision/models/r34_eMMCR_Mom_lmda_01_1/test.py +1 -0
  799. brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/__init__.py +9 -0
  800. brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/model.py +66 -0
  801. brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/setup.py +25 -0
  802. brainscore_vision/models/r34_eMMCR_Mom_lmda_02_1/test.py +1 -0
  803. brainscore_vision/models/r50_tvpt/__init__.py +9 -0
  804. brainscore_vision/models/r50_tvpt/model.py +47 -0
  805. brainscore_vision/models/r50_tvpt/setup.py +24 -0
  806. brainscore_vision/models/r50_tvpt/test.py +1 -0
  807. brainscore_vision/models/regnet/__init__.py +14 -0
  808. brainscore_vision/models/regnet/model.py +17 -0
  809. brainscore_vision/models/regnet/requirements.txt +2 -0
  810. brainscore_vision/models/regnet/test.py +17 -0
  811. brainscore_vision/models/resnet18_imagenet21kP/__init__.py +6 -0
  812. brainscore_vision/models/resnet18_imagenet21kP/model.py +119 -0
  813. brainscore_vision/models/resnet18_imagenet21kP/setup.py +18 -0
  814. brainscore_vision/models/resnet18_imagenet21kP/test.py +0 -0
  815. brainscore_vision/models/resnet50_eMMCR_Vanilla/__init__.py +5 -0
  816. brainscore_vision/models/resnet50_eMMCR_Vanilla/model.py +59 -0
  817. brainscore_vision/models/resnet50_eMMCR_Vanilla/setup.py +24 -0
  818. brainscore_vision/models/resnet50_eMMCR_Vanilla/test.py +1 -0
  819. brainscore_vision/models/resnet50_eMMCR_VanillaV2/__init__.py +9 -0
  820. brainscore_vision/models/resnet50_eMMCR_VanillaV2/model.py +72 -0
  821. brainscore_vision/models/resnet50_eMMCR_VanillaV2/setup.py +24 -0
  822. brainscore_vision/models/resnet50_eMMCR_VanillaV2/test.py +1 -0
  823. brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/__init__.py +9 -0
  824. brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/model.py +72 -0
  825. brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/setup.py +24 -0
  826. brainscore_vision/models/resnet50_eMMCR_eqp10_lm1/test.py +1 -0
  827. brainscore_vision/models/resnet50_julios/__init__.py +5 -0
  828. brainscore_vision/models/resnet50_julios/model.py +54 -0
  829. brainscore_vision/models/resnet50_julios/setup.py +24 -0
  830. brainscore_vision/models/resnet50_julios/test.py +1 -0
  831. brainscore_vision/models/resnet50_tutorial/__init__.py +5 -0
  832. brainscore_vision/models/resnet50_tutorial/model.py +34 -0
  833. brainscore_vision/models/resnet50_tutorial/requirements.txt +2 -0
  834. brainscore_vision/models/resnet50_tutorial/test.py +8 -0
  835. brainscore_vision/models/resnet_152_v2_pytorch/__init__.py +7 -0
  836. brainscore_vision/models/resnet_152_v2_pytorch/model.py +59 -0
  837. brainscore_vision/models/resnet_152_v2_pytorch/requirements.txt +2 -0
  838. brainscore_vision/models/resnet_152_v2_pytorch/test.py +8 -0
  839. brainscore_vision/models/resnet_50_robust/__init__.py +7 -0
  840. brainscore_vision/models/resnet_50_robust/model.py +55 -0
  841. brainscore_vision/models/resnet_50_robust/requirements.txt +3 -0
  842. brainscore_vision/models/resnet_50_robust/test.py +8 -0
  843. brainscore_vision/models/resnext101_32x16d_wsl/__init__.py +7 -0
  844. brainscore_vision/models/resnext101_32x16d_wsl/model.py +38 -0
  845. brainscore_vision/models/resnext101_32x16d_wsl/requirements.txt +2 -0
  846. brainscore_vision/models/resnext101_32x16d_wsl/test.py +8 -0
  847. brainscore_vision/models/resnext101_32x32d_wsl/__init__.py +7 -0
  848. brainscore_vision/models/resnext101_32x32d_wsl/model.py +40 -0
  849. brainscore_vision/models/resnext101_32x32d_wsl/requirements.txt +2 -0
  850. brainscore_vision/models/resnext101_32x32d_wsl/test.py +8 -0
  851. brainscore_vision/models/resnext101_32x48d_wsl/__init__.py +7 -0
  852. brainscore_vision/models/resnext101_32x48d_wsl/model.py +38 -0
  853. brainscore_vision/models/resnext101_32x48d_wsl/requirements.txt +3 -0
  854. brainscore_vision/models/resnext101_32x48d_wsl/test.py +8 -0
  855. brainscore_vision/models/resnext101_32x8d_wsl/__init__.py +7 -0
  856. brainscore_vision/models/resnext101_32x8d_wsl/model.py +44 -0
  857. brainscore_vision/models/resnext101_32x8d_wsl/requirements.txt +2 -0
  858. brainscore_vision/models/resnext101_32x8d_wsl/test.py +8 -0
  859. brainscore_vision/models/temporal_model_AVID_CMA/__init__.py +17 -0
  860. brainscore_vision/models/temporal_model_AVID_CMA/model.py +92 -0
  861. brainscore_vision/models/temporal_model_AVID_CMA/requirements.txt +3 -0
  862. brainscore_vision/models/temporal_model_AVID_CMA/test.py +18 -0
  863. brainscore_vision/models/temporal_model_GDT/__init__.py +16 -0
  864. brainscore_vision/models/temporal_model_GDT/model.py +72 -0
  865. brainscore_vision/models/temporal_model_GDT/requirements.txt +3 -0
  866. brainscore_vision/models/temporal_model_GDT/test.py +17 -0
  867. brainscore_vision/models/temporal_model_S3D_text_video/__init__.py +14 -0
  868. brainscore_vision/models/temporal_model_S3D_text_video/model.py +65 -0
  869. brainscore_vision/models/temporal_model_S3D_text_video/requirements.txt +1 -0
  870. brainscore_vision/models/temporal_model_S3D_text_video/test.py +15 -0
  871. brainscore_vision/models/temporal_model_SeLaVi/__init__.py +17 -0
  872. brainscore_vision/models/temporal_model_SeLaVi/model.py +68 -0
  873. brainscore_vision/models/temporal_model_SeLaVi/requirements.txt +3 -0
  874. brainscore_vision/models/temporal_model_SeLaVi/test.py +18 -0
  875. brainscore_vision/models/temporal_model_VideoMAE/__init__.py +15 -0
  876. brainscore_vision/models/temporal_model_VideoMAE/model.py +100 -0
  877. brainscore_vision/models/temporal_model_VideoMAE/requirements.txt +6 -0
  878. brainscore_vision/models/temporal_model_VideoMAE/test.py +16 -0
  879. brainscore_vision/models/temporal_model_VideoMAEv2/__init__.py +14 -0
  880. brainscore_vision/models/temporal_model_VideoMAEv2/model.py +109 -0
  881. brainscore_vision/models/temporal_model_VideoMAEv2/requirements.txt +4 -0
  882. brainscore_vision/models/temporal_model_VideoMAEv2/test.py +16 -0
  883. brainscore_vision/models/temporal_model_mae_st/__init__.py +15 -0
  884. brainscore_vision/models/temporal_model_mae_st/model.py +120 -0
  885. brainscore_vision/models/temporal_model_mae_st/requirements.txt +3 -0
  886. brainscore_vision/models/temporal_model_mae_st/test.py +16 -0
  887. brainscore_vision/models/temporal_model_mmaction2/__init__.py +23 -0
  888. brainscore_vision/models/temporal_model_mmaction2/mmaction2.csv +24 -0
  889. brainscore_vision/models/temporal_model_mmaction2/model.py +226 -0
  890. brainscore_vision/models/temporal_model_mmaction2/requirements.txt +5 -0
  891. brainscore_vision/models/temporal_model_mmaction2/test.py +24 -0
  892. brainscore_vision/models/temporal_model_openstl/__init__.py +18 -0
  893. brainscore_vision/models/temporal_model_openstl/model.py +206 -0
  894. brainscore_vision/models/temporal_model_openstl/requirements.txt +3 -0
  895. brainscore_vision/models/temporal_model_openstl/test.py +19 -0
  896. brainscore_vision/models/temporal_model_torchvision/__init__.py +19 -0
  897. brainscore_vision/models/temporal_model_torchvision/model.py +92 -0
  898. brainscore_vision/models/temporal_model_torchvision/requirements.txt +2 -0
  899. brainscore_vision/models/temporal_model_torchvision/test.py +20 -0
  900. brainscore_vision/models/tv_efficientnet_b1/__init__.py +5 -0
  901. brainscore_vision/models/tv_efficientnet_b1/model.py +54 -0
  902. brainscore_vision/models/tv_efficientnet_b1/setup.py +24 -0
  903. brainscore_vision/models/tv_efficientnet_b1/test.py +1 -0
  904. brainscore_vision/models/voneresnet_50_non_stochastic/__init__.py +7 -0
  905. brainscore_vision/models/voneresnet_50_non_stochastic/model.py +104 -0
  906. brainscore_vision/models/voneresnet_50_non_stochastic/requirements.txt +8 -0
  907. brainscore_vision/models/voneresnet_50_non_stochastic/test.py +8 -0
  908. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/LICENSE +674 -0
  909. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/README.md +105 -0
  910. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/run.py +136 -0
  911. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/setup.py +41 -0
  912. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/train.py +383 -0
  913. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/__init__.py +71 -0
  914. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/back_ends.py +337 -0
  915. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/modules.py +126 -0
  916. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/params.py +100 -0
  917. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/utils.py +32 -0
  918. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet/vonenet.py +68 -0
  919. brainscore_vision/models/voneresnet_50_non_stochastic/vonenet/vonenet_tutorial-activations.ipynb +352 -0
  920. brainscore_vision/models/yudixie_resnet18_240719_0/__init__.py +11 -0
  921. brainscore_vision/models/yudixie_resnet18_240719_0/model.py +60 -0
  922. brainscore_vision/models/yudixie_resnet18_240719_0/setup.py +25 -0
  923. brainscore_vision/models/yudixie_resnet18_240719_0/test.py +1 -0
  924. brainscore_vision/models/yudixie_resnet18_240719_1/__init__.py +11 -0
  925. brainscore_vision/models/yudixie_resnet18_240719_1/model.py +60 -0
  926. brainscore_vision/models/yudixie_resnet18_240719_1/setup.py +25 -0
  927. brainscore_vision/models/yudixie_resnet18_240719_1/test.py +1 -0
  928. brainscore_vision/models/yudixie_resnet18_240719_10/__init__.py +11 -0
  929. brainscore_vision/models/yudixie_resnet18_240719_10/model.py +60 -0
  930. brainscore_vision/models/yudixie_resnet18_240719_10/setup.py +25 -0
  931. brainscore_vision/models/yudixie_resnet18_240719_10/test.py +1 -0
  932. brainscore_vision/models/yudixie_resnet18_240719_2/__init__.py +11 -0
  933. brainscore_vision/models/yudixie_resnet18_240719_2/model.py +60 -0
  934. brainscore_vision/models/yudixie_resnet18_240719_2/setup.py +25 -0
  935. brainscore_vision/models/yudixie_resnet18_240719_2/test.py +1 -0
  936. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/__init__.py +7 -0
  937. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/model.py +66 -0
  938. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/setup.py +24 -0
  939. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240222/test.py +1 -0
  940. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/__init__.py +7 -0
  941. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/model.py +68 -0
  942. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/setup.py +24 -0
  943. brainscore_vision/models/yudixie_resnet50_imagenet1kpret_0_240312/test.py +1 -0
  944. brainscore_vision/submission/__init__.py +0 -0
  945. brainscore_vision/submission/actions_helpers.py +153 -0
  946. brainscore_vision/submission/config.py +7 -0
  947. brainscore_vision/submission/endpoints.py +58 -0
  948. brainscore_vision/utils/__init__.py +91 -0
  949. brainscore_vision-2.1.dist-info/LICENSE +11 -0
  950. brainscore_vision-2.1.dist-info/METADATA +152 -0
  951. brainscore_vision-2.1.dist-info/RECORD +1009 -0
  952. brainscore_vision-2.1.dist-info/WHEEL +5 -0
  953. brainscore_vision-2.1.dist-info/top_level.txt +4 -0
  954. docs/Makefile +20 -0
  955. docs/source/conf.py +78 -0
  956. docs/source/index.rst +21 -0
  957. docs/source/modules/api_reference.rst +10 -0
  958. docs/source/modules/benchmarks.rst +8 -0
  959. docs/source/modules/brainscore_submission.png +0 -0
  960. docs/source/modules/developer_clarifications.rst +36 -0
  961. docs/source/modules/metrics.rst +8 -0
  962. docs/source/modules/model_interface.rst +8 -0
  963. docs/source/modules/submission.rst +112 -0
  964. docs/source/modules/tutorial_screenshots/brain-score_logo.png +0 -0
  965. docs/source/modules/tutorial_screenshots/final_submit.png +0 -0
  966. docs/source/modules/tutorial_screenshots/init_py.png +0 -0
  967. docs/source/modules/tutorial_screenshots/mms.png +0 -0
  968. docs/source/modules/tutorial_screenshots/setup.png +0 -0
  969. docs/source/modules/tutorial_screenshots/sms.png +0 -0
  970. docs/source/modules/tutorial_screenshots/subfolders.png +0 -0
  971. docs/source/modules/utils.rst +22 -0
  972. migrations/2020-12-20_pkl_to_nc.py +90 -0
  973. tests/__init__.py +6 -0
  974. tests/conftest.py +26 -0
  975. tests/test_benchmark_helpers/__init__.py +0 -0
  976. tests/test_benchmark_helpers/test_screen.py +75 -0
  977. tests/test_examples.py +41 -0
  978. tests/test_integration.py +43 -0
  979. tests/test_metric_helpers/__init__.py +0 -0
  980. tests/test_metric_helpers/test_temporal.py +80 -0
  981. tests/test_metric_helpers/test_transformations.py +171 -0
  982. tests/test_metric_helpers/test_xarray_utils.py +85 -0
  983. tests/test_model_helpers/__init__.py +6 -0
  984. tests/test_model_helpers/activations/__init__.py +0 -0
  985. tests/test_model_helpers/activations/test___init__.py +404 -0
  986. tests/test_model_helpers/brain_transformation/__init__.py +0 -0
  987. tests/test_model_helpers/brain_transformation/test___init__.py +18 -0
  988. tests/test_model_helpers/brain_transformation/test_behavior.py +181 -0
  989. tests/test_model_helpers/brain_transformation/test_neural.py +70 -0
  990. tests/test_model_helpers/brain_transformation/test_temporal.py +66 -0
  991. tests/test_model_helpers/temporal/__init__.py +0 -0
  992. tests/test_model_helpers/temporal/activations/__init__.py +0 -0
  993. tests/test_model_helpers/temporal/activations/test_extractor.py +96 -0
  994. tests/test_model_helpers/temporal/activations/test_inferencer.py +189 -0
  995. tests/test_model_helpers/temporal/activations/test_inputs.py +103 -0
  996. tests/test_model_helpers/temporal/brain_transformation/__init__.py +0 -0
  997. tests/test_model_helpers/temporal/brain_transformation/test_temporal_ops.py +122 -0
  998. tests/test_model_helpers/temporal/test_utils.py +61 -0
  999. tests/test_model_helpers/test_generic_plugin_tests.py +310 -0
  1000. tests/test_model_helpers/test_imports.py +10 -0
  1001. tests/test_model_helpers/test_s3.py +38 -0
  1002. tests/test_models.py +15 -0
  1003. tests/test_stimuli.py +0 -0
  1004. tests/test_submission/__init__.py +0 -0
  1005. tests/test_submission/mock_config.py +3 -0
  1006. tests/test_submission/test_actions_helpers.py +67 -0
  1007. tests/test_submission/test_db.py +54 -0
  1008. tests/test_submission/test_endpoints.py +125 -0
  1009. tests/test_utils.py +21 -0
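The hunk below adds a Jupyter notebook that exercises the brainio_collection access API (list_stimulus_sets, list_assemblies, get_stimulus_set, get_assembly) against the FreemanZiemba2013 aperture stimuli and recordings. A minimal sketch of the same access pattern, assuming brainio_collection is installed and its data packages are reachable — the column selections and print calls here are illustrative and not taken from the notebook itself:

    from brainio_collection import list_stimulus_sets, list_assemblies, get_stimulus_set, get_assembly

    # Names of all packaged stimulus sets and assemblies,
    # e.g. 'movshon.FreemanZiemba2013.aperture-public'.
    print(list_stimulus_sets())
    print(list_assemblies())

    # A stimulus set behaves like a pandas DataFrame: one row per image,
    # with image_id, image_file_name, texture_type, texture_family, ...
    stimuli = get_stimulus_set("movshon.FreemanZiemba2013.aperture-public")
    print(stimuli[["image_id", "texture_type", "texture_family"]].head())

    # An assembly is an xarray-based NeuronRecordingAssembly
    # (neuroid x time_bin x presentation) that carries its stimulus set.
    assembly = get_assembly("movshon.FreemanZiemba2013.aperture.public")
    print(assembly.dims, assembly.shape)
    print(assembly.stimulus_set.head())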
@@ -0,0 +1,2228 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "source": [
6
+ "from brainio_collection import list_stimulus_sets, list_assemblies, get_stimulus_set, get_assembly"
7
+ ],
8
+ "outputs": [
9
+ {
10
+ "output_type": "stream",
11
+ "name": "stderr",
12
+ "text": [
13
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/merge.py:10: FutureWarning: The Panel class is removed from pandas. Accessing it from the top-level namespace will also be removed in the next version\n",
14
+ " PANDAS_TYPES = (pd.Series, pd.DataFrame, pd.Panel)\n"
15
+ ]
16
+ }
17
+ ],
18
+ "execution_count": 1,
19
+ "metadata": {
20
+ "collapsed": false,
21
+ "jupyter": {
22
+ "source_hidden": false,
23
+ "outputs_hidden": false
24
+ },
25
+ "nteract": {
26
+ "transient": {
27
+ "deleting": false
28
+ }
29
+ },
30
+ "execution": {
31
+ "iopub.status.busy": "2020-02-27T19:28:09.361Z",
32
+ "iopub.execute_input": "2020-02-27T19:28:09.374Z",
33
+ "iopub.status.idle": "2020-02-27T19:28:10.278Z",
34
+ "shell.execute_reply": "2020-02-27T19:28:10.299Z"
35
+ }
36
+ }
37
+ },
38
+ {
39
+ "cell_type": "code",
40
+ "source": [
41
+ "list_stimulus_sets()"
42
+ ],
43
+ "outputs": [
44
+ {
45
+ "output_type": "execute_result",
46
+ "execution_count": 7,
47
+ "data": {
48
+ "text/plain": [
49
+ "['dicarlo.hvm',\n",
50
+ " 'gallant.David2004',\n",
51
+ " 'tolias.Cadena2017',\n",
52
+ " 'movshon.FreemanZiemba2013',\n",
53
+ " 'dicarlo.objectome.public',\n",
54
+ " 'dicarlo.objectome.private',\n",
55
+ " 'dicarlo.Kar2019',\n",
56
+ " 'dicarlo.Kar2018cocogray',\n",
57
+ " 'dicarlo.hvm-public',\n",
58
+ " 'dicarlo.hvm-private',\n",
59
+ " 'movshon.FreemanZiemba2013-public',\n",
60
+ " 'movshon.FreemanZiemba2013-private',\n",
61
+ " 'klab.Zhang2018.search_obj_array',\n",
62
+ " 'movshon.FreemanZiemba2013.aperture-public',\n",
63
+ " 'movshon.FreemanZiemba2013.aperture-private',\n",
64
+ " 'movshon.FreemanZiemba2013.aperture']"
65
+ ]
66
+ },
67
+ "metadata": {}
68
+ }
69
+ ],
70
+ "execution_count": 7,
71
+ "metadata": {
72
+ "collapsed": false,
73
+ "jupyter": {
74
+ "source_hidden": false,
75
+ "outputs_hidden": false
76
+ },
77
+ "nteract": {
78
+ "transient": {
79
+ "deleting": false
80
+ }
81
+ },
82
+ "execution": {
83
+ "iopub.status.busy": "2020-02-27T17:13:17.610Z",
84
+ "iopub.execute_input": "2020-02-27T17:13:17.613Z",
85
+ "iopub.status.idle": "2020-02-27T17:13:17.623Z",
86
+ "shell.execute_reply": "2020-02-27T17:13:17.625Z"
87
+ }
88
+ }
89
+ },
90
+ {
91
+ "cell_type": "code",
92
+ "source": [
93
+ "list_assemblies()"
94
+ ],
95
+ "outputs": [
96
+ {
97
+ "output_type": "execute_result",
98
+ "execution_count": 8,
99
+ "data": {
100
+ "text/plain": [
101
+ "['dicarlo.Majaj2015',\n",
102
+ " 'gallant.David2004',\n",
103
+ " 'dicarlo.Majaj2015.temporal',\n",
104
+ " 'tolias.Cadena2017',\n",
105
+ " 'movshon.FreemanZiemba2013',\n",
106
+ " 'dicarlo.Rajalingham2018.public',\n",
107
+ " 'dicarlo.Rajalingham2018.private',\n",
108
+ " 'dicarlo.Kar2019',\n",
109
+ " 'dicarlo.Majaj2015.temporal-10ms',\n",
110
+ " 'dicarlo.Kar2018hvm',\n",
111
+ " 'dicarlo.Kar2018cocogray',\n",
112
+ " 'dicarlo.Majaj2015.public',\n",
113
+ " 'dicarlo.Majaj2015.private',\n",
114
+ " 'dicarlo.Majaj2015.temporal.public',\n",
115
+ " 'dicarlo.Majaj2015.temporal.private',\n",
116
+ " 'movshon.FreemanZiemba2013.public',\n",
117
+ " 'movshon.FreemanZiemba2013.private',\n",
118
+ " 'klab.Zhang2018search_obj_array',\n",
119
+ " 'movshon.FreemanZiemba2013.aperture.public',\n",
120
+ " 'movshon.FreemanZiemba2013.aperture.private',\n",
121
+ " 'movshon.FreemanZiemba2013.aperture']"
122
+ ]
123
+ },
124
+ "metadata": {}
125
+ }
126
+ ],
127
+ "execution_count": 8,
128
+ "metadata": {
129
+ "collapsed": false,
130
+ "jupyter": {
131
+ "source_hidden": false,
132
+ "outputs_hidden": false
133
+ },
134
+ "nteract": {
135
+ "transient": {
136
+ "deleting": false
137
+ }
138
+ },
139
+ "execution": {
140
+ "iopub.status.busy": "2020-02-27T17:13:20.087Z",
141
+ "iopub.execute_input": "2020-02-27T17:13:20.091Z",
142
+ "iopub.status.idle": "2020-02-27T17:13:20.102Z",
143
+ "shell.execute_reply": "2020-02-27T17:13:20.104Z"
144
+ }
145
+ }
146
+ },
147
+ {
148
+ "cell_type": "code",
149
+ "source": [
150
+ "stim_pub = get_stimulus_set(\"movshon.FreemanZiemba2013.aperture-public\")\n",
151
+ "stim_pub"
152
+ ],
153
+ "outputs": [
154
+ {
155
+ "output_type": "stream",
156
+ "name": "stderr",
157
+ "text": [
158
+ "brainio-contrib/image_movshon_FreemanZiemba2013_aperture-public.zip: 100%|██████████| 5.31M/5.31M [00:00<00:00, 14.5MB/s]\n"
159
+ ]
160
+ },
161
+ {
162
+ "output_type": "execute_result",
163
+ "execution_count": 4,
164
+ "data": {
165
+ "text/plain": [
166
+ " id ... image_id_without_aperture\n",
167
+ "0 133032 ... 6ddcf4b55f9151ac77250706510dcd00fdfb466e\n",
168
+ "1 133033 ... cd5d18cc4d174dc89d9c07951c208fdceb568ac2\n",
169
+ "2 133035 ... 95689155a87077e7a5c5dfd93aa8cdf51da16e04\n",
170
+ "3 133040 ... ac2a808a4bd86810f4ae9aedce65e1ec79e03ba4\n",
171
+ "4 133041 ... 8278ab60ef19334b2dc54fa0b06091a968ef703d\n",
172
+ ".. ... ... ...\n",
173
+ "130 133468 ... 01e8fc3aebd53914cc4c0a9321a8759054bd79f3\n",
174
+ "131 133469 ... 81581e8d38dec64d3746851aa4e45e33457d0bf8\n",
175
+ "132 133470 ... 43de5603b357dce25a030bbc3611abdaef5cb805\n",
176
+ "133 133472 ... 844305832982a3f3482d2fbf1e36edaeb1528387\n",
177
+ "134 133473 ... 54f31d7e051b8c28164ba0253fd21b619d817e3d\n",
178
+ "\n",
179
+ "[135 rows x 9 columns]"
180
+ ],
181
+ "text/html": [
182
+ "<div>\n",
183
+ "<style scoped>\n",
184
+ " .dataframe tbody tr th:only-of-type {\n",
185
+ " vertical-align: middle;\n",
186
+ " }\n",
187
+ "\n",
188
+ " .dataframe tbody tr th {\n",
189
+ " vertical-align: top;\n",
190
+ " }\n",
191
+ "\n",
192
+ " .dataframe thead th {\n",
193
+ " text-align: right;\n",
194
+ " }\n",
195
+ "</style>\n",
196
+ "<table border=\"1\" class=\"dataframe\">\n",
197
+ " <thead>\n",
198
+ " <tr style=\"text-align: right;\">\n",
199
+ " <th></th>\n",
200
+ " <th>id</th>\n",
201
+ " <th>image_id</th>\n",
202
+ " <th>image_file_sha1</th>\n",
203
+ " <th>image_file_name</th>\n",
204
+ " <th>texture_type</th>\n",
205
+ " <th>texture_family</th>\n",
206
+ " <th>sample</th>\n",
207
+ " <th>resolution</th>\n",
208
+ " <th>image_id_without_aperture</th>\n",
209
+ " </tr>\n",
210
+ " </thead>\n",
211
+ " <tbody>\n",
212
+ " <tr>\n",
213
+ " <th>0</th>\n",
214
+ " <td>133032</td>\n",
215
+ " <td>21041db1f26c142812a66277c2957fb3e2070916</td>\n",
216
+ " <td>21041db1f26c142812a66277c2957fb3e2070916</td>\n",
217
+ " <td>noise-320x320-im38-smp5.png</td>\n",
218
+ " <td>noise</td>\n",
219
+ " <td>38</td>\n",
220
+ " <td>5</td>\n",
221
+ " <td>320x320</td>\n",
222
+ " <td>6ddcf4b55f9151ac77250706510dcd00fdfb466e</td>\n",
223
+ " </tr>\n",
224
+ " <tr>\n",
225
+ " <th>1</th>\n",
226
+ " <td>133033</td>\n",
227
+ " <td>0d9074b184dd4abbd8dd79500d8869e90e3759f0</td>\n",
228
+ " <td>0d9074b184dd4abbd8dd79500d8869e90e3759f0</td>\n",
229
+ " <td>tex-320x320-im393-smp15.png</td>\n",
230
+ " <td>texture</td>\n",
231
+ " <td>393</td>\n",
232
+ " <td>15</td>\n",
233
+ " <td>320x320</td>\n",
234
+ " <td>cd5d18cc4d174dc89d9c07951c208fdceb568ac2</td>\n",
235
+ " </tr>\n",
236
+ " <tr>\n",
237
+ " <th>2</th>\n",
238
+ " <td>133035</td>\n",
239
+ " <td>28528e98f687f3790cb2d35ef1374aef0ff12bb9</td>\n",
240
+ " <td>28528e98f687f3790cb2d35ef1374aef0ff12bb9</td>\n",
241
+ " <td>tex-320x320-im30-smp3.png</td>\n",
242
+ " <td>texture</td>\n",
243
+ " <td>30</td>\n",
244
+ " <td>3</td>\n",
245
+ " <td>320x320</td>\n",
246
+ " <td>95689155a87077e7a5c5dfd93aa8cdf51da16e04</td>\n",
247
+ " </tr>\n",
248
+ " <tr>\n",
249
+ " <th>3</th>\n",
250
+ " <td>133040</td>\n",
251
+ " <td>22eb327d95d9d18fc3cfdb3cb7122e49eb8cd5b9</td>\n",
252
+ " <td>22eb327d95d9d18fc3cfdb3cb7122e49eb8cd5b9</td>\n",
253
+ " <td>noise-320x320-im18-smp13.png</td>\n",
254
+ " <td>noise</td>\n",
255
+ " <td>18</td>\n",
256
+ " <td>13</td>\n",
257
+ " <td>320x320</td>\n",
258
+ " <td>ac2a808a4bd86810f4ae9aedce65e1ec79e03ba4</td>\n",
259
+ " </tr>\n",
260
+ " <tr>\n",
261
+ " <th>4</th>\n",
262
+ " <td>133041</td>\n",
263
+ " <td>5a49c84be5539e3b1b7b4f767c74b1355cf5145a</td>\n",
264
+ " <td>5a49c84be5539e3b1b7b4f767c74b1355cf5145a</td>\n",
265
+ " <td>noise-320x320-im18-smp12.png</td>\n",
266
+ " <td>noise</td>\n",
267
+ " <td>18</td>\n",
268
+ " <td>12</td>\n",
269
+ " <td>320x320</td>\n",
270
+ " <td>8278ab60ef19334b2dc54fa0b06091a968ef703d</td>\n",
271
+ " </tr>\n",
272
+ " <tr>\n",
273
+ " <th>...</th>\n",
274
+ " <td>...</td>\n",
275
+ " <td>...</td>\n",
276
+ " <td>...</td>\n",
277
+ " <td>...</td>\n",
278
+ " <td>...</td>\n",
279
+ " <td>...</td>\n",
280
+ " <td>...</td>\n",
281
+ " <td>...</td>\n",
282
+ " <td>...</td>\n",
283
+ " </tr>\n",
284
+ " <tr>\n",
285
+ " <th>130</th>\n",
286
+ " <td>133468</td>\n",
287
+ " <td>65a263b58b098973995a40d39785f1776b3e757d</td>\n",
288
+ " <td>65a263b58b098973995a40d39785f1776b3e757d</td>\n",
289
+ " <td>noise-320x320-im327-smp7.png</td>\n",
290
+ " <td>noise</td>\n",
291
+ " <td>327</td>\n",
292
+ " <td>7</td>\n",
293
+ " <td>320x320</td>\n",
294
+ " <td>01e8fc3aebd53914cc4c0a9321a8759054bd79f3</td>\n",
295
+ " </tr>\n",
296
+ " <tr>\n",
297
+ " <th>131</th>\n",
298
+ " <td>133469</td>\n",
299
+ " <td>1e4309bec2dc7f15d30dcb5768d6f5ae06928086</td>\n",
300
+ " <td>1e4309bec2dc7f15d30dcb5768d6f5ae06928086</td>\n",
301
+ " <td>noise-320x320-im38-smp8.png</td>\n",
302
+ " <td>noise</td>\n",
303
+ " <td>38</td>\n",
304
+ " <td>8</td>\n",
305
+ " <td>320x320</td>\n",
306
+ " <td>81581e8d38dec64d3746851aa4e45e33457d0bf8</td>\n",
307
+ " </tr>\n",
308
+ " <tr>\n",
309
+ " <th>132</th>\n",
310
+ " <td>133470</td>\n",
311
+ " <td>bad7530828a8c6aab2ebaa83f34a1353efe0c395</td>\n",
312
+ " <td>bad7530828a8c6aab2ebaa83f34a1353efe0c395</td>\n",
313
+ " <td>noise-320x320-im56-smp6.png</td>\n",
314
+ " <td>noise</td>\n",
315
+ " <td>56</td>\n",
316
+ " <td>6</td>\n",
317
+ " <td>320x320</td>\n",
318
+ " <td>43de5603b357dce25a030bbc3611abdaef5cb805</td>\n",
319
+ " </tr>\n",
320
+ " <tr>\n",
321
+ " <th>133</th>\n",
322
+ " <td>133472</td>\n",
323
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
324
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
325
+ " <td>tex-320x320-im48-smp6.png</td>\n",
326
+ " <td>texture</td>\n",
327
+ " <td>48</td>\n",
328
+ " <td>6</td>\n",
329
+ " <td>320x320</td>\n",
330
+ " <td>844305832982a3f3482d2fbf1e36edaeb1528387</td>\n",
331
+ " </tr>\n",
332
+ " <tr>\n",
333
+ " <th>134</th>\n",
334
+ " <td>133473</td>\n",
335
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
336
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
337
+ " <td>tex-320x320-im30-smp10.png</td>\n",
338
+ " <td>texture</td>\n",
339
+ " <td>30</td>\n",
340
+ " <td>10</td>\n",
341
+ " <td>320x320</td>\n",
342
+ " <td>54f31d7e051b8c28164ba0253fd21b619d817e3d</td>\n",
343
+ " </tr>\n",
344
+ " </tbody>\n",
345
+ "</table>\n",
346
+ "<p>135 rows × 9 columns</p>\n",
347
+ "</div>"
348
+ ]
349
+ },
350
+ "metadata": {}
351
+ }
352
+ ],
353
+ "execution_count": 4,
354
+ "metadata": {
355
+ "collapsed": false,
356
+ "jupyter": {
357
+ "source_hidden": false,
358
+ "outputs_hidden": false
359
+ },
360
+ "nteract": {
361
+ "transient": {
362
+ "deleting": false
363
+ }
364
+ },
365
+ "execution": {
366
+ "iopub.status.busy": "2020-02-26T22:20:28.899Z",
367
+ "iopub.execute_input": "2020-02-26T22:20:28.903Z",
368
+ "iopub.status.idle": "2020-02-26T22:20:29.891Z",
369
+ "shell.execute_reply": "2020-02-26T22:20:29.938Z"
370
+ }
371
+ }
372
+ },
373
+ {
374
+ "cell_type": "code",
375
+ "source": [
376
+ "assy_pub = get_assembly(\"movshon.FreemanZiemba2013.aperture.public\")\n",
377
+ "assy_pub"
378
+ ],
379
+ "outputs": [
380
+ {
381
+ "output_type": "stream",
382
+ "name": "stderr",
383
+ "text": [
384
+ "brainio-contrib/assy_movshon_FreemanZiemba2013_aperture_public.nc: 100%|██████████| 1.33G/1.33G [00:13<00:00, 96.6MB/s]\n",
385
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataarray.py:217: FutureWarning: The Panel class is removed from pandas. Accessing it from the top-level namespace will also be removed in the next version\n",
386
+ " elif isinstance(data, pd.Panel):\n",
387
+ "/Users/jjpr/dev/brainio_base/brainio_base/assemblies.py:213: FutureWarning: The inplace argument has been deprecated and will be removed in xarray 0.12.0.\n",
388
+ " xr_data.set_index(append=True, inplace=True, **coords_d)\n",
389
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataset.py:167: FutureWarning: the 'labels' keyword is deprecated, use 'codes' instead\n",
390
+ " idx = pd.MultiIndex(labels=labels, levels=levels, names=names)\n"
391
+ ]
392
+ },
393
+ {
394
+ "output_type": "execute_result",
395
+ "execution_count": 5,
396
+ "data": {
397
+ "text/plain": [
398
+ "<xarray.NeuronRecordingAssembly 'movshon.FreemanZiemba2013.aperture.public' (neuroid: 205, time_bin: 300, presentation: 2700)>\n",
399
+ "array([[[0., 0., ..., 0., 0.],\n",
400
+ " [0., 0., ..., 0., 0.],\n",
401
+ " ...,\n",
402
+ " [0., 0., ..., 0., 0.],\n",
403
+ " [0., 0., ..., 0., 0.]],\n",
404
+ "\n",
405
+ " [[0., 0., ..., 0., 0.],\n",
406
+ " [0., 0., ..., 0., 1.],\n",
407
+ " ...,\n",
408
+ " [0., 0., ..., 0., 0.],\n",
409
+ " [0., 0., ..., 0., 0.]],\n",
410
+ "\n",
411
+ " ...,\n",
412
+ "\n",
413
+ " [[0., 0., ..., 0., 0.],\n",
414
+ " [0., 0., ..., 0., 0.],\n",
415
+ " ...,\n",
416
+ " [0., 0., ..., 0., 0.],\n",
417
+ " [0., 0., ..., 0., 0.]],\n",
418
+ "\n",
419
+ " [[0., 0., ..., 0., 0.],\n",
420
+ " [0., 0., ..., 0., 0.],\n",
421
+ " ...,\n",
422
+ " [0., 0., ..., 1., 0.],\n",
423
+ " [0., 0., ..., 0., 0.]]])\n",
424
+ "Coordinates:\n",
425
+ " * neuroid (neuroid) MultiIndex\n",
426
+ " - neuroid_id (neuroid) int64 1 2 3 4 5 6 ... 25 26 27 28 29 30\n",
427
+ " - region (neuroid) object 'V1' 'V1' 'V1' ... 'V1' 'V1'\n",
428
+ " * time_bin (time_bin) MultiIndex\n",
429
+ " - time_bin_start (time_bin) int64 0 1 2 3 4 5 ... 25 26 27 28 29\n",
430
+ " - time_bin_end (time_bin) int64 1 2 3 4 5 6 ... 26 27 28 29 30\n",
431
+ " * presentation (presentation) MultiIndex\n",
432
+ " - repetition (presentation) int64 0 1 2 3 4 5 ... 4 5 6 7 8 9\n",
433
+ " - image_id (presentation) object 'dfa618e0503a4251582450e88daf0c21a5abed95' ... '4ba2605a32ef862bf6f1fc4032aa25a4381f0114'\n",
434
+ " - id (presentation) int64 133388 133388 ... 133453\n",
435
+ " - image_file_sha1 (presentation) object 'dfa618e0503a4251582450e88daf0c21a5abed95' ... '4ba2605a32ef862bf6f1fc4032aa25a4381f0114'\n",
436
+ " - image_file_name (presentation) object 'noise-320x320-im327-smp2.png' ... 'noise-320x320-im327-smp6.png'\n",
437
+ " - texture_type (presentation) object 'noise' 'noise' ... 'noise'\n",
438
+ " - texture_family (presentation) int64 327 327 327 ... 327 327 327\n",
439
+ " - sample (presentation) int64 2 2 2 2 2 2 ... 6 6 6 6 6 6\n",
440
+ " - resolution (presentation) object '320x320' ... '320x320'\n",
441
+ " - image_id_without_aperture (presentation) object '0e8609dc2778a848d7ed8b355d9331160ac2974d' ... '888526f132a230245a15ef3aa10ca05cac246574'\n",
442
+ "Attributes:\n",
443
+ " stimulus_set_name: movshon.FreemanZiemba2013.aperture-public\n",
444
+ " stimulus_set: id ... image_id_without_ape..."
445
+ ]
446
+ },
447
+ "metadata": {}
448
+ }
449
+ ],
450
+ "execution_count": 5,
451
+ "metadata": {
452
+ "collapsed": false,
453
+ "jupyter": {
454
+ "source_hidden": false,
455
+ "outputs_hidden": false
456
+ },
457
+ "nteract": {
458
+ "transient": {
459
+ "deleting": false
460
+ }
461
+ },
462
+ "execution": {
463
+ "iopub.status.busy": "2020-02-26T22:38:54.050Z",
464
+ "iopub.execute_input": "2020-02-26T22:38:54.054Z",
465
+ "iopub.status.idle": "2020-02-26T22:39:11.640Z",
466
+ "shell.execute_reply": "2020-02-26T22:39:11.655Z"
467
+ }
468
+ }
469
+ },
470
+ {
471
+ "cell_type": "code",
472
+ "source": [
473
+ "assy_pub.stimulus_set"
474
+ ],
475
+ "outputs": [
476
+ {
477
+ "output_type": "execute_result",
478
+ "execution_count": 6,
479
+ "data": {
480
+ "text/plain": [
481
+ " id ... image_id_without_aperture\n",
482
+ "0 133032 ... 6ddcf4b55f9151ac77250706510dcd00fdfb466e\n",
483
+ "1 133033 ... cd5d18cc4d174dc89d9c07951c208fdceb568ac2\n",
484
+ "2 133035 ... 95689155a87077e7a5c5dfd93aa8cdf51da16e04\n",
485
+ "3 133040 ... ac2a808a4bd86810f4ae9aedce65e1ec79e03ba4\n",
486
+ "4 133041 ... 8278ab60ef19334b2dc54fa0b06091a968ef703d\n",
487
+ ".. ... ... ...\n",
488
+ "130 133468 ... 01e8fc3aebd53914cc4c0a9321a8759054bd79f3\n",
489
+ "131 133469 ... 81581e8d38dec64d3746851aa4e45e33457d0bf8\n",
490
+ "132 133470 ... 43de5603b357dce25a030bbc3611abdaef5cb805\n",
491
+ "133 133472 ... 844305832982a3f3482d2fbf1e36edaeb1528387\n",
492
+ "134 133473 ... 54f31d7e051b8c28164ba0253fd21b619d817e3d\n",
493
+ "\n",
494
+ "[135 rows x 9 columns]"
495
+ ],
496
+ "text/html": [
497
+ "<div>\n",
498
+ "<style scoped>\n",
499
+ " .dataframe tbody tr th:only-of-type {\n",
500
+ " vertical-align: middle;\n",
501
+ " }\n",
502
+ "\n",
503
+ " .dataframe tbody tr th {\n",
504
+ " vertical-align: top;\n",
505
+ " }\n",
506
+ "\n",
507
+ " .dataframe thead th {\n",
508
+ " text-align: right;\n",
509
+ " }\n",
510
+ "</style>\n",
511
+ "<table border=\"1\" class=\"dataframe\">\n",
512
+ " <thead>\n",
513
+ " <tr style=\"text-align: right;\">\n",
514
+ " <th></th>\n",
515
+ " <th>id</th>\n",
516
+ " <th>image_id</th>\n",
517
+ " <th>image_file_sha1</th>\n",
518
+ " <th>image_file_name</th>\n",
519
+ " <th>texture_type</th>\n",
520
+ " <th>texture_family</th>\n",
521
+ " <th>sample</th>\n",
522
+ " <th>resolution</th>\n",
523
+ " <th>image_id_without_aperture</th>\n",
524
+ " </tr>\n",
525
+ " </thead>\n",
526
+ " <tbody>\n",
527
+ " <tr>\n",
528
+ " <th>0</th>\n",
529
+ " <td>133032</td>\n",
530
+ " <td>21041db1f26c142812a66277c2957fb3e2070916</td>\n",
531
+ " <td>21041db1f26c142812a66277c2957fb3e2070916</td>\n",
532
+ " <td>noise-320x320-im38-smp5.png</td>\n",
533
+ " <td>noise</td>\n",
534
+ " <td>38</td>\n",
535
+ " <td>5</td>\n",
536
+ " <td>320x320</td>\n",
537
+ " <td>6ddcf4b55f9151ac77250706510dcd00fdfb466e</td>\n",
538
+ " </tr>\n",
539
+ " <tr>\n",
540
+ " <th>1</th>\n",
541
+ " <td>133033</td>\n",
542
+ " <td>0d9074b184dd4abbd8dd79500d8869e90e3759f0</td>\n",
543
+ " <td>0d9074b184dd4abbd8dd79500d8869e90e3759f0</td>\n",
544
+ " <td>tex-320x320-im393-smp15.png</td>\n",
545
+ " <td>texture</td>\n",
546
+ " <td>393</td>\n",
547
+ " <td>15</td>\n",
548
+ " <td>320x320</td>\n",
549
+ " <td>cd5d18cc4d174dc89d9c07951c208fdceb568ac2</td>\n",
550
+ " </tr>\n",
551
+ " <tr>\n",
552
+ " <th>2</th>\n",
553
+ " <td>133035</td>\n",
554
+ " <td>28528e98f687f3790cb2d35ef1374aef0ff12bb9</td>\n",
555
+ " <td>28528e98f687f3790cb2d35ef1374aef0ff12bb9</td>\n",
556
+ " <td>tex-320x320-im30-smp3.png</td>\n",
557
+ " <td>texture</td>\n",
558
+ " <td>30</td>\n",
559
+ " <td>3</td>\n",
560
+ " <td>320x320</td>\n",
561
+ " <td>95689155a87077e7a5c5dfd93aa8cdf51da16e04</td>\n",
562
+ " </tr>\n",
563
+ " <tr>\n",
564
+ " <th>3</th>\n",
565
+ " <td>133040</td>\n",
566
+ " <td>22eb327d95d9d18fc3cfdb3cb7122e49eb8cd5b9</td>\n",
567
+ " <td>22eb327d95d9d18fc3cfdb3cb7122e49eb8cd5b9</td>\n",
568
+ " <td>noise-320x320-im18-smp13.png</td>\n",
569
+ " <td>noise</td>\n",
570
+ " <td>18</td>\n",
571
+ " <td>13</td>\n",
572
+ " <td>320x320</td>\n",
573
+ " <td>ac2a808a4bd86810f4ae9aedce65e1ec79e03ba4</td>\n",
574
+ " </tr>\n",
575
+ " <tr>\n",
576
+ " <th>4</th>\n",
577
+ " <td>133041</td>\n",
578
+ " <td>5a49c84be5539e3b1b7b4f767c74b1355cf5145a</td>\n",
579
+ " <td>5a49c84be5539e3b1b7b4f767c74b1355cf5145a</td>\n",
580
+ " <td>noise-320x320-im18-smp12.png</td>\n",
581
+ " <td>noise</td>\n",
582
+ " <td>18</td>\n",
583
+ " <td>12</td>\n",
584
+ " <td>320x320</td>\n",
585
+ " <td>8278ab60ef19334b2dc54fa0b06091a968ef703d</td>\n",
586
+ " </tr>\n",
587
+ " <tr>\n",
588
+ " <th>...</th>\n",
589
+ " <td>...</td>\n",
590
+ " <td>...</td>\n",
591
+ " <td>...</td>\n",
592
+ " <td>...</td>\n",
593
+ " <td>...</td>\n",
594
+ " <td>...</td>\n",
595
+ " <td>...</td>\n",
596
+ " <td>...</td>\n",
597
+ " <td>...</td>\n",
598
+ " </tr>\n",
599
+ " <tr>\n",
600
+ " <th>130</th>\n",
601
+ " <td>133468</td>\n",
602
+ " <td>65a263b58b098973995a40d39785f1776b3e757d</td>\n",
603
+ " <td>65a263b58b098973995a40d39785f1776b3e757d</td>\n",
604
+ " <td>noise-320x320-im327-smp7.png</td>\n",
605
+ " <td>noise</td>\n",
606
+ " <td>327</td>\n",
607
+ " <td>7</td>\n",
608
+ " <td>320x320</td>\n",
609
+ " <td>01e8fc3aebd53914cc4c0a9321a8759054bd79f3</td>\n",
610
+ " </tr>\n",
611
+ " <tr>\n",
612
+ " <th>131</th>\n",
613
+ " <td>133469</td>\n",
614
+ " <td>1e4309bec2dc7f15d30dcb5768d6f5ae06928086</td>\n",
615
+ " <td>1e4309bec2dc7f15d30dcb5768d6f5ae06928086</td>\n",
616
+ " <td>noise-320x320-im38-smp8.png</td>\n",
617
+ " <td>noise</td>\n",
618
+ " <td>38</td>\n",
619
+ " <td>8</td>\n",
620
+ " <td>320x320</td>\n",
621
+ " <td>81581e8d38dec64d3746851aa4e45e33457d0bf8</td>\n",
622
+ " </tr>\n",
623
+ " <tr>\n",
624
+ " <th>132</th>\n",
625
+ " <td>133470</td>\n",
626
+ " <td>bad7530828a8c6aab2ebaa83f34a1353efe0c395</td>\n",
627
+ " <td>bad7530828a8c6aab2ebaa83f34a1353efe0c395</td>\n",
628
+ " <td>noise-320x320-im56-smp6.png</td>\n",
629
+ " <td>noise</td>\n",
630
+ " <td>56</td>\n",
631
+ " <td>6</td>\n",
632
+ " <td>320x320</td>\n",
633
+ " <td>43de5603b357dce25a030bbc3611abdaef5cb805</td>\n",
634
+ " </tr>\n",
635
+ " <tr>\n",
636
+ " <th>133</th>\n",
637
+ " <td>133472</td>\n",
638
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
639
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
640
+ " <td>tex-320x320-im48-smp6.png</td>\n",
641
+ " <td>texture</td>\n",
642
+ " <td>48</td>\n",
643
+ " <td>6</td>\n",
644
+ " <td>320x320</td>\n",
645
+ " <td>844305832982a3f3482d2fbf1e36edaeb1528387</td>\n",
646
+ " </tr>\n",
647
+ " <tr>\n",
648
+ " <th>134</th>\n",
649
+ " <td>133473</td>\n",
650
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
651
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
652
+ " <td>tex-320x320-im30-smp10.png</td>\n",
653
+ " <td>texture</td>\n",
654
+ " <td>30</td>\n",
655
+ " <td>10</td>\n",
656
+ " <td>320x320</td>\n",
657
+ " <td>54f31d7e051b8c28164ba0253fd21b619d817e3d</td>\n",
658
+ " </tr>\n",
659
+ " </tbody>\n",
660
+ "</table>\n",
661
+ "<p>135 rows × 9 columns</p>\n",
662
+ "</div>"
663
+ ]
664
+ },
665
+ "metadata": {}
666
+ }
667
+ ],
668
+ "execution_count": 6,
669
+ "metadata": {
670
+ "collapsed": false,
671
+ "jupyter": {
672
+ "source_hidden": false,
673
+ "outputs_hidden": false
674
+ },
675
+ "nteract": {
676
+ "transient": {
677
+ "deleting": false
678
+ }
679
+ },
680
+ "execution": {
681
+ "iopub.status.busy": "2020-02-26T22:42:10.496Z",
682
+ "iopub.execute_input": "2020-02-26T22:42:10.498Z",
683
+ "iopub.status.idle": "2020-02-26T22:42:10.506Z",
684
+ "shell.execute_reply": "2020-02-26T22:42:10.521Z"
685
+ }
686
+ }
687
+ },
688
+ {
689
+ "cell_type": "code",
690
+ "source": [
691
+ "stim_pri = get_stimulus_set(\"movshon.FreemanZiemba2013.aperture-private\")\n",
692
+ "stim_pri"
693
+ ],
694
+ "outputs": [
695
+ {
696
+ "output_type": "stream",
697
+ "name": "stderr",
698
+ "text": [
699
+ "brainio-contrib/image_movshon_FreemanZiemba2013_aperture-private.zip: 100%|██████████| 12.3M/12.3M [00:00<00:00, 26.4MB/s]\n"
700
+ ]
701
+ },
702
+ {
703
+ "output_type": "execute_result",
704
+ "execution_count": 4,
705
+ "data": {
706
+ "text/plain": [
707
+ " id image_id \\\n",
708
+ "0 133027 c3a633a13e736394f213ddf44bf124fe80cabe07 \n",
709
+ "1 133028 b67c9f5930555d4b772a803f886fa46d3221f923 \n",
710
+ "2 133029 0f3e6b96136fb6f86fdd70dacff2d5c71979329f \n",
711
+ "3 133030 ba9b8a0de15c75c91c392e3bccb3e66315b6526c \n",
712
+ "4 133031 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 \n",
713
+ ".. ... ... \n",
714
+ "310 133467 9d7393bc900ff0de4f1e7cb9954bc2577cef128a \n",
715
+ "311 133471 eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7 \n",
716
+ "312 133474 f5a78410833f2c60739751fc3ac21478e0b92aaf \n",
717
+ "313 133475 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a \n",
718
+ "314 133476 de7570acd85c2ce391b45b1386b944a2b09be6c4 \n",
719
+ "\n",
720
+ " image_file_sha1 image_file_name \\\n",
721
+ "0 c3a633a13e736394f213ddf44bf124fe80cabe07 tex-320x320-im336-smp9.png \n",
722
+ "1 b67c9f5930555d4b772a803f886fa46d3221f923 noise-320x320-im52-smp1.png \n",
723
+ "2 0f3e6b96136fb6f86fdd70dacff2d5c71979329f tex-320x320-im56-smp11.png \n",
724
+ "3 ba9b8a0de15c75c91c392e3bccb3e66315b6526c tex-320x320-im13-smp7.png \n",
725
+ "4 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 tex-320x320-im99-smp8.png \n",
726
+ ".. ... ... \n",
727
+ "310 9d7393bc900ff0de4f1e7cb9954bc2577cef128a tex-320x320-im99-smp5.png \n",
728
+ "311 eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7 tex-320x320-im60-smp1.png \n",
729
+ "312 f5a78410833f2c60739751fc3ac21478e0b92aaf tex-320x320-im336-smp4.png \n",
730
+ "313 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a tex-320x320-im23-smp2.png \n",
731
+ "314 de7570acd85c2ce391b45b1386b944a2b09be6c4 noise-320x320-im23-smp14.png \n",
732
+ "\n",
733
+ " texture_type texture_family sample resolution \\\n",
734
+ "0 texture 336 9 320x320 \n",
735
+ "1 noise 52 1 320x320 \n",
736
+ "2 texture 56 11 320x320 \n",
737
+ "3 texture 13 7 320x320 \n",
738
+ "4 texture 99 8 320x320 \n",
739
+ ".. ... ... ... ... \n",
740
+ "310 texture 99 5 320x320 \n",
741
+ "311 texture 60 1 320x320 \n",
742
+ "312 texture 336 4 320x320 \n",
743
+ "313 texture 23 2 320x320 \n",
744
+ "314 noise 23 14 320x320 \n",
745
+ "\n",
746
+ " image_id_without_aperture \n",
747
+ "0 fac76a1d313ee9ea0ffc22da1064f780c2ed81ad \n",
748
+ "1 8d88f93e77fbc48666eea18ef4061de9df2b4479 \n",
749
+ "2 8994366744469afbb1eb3607abd56b40d6b66a35 \n",
750
+ "3 cb83632f4e5e430dfbd383217107f009669f4e22 \n",
751
+ "4 759dc176abaf240eb172ec69ea5db89ae3f05c6b \n",
752
+ ".. ... \n",
753
+ "310 90123b39bf9444bb40420a08ccee3503cefddade \n",
754
+ "311 7daa75526b1e9f08acbc7168b7aa361587cf8d17 \n",
755
+ "312 90dc23ebd6e1b99e7c18ab24a1a301043cf1e914 \n",
756
+ "313 8a7247eb6e8094a28c29f279ef8f8756aeb82ed9 \n",
757
+ "314 3b49c35e006202b74459449be448b980a887345f \n",
758
+ "\n",
759
+ "[315 rows x 9 columns]"
760
+ ],
761
+ "text/html": [
762
+ "<div>\n",
763
+ "<style scoped>\n",
764
+ " .dataframe tbody tr th:only-of-type {\n",
765
+ " vertical-align: middle;\n",
766
+ " }\n",
767
+ "\n",
768
+ " .dataframe tbody tr th {\n",
769
+ " vertical-align: top;\n",
770
+ " }\n",
771
+ "\n",
772
+ " .dataframe thead th {\n",
773
+ " text-align: right;\n",
774
+ " }\n",
775
+ "</style>\n",
776
+ "<table border=\"1\" class=\"dataframe\">\n",
777
+ " <thead>\n",
778
+ " <tr style=\"text-align: right;\">\n",
779
+ " <th></th>\n",
780
+ " <th>id</th>\n",
781
+ " <th>image_id</th>\n",
782
+ " <th>image_file_sha1</th>\n",
783
+ " <th>image_file_name</th>\n",
784
+ " <th>texture_type</th>\n",
785
+ " <th>texture_family</th>\n",
786
+ " <th>sample</th>\n",
787
+ " <th>resolution</th>\n",
788
+ " <th>image_id_without_aperture</th>\n",
789
+ " </tr>\n",
790
+ " </thead>\n",
791
+ " <tbody>\n",
792
+ " <tr>\n",
793
+ " <th>0</th>\n",
794
+ " <td>133027</td>\n",
795
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
796
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
797
+ " <td>tex-320x320-im336-smp9.png</td>\n",
798
+ " <td>texture</td>\n",
799
+ " <td>336</td>\n",
800
+ " <td>9</td>\n",
801
+ " <td>320x320</td>\n",
802
+ " <td>fac76a1d313ee9ea0ffc22da1064f780c2ed81ad</td>\n",
803
+ " </tr>\n",
804
+ " <tr>\n",
805
+ " <th>1</th>\n",
806
+ " <td>133028</td>\n",
807
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
808
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
809
+ " <td>noise-320x320-im52-smp1.png</td>\n",
810
+ " <td>noise</td>\n",
811
+ " <td>52</td>\n",
812
+ " <td>1</td>\n",
813
+ " <td>320x320</td>\n",
814
+ " <td>8d88f93e77fbc48666eea18ef4061de9df2b4479</td>\n",
815
+ " </tr>\n",
816
+ " <tr>\n",
817
+ " <th>2</th>\n",
818
+ " <td>133029</td>\n",
819
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
820
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
821
+ " <td>tex-320x320-im56-smp11.png</td>\n",
822
+ " <td>texture</td>\n",
823
+ " <td>56</td>\n",
824
+ " <td>11</td>\n",
825
+ " <td>320x320</td>\n",
826
+ " <td>8994366744469afbb1eb3607abd56b40d6b66a35</td>\n",
827
+ " </tr>\n",
828
+ " <tr>\n",
829
+ " <th>3</th>\n",
830
+ " <td>133030</td>\n",
831
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
832
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
833
+ " <td>tex-320x320-im13-smp7.png</td>\n",
834
+ " <td>texture</td>\n",
835
+ " <td>13</td>\n",
836
+ " <td>7</td>\n",
837
+ " <td>320x320</td>\n",
838
+ " <td>cb83632f4e5e430dfbd383217107f009669f4e22</td>\n",
839
+ " </tr>\n",
840
+ " <tr>\n",
841
+ " <th>4</th>\n",
842
+ " <td>133031</td>\n",
843
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
844
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
845
+ " <td>tex-320x320-im99-smp8.png</td>\n",
846
+ " <td>texture</td>\n",
847
+ " <td>99</td>\n",
848
+ " <td>8</td>\n",
849
+ " <td>320x320</td>\n",
850
+ " <td>759dc176abaf240eb172ec69ea5db89ae3f05c6b</td>\n",
851
+ " </tr>\n",
852
+ " <tr>\n",
853
+ " <th>...</th>\n",
854
+ " <td>...</td>\n",
855
+ " <td>...</td>\n",
856
+ " <td>...</td>\n",
857
+ " <td>...</td>\n",
858
+ " <td>...</td>\n",
859
+ " <td>...</td>\n",
860
+ " <td>...</td>\n",
861
+ " <td>...</td>\n",
862
+ " <td>...</td>\n",
863
+ " </tr>\n",
864
+ " <tr>\n",
865
+ " <th>310</th>\n",
866
+ " <td>133467</td>\n",
867
+ " <td>9d7393bc900ff0de4f1e7cb9954bc2577cef128a</td>\n",
868
+ " <td>9d7393bc900ff0de4f1e7cb9954bc2577cef128a</td>\n",
869
+ " <td>tex-320x320-im99-smp5.png</td>\n",
870
+ " <td>texture</td>\n",
871
+ " <td>99</td>\n",
872
+ " <td>5</td>\n",
873
+ " <td>320x320</td>\n",
874
+ " <td>90123b39bf9444bb40420a08ccee3503cefddade</td>\n",
875
+ " </tr>\n",
876
+ " <tr>\n",
877
+ " <th>311</th>\n",
878
+ " <td>133471</td>\n",
879
+ " <td>eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7</td>\n",
880
+ " <td>eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7</td>\n",
881
+ " <td>tex-320x320-im60-smp1.png</td>\n",
882
+ " <td>texture</td>\n",
883
+ " <td>60</td>\n",
884
+ " <td>1</td>\n",
885
+ " <td>320x320</td>\n",
886
+ " <td>7daa75526b1e9f08acbc7168b7aa361587cf8d17</td>\n",
887
+ " </tr>\n",
888
+ " <tr>\n",
889
+ " <th>312</th>\n",
890
+ " <td>133474</td>\n",
891
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
892
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
893
+ " <td>tex-320x320-im336-smp4.png</td>\n",
894
+ " <td>texture</td>\n",
895
+ " <td>336</td>\n",
896
+ " <td>4</td>\n",
897
+ " <td>320x320</td>\n",
898
+ " <td>90dc23ebd6e1b99e7c18ab24a1a301043cf1e914</td>\n",
899
+ " </tr>\n",
900
+ " <tr>\n",
901
+ " <th>313</th>\n",
902
+ " <td>133475</td>\n",
903
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
904
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
905
+ " <td>tex-320x320-im23-smp2.png</td>\n",
906
+ " <td>texture</td>\n",
907
+ " <td>23</td>\n",
908
+ " <td>2</td>\n",
909
+ " <td>320x320</td>\n",
910
+ " <td>8a7247eb6e8094a28c29f279ef8f8756aeb82ed9</td>\n",
911
+ " </tr>\n",
912
+ " <tr>\n",
913
+ " <th>314</th>\n",
914
+ " <td>133476</td>\n",
915
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
916
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
917
+ " <td>noise-320x320-im23-smp14.png</td>\n",
918
+ " <td>noise</td>\n",
919
+ " <td>23</td>\n",
920
+ " <td>14</td>\n",
921
+ " <td>320x320</td>\n",
922
+ " <td>3b49c35e006202b74459449be448b980a887345f</td>\n",
923
+ " </tr>\n",
924
+ " </tbody>\n",
925
+ "</table>\n",
926
+ "<p>315 rows × 9 columns</p>\n",
927
+ "</div>"
928
+ ]
929
+ },
930
+ "metadata": {}
931
+ }
932
+ ],
933
+ "execution_count": 4,
934
+ "metadata": {
935
+ "collapsed": false,
936
+ "jupyter": {
937
+ "source_hidden": false,
938
+ "outputs_hidden": false
939
+ },
940
+ "nteract": {
941
+ "transient": {
942
+ "deleting": false
943
+ }
944
+ },
945
+ "execution": {
946
+ "iopub.status.busy": "2020-02-27T16:45:58.145Z",
947
+ "iopub.execute_input": "2020-02-27T16:45:58.149Z",
948
+ "iopub.status.idle": "2020-02-27T16:45:59.919Z",
949
+ "shell.execute_reply": "2020-02-27T16:45:59.934Z"
950
+ }
951
+ }
952
+ },
953
+ {
954
+ "cell_type": "code",
955
+ "source": [
956
+ "assy_pri = get_assembly(\"movshon.FreemanZiemba2013.aperture.private\")\n",
957
+ "assy_pri"
958
+ ],
959
+ "outputs": [
960
+ {
961
+ "output_type": "stream",
962
+ "name": "stderr",
963
+ "text": [
964
+ "brainio-contrib/assy_movshon_FreemanZiemba2013_aperture_private.nc: 100%|██████████| 3.10G/3.10G [00:40<00:00, 76.4MB/s]\n",
965
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataarray.py:217: FutureWarning: The Panel class is removed from pandas. Accessing it from the top-level namespace will also be removed in the next version\n",
966
+ " elif isinstance(data, pd.Panel):\n",
967
+ "/Users/jjpr/dev/brainio_base/brainio_base/assemblies.py:213: FutureWarning: The inplace argument has been deprecated and will be removed in xarray 0.12.0.\n",
968
+ " xr_data.set_index(append=True, inplace=True, **coords_d)\n",
969
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataset.py:167: FutureWarning: the 'labels' keyword is deprecated, use 'codes' instead\n",
970
+ " idx = pd.MultiIndex(labels=labels, levels=levels, names=names)\n"
971
+ ]
972
+ },
973
+ {
974
+ "output_type": "execute_result",
975
+ "execution_count": 5,
976
+ "data": {
977
+ "text/plain": [
978
+ "<xarray.NeuronRecordingAssembly 'movshon.FreemanZiemba2013.aperture.private' (neuroid: 205, time_bin: 300, presentation: 6300)>\n",
979
+ "array([[[0., 0., ..., 0., 0.],\n",
980
+ " [0., 0., ..., 0., 0.],\n",
981
+ " ...,\n",
982
+ " [0., 0., ..., 0., 0.],\n",
983
+ " [0., 0., ..., 0., 0.]],\n",
984
+ "\n",
985
+ " [[0., 0., ..., 0., 0.],\n",
986
+ " [0., 0., ..., 0., 0.],\n",
987
+ " ...,\n",
988
+ " [0., 0., ..., 0., 0.],\n",
989
+ " [0., 0., ..., 0., 0.]],\n",
990
+ "\n",
991
+ " ...,\n",
992
+ "\n",
993
+ " [[0., 0., ..., 0., 0.],\n",
994
+ " [0., 0., ..., 0., 0.],\n",
995
+ " ...,\n",
996
+ " [0., 0., ..., 0., 0.],\n",
997
+ " [0., 0., ..., 0., 0.]],\n",
998
+ "\n",
999
+ " [[0., 0., ..., 0., 0.],\n",
1000
+ " [0., 0., ..., 0., 0.],\n",
1001
+ " ...,\n",
1002
+ " [0., 0., ..., 0., 0.],\n",
1003
+ " [0., 0., ..., 0., 0.]]])\n",
1004
+ "Coordinates:\n",
1005
+ " * neuroid (neuroid) MultiIndex\n",
1006
+ " - neuroid_id (neuroid) int64 1 2 3 4 5 6 ... 25 26 27 28 29 30\n",
1007
+ " - region (neuroid) object 'V1' 'V1' 'V1' ... 'V1' 'V1'\n",
1008
+ " * time_bin (time_bin) MultiIndex\n",
1009
+ " - time_bin_start (time_bin) int64 0 1 2 3 4 5 ... 25 26 27 28 29\n",
1010
+ " - time_bin_end (time_bin) int64 1 2 3 4 5 6 ... 26 27 28 29 30\n",
1011
+ " * presentation (presentation) MultiIndex\n",
1012
+ " - repetition (presentation) int64 0 1 2 3 4 5 ... 4 5 6 7 8 9\n",
1013
+ " - image_id (presentation) object 'bfd26c127f8ba028cc95cdc95f00c45c8884b365' ... '26747eaff286d025cfff822d7c8cdd6b16b7198b'\n",
1014
+ " - id (presentation) int64 133415 133415 ... 133378\n",
1015
+ " - image_file_sha1 (presentation) object 'bfd26c127f8ba028cc95cdc95f00c45c8884b365' ... '26747eaff286d025cfff822d7c8cdd6b16b7198b'\n",
1016
+ " - image_file_name (presentation) object 'noise-320x320-im327-smp1.png' ... 'noise-320x320-im327-smp3.png'\n",
1017
+ " - texture_type (presentation) object 'noise' 'noise' ... 'noise'\n",
1018
+ " - texture_family (presentation) int64 327 327 327 ... 327 327 327\n",
1019
+ " - sample (presentation) int64 1 1 1 1 1 1 ... 3 3 3 3 3 3\n",
1020
+ " - resolution (presentation) object '320x320' ... '320x320'\n",
1021
+ " - image_id_without_aperture (presentation) object 'd8a999326b756317c7d5cf513d0cfe8eb2d7c62b' ... 'e6910fe0b53858d294b2890024d355d92606a1f4'\n",
1022
+ "Attributes:\n",
1023
+ " stimulus_set_name: movshon.FreemanZiemba2013.aperture-private\n",
1024
+ " stimulus_set: id image_id..."
1025
+ ]
1026
+ },
1027
+ "metadata": {}
1028
+ }
1029
+ ],
1030
+ "execution_count": 5,
1031
+ "metadata": {
1032
+ "collapsed": false,
1033
+ "jupyter": {
1034
+ "source_hidden": false,
1035
+ "outputs_hidden": false
1036
+ },
1037
+ "nteract": {
1038
+ "transient": {
1039
+ "deleting": false
1040
+ }
1041
+ },
1042
+ "execution": {
1043
+ "iopub.status.busy": "2020-02-27T16:47:27.397Z",
1044
+ "iopub.execute_input": "2020-02-27T16:47:27.401Z",
1045
+ "iopub.status.idle": "2020-02-27T16:48:16.964Z",
1046
+ "shell.execute_reply": "2020-02-27T16:48:16.977Z"
1047
+ }
1048
+ }
1049
+ },
1050
+ {
1051
+ "cell_type": "code",
1052
+ "source": [
1053
+ "assy_pri.stimulus_set"
1054
+ ],
1055
+ "outputs": [
1056
+ {
1057
+ "output_type": "execute_result",
1058
+ "execution_count": 6,
1059
+ "data": {
1060
+ "text/plain": [
1061
+ " id image_id \\\n",
1062
+ "0 133027 c3a633a13e736394f213ddf44bf124fe80cabe07 \n",
1063
+ "1 133028 b67c9f5930555d4b772a803f886fa46d3221f923 \n",
1064
+ "2 133029 0f3e6b96136fb6f86fdd70dacff2d5c71979329f \n",
1065
+ "3 133030 ba9b8a0de15c75c91c392e3bccb3e66315b6526c \n",
1066
+ "4 133031 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 \n",
1067
+ ".. ... ... \n",
1068
+ "310 133467 9d7393bc900ff0de4f1e7cb9954bc2577cef128a \n",
1069
+ "311 133471 eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7 \n",
1070
+ "312 133474 f5a78410833f2c60739751fc3ac21478e0b92aaf \n",
1071
+ "313 133475 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a \n",
1072
+ "314 133476 de7570acd85c2ce391b45b1386b944a2b09be6c4 \n",
1073
+ "\n",
1074
+ " image_file_sha1 image_file_name \\\n",
1075
+ "0 c3a633a13e736394f213ddf44bf124fe80cabe07 tex-320x320-im336-smp9.png \n",
1076
+ "1 b67c9f5930555d4b772a803f886fa46d3221f923 noise-320x320-im52-smp1.png \n",
1077
+ "2 0f3e6b96136fb6f86fdd70dacff2d5c71979329f tex-320x320-im56-smp11.png \n",
1078
+ "3 ba9b8a0de15c75c91c392e3bccb3e66315b6526c tex-320x320-im13-smp7.png \n",
1079
+ "4 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 tex-320x320-im99-smp8.png \n",
1080
+ ".. ... ... \n",
1081
+ "310 9d7393bc900ff0de4f1e7cb9954bc2577cef128a tex-320x320-im99-smp5.png \n",
1082
+ "311 eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7 tex-320x320-im60-smp1.png \n",
1083
+ "312 f5a78410833f2c60739751fc3ac21478e0b92aaf tex-320x320-im336-smp4.png \n",
1084
+ "313 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a tex-320x320-im23-smp2.png \n",
1085
+ "314 de7570acd85c2ce391b45b1386b944a2b09be6c4 noise-320x320-im23-smp14.png \n",
1086
+ "\n",
1087
+ " texture_type texture_family sample resolution \\\n",
1088
+ "0 texture 336 9 320x320 \n",
1089
+ "1 noise 52 1 320x320 \n",
1090
+ "2 texture 56 11 320x320 \n",
1091
+ "3 texture 13 7 320x320 \n",
1092
+ "4 texture 99 8 320x320 \n",
1093
+ ".. ... ... ... ... \n",
1094
+ "310 texture 99 5 320x320 \n",
1095
+ "311 texture 60 1 320x320 \n",
1096
+ "312 texture 336 4 320x320 \n",
1097
+ "313 texture 23 2 320x320 \n",
1098
+ "314 noise 23 14 320x320 \n",
1099
+ "\n",
1100
+ " image_id_without_aperture \n",
1101
+ "0 fac76a1d313ee9ea0ffc22da1064f780c2ed81ad \n",
1102
+ "1 8d88f93e77fbc48666eea18ef4061de9df2b4479 \n",
1103
+ "2 8994366744469afbb1eb3607abd56b40d6b66a35 \n",
1104
+ "3 cb83632f4e5e430dfbd383217107f009669f4e22 \n",
1105
+ "4 759dc176abaf240eb172ec69ea5db89ae3f05c6b \n",
1106
+ ".. ... \n",
1107
+ "310 90123b39bf9444bb40420a08ccee3503cefddade \n",
1108
+ "311 7daa75526b1e9f08acbc7168b7aa361587cf8d17 \n",
1109
+ "312 90dc23ebd6e1b99e7c18ab24a1a301043cf1e914 \n",
1110
+ "313 8a7247eb6e8094a28c29f279ef8f8756aeb82ed9 \n",
1111
+ "314 3b49c35e006202b74459449be448b980a887345f \n",
1112
+ "\n",
1113
+ "[315 rows x 9 columns]"
1114
+ ],
1115
+ "text/html": [
1116
+ "<div>\n",
1117
+ "<style scoped>\n",
1118
+ " .dataframe tbody tr th:only-of-type {\n",
1119
+ " vertical-align: middle;\n",
1120
+ " }\n",
1121
+ "\n",
1122
+ " .dataframe tbody tr th {\n",
1123
+ " vertical-align: top;\n",
1124
+ " }\n",
1125
+ "\n",
1126
+ " .dataframe thead th {\n",
1127
+ " text-align: right;\n",
1128
+ " }\n",
1129
+ "</style>\n",
1130
+ "<table border=\"1\" class=\"dataframe\">\n",
1131
+ " <thead>\n",
1132
+ " <tr style=\"text-align: right;\">\n",
1133
+ " <th></th>\n",
1134
+ " <th>id</th>\n",
1135
+ " <th>image_id</th>\n",
1136
+ " <th>image_file_sha1</th>\n",
1137
+ " <th>image_file_name</th>\n",
1138
+ " <th>texture_type</th>\n",
1139
+ " <th>texture_family</th>\n",
1140
+ " <th>sample</th>\n",
1141
+ " <th>resolution</th>\n",
1142
+ " <th>image_id_without_aperture</th>\n",
1143
+ " </tr>\n",
1144
+ " </thead>\n",
1145
+ " <tbody>\n",
1146
+ " <tr>\n",
1147
+ " <th>0</th>\n",
1148
+ " <td>133027</td>\n",
1149
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
1150
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
1151
+ " <td>tex-320x320-im336-smp9.png</td>\n",
1152
+ " <td>texture</td>\n",
1153
+ " <td>336</td>\n",
1154
+ " <td>9</td>\n",
1155
+ " <td>320x320</td>\n",
1156
+ " <td>fac76a1d313ee9ea0ffc22da1064f780c2ed81ad</td>\n",
1157
+ " </tr>\n",
1158
+ " <tr>\n",
1159
+ " <th>1</th>\n",
1160
+ " <td>133028</td>\n",
1161
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
1162
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
1163
+ " <td>noise-320x320-im52-smp1.png</td>\n",
1164
+ " <td>noise</td>\n",
1165
+ " <td>52</td>\n",
1166
+ " <td>1</td>\n",
1167
+ " <td>320x320</td>\n",
1168
+ " <td>8d88f93e77fbc48666eea18ef4061de9df2b4479</td>\n",
1169
+ " </tr>\n",
1170
+ " <tr>\n",
1171
+ " <th>2</th>\n",
1172
+ " <td>133029</td>\n",
1173
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
1174
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
1175
+ " <td>tex-320x320-im56-smp11.png</td>\n",
1176
+ " <td>texture</td>\n",
1177
+ " <td>56</td>\n",
1178
+ " <td>11</td>\n",
1179
+ " <td>320x320</td>\n",
1180
+ " <td>8994366744469afbb1eb3607abd56b40d6b66a35</td>\n",
1181
+ " </tr>\n",
1182
+ " <tr>\n",
1183
+ " <th>3</th>\n",
1184
+ " <td>133030</td>\n",
1185
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
1186
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
1187
+ " <td>tex-320x320-im13-smp7.png</td>\n",
1188
+ " <td>texture</td>\n",
1189
+ " <td>13</td>\n",
1190
+ " <td>7</td>\n",
1191
+ " <td>320x320</td>\n",
1192
+ " <td>cb83632f4e5e430dfbd383217107f009669f4e22</td>\n",
1193
+ " </tr>\n",
1194
+ " <tr>\n",
1195
+ " <th>4</th>\n",
1196
+ " <td>133031</td>\n",
1197
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
1198
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
1199
+ " <td>tex-320x320-im99-smp8.png</td>\n",
1200
+ " <td>texture</td>\n",
1201
+ " <td>99</td>\n",
1202
+ " <td>8</td>\n",
1203
+ " <td>320x320</td>\n",
1204
+ " <td>759dc176abaf240eb172ec69ea5db89ae3f05c6b</td>\n",
1205
+ " </tr>\n",
1206
+ " <tr>\n",
1207
+ " <th>...</th>\n",
1208
+ " <td>...</td>\n",
1209
+ " <td>...</td>\n",
1210
+ " <td>...</td>\n",
1211
+ " <td>...</td>\n",
1212
+ " <td>...</td>\n",
1213
+ " <td>...</td>\n",
1214
+ " <td>...</td>\n",
1215
+ " <td>...</td>\n",
1216
+ " <td>...</td>\n",
1217
+ " </tr>\n",
1218
+ " <tr>\n",
1219
+ " <th>310</th>\n",
1220
+ " <td>133467</td>\n",
1221
+ " <td>9d7393bc900ff0de4f1e7cb9954bc2577cef128a</td>\n",
1222
+ " <td>9d7393bc900ff0de4f1e7cb9954bc2577cef128a</td>\n",
1223
+ " <td>tex-320x320-im99-smp5.png</td>\n",
1224
+ " <td>texture</td>\n",
1225
+ " <td>99</td>\n",
1226
+ " <td>5</td>\n",
1227
+ " <td>320x320</td>\n",
1228
+ " <td>90123b39bf9444bb40420a08ccee3503cefddade</td>\n",
1229
+ " </tr>\n",
1230
+ " <tr>\n",
1231
+ " <th>311</th>\n",
1232
+ " <td>133471</td>\n",
1233
+ " <td>eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7</td>\n",
1234
+ " <td>eb6a132f1dc052e2f71be6283ac2f1d8e77f5af7</td>\n",
1235
+ " <td>tex-320x320-im60-smp1.png</td>\n",
1236
+ " <td>texture</td>\n",
1237
+ " <td>60</td>\n",
1238
+ " <td>1</td>\n",
1239
+ " <td>320x320</td>\n",
1240
+ " <td>7daa75526b1e9f08acbc7168b7aa361587cf8d17</td>\n",
1241
+ " </tr>\n",
1242
+ " <tr>\n",
1243
+ " <th>312</th>\n",
1244
+ " <td>133474</td>\n",
1245
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
1246
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
1247
+ " <td>tex-320x320-im336-smp4.png</td>\n",
1248
+ " <td>texture</td>\n",
1249
+ " <td>336</td>\n",
1250
+ " <td>4</td>\n",
1251
+ " <td>320x320</td>\n",
1252
+ " <td>90dc23ebd6e1b99e7c18ab24a1a301043cf1e914</td>\n",
1253
+ " </tr>\n",
1254
+ " <tr>\n",
1255
+ " <th>313</th>\n",
1256
+ " <td>133475</td>\n",
1257
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
1258
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
1259
+ " <td>tex-320x320-im23-smp2.png</td>\n",
1260
+ " <td>texture</td>\n",
1261
+ " <td>23</td>\n",
1262
+ " <td>2</td>\n",
1263
+ " <td>320x320</td>\n",
1264
+ " <td>8a7247eb6e8094a28c29f279ef8f8756aeb82ed9</td>\n",
1265
+ " </tr>\n",
1266
+ " <tr>\n",
1267
+ " <th>314</th>\n",
1268
+ " <td>133476</td>\n",
1269
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
1270
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
1271
+ " <td>noise-320x320-im23-smp14.png</td>\n",
1272
+ " <td>noise</td>\n",
1273
+ " <td>23</td>\n",
1274
+ " <td>14</td>\n",
1275
+ " <td>320x320</td>\n",
1276
+ " <td>3b49c35e006202b74459449be448b980a887345f</td>\n",
1277
+ " </tr>\n",
1278
+ " </tbody>\n",
1279
+ "</table>\n",
1280
+ "<p>315 rows × 9 columns</p>\n",
1281
+ "</div>"
1282
+ ]
1283
+ },
1284
+ "metadata": {}
1285
+ }
1286
+ ],
1287
+ "execution_count": 6,
1288
+ "metadata": {
1289
+ "collapsed": false,
1290
+ "jupyter": {
1291
+ "source_hidden": false,
1292
+ "outputs_hidden": false
1293
+ },
1294
+ "nteract": {
1295
+ "transient": {
1296
+ "deleting": false
1297
+ }
1298
+ },
1299
+ "execution": {
1300
+ "iopub.status.busy": "2020-02-27T16:49:52.115Z",
1301
+ "iopub.execute_input": "2020-02-27T16:49:52.117Z",
1302
+ "iopub.status.idle": "2020-02-27T16:49:52.126Z",
1303
+ "shell.execute_reply": "2020-02-27T16:49:52.129Z"
1304
+ }
1305
+ }
1306
+ },
1307
+ {
1308
+ "cell_type": "code",
1309
+ "source": [
1310
+ "stim_both = get_stimulus_set(\"movshon.FreemanZiemba2013.aperture\")\n",
1311
+ "stim_both"
1312
+ ],
1313
+ "outputs": [
1314
+ {
1315
+ "output_type": "stream",
1316
+ "name": "stderr",
1317
+ "text": [
1318
+ "brainio-contrib/image_movshon_FreemanZiemba2013_aperture.zip: 100%|██████████| 17.6M/17.6M [00:01<00:00, 16.4MB/s]\n"
1319
+ ]
1320
+ },
1321
+ {
1322
+ "output_type": "execute_result",
1323
+ "execution_count": 9,
1324
+ "data": {
1325
+ "text/plain": [
1326
+ " id image_id \\\n",
1327
+ "0 133027 c3a633a13e736394f213ddf44bf124fe80cabe07 \n",
1328
+ "1 133028 b67c9f5930555d4b772a803f886fa46d3221f923 \n",
1329
+ "2 133029 0f3e6b96136fb6f86fdd70dacff2d5c71979329f \n",
1330
+ "3 133030 ba9b8a0de15c75c91c392e3bccb3e66315b6526c \n",
1331
+ "4 133031 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 \n",
1332
+ ".. ... ... \n",
1333
+ "445 133472 b429ff650a6f868b5124cc04c6d79ec69fb35c73 \n",
1334
+ "446 133473 d01f48d87a4b49f50c021c612da9d3180488f6ba \n",
1335
+ "447 133474 f5a78410833f2c60739751fc3ac21478e0b92aaf \n",
1336
+ "448 133475 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a \n",
1337
+ "449 133476 de7570acd85c2ce391b45b1386b944a2b09be6c4 \n",
1338
+ "\n",
1339
+ " image_file_sha1 image_file_name \\\n",
1340
+ "0 c3a633a13e736394f213ddf44bf124fe80cabe07 tex-320x320-im336-smp9.png \n",
1341
+ "1 b67c9f5930555d4b772a803f886fa46d3221f923 noise-320x320-im52-smp1.png \n",
1342
+ "2 0f3e6b96136fb6f86fdd70dacff2d5c71979329f tex-320x320-im56-smp11.png \n",
1343
+ "3 ba9b8a0de15c75c91c392e3bccb3e66315b6526c tex-320x320-im13-smp7.png \n",
1344
+ "4 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 tex-320x320-im99-smp8.png \n",
1345
+ ".. ... ... \n",
1346
+ "445 b429ff650a6f868b5124cc04c6d79ec69fb35c73 tex-320x320-im48-smp6.png \n",
1347
+ "446 d01f48d87a4b49f50c021c612da9d3180488f6ba tex-320x320-im30-smp10.png \n",
1348
+ "447 f5a78410833f2c60739751fc3ac21478e0b92aaf tex-320x320-im336-smp4.png \n",
1349
+ "448 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a tex-320x320-im23-smp2.png \n",
1350
+ "449 de7570acd85c2ce391b45b1386b944a2b09be6c4 noise-320x320-im23-smp14.png \n",
1351
+ "\n",
1352
+ " texture_type texture_family sample resolution \\\n",
1353
+ "0 texture 336 9 320x320 \n",
1354
+ "1 noise 52 1 320x320 \n",
1355
+ "2 texture 56 11 320x320 \n",
1356
+ "3 texture 13 7 320x320 \n",
1357
+ "4 texture 99 8 320x320 \n",
1358
+ ".. ... ... ... ... \n",
1359
+ "445 texture 48 6 320x320 \n",
1360
+ "446 texture 30 10 320x320 \n",
1361
+ "447 texture 336 4 320x320 \n",
1362
+ "448 texture 23 2 320x320 \n",
1363
+ "449 noise 23 14 320x320 \n",
1364
+ "\n",
1365
+ " image_id_without_aperture \n",
1366
+ "0 fac76a1d313ee9ea0ffc22da1064f780c2ed81ad \n",
1367
+ "1 8d88f93e77fbc48666eea18ef4061de9df2b4479 \n",
1368
+ "2 8994366744469afbb1eb3607abd56b40d6b66a35 \n",
1369
+ "3 cb83632f4e5e430dfbd383217107f009669f4e22 \n",
1370
+ "4 759dc176abaf240eb172ec69ea5db89ae3f05c6b \n",
1371
+ ".. ... \n",
1372
+ "445 844305832982a3f3482d2fbf1e36edaeb1528387 \n",
1373
+ "446 54f31d7e051b8c28164ba0253fd21b619d817e3d \n",
1374
+ "447 90dc23ebd6e1b99e7c18ab24a1a301043cf1e914 \n",
1375
+ "448 8a7247eb6e8094a28c29f279ef8f8756aeb82ed9 \n",
1376
+ "449 3b49c35e006202b74459449be448b980a887345f \n",
1377
+ "\n",
1378
+ "[450 rows x 9 columns]"
1379
+ ],
1380
+ "text/html": [
1381
+ "<div>\n",
1382
+ "<style scoped>\n",
1383
+ " .dataframe tbody tr th:only-of-type {\n",
1384
+ " vertical-align: middle;\n",
1385
+ " }\n",
1386
+ "\n",
1387
+ " .dataframe tbody tr th {\n",
1388
+ " vertical-align: top;\n",
1389
+ " }\n",
1390
+ "\n",
1391
+ " .dataframe thead th {\n",
1392
+ " text-align: right;\n",
1393
+ " }\n",
1394
+ "</style>\n",
1395
+ "<table border=\"1\" class=\"dataframe\">\n",
1396
+ " <thead>\n",
1397
+ " <tr style=\"text-align: right;\">\n",
1398
+ " <th></th>\n",
1399
+ " <th>id</th>\n",
1400
+ " <th>image_id</th>\n",
1401
+ " <th>image_file_sha1</th>\n",
1402
+ " <th>image_file_name</th>\n",
1403
+ " <th>texture_type</th>\n",
1404
+ " <th>texture_family</th>\n",
1405
+ " <th>sample</th>\n",
1406
+ " <th>resolution</th>\n",
1407
+ " <th>image_id_without_aperture</th>\n",
1408
+ " </tr>\n",
1409
+ " </thead>\n",
1410
+ " <tbody>\n",
1411
+ " <tr>\n",
1412
+ " <th>0</th>\n",
1413
+ " <td>133027</td>\n",
1414
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
1415
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
1416
+ " <td>tex-320x320-im336-smp9.png</td>\n",
1417
+ " <td>texture</td>\n",
1418
+ " <td>336</td>\n",
1419
+ " <td>9</td>\n",
1420
+ " <td>320x320</td>\n",
1421
+ " <td>fac76a1d313ee9ea0ffc22da1064f780c2ed81ad</td>\n",
1422
+ " </tr>\n",
1423
+ " <tr>\n",
1424
+ " <th>1</th>\n",
1425
+ " <td>133028</td>\n",
1426
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
1427
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
1428
+ " <td>noise-320x320-im52-smp1.png</td>\n",
1429
+ " <td>noise</td>\n",
1430
+ " <td>52</td>\n",
1431
+ " <td>1</td>\n",
1432
+ " <td>320x320</td>\n",
1433
+ " <td>8d88f93e77fbc48666eea18ef4061de9df2b4479</td>\n",
1434
+ " </tr>\n",
1435
+ " <tr>\n",
1436
+ " <th>2</th>\n",
1437
+ " <td>133029</td>\n",
1438
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
1439
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
1440
+ " <td>tex-320x320-im56-smp11.png</td>\n",
1441
+ " <td>texture</td>\n",
1442
+ " <td>56</td>\n",
1443
+ " <td>11</td>\n",
1444
+ " <td>320x320</td>\n",
1445
+ " <td>8994366744469afbb1eb3607abd56b40d6b66a35</td>\n",
1446
+ " </tr>\n",
1447
+ " <tr>\n",
1448
+ " <th>3</th>\n",
1449
+ " <td>133030</td>\n",
1450
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
1451
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
1452
+ " <td>tex-320x320-im13-smp7.png</td>\n",
1453
+ " <td>texture</td>\n",
1454
+ " <td>13</td>\n",
1455
+ " <td>7</td>\n",
1456
+ " <td>320x320</td>\n",
1457
+ " <td>cb83632f4e5e430dfbd383217107f009669f4e22</td>\n",
1458
+ " </tr>\n",
1459
+ " <tr>\n",
1460
+ " <th>4</th>\n",
1461
+ " <td>133031</td>\n",
1462
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
1463
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
1464
+ " <td>tex-320x320-im99-smp8.png</td>\n",
1465
+ " <td>texture</td>\n",
1466
+ " <td>99</td>\n",
1467
+ " <td>8</td>\n",
1468
+ " <td>320x320</td>\n",
1469
+ " <td>759dc176abaf240eb172ec69ea5db89ae3f05c6b</td>\n",
1470
+ " </tr>\n",
1471
+ " <tr>\n",
1472
+ " <th>...</th>\n",
1473
+ " <td>...</td>\n",
1474
+ " <td>...</td>\n",
1475
+ " <td>...</td>\n",
1476
+ " <td>...</td>\n",
1477
+ " <td>...</td>\n",
1478
+ " <td>...</td>\n",
1479
+ " <td>...</td>\n",
1480
+ " <td>...</td>\n",
1481
+ " <td>...</td>\n",
1482
+ " </tr>\n",
1483
+ " <tr>\n",
1484
+ " <th>445</th>\n",
1485
+ " <td>133472</td>\n",
1486
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
1487
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
1488
+ " <td>tex-320x320-im48-smp6.png</td>\n",
1489
+ " <td>texture</td>\n",
1490
+ " <td>48</td>\n",
1491
+ " <td>6</td>\n",
1492
+ " <td>320x320</td>\n",
1493
+ " <td>844305832982a3f3482d2fbf1e36edaeb1528387</td>\n",
1494
+ " </tr>\n",
1495
+ " <tr>\n",
1496
+ " <th>446</th>\n",
1497
+ " <td>133473</td>\n",
1498
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
1499
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
1500
+ " <td>tex-320x320-im30-smp10.png</td>\n",
1501
+ " <td>texture</td>\n",
1502
+ " <td>30</td>\n",
1503
+ " <td>10</td>\n",
1504
+ " <td>320x320</td>\n",
1505
+ " <td>54f31d7e051b8c28164ba0253fd21b619d817e3d</td>\n",
1506
+ " </tr>\n",
1507
+ " <tr>\n",
1508
+ " <th>447</th>\n",
1509
+ " <td>133474</td>\n",
1510
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
1511
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
1512
+ " <td>tex-320x320-im336-smp4.png</td>\n",
1513
+ " <td>texture</td>\n",
1514
+ " <td>336</td>\n",
1515
+ " <td>4</td>\n",
1516
+ " <td>320x320</td>\n",
1517
+ " <td>90dc23ebd6e1b99e7c18ab24a1a301043cf1e914</td>\n",
1518
+ " </tr>\n",
1519
+ " <tr>\n",
1520
+ " <th>448</th>\n",
1521
+ " <td>133475</td>\n",
1522
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
1523
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
1524
+ " <td>tex-320x320-im23-smp2.png</td>\n",
1525
+ " <td>texture</td>\n",
1526
+ " <td>23</td>\n",
1527
+ " <td>2</td>\n",
1528
+ " <td>320x320</td>\n",
1529
+ " <td>8a7247eb6e8094a28c29f279ef8f8756aeb82ed9</td>\n",
1530
+ " </tr>\n",
1531
+ " <tr>\n",
1532
+ " <th>449</th>\n",
1533
+ " <td>133476</td>\n",
1534
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
1535
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
1536
+ " <td>noise-320x320-im23-smp14.png</td>\n",
1537
+ " <td>noise</td>\n",
1538
+ " <td>23</td>\n",
1539
+ " <td>14</td>\n",
1540
+ " <td>320x320</td>\n",
1541
+ " <td>3b49c35e006202b74459449be448b980a887345f</td>\n",
1542
+ " </tr>\n",
1543
+ " </tbody>\n",
1544
+ "</table>\n",
1545
+ "<p>450 rows × 9 columns</p>\n",
1546
+ "</div>"
1547
+ ]
1548
+ },
1549
+ "metadata": {}
1550
+ }
1551
+ ],
1552
+ "execution_count": 9,
1553
+ "metadata": {
1554
+ "collapsed": false,
1555
+ "jupyter": {
1556
+ "source_hidden": false,
1557
+ "outputs_hidden": false
1558
+ },
1559
+ "nteract": {
1560
+ "transient": {
1561
+ "deleting": false
1562
+ }
1563
+ },
1564
+ "execution": {
1565
+ "iopub.status.busy": "2020-02-27T17:13:54.981Z",
1566
+ "iopub.execute_input": "2020-02-27T17:13:54.987Z",
1567
+ "iopub.status.idle": "2020-02-27T17:13:57.657Z",
1568
+ "shell.execute_reply": "2020-02-27T17:13:57.675Z"
1569
+ }
1570
+ }
1571
+ },
1572
+ {
1573
+ "cell_type": "code",
1574
+ "source": [
1575
+ "assy_both = get_assembly(\"movshon.FreemanZiemba2013.aperture\")\n",
1576
+ "assy_both"
1577
+ ],
1578
+ "outputs": [
1579
+ {
1580
+ "output_type": "stream",
1581
+ "name": "stderr",
1582
+ "text": [
1583
+ "brainio-contrib/assy_movshon_FreemanZiemba2013_aperture.nc: 100%|██████████| 4.43G/4.43G [00:55<00:00, 79.3MB/s]\n",
1584
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataarray.py:217: FutureWarning: The Panel class is removed from pandas. Accessing it from the top-level namespace will also be removed in the next version\n",
1585
+ " elif isinstance(data, pd.Panel):\n",
1586
+ "/Users/jjpr/dev/brainio_base/brainio_base/assemblies.py:213: FutureWarning: The inplace argument has been deprecated and will be removed in xarray 0.12.0.\n",
1587
+ " xr_data.set_index(append=True, inplace=True, **coords_d)\n",
1588
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataset.py:167: FutureWarning: the 'labels' keyword is deprecated, use 'codes' instead\n",
1589
+ " idx = pd.MultiIndex(labels=labels, levels=levels, names=names)\n"
1590
+ ]
1591
+ },
1592
+ {
1593
+ "output_type": "execute_result",
1594
+ "execution_count": 10,
1595
+ "data": {
1596
+ "text/plain": [
1597
+ "<xarray.NeuronRecordingAssembly 'movshon.FreemanZiemba2013.aperture' (neuroid: 205, time_bin: 300, presentation: 9000)>\n",
1598
+ "array([[[0., 0., ..., 0., 0.],\n",
1599
+ " [0., 0., ..., 0., 0.],\n",
1600
+ " ...,\n",
1601
+ " [0., 0., ..., 0., 0.],\n",
1602
+ " [0., 0., ..., 0., 0.]],\n",
1603
+ "\n",
1604
+ " [[0., 0., ..., 0., 0.],\n",
1605
+ " [0., 0., ..., 0., 0.],\n",
1606
+ " ...,\n",
1607
+ " [0., 0., ..., 0., 0.],\n",
1608
+ " [0., 0., ..., 0., 0.]],\n",
1609
+ "\n",
1610
+ " ...,\n",
1611
+ "\n",
1612
+ " [[0., 0., ..., 0., 0.],\n",
1613
+ " [0., 0., ..., 0., 0.],\n",
1614
+ " ...,\n",
1615
+ " [0., 0., ..., 0., 0.],\n",
1616
+ " [0., 0., ..., 0., 0.]],\n",
1617
+ "\n",
1618
+ " [[0., 0., ..., 0., 0.],\n",
1619
+ " [0., 0., ..., 0., 0.],\n",
1620
+ " ...,\n",
1621
+ " [0., 0., ..., 0., 0.],\n",
1622
+ " [0., 0., ..., 0., 0.]]])\n",
1623
+ "Coordinates:\n",
1624
+ " * neuroid (neuroid) MultiIndex\n",
1625
+ " - neuroid_id (neuroid) int64 1 2 3 4 5 6 ... 25 26 27 28 29 30\n",
1626
+ " - region (neuroid) object 'V1' 'V1' 'V1' ... 'V1' 'V1'\n",
1627
+ " * time_bin (time_bin) MultiIndex\n",
1628
+ " - time_bin_start (time_bin) int64 0 1 2 3 4 5 ... 25 26 27 28 29\n",
1629
+ " - time_bin_end (time_bin) int64 1 2 3 4 5 6 ... 26 27 28 29 30\n",
1630
+ " * presentation (presentation) MultiIndex\n",
1631
+ " - repetition (presentation) int64 0 1 2 3 4 5 ... 4 5 6 7 8 9\n",
1632
+ " - image_id (presentation) object 'bfd26c127f8ba028cc95cdc95f00c45c8884b365' ... 'dfa618e0503a4251582450e88daf0c21a5abed95'\n",
1633
+ " - id (presentation) int64 133415 133415 ... 133388\n",
1634
+ " - image_file_sha1 (presentation) object 'bfd26c127f8ba028cc95cdc95f00c45c8884b365' ... 'dfa618e0503a4251582450e88daf0c21a5abed95'\n",
1635
+ " - image_file_name (presentation) object 'noise-320x320-im327-smp1.png' ... 'noise-320x320-im327-smp2.png'\n",
1636
+ " - texture_type (presentation) object 'noise' 'noise' ... 'noise'\n",
1637
+ " - texture_family (presentation) int64 327 327 327 ... 327 327 327\n",
1638
+ " - sample (presentation) int64 1 1 1 1 1 1 ... 2 2 2 2 2 2\n",
1639
+ " - resolution (presentation) object '320x320' ... '320x320'\n",
1640
+ " - image_id_without_aperture (presentation) object 'd8a999326b756317c7d5cf513d0cfe8eb2d7c62b' ... '0e8609dc2778a848d7ed8b355d9331160ac2974d'\n",
1641
+ "Attributes:\n",
1642
+ " stimulus_set_name: movshon.FreemanZiemba2013.aperture\n",
1643
+ " stimulus_set: id image_id..."
1644
+ ]
1645
+ },
1646
+ "metadata": {}
1647
+ }
1648
+ ],
1649
+ "execution_count": 10,
1650
+ "metadata": {
1651
+ "collapsed": false,
1652
+ "jupyter": {
1653
+ "source_hidden": false,
1654
+ "outputs_hidden": false
1655
+ },
1656
+ "nteract": {
1657
+ "transient": {
1658
+ "deleting": false
1659
+ }
1660
+ },
1661
+ "execution": {
1662
+ "iopub.status.busy": "2020-02-27T17:16:33.377Z",
1663
+ "iopub.execute_input": "2020-02-27T17:16:33.380Z",
1664
+ "iopub.status.idle": "2020-02-27T17:17:41.366Z",
1665
+ "shell.execute_reply": "2020-02-27T17:17:41.382Z"
1666
+ }
1667
+ }
1668
+ },
1669
+ {
1670
+ "cell_type": "code",
1671
+ "source": [
1672
+ "assy_both.stimulus_set"
1673
+ ],
1674
+ "outputs": [
1675
+ {
1676
+ "output_type": "execute_result",
1677
+ "execution_count": 11,
1678
+ "data": {
1679
+ "text/plain": [
1680
+ " id image_id \\\n",
1681
+ "0 133027 c3a633a13e736394f213ddf44bf124fe80cabe07 \n",
1682
+ "1 133028 b67c9f5930555d4b772a803f886fa46d3221f923 \n",
1683
+ "2 133029 0f3e6b96136fb6f86fdd70dacff2d5c71979329f \n",
1684
+ "3 133030 ba9b8a0de15c75c91c392e3bccb3e66315b6526c \n",
1685
+ "4 133031 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 \n",
1686
+ ".. ... ... \n",
1687
+ "445 133472 b429ff650a6f868b5124cc04c6d79ec69fb35c73 \n",
1688
+ "446 133473 d01f48d87a4b49f50c021c612da9d3180488f6ba \n",
1689
+ "447 133474 f5a78410833f2c60739751fc3ac21478e0b92aaf \n",
1690
+ "448 133475 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a \n",
1691
+ "449 133476 de7570acd85c2ce391b45b1386b944a2b09be6c4 \n",
1692
+ "\n",
1693
+ " image_file_sha1 image_file_name \\\n",
1694
+ "0 c3a633a13e736394f213ddf44bf124fe80cabe07 tex-320x320-im336-smp9.png \n",
1695
+ "1 b67c9f5930555d4b772a803f886fa46d3221f923 noise-320x320-im52-smp1.png \n",
1696
+ "2 0f3e6b96136fb6f86fdd70dacff2d5c71979329f tex-320x320-im56-smp11.png \n",
1697
+ "3 ba9b8a0de15c75c91c392e3bccb3e66315b6526c tex-320x320-im13-smp7.png \n",
1698
+ "4 2fbd82d8c92189450513fbc15acbd5ac2b76dbc9 tex-320x320-im99-smp8.png \n",
1699
+ ".. ... ... \n",
1700
+ "445 b429ff650a6f868b5124cc04c6d79ec69fb35c73 tex-320x320-im48-smp6.png \n",
1701
+ "446 d01f48d87a4b49f50c021c612da9d3180488f6ba tex-320x320-im30-smp10.png \n",
1702
+ "447 f5a78410833f2c60739751fc3ac21478e0b92aaf tex-320x320-im336-smp4.png \n",
1703
+ "448 c39cafa818aa8aab7ad6b9ae7178ae7433bb811a tex-320x320-im23-smp2.png \n",
1704
+ "449 de7570acd85c2ce391b45b1386b944a2b09be6c4 noise-320x320-im23-smp14.png \n",
1705
+ "\n",
1706
+ " texture_type texture_family sample resolution \\\n",
1707
+ "0 texture 336 9 320x320 \n",
1708
+ "1 noise 52 1 320x320 \n",
1709
+ "2 texture 56 11 320x320 \n",
1710
+ "3 texture 13 7 320x320 \n",
1711
+ "4 texture 99 8 320x320 \n",
1712
+ ".. ... ... ... ... \n",
1713
+ "445 texture 48 6 320x320 \n",
1714
+ "446 texture 30 10 320x320 \n",
1715
+ "447 texture 336 4 320x320 \n",
1716
+ "448 texture 23 2 320x320 \n",
1717
+ "449 noise 23 14 320x320 \n",
1718
+ "\n",
1719
+ " image_id_without_aperture \n",
1720
+ "0 fac76a1d313ee9ea0ffc22da1064f780c2ed81ad \n",
1721
+ "1 8d88f93e77fbc48666eea18ef4061de9df2b4479 \n",
1722
+ "2 8994366744469afbb1eb3607abd56b40d6b66a35 \n",
1723
+ "3 cb83632f4e5e430dfbd383217107f009669f4e22 \n",
1724
+ "4 759dc176abaf240eb172ec69ea5db89ae3f05c6b \n",
1725
+ ".. ... \n",
1726
+ "445 844305832982a3f3482d2fbf1e36edaeb1528387 \n",
1727
+ "446 54f31d7e051b8c28164ba0253fd21b619d817e3d \n",
1728
+ "447 90dc23ebd6e1b99e7c18ab24a1a301043cf1e914 \n",
1729
+ "448 8a7247eb6e8094a28c29f279ef8f8756aeb82ed9 \n",
1730
+ "449 3b49c35e006202b74459449be448b980a887345f \n",
1731
+ "\n",
1732
+ "[450 rows x 9 columns]"
1733
+ ],
1734
+ "text/html": [
1735
+ "<div>\n",
1736
+ "<style scoped>\n",
1737
+ " .dataframe tbody tr th:only-of-type {\n",
1738
+ " vertical-align: middle;\n",
1739
+ " }\n",
1740
+ "\n",
1741
+ " .dataframe tbody tr th {\n",
1742
+ " vertical-align: top;\n",
1743
+ " }\n",
1744
+ "\n",
1745
+ " .dataframe thead th {\n",
1746
+ " text-align: right;\n",
1747
+ " }\n",
1748
+ "</style>\n",
1749
+ "<table border=\"1\" class=\"dataframe\">\n",
1750
+ " <thead>\n",
1751
+ " <tr style=\"text-align: right;\">\n",
1752
+ " <th></th>\n",
1753
+ " <th>id</th>\n",
1754
+ " <th>image_id</th>\n",
1755
+ " <th>image_file_sha1</th>\n",
1756
+ " <th>image_file_name</th>\n",
1757
+ " <th>texture_type</th>\n",
1758
+ " <th>texture_family</th>\n",
1759
+ " <th>sample</th>\n",
1760
+ " <th>resolution</th>\n",
1761
+ " <th>image_id_without_aperture</th>\n",
1762
+ " </tr>\n",
1763
+ " </thead>\n",
1764
+ " <tbody>\n",
1765
+ " <tr>\n",
1766
+ " <th>0</th>\n",
1767
+ " <td>133027</td>\n",
1768
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
1769
+ " <td>c3a633a13e736394f213ddf44bf124fe80cabe07</td>\n",
1770
+ " <td>tex-320x320-im336-smp9.png</td>\n",
1771
+ " <td>texture</td>\n",
1772
+ " <td>336</td>\n",
1773
+ " <td>9</td>\n",
1774
+ " <td>320x320</td>\n",
1775
+ " <td>fac76a1d313ee9ea0ffc22da1064f780c2ed81ad</td>\n",
1776
+ " </tr>\n",
1777
+ " <tr>\n",
1778
+ " <th>1</th>\n",
1779
+ " <td>133028</td>\n",
1780
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
1781
+ " <td>b67c9f5930555d4b772a803f886fa46d3221f923</td>\n",
1782
+ " <td>noise-320x320-im52-smp1.png</td>\n",
1783
+ " <td>noise</td>\n",
1784
+ " <td>52</td>\n",
1785
+ " <td>1</td>\n",
1786
+ " <td>320x320</td>\n",
1787
+ " <td>8d88f93e77fbc48666eea18ef4061de9df2b4479</td>\n",
1788
+ " </tr>\n",
1789
+ " <tr>\n",
1790
+ " <th>2</th>\n",
1791
+ " <td>133029</td>\n",
1792
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
1793
+ " <td>0f3e6b96136fb6f86fdd70dacff2d5c71979329f</td>\n",
1794
+ " <td>tex-320x320-im56-smp11.png</td>\n",
1795
+ " <td>texture</td>\n",
1796
+ " <td>56</td>\n",
1797
+ " <td>11</td>\n",
1798
+ " <td>320x320</td>\n",
1799
+ " <td>8994366744469afbb1eb3607abd56b40d6b66a35</td>\n",
1800
+ " </tr>\n",
1801
+ " <tr>\n",
1802
+ " <th>3</th>\n",
1803
+ " <td>133030</td>\n",
1804
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
1805
+ " <td>ba9b8a0de15c75c91c392e3bccb3e66315b6526c</td>\n",
1806
+ " <td>tex-320x320-im13-smp7.png</td>\n",
1807
+ " <td>texture</td>\n",
1808
+ " <td>13</td>\n",
1809
+ " <td>7</td>\n",
1810
+ " <td>320x320</td>\n",
1811
+ " <td>cb83632f4e5e430dfbd383217107f009669f4e22</td>\n",
1812
+ " </tr>\n",
1813
+ " <tr>\n",
1814
+ " <th>4</th>\n",
1815
+ " <td>133031</td>\n",
1816
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
1817
+ " <td>2fbd82d8c92189450513fbc15acbd5ac2b76dbc9</td>\n",
1818
+ " <td>tex-320x320-im99-smp8.png</td>\n",
1819
+ " <td>texture</td>\n",
1820
+ " <td>99</td>\n",
1821
+ " <td>8</td>\n",
1822
+ " <td>320x320</td>\n",
1823
+ " <td>759dc176abaf240eb172ec69ea5db89ae3f05c6b</td>\n",
1824
+ " </tr>\n",
1825
+ " <tr>\n",
1826
+ " <th>...</th>\n",
1827
+ " <td>...</td>\n",
1828
+ " <td>...</td>\n",
1829
+ " <td>...</td>\n",
1830
+ " <td>...</td>\n",
1831
+ " <td>...</td>\n",
1832
+ " <td>...</td>\n",
1833
+ " <td>...</td>\n",
1834
+ " <td>...</td>\n",
1835
+ " <td>...</td>\n",
1836
+ " </tr>\n",
1837
+ " <tr>\n",
1838
+ " <th>445</th>\n",
1839
+ " <td>133472</td>\n",
1840
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
1841
+ " <td>b429ff650a6f868b5124cc04c6d79ec69fb35c73</td>\n",
1842
+ " <td>tex-320x320-im48-smp6.png</td>\n",
1843
+ " <td>texture</td>\n",
1844
+ " <td>48</td>\n",
1845
+ " <td>6</td>\n",
1846
+ " <td>320x320</td>\n",
1847
+ " <td>844305832982a3f3482d2fbf1e36edaeb1528387</td>\n",
1848
+ " </tr>\n",
1849
+ " <tr>\n",
1850
+ " <th>446</th>\n",
1851
+ " <td>133473</td>\n",
1852
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
1853
+ " <td>d01f48d87a4b49f50c021c612da9d3180488f6ba</td>\n",
1854
+ " <td>tex-320x320-im30-smp10.png</td>\n",
1855
+ " <td>texture</td>\n",
1856
+ " <td>30</td>\n",
1857
+ " <td>10</td>\n",
1858
+ " <td>320x320</td>\n",
1859
+ " <td>54f31d7e051b8c28164ba0253fd21b619d817e3d</td>\n",
1860
+ " </tr>\n",
1861
+ " <tr>\n",
1862
+ " <th>447</th>\n",
1863
+ " <td>133474</td>\n",
1864
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
1865
+ " <td>f5a78410833f2c60739751fc3ac21478e0b92aaf</td>\n",
1866
+ " <td>tex-320x320-im336-smp4.png</td>\n",
1867
+ " <td>texture</td>\n",
1868
+ " <td>336</td>\n",
1869
+ " <td>4</td>\n",
1870
+ " <td>320x320</td>\n",
1871
+ " <td>90dc23ebd6e1b99e7c18ab24a1a301043cf1e914</td>\n",
1872
+ " </tr>\n",
1873
+ " <tr>\n",
1874
+ " <th>448</th>\n",
1875
+ " <td>133475</td>\n",
1876
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
1877
+ " <td>c39cafa818aa8aab7ad6b9ae7178ae7433bb811a</td>\n",
1878
+ " <td>tex-320x320-im23-smp2.png</td>\n",
1879
+ " <td>texture</td>\n",
1880
+ " <td>23</td>\n",
1881
+ " <td>2</td>\n",
1882
+ " <td>320x320</td>\n",
1883
+ " <td>8a7247eb6e8094a28c29f279ef8f8756aeb82ed9</td>\n",
1884
+ " </tr>\n",
1885
+ " <tr>\n",
1886
+ " <th>449</th>\n",
1887
+ " <td>133476</td>\n",
1888
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
1889
+ " <td>de7570acd85c2ce391b45b1386b944a2b09be6c4</td>\n",
1890
+ " <td>noise-320x320-im23-smp14.png</td>\n",
1891
+ " <td>noise</td>\n",
1892
+ " <td>23</td>\n",
1893
+ " <td>14</td>\n",
1894
+ " <td>320x320</td>\n",
1895
+ " <td>3b49c35e006202b74459449be448b980a887345f</td>\n",
1896
+ " </tr>\n",
1897
+ " </tbody>\n",
1898
+ "</table>\n",
1899
+ "<p>450 rows × 9 columns</p>\n",
1900
+ "</div>"
1901
+ ]
1902
+ },
1903
+ "metadata": {}
1904
+ }
1905
+ ],
1906
+ "execution_count": 11,
1907
+ "metadata": {
1908
+ "collapsed": false,
1909
+ "jupyter": {
1910
+ "source_hidden": false,
1911
+ "outputs_hidden": false
1912
+ },
1913
+ "nteract": {
1914
+ "transient": {
1915
+ "deleting": false
1916
+ }
1917
+ },
1918
+ "execution": {
1919
+ "iopub.status.busy": "2020-02-27T17:18:29.343Z",
1920
+ "iopub.execute_input": "2020-02-27T17:18:29.349Z",
1921
+ "iopub.status.idle": "2020-02-27T17:18:29.359Z",
1922
+ "shell.execute_reply": "2020-02-27T17:18:29.363Z"
1923
+ }
1924
+ }
1925
+ },
1926
+ {
1927
+ "cell_type": "code",
1928
+ "source": [
1929
+ "list_assemblies()"
1930
+ ],
1931
+ "outputs": [
1932
+ {
1933
+ "output_type": "execute_result",
1934
+ "execution_count": 3,
1935
+ "data": {
1936
+ "text/plain": [
1937
+ "['dicarlo.Majaj2015',\n",
1938
+ " 'gallant.David2004',\n",
1939
+ " 'dicarlo.Majaj2015.temporal',\n",
1940
+ " 'tolias.Cadena2017',\n",
1941
+ " 'movshon.FreemanZiemba2013.noaperture',\n",
1942
+ " 'dicarlo.Rajalingham2018.public',\n",
1943
+ " 'dicarlo.Rajalingham2018.private',\n",
1944
+ " 'dicarlo.Kar2019',\n",
1945
+ " 'dicarlo.Majaj2015.temporal-10ms',\n",
1946
+ " 'dicarlo.Kar2018hvm',\n",
1947
+ " 'dicarlo.Kar2018cocogray',\n",
1948
+ " 'dicarlo.Majaj2015.public',\n",
1949
+ " 'dicarlo.Majaj2015.private',\n",
1950
+ " 'dicarlo.Majaj2015.temporal.public',\n",
1951
+ " 'dicarlo.Majaj2015.temporal.private',\n",
1952
+ " 'movshon.FreemanZiemba2013.noaperture.public',\n",
1953
+ " 'movshon.FreemanZiemba2013.noaperture.private',\n",
1954
+ " 'klab.Zhang2018search_obj_array',\n",
1955
+ " 'movshon.FreemanZiemba2013.public',\n",
1956
+ " 'movshon.FreemanZiemba2013.private',\n",
1957
+ " 'movshon.FreemanZiemba2013']"
1958
+ ]
1959
+ },
1960
+ "metadata": {}
1961
+ }
1962
+ ],
1963
+ "execution_count": 3,
1964
+ "metadata": {
1965
+ "collapsed": false,
1966
+ "jupyter": {
1967
+ "source_hidden": false,
1968
+ "outputs_hidden": false
1969
+ },
1970
+ "nteract": {
1971
+ "transient": {
1972
+ "deleting": false
1973
+ }
1974
+ },
1975
+ "execution": {
1976
+ "iopub.status.busy": "2020-02-27T19:31:32.450Z",
1977
+ "iopub.execute_input": "2020-02-27T19:31:32.455Z",
1978
+ "iopub.status.idle": "2020-02-27T19:31:32.471Z",
1979
+ "shell.execute_reply": "2020-02-27T19:31:32.476Z"
1980
+ }
1981
+ }
1982
+ },
1983
+ {
1984
+ "cell_type": "code",
1985
+ "source": [
1986
+ "assy_pub_new = get_assembly(\"movshon.FreemanZiemba2013.public\")\n",
1987
+ "assy_pub_new"
1988
+ ],
1989
+ "outputs": [
1990
+ {
1991
+ "output_type": "stream",
1992
+ "name": "stderr",
1993
+ "text": [
1994
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataarray.py:217: FutureWarning: The Panel class is removed from pandas. Accessing it from the top-level namespace will also be removed in the next version\n",
1995
+ " elif isinstance(data, pd.Panel):\n",
1996
+ "/Users/jjpr/dev/brainio_base/brainio_base/assemblies.py:213: FutureWarning: The inplace argument has been deprecated and will be removed in xarray 0.12.0.\n",
1997
+ " xr_data.set_index(append=True, inplace=True, **coords_d)\n",
1998
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataset.py:167: FutureWarning: the 'labels' keyword is deprecated, use 'codes' instead\n",
1999
+ " idx = pd.MultiIndex(labels=labels, levels=levels, names=names)\n"
2000
+ ]
2001
+ },
2002
+ {
2003
+ "output_type": "execute_result",
2004
+ "execution_count": 4,
2005
+ "data": {
2006
+ "text/plain": [
2007
+ "<xarray.NeuronRecordingAssembly 'movshon.FreemanZiemba2013.public' (neuroid: 205, time_bin: 300, presentation: 2700)>\n",
2008
+ "array([[[0., 0., ..., 0., 0.],\n",
2009
+ " [0., 0., ..., 0., 0.],\n",
2010
+ " ...,\n",
2011
+ " [0., 0., ..., 0., 0.],\n",
2012
+ " [0., 0., ..., 0., 0.]],\n",
2013
+ "\n",
2014
+ " [[0., 0., ..., 0., 0.],\n",
2015
+ " [0., 0., ..., 0., 1.],\n",
2016
+ " ...,\n",
2017
+ " [0., 0., ..., 0., 0.],\n",
2018
+ " [0., 0., ..., 0., 0.]],\n",
2019
+ "\n",
2020
+ " ...,\n",
2021
+ "\n",
2022
+ " [[0., 0., ..., 0., 0.],\n",
2023
+ " [0., 0., ..., 0., 0.],\n",
2024
+ " ...,\n",
2025
+ " [0., 0., ..., 0., 0.],\n",
2026
+ " [0., 0., ..., 0., 0.]],\n",
2027
+ "\n",
2028
+ " [[0., 0., ..., 0., 0.],\n",
2029
+ " [0., 0., ..., 0., 0.],\n",
2030
+ " ...,\n",
2031
+ " [0., 0., ..., 1., 0.],\n",
2032
+ " [0., 0., ..., 0., 0.]]])\n",
2033
+ "Coordinates:\n",
2034
+ " * neuroid (neuroid) MultiIndex\n",
2035
+ " - neuroid_id (neuroid) int64 1 2 3 4 5 6 ... 25 26 27 28 29 30\n",
2036
+ " - region (neuroid) object 'V1' 'V1' 'V1' ... 'V1' 'V1'\n",
2037
+ " * time_bin (time_bin) MultiIndex\n",
2038
+ " - time_bin_start (time_bin) int64 0 1 2 3 4 5 ... 25 26 27 28 29\n",
2039
+ " - time_bin_end (time_bin) int64 1 2 3 4 5 6 ... 26 27 28 29 30\n",
2040
+ " * presentation (presentation) MultiIndex\n",
2041
+ " - repetition (presentation) int64 0 1 2 3 4 5 ... 4 5 6 7 8 9\n",
2042
+ " - image_id (presentation) object 'dfa618e0503a4251582450e88daf0c21a5abed95' ... '4ba2605a32ef862bf6f1fc4032aa25a4381f0114'\n",
2043
+ " - id (presentation) int64 133388 133388 ... 133453\n",
2044
+ " - image_file_sha1 (presentation) object 'dfa618e0503a4251582450e88daf0c21a5abed95' ... '4ba2605a32ef862bf6f1fc4032aa25a4381f0114'\n",
2045
+ " - image_file_name (presentation) object 'noise-320x320-im327-smp2.png' ... 'noise-320x320-im327-smp6.png'\n",
2046
+ " - texture_type (presentation) object 'noise' 'noise' ... 'noise'\n",
2047
+ " - texture_family (presentation) int64 327 327 327 ... 327 327 327\n",
2048
+ " - sample (presentation) int64 2 2 2 2 2 2 ... 6 6 6 6 6 6\n",
2049
+ " - resolution (presentation) object '320x320' ... '320x320'\n",
2050
+ " - image_id_without_aperture (presentation) object '0e8609dc2778a848d7ed8b355d9331160ac2974d' ... '888526f132a230245a15ef3aa10ca05cac246574'\n",
2051
+ "Attributes:\n",
2052
+ " stimulus_set_name: movshon.FreemanZiemba2013.aperture-public\n",
2053
+ " stimulus_set: id image_id..."
2054
+ ]
2055
+ },
2056
+ "metadata": {}
2057
+ }
2058
+ ],
2059
+ "execution_count": 4,
2060
+ "metadata": {
2061
+ "collapsed": false,
2062
+ "jupyter": {
2063
+ "source_hidden": false,
2064
+ "outputs_hidden": false
2065
+ },
2066
+ "nteract": {
2067
+ "transient": {
2068
+ "deleting": false
2069
+ }
2070
+ },
2071
+ "execution": {
2072
+ "iopub.status.busy": "2020-02-27T19:32:45.348Z",
2073
+ "iopub.execute_input": "2020-02-27T19:32:45.355Z",
2074
+ "iopub.status.idle": "2020-02-27T19:32:50.327Z",
2075
+ "shell.execute_reply": "2020-02-27T19:32:50.343Z"
2076
+ }
2077
+ }
2078
+ },
2079
+ {
2080
+ "cell_type": "code",
2081
+ "source": [
2082
+ "assy_pri_new = get_assembly(\"movshon.FreemanZiemba2013.private\")\n",
2083
+ "assy_pri_new"
2084
+ ],
2085
+ "outputs": [
2086
+ {
2087
+ "output_type": "stream",
2088
+ "name": "stderr",
2089
+ "text": [
2090
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataarray.py:217: FutureWarning: The Panel class is removed from pandas. Accessing it from the top-level namespace will also be removed in the next version\n",
2091
+ " elif isinstance(data, pd.Panel):\n",
2092
+ "/Users/jjpr/dev/brainio_base/brainio_base/assemblies.py:213: FutureWarning: The inplace argument has been deprecated and will be removed in xarray 0.12.0.\n",
2093
+ " xr_data.set_index(append=True, inplace=True, **coords_d)\n",
2094
+ "/Users/jjpr/anaconda/envs/brainio_contrib/lib/python3.7/site-packages/xarray/core/dataset.py:167: FutureWarning: the 'labels' keyword is deprecated, use 'codes' instead\n",
2095
+ " idx = pd.MultiIndex(labels=labels, levels=levels, names=names)\n"
2096
+ ]
2097
+ },
2098
+ {
2099
+ "output_type": "execute_result",
2100
+ "execution_count": 5,
2101
+ "data": {
2102
+ "text/plain": [
2103
+ "<xarray.NeuronRecordingAssembly 'movshon.FreemanZiemba2013.private' (neuroid: 205, time_bin: 300, presentation: 6300)>\n",
2104
+ "array([[[0., 0., ..., 0., 0.],\n",
2105
+ " [0., 0., ..., 0., 0.],\n",
2106
+ " ...,\n",
2107
+ " [0., 0., ..., 0., 0.],\n",
2108
+ " [0., 0., ..., 0., 0.]],\n",
2109
+ "\n",
2110
+ " [[0., 0., ..., 0., 0.],\n",
2111
+ " [0., 0., ..., 0., 0.],\n",
2112
+ " ...,\n",
2113
+ " [0., 0., ..., 0., 0.],\n",
2114
+ " [0., 0., ..., 0., 0.]],\n",
2115
+ "\n",
2116
+ " ...,\n",
2117
+ "\n",
2118
+ " [[0., 0., ..., 0., 0.],\n",
2119
+ " [0., 0., ..., 0., 0.],\n",
2120
+ " ...,\n",
2121
+ " [0., 0., ..., 0., 0.],\n",
2122
+ " [0., 0., ..., 0., 0.]],\n",
2123
+ "\n",
2124
+ " [[0., 0., ..., 0., 0.],\n",
2125
+ " [0., 0., ..., 0., 0.],\n",
2126
+ " ...,\n",
2127
+ " [0., 0., ..., 0., 0.],\n",
2128
+ " [0., 0., ..., 0., 0.]]])\n",
2129
+ "Coordinates:\n",
2130
+ " * neuroid (neuroid) MultiIndex\n",
2131
+ " - neuroid_id (neuroid) int64 1 2 3 4 5 6 ... 25 26 27 28 29 30\n",
2132
+ " - region (neuroid) object 'V1' 'V1' 'V1' ... 'V1' 'V1'\n",
2133
+ " * time_bin (time_bin) MultiIndex\n",
2134
+ " - time_bin_start (time_bin) int64 0 1 2 3 4 5 ... 25 26 27 28 29\n",
2135
+ " - time_bin_end (time_bin) int64 1 2 3 4 5 6 ... 26 27 28 29 30\n",
2136
+ " * presentation (presentation) MultiIndex\n",
2137
+ " - repetition (presentation) int64 0 1 2 3 4 5 ... 4 5 6 7 8 9\n",
2138
+ " - image_id (presentation) object 'bfd26c127f8ba028cc95cdc95f00c45c8884b365' ... '26747eaff286d025cfff822d7c8cdd6b16b7198b'\n",
2139
+ " - id (presentation) int64 133415 133415 ... 133378\n",
2140
+ " - image_file_sha1 (presentation) object 'bfd26c127f8ba028cc95cdc95f00c45c8884b365' ... '26747eaff286d025cfff822d7c8cdd6b16b7198b'\n",
2141
+ " - image_file_name (presentation) object 'noise-320x320-im327-smp1.png' ... 'noise-320x320-im327-smp3.png'\n",
2142
+ " - texture_type (presentation) object 'noise' 'noise' ... 'noise'\n",
2143
+ " - texture_family (presentation) int64 327 327 327 ... 327 327 327\n",
2144
+ " - sample (presentation) int64 1 1 1 1 1 1 ... 3 3 3 3 3 3\n",
2145
+ " - resolution (presentation) object '320x320' ... '320x320'\n",
2146
+ " - image_id_without_aperture (presentation) object 'd8a999326b756317c7d5cf513d0cfe8eb2d7c62b' ... 'e6910fe0b53858d294b2890024d355d92606a1f4'\n",
2147
+ "Attributes:\n",
2148
+ " stimulus_set_name: movshon.FreemanZiemba2013.aperture-private\n",
2149
+ " stimulus_set: id image_id..."
2150
+ ]
2151
+ },
2152
+ "metadata": {}
2153
+ }
2154
+ ],
2155
+ "execution_count": 5,
2156
+ "metadata": {
2157
+ "collapsed": false,
2158
+ "jupyter": {
2159
+ "source_hidden": false,
2160
+ "outputs_hidden": false
2161
+ },
2162
+ "nteract": {
2163
+ "transient": {
2164
+ "deleting": false
2165
+ }
2166
+ },
2167
+ "execution": {
2168
+ "iopub.status.busy": "2020-02-27T19:34:21.669Z",
2169
+ "iopub.execute_input": "2020-02-27T19:34:21.676Z",
2170
+ "iopub.status.idle": "2020-02-27T19:34:30.244Z",
2171
+ "shell.execute_reply": "2020-02-27T19:34:30.261Z"
2172
+ }
2173
+ }
2174
+ },
2175
+ {
2176
+ "cell_type": "code",
2177
+ "source": [],
2178
+ "outputs": [],
2179
+ "execution_count": null,
2180
+ "metadata": {
2181
+ "collapsed": false,
2182
+ "jupyter": {
2183
+ "source_hidden": false,
2184
+ "outputs_hidden": false
2185
+ },
2186
+ "nteract": {
2187
+ "transient": {
2188
+ "deleting": false
2189
+ }
2190
+ }
2191
+ }
2192
+ }
2193
+ ],
2194
+ "metadata": {
2195
+ "kernel_info": {
2196
+ "name": "brainio_contrib"
2197
+ },
2198
+ "language_info": {
2199
+ "name": "python",
2200
+ "version": "3.7.2",
2201
+ "mimetype": "text/x-python",
2202
+ "codemirror_mode": {
2203
+ "name": "ipython",
2204
+ "version": 3
2205
+ },
2206
+ "pygments_lexer": "ipython3",
2207
+ "nbconvert_exporter": "python",
2208
+ "file_extension": ".py"
2209
+ },
2210
+ "kernelspec": {
2211
+ "argv": [
2212
+ "/Users/jjpr/anaconda/envs/brainio_contrib/bin/python",
2213
+ "-m",
2214
+ "ipykernel_launcher",
2215
+ "-f",
2216
+ "{connection_file}"
2217
+ ],
2218
+ "display_name": "brainio_contrib",
2219
+ "language": "python",
2220
+ "name": "brainio_contrib"
2221
+ },
2222
+ "nteract": {
2223
+ "version": "0.22.0"
2224
+ }
2225
+ },
2226
+ "nbformat": 4,
2227
+ "nbformat_minor": 0
2228
+ }
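
For reference, the cells diffed above exercise the BrainIO data-access helpers (list_assemblies, get_stimulus_set, get_assembly) against the movshon.FreemanZiemba2013 packaging: the stimulus set is a pandas-DataFrame-like table (450 rows × 9 columns for the aperture set), and each assembly is an xarray-based NeuronRecordingAssembly with (neuroid, time_bin, presentation) dimensions and the stimulus set attached as an attribute. The sketch below is a minimal condensation of those calls, not part of the notebook itself; the import path (brainio_collection) is an assumption, since the notebook's import cell falls outside this excerpt, and the commented shapes simply restate what the outputs above report.

    # Minimal sketch of the data-access calls shown in this notebook.
    # Assumption: the helpers are imported from brainio_collection, as was
    # typical for the brainio_contrib environment used by this kernel; the
    # notebook's actual import cell is not visible in this diff.
    from brainio_collection import list_assemblies, get_assembly, get_stimulus_set

    # Assembly identifiers include the FreemanZiemba2013 public/private/aperture variants.
    print(list_assemblies())

    # Stimulus metadata: a pandas-DataFrame-like StimulusSet (450 rows x 9 columns above).
    stim = get_stimulus_set("movshon.FreemanZiemba2013.aperture")
    print(stim[["image_id", "texture_type", "texture_family", "sample"]].head())

    # Neural data: per the outputs above, dims are (neuroid, time_bin, presentation),
    # e.g. 205 x 300 x 2700 for the public split and 205 x 300 x 6300 for the private split.
    assy = get_assembly("movshon.FreemanZiemba2013.public")
    print(assy.dims)
    print(assy.attrs["stimulus_set_name"])   # movshon.FreemanZiemba2013.aperture-public
    print(assy.stimulus_set.shape)           # same metadata table as get_stimulus_set(...)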