sinabs 3.0.4.dev2__tar.gz → 3.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (329)
  1. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/.github/workflows/ci-pipeline.yml +15 -8
  2. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/AUTHORS +1 -0
  3. sinabs-3.1.0/PKG-INFO +107 -0
  4. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/about/differences.md +2 -2
  5. sinabs-3.0.4.dev2/ChangeLog → sinabs-3.1.0/docs/about/release_notes.md +80 -83
  6. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/contact.md +1 -1
  7. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/getting_started/quickstart.ipynb +2 -8
  8. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/how_tos/activations.ipynb +3 -1
  9. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/how_tos/custom_hooks.ipynb +6 -2
  10. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/how_tos/synops_loss_ann.ipynb +27 -11
  11. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/how_tos/synops_loss_snn.ipynb +21 -13
  12. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/available_network_arch.md +140 -11
  13. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/device_management.md +1 -1
  14. sinabs-3.1.0/docs/speck/faqs/imgs/network-with-merge-and-split.png +0 -0
  15. sinabs-3.1.0/docs/speck/faqs/imgs/two-independent-networks.png +0 -0
  16. sinabs-3.1.0/docs/speck/faqs/imgs/two-networks-merging-output.png +0 -0
  17. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/tips_for_training.md +3 -3
  18. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/leak_neuron.ipynb +10 -7
  19. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/nmnist_quick_start.ipynb +6 -6
  20. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/play_with_speck_dvs.ipynb +71 -57
  21. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/power_monitoring.ipynb +51 -38
  22. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/using_readout_layer.ipynb +74 -46
  23. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/visualize_speck_dvs_input.ipynb +23 -21
  24. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/notebooks/visualize_spike_count.ipynb +62 -35
  25. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/overview.md +7 -7
  26. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/visualizer.md +2 -2
  27. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/LeNet_5_EngChinese.ipynb +1 -1
  28. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/bptt.ipynb +0 -1
  29. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/nir_to_speck.ipynb +29 -21
  30. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/nmnist.ipynb +30 -20
  31. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/weight_transfer_mnist.ipynb +5 -1
  32. sinabs-3.1.0/examples/dynapcnn_network/snn_DVSLayer_given.ipynb +337 -0
  33. sinabs-3.1.0/examples/dynapcnn_network/snn_DVSLayer_given_followed_by_pool.ipynb +341 -0
  34. sinabs-3.1.0/examples/dynapcnn_network/snn_deployment.ipynb +882 -0
  35. sinabs-3.1.0/examples/dynapcnn_network/snn_need_create_DVSLayer.ipynb +333 -0
  36. sinabs-3.1.0/examples/dynapcnn_network/snn_no_DVSLayer.ipynb +324 -0
  37. sinabs-3.1.0/examples/dynapcnn_network/snn_with_batchnorm.ipynb +300 -0
  38. sinabs-3.1.0/examples/dynapcnn_network/snn_with_multiple_batchnorm.ipynb +380 -0
  39. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/pull_request_template.md +1 -1
  40. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/requirements.txt +1 -1
  41. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/activation/reset_mechanism.py +3 -3
  42. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/activation/surrogate_gradient_fn.py +4 -4
  43. sinabs-3.1.0/sinabs/backend/dynapcnn/__init__.py +6 -0
  44. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/chip_factory.py +33 -61
  45. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/chips/dynapcnn.py +182 -86
  46. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/chips/speck2e.py +6 -5
  47. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/chips/speck2f.py +6 -5
  48. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/config_builder.py +39 -59
  49. sinabs-3.1.0/sinabs/backend/dynapcnn/connectivity_specs.py +48 -0
  50. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/discretize.py +91 -156
  51. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/dvs_layer.py +59 -101
  52. sinabs-3.1.0/sinabs/backend/dynapcnn/dynapcnn_layer.py +270 -0
  53. sinabs-3.1.0/sinabs/backend/dynapcnn/dynapcnn_layer_utils.py +335 -0
  54. sinabs-3.1.0/sinabs/backend/dynapcnn/dynapcnn_network.py +784 -0
  55. sinabs-3.1.0/sinabs/backend/dynapcnn/dynapcnnnetwork_module.py +370 -0
  56. sinabs-3.1.0/sinabs/backend/dynapcnn/exceptions.py +138 -0
  57. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/io.py +51 -91
  58. sinabs-3.1.0/sinabs/backend/dynapcnn/mapping.py +231 -0
  59. sinabs-3.1.0/sinabs/backend/dynapcnn/nir_graph_extractor.py +877 -0
  60. sinabs-3.1.0/sinabs/backend/dynapcnn/sinabs_edges_handler.py +1024 -0
  61. sinabs-3.1.0/sinabs/backend/dynapcnn/utils.py +294 -0
  62. sinabs-3.1.0/sinabs/backend/dynapcnn/weight_rescaling_methods.py +53 -0
  63. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/conversion.py +2 -2
  64. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/from_torch.py +23 -1
  65. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/hooks.py +38 -41
  66. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/alif.py +16 -16
  67. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/crop2d.py +2 -2
  68. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/exp_leak.py +1 -1
  69. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/iaf.py +11 -11
  70. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/lif.py +9 -9
  71. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/neuromorphic_relu.py +9 -8
  72. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/pool2d.py +5 -5
  73. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/quantize.py +1 -1
  74. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/stateful_layer.py +10 -7
  75. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/to_spike.py +9 -9
  76. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/network.py +14 -12
  77. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/synopcounter.py +10 -7
  78. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/utils.py +155 -7
  79. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/validate_memory_speck.py +0 -5
  80. sinabs-3.1.0/sinabs.egg-info/PKG-INFO +107 -0
  81. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs.egg-info/SOURCES.txt +39 -3
  82. sinabs-3.1.0/sinabs.egg-info/pbr.json +1 -0
  83. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs.egg-info/requires.txt +1 -0
  84. sinabs-3.1.0/tests/__init__.py +0 -0
  85. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_copy.py +0 -2
  86. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/hw_utils.py +3 -1
  87. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_auto_mapping.py +1 -1
  88. sinabs-3.1.0/tests/test_dynapcnn/test_compatible_layer_build.py +41 -0
  89. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_config_making.py +18 -7
  90. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_device_movement.py +0 -1
  91. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_discover_device.py +0 -13
  92. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_doorbell.py +24 -5
  93. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_dvs_input.py +51 -27
  94. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_dvs_layer.py +0 -31
  95. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_individual_cases.py +12 -20
  96. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_large_net.py +22 -13
  97. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_learning.py +1 -0
  98. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_monitoring.py +14 -12
  99. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_neuron_leak.py +4 -2
  100. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_single_neuron_hardware.py +4 -3
  101. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_speck2e.py +2 -2
  102. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_visualizer.py +2 -4
  103. sinabs-3.1.0/tests/test_dynapcnnlayer/__init__.py +0 -0
  104. sinabs-3.1.0/tests/test_dynapcnnlayer/conftest_dynapcnnlayer.py +16 -0
  105. sinabs-3.1.0/tests/test_dynapcnnlayer/model_dummy_1.py +194 -0
  106. sinabs-3.1.0/tests/test_dynapcnnlayer/model_dummy_2.py +255 -0
  107. sinabs-3.1.0/tests/test_dynapcnnlayer/model_dummy_3.py +321 -0
  108. sinabs-3.1.0/tests/test_dynapcnnlayer/model_dummy_4.py +228 -0
  109. sinabs-3.1.0/tests/test_dynapcnnlayer/test_dynapcnnlayer.py +69 -0
  110. sinabs-3.1.0/tests/test_dynapcnnnetwork/__init__.py +0 -0
  111. sinabs-3.1.0/tests/test_dynapcnnnetwork/conftest_dynapcnnnetwork.py +32 -0
  112. sinabs-3.1.0/tests/test_dynapcnnnetwork/model_dummy_1.py +118 -0
  113. sinabs-3.1.0/tests/test_dynapcnnnetwork/model_dummy_2.py +161 -0
  114. sinabs-3.1.0/tests/test_dynapcnnnetwork/model_dummy_3.py +188 -0
  115. sinabs-3.1.0/tests/test_dynapcnnnetwork/model_dummy_4.py +186 -0
  116. sinabs-3.1.0/tests/test_dynapcnnnetwork/model_dummy_seq.py +73 -0
  117. sinabs-3.1.0/tests/test_dynapcnnnetwork/test_dynapcnnnetwork.py +60 -0
  118. sinabs-3.1.0/tests/test_dynapcnnnetwork/test_failcases.py +94 -0
  119. sinabs-3.1.0/tests/test_graph_extractor/conftest_graph_extractor.py +19 -0
  120. sinabs-3.1.0/tests/test_graph_extractor/model_dummy_1.py +151 -0
  121. sinabs-3.1.0/tests/test_graph_extractor/model_dummy_2.py +200 -0
  122. sinabs-3.1.0/tests/test_graph_extractor/model_dummy_3.py +235 -0
  123. sinabs-3.1.0/tests/test_graph_extractor/model_dummy_4.py +192 -0
  124. sinabs-3.1.0/tests/test_graph_extractor/test_graph_extractor.py +62 -0
  125. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_hooks.py +6 -7
  126. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_alif.py +1 -1
  127. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_iaf.py +2 -2
  128. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_lif.py +3 -3
  129. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_maxpooling.py +0 -1
  130. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_synops_counter.py +1 -1
  131. sinabs-3.0.4.dev2/PKG-INFO +0 -89
  132. sinabs-3.0.4.dev2/Pipfile +0 -17
  133. sinabs-3.0.4.dev2/Pipfile.lock +0 -411
  134. sinabs-3.0.4.dev2/docs/about/release_notes.md +0 -25
  135. sinabs-3.0.4.dev2/sinabs/backend/dynapcnn/__init__.py +0 -5
  136. sinabs-3.0.4.dev2/sinabs/backend/dynapcnn/dynapcnn_layer.py +0 -204
  137. sinabs-3.0.4.dev2/sinabs/backend/dynapcnn/dynapcnn_network.py +0 -508
  138. sinabs-3.0.4.dev2/sinabs/backend/dynapcnn/exceptions.py +0 -19
  139. sinabs-3.0.4.dev2/sinabs/backend/dynapcnn/mapping.py +0 -195
  140. sinabs-3.0.4.dev2/sinabs/backend/dynapcnn/utils.py +0 -539
  141. sinabs-3.0.4.dev2/sinabs.egg-info/PKG-INFO +0 -89
  142. sinabs-3.0.4.dev2/sinabs.egg-info/pbr.json +0 -1
  143. sinabs-3.0.4.dev2/tests/test_dynapcnn/test_compatible_layer_build.py +0 -197
  144. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/.coveragerc +0 -0
  145. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/.pre-commit-config.yaml +0 -0
  146. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/.readthedocs.yaml +0 -0
  147. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/CITATION.cff +0 -0
  148. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/LICENSE +0 -0
  149. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/README.md +0 -0
  150. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/codecov.yml +0 -0
  151. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/Makefile +0 -0
  152. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/Overview/dataflow_layers.png +0 -0
  153. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/Overview/event_preprocessing_pipeline.png +0 -0
  154. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/Overview/memory_constraints.png +0 -0
  155. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/Overview/sinabs-dynapcnn-role.png +0 -0
  156. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/Overview/speck_dynapcnn.png +0 -0
  157. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/Overview/speck_top_level.png +0 -0
  158. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/devkits_images/dynapcnn_devkit.png +0 -0
  159. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/devkits_images/speck_devkit.png +0 -0
  160. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/devkits_images/speck_module.png +0 -0
  161. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/devkits_images/speck_module_devkit.png +0 -0
  162. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/nmnist_quick_start/dvs_input_flow.png +0 -0
  163. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/nmnist_quick_start/dynapcnn_visualizer.png +0 -0
  164. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/nmnist_quick_start/spike_input_flow.png +0 -0
  165. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/power_monitoring/dynamic_power_samna_graph.png +0 -0
  166. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/power_monitoring/idle_power_samna_graph.png +0 -0
  167. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/power_monitoring/power_plot.png +0 -0
  168. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/sinabs-logo-lowercase-whitebg.png +0 -0
  169. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/sinabs-logo-lowercase.png +0 -0
  170. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/tips_for_training/exceeding_bandwidth.png +0 -0
  171. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/using_readout_layer/handcraft_weights.png +0 -0
  172. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/using_readout_layer/neuron_id_mismatch.png +0 -0
  173. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/using_readout_layer/readout_layer.png +0 -0
  174. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/using_readout_layer/samna_graph.png +0 -0
  175. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/visualize_speck_dvs/samna_graph.png +0 -0
  176. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/visualize_spike_count/samna_graph.png +0 -0
  177. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_static/visualize_spike_count/spike_count.png +0 -0
  178. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_templates/class_activation.rst +0 -0
  179. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/_templates/class_layer.rst +0 -0
  180. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/about/about.rst +0 -0
  181. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/about/contributing.md +0 -0
  182. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/about/info.md +0 -0
  183. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/activation.rst +0 -0
  184. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/api.rst +0 -0
  185. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/from_torch.rst +0 -0
  186. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/hooks.rst +0 -0
  187. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/layers.rst +0 -0
  188. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/network.rst +0 -0
  189. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/nir.rst +0 -0
  190. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/synopcounter.rst +0 -0
  191. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/api/utils.rst +0 -0
  192. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/conf.py +0 -0
  193. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/README.rst +0 -0
  194. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/layers/README.rst +0 -0
  195. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/layers/plot_alif.py +0 -0
  196. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/layers/plot_exp_leaky.py +0 -0
  197. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/layers/plot_iaf.py +0 -0
  198. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/layers/plot_lif.py +0 -0
  199. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/layers/utils.py +0 -0
  200. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/spike_fns/README.rst +0 -0
  201. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/spike_fns/plot_maxspike.py +0 -0
  202. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/spike_fns/plot_multispike.py +0 -0
  203. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/spike_fns/plot_singlespike.py +0 -0
  204. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/surrogate_grad_fns/README.rst +0 -0
  205. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/surrogate_grad_fns/plot_gaussian.py +0 -0
  206. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/surrogate_grad_fns/plot_heaviside.py +0 -0
  207. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/surrogate_grad_fns/plot_multigaussian.py +0 -0
  208. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/surrogate_grad_fns/plot_periodicexponential.py +0 -0
  209. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/gallery/surrogate_grad_fns/plot_singleexponential.py +0 -0
  210. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/getting_started/fundamentals.rst +0 -0
  211. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/getting_started/getting_started.rst +0 -0
  212. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/getting_started/iaf_neuron_model.ipynb +0 -0
  213. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/getting_started/install.rst +0 -0
  214. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/getting_started/python_pyenv_pipenv.rst +0 -0
  215. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/how_tos/how_tos.rst +0 -0
  216. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/index.md +0 -0
  217. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/make.bat +0 -0
  218. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/plugins/plugins.rst +0 -0
  219. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/requirements.txt +0 -0
  220. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/advanced_concepts.rst +0 -0
  221. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/chip_factory.rst +0 -0
  222. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/config_builder.rst +0 -0
  223. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/crop2d.rst +0 -0
  224. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/discretize.rst +0 -0
  225. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/dvs_layer.rst +0 -0
  226. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/dynapcnn.rst +0 -0
  227. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/dynapcnn_layer.rst +0 -0
  228. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/dynapcnn_network.rst +0 -0
  229. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/dynapcnn_visualizer.rst +0 -0
  230. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/exceptions.rst +0 -0
  231. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/flipdims.rst +0 -0
  232. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/io.rst +0 -0
  233. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/mapping.rst +0 -0
  234. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/specksim.rst +0 -0
  235. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/api/dynapcnn/utils.rst +0 -0
  236. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/dangers.md +0 -0
  237. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/add_new_device.md +0 -0
  238. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/available_algorithmic_operation.md +0 -0
  239. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/chip_errata.md +0 -0
  240. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/index.rst +0 -0
  241. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/output_monitoring.md +0 -0
  242. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/faqs/save_hardware_config_as_binary.md +0 -0
  243. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/index.rst +0 -0
  244. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/specksim.md +0 -0
  245. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/the_basics.md +0 -0
  246. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/speck/tutorials.rst +0 -0
  247. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/scnn_mnist.nir +0 -0
  248. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/tutorials.rst +0 -0
  249. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/docs/tutorials/weight_scaling.md +0 -0
  250. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/mnist/dynapcnn_network.py +0 -0
  251. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/mnist/mnist_params.pt +0 -0
  252. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/mnist/specksim_network.py +0 -0
  253. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/dvs_gesture_params.pt +0 -0
  254. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/gesture_viz.py +0 -0
  255. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/01_armroll.png +0 -0
  256. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/02_handclap.png +0 -0
  257. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/03_lefthandclockwise.png +0 -0
  258. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/04_lefthandcounterclockwise.png +0 -0
  259. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/05_lefthandwave.png +0 -0
  260. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/06_righthandwave.png +0 -0
  261. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/07_righthandclockwise.png +0 -0
  262. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/08_righthandcounterclockwise.png +0 -0
  263. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/09_airdrums.png +0 -0
  264. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/10_airguitar.png +0 -0
  265. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/examples/visualizer/icons/11_other.png +0 -0
  266. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/jupyterlab-requirements.txt +0 -0
  267. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/setup.cfg +0 -0
  268. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/setup.py +0 -0
  269. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/__init__.py +0 -0
  270. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/activation/__init__.py +0 -0
  271. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/activation/quantize.py +0 -0
  272. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/activation/spike_generation.py +0 -0
  273. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/__init__.py +0 -0
  274. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/chips/__init__.py +0 -0
  275. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/crop2d.py +0 -0
  276. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/dynapcnn_visualizer.py +0 -0
  277. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/flipdims.py +0 -0
  278. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/backend/dynapcnn/specksim.py +0 -0
  279. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/cnnutils.py +0 -0
  280. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/__init__.py +0 -0
  281. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/channel_shift.py +0 -0
  282. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/functional/__init__.py +0 -0
  283. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/functional/alif.py +0 -0
  284. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/functional/lif.py +0 -0
  285. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/merge.py +0 -0
  286. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/layers/reshape.py +0 -0
  287. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs/nir.py +0 -0
  288. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs.egg-info/dependency_links.txt +0 -0
  289. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs.egg-info/not-zip-safe +0 -0
  290. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/sinabs.egg-info/top_level.txt +0 -0
  291. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/inputs_and_results/hooks/conv_input.pth +0 -0
  292. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/inputs_and_results/hooks/conv_layer_synops.pth +0 -0
  293. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/inputs_and_results/hooks/firing_rates.pth +0 -0
  294. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/inputs_and_results/hooks/firing_rates_per_neuron.pth +0 -0
  295. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/inputs_and_results/hooks/input_diffs.pth +0 -0
  296. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/inputs_and_results/hooks/model_synops.pth +0 -0
  297. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/mnist_params.pt +0 -0
  298. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/models/README.txt +0 -0
  299. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/models/synop_hook_model.pth +0 -0
  300. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/requirements.txt +0 -0
  301. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_activations.py +0 -0
  302. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_batch_mismatch.py +0 -0
  303. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_batch_size_update.py +0 -0
  304. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_conversion.py +0 -0
  305. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/custom_jit_filters.py +0 -0
  306. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_device_name_mapping.py +0 -0
  307. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_discretized.py +0 -0
  308. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_dynapcnn/test_event_conversion.py +0 -0
  309. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_from_model.py +0 -0
  310. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_channelshift.py +0 -0
  311. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_crop2d.py +0 -0
  312. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_exp_leak.py +0 -0
  313. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_img2spk.py +0 -0
  314. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_merge.py +0 -0
  315. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_neuromorphic_relu.py +0 -0
  316. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_reshaping.py +0 -0
  317. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_sig2spk.py +0 -0
  318. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_layers/test_stateful_layer.py +0 -0
  319. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_network_class.py +0 -0
  320. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_nir.py +0 -0
  321. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_normalize_weights.py +0 -0
  322. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_onnx.py +0 -0
  323. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_quantize.py +0 -0
  324. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_specksim/test_specksim_bindings.py +0 -0
  325. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_specksim/test_specksim_conversion.py +0 -0
  326. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_specksim/test_specksim_network.py +0 -0
  327. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_surrogate_gradients.py +0 -0
  328. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/test_utils.py +0 -0
  329. {sinabs-3.0.4.dev2 → sinabs-3.1.0}/tests/weights/README.txt +0 -0
@@ -16,8 +16,15 @@ jobs:
  fail-fast: false
  matrix:
  os: [ubuntu-latest]
- python-version: ["3.10"]
- torch-version: ["2.0.0",]
+ python-version: ["3.10", "3.11", "3.12"]
+ torch-version: ["2.0.0", "2.8.0"]
+ exclude:
+ - python-version: "3.10"
+ torch-version: "2.8.0"
+ - python-version: "3.11"
+ torch-version: "2.8.0"
+ - python-version: "3.12"
+ torch-version: "2.0.0"
  steps:
  - uses: actions/checkout@v4
  - name: Set up Python ${{ matrix.python-version }}
@@ -41,10 +48,10 @@ jobs:
  runs-on: ubuntu-latest
  steps:
  - uses: actions/checkout@v4
- - name: Setup Python 3.8
+ - name: Setup Python 3.12
  uses: actions/setup-python@v5
  with:
- python-version: 3.8
+ python-version: 3.12
  - name: Generate coverage report
  run: |
  sudo apt-get update && sudo apt-get -y install libglu1-mesa
@@ -63,10 +70,10 @@ jobs:
  - uses: actions/checkout@v4
  with:
  fetch-depth: 0
- - name: Setup Python 3.10
+ - name: Setup Python 3.12
  uses: actions/setup-python@v5
  with:
- python-version: "3.10.18"
+ python-version: "3.12"
  - name: Install dependencies
  run: |
  sudo apt-get update && sudo apt-get -y install libglu1-mesa
@@ -85,10 +92,10 @@ jobs:
  - uses: actions/checkout@v4
  with:
  fetch-depth: 0
- - name: Set up Python 3.8
+ - name: Set up Python 3.12
  uses: actions/setup-python@v5
  with:
- python-version: 3.8
+ python-version: 3.12
  - name: Install build packages
  run: pip install wheel pbr setuptools
  - name: Build a binary wheel and a source tarball
@@ -22,6 +22,7 @@ Vanessa Leite <vanessa.leite@synsense.ai>
  Vanessa Leite <vanessa@ini.uzh.ch>
  Vanessa Leite <vanessinhaleite.cp.ufma@gmail.com>
  Vanessa Leite <vrcleite@gmail.com>
+ Willian Girao <williansoaresgirao@gmail.com>
  Willian-Girao <williansoaresgirao@gmail.com>
  Yalun Hu <yalun.hu@synsense.ai>
  Yalun_Hu <yalun.hu@synsense.ai>
sinabs-3.1.0/PKG-INFO ADDED
@@ -0,0 +1,107 @@
+ Metadata-Version: 2.4
+ Name: sinabs
+ Version: 3.1.0
+ Summary: SynSense Spiking Neural Network simulator for deep neural networks (DNNs).
+ Author: SynSense (formerly AiCTX)
+ Author-email: support@synsense.ai
+ License: Apache 2.0
+ Project-URL: Source code, https://github.com/synsense/sinabs
+ Project-URL: Documentation, https://readthedocs.org/projects/sinabs/
+ Keywords: spiking neural networks,machine learning,SNN,DYNAPCNN,Speck
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Environment :: Console
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Programming Language :: Python :: 3
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ License-File: AUTHORS
+ Requires-Dist: pbr
+ Requires-Dist: numpy
+ Requires-Dist: torch>=1.8
+ Requires-Dist: nir<=1.0.4
+ Requires-Dist: nirtorch
+ Requires-Dist: samna>=0.33
+ Requires-Dist: matplotlib
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: keywords
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: project-url
+ Dynamic: requires-dist
+ Dynamic: summary
+
+ [![PyPI - Package](https://img.shields.io/pypi/v/sinabs.svg)](https://pypi.org/project/sinabs/)
+ [![Documentation Status](https://readthedocs.org/projects/sinabs/badge/?version=main)](https://sinabs.readthedocs.io)
+ [![codecov](https://codecov.io/gh/synsense/sinabs/branch/develop/graph/badge.svg?token=JPGAW4SH1W)](https://codecov.io/gh/synsense/sinabs)
+ [![PyPI - Downloads](https://img.shields.io/pypi/dd/sinabs)](https://pepy.tech/project/sinabs)
+ [![Discord](https://img.shields.io/discord/852094154188259338)](https://discord.gg/V6FHBZURkg)
+ ![sinabs](docs/_static/sinabs-logo-lowercase-whitebg.png)
+
+ Sinabs (Sinabs Is Not A Brain Simulator) is a python library for the development and implementation of Spiking Convolutional Neural Networks (SCNNs).
+ The library implements several layers that are `spiking` equivalents of CNN layers.
+ In addition it provides support to import CNN models implemented in torch conveniently to test their `spiking` equivalent implementation.
+ This project is managed by SynSense (former aiCTX AG).
+
+ The `sinabs-dynapcnn` was incorporated to this project, and it enables porting sinabs models to chips and dev-kits with DYNAP-CNN technology.
+
+
+ Installation
+ ------------
+ For the stable release on the main branch:
+ ```
+ pip install sinabs
+ ```
+ or (thanks to [@Tobias-Fischer](https://github.com/Tobias-Fischer))
+ ```
+ conda install -c conda-forge sinabs
+ ```
+
+ For the latest pre-release on the develop branch that passed the tests:
+ ```
+ pip install sinabs --pre
+ ```
+ The package has been tested on the following configurations
+ [![](http://github-actions.40ants.com/synsense/sinabs/matrix.svg?only=ci.multitest)](https://github.com/synsense/sinabs)
+
+
+ Documentation and Examples
+ --------------------------
+ [https://sinabs.readthedocs.io/](https://sinabs.readthedocs.io/)
+
+ Questions? Feedback?
+ --------------------
+ Please join us on the [#sinabs Discord channel](https://discord.gg/V6FHBZURkg)!
+
+ - If you would like to report bugs or push any changes, you can do this on our [github repository](https://github.com/synsense/sinabs/issues).
+
+ License
+ -------
+ Sinabs is published under Apache v2.0. See the LICENSE file for details.
+
+
+ Contributing to Sinabs
+ ------------------------
+ Checkout the [contributing](https://sinabs.readthedocs.io/en/develop/about/contributing.html) page for more info.
+
+
+ Citation
+ --------
+
+ In case you find this software library useful for your work please consider citing it as follows:
+
+ ```
+ @software{sinabs,
+ author = {Sheik, Sadique and Lenz, Gregor and Bauer, Felix and Kuepelioglu, Nogay },
+ doi = {10.5281/zenodo.8385545},
+ license = {Apache-2.0},
+ title = {{SINABS: A simple Pytorch based SNN library specialised for Speck}},
+ url = {https://github.com/synsense/sinabs}
+ }
+ ```
+
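The README embedded in the new PKG-INFO above mentions porting sinabs models to chips and dev-kits with DYNAP-CNN technology. A minimal sketch of that flow, assuming the `DynapcnnNetwork` class from `sinabs.backend.dynapcnn` as documented for the 3.x releases; the toy model, the `input_shape` and the commented device string are illustrative only, and exact constructor arguments may differ in 3.1.0:

```python
import torch.nn as nn
from sinabs.backend.dynapcnn import DynapcnnNetwork
from sinabs.layers import IAFSqueeze

# Toy SNN in the conv -> spiking -> pool pattern expected by the chip mapper.
snn = nn.Sequential(
    nn.Conv2d(1, 8, kernel_size=3, stride=2, bias=False),
    IAFSqueeze(batch_size=1),
    nn.AvgPool2d(2),
    nn.Conv2d(8, 16, kernel_size=3, bias=False),
    IAFSqueeze(batch_size=1),
    nn.Flatten(),
    nn.Linear(16 * 4 * 4, 10, bias=False),
    IAFSqueeze(batch_size=1),
)

# Quantize weights/thresholds and group the layers into chip-compatible cores.
hw_model = DynapcnnNetwork(snn, input_shape=(1, 28, 28), discretize=True)

# Deploying to a connected devkit would then look roughly like:
# hw_model.to(device="speck2fmodule:0")
```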
@@ -1,8 +1,8 @@
  How is Sinabs different?
  ========================
 
- There are many SNN simulators out there. What does Sinabs do differently? Sinabs is meant to extend PyTorch by adding stateful and spiking layers, which can then take full advantage of the optimised, gradient-based training mechanisms. You might find that the design of our sequential models differs slightly from that of recurrent layers in PyTorch, as we do not pass the state as an input or receive it as output. We do this so that we can be compatible with the nn.Sequential architecture, without having to manually define the flow of tensors between layers.
+ There are many SNN simulators out there. What does Sinabs do differently? Sinabs is meant to extend PyTorch by adding stateful and spiking layers, which can then take full advantage of the optimised, gradient-based training mechanisms. You might find that the design of our sequential models differs slightly from that of recurrent layers in PyTorch, as we do not pass the state as an input or receive it as output. We do this so that we can be compatible with the nn.Sequential architecture, without having to manually define the flow of tensors between layers.
 
  What is the difference between Rockpool and Sinabs?
  ---------------------------------------------------
- Rockpool provides multiple computational backends such as Jax, Nest or PyTorch and wraps its own api around it! That allows for powerful abstractions and many additional features such as graph tracing, support for continuous time systems and neuromorphic hardware. Sinabs on the other hand focuses on simplicity: Built exclusively on PyTorch, it's meant to be a thin layer that adds support for spiking layers that are not part of PyTorch. Traditionally, Sinabs added support for SynSense's convolutional neuromorphic hardware (Dynap-CNN), while Rockpool focuses on SynSense's hardware for lower-dimensional signals (Xylo). You can read about both hardware architectures [here](https://www.synsense-neuromorphic.com/technology). That means that Sinabs comes with built-in weight transfer functionality which converts a pre-trained ANN to an SNN, because vision models often have strong spatial dependencies. Rockpool on the other hand adds support for analog audio frontends and exact hardware simulation in software.
+ Rockpool provides multiple computational backends such as Jax, Nest or PyTorch and wraps its own api around it! That allows for powerful abstractions and many additional features such as graph tracing, support for continuous time systems and neuromorphic hardware. Sinabs on the other hand focuses on simplicity: Built exclusively on PyTorch, it's meant to be a thin layer that adds support for spiking layers that are not part of PyTorch. Traditionally, Sinabs added support for SynSense's convolutional neuromorphic hardware (Dynap-CNN), while Rockpool focuses on SynSense's hardware for lower-dimensional signals (Xylo). You can read about both hardware architectures [here](https://www.synsense.ai/products/speck-2/). That means that Sinabs comes with built-in weight transfer functionality which converts a pre-trained ANN to an SNN, because vision models often have strong spatial dependencies. Rockpool on the other hand adds support for analog audio frontends and exact hardware simulation in software.
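The paragraph above notes that Sinabs ships built-in weight transfer that converts a pre-trained ANN into an SNN. A minimal sketch of that conversion, assuming the `sinabs.from_torch.from_model` helper; the toy ANN, shapes and the thresholded random input are illustrative only:

```python
import torch
import torch.nn as nn
from sinabs.from_torch import from_model

# Toy "pre-trained" ANN (weights here are just randomly initialised).
ann = nn.Sequential(
    nn.Conv2d(1, 16, kernel_size=3, bias=False),
    nn.ReLU(),
    nn.AvgPool2d(2),
    nn.Flatten(),
    nn.Linear(16 * 13 * 13, 10, bias=False),
    nn.ReLU(),
)

# ReLUs are replaced by spiking (IAF) layers; conv/linear weights are reused.
net = from_model(ann, input_shape=(1, 28, 28), batch_size=1)
snn = net.spiking_model

# Time steps are flattened into the batch dimension: (batch * time, C, H, W).
spikes = (torch.rand(10, 1, 28, 28) > 0.8).float()
print(snn(spikes).shape)  # -> torch.Size([10, 10])
```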
@@ -1,39 +1,51 @@
- CHANGES
- =======
+ # Release notes
 
- v3.0.3
+ ## v3.1.0 (31/10/2025)
+
+ * Fix tests crashing in case an unexpected board was found.
+ * Fix tests that were not running for speck2fdevkit.
+ * Fix error when initializing sinabs without matplotlib: matplotlib is not a sinabs dependency.
+ * Add mapping of non-sequential networks
+ * Now, layers in Sinabs models can receive inputs from and send outputs to multiple layers.
+ * Deprecate `chip_layers_ordering` from DynapCNNNetwork. Use `layer2core_map` instead.
+ * Deprecate `DynapcnnCompatibleNetwork`. Use `DynapcnnNetwork` instead.
+ * Deprecate `merge_conv_bn` from `sinabs.utils`. Use `merge_bn` instead.
+
+ ## v3.0.4 (08/09/2025)
+
+ * Update sinabs code to be consistent with Python 3.12 and Numpy > 2.0.
+ * Merge Release Notes and Changelog.
+ * Remove Changelog file as it was not being used.
+ * Remove Pipfile and Pipfile.lock files as they were not being used anymore.
+ * Specific/minimal library versions can be found in requirements.txt
+
+ ## v3.0.3 (22/07/2025)
 
  * Add function in utils to help identify issues with memory constraints when mapping a network on Speck.
  (https://sinabs.readthedocs.io/v3.0.3/api/utils.html#sinabs.utils.validate_memory_mapping_speck)
 
- v3.0.2
- ------
+ ## v3.0.2 (10/06/2025)
 
  * Update contact email. Now, any support requests and information about contributors license agreement needs to be sent to `support@synsense.ai`.
 
- v3.0.1
- ------
+ ## v3.0.1 (06/06/2025)
 
  * Update the release of the project when `main` branch is updated instead of `develop`
 
- v3.0.0
- ------
+ ## v3.0.0 (06/06/2025)
 
  * Remove support for older boards following update on Samna 0.46.0
 
- v2.0.3
- ------
+ ## v2.0.3 (18/03/2025)
 
  * Update Sinabs license to Apache 2.0
 
- v2.0.2
- ------
+ ## v2.0.2 (23/01/2025)
 
  * Spike count plot in 'DynapcnnVisualizer' is optional
  * `DynapcnnVisualizer` allows custom JIT filters to make readout predictions
 
- v2.0.0
- ------
+ ## v2.0.0 (14/03/2024)
 
  * Fix typos and broken links in the documentation
  * Move instructions for adding new device to FAQ
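The v3.1.0 entry above deprecates several names. A hedged migration sketch follows; the `merge_bn(conv, bn)` call pattern (mirroring the old `merge_conv_bn`), the `DynapcnnNetwork` constructor arguments and the commented device string are assumptions based on these notes and earlier documented usage, not on code shown in this diff:

```python
import torch.nn as nn
from sinabs.backend.dynapcnn import DynapcnnNetwork  # instead of the deprecated DynapcnnCompatibleNetwork
from sinabs.layers import IAFSqueeze
from sinabs.utils import merge_bn  # instead of the deprecated merge_conv_bn

# Fold BatchNorm statistics into the preceding convolution before deployment
# (the chip has no BatchNorm); the (conv, bn) call pattern is assumed here.
conv = nn.Conv2d(2, 8, kernel_size=3, stride=2, bias=False)
bn = nn.BatchNorm2d(8)
fused_conv = merge_bn(conv, bn)

snn = nn.Sequential(fused_conv, IAFSqueeze(batch_size=1))
hw_model = DynapcnnNetwork(snn, input_shape=(2, 64, 64), discretize=True)

# Deprecated: hw_model.to(device="speck2fmodule:0", chip_layers_ordering=[0])
# Per the notes, map DynapcnnLayers to chip cores via layer2core_map instead:
# hw_model.to(device="speck2fmodule:0", layer2core_map="auto")
```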
@@ -66,8 +78,7 @@ v2.0.0
  * Remove easy-install command
  * Add graphviz dependency
 
- v1.2.10
- -------
+ ## v1.2.10 (07/12/2023)
 
  * Update codecov.yml
  * Update ci-pipeline.yml
@@ -97,8 +108,7 @@ v1.2.10
  * set default value to 1 for stride in SumPool2d
  * updated nir to support Flatten and SumPool2d
 
- v1.2.9
- ------
+ ## v1.2.9 (31/08/2023)
 
  * Add unit test to ensure that subract value of MembraneSubtract can be a tensor
  * Correct test whether subtract value is None
@@ -165,8 +175,7 @@ v1.2.9
  * add nir IF support
  * updated Sinabs to latest NIR
 
- 1.2.8
- -----
+ ## 1.2.8 (10/07/2023)
 
  * add nir and nirtorch to requirements
  * implemented sequential for from\_nir
@@ -181,8 +190,7 @@ v1.2.9
  * wip conversion from nir to sinabs
  * Fix issue #99, add unit test
 
- v1.2.6
- ------
+ ## v1.2.6 (30/06/2023)
 
  * change nmnist tutorial title
  * added NMNIST tutorial
@@ -279,15 +287,13 @@ v1.2.6
  * try another way install graphviz
  * fix CI pipeline failure
 
- v1.2.5
- ------
+ ## v1.2.5 (24/04/2023)
 
  * make contact title bold like the others
  * add contact section to documentation
  * checked tests pass on mac
 
- v1.2.4
- ------
+ ## v1.2.4 (05/04/2023)
 
  * replace torch.inf with math.inf
  * ignore data and cache directories
@@ -298,14 +304,12 @@ v1.2.4
  * added update keyring to gitlab-ci
  * renamed test
 
- v1.2.3
- ------
+ ## v1.2.3
 
  * fix synops/s for Linear layers when no Squeeze layers are used in network
  * update pre-commit config
 
- v1.2.2
- ------
+ ## v1.2.2 (17/03/2023)
 
  * expand saved input in spiking layers for analyzer
  * derive spiking layer stats from single saved output tensor and also save input
@@ -317,16 +321,16 @@ v1.2.2
  * update ChangeLog
  * add speck2f module devkit support and remove speck2f characterization board support
 
- v1.2.1
- ------
+ ## v1.2.1 (23/02/2023)
 
  * distinguish between accumulated and mini batch stats for firing rates
  * distinguish between accumulated and mini batch statistics in SNNAnalyzer for synops
  * only compute connection\_map once in SynopsHook
  * detach synops accumulation inbetween mini batches for SNNAnalyzer
 
- v1.2.0
- ------
+ ## v1.2.0 (15/02/2023)
+
+ Included the SNNAnalyzer module to collect model statistics such as number of synops or neurons automatically.
 
  * update release notes
  * make sure deconvolve outputs same size as conv input in SNNAnalyzer
@@ -403,8 +407,7 @@ v1.2.0
  * update synops\_loss\_ann notebook
  * use consistent American spelling of analyzer
 
- v1.1.6
- ------
+ ## v1.1.6 (08/12/2022)
 
  * make min\_vem and spike\_thresholds parameters do that they're included in state\_dict
  * Detach recordings before plotting
@@ -448,8 +451,7 @@ v1.1.6
  * Do not sleep while resetting the states when the function is called. Stop the input graph, while the neuron values are written. Sleep for a bit and then start the graph again
  * Fix to the bug
 
- v1.1.5
- ------
+ ## v1.1.5 (25/11/2022)
 
  * Fix backward pass for MaxSpike
  * update ChangeLog after add support of speck2e
@@ -477,8 +479,7 @@ v1.1.5
  * Requirements for samna version updated
  * Change to get the same functionality in 'to()' method in samna Version: 0.21.2.0
 
- v1.1.4
- ------
+ ## v1.1.4 (18/11/2022)
 
  * autoformat all docstrings using docformatter
  * fix bug where added\_spike\_output is a ReLU
@@ -487,14 +488,12 @@ v1.1.4
  * Update ci-pipeline.yml
  * Update ci-pipeline.yml
 
- v1.1.3
- ------
+ ## v1.1.3 (17/11/2022)
 
  * Update ci-pipeline.yml
  * add generic reset\_states and zero\_grad utility functions
 
- v1.1.2
- ------
+ ## v1.1.2 (16/11/2022)
 
  * make sure that add\_spike\_output doesn't add spiking layer in original ANN
  * removed unused imports
@@ -505,8 +504,7 @@ v1.1.2
  * add documentation about how to release a new Sinabs version
  * Hotfix new arg\_dict
 
- v1.1.1
- ------
+ ## v1.1.1 (02/11/2022)
 
  * add arg\_dict property to StatefulLayer
  * Update ci-pipeline.yml
@@ -518,16 +516,14 @@ v1.1.1
  * add a replace\_module function that will replace specific layer according to a mapper\_fn. Reworked from\_torch
  * requirement bumped to 1.0.7
 
- v1.0.7
- ------
+ ## v1.0.7 (27/10/2022)
 
  * check for pytorch version under test and either call testing.assert\_close or assert\_allclose depending on the version
  * using torch.equal instead of np equal
  * tests fixed
  * fixing test
 
- v1.0.6
- ------
+ ## v1.0.6
 
  * added zero\_grad to network
  * wip enable learning
@@ -537,8 +533,7 @@ v1.0.6
  * added zero\_grad function
  * updates to latest sinabs requirements
 
- v1.0.5
- ------
+ ## v1.0.5 (21/10/2022)
 
  * add utils functions to documentation
  * update tutorial notebooks to use batch\_size or num\_timesteps for from\_model
@@ -547,8 +542,7 @@ v1.0.5
  * exclude samna log files
  * get rid of test warnings: Dropout instead of Dropout2d, no autograd fn instantiation, torch.arange
 
- v1.0.4
- ------
+ ## v1.0.4 (05/10/2022)
 
  * more docstring updates for layers plus cross-references in API documentation
  * add shape and attributes to layer docstrings
@@ -558,27 +552,24 @@ v1.0.4
  * add Repeat auxiliary layer
  * Update ci-pipeline.yml
 
- v1.0.3
- ------
+ ## v1.0.3 (28/09/2022)
 
  * exclude generated authors / changelog files
  * Removed pandas dependency. Adjusted tests accordingly
  * add speck2e config builder
 
- v1.0.2
- ------
+ ## v1.0.2 (14/09/2022)
 
  * additional minor docstring update
  * update some more docstrings for activation modules
  * doc strings updated
 
- v1.0.1
- ------
+ ## v1.0.1 (26/08/2022)
 
+ Mostly making v0.3.x stable, with a revamped documentation that includes a gallery and how-tos!
  * add release notes for v1.0.1
 
- v1.0.0
- ------
+ ## v1.0.0
 
  * add complete tutorial notebooks since their execution is deactivated on the docs server
  * exclude some notebooks from automatic documentation build because they take very long
@@ -590,8 +581,7 @@ v1.0.0
  * first version of Sinabs gallery instead of tutorial notebook that plots neuron models
  * added pre-commit hooks for black
 
- v0.3.5
- ------
+ ## v0.3.5 (18/08/2022)
 
  * Bump stable PyTorch version from 1.10 to 1.12 in CI pipeline
  * Fix bug in tutorial due to API update
@@ -600,8 +590,7 @@ v0.3.5
  * Fix handling of non-scalar tau\_syn
  * Prevent non-integer arguments to Squeeze class from breaking code
 
- v0.3.4
- ------
+ ## v0.3.4 (21/06/2022)
 
  * Fix critical bug in LIF synaptic forward dynamics
  * re-naming from BufferSinkNode to BasicSinkNode
@@ -645,8 +634,7 @@ v0.3.4
  * Remove tau\_mem as a parameter from IAF
  * from\_model takes same parameters as IAF
 
- v0.3.3
- ------
+ ## v0.3.3 (20/04/2022)
 
  * update SNN synops tutorial
  * make SNNSynopsCounter work as a loss function
@@ -658,21 +646,28 @@ v0.3.3
  * moved parse\_device\_string into utils. No need of samna to run other modules
  * added speck2b tiny support
 
- v0.3.2
- ------
+ ## v0.3.2 (314/03/2022)
 
  * Rename remaining threshold arguments to spike\_threshold for more consistency
  * Update .gitlab-ci.yml
  * Update jupyterlab-requirements.txt
 
- v0.3.1
- ------
+ ## v0.3.1 (23/03/2022)
 
  * Update ci-pipeline.yml
  * Update requirements.txt
 
- v0.3.0
- ------
+ ## v0.3.0 (22/03/2022)
+
+ This is a major overhaul which rewrites a large part of the package.
+
+ * Addition of leaky models such as Leaky Integrate and Fire (LIF), Exponential Leaky (ExpLeak) and Adaptive LIF (ALIF).
+ * Activation module: from sinabs.activation you'll now be able to pick and choose different spike generation, reset mechanism and surrogate gradient functions. You can pass them to the neuron model (LIF, IAF, ...) of your liking if you want to alter the default behavior.
+ * new documentation on readthedocs
+ * SpikingLayer has been renamed to IAF (Integrate and Fire).
+ * State variable names changed and new ones have been added: 'state' is now called 'v_mem' and 'i_syn' is added for neuron layers that use tau_syn.
+ * New neuron features: support for recurrent connections, recording internal states, normalising inputs by taus, initialisation with shape and more.
+ * We moved our repo to Github and changed the CI pipeline to Github actions.
 
  * add basic parameter printing in \_\_repr\_\_
  * Update ci-pipeline.yml
@@ -791,8 +786,13 @@ v0.3.0
  * tau\_mem for LIF neurons is now always calculated on CPU and transferred to original device, for better numerical comparison with SLAYER LIF layer
  * only zero gradients if state is initialised
 
- v0.2.1
- ------
+ ## v0.2.1 (22/02/2022)
+
+ * TorchLayer renamed to Layer
+ * Added a depricated class TorchLayer with warning
+ * Added some new layer types
+ * BPTT enabled spike activation layer
+ * SpikingTemporalConv1dLayer
 
  * added pip install
  * added requirements
@@ -1144,8 +1144,7 @@ v0.2.1
  * fixed docs, removed commented-out areas
  * removed dependency on samna for validation, and on SpikingLayerBPTT
 
- v0.2.0
- ------
+ ## v0.2.0
 
  * Threshold gradient scaled by threshold (Bug fix)
  * updated docs, removed exclude\_negative\_spikes from fromtorch (no effect)
@@ -1324,8 +1323,7 @@ v0.2.0
  * Depricated TorchLayer added
  * merged master to bptt\_devel
 
- v0.1.dev7
- ---------
+ ## v0.1.dev7 (09/04/2020)
 
  * install m2r with hotfix for new version of sphinx
  * changed membrane\_subtract and reset defaults, some cleanup
@@ -1504,8 +1502,7 @@ v0.1.dev7
  * merged
  * fixed typos in readme
 
- v0.1.0
- ------
+ ## v0.1.0
 
  * fixed version number
  * removed contrib branch
@@ -1,5 +1,5 @@
  # **CONTACT US**
 
- Are you using Sinabs to train your networks? Do you have access to SynSense hardware and need help figuring something out? Found a bug? It's normally easiest to reach SynSense developers on the [SynSense Discord](https://discord.gg/V6FHBZURkg) server. There we have dedicated channels regarding Sinabs and our hardware.
+ Are you using Sinabs to train your networks? Do you have access to SynSense hardware and need help figuring something out? Found a bug? It's normally easiest to reach SynSense developers on the [SynSense Discord](https://discord.gg/9yY3uyJV) server. There we have dedicated channels regarding Sinabs and our hardware.
 
  If you don't have Discord, you can also start a discussion on [Github](https://github.com/synsense/sinabs/discussions)!
@@ -28,15 +28,9 @@
  "\n",
  "model = nn.Sequential(\n",
  " nn.Linear(16, 64),\n",
- " sl.LIF(\n",
- " tau_mem=10.0,\n",
- " surrogate_grad_fn=sinabs.activation.SingleExponential()\n",
- " ),\n",
+ " sl.LIF(tau_mem=10.0, surrogate_grad_fn=sinabs.activation.SingleExponential()),\n",
  " nn.Linear(64, 4),\n",
- " sl.LIF(\n",
- " tau_mem=10.0,\n",
- " surrogate_grad_fn=sinabs.activation.SingleExponential()\n",
- " ),\n",
+ " sl.LIF(tau_mem=10.0, surrogate_grad_fn=sinabs.activation.SingleExponential()),\n",
  ")"
  ]
  },
@@ -258,7 +258,9 @@
  "x = torch.linspace(-0.5, 3.5, 500)\n",
  "plt.plot(x, sina.Heaviside(window=0.5)(v_mem=x, spike_threshold=1.0), label=\"Heaviside\")\n",
  "plt.plot(x, sina.MultiGaussian()(v_mem=x, spike_threshold=1.0), label=\"MultiGaussian\")\n",
- "plt.plot(x, sina.SingleExponential()(v_mem=x, spike_threshold=1.0), label=\"SingleExponential\")\n",
+ "plt.plot(\n",
+ " x, sina.SingleExponential()(v_mem=x, spike_threshold=1.0), label=\"SingleExponential\"\n",
+ ")\n",
  "\n",
  "plt.xlabel(\"Membrane potential v_mem\")\n",
  "plt.ylabel(\"derivative of output activation\")\n",
@@ -27,6 +27,7 @@
  "from sinabs import layers as sl\n",
  "import sinabs.hooks\n",
  "\n",
+ "\n",
  "# - Define SNN\n",
  "class SNN(nn.Sequential):\n",
  " def __init__(self, batch_size):\n",
@@ -46,6 +47,7 @@
  " sl.UnflattenTime(batch_size=batch_size),\n",
  " )\n",
  "\n",
+ "\n",
  "batch_size = 5\n",
  "snn = SNN(batch_size=batch_size)"
  ]
@@ -162,7 +164,7 @@
  ],
  "source": [
  "# Forward pass\n",
- "rand_input_spikes = (torch.ones((batch_size, 10, 1, 28, 28)) ).float()\n",
+ "rand_input_spikes = (torch.ones((batch_size, 10, 1, 28, 28))).float()\n",
  "snn(rand_input_spikes)\n",
  "\n",
  "# Access and print hook data\n",
@@ -170,7 +172,9 @@
  " if hasattr(layer, \"hook_data\"):\n",
  " print(f\"Layer {idx}:\")\n",
  " print(f\"\\tBatch size: {layer.hook_data['batch_size']}\")\n",
- " print(f\"\\tShape: {layer.hook_data['neuron_shape']} - {layer.hook_data['num_neurons']} neurons in total\")"
+ " print(\n",
+ " f\"\\tShape: {layer.hook_data['neuron_shape']} - {layer.hook_data['num_neurons']} neurons in total\"\n",
+ " )"
  ]
  },
  {