sinabs-3.0.3.dev1-py3-none-any.whl → sinabs-3.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sinabs/__init__.py CHANGED
@@ -2,9 +2,15 @@ from pbr.version import VersionInfo
 
  __version__ = VersionInfo("sinabs").release_string()
 
- from . import conversion, utils
+ from . import conversion, utils, validate_memory_speck
  from .from_torch import from_model
  from .network import Network
  from .nir import from_nir, to_nir
  from .synopcounter import SNNAnalyzer, SynOpCounter
- from .utils import reset_states, set_batch_size, zero_grad
+ from .utils import (
+     reset_states,
+     set_batch_size,
+     zero_grad,
+     validate_memory_mapping_speck,
+ )
+ from .validate_memory_speck import ValidateMapping
sinabs/backend/dynapcnn/discretize.py CHANGED
@@ -287,7 +287,6 @@ def _discretize_conv_spk_(
          conv_bias = torch.zeros(conv_lyr.out_channels)
 
      if spike_lyr is None:
-
          discr_spk = False
 
      if spk_thr is None or spk_thr_low is None:
sinabs/backend/dynapcnn/dynapcnn_network.py CHANGED
@@ -344,7 +344,6 @@ class DynapcnnNetwork(nn.Module):
          )
          # Validate config
          if is_compatible:
-             print("Network is valid")
              return config
          else:
              raise ValueError(f"Generated config is not valid for {device}")
@@ -361,8 +360,8 @@ class DynapcnnNetwork(nn.Module):
          try:
              _, is_compatible = self._make_config(device=device_type)
          except ValueError as e:
-             # Catch "No valid mapping found" error
-             if e.args[0] == ("No valid mapping found"):
+             # Catch the "No valid mapping found" error; it is the first sentence of the message
+             if e.args[0].find("No valid mapping found.") == 0:
                  return False
              else:
                  raise e
sinabs/backend/dynapcnn/mapping.py CHANGED
@@ -188,5 +188,8 @@ def recover_mapping(graph, layer_mapping) -> List[Tuple[int, int]]:
          if edge.flow == 1:
              mapping.append((i, edge.t - len(layer_mapping) - 1))
      if len(mapping) != len(layer_mapping):
-         raise ValueError("No valid mapping found")
+         raise ValueError(
+             "No valid mapping found. "
+             "For Speck family you can use `utils.validate_memory_mapping_speck()` to get more information."
+         )
      return mapping
sinabs/utils.py CHANGED
@@ -1,10 +1,11 @@
- from typing import List
+ from typing import List, Tuple
 
  import numpy as np
  import torch
  import torch.nn as nn
 
  import sinabs
+ from .validate_memory_speck import ValidateMapping
 
 
  def reset_states(model: nn.Module) -> None:
@@ -179,3 +180,62 @@ def set_batch_size(model: nn.Module, batch_size: int):
          if isinstance(mod, sinabs.layers.SqueezeMixin):
              mod.batch_size = batch_size
              # reset_states(mod)
+
+
+ def validate_memory_mapping_speck(
+     input_feature_size: int,
+     output_feature_size: int,
+     kernel_size: Tuple[int, int],
+     stride: Tuple[int, int],
+     padding: Tuple[int, int],
+     input_dimension: Tuple[int, int] = [64, 64],
+     conv_2d: bool = True,
+ ):
+     """Helper function to verify if it is possible to map a specific layer onto Speck.
+     This function validates kernel and neuron memories. It doesn't check all the network layers together.
+     It considers the mapping of a Conv2D layer only.
+
+     Args:
+         input_feature_size (int): number of input channels
+         output_feature_size (int): number of output channels
+         kernel_size (Tuple[int, int]): 2D kernel size
+         stride (Tuple[int, int]): 2D stride size
+         padding (Tuple[int, int]): 2D padding size
+         input_dimension (Tuple[int, int]): 2D input dimension size. Defaults to [64, 64].
+         conv_2d (bool): whether a Conv2D layer is being mapped. Defaults to True. Other layer types are not validated.
+
+     Returns:
+         msg (str): Message indicating the layer can be mapped, with the total size of the kernel and neuron memories.
+
+     Raises:
+         Exception: if the neuron or kernel memory is larger than what is available on chip.
+     """
+
+     if not conv_2d:
+         raise ValueError("This function only validates Conv2D layers.")
+
+     validate = ValidateMapping(
+         input_feature_size,
+         output_feature_size,
+         kernel_size,
+         stride,
+         padding,
+         [input_dimension[0], input_dimension[1]],
+         conv_2d,
+     )
+     (
+         kernel,
+         neuron,
+         kernel_error_msg,
+         neuron_error_msg,
+     ) = validate.calculate_total_memory()
+
+     if kernel_error_msg != "" or neuron_error_msg != "":
+         raise Exception(kernel_error_msg + neuron_error_msg)
+     else:
+         msg = (
+             "Layer can be mapped successfully. "
+             f"Kernel memory is {kernel:g}Ki and neuron memory is {neuron:g}Ki."
+         )
+
+     return msg
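For reference, a minimal usage sketch of the new helper, based only on the signature and docstring above. The layer parameters are made up for illustration; the function is also re-exported at package level by the `__init__.py` change.

```python
import sinabs

# Hypothetical Conv2D layer: 2 input channels, 8 output channels,
# 3x3 kernel, stride 1, padding 1, on the default 64x64 input.
msg = sinabs.validate_memory_mapping_speck(
    input_feature_size=2,
    output_feature_size=8,
    kernel_size=(3, 3),
    stride=(1, 1),
    padding=(1, 1),
    input_dimension=[64, 64],
    conv_2d=True,
)
print(msg)
# Expected, per the formulas in validate_memory_speck.py:
# "Layer can be mapped successfully. Kernel memory is 0.25Ki and neuron memory is 32Ki."
```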
sinabs/validate_memory_speck.py ADDED
@@ -0,0 +1,144 @@
+ from typing import Tuple
+ from matplotlib import pyplot as plt
+ from matplotlib import colors
+
+ import numpy as np
+
+
+ class ValidateMapping:
+     def __init__(
+         self,
+         input_feature_size: int,
+         output_feature_size: int,
+         kernel_size: Tuple[int, int],
+         stride: Tuple[int, int],
+         padding: Tuple[int, int],
+         input_dimension: Tuple[int, int] = [64, 64],
+         conv_2d: bool = True,
+     ):
+         self.input_feature_size = input_feature_size
+         self.output_feature_size = output_feature_size
+
+         self.kernel_size = kernel_size
+         self.stride = stride
+         self.padding = padding
+         self.input_dimension = input_dimension
+
+         # - If not a Conv2D layer, assume it is an AvgPool2D layer
+         if not conv_2d:
+             if (
+                 kernel_size[0] != kernel_size[1]
+                 or kernel_size[0] == 3
+                 or kernel_size[0] > 4
+             ):
+                 raise Exception(
+                     "Kernel size is limited to 1x1, 2x2 or 4x4 for AvgPool2D layer."
+                 )
+
+         if (
+             len(kernel_size) > 2
+             or len(stride) > 2
+             or len(padding) > 2
+             or len(input_dimension) > 2
+         ):
+             raise Exception(
+                 "We expect input dimension, kernel, stride and padding to be 2D elements, i.e., "
+                 "to have only two positions: x and y."
+             )
+
+         if kernel_size[0] > 16 or kernel_size[1] > 16:
+             raise Exception("Kernel size is limited to, at most, 16x16.")
+
+         if output_feature_size > 1024:
+             raise Exception("Output feature size is limited to, at most, 1024.")
+
+         if not self.check_stride():
+             raise Warning("Kernel stride can be 1, 2, 4 or 8 and, at most, 8x8.")
+
+     def calculate_total_memory(self):
+         kernel_memory = self.calculate_kernel_memory()
+         neuron_memory = self.calculate_neuron_memory()
+
+         kernel_error_msg = self.verify_combined_memories(
+             "kernel", kernel_memory, "neuron", neuron_memory
+         )
+         neuron_error_msg = self.verify_combined_memories(
+             "neuron", neuron_memory, "kernel", kernel_memory
+         )
+
+         print(kernel_error_msg)
+         print(neuron_error_msg)
+         return (
+             kernel_memory / 1024,
+             neuron_memory / 1024,
+             kernel_error_msg,
+             neuron_error_msg,
+         )
+
+     def calculate_kernel_memory(self):
+         return self.input_feature_size * pow(
+             2,
+             np.ceil(np.log2(self.kernel_size[0] * self.kernel_size[1]))
+             + np.ceil(np.log2(self.output_feature_size)),
+         )
+
+     def calculate_neuron_memory(self):
+         fx = (
+             (self.input_dimension[0] - self.kernel_size[0] + 2 * self.padding[0])
+             / self.stride[0]
+         ) + 1
+         fy = (
+             (self.input_dimension[1] - self.kernel_size[1] + 2 * self.padding[1])
+             / self.stride[1]
+         ) + 1
+         return self.output_feature_size * pow(
+             2, (np.ceil(np.log2(fx)) + np.ceil(np.log2(fy)))
+         )
+
+     def check_stride(self):
+         for i in range(len(self.stride)):
+             if (
+                 self.stride[i] == 1
+                 or self.stride[i] == 2
+                 or self.stride[i] == 4
+                 or self.stride[i] == 8
+             ):
+                 return True
+         return False
+
+     def verify_combined_memories(
+         self, base_name: str, base_memory: int, compared_name: str, compared_memory: int
+     ):
+         # core ids --------- kernel memory -------- neuron memory
+         # [0, 1, 2] ------------- 16Ki ----------------- 64Ki
+         # [3, 4] ---------------- 32Ki ----------------- 32Ki
+         # [5, 6] ---------------- 64Ki ----------------- 16Ki
+         # [7, 8] ---------------- 16Ki ----------------- 16Ki
+
+         base_memory = base_memory / 1024
+         compared_memory = compared_memory / 1024
+
+         error_msg = ""
+         if base_memory > 64:
+             error_msg = (
+                 f"{base_name.capitalize()} memory is {base_memory:g}Ki and can not be mapped on chip. "
+                 f"{base_name.capitalize()} memory on chip needs to be at most 64Ki."
+             )
+
+         if base_memory > 16 and base_memory <= 32:
+             if compared_memory > 32:
+                 error_msg = (
+                     "There is no core on chip to fit neuron and kernel memories. "
+                     f"When {base_name} memory is higher than 16Ki, {compared_name} memory needs to be at most 32Ki. "
+                     f"{base_name.capitalize()} is {base_memory:g}Ki and {compared_name} is {compared_memory:g}Ki."
+                 )
+
+         if base_memory > 32:
+             if compared_memory > 16:
+                 error_msg = (
+                     "There is no core on chip to fit neuron and kernel memories. "
+                     f"When {base_name} memory is higher than 32Ki, {compared_name} memory needs to be at most 16Ki. "
+                     f"{base_name.capitalize()} is {base_memory:g}Ki and {compared_name} is {compared_memory:g}Ki."
+                 )
+
+         return error_msg
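To make the memory bookkeeping above concrete, here is a small worked example that reproduces the two formulas by hand. The layer shape is hypothetical, and "Ki" follows the same divide-by-1024 scaling used in calculate_total_memory().

```python
import numpy as np

# Hypothetical layer: 2 input channels, 8 output channels, 3x3 kernel,
# stride 1, padding 1, 64x64 input.
c_in, c_out = 2, 8
kx, ky = 3, 3
sx, sy = 1, 1
px, py = 1, 1
dx, dy = 64, 64

# Same expression as ValidateMapping.calculate_kernel_memory()
kernel = c_in * 2 ** (np.ceil(np.log2(kx * ky)) + np.ceil(np.log2(c_out)))

# Same expression as ValidateMapping.calculate_neuron_memory();
# fx, fy are the output feature-map dimensions.
fx = (dx - kx + 2 * px) / sx + 1
fy = (dy - ky + 2 * py) / sy + 1
neuron = c_out * 2 ** (np.ceil(np.log2(fx)) + np.ceil(np.log2(fy)))

print(kernel / 1024, neuron / 1024)  # 0.25 32.0
```

With 0.25Ki of kernel memory and 32Ki of neuron memory, the core table in the comment above is satisfied by cores 0-2 (16Ki kernel / 64Ki neuron), so verify_combined_memories() returns empty error strings for this layer.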
@@ -1,15 +1,13 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.4
  Name: sinabs
- Version: 3.0.3.dev1
+ Version: 3.0.4
  Summary: SynSense Spiking Neural Network simulator for deep neural networks (DNNs).
- Home-page: UNKNOWN
  Author: SynSense (formerly AiCTX)
  Author-email: support@synsense.ai
  License: Apache 2.0
  Project-URL: Source code, https://github.com/synsense/sinabs
  Project-URL: Documentation, https://readthedocs.org/projects/sinabs/
  Keywords: spiking neural networks,machine learning,SNN,DYNAPCNN,Speck
- Platform: UNKNOWN
  Classifier: Development Status :: 4 - Beta
  Classifier: Environment :: Console
  Classifier: Intended Audience :: Science/Research
@@ -17,12 +15,25 @@ Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Programming Language :: Python :: 3
  Description-Content-Type: text/markdown
+ License-File: LICENSE
+ License-File: AUTHORS
  Requires-Dist: pbr
  Requires-Dist: numpy
  Requires-Dist: torch>=1.8
  Requires-Dist: nir<=1.0.4
  Requires-Dist: nirtorch
  Requires-Dist: samna>=0.33
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: keywords
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: project-url
+ Dynamic: requires-dist
+ Dynamic: summary
 
  [![PyPI - Package](https://img.shields.io/pypi/v/sinabs.svg)](https://pypi.org/project/sinabs/)
  [![Documentation Status](https://readthedocs.org/projects/sinabs/badge/?version=main)](https://sinabs.readthedocs.io)
@@ -93,5 +104,3 @@ url = {https://github.com/synsense/sinabs}
  }
  ```
 
-
-
@@ -1,4 +1,4 @@
- sinabs/__init__.py,sha256=JkDiILG7hqkPMBA3IpxOYGuWmUXpIwf2fcAewdn7kdU,332
+ sinabs/__init__.py,sha256=vvWzU_lrsSob2JpOdG7ok8Rm0FjR-v7ogYgK6zruAGk,458
  sinabs/cnnutils.py,sha256=MTVTmTnLYMiDQZozfgH7UhMCYQPpOto0vDa0kMjADiA,2406
  sinabs/conversion.py,sha256=k9pNqOtmA4JhKXIyGoFY6Fl8jyZp7DtlmpS4ym8pN0w,2023
  sinabs/from_torch.py,sha256=L_n7BRj7llKI-4Er1cPOZo6VVoInxehMk1PVlDiFIt8,4957
@@ -6,7 +6,8 @@ sinabs/hooks.py,sha256=7jK44SxPcnQhWScvML9QIXyX5sfA_1E-lHpYjz2_4qc,16197
  sinabs/network.py,sha256=If6Qz2MDEpHPHD0bEStDyCif1EUw4ef3CXUQr4og9rA,9489
  sinabs/nir.py,sha256=r72RZ2WNuhnHvQ2MaSJ04J-Bes1mAdzoU9LKbJupZzE,8695
  sinabs/synopcounter.py,sha256=ZF7f9Au-j5wC3gPixWxj4yq8v8KdjDUMJWExyKi6Y5s,12759
- sinabs/utils.py,sha256=134zD1Kj7u2oFPoxgy8bwBrfiHS-ct7A-TlU8ghtL30,5655
+ sinabs/utils.py,sha256=wNFwlf03SOYAEgdZiA1cxAf1XEnRXlycutEu2uhDBSQ,7696
+ sinabs/validate_memory_speck.py,sha256=8uByUV9VNHncbYX4Wxq3qaepwfp-tH3c2MZeHRZSUeU,5134
  sinabs/activation/__init__.py,sha256=cHXmIvV9fYZhqKVVTzD2F1a6KQklJPgTgDFjNzIEII8,311
  sinabs/activation/quantize.py,sha256=AzrIQbIlSPoiPgueC4XkRGNSeNoU5V9B7mtXbq0Kzk8,1166
  sinabs/activation/reset_mechanism.py,sha256=aKtQFxB8WqzuSir0NocdkqTF_YD7E365QBj66g3wQvE,1419
@@ -17,15 +18,15 @@ sinabs/backend/dynapcnn/__init__.py,sha256=m0Zr7WkmTHyGth0iQ7vsojnXd2Tb0yHE0hW6w
  sinabs/backend/dynapcnn/chip_factory.py,sha256=Avy-CVu4-wxEMNcJmj0XVTfUEX7IGDbvVckMg5enVIo,6910
  sinabs/backend/dynapcnn/config_builder.py,sha256=oGbSwPxRd-qCT4cI6CBupbTl5Jh_B_3RhT3w-jaP8gQ,4795
  sinabs/backend/dynapcnn/crop2d.py,sha256=-FKOQHdx8GjEXK64OlWZyc1GId4FFRptVnt80jLBlcs,1439
- sinabs/backend/dynapcnn/discretize.py,sha256=PyamxI-AoekChUZa_nCKatVre-gXBFWqnzPDcvnDQh4,13475
+ sinabs/backend/dynapcnn/discretize.py,sha256=5ZG4KHrIKB79ifGvt1gu6j8OlH0xc51cWQtyQHelJvg,13474
  sinabs/backend/dynapcnn/dvs_layer.py,sha256=Aauw7u7IJvtUkjOpYo1snqzVyFns6DZ5bmZGaj0Y7pA,9468
  sinabs/backend/dynapcnn/dynapcnn_layer.py,sha256=53u_7NqlNqJxTjrOeEOz4WABrYLtmclaN8sAwaSfr9Y,6702
- sinabs/backend/dynapcnn/dynapcnn_network.py,sha256=z6wSBynI7R8SWUS2TG7xKIWXcQNVI86JF1MPp3dXZG0,20547
+ sinabs/backend/dynapcnn/dynapcnn_network.py,sha256=ZHNrf-KVwEf1gpUnH9CpHSUCVSAqxLAb6Ks3zbdZ6N0,20556
  sinabs/backend/dynapcnn/dynapcnn_visualizer.py,sha256=MRewU6519dAtAMxf-JlFBrlynJTZeLiDfB0d85-mMFQ,24262
  sinabs/backend/dynapcnn/exceptions.py,sha256=hEei4gOniq3ByYXkJovlAeaUfZ8Q9BWTHTb0DJ9pHeQ,485
  sinabs/backend/dynapcnn/flipdims.py,sha256=I0I1nakrF0ngWBh-2SHHg7OkCOxotqukwHOQ45GWyCs,860
  sinabs/backend/dynapcnn/io.py,sha256=1AN4CcixXM1PFZ6U3LeNGQ71ajSXaV3lTyq__j4sxns,9952
- sinabs/backend/dynapcnn/mapping.py,sha256=JQnu_lzppPwlDOZNwqgyavoPmJ-Oz73VwE0T0azXbyc,5976
+ sinabs/backend/dynapcnn/mapping.py,sha256=h63M9hhI6enZtQyq-may9hU7YcnGNCumiHalh8ohMno,6108
  sinabs/backend/dynapcnn/specksim.py,sha256=UKh_lH_yHIZaYEONWsAOChrD-vBdSawBxRBeDHlSv84,17138
  sinabs/backend/dynapcnn/utils.py,sha256=LYihrBIiPTDLUwsUikVOzWva4RwPc27LoHRCUDlDm-4,17934
  sinabs/backend/dynapcnn/chips/__init__.py,sha256=zJQ7f7bp_cF0US1pZ8ga4-3Bo32T0GB9gD2RN3uKlsM,130
@@ -49,10 +50,10 @@ sinabs/layers/to_spike.py,sha256=97ar-tiDZCgckBLdnKoHzm8PjTFwDXra0weOFgAf6_4,331
  sinabs/layers/functional/__init__.py,sha256=v0c7DHizKg8jfelmFYeMMg9vDafKvzoenakc4SPpj84,91
  sinabs/layers/functional/alif.py,sha256=ycJ7rlcBAd-lq5GCDZrcNPeV-7fztt3uy43XhBtTKHI,4599
  sinabs/layers/functional/lif.py,sha256=QRjiWDCBaJFk4J7RRMgktMaLCyN6xEXAKvC9Bu_PICU,4259
- sinabs-3.0.3.dev1.dist-info/AUTHORS,sha256=jdt0oxfM_OW0_e5-ptxORAJ8U0uTzZjaB-F5iF2i50E,1802
- sinabs-3.0.3.dev1.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
- sinabs-3.0.3.dev1.dist-info/METADATA,sha256=hlutAzxSv37lzRsc31GiTn2Iczdi7R5cECuubFxjAlI,3656
- sinabs-3.0.3.dev1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- sinabs-3.0.3.dev1.dist-info/pbr.json,sha256=84-FHs3AncXAagUxPme2CI_3qf3KHng4nSI6-x_oj0g,47
- sinabs-3.0.3.dev1.dist-info/top_level.txt,sha256=QOXGzf0ZeDjRnJ9OgAjkk6h5jrh66cwrwvtPJTyfDk8,7
- sinabs-3.0.3.dev1.dist-info/RECORD,,
+ sinabs-3.0.4.dist-info/licenses/AUTHORS,sha256=jdt0oxfM_OW0_e5-ptxORAJ8U0uTzZjaB-F5iF2i50E,1802
+ sinabs-3.0.4.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+ sinabs-3.0.4.dist-info/METADATA,sha256=0ZVtQZcQXqtlrpD6vkkZeQydylC1DneKLWiJpho0Zpg,3887
+ sinabs-3.0.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ sinabs-3.0.4.dist-info/pbr.json,sha256=zYdy8PXt9BBw1ow-q9S5eIaTUBP1ICuh2q7SrAbkG40,47
+ sinabs-3.0.4.dist-info/top_level.txt,sha256=QOXGzf0ZeDjRnJ9OgAjkk6h5jrh66cwrwvtPJTyfDk8,7
+ sinabs-3.0.4.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.45.1)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -0,0 +1 @@
+ {"git_version": "b38b049", "is_release": false}
@@ -1 +0,0 @@
- {"git_version": "b584589", "is_release": false}