sinabs 3.0.4.dev25__py3-none-any.whl → 3.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. sinabs/activation/reset_mechanism.py +3 -3
  2. sinabs/activation/surrogate_gradient_fn.py +4 -4
  3. sinabs/backend/dynapcnn/__init__.py +5 -4
  4. sinabs/backend/dynapcnn/chip_factory.py +33 -61
  5. sinabs/backend/dynapcnn/chips/dynapcnn.py +182 -86
  6. sinabs/backend/dynapcnn/chips/speck2e.py +6 -5
  7. sinabs/backend/dynapcnn/chips/speck2f.py +6 -5
  8. sinabs/backend/dynapcnn/config_builder.py +39 -59
  9. sinabs/backend/dynapcnn/connectivity_specs.py +48 -0
  10. sinabs/backend/dynapcnn/discretize.py +91 -155
  11. sinabs/backend/dynapcnn/dvs_layer.py +59 -101
  12. sinabs/backend/dynapcnn/dynapcnn_layer.py +185 -119
  13. sinabs/backend/dynapcnn/dynapcnn_layer_utils.py +335 -0
  14. sinabs/backend/dynapcnn/dynapcnn_network.py +602 -325
  15. sinabs/backend/dynapcnn/dynapcnnnetwork_module.py +370 -0
  16. sinabs/backend/dynapcnn/exceptions.py +122 -3
  17. sinabs/backend/dynapcnn/io.py +51 -91
  18. sinabs/backend/dynapcnn/mapping.py +111 -75
  19. sinabs/backend/dynapcnn/nir_graph_extractor.py +877 -0
  20. sinabs/backend/dynapcnn/sinabs_edges_handler.py +1024 -0
  21. sinabs/backend/dynapcnn/utils.py +214 -459
  22. sinabs/backend/dynapcnn/weight_rescaling_methods.py +53 -0
  23. sinabs/conversion.py +2 -2
  24. sinabs/from_torch.py +23 -1
  25. sinabs/hooks.py +38 -41
  26. sinabs/layers/alif.py +16 -16
  27. sinabs/layers/crop2d.py +2 -2
  28. sinabs/layers/exp_leak.py +1 -1
  29. sinabs/layers/iaf.py +11 -11
  30. sinabs/layers/lif.py +9 -9
  31. sinabs/layers/neuromorphic_relu.py +9 -8
  32. sinabs/layers/pool2d.py +5 -5
  33. sinabs/layers/quantize.py +1 -1
  34. sinabs/layers/stateful_layer.py +10 -7
  35. sinabs/layers/to_spike.py +9 -9
  36. sinabs/network.py +14 -12
  37. sinabs/synopcounter.py +10 -7
  38. sinabs/utils.py +155 -7
  39. sinabs/validate_memory_speck.py +0 -5
  40. {sinabs-3.0.4.dev25.dist-info → sinabs-3.1.0.dist-info}/METADATA +2 -1
  41. sinabs-3.1.0.dist-info/RECORD +65 -0
  42. {sinabs-3.0.4.dev25.dist-info → sinabs-3.1.0.dist-info}/licenses/AUTHORS +1 -0
  43. sinabs-3.1.0.dist-info/pbr.json +1 -0
  44. sinabs-3.0.4.dev25.dist-info/RECORD +0 -59
  45. sinabs-3.0.4.dev25.dist-info/pbr.json +0 -1
  46. {sinabs-3.0.4.dev25.dist-info → sinabs-3.1.0.dist-info}/WHEEL +0 -0
  47. {sinabs-3.0.4.dev25.dist-info → sinabs-3.1.0.dist-info}/licenses/LICENSE +0 -0
  48. {sinabs-3.0.4.dev25.dist-info → sinabs-3.1.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,370 @@
1
+ from pprint import pformat
2
+ from typing import Dict, List, Optional, Set, Union
3
+ from warnings import warn
4
+
5
+ import torch.nn as nn
6
+ from torch import Tensor
7
+
8
+ import sinabs.layers as sl
9
+
10
+ from .dvs_layer import DVSLayer
11
+ from .dynapcnn_layer import DynapcnnLayer
12
+ from .utils import Edge, topological_sorting
13
+
14
+
15
class DynapcnnNetworkModule(nn.Module):
    """Allow forward (and backward) passing through a network of `DynapcnnLayer`s.

    Internally constructs a graph representation based on the provided arguments
    and uses this to pass data through all layers in correct order.

    This class internally builds a graph with `DynapcnnLayer` as nodes and their
    connections as edges. Several data structures help efficient retrieval of
    information required for the forward pass:

    Args:
        dynapcnn_layers (dict): a mapper containing `DynapcnnLayer` instances.
        destination_map (dict): Maps layer indices to list of destination
            indices. Exit destinations are marked by negative integers.
        entry_points (set): Set of layer indices that act as network entry
            points.
        dvs_node_info (dict): contains information associated with the
            `DVSLayer` node. `None` if no DVS node exists.

    Attributes:
        _dynapcnnlayer_edges: Set of edges connecting dynapcnn layers. Tuples
            of indices of source and target layers.
        _sorted_nodes: List of layer indices in topological order, to ensure
            forward calls to layers only happen when required inputs are
            available.
        _node_source_map: Dict with layer indices as keys and list of input
            layer indices as values.
    """

    def __init__(
        self,
        dynapcnn_layers: Dict[int, DynapcnnLayer],
        destination_map: Dict[int, List[int]],
        entry_points: Set[int],
        dvs_node_info: Optional[Dict] = None,
    ):
        super().__init__()

        self._dvs_node_info = dvs_node_info

        # Unfortunately ModuleDict does not allow for integer keys
        module_dict = {str(idx): lyr for idx, lyr in dynapcnn_layers.items()}
        self._dynapcnn_layers = nn.ModuleDict(module_dict)

        if self._dvs_node_info is not None:
            self._dvs_layer = dvs_node_info["module"]
        else:
            self._dvs_layer = None

        self._destination_map = destination_map
        self._entry_points = entry_points

        # `Merge` layers are stateless. One instance can be used for all merge
        # points during the forward pass.
        self.merge_layer = sl.Merge()

    @property
    def all_layers(self):
        """All layers of the network, including the DVS layer (key "dvs") if present."""
        layers = self.dynapcnn_layers
        if self.dvs_layer is not None:
            # `self.dynapcnn_layers` is a (shallow) copy. Adding entries won't
            # affect `self._dynapcnn_layers`
            layers["dvs"] = self.dvs_layer
        return layers

    @property
    def dvs_node_info(self):
        return self._dvs_node_info

    @property
    def dvs_layer(self):
        return self._dvs_layer

    @property
    def destination_map(self):
        return self._destination_map

    @property
    def dynapcnn_layers(self):
        # Convert string-indices to integer-indices and sort by index.
        # NOTE: The sort key must be the integer value of the key. Sorting the
        # string keys directly is lexicographic ("10" < "2") and would return
        # the layers out of order for networks with ten or more layers.
        return {
            int(idx): lyr
            for idx, lyr in sorted(
                self._dynapcnn_layers.items(), key=lambda item: int(item[0])
            )
        }

    @property
    def entry_points(self):
        return self._entry_points

    @property
    def sorted_nodes(self):
        return self._sorted_nodes

    @property
    def node_source_map(self):
        return self._node_source_map

    def get_exit_layers(self) -> List[int]:
        """Get layers that act as exit points of the network

        Returns:
            Layer indices with at least one exit destination.
        """
        return [
            layer_idx
            for layer_idx, destinations in self.destination_map.items()
            if any(d < 0 for d in destinations)
        ]

    def get_exit_points(self) -> Dict[int, Dict]:
        """Get details of layers that act as exit points of the network

        Returns:
            Dictionary whose keys are layer indices of `dynapcnn_layers`
            with at least one exit destination. Values are list of dicts, providing
            for each exit destination the negative valued ID ('destination_id'),
            the index of that destination within the list of destinations of the
            corresponding `DynapcnnLayer` ('destination_index'), and the pooling
            for this destination.
        """
        exit_layers = dict()
        for layer_idx, destinations in self.destination_map.items():
            exit_destinations = []
            for i, dest in enumerate(destinations):
                if dest < 0:
                    exit_destinations.append(
                        {
                            "destination_id": dest,
                            "destination_index": i,
                            "pooling": self.dynapcnn_layers[layer_idx].pool[i],
                        }
                    )
            if exit_destinations:
                exit_layers[layer_idx] = exit_destinations

        return exit_layers

    def setup_dynapcnnlayer_graph(
        self, index_layers_topologically: bool = False
    ) -> None:
        """Set up data structures to run forward pass through dynapcnn layers

        Args:
            index_layers_topologically (bool): If True, will assign new indices
                to dynapcnn layers such that they match their topological order
                within the network graph. This is not necessary but can help
                understand the network more easily when inspecting it.
        """
        self._dynapcnnlayer_edges = self.get_dynapcnnlayers_edges()
        self.add_entry_points_edges(self._dynapcnnlayer_edges)
        self._sorted_nodes = topological_sorting(self._dynapcnnlayer_edges)
        self._node_source_map = self.get_node_source_map(self._dynapcnnlayer_edges)
        if index_layers_topologically:
            self.reindex_layers(self._sorted_nodes)

    def get_dynapcnnlayers_edges(self) -> Set[Edge]:
        """Create edges representing connections between `DynapcnnLayer` instances.

        Returns:
            A set of edges using the IDs of `DynapcnnLayer` instances. These
            edges describe the computational graph implemented by the layers
            of the model (i.e., how the `DynapcnnLayer` instances address each
            other).
        """
        dcnnl_edges = set()

        for dcnnl_idx, destination_indices in self._destination_map.items():
            for dest in destination_indices:
                if dest >= 0:  # Ignore negative destinations (network exit points)
                    dcnnl_edges.add((dcnnl_idx, dest))

        return dcnnl_edges

    def add_entry_points_edges(self, dcnnl_edges: Set[Edge]) -> None:
        """Add extra edges `('input', X)` to `dcnnl_edges` for
        layers which are entry points of the `DynapcnnNetwork`.

        Args:
            dcnnl_edges (Set): tuples representing the output->input mapping
                between `DynapcnnLayer` instances. Will be changed in place.
        """
        for indx in self._entry_points:
            dcnnl_edges.add(("input", indx))

    def get_node_source_map(self, dcnnl_edges: Set[Edge]) -> Dict[int, List[int]]:
        """From a set of edges, create a dict that maps to each node its sources

        Args:
            dcnnl_edges (Set): tuples representing the output->input mapping
                between `DynapcnnLayer` instances.

        Returns:
            Dict with layer indices (int) as keys and list of layer indices that
            map to corresponding layer.
        """
        sources = dict()

        for src, trg in dcnnl_edges:
            if trg in sources:
                sources[trg].append(src)
            else:
                sources[trg] = [src]

        return sources

    def forward(
        self, x, return_complete: bool = False
    ) -> Union[Tensor, Dict[int, Dict[int, Tensor]]]:
        """Perform a forward pass through all dynapcnn layers
        The `setup_dynapcnnlayer_graph` method has to be executed beforehand.

        Args:
            x: Tensor that serves as input to network. Is passed to all layers
                that are marked as entry points
            return_complete: bool that indicates whether all layer outputs
                should be return or only those with no further destinations
                (default).

        Returns:
            The returned object depends on whether `return_complete` is set and
            on the network configuration:
            * If `return_complete` is `True`, all layer outputs will be
            returned in a dict, with layer indices as keys, and nested dicts as
            values, which hold destination indices as keys and output tensors
            as values.
            * If `return_complete` is `False` and there is only a single
            destination in the whole network that is marked as exit point (i.e.
            destination index in dynapcnn layer handler is negative), it will
            return the output as a single tensor.
            * If `return_complete` is `False` and no destination in the network
            is marked as exit point, a warning will be raised and the function
            returns an empty dict.
            * In all other cases a dict will be returned that is of the same
            structure as if `return_complete` is `True`, but only with entries
            where the destination is marked as exit point.
        """
        if not hasattr(self, "_sorted_nodes"):
            # Error message must reference the actual method name
            # (`setup_dynapcnnlayer_graph`, without the extra "s").
            raise RuntimeError(
                "It looks like `setup_dynapcnnlayer_graph` has never been executed. "
                "It needs to be called at least once before calling `forward`."
            )

        # For each layer store its outputs as dict with destination layers as keys.
        # For input set `x` as input to entry points
        layers_outputs = {"input": {ep: x for ep in self.entry_points}}

        for idx_curr in self._sorted_nodes:
            # Get inputs to the layer
            if len(sources := self._node_source_map[idx_curr]) > 1:
                # Layer has multiple inputs
                inputs = [layers_outputs[idx_src][idx_curr] for idx_src in sources]
                current_input = self.merge_layer(*inputs)
            else:
                idx_src = sources[0]
                current_input = layers_outputs[idx_src][idx_curr]

            # Get current layer instance and destinations
            layer = self.all_layers[idx_curr]
            destinations = self._destination_map[idx_curr]

            # Forward pass through layer
            output = layer(current_input)

            # Store layer output for all destinations
            if len(destinations) == 1:
                # Output is single tensor
                layers_outputs[idx_curr] = {destinations[0]: output}
            else:
                if isinstance(layer, DVSLayer):
                    # DVSLayer returns a single tensor (same for all its destinations).
                    layers_outputs[idx_curr] = {
                        idx_dest: output for idx_dest in destinations
                    }
                else:
                    # Output is list of tensors for different destinations
                    layers_outputs[idx_curr] = {
                        idx_dest: out for idx_dest, out in zip(destinations, output)
                    }

        if return_complete:
            return layers_outputs

        # Take outputs with exit point destinations as network output
        network_outputs = {}
        for layer_idx, layer_out in layers_outputs.items():
            outputs = {
                idx_dest: out
                for idx_dest, out in layer_out.items()
                if isinstance(idx_dest, int) and idx_dest < 0
            }
            if outputs:
                network_outputs[layer_idx] = outputs

        # If no outputs have been found return None and warn
        if not network_outputs:
            warn(
                "No exit points have been found. Try setting `return_complete` "
                "`True` to get all outputs, or mark exit points by setting "
                "corresponding destination layer indices in destination_map "
                "to negative integer values"
            )
            return dict()

        # Special case with single output: return single tensor
        if (
            len(network_outputs) == 1
            and len(out := (next(iter(network_outputs.values())))) == 1
        ):
            return next(iter(out.values()))

        # If there is output from multiple layers return all of them in a dict
        return network_outputs

    def reindex_layers(self, index_order: List[int]) -> None:
        """Reindex layers based on provided order

        Will assign new index to dynapcnn layers and update all internal
        attributes accordingly.

        Args:
            index_order: List of integers indicating new order of layers.
                The position of layer index within this list indicates new
                index.
        """
        mapping = {old: new for new, old in enumerate(index_order)}

        def remap(key):
            if key in ["dvs", "input"] or (isinstance(key, int) and key < 0):
                # Entries 'dvs', 'input' and negative indices are not changed
                return key
            else:
                return mapping[key]

        # Remap all internal objects
        self._dynapcnn_layers = nn.ModuleDict(
            {str(remap(int(idx))): lyr for idx, lyr in self._dynapcnn_layers.items()}
        )

        self._entry_points = {remap(idx) for idx in self._entry_points}

        self._destination_map = {
            remap(idx): [remap(dest) for dest in destinations]
            for idx, destinations in self._destination_map.items()
        }

        self._dynapcnnlayer_edges = {
            (remap(src), remap(trg)) for (src, trg) in self._dynapcnnlayer_edges
        }

        self._sorted_nodes = [remap(idx) for idx in self._sorted_nodes]

        self._node_source_map = {
            remap(node): [remap(src) for src in sources]
            for node, sources in self._node_source_map.items()
        }

    def __repr__(self):
        return f"DVS Layer: {pformat(self.dvs_layer)}\n\nDynapCNN Layers:\n" + pformat(
            self.dynapcnn_layers
        )
@@ -1,3 +1,13 @@
1
+ from typing import Set, Tuple, Type
2
+
3
# Shared explanation appended to several graph-structure related exceptions.
default_invalid_structure_string = (
    "Invalid structure found. This is most likely due to an unsupported SNN "
    "architecture. In general, a DynapCNN network should consist of groups of a "
    "weight layer (conv or linear), a spiking layer (IAFSqueeze), and optionally "
    "a pooling layer."
)
9
+
10
+
1
11
  class MissingLayer(Exception):
2
12
  index: int
3
13
 
@@ -6,14 +16,123 @@ class MissingLayer(Exception):
6
16
 
7
17
 
8
18
class UnexpectedLayer(Exception):
    """Raised when a layer of a different type than expected is encountered."""

    layer_type_found: Type
    layer_type_expected: Type

    def __init__(self, expected, found):
        # Store both types so that handlers can inspect them programmatically.
        # (They were annotated above but never assigned before.)
        self.layer_type_expected = expected
        self.layer_type_found = found
        super().__init__(f"Expected {expected} but found {found}")
14
24
 
15
25
 
16
26
class InputConfigurationError(Exception):
    """Raised when the input to DynapCNN is not configured correctly."""
30
+
31
+
32
class WrongModuleCount(Exception):
    """Raised when a DynapCNNLayer is built from an unexpected number of modules."""

    # Corrected annotations: both values are integers, not types.
    dynapcnnlayer_indx: int
    modules_count: int

    def __init__(self, dynapcnnlayer_indx, modules_count):
        # Store the values so handlers can inspect them programmatically.
        self.dynapcnnlayer_indx = dynapcnnlayer_indx
        self.modules_count = modules_count
        super().__init__(
            f"A DynapCNNLayer {dynapcnnlayer_indx} should have 2 or 3 modules but found {modules_count}."
        )
40
+
41
+
42
class WrongPoolingModule(Exception):
    """Raised when ``utils.build_SumPool2d`` receives an unsupported pooling module."""

    pooling_module: Type

    def __init__(self, pooling_module):
        message = (
            "The function 'utils.build_SumPool2d(mod)' expects "
            f"'mod = nn.AvgPool2d' but got 'mod = {pooling_module}'."
        )
        super().__init__(message)
52
+
53
+
54
class UnsupportedLayerType(Exception):
    """Raised when an unsupported layer type is encountered."""
56
+
57
+
58
class InvalidModel(Exception):
    """Raised when ``model`` is neither a DynapCNNNetwork nor a DynapCNNNetworkGraph."""

    model: Type

    def __init__(self, model):
        message = (
            "'model' accepts either a DynapCNNNetwork or a DynapCNNNetworkGraph "
            f"but {model} was given."
        )
        super().__init__(message)
68
+
69
+
70
class InvalidTorchModel(Exception):
    """Raised when a network object is not an ``nn.Module``."""

    network_type: str

    def __init__(self, network_type):
        message = f"A {network_type} needs to be of type nn.Module."
        super().__init__(message)
18
75
 
76
+
77
+ class InvalidGraphStructure(Exception):
19
78
  pass
79
+
80
+
81
class InvalidModelWithDVSSetup(Exception):
    """Raised when the model contains a DVSLayer while ``dvs_input`` is False."""

    def __init__(self):
        message = (
            "The network provided has a DVSLayer instance but argument "
            "'dvs_input' is set to False."
        )
        super().__init__(message)
86
+
87
+
88
+ # Edge exceptions.
89
+
90
+
91
class InvalidEdge(Exception):
    """Raised when an edge connects two layer types that can not target each other."""

    edge: Tuple[int, int]
    source: Type
    target: Type

    def __init__(self, edge, source, target):
        message = (
            f"Invalid edge {edge}: {source} can not target {target}. "
            + default_invalid_structure_string
        )
        super().__init__(message)
101
+
102
+
103
class UnknownNode(Exception):
    """Raised when a node can not be matched to any DynapCNNLayer."""

    node: int

    def __init__(self, node):
        message = f"Node {node} could not be found within any DynapCNNLayer mapper."
        super().__init__(message)
110
+
111
+
112
class MaxDestinationsReached(Exception):
    """Raised when a DynapCNNLayer has more destinations than the allowed maximum of two."""

    dynapcnnlayer_index: int

    def __init__(self, dynapcnnlayer_index):
        message = (
            f"DynapCNNLayer with index {dynapcnnlayer_index} has more than two "
            "destinations, and a maximum of two are allowed"
        )
        super().__init__(message)
119
+
120
+
121
class InvalidLayerLoop(Exception):
    """Raised when two layers would form a loop (edges in both directions)."""

    dynapcnnlayerA_index: int
    dynapcnnlayerB_index: int

    def __init__(self, dynapcnnlayerA_index, dynapcnnlayerB_index):
        message = (
            f"DynapCNNLayer {dynapcnnlayerA_index} can not connect to "
            f"{dynapcnnlayerB_index} since a reverse edge already exists."
        )
        super().__init__(message)
129
+
130
+
131
class InvalidLayerDestination(Exception):
    """Raised when one layer type can not serve as the destination of another."""

    dynapcnnlayerA: Type
    dynapcnnlayerB: Type

    def __init__(self, dynapcnnlayerA, dynapcnnlayerB):
        message = f"DynapCNNLayer {dynapcnnlayerA} can not connect to {dynapcnnlayerB}."
        super().__init__(message)