brainstate-0.1.8-py2.py3-none-any.whl → brainstate-0.1.10-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. brainstate/__init__.py +58 -51
  2. brainstate/_compatible_import.py +148 -148
  3. brainstate/_state.py +1605 -1663
  4. brainstate/_state_test.py +52 -52
  5. brainstate/_utils.py +47 -47
  6. brainstate/augment/__init__.py +30 -30
  7. brainstate/augment/_autograd.py +778 -778
  8. brainstate/augment/_autograd_test.py +1289 -1289
  9. brainstate/augment/_eval_shape.py +99 -99
  10. brainstate/augment/_eval_shape_test.py +38 -38
  11. brainstate/augment/_mapping.py +1060 -1060
  12. brainstate/augment/_mapping_test.py +597 -597
  13. brainstate/augment/_random.py +151 -151
  14. brainstate/compile/__init__.py +38 -38
  15. brainstate/compile/_ad_checkpoint.py +204 -204
  16. brainstate/compile/_ad_checkpoint_test.py +49 -49
  17. brainstate/compile/_conditions.py +256 -256
  18. brainstate/compile/_conditions_test.py +220 -220
  19. brainstate/compile/_error_if.py +92 -92
  20. brainstate/compile/_error_if_test.py +52 -52
  21. brainstate/compile/_jit.py +346 -346
  22. brainstate/compile/_jit_test.py +143 -143
  23. brainstate/compile/_loop_collect_return.py +536 -536
  24. brainstate/compile/_loop_collect_return_test.py +58 -58
  25. brainstate/compile/_loop_no_collection.py +184 -184
  26. brainstate/compile/_loop_no_collection_test.py +50 -50
  27. brainstate/compile/_make_jaxpr.py +888 -888
  28. brainstate/compile/_make_jaxpr_test.py +156 -156
  29. brainstate/compile/_progress_bar.py +202 -202
  30. brainstate/compile/_unvmap.py +159 -159
  31. brainstate/compile/_util.py +147 -147
  32. brainstate/environ.py +563 -563
  33. brainstate/environ_test.py +62 -62
  34. brainstate/functional/__init__.py +27 -26
  35. brainstate/graph/__init__.py +29 -29
  36. brainstate/graph/_graph_node.py +244 -244
  37. brainstate/graph/_graph_node_test.py +73 -73
  38. brainstate/graph/_graph_operation.py +1738 -1738
  39. brainstate/graph/_graph_operation_test.py +563 -563
  40. brainstate/init/__init__.py +26 -26
  41. brainstate/init/_base.py +52 -52
  42. brainstate/init/_generic.py +244 -244
  43. brainstate/init/_random_inits.py +553 -553
  44. brainstate/init/_random_inits_test.py +149 -149
  45. brainstate/init/_regular_inits.py +105 -105
  46. brainstate/init/_regular_inits_test.py +50 -50
  47. brainstate/mixin.py +365 -363
  48. brainstate/mixin_test.py +77 -73
  49. brainstate/nn/__init__.py +135 -131
  50. brainstate/{functional → nn}/_activations.py +808 -813
  51. brainstate/{functional → nn}/_activations_test.py +331 -331
  52. brainstate/nn/_collective_ops.py +514 -514
  53. brainstate/nn/_collective_ops_test.py +43 -43
  54. brainstate/nn/_common.py +178 -178
  55. brainstate/nn/_conv.py +501 -501
  56. brainstate/nn/_conv_test.py +238 -238
  57. brainstate/nn/_delay.py +588 -502
  58. brainstate/nn/_delay_test.py +238 -184
  59. brainstate/nn/_dropout.py +426 -426
  60. brainstate/nn/_dropout_test.py +100 -100
  61. brainstate/nn/_dynamics.py +1343 -1343
  62. brainstate/nn/_dynamics_test.py +78 -78
  63. brainstate/nn/_elementwise.py +1119 -1119
  64. brainstate/nn/_elementwise_test.py +169 -169
  65. brainstate/nn/_embedding.py +58 -58
  66. brainstate/nn/_exp_euler.py +92 -92
  67. brainstate/nn/_exp_euler_test.py +35 -35
  68. brainstate/nn/_fixedprob.py +239 -239
  69. brainstate/nn/_fixedprob_test.py +114 -114
  70. brainstate/nn/_inputs.py +608 -608
  71. brainstate/nn/_linear.py +424 -424
  72. brainstate/nn/_linear_mv.py +83 -83
  73. brainstate/nn/_linear_mv_test.py +120 -120
  74. brainstate/nn/_linear_test.py +107 -107
  75. brainstate/nn/_ltp.py +28 -28
  76. brainstate/nn/_module.py +377 -377
  77. brainstate/nn/_module_test.py +40 -40
  78. brainstate/nn/_neuron.py +705 -705
  79. brainstate/nn/_neuron_test.py +161 -161
  80. brainstate/nn/_normalizations.py +975 -918
  81. brainstate/nn/_normalizations_test.py +73 -73
  82. brainstate/{functional → nn}/_others.py +46 -46
  83. brainstate/nn/_poolings.py +1177 -1177
  84. brainstate/nn/_poolings_test.py +217 -217
  85. brainstate/nn/_projection.py +486 -486
  86. brainstate/nn/_rate_rnns.py +554 -554
  87. brainstate/nn/_rate_rnns_test.py +63 -63
  88. brainstate/nn/_readout.py +209 -209
  89. brainstate/nn/_readout_test.py +53 -53
  90. brainstate/nn/_stp.py +236 -236
  91. brainstate/nn/_synapse.py +505 -505
  92. brainstate/nn/_synapse_test.py +131 -131
  93. brainstate/nn/_synaptic_projection.py +423 -423
  94. brainstate/nn/_synouts.py +162 -162
  95. brainstate/nn/_synouts_test.py +57 -57
  96. brainstate/nn/_utils.py +89 -89
  97. brainstate/nn/metrics.py +388 -388
  98. brainstate/optim/__init__.py +38 -38
  99. brainstate/optim/_base.py +64 -64
  100. brainstate/optim/_lr_scheduler.py +448 -448
  101. brainstate/optim/_lr_scheduler_test.py +50 -50
  102. brainstate/optim/_optax_optimizer.py +152 -152
  103. brainstate/optim/_optax_optimizer_test.py +53 -53
  104. brainstate/optim/_sgd_optimizer.py +1104 -1104
  105. brainstate/random/__init__.py +24 -24
  106. brainstate/random/_rand_funs.py +3616 -3616
  107. brainstate/random/_rand_funs_test.py +567 -567
  108. brainstate/random/_rand_seed.py +210 -210
  109. brainstate/random/_rand_seed_test.py +48 -48
  110. brainstate/random/_rand_state.py +1409 -1409
  111. brainstate/random/_random_for_unit.py +52 -52
  112. brainstate/surrogate.py +1957 -1957
  113. brainstate/transform.py +23 -23
  114. brainstate/typing.py +304 -304
  115. brainstate/util/__init__.py +50 -50
  116. brainstate/util/caller.py +98 -98
  117. brainstate/util/error.py +55 -55
  118. brainstate/util/filter.py +469 -469
  119. brainstate/util/others.py +540 -540
  120. brainstate/util/pretty_pytree.py +945 -945
  121. brainstate/util/pretty_pytree_test.py +159 -159
  122. brainstate/util/pretty_repr.py +328 -328
  123. brainstate/util/pretty_table.py +2954 -2954
  124. brainstate/util/scaling.py +258 -258
  125. brainstate/util/struct.py +523 -523
  126. {brainstate-0.1.8.dist-info → brainstate-0.1.10.dist-info}/METADATA +91 -99
  127. brainstate-0.1.10.dist-info/RECORD +130 -0
  128. {brainstate-0.1.8.dist-info → brainstate-0.1.10.dist-info}/WHEEL +1 -1
  129. {brainstate-0.1.8.dist-info → brainstate-0.1.10.dist-info/licenses}/LICENSE +202 -202
  130. brainstate/functional/_normalization.py +0 -81
  131. brainstate/functional/_spikes.py +0 -204
  132. brainstate-0.1.8.dist-info/RECORD +0 -132
  133. {brainstate-0.1.8.dist-info → brainstate-0.1.10.dist-info}/top_level.txt +0 -0
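The rename entries above (brainstate/{functional → nn}/_activations.py and _others.py, together with the removal of brainstate/functional/_normalization.py and _spikes.py) indicate that the functional helpers now live under brainstate.nn in 0.1.10. The sketch below is a hypothetical migration example, not taken from the package itself: it assumes that functions such as relu are re-exported from brainstate.nn in 0.1.10 and that brainstate.random.rand follows the numpy-style signature; verify both against the installed 0.1.10 API before relying on it.

```python
# Hypothetical migration sketch for the functional -> nn move shown in the
# file list above. Assumes `relu` is exposed from `brainstate.nn` in 0.1.10.
import brainstate
import brainstate.nn as nn

x = brainstate.random.rand(32, 100)   # numpy-style random array (assumed signature)

# 0.1.8-era call site (code lived in brainstate/functional/_activations.py):
# y = brainstate.functional.relu(x)

# 0.1.10-era call site (code now lives in brainstate/nn/_activations.py):
y = nn.relu(x)
```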
brainstate/nn/_module.py CHANGED
@@ -1,377 +1,377 @@
- # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # ==============================================================================
-
- # -*- coding: utf-8 -*-
-
-
- """
- All the basic classes for neural networks in ``brainstate``.
-
- The basic classes include:
-
- - ``Module``: The base class for all the objects in the ecosystem.
- - ``Sequential``: The class for a sequential of modules, which update the modules sequentially.
-
- """
-
- import warnings
- from typing import Sequence, Optional, Tuple, Union, TYPE_CHECKING, Callable
-
- import numpy as np
-
- from brainstate._state import State
- from brainstate.graph import Node, states, nodes, flatten
- from brainstate.mixin import ParamDescriber, ParamDesc
- from brainstate.typing import PathParts, Size
- from brainstate.util import FlattedDict, NestedDict, BrainStateError
-
- # maximum integer
- max_int = np.iinfo(np.int32).max
-
- __all__ = [
-     'Module', 'ElementWiseBlock', 'Sequential',
- ]
-
-
- class Module(Node, ParamDesc):
-     """
-     The Module class for the whole ecosystem.
-
-     The ``Module`` is the base class for all the objects in the ecosystem. It
-     provides the basic functionalities for the objects, including:
-
-     - ``states()``: Collect all states in this node and the children nodes.
-     - ``nodes()``: Collect all children nodes.
-     - ``update()``: The function to specify the updating rule.
-     - ``init_state()``: State initialization function.
-     - ``reset_state()``: State resetting function.
-
-     """
-
-     __module__ = 'brainstate.nn'
-
-     _in_size: Optional[Size]
-     _out_size: Optional[Size]
-     _name: Optional[str]
-
-     if not TYPE_CHECKING:
-         def __init__(self, name: str = None):
-             # check the name
-             if name is not None:
-                 assert isinstance(name, str), f'The name must be a string, but we got {type(name)}: {name}'
-             self._name = name
-
-             # input and output size
-             self._in_size = None
-             self._out_size = None
-
-     @property
-     def name(self):
-         """Name of the model."""
-         return self._name
-
-     @name.setter
-     def name(self, name: str = None):
-         raise AttributeError('The name of the model is read-only.')
-
-     @property
-     def in_size(self) -> Size:
-         return self._in_size
-
-     @in_size.setter
-     def in_size(self, in_size: Sequence[int] | int):
-         if isinstance(in_size, int):
-             in_size = (in_size,)
-         assert isinstance(in_size, (tuple, list)), f"Invalid type of in_size: {type(in_size)}"
-         self._in_size = tuple(in_size)
-
-     @property
-     def out_size(self) -> Size:
-         return self._out_size
-
-     @out_size.setter
-     def out_size(self, out_size: Sequence[int] | int):
-         if isinstance(out_size, int):
-             out_size = (out_size,)
-         assert isinstance(out_size, (tuple, list)), f"Invalid type of out_size: {type(out_size)}"
-         self._out_size = tuple(out_size)
-
-     def update(self, *args, **kwargs):
-         """
-         The function to specify the updating rule.
-         """
-         raise NotImplementedError(
-             f'Subclass of {self.__class__.__name__} must implement "update" function. \n'
-             f'This instance is: \n'
-             f'{self}'
-         )
-
-     def __call__(self, *args, **kwargs):
-         return self.update(*args, **kwargs)
-
-     def __rrshift__(self, other):
-         """
-         Support using right shift operator to call modules.
-
-         Examples
-         --------
-
-         >>> import brainstate as brainstate
-         >>> x = brainstate.random.rand((10, 10))
-         >>> l = brainstate.nn.Dropout(0.5)
-         >>> y = x >> l
-         """
-         return self.__call__(other)
-
-     def states(
-         self,
-         *filters,
-         allowed_hierarchy: Tuple[int, int] = (0, max_int),
-         level: int = None,
-     ) -> FlattedDict[PathParts, State] | Tuple[FlattedDict[PathParts, State], ...]:
-         """
-         Collect all states in this node and the children nodes.
-
-         Parameters
-         ----------
-         filters : Any
-             The filters to select the states.
-         allowed_hierarchy : tuple of int
-             The hierarchy of the states to be collected.
-         level : int
-             The level of the states to be collected. Has been deprecated.
-
-         Returns
-         -------
-         states : FlattedDict, tuple of FlattedDict
-             The collection contained (the path, the state).
-         """
-         if level is not None:
-             allowed_hierarchy = (0, level)
-             warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
-                           DeprecationWarning)
-
-         return states(self, *filters, allowed_hierarchy=allowed_hierarchy)
-
-     def state_trees(
-         self,
-         *filters,
-     ) -> NestedDict[PathParts, State] | Tuple[NestedDict[PathParts, State], ...]:
-         """
-         Collect all states in this node and the children nodes.
-
-         Parameters
-         ----------
-         filters : tuple
-             The filters to select the states.
-
-         Returns
-         -------
-         states : FlattedDict, tuple of FlattedDict
-             The collection contained (the path, the state).
-         """
-         graph_def, state_tree = flatten(self)
-         if len(filters):
-             return state_tree.filter(*filters)
-         return state_tree
-
-     def nodes(
-         self,
-         *filters,
-         allowed_hierarchy: Tuple[int, int] = (0, max_int),
-         level: int = None,
-     ) -> FlattedDict[PathParts, Node] | Tuple[FlattedDict[PathParts, Node], ...]:
-         """
-         Collect all children nodes.
-
-         Parameters
-         ----------
-         filters : Any
-             The filters to select the states.
-         allowed_hierarchy : tuple of int
-             The hierarchy of the states to be collected.
-         level : int
-             The level of the states to be collected. Has been deprecated.
-
-         Returns
-         -------
-         nodes : FlattedDict, tuple of FlattedDict
-             The collection contained (the path, the node).
-         """
-         if level is not None:
-             allowed_hierarchy = (0, level)
-             warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
-                           DeprecationWarning)
-
-         return nodes(self, *filters, allowed_hierarchy=allowed_hierarchy)
-
-     def init_state(self, *args, **kwargs):
-         """
-         State initialization function.
-         """
-         pass
-
-     def reset_state(self, *args, **kwargs):
-         """
-         State resetting function.
-         """
-         pass
-
-     def __pretty_repr_item__(self, name, value):
-         if name.startswith('_'):
-             return None if value is None else (name[1:], value)  # skip the first `_`
-         return name, value
-
-
- class ElementWiseBlock(Module):
-     __module__ = 'brainstate.nn'
-
-
- class Sequential(Module):
-     """
-     A sequential `input-output` module.
-
-     Modules will be added to it in the order they are passed in the
-     constructor. Alternatively, an ``dict`` of modules can be
-     passed in. The ``update()`` method of ``Sequential`` accepts any
-     input and forwards it to the first module it contains. It then
-     "chains" outputs to inputs sequentially for each subsequent module,
-     finally returning the output of the last module.
-
-     The value a ``Sequential`` provides over manually calling a sequence
-     of modules is that it allows treating the whole container as a
-     single module, such that performing a transformation on the
-     ``Sequential`` applies to each of the modules it stores (which are
-     each a registered submodule of the ``Sequential``).
-
-     What's the difference between a ``Sequential`` and a
-     :py:class:`Container`? A ``Container`` is exactly what it
-     sounds like--a container to store :py:class:`DynamicalSystem` s!
-     On the other hand, the layers in a ``Sequential`` are connected
-     in a cascading way.
-
-     Examples
-     --------
-
-     >>> import jax
-     >>> import brainstate as brainstate
-     >>> import brainstate.nn as nn
-     >>>
-     >>> # composing ANN models
-     >>> l = nn.Sequential(nn.Linear(100, 10),
-     >>>                   jax.nn.relu,
-     >>>                   nn.Linear(10, 2))
-     >>> l(brainstate.random.random((256, 100)))
-
-     Args:
-       modules_as_tuple: The children modules.
-       modules_as_dict: The children modules.
-       name: The object name.
-     """
-     __module__ = 'brainstate.nn'
-
-     def __init__(self, first: Module, *layers):
-         super().__init__()
-         self.layers = []
-
-         # add all modules
-         assert isinstance(first, Module), 'The first module should be an instance of Module.'
-         in_size = first.out_size
-         self.layers.append(first)
-         for module in layers:
-             module, in_size = self._format_module(module, in_size)
-             self.layers.append(module)
-
-         # the input and output shape
-         if first.in_size is not None:
-             self.in_size = first.in_size
-         if in_size is not None:
-             self.out_size = tuple(in_size)
-
-     def update(self, x):
-         """Update function of a sequential model.
-         """
-         for m in self.layers:
-             try:
-                 x = m(x)
-             except Exception as e:
-                 raise BrainStateError(
-                     f'The module \n'
-                     f'{m}\n'
-                     f'failed to update with input {x}\n'
-                 ) from e
-         return x
-
-     def __getitem__(self, key: Union[int, slice]):
-         if isinstance(key, slice):
-             return Sequential(*self.layers[key])
-         elif isinstance(key, int):
-             return self.layers[key]
-         elif isinstance(key, (tuple, list)):
-             return Sequential(*[self.layers[k] for k in key])
-         else:
-             raise KeyError(f'Unknown type of key: {type(key)}')
-
-     def append(self, layer: Callable):
-         """
-         Append a layer to the sequential model.
-
-         This method adds a new layer to the end of the sequential model. The layer can be
-         either a Module instance, an ElementWiseBlock instance, or a callable function. If the
-         layer is a callable function, it will be wrapped in an ElementWiseBlock instance.
-
-         Parameters:
-         ----------
-         layer : Callable
-             The layer to be appended to the sequential model. It can be a Module instance,
-             an ElementWiseBlock instance, or a callable function.
-
-         Raises:
-         -------
-         ValueError
-             If the sequential model is empty and the first layer is a callable function.
-
-         Returns:
-         --------
-         None
-             The method does not return any value. It modifies the sequential model by adding
-             the new layer to the end.
-         """
-         if len(self.layers) == 0:
-             raise ValueError('The first layer should be a module, not a function.')
-         module, in_size = self._format_module(layer, self.out_size)
-         self.layers.append(module)
-         self.out_size = in_size
-
-     def _format_module(self, module, in_size):
-         if isinstance(module, ParamDescriber):
-             if in_size is None:
-                 raise ValueError(
-                     'The input size should be specified. '
-                     f'Please set the in_size attribute of the previous module: \n'
-                     f'{self.layers[-1]}'
-                 )
-             module = module(in_size=in_size)
-             assert isinstance(module, Module), 'The module should be an instance of Module.'
-             out_size = module.out_size
-         elif isinstance(module, ElementWiseBlock):
-             out_size = in_size
-         elif isinstance(module, Module):
-             out_size = module.out_size
-         elif callable(module):
-             out_size = in_size
-         else:
-             raise TypeError(f"Unsupported type {type(module)}. ")
-         return module, out_size
+ # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+
+ # -*- coding: utf-8 -*-
+
+
+ """
+ All the basic classes for neural networks in ``brainstate``.
+
+ The basic classes include:
+
+ - ``Module``: The base class for all the objects in the ecosystem.
+ - ``Sequential``: The class for a sequential of modules, which update the modules sequentially.
+
+ """
+
+ import warnings
+ from typing import Sequence, Optional, Tuple, Union, TYPE_CHECKING, Callable
+
+ import numpy as np
+
+ from brainstate._state import State
+ from brainstate.graph import Node, states, nodes, flatten
+ from brainstate.mixin import ParamDescriber, ParamDesc
+ from brainstate.typing import PathParts, Size
+ from brainstate.util import FlattedDict, NestedDict, BrainStateError
+
+ # maximum integer
+ max_int = np.iinfo(np.int32).max
+
+ __all__ = [
+     'Module', 'ElementWiseBlock', 'Sequential',
+ ]
+
+
+ class Module(Node, ParamDesc):
+     """
+     The Module class for the whole ecosystem.
+
+     The ``Module`` is the base class for all the objects in the ecosystem. It
+     provides the basic functionalities for the objects, including:
+
+     - ``states()``: Collect all states in this node and the children nodes.
+     - ``nodes()``: Collect all children nodes.
+     - ``update()``: The function to specify the updating rule.
+     - ``init_state()``: State initialization function.
+     - ``reset_state()``: State resetting function.
+
+     """
+
+     __module__ = 'brainstate.nn'
+
+     _in_size: Optional[Size]
+     _out_size: Optional[Size]
+     _name: Optional[str]
+
+     if not TYPE_CHECKING:
+         def __init__(self, name: str = None):
+             # check the name
+             if name is not None:
+                 assert isinstance(name, str), f'The name must be a string, but we got {type(name)}: {name}'
+             self._name = name
+
+             # input and output size
+             self._in_size = None
+             self._out_size = None
+
+     @property
+     def name(self):
+         """Name of the model."""
+         return self._name
+
+     @name.setter
+     def name(self, name: str = None):
+         raise AttributeError('The name of the model is read-only.')
+
+     @property
+     def in_size(self) -> Size:
+         return self._in_size
+
+     @in_size.setter
+     def in_size(self, in_size: Sequence[int] | int):
+         if isinstance(in_size, int):
+             in_size = (in_size,)
+         assert isinstance(in_size, (tuple, list)), f"Invalid type of in_size: {type(in_size)}"
+         self._in_size = tuple(in_size)
+
+     @property
+     def out_size(self) -> Size:
+         return self._out_size
+
+     @out_size.setter
+     def out_size(self, out_size: Sequence[int] | int):
+         if isinstance(out_size, int):
+             out_size = (out_size,)
+         assert isinstance(out_size, (tuple, list)), f"Invalid type of out_size: {type(out_size)}"
+         self._out_size = tuple(out_size)
+
+     def update(self, *args, **kwargs):
+         """
+         The function to specify the updating rule.
+         """
+         raise NotImplementedError(
+             f'Subclass of {self.__class__.__name__} must implement "update" function. \n'
+             f'This instance is: \n'
+             f'{self}'
+         )
+
+     def __call__(self, *args, **kwargs):
+         return self.update(*args, **kwargs)
+
+     def __rrshift__(self, other):
+         """
+         Support using right shift operator to call modules.
+
+         Examples
+         --------
+
+         >>> import brainstate as brainstate
+         >>> x = brainstate.random.rand((10, 10))
+         >>> l = brainstate.nn.Dropout(0.5)
+         >>> y = x >> l
+         """
+         return self.__call__(other)
+
+     def states(
+         self,
+         *filters,
+         allowed_hierarchy: Tuple[int, int] = (0, max_int),
+         level: int = None,
+     ) -> FlattedDict[PathParts, State] | Tuple[FlattedDict[PathParts, State], ...]:
+         """
+         Collect all states in this node and the children nodes.
+
+         Parameters
+         ----------
+         filters : Any
+             The filters to select the states.
+         allowed_hierarchy : tuple of int
+             The hierarchy of the states to be collected.
+         level : int
+             The level of the states to be collected. Has been deprecated.
+
+         Returns
+         -------
+         states : FlattedDict, tuple of FlattedDict
+             The collection contained (the path, the state).
+         """
+         if level is not None:
+             allowed_hierarchy = (0, level)
+             warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
+                           DeprecationWarning)
+
+         return states(self, *filters, allowed_hierarchy=allowed_hierarchy)
+
+     def state_trees(
+         self,
+         *filters,
+     ) -> NestedDict[PathParts, State] | Tuple[NestedDict[PathParts, State], ...]:
+         """
+         Collect all states in this node and the children nodes.
+
+         Parameters
+         ----------
+         filters : tuple
+             The filters to select the states.
+
+         Returns
+         -------
+         states : FlattedDict, tuple of FlattedDict
+             The collection contained (the path, the state).
+         """
+         graph_def, state_tree = flatten(self)
+         if len(filters):
+             return state_tree.filter(*filters)
+         return state_tree
+
+     def nodes(
+         self,
+         *filters,
+         allowed_hierarchy: Tuple[int, int] = (0, max_int),
+         level: int = None,
+     ) -> FlattedDict[PathParts, Node] | Tuple[FlattedDict[PathParts, Node], ...]:
+         """
+         Collect all children nodes.
+
+         Parameters
+         ----------
+         filters : Any
+             The filters to select the states.
+         allowed_hierarchy : tuple of int
+             The hierarchy of the states to be collected.
+         level : int
+             The level of the states to be collected. Has been deprecated.
+
+         Returns
+         -------
+         nodes : FlattedDict, tuple of FlattedDict
+             The collection contained (the path, the node).
+         """
+         if level is not None:
+             allowed_hierarchy = (0, level)
+             warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
+                           DeprecationWarning)
+
+         return nodes(self, *filters, allowed_hierarchy=allowed_hierarchy)
+
+     def init_state(self, *args, **kwargs):
+         """
+         State initialization function.
+         """
+         pass
+
+     def reset_state(self, *args, **kwargs):
+         """
+         State resetting function.
+         """
+         pass
+
+     def __pretty_repr_item__(self, name, value):
+         if name.startswith('_'):
+             return None if value is None else (name[1:], value)  # skip the first `_`
+         return name, value
+
+
+ class ElementWiseBlock(Module):
+     __module__ = 'brainstate.nn'
+
+
+ class Sequential(Module):
+     """
+     A sequential `input-output` module.
+
+     Modules will be added to it in the order they are passed in the
+     constructor. Alternatively, an ``dict`` of modules can be
+     passed in. The ``update()`` method of ``Sequential`` accepts any
+     input and forwards it to the first module it contains. It then
+     "chains" outputs to inputs sequentially for each subsequent module,
+     finally returning the output of the last module.
+
+     The value a ``Sequential`` provides over manually calling a sequence
+     of modules is that it allows treating the whole container as a
+     single module, such that performing a transformation on the
+     ``Sequential`` applies to each of the modules it stores (which are
+     each a registered submodule of the ``Sequential``).
+
+     What's the difference between a ``Sequential`` and a
+     :py:class:`Container`? A ``Container`` is exactly what it
+     sounds like--a container to store :py:class:`DynamicalSystem` s!
+     On the other hand, the layers in a ``Sequential`` are connected
+     in a cascading way.
+
+     Examples
+     --------
+
+     >>> import jax
+     >>> import brainstate as brainstate
+     >>> import brainstate.nn as nn
+     >>>
+     >>> # composing ANN models
+     >>> l = nn.Sequential(nn.Linear(100, 10),
+     >>>                   jax.nn.relu,
+     >>>                   nn.Linear(10, 2))
+     >>> l(brainstate.random.random((256, 100)))
+
+     Args:
+       modules_as_tuple: The children modules.
+       modules_as_dict: The children modules.
+       name: The object name.
+     """
+     __module__ = 'brainstate.nn'
+
+     def __init__(self, first: Module, *layers):
+         super().__init__()
+         self.layers = []
+
+         # add all modules
+         assert isinstance(first, Module), 'The first module should be an instance of Module.'
+         in_size = first.out_size
+         self.layers.append(first)
+         for module in layers:
+             module, in_size = self._format_module(module, in_size)
+             self.layers.append(module)
+
+         # the input and output shape
+         if first.in_size is not None:
+             self.in_size = first.in_size
+         if in_size is not None:
+             self.out_size = tuple(in_size)
+
+     def update(self, x):
+         """Update function of a sequential model.
+         """
+         for m in self.layers:
+             try:
+                 x = m(x)
+             except Exception as e:
+                 raise BrainStateError(
+                     f'The module \n'
+                     f'{m}\n'
+                     f'failed to update with input {x}\n'
+                 ) from e
+         return x
+
+     def __getitem__(self, key: Union[int, slice]):
+         if isinstance(key, slice):
+             return Sequential(*self.layers[key])
+         elif isinstance(key, int):
+             return self.layers[key]
+         elif isinstance(key, (tuple, list)):
+             return Sequential(*[self.layers[k] for k in key])
+         else:
+             raise KeyError(f'Unknown type of key: {type(key)}')
+
+     def append(self, layer: Callable):
+         """
+         Append a layer to the sequential model.
+
+         This method adds a new layer to the end of the sequential model. The layer can be
+         either a Module instance, an ElementWiseBlock instance, or a callable function. If the
+         layer is a callable function, it will be wrapped in an ElementWiseBlock instance.
+
+         Parameters:
+         ----------
+         layer : Callable
+             The layer to be appended to the sequential model. It can be a Module instance,
+             an ElementWiseBlock instance, or a callable function.
+
+         Raises:
+         -------
+         ValueError
+             If the sequential model is empty and the first layer is a callable function.
+
+         Returns:
+         --------
+         None
+             The method does not return any value. It modifies the sequential model by adding
+             the new layer to the end.
+         """
+         if len(self.layers) == 0:
+             raise ValueError('The first layer should be a module, not a function.')
+         module, in_size = self._format_module(layer, self.out_size)
+         self.layers.append(module)
+         self.out_size = in_size
+
+     def _format_module(self, module, in_size):
+         if isinstance(module, ParamDescriber):
+             if in_size is None:
+                 raise ValueError(
+                     'The input size should be specified. '
+                     f'Please set the in_size attribute of the previous module: \n'
+                     f'{self.layers[-1]}'
+                 )
+             module = module(in_size=in_size)
+             assert isinstance(module, Module), 'The module should be an instance of Module.'
+             out_size = module.out_size
+         elif isinstance(module, ElementWiseBlock):
+             out_size = in_size
+         elif isinstance(module, Module):
+             out_size = module.out_size
+         elif callable(module):
+             out_size = in_size
+         else:
+             raise TypeError(f"Unsupported type {type(module)}. ")
+         return module, out_size
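Since the hunk above is effectively a full listing of brainstate/nn/_module.py, a short usage sketch may help readers of the diff. It is based only on the code shown here (subclasses must override Module.update, __call__ forwards to update, Sequential chains layers and accepts plain callables, __getitem__ supports int and slice keys). nn.Linear and brainstate.random.rand are assumed to behave as the Sequential docstring suggests; treat this as an illustration, not a verified recipe from the package.

```python
# Usage sketch for the Module / Sequential classes listed in the diff above.
# Assumes brainstate.nn.Linear(100, 10) and brainstate.random.rand(256, 100)
# work as the docstring example implies.
import jax
import brainstate
import brainstate.nn as nn


class Scale(nn.Module):
    """Toy subclass: Module requires `update()` to be overridden."""

    def __init__(self, factor: float):
        super().__init__()
        self.factor = factor

    def update(self, x):
        # Module.__call__ forwards to update(), so Scale(2.0)(x) also works.
        return x * self.factor


# Plain callables such as jax.nn.relu are accepted and simply pass the
# previous layer's output size through (see _format_module above).
model = nn.Sequential(nn.Linear(100, 10), jax.nn.relu, Scale(2.0))

x = brainstate.random.rand(256, 100)
y = model(x)        # equivalent to model.update(x)
first = model[0]    # an int key returns a single layer
head = model[0:2]   # a slice returns a new Sequential
```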