brainstate-0.2.1-py2.py3-none-any.whl → brainstate-0.2.2-py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (115)
  1. brainstate/__init__.py +167 -169
  2. brainstate/_compatible_import.py +340 -340
  3. brainstate/_compatible_import_test.py +681 -681
  4. brainstate/_deprecation.py +210 -210
  5. brainstate/_deprecation_test.py +2297 -2319
  6. brainstate/_error.py +45 -45
  7. brainstate/_state.py +2157 -1652
  8. brainstate/_state_test.py +1129 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -1495
  11. brainstate/environ_test.py +1223 -1223
  12. brainstate/graph/__init__.py +22 -22
  13. brainstate/graph/_node.py +240 -240
  14. brainstate/graph/_node_test.py +589 -589
  15. brainstate/graph/_operation.py +1620 -1624
  16. brainstate/graph/_operation_test.py +1147 -1147
  17. brainstate/mixin.py +1447 -1433
  18. brainstate/mixin_test.py +1017 -1017
  19. brainstate/nn/__init__.py +146 -137
  20. brainstate/nn/_activations.py +1100 -1100
  21. brainstate/nn/_activations_test.py +354 -354
  22. brainstate/nn/_collective_ops.py +635 -633
  23. brainstate/nn/_collective_ops_test.py +774 -774
  24. brainstate/nn/_common.py +226 -226
  25. brainstate/nn/_common_test.py +134 -154
  26. brainstate/nn/_conv.py +2010 -2010
  27. brainstate/nn/_conv_test.py +849 -849
  28. brainstate/nn/_delay.py +575 -575
  29. brainstate/nn/_delay_test.py +243 -243
  30. brainstate/nn/_dropout.py +618 -618
  31. brainstate/nn/_dropout_test.py +480 -477
  32. brainstate/nn/_dynamics.py +870 -1267
  33. brainstate/nn/_dynamics_test.py +53 -67
  34. brainstate/nn/_elementwise.py +1298 -1298
  35. brainstate/nn/_elementwise_test.py +829 -829
  36. brainstate/nn/_embedding.py +408 -408
  37. brainstate/nn/_embedding_test.py +156 -156
  38. brainstate/nn/_event_fixedprob.py +233 -233
  39. brainstate/nn/_event_fixedprob_test.py +115 -115
  40. brainstate/nn/_event_linear.py +83 -83
  41. brainstate/nn/_event_linear_test.py +121 -121
  42. brainstate/nn/_exp_euler.py +254 -254
  43. brainstate/nn/_exp_euler_test.py +377 -377
  44. brainstate/nn/_linear.py +744 -744
  45. brainstate/nn/_linear_test.py +475 -475
  46. brainstate/nn/_metrics.py +1070 -1070
  47. brainstate/nn/_metrics_test.py +611 -611
  48. brainstate/nn/_module.py +391 -384
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -1334
  51. brainstate/nn/_normalizations_test.py +699 -699
  52. brainstate/nn/_paddings.py +1020 -1020
  53. brainstate/nn/_paddings_test.py +722 -722
  54. brainstate/nn/_poolings.py +2239 -2239
  55. brainstate/nn/_poolings_test.py +952 -952
  56. brainstate/nn/_rnns.py +946 -946
  57. brainstate/nn/_rnns_test.py +592 -592
  58. brainstate/nn/_utils.py +216 -216
  59. brainstate/nn/_utils_test.py +401 -401
  60. brainstate/nn/init.py +809 -809
  61. brainstate/nn/init_test.py +180 -180
  62. brainstate/random/__init__.py +270 -270
  63. brainstate/random/{_rand_funs.py → _fun.py} +3938 -3938
  64. brainstate/random/{_rand_funs_test.py → _fun_test.py} +638 -640
  65. brainstate/random/_impl.py +672 -0
  66. brainstate/random/{_rand_seed.py → _seed.py} +675 -675
  67. brainstate/random/{_rand_seed_test.py → _seed_test.py} +48 -48
  68. brainstate/random/{_rand_state.py → _state.py} +1320 -1617
  69. brainstate/random/{_rand_state_test.py → _state_test.py} +551 -551
  70. brainstate/transform/__init__.py +56 -59
  71. brainstate/transform/_ad_checkpoint.py +176 -176
  72. brainstate/transform/_ad_checkpoint_test.py +49 -49
  73. brainstate/transform/_autograd.py +1025 -1025
  74. brainstate/transform/_autograd_test.py +1289 -1289
  75. brainstate/transform/_conditions.py +316 -316
  76. brainstate/transform/_conditions_test.py +220 -220
  77. brainstate/transform/_error_if.py +94 -94
  78. brainstate/transform/_error_if_test.py +52 -52
  79. brainstate/transform/_find_state.py +200 -0
  80. brainstate/transform/_find_state_test.py +84 -0
  81. brainstate/transform/_jit.py +399 -399
  82. brainstate/transform/_jit_test.py +143 -143
  83. brainstate/transform/_loop_collect_return.py +675 -675
  84. brainstate/transform/_loop_collect_return_test.py +58 -58
  85. brainstate/transform/_loop_no_collection.py +283 -283
  86. brainstate/transform/_loop_no_collection_test.py +50 -50
  87. brainstate/transform/_make_jaxpr.py +2176 -2016
  88. brainstate/transform/_make_jaxpr_test.py +1634 -1510
  89. brainstate/transform/_mapping.py +607 -529
  90. brainstate/transform/_mapping_test.py +104 -194
  91. brainstate/transform/_progress_bar.py +255 -255
  92. brainstate/transform/_unvmap.py +256 -256
  93. brainstate/transform/_util.py +286 -286
  94. brainstate/typing.py +837 -837
  95. brainstate/typing_test.py +780 -780
  96. brainstate/util/__init__.py +27 -27
  97. brainstate/util/_others.py +1024 -1024
  98. brainstate/util/_others_test.py +962 -962
  99. brainstate/util/_pretty_pytree.py +1301 -1301
  100. brainstate/util/_pretty_pytree_test.py +675 -675
  101. brainstate/util/_pretty_repr.py +462 -462
  102. brainstate/util/_pretty_repr_test.py +696 -696
  103. brainstate/util/filter.py +945 -945
  104. brainstate/util/filter_test.py +911 -911
  105. brainstate/util/struct.py +910 -910
  106. brainstate/util/struct_test.py +602 -602
  107. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/METADATA +108 -108
  108. brainstate-0.2.2.dist-info/RECORD +111 -0
  109. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/licenses/LICENSE +202 -202
  110. brainstate/transform/_eval_shape.py +0 -145
  111. brainstate/transform/_eval_shape_test.py +0 -38
  112. brainstate/transform/_random.py +0 -171
  113. brainstate-0.2.1.dist-info/RECORD +0 -111
  114. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/WHEEL +0 -0
  115. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/top_level.txt +0 -0
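Entries 63–69 above rename the private submodules of `brainstate.random`, dropping the `_rand_` prefix (`_rand_funs.py → _fun.py`, `_rand_seed.py → _seed.py`, `_rand_state.py → _state.py`, plus their tests) and adding a new `_impl.py`. The sketch below is a hypothetical guard for code that imported those private paths directly; the listing does not show whether the public `brainstate.random` namespace changed, so the example assumes it did not.

```python
# Hypothetical shim for code that imported the renamed private submodules of
# brainstate.random directly (0.2.1: _rand_funs/_rand_seed/_rand_state;
# 0.2.2: _fun/_seed/_state). Assumes only the private module paths moved.
try:
    from brainstate.random import _fun as _rand_funs   # 0.2.2 layout
except ImportError:
    from brainstate.random import _rand_funs           # 0.2.1 layout

# The public namespace is the safer entry point in either version.
import brainstate
x = brainstate.random.random((10, 10))
```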
brainstate/nn/_module.py CHANGED
@@ -1,384 +1,391 @@
1
- # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- # ==============================================================================
15
-
16
- # -*- coding: utf-8 -*-
17
-
18
-
19
- """
20
- All the basic classes for neural networks in ``brainstate``.
21
-
22
- The basic classes include:
23
-
24
- - ``Module``: The base class for all the objects in the ecosystem.
25
- - ``Sequential``: The class for a sequential of modules, which update the modules sequentially.
26
-
27
- """
28
-
29
- import warnings
30
- from typing import Sequence, Optional, Tuple, Union, TYPE_CHECKING, Callable
31
-
32
- import numpy as np
33
-
34
- from brainstate._error import BrainStateError
35
- from brainstate._state import State
36
- from brainstate.graph import Node, states, nodes, flatten
37
- from brainstate.mixin import ParamDescriber, ParamDesc
38
- from brainstate.typing import PathParts, Size
39
- from brainstate.util import FlattedDict, NestedDict
40
-
41
- # maximum integer
42
- max_int = np.iinfo(np.int32).max
43
-
44
- __all__ = [
45
- 'Module', 'ElementWiseBlock', 'Sequential',
46
- ]
47
-
48
-
49
- class Module(Node, ParamDesc):
50
- """
51
- The Module class for the whole ecosystem.
52
-
53
- The ``Module`` is the base class for all the objects in the ecosystem. It
54
- provides the basic functionalities for the objects, including:
55
-
56
- - ``states()``: Collect all states in this node and the children nodes.
57
- - ``nodes()``: Collect all children nodes.
58
- - ``update()``: The function to specify the updating rule.
59
- - ``init_state()``: State initialization function.
60
- - ``reset_state()``: State resetting function.
61
-
62
- """
63
-
64
- __module__ = 'brainstate.nn'
65
-
66
- _in_size: Optional[Size]
67
- _out_size: Optional[Size]
68
- _name: Optional[str]
69
-
70
- if not TYPE_CHECKING:
71
- def __init__(self, name: str = None):
72
- # check the name
73
- if name is not None:
74
- assert isinstance(name, str), f'The name must be a string, but we got {type(name)}: {name}'
75
- self._name = name
76
-
77
- # input and output size
78
- self._in_size = None
79
- self._out_size = None
80
-
81
- @property
82
- def name(self):
83
- """Name of the model."""
84
- return self._name
85
-
86
- @name.setter
87
- def name(self, name: str = None):
88
- raise AttributeError('The name of the model is read-only.')
89
-
90
- @property
91
- def in_size(self) -> Size:
92
- return self._in_size
93
-
94
- @in_size.setter
95
- def in_size(self, in_size: Sequence[int] | int):
96
- if isinstance(in_size, int):
97
- in_size = (in_size,)
98
- elif isinstance(in_size, np.generic):
99
- if np.issubdtype(in_size, np.integer) and in_size.ndim == 0:
100
- in_size = (int(in_size),)
101
- assert isinstance(in_size, (tuple, list)), f"Invalid type of in_size: {in_size} {type(in_size)}"
102
- self._in_size = tuple(in_size)
103
-
104
- @property
105
- def out_size(self) -> Size:
106
- return self._out_size
107
-
108
- @out_size.setter
109
- def out_size(self, out_size: Sequence[int] | int):
110
- if isinstance(out_size, int):
111
- out_size = (out_size,)
112
- elif isinstance(out_size, np.ndarray):
113
- if np.issubdtype(out_size, np.integer) and out_size.ndim == 0:
114
- out_size = (int(out_size),)
115
- assert isinstance(out_size, (tuple, list)), f"Invalid type of out_size: {type(out_size)}"
116
- self._out_size = tuple(out_size)
117
-
118
- def update(self, *args, **kwargs):
119
- """
120
- The function to specify the updating rule.
121
- """
122
- raise NotImplementedError(
123
- f'Subclass of {self.__class__.__name__} must implement "update" function. \n'
124
- f'This instance is: \n'
125
- f'{self}'
126
- )
127
-
128
- def __call__(self, *args, **kwargs):
129
- return self.update(*args, **kwargs)
130
-
131
- def __rrshift__(self, other):
132
- """
133
- Support using right shift operator to call modules.
134
-
135
- Examples
136
- --------
137
-
138
- >>> import brainstate as brainstate
139
- >>> x = brainstate.random.rand((10, 10))
140
- >>> l = brainstate.nn.Dropout(0.5)
141
- >>> y = x >> l
142
- """
143
- return self.__call__(other)
144
-
145
- def states(
146
- self,
147
- *filters,
148
- allowed_hierarchy: Tuple[int, int] = (0, max_int),
149
- level: int = None,
150
- ) -> FlattedDict[PathParts, State] | Tuple[FlattedDict[PathParts, State], ...]:
151
- """
152
- Collect all states in this node and the children nodes.
153
-
154
- Parameters
155
- ----------
156
- filters : Any
157
- The filters to select the states.
158
- allowed_hierarchy : tuple of int
159
- The hierarchy of the states to be collected.
160
- level : int
161
- The level of the states to be collected. Has been deprecated.
162
-
163
- Returns
164
- -------
165
- states : FlattedDict, tuple of FlattedDict
166
- The collection contained (the path, the state).
167
- """
168
- if level is not None:
169
- allowed_hierarchy = (0, level)
170
- warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
171
- DeprecationWarning)
172
-
173
- return states(self, *filters, allowed_hierarchy=allowed_hierarchy)
174
-
175
- def state_trees(
176
- self,
177
- *filters,
178
- ) -> NestedDict[PathParts, State] | Tuple[NestedDict[PathParts, State], ...]:
179
- """
180
- Collect all states in this node and the children nodes.
181
-
182
- Parameters
183
- ----------
184
- filters : tuple
185
- The filters to select the states.
186
-
187
- Returns
188
- -------
189
- states : FlattedDict, tuple of FlattedDict
190
- The collection contained (the path, the state).
191
- """
192
- graph_def, state_tree = flatten(self)
193
- if len(filters):
194
- return state_tree.filter(*filters)
195
- return state_tree
196
-
197
- def nodes(
198
- self,
199
- *filters,
200
- allowed_hierarchy: Tuple[int, int] = (0, max_int),
201
- level: int = None,
202
- ) -> FlattedDict[PathParts, Node] | Tuple[FlattedDict[PathParts, Node], ...]:
203
- """
204
- Collect all children nodes.
205
-
206
- Parameters
207
- ----------
208
- filters : Any
209
- The filters to select the states.
210
- allowed_hierarchy : tuple of int
211
- The hierarchy of the states to be collected.
212
- level : int
213
- The level of the states to be collected. Has been deprecated.
214
-
215
- Returns
216
- -------
217
- nodes : FlattedDict, tuple of FlattedDict
218
- The collection contained (the path, the node).
219
- """
220
- if level is not None:
221
- allowed_hierarchy = (0, level)
222
- warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
223
- DeprecationWarning)
224
-
225
- return nodes(self, *filters, allowed_hierarchy=allowed_hierarchy)
226
-
227
- def init_state(self, *args, **kwargs):
228
- """
229
- State initialization function.
230
- """
231
- pass
232
-
233
- def reset_state(self, *args, **kwargs):
234
- """
235
- State resetting function.
236
- """
237
- pass
238
-
239
- def __pretty_repr_item__(self, name, value):
240
- if name.startswith('_'):
241
- return None if value is None else (name[1:], value) # skip the first `_`
242
- return name, value
243
-
244
-
245
- class ElementWiseBlock(Module):
246
- __module__ = 'brainstate.nn'
247
-
248
-
249
- class Sequential(Module):
250
- """
251
- A sequential `input-output` module.
252
-
253
- Modules will be added to it in the order they are passed in the
254
- constructor. Alternatively, an ``dict`` of modules can be
255
- passed in. The ``update()`` method of ``Sequential`` accepts any
256
- input and forwards it to the first module it contains. It then
257
- "chains" outputs to inputs sequentially for each subsequent module,
258
- finally returning the output of the last module.
259
-
260
- The value a ``Sequential`` provides over manually calling a sequence
261
- of modules is that it allows treating the whole container as a
262
- single module, such that performing a transformation on the
263
- ``Sequential`` applies to each of the modules it stores (which are
264
- each a registered submodule of the ``Sequential``).
265
-
266
- What's the difference between a ``Sequential`` and a
267
- :py:class:`Container`? A ``Container`` is exactly what it
268
- sounds like--a container to store :py:class:`DynamicalSystem` s!
269
- On the other hand, the layers in a ``Sequential`` are connected
270
- in a cascading way.
271
-
272
- Examples
273
- --------
274
-
275
- >>> import jax
276
- >>> import brainstate as brainstate
277
- >>> import brainstate.nn as nn
278
- >>>
279
- >>> # composing ANN models
280
- >>> l = nn.Sequential(nn.Linear(100, 10),
281
- >>> jax.nn.relu,
282
- >>> nn.Linear(10, 2))
283
- >>> l(brainstate.random.random((256, 100)))
284
-
285
- Args:
286
- modules_as_tuple: The children modules.
287
- modules_as_dict: The children modules.
288
- name: The object name.
289
- """
290
- __module__ = 'brainstate.nn'
291
-
292
- def __init__(self, first: Module, *layers):
293
- super().__init__()
294
- self.layers = []
295
-
296
- # add all modules
297
- assert isinstance(first, Module), 'The first module should be an instance of Module.'
298
- in_size = first.out_size
299
- self.layers.append(first)
300
- for module in layers:
301
- module, in_size = self._format_module(module, in_size)
302
- self.layers.append(module)
303
-
304
- # the input and output shape
305
- if first.in_size is not None:
306
- self.in_size = first.in_size
307
- if in_size is not None:
308
- self.out_size = tuple(in_size)
309
-
310
- def update(self, x):
311
- """Update function of a sequential model.
312
- """
313
- for m in self.layers:
314
- try:
315
- x = m(x)
316
- except Exception as e:
317
- raise BrainStateError(
318
- f'The module \n'
319
- f'{m}\n'
320
- f'failed to update with input {x}\n'
321
- ) from e
322
- return x
323
-
324
- def __getitem__(self, key: Union[int, slice]):
325
- if isinstance(key, slice):
326
- return Sequential(*self.layers[key])
327
- elif isinstance(key, int):
328
- return self.layers[key]
329
- elif isinstance(key, (tuple, list)):
330
- return Sequential(*[self.layers[k] for k in key])
331
- else:
332
- raise KeyError(f'Unknown type of key: {type(key)}')
333
-
334
- def append(self, layer: Callable):
335
- """
336
- Append a layer to the sequential model.
337
-
338
- This method adds a new layer to the end of the sequential model. The layer can be
339
- either a Module instance, an ElementWiseBlock instance, or a callable function. If the
340
- layer is a callable function, it will be wrapped in an ElementWiseBlock instance.
341
-
342
- Parameters:
343
- ----------
344
- layer : Callable
345
- The layer to be appended to the sequential model. It can be a Module instance,
346
- an ElementWiseBlock instance, or a callable function.
347
-
348
- Raises:
349
- -------
350
- ValueError
351
- If the sequential model is empty and the first layer is a callable function.
352
-
353
- Returns:
354
- --------
355
- None
356
- The method does not return any value. It modifies the sequential model by adding
357
- the new layer to the end.
358
- """
359
- if len(self.layers) == 0:
360
- raise ValueError('The first layer should be a module, not a function.')
361
- module, in_size = self._format_module(layer, self.out_size)
362
- self.layers.append(module)
363
- self.out_size = in_size
364
-
365
- def _format_module(self, module, in_size):
366
- if isinstance(module, ParamDescriber):
367
- if in_size is None:
368
- raise ValueError(
369
- 'The input size should be specified. '
370
- f'Please set the in_size attribute of the previous module: \n'
371
- f'{self.layers[-1]}'
372
- )
373
- module = module(in_size=in_size)
374
- assert isinstance(module, Module), 'The module should be an instance of Module.'
375
- out_size = module.out_size
376
- elif isinstance(module, ElementWiseBlock):
377
- out_size = in_size
378
- elif isinstance(module, Module):
379
- out_size = module.out_size
380
- elif callable(module):
381
- out_size = in_size
382
- else:
383
- raise TypeError(f"Unsupported type {type(module)}. ")
384
- return module, out_size
1
+ # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ # -*- coding: utf-8 -*-
17
+
18
+
19
+ """
20
+ All the basic classes for neural networks in ``brainstate``.
21
+
22
+ The basic classes include:
23
+
24
+ - ``Module``: The base class for all the objects in the ecosystem.
25
+ - ``Sequential``: The class for a sequential of modules, which update the modules sequentially.
26
+
27
+ """
28
+
29
+ import warnings
30
+ from typing import Sequence, Optional, Tuple, Union, TYPE_CHECKING, Callable
31
+
32
+ import numpy as np
33
+
34
+ from brainstate._error import BrainStateError
35
+ from brainstate._state import State
36
+ from brainstate.graph import Node, states, nodes, flatten
37
+ from brainstate.mixin import ParamDescriber, ParamDesc
38
+ from brainstate.typing import PathParts, Size
39
+ from brainstate.util import FlattedDict, NestedDict
40
+
41
+ # maximum integer
42
+ max_int = np.iinfo(np.int32).max
43
+
44
+ __all__ = [
45
+ 'Module', 'ElementWiseBlock', 'Sequential',
46
+ ]
47
+
48
+
49
+ class Module(Node, ParamDesc):
50
+ """
51
+ The Module class for the whole ecosystem.
52
+
53
+ The ``Module`` is the base class for all the objects in the ecosystem. It
54
+ provides the basic functionalities for the objects, including:
55
+
56
+ - ``states()``: Collect all states in this node and the children nodes.
57
+ - ``nodes()``: Collect all children nodes.
58
+ - ``update()``: The function to specify the updating rule.
59
+ - ``init_state()``: State initialization function.
60
+ - ``reset_state()``: State resetting function.
61
+
62
+ """
63
+
64
+ __module__ = 'brainstate.nn'
65
+
66
+ _in_size: Optional[Size]
67
+ _out_size: Optional[Size]
68
+ _name: Optional[str]
69
+
70
+ if not TYPE_CHECKING:
71
+ def __init__(self, name: str = None):
72
+ # check the name
73
+ if name is not None:
74
+ assert isinstance(name, str), f'The name must be a string, but we got {type(name)}: {name}'
75
+ self._name = name
76
+
77
+ # input and output size
78
+ self._in_size = None
79
+ self._out_size = None
80
+
81
+ @property
82
+ def name(self):
83
+ """Name of the model."""
84
+ return self._name
85
+
86
+ @name.setter
87
+ def name(self, name: str = None):
88
+ raise AttributeError('The name of the model is read-only.')
89
+
90
+ @property
91
+ def in_size(self) -> Size:
92
+ return self._in_size
93
+
94
+ @in_size.setter
95
+ def in_size(self, in_size: Sequence[int] | int):
96
+ if isinstance(in_size, int):
97
+ in_size = (in_size,)
98
+ elif isinstance(in_size, np.generic):
99
+ if np.issubdtype(in_size, np.integer) and in_size.ndim == 0:
100
+ in_size = (int(in_size),)
101
+ assert isinstance(in_size, (tuple, list)), f"Invalid type of in_size: {in_size} {type(in_size)}"
102
+ self._in_size = tuple(in_size)
103
+
104
+ @property
105
+ def out_size(self) -> Size:
106
+ return self._out_size
107
+
108
+ @out_size.setter
109
+ def out_size(self, out_size: Sequence[int] | int):
110
+ if isinstance(out_size, int):
111
+ out_size = (out_size,)
112
+ elif isinstance(out_size, np.ndarray):
113
+ if np.issubdtype(out_size, np.integer) and out_size.ndim == 0:
114
+ out_size = (int(out_size),)
115
+ assert isinstance(out_size, (tuple, list)), f"Invalid type of out_size: {type(out_size)}"
116
+ self._out_size = tuple(out_size)
117
+
118
+ def update(self, *args, **kwargs):
119
+ """
120
+ The function to specify the updating rule.
121
+ """
122
+ raise NotImplementedError(
123
+ f'Subclass of {self.__class__.__name__} must implement "update" function. \n'
124
+ f'This instance is: \n'
125
+ f'{self}'
126
+ )
127
+
128
+ def __call__(self, *args, **kwargs):
129
+ return self.update(*args, **kwargs)
130
+
131
+ def __rrshift__(self, other):
132
+ """
133
+ Support using right shift operator to call modules.
134
+
135
+ Examples
136
+ --------
137
+
138
+ >>> import brainstate as brainstate
139
+ >>> x = brainstate.random.rand((10, 10))
140
+ >>> l = brainstate.nn.Dropout(0.5)
141
+ >>> y = x >> l
142
+ """
143
+ return self.__call__(other)
144
+
145
+ def states(
146
+ self,
147
+ *filters,
148
+ allowed_hierarchy: Tuple[int, int] = (0, max_int),
149
+ level: int = None,
150
+ ) -> FlattedDict[PathParts, State] | Tuple[FlattedDict[PathParts, State], ...]:
151
+ """
152
+ Collect all states in this node and the children nodes.
153
+
154
+ Parameters
155
+ ----------
156
+ filters : Any
157
+ The filters to select the states.
158
+ allowed_hierarchy : tuple of int
159
+ The hierarchy of the states to be collected.
160
+ level : int
161
+ The level of the states to be collected. Has been deprecated.
162
+
163
+ Returns
164
+ -------
165
+ states : FlattedDict, tuple of FlattedDict
166
+ The collection contained (the path, the state).
167
+ """
168
+ if level is not None:
169
+ allowed_hierarchy = (0, level)
170
+ warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
171
+ DeprecationWarning)
172
+
173
+ return states(self, *filters, allowed_hierarchy=allowed_hierarchy)
174
+
175
+ def state_trees(
176
+ self,
177
+ *filters,
178
+ ) -> NestedDict[PathParts, State] | Tuple[NestedDict[PathParts, State], ...]:
179
+ """
180
+ Collect all states in this node and the children nodes.
181
+
182
+ Parameters
183
+ ----------
184
+ filters : tuple
185
+ The filters to select the states.
186
+
187
+ Returns
188
+ -------
189
+ states : FlattedDict, tuple of FlattedDict
190
+ The collection contained (the path, the state).
191
+ """
192
+ graph_def, state_tree = flatten(self)
193
+ if len(filters):
194
+ return state_tree.filter(*filters)
195
+ return state_tree
196
+
197
+ def nodes(
198
+ self,
199
+ *filters,
200
+ allowed_hierarchy: Tuple[int, int] = (0, max_int),
201
+ level: int = None,
202
+ ) -> FlattedDict[PathParts, Node] | Tuple[FlattedDict[PathParts, Node], ...]:
203
+ """
204
+ Collect all children nodes.
205
+
206
+ Parameters
207
+ ----------
208
+ filters : Any
209
+ The filters to select the states.
210
+ allowed_hierarchy : tuple of int
211
+ The hierarchy of the states to be collected.
212
+ level : int
213
+ The level of the states to be collected. Has been deprecated.
214
+
215
+ Returns
216
+ -------
217
+ nodes : FlattedDict, tuple of FlattedDict
218
+ The collection contained (the path, the node).
219
+ """
220
+ if level is not None:
221
+ allowed_hierarchy = (0, level)
222
+ warnings.warn('The "level" argument is deprecated. Please use "allowed_hierarchy" instead.',
223
+ DeprecationWarning)
224
+
225
+ return nodes(self, *filters, allowed_hierarchy=allowed_hierarchy)
226
+
227
+ def init_state(self, *args, **kwargs):
228
+ """
229
+ State initialization function.
230
+ """
231
+ pass
232
+
233
+ def reset_state(self, *args, **kwargs):
234
+ """
235
+ State resetting function.
236
+ """
237
+ pass
238
+
239
+ def __pretty_repr_item__(self, name, value):
240
+ if name.startswith('_'):
241
+ return None if value is None else (name[1:], value) # skip the first `_`
242
+ return name, value
243
+
244
+
245
+ class ElementWiseBlock(Module):
246
+ __module__ = 'brainstate.nn'
247
+
248
+
249
+ class Sequential(Module):
250
+ """
251
+ A sequential `input-output` module.
252
+
253
+ Modules will be added to it in the order they are passed in the
254
+ constructor. Alternatively, an ``dict`` of modules can be
255
+ passed in. The ``update()`` method of ``Sequential`` accepts any
256
+ input and forwards it to the first module it contains. It then
257
+ "chains" outputs to inputs sequentially for each subsequent module,
258
+ finally returning the output of the last module.
259
+
260
+ The value a ``Sequential`` provides over manually calling a sequence
261
+ of modules is that it allows treating the whole container as a
262
+ single module, such that performing a transformation on the
263
+ ``Sequential`` applies to each of the modules it stores (which are
264
+ each a registered submodule of the ``Sequential``).
265
+
266
+ What's the difference between a ``Sequential`` and a
267
+ :py:class:`Container`? A ``Container`` is exactly what it
268
+ sounds like--a container to store :py:class:`DynamicalSystem` s!
269
+ On the other hand, the layers in a ``Sequential`` are connected
270
+ in a cascading way.
271
+
272
+ Examples
273
+ --------
274
+
275
+ >>> import jax
276
+ >>> import brainstate as brainstate
277
+ >>> import brainstate.nn as nn
278
+ >>>
279
+ >>> # composing ANN models
280
+ >>> l = nn.Sequential(nn.Linear(100, 10),
281
+ >>> jax.nn.relu,
282
+ >>> nn.Linear(10, 2))
283
+ >>> l(brainstate.random.random((256, 100)))
284
+
285
+ Args:
286
+ modules_as_tuple: The children modules.
287
+ modules_as_dict: The children modules.
288
+ name: The object name.
289
+ """
290
+ __module__ = 'brainstate.nn'
291
+
292
+ def __init__(self, first: Module, *layers):
293
+ super().__init__()
294
+ self.layers = []
295
+
296
+ # add all modules
297
+ assert isinstance(first, Module), 'The first module should be an instance of Module.'
298
+ in_size = first.out_size
299
+ self.layers.append(first)
300
+ for module in layers:
301
+ module, in_size = self._format_module(module, in_size)
302
+ self.layers.append(module)
303
+
304
+ # the input and output shape
305
+ if first.in_size is not None:
306
+ self.in_size = first.in_size
307
+ if in_size is not None:
308
+ self.out_size = tuple(in_size)
309
+
310
+ def update(self, x):
311
+ """Update function of a sequential model.
312
+ """
313
+ for m in self.layers:
314
+ try:
315
+ x = m(x)
316
+ except Exception as e:
317
+ raise BrainStateError(
318
+ f'The module \n'
319
+ f'{m}\n'
320
+ f'failed to update with input {x}\n'
321
+ ) from e
322
+ return x
323
+
324
+ def __getitem__(self, key: Union[int, slice]):
325
+ if isinstance(key, slice):
326
+ return Sequential(*self.layers[key])
327
+ elif isinstance(key, int):
328
+ return self.layers[key]
329
+ elif isinstance(key, (tuple, list)):
330
+ return Sequential(*[self.layers[k] for k in key])
331
+ else:
332
+ raise KeyError(f'Unknown type of key: {type(key)}')
333
+
334
+ def append(self, layer: Callable):
335
+ """
336
+ Append a layer to the sequential model.
337
+
338
+ This method adds a new layer to the end of the sequential model. The layer can be
339
+ either a Module instance, an ElementWiseBlock instance, or a callable function. If the
340
+ layer is a callable function, it will be wrapped in an ElementWiseBlock instance.
341
+
342
+ Parameters:
343
+ ----------
344
+ layer : Callable
345
+ The layer to be appended to the sequential model. It can be a Module instance,
346
+ an ElementWiseBlock instance, or a callable function.
347
+
348
+ Raises:
349
+ -------
350
+ ValueError
351
+ If the sequential model is empty and the first layer is a callable function.
352
+
353
+ Returns:
354
+ --------
355
+ None
356
+ The method does not return any value. It modifies the sequential model by adding
357
+ the new layer to the end.
358
+ """
359
+ if len(self.layers) == 0:
360
+ raise ValueError('The first layer should be a module, not a function.')
361
+ module, in_size = self._format_module(layer, self.out_size)
362
+ self.layers.append(module)
363
+ self.out_size = in_size
364
+
365
+ def _format_module(self, module, in_size):
366
+ try:
367
+ if isinstance(module, ParamDescriber):
368
+ if in_size is None:
369
+ raise ValueError(
370
+ 'The input size should be specified. '
371
+ f'Please set the in_size attribute of the previous module: \n'
372
+ f'{self.layers[-1]}'
373
+ )
374
+ module = module(in_size=in_size)
375
+ assert isinstance(module, Module), 'The module should be an instance of Module.'
376
+ out_size = module.out_size
377
+ elif isinstance(module, ElementWiseBlock):
378
+ out_size = in_size
379
+ elif isinstance(module, Module):
380
+ out_size = module.out_size
381
+ elif callable(module):
382
+ out_size = in_size
383
+ else:
384
+ raise TypeError(f"Unsupported type {type(module)}. ")
385
+ except Exception as e:
386
+ raise BrainStateError(
387
+ f'Failed to format the module: \n'
388
+ f'{module}\n'
389
+ f'with input size: {in_size}\n'
390
+ ) from e
391
+ return module, out_size
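The only behavioral change shown in this file's diff is in `Sequential._format_module` (new lines 366–391): the formatting logic is now wrapped in a try/except that re-raises any failure as a `BrainStateError` whose message names the offending module and the inferred input size. A minimal sketch of how that surfaces to callers; the layer sizes are illustrative only.

```python
import brainstate.nn as nn
from brainstate._error import BrainStateError

# In 0.2.1 an unsupported layer type raised a bare TypeError from
# _format_module; in 0.2.2 the same failure is re-raised as a BrainStateError
# reporting the offending module and the inferred input size.
try:
    nn.Sequential(nn.Linear(100, 10), 42)   # 42 is neither a Module nor callable
except BrainStateError as err:
    print(err)            # "Failed to format the module: 42 ... with input size: ..."
    print(err.__cause__)  # the original TypeError raised inside _format_module
```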