brainstate 0.1.10__py2.py3-none-any.whl → 0.2.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (163)
  1. brainstate/__init__.py +169 -58
  2. brainstate/_compatible_import.py +340 -148
  3. brainstate/_compatible_import_test.py +681 -0
  4. brainstate/_deprecation.py +210 -0
  5. brainstate/_deprecation_test.py +2319 -0
  6. brainstate/{util/error.py → _error.py} +45 -55
  7. brainstate/_state.py +1652 -1605
  8. brainstate/_state_test.py +52 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -563
  11. brainstate/environ_test.py +1223 -62
  12. brainstate/graph/__init__.py +22 -29
  13. brainstate/graph/_node.py +240 -0
  14. brainstate/graph/_node_test.py +589 -0
  15. brainstate/graph/{_graph_operation.py → _operation.py} +1624 -1738
  16. brainstate/graph/_operation_test.py +1147 -0
  17. brainstate/mixin.py +1433 -365
  18. brainstate/mixin_test.py +1017 -77
  19. brainstate/nn/__init__.py +137 -135
  20. brainstate/nn/_activations.py +1100 -808
  21. brainstate/nn/_activations_test.py +354 -331
  22. brainstate/nn/_collective_ops.py +633 -514
  23. brainstate/nn/_collective_ops_test.py +774 -43
  24. brainstate/nn/_common.py +226 -178
  25. brainstate/nn/_common_test.py +154 -0
  26. brainstate/nn/_conv.py +2010 -501
  27. brainstate/nn/_conv_test.py +849 -238
  28. brainstate/nn/_delay.py +575 -588
  29. brainstate/nn/_delay_test.py +243 -238
  30. brainstate/nn/_dropout.py +618 -426
  31. brainstate/nn/_dropout_test.py +477 -100
  32. brainstate/nn/_dynamics.py +1267 -1343
  33. brainstate/nn/_dynamics_test.py +67 -78
  34. brainstate/nn/_elementwise.py +1298 -1119
  35. brainstate/nn/_elementwise_test.py +830 -169
  36. brainstate/nn/_embedding.py +408 -58
  37. brainstate/nn/_embedding_test.py +156 -0
  38. brainstate/nn/{_fixedprob.py → _event_fixedprob.py} +233 -239
  39. brainstate/nn/{_fixedprob_test.py → _event_fixedprob_test.py} +115 -114
  40. brainstate/nn/{_linear_mv.py → _event_linear.py} +83 -83
  41. brainstate/nn/{_linear_mv_test.py → _event_linear_test.py} +121 -120
  42. brainstate/nn/_exp_euler.py +254 -92
  43. brainstate/nn/_exp_euler_test.py +377 -35
  44. brainstate/nn/_linear.py +744 -424
  45. brainstate/nn/_linear_test.py +475 -107
  46. brainstate/nn/_metrics.py +1070 -0
  47. brainstate/nn/_metrics_test.py +611 -0
  48. brainstate/nn/_module.py +384 -377
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -975
  51. brainstate/nn/_normalizations_test.py +699 -73
  52. brainstate/nn/_paddings.py +1020 -0
  53. brainstate/nn/_paddings_test.py +723 -0
  54. brainstate/nn/_poolings.py +2239 -1177
  55. brainstate/nn/_poolings_test.py +953 -217
  56. brainstate/nn/{_rate_rnns.py → _rnns.py} +946 -554
  57. brainstate/nn/_rnns_test.py +593 -0
  58. brainstate/nn/_utils.py +216 -89
  59. brainstate/nn/_utils_test.py +402 -0
  60. brainstate/{init/_random_inits.py → nn/init.py} +809 -553
  61. brainstate/{init/_random_inits_test.py → nn/init_test.py} +180 -149
  62. brainstate/random/__init__.py +270 -24
  63. brainstate/random/_rand_funs.py +3938 -3616
  64. brainstate/random/_rand_funs_test.py +640 -567
  65. brainstate/random/_rand_seed.py +675 -210
  66. brainstate/random/_rand_seed_test.py +48 -48
  67. brainstate/random/_rand_state.py +1617 -1409
  68. brainstate/random/_rand_state_test.py +551 -0
  69. brainstate/transform/__init__.py +59 -0
  70. brainstate/transform/_ad_checkpoint.py +176 -0
  71. brainstate/{compile → transform}/_ad_checkpoint_test.py +49 -49
  72. brainstate/{augment → transform}/_autograd.py +1025 -778
  73. brainstate/{augment → transform}/_autograd_test.py +1289 -1289
  74. brainstate/transform/_conditions.py +316 -0
  75. brainstate/{compile → transform}/_conditions_test.py +220 -220
  76. brainstate/{compile → transform}/_error_if.py +94 -92
  77. brainstate/{compile → transform}/_error_if_test.py +52 -52
  78. brainstate/transform/_eval_shape.py +145 -0
  79. brainstate/{augment → transform}/_eval_shape_test.py +38 -38
  80. brainstate/{compile → transform}/_jit.py +399 -346
  81. brainstate/{compile → transform}/_jit_test.py +143 -143
  82. brainstate/{compile → transform}/_loop_collect_return.py +675 -536
  83. brainstate/{compile → transform}/_loop_collect_return_test.py +58 -58
  84. brainstate/{compile → transform}/_loop_no_collection.py +283 -184
  85. brainstate/{compile → transform}/_loop_no_collection_test.py +50 -50
  86. brainstate/transform/_make_jaxpr.py +2016 -0
  87. brainstate/transform/_make_jaxpr_test.py +1510 -0
  88. brainstate/transform/_mapping.py +529 -0
  89. brainstate/transform/_mapping_test.py +194 -0
  90. brainstate/{compile → transform}/_progress_bar.py +255 -202
  91. brainstate/{augment → transform}/_random.py +171 -151
  92. brainstate/{compile → transform}/_unvmap.py +256 -159
  93. brainstate/transform/_util.py +286 -0
  94. brainstate/typing.py +837 -304
  95. brainstate/typing_test.py +780 -0
  96. brainstate/util/__init__.py +27 -50
  97. brainstate/util/_others.py +1025 -0
  98. brainstate/util/_others_test.py +962 -0
  99. brainstate/util/_pretty_pytree.py +1301 -0
  100. brainstate/util/_pretty_pytree_test.py +675 -0
  101. brainstate/util/{pretty_repr.py → _pretty_repr.py} +462 -328
  102. brainstate/util/_pretty_repr_test.py +696 -0
  103. brainstate/util/filter.py +945 -469
  104. brainstate/util/filter_test.py +912 -0
  105. brainstate/util/struct.py +910 -523
  106. brainstate/util/struct_test.py +602 -0
  107. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/METADATA +108 -91
  108. brainstate-0.2.1.dist-info/RECORD +111 -0
  109. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/licenses/LICENSE +202 -202
  110. brainstate/augment/__init__.py +0 -30
  111. brainstate/augment/_eval_shape.py +0 -99
  112. brainstate/augment/_mapping.py +0 -1060
  113. brainstate/augment/_mapping_test.py +0 -597
  114. brainstate/compile/__init__.py +0 -38
  115. brainstate/compile/_ad_checkpoint.py +0 -204
  116. brainstate/compile/_conditions.py +0 -256
  117. brainstate/compile/_make_jaxpr.py +0 -888
  118. brainstate/compile/_make_jaxpr_test.py +0 -156
  119. brainstate/compile/_util.py +0 -147
  120. brainstate/functional/__init__.py +0 -27
  121. brainstate/graph/_graph_node.py +0 -244
  122. brainstate/graph/_graph_node_test.py +0 -73
  123. brainstate/graph/_graph_operation_test.py +0 -563
  124. brainstate/init/__init__.py +0 -26
  125. brainstate/init/_base.py +0 -52
  126. brainstate/init/_generic.py +0 -244
  127. brainstate/init/_regular_inits.py +0 -105
  128. brainstate/init/_regular_inits_test.py +0 -50
  129. brainstate/nn/_inputs.py +0 -608
  130. brainstate/nn/_ltp.py +0 -28
  131. brainstate/nn/_neuron.py +0 -705
  132. brainstate/nn/_neuron_test.py +0 -161
  133. brainstate/nn/_others.py +0 -46
  134. brainstate/nn/_projection.py +0 -486
  135. brainstate/nn/_rate_rnns_test.py +0 -63
  136. brainstate/nn/_readout.py +0 -209
  137. brainstate/nn/_readout_test.py +0 -53
  138. brainstate/nn/_stp.py +0 -236
  139. brainstate/nn/_synapse.py +0 -505
  140. brainstate/nn/_synapse_test.py +0 -131
  141. brainstate/nn/_synaptic_projection.py +0 -423
  142. brainstate/nn/_synouts.py +0 -162
  143. brainstate/nn/_synouts_test.py +0 -57
  144. brainstate/nn/metrics.py +0 -388
  145. brainstate/optim/__init__.py +0 -38
  146. brainstate/optim/_base.py +0 -64
  147. brainstate/optim/_lr_scheduler.py +0 -448
  148. brainstate/optim/_lr_scheduler_test.py +0 -50
  149. brainstate/optim/_optax_optimizer.py +0 -152
  150. brainstate/optim/_optax_optimizer_test.py +0 -53
  151. brainstate/optim/_sgd_optimizer.py +0 -1104
  152. brainstate/random/_random_for_unit.py +0 -52
  153. brainstate/surrogate.py +0 -1957
  154. brainstate/transform.py +0 -23
  155. brainstate/util/caller.py +0 -98
  156. brainstate/util/others.py +0 -540
  157. brainstate/util/pretty_pytree.py +0 -945
  158. brainstate/util/pretty_pytree_test.py +0 -159
  159. brainstate/util/pretty_table.py +0 -2954
  160. brainstate/util/scaling.py +0 -258
  161. brainstate-0.1.10.dist-info/RECORD +0 -130
  162. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/WHEEL +0 -0
  163. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/top_level.txt +0 -0
brainstate/nn/_utils.py CHANGED
@@ -1,89 +1,216 @@
1
- # Copyright 2025 BDP Ecosystem Limited. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- # ==============================================================================
15
-
16
- # -*- coding: utf-8 -*-
17
-
18
- from typing import Union, Tuple
19
-
20
- from brainstate._state import ParamState
21
- from brainstate.util import PrettyTable
22
- from ._module import Module
23
-
24
- __all__ = [
25
- "count_parameters",
26
- ]
27
-
28
-
29
- def _format_parameter_count(num_params, precision=2):
30
- if num_params < 1000:
31
- return str(num_params)
32
-
33
- suffixes = ['', 'K', 'M', 'B', 'T', 'P', 'E']
34
- magnitude = 0
35
- while abs(num_params) >= 1000:
36
- magnitude += 1
37
- num_params /= 1000.0
38
-
39
- format_string = '{:.' + str(precision) + 'f}{}'
40
- formatted_value = format_string.format(num_params, suffixes[magnitude])
41
-
42
- # 检查是否接近 1000,如果是,尝试使用更大的基数
43
- if magnitude < len(suffixes) - 1 and num_params >= 1000 * (1 - 10 ** (-precision)):
44
- magnitude += 1
45
- num_params /= 1000.0
46
- formatted_value = format_string.format(num_params, suffixes[magnitude])
47
-
48
- return formatted_value
49
-
50
-
51
- def count_parameters(
52
- module: Module,
53
- precision: int = 2,
54
- return_table: bool = False,
55
- ) -> Union[Tuple[PrettyTable, int], int]:
56
- """
57
- Count and display the number of trainable parameters in a neural network model.
58
-
59
- This function iterates through all the parameters of the given model,
60
- counts the number of parameters for each module, and displays them in a table.
61
- It also calculates and returns the total number of trainable parameters.
62
-
63
- Parameters:
64
- -----------
65
- model : brainstate.nn.Module
66
- The neural network model for which to count parameters.
67
-
68
- Returns:
69
- --------
70
- int
71
- The total number of trainable parameters in the model.
72
-
73
- Prints:
74
- -------
75
- A pretty-formatted table showing the number of parameters for each module,
76
- followed by the total number of trainable parameters.
77
- """
78
- assert isinstance(module, Module), "Input must be a neural network module" # noqa: E501
79
- table = PrettyTable(["Modules", "Parameters"])
80
- total_params = 0
81
- for name, parameter in module.states(ParamState).items():
82
- param = parameter.numel()
83
- table.add_row([name, _format_parameter_count(param, precision=precision)])
84
- total_params += param
85
- table.add_row(["Total", _format_parameter_count(total_params, precision=precision)])
86
- print(table)
87
- if return_table:
88
- return table, total_params
89
- return total_params
1
+ # Copyright 2025 BrainX Ecosystem Limited. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ # -*- coding: utf-8 -*-
17
+
18
+ from brainstate._state import ParamState
19
+ from ._module import Module
20
+ from functools import partial
21
+
22
+ import jax
23
+ import jax.numpy as jnp
24
+
25
+ from brainstate.typing import PyTree
26
+
27
+ __all__ = [
28
+ "count_parameters",
29
+ "clip_grad_norm",
30
+ ]
31
+
32
+
33
+ def _format_parameter_count(num_params, precision=2):
34
+ if num_params < 1000:
35
+ return str(num_params)
36
+
37
+ suffixes = ['', 'K', 'M', 'B', 'T', 'P', 'E']
38
+ magnitude = 0
39
+ while abs(num_params) >= 1000:
40
+ magnitude += 1
41
+ num_params /= 1000.0
42
+
43
+ format_string = '{:.' + str(precision) + 'f}{}'
44
+ formatted_value = format_string.format(num_params, suffixes[magnitude])
45
+
46
+ # 检查是否接近 1000,如果是,尝试使用更大的基数
47
+ if magnitude < len(suffixes) - 1 and num_params >= 1000 * (1 - 10 ** (-precision)):
48
+ magnitude += 1
49
+ num_params /= 1000.0
50
+ formatted_value = format_string.format(num_params, suffixes[magnitude])
51
+
52
+ return formatted_value
53
+
54
+
55
def count_parameters(
    module: Module,
    precision: int = 2,
    return_table: bool = False,
):
    """Count and print the number of trainable parameters of a model.

    Walks every ``ParamState`` registered on ``module``, renders a table
    with one row per state plus a final "Total" row, prints the table,
    and returns the totals.

    Parameters
    ----------
    module : Module
        The neural network module whose parameters are counted.
    precision : int, optional
        Decimal precision used when abbreviating counts (e.g. "1.23M").
        Default is 2.
    return_table : bool, optional
        If True, return ``(table, total)`` instead of just the total.
        Default is False.

    Returns
    -------
    int or tuple
        The total number of trainable parameters; when ``return_table``
        is True, a ``(PrettyTable, int)`` pair.
    """
    assert isinstance(module, Module), "Input must be a neural network module"  # noqa: E501
    from prettytable import PrettyTable  # noqa: E501

    summary = PrettyTable(["Modules", "Parameters"])
    total = 0
    for state_name, state in module.states(ParamState).items():
        n = state.numel()
        summary.add_row([state_name, _format_parameter_count(n, precision=precision)])
        total += n
    summary.add_row(["Total", _format_parameter_count(total, precision=precision)])
    print(summary)
    return (summary, total) if return_table else total
95
+
96
+
97
def clip_grad_norm(
    grad: PyTree,
    max_norm: float | jax.Array,
    norm_type: int | float | str | None = 2.0,
    return_norm: bool = False,
) -> PyTree | tuple[PyTree, jax.Array]:
    """Rescale a gradient PyTree so its global norm does not exceed ``max_norm``.

    All leaves are treated as one flat vector: their values are concatenated,
    the p-norm of that vector is computed, and every leaf is multiplied by
    ``min(1, max_norm / norm)`` so the clipped gradients never exceed the
    requested norm while smaller gradients pass through unchanged.

    Parameters
    ----------
    grad : PyTree
        Nested structure (dict, list, tuple, ...) of JAX arrays holding the
        gradients to clip.
    max_norm : float or jax.Array
        Upper bound on the global gradient norm.
    norm_type : int, float, str, or None, optional
        Order of the norm. ``None`` means 2.0 (Euclidean). The strings
        ``'inf'`` and ``'-inf'`` select ``jnp.inf`` / ``-jnp.inf``; any
        other value is forwarded to ``jnp.linalg.norm`` as ``ord``.
    return_norm : bool, optional
        When True, also return the pre-clipping norm. Default is False.

    Returns
    -------
    PyTree or (PyTree, jax.Array)
        The clipped gradients, optionally followed by the norm that was
        measured before clipping.

    Notes
    -----
    The clipping rule is

    .. math::
        g_{\\text{clipped}} = g \\cdot \\min\\left(1, \\frac{\\text{max\\_norm}}{\\|g\\|_p}\\right)

    where :math:`\\|g\\|_p` is the p-norm of the concatenated gradient vector.

    Examples
    --------
    .. code-block:: python

        >>> import jax.numpy as jnp
        >>> import brainstate
        >>> grads = {'w': jnp.array([3.0, 4.0]), 'b': jnp.array([12.0])}
        >>> clipped, norm = brainstate.nn.clip_grad_norm(grads, max_norm=5.0, return_norm=True)
        >>> print(f"Original norm: {norm:.2f}")
        Original norm: 13.00
    """
    p = 2.0 if norm_type is None else norm_type

    # Accept the string spellings of the infinity norms.
    if p == 'inf':
        p = jnp.inf
    elif p == '-inf':
        p = -jnp.inf

    leaves = jax.tree.leaves(grad)

    # An empty PyTree has nothing to clip; its norm is zero by convention.
    if not leaves:
        return (grad, jnp.array(0.0)) if return_norm else grad

    # Global norm over all leaves, as if concatenated into one vector.
    flat = jnp.concatenate([leaf.ravel() for leaf in leaves])
    grad_norm = jnp.linalg.norm(flat, ord=p)

    # Scale factor capped at 1; the epsilon guards against division by zero.
    scale = jnp.minimum(1.0, max_norm / (grad_norm + 1e-6))
    rescaled = jax.tree.map(lambda leaf: leaf * scale, grad)

    return (rescaled, grad_norm) if return_norm else rescaled