brainstate-0.1.10-py2.py3-none-any.whl → brainstate-0.2.1-py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Files changed (163)
  1. brainstate/__init__.py +169 -58
  2. brainstate/_compatible_import.py +340 -148
  3. brainstate/_compatible_import_test.py +681 -0
  4. brainstate/_deprecation.py +210 -0
  5. brainstate/_deprecation_test.py +2319 -0
  6. brainstate/{util/error.py → _error.py} +45 -55
  7. brainstate/_state.py +1652 -1605
  8. brainstate/_state_test.py +52 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -563
  11. brainstate/environ_test.py +1223 -62
  12. brainstate/graph/__init__.py +22 -29
  13. brainstate/graph/_node.py +240 -0
  14. brainstate/graph/_node_test.py +589 -0
  15. brainstate/graph/{_graph_operation.py → _operation.py} +1624 -1738
  16. brainstate/graph/_operation_test.py +1147 -0
  17. brainstate/mixin.py +1433 -365
  18. brainstate/mixin_test.py +1017 -77
  19. brainstate/nn/__init__.py +137 -135
  20. brainstate/nn/_activations.py +1100 -808
  21. brainstate/nn/_activations_test.py +354 -331
  22. brainstate/nn/_collective_ops.py +633 -514
  23. brainstate/nn/_collective_ops_test.py +774 -43
  24. brainstate/nn/_common.py +226 -178
  25. brainstate/nn/_common_test.py +154 -0
  26. brainstate/nn/_conv.py +2010 -501
  27. brainstate/nn/_conv_test.py +849 -238
  28. brainstate/nn/_delay.py +575 -588
  29. brainstate/nn/_delay_test.py +243 -238
  30. brainstate/nn/_dropout.py +618 -426
  31. brainstate/nn/_dropout_test.py +477 -100
  32. brainstate/nn/_dynamics.py +1267 -1343
  33. brainstate/nn/_dynamics_test.py +67 -78
  34. brainstate/nn/_elementwise.py +1298 -1119
  35. brainstate/nn/_elementwise_test.py +830 -169
  36. brainstate/nn/_embedding.py +408 -58
  37. brainstate/nn/_embedding_test.py +156 -0
  38. brainstate/nn/{_fixedprob.py → _event_fixedprob.py} +233 -239
  39. brainstate/nn/{_fixedprob_test.py → _event_fixedprob_test.py} +115 -114
  40. brainstate/nn/{_linear_mv.py → _event_linear.py} +83 -83
  41. brainstate/nn/{_linear_mv_test.py → _event_linear_test.py} +121 -120
  42. brainstate/nn/_exp_euler.py +254 -92
  43. brainstate/nn/_exp_euler_test.py +377 -35
  44. brainstate/nn/_linear.py +744 -424
  45. brainstate/nn/_linear_test.py +475 -107
  46. brainstate/nn/_metrics.py +1070 -0
  47. brainstate/nn/_metrics_test.py +611 -0
  48. brainstate/nn/_module.py +384 -377
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -975
  51. brainstate/nn/_normalizations_test.py +699 -73
  52. brainstate/nn/_paddings.py +1020 -0
  53. brainstate/nn/_paddings_test.py +723 -0
  54. brainstate/nn/_poolings.py +2239 -1177
  55. brainstate/nn/_poolings_test.py +953 -217
  56. brainstate/nn/{_rate_rnns.py → _rnns.py} +946 -554
  57. brainstate/nn/_rnns_test.py +593 -0
  58. brainstate/nn/_utils.py +216 -89
  59. brainstate/nn/_utils_test.py +402 -0
  60. brainstate/{init/_random_inits.py → nn/init.py} +809 -553
  61. brainstate/{init/_random_inits_test.py → nn/init_test.py} +180 -149
  62. brainstate/random/__init__.py +270 -24
  63. brainstate/random/_rand_funs.py +3938 -3616
  64. brainstate/random/_rand_funs_test.py +640 -567
  65. brainstate/random/_rand_seed.py +675 -210
  66. brainstate/random/_rand_seed_test.py +48 -48
  67. brainstate/random/_rand_state.py +1617 -1409
  68. brainstate/random/_rand_state_test.py +551 -0
  69. brainstate/transform/__init__.py +59 -0
  70. brainstate/transform/_ad_checkpoint.py +176 -0
  71. brainstate/{compile → transform}/_ad_checkpoint_test.py +49 -49
  72. brainstate/{augment → transform}/_autograd.py +1025 -778
  73. brainstate/{augment → transform}/_autograd_test.py +1289 -1289
  74. brainstate/transform/_conditions.py +316 -0
  75. brainstate/{compile → transform}/_conditions_test.py +220 -220
  76. brainstate/{compile → transform}/_error_if.py +94 -92
  77. brainstate/{compile → transform}/_error_if_test.py +52 -52
  78. brainstate/transform/_eval_shape.py +145 -0
  79. brainstate/{augment → transform}/_eval_shape_test.py +38 -38
  80. brainstate/{compile → transform}/_jit.py +399 -346
  81. brainstate/{compile → transform}/_jit_test.py +143 -143
  82. brainstate/{compile → transform}/_loop_collect_return.py +675 -536
  83. brainstate/{compile → transform}/_loop_collect_return_test.py +58 -58
  84. brainstate/{compile → transform}/_loop_no_collection.py +283 -184
  85. brainstate/{compile → transform}/_loop_no_collection_test.py +50 -50
  86. brainstate/transform/_make_jaxpr.py +2016 -0
  87. brainstate/transform/_make_jaxpr_test.py +1510 -0
  88. brainstate/transform/_mapping.py +529 -0
  89. brainstate/transform/_mapping_test.py +194 -0
  90. brainstate/{compile → transform}/_progress_bar.py +255 -202
  91. brainstate/{augment → transform}/_random.py +171 -151
  92. brainstate/{compile → transform}/_unvmap.py +256 -159
  93. brainstate/transform/_util.py +286 -0
  94. brainstate/typing.py +837 -304
  95. brainstate/typing_test.py +780 -0
  96. brainstate/util/__init__.py +27 -50
  97. brainstate/util/_others.py +1025 -0
  98. brainstate/util/_others_test.py +962 -0
  99. brainstate/util/_pretty_pytree.py +1301 -0
  100. brainstate/util/_pretty_pytree_test.py +675 -0
  101. brainstate/util/{pretty_repr.py → _pretty_repr.py} +462 -328
  102. brainstate/util/_pretty_repr_test.py +696 -0
  103. brainstate/util/filter.py +945 -469
  104. brainstate/util/filter_test.py +912 -0
  105. brainstate/util/struct.py +910 -523
  106. brainstate/util/struct_test.py +602 -0
  107. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/METADATA +108 -91
  108. brainstate-0.2.1.dist-info/RECORD +111 -0
  109. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/licenses/LICENSE +202 -202
  110. brainstate/augment/__init__.py +0 -30
  111. brainstate/augment/_eval_shape.py +0 -99
  112. brainstate/augment/_mapping.py +0 -1060
  113. brainstate/augment/_mapping_test.py +0 -597
  114. brainstate/compile/__init__.py +0 -38
  115. brainstate/compile/_ad_checkpoint.py +0 -204
  116. brainstate/compile/_conditions.py +0 -256
  117. brainstate/compile/_make_jaxpr.py +0 -888
  118. brainstate/compile/_make_jaxpr_test.py +0 -156
  119. brainstate/compile/_util.py +0 -147
  120. brainstate/functional/__init__.py +0 -27
  121. brainstate/graph/_graph_node.py +0 -244
  122. brainstate/graph/_graph_node_test.py +0 -73
  123. brainstate/graph/_graph_operation_test.py +0 -563
  124. brainstate/init/__init__.py +0 -26
  125. brainstate/init/_base.py +0 -52
  126. brainstate/init/_generic.py +0 -244
  127. brainstate/init/_regular_inits.py +0 -105
  128. brainstate/init/_regular_inits_test.py +0 -50
  129. brainstate/nn/_inputs.py +0 -608
  130. brainstate/nn/_ltp.py +0 -28
  131. brainstate/nn/_neuron.py +0 -705
  132. brainstate/nn/_neuron_test.py +0 -161
  133. brainstate/nn/_others.py +0 -46
  134. brainstate/nn/_projection.py +0 -486
  135. brainstate/nn/_rate_rnns_test.py +0 -63
  136. brainstate/nn/_readout.py +0 -209
  137. brainstate/nn/_readout_test.py +0 -53
  138. brainstate/nn/_stp.py +0 -236
  139. brainstate/nn/_synapse.py +0 -505
  140. brainstate/nn/_synapse_test.py +0 -131
  141. brainstate/nn/_synaptic_projection.py +0 -423
  142. brainstate/nn/_synouts.py +0 -162
  143. brainstate/nn/_synouts_test.py +0 -57
  144. brainstate/nn/metrics.py +0 -388
  145. brainstate/optim/__init__.py +0 -38
  146. brainstate/optim/_base.py +0 -64
  147. brainstate/optim/_lr_scheduler.py +0 -448
  148. brainstate/optim/_lr_scheduler_test.py +0 -50
  149. brainstate/optim/_optax_optimizer.py +0 -152
  150. brainstate/optim/_optax_optimizer_test.py +0 -53
  151. brainstate/optim/_sgd_optimizer.py +0 -1104
  152. brainstate/random/_random_for_unit.py +0 -52
  153. brainstate/surrogate.py +0 -1957
  154. brainstate/transform.py +0 -23
  155. brainstate/util/caller.py +0 -98
  156. brainstate/util/others.py +0 -540
  157. brainstate/util/pretty_pytree.py +0 -945
  158. brainstate/util/pretty_pytree_test.py +0 -159
  159. brainstate/util/pretty_table.py +0 -2954
  160. brainstate/util/scaling.py +0 -258
  161. brainstate-0.1.10.dist-info/RECORD +0 -130
  162. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/WHEEL +0 -0
  163. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/top_level.txt +0 -0
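
The renames above amount to a reorganization of the public API: brainstate.functional is removed and its activation functions now live in brainstate.nn (the test diff below rewrites every brainstate.functional.* call to brainstate.nn.*), the compile and augment packages are merged into a single brainstate.transform package, and the init package moves to brainstate.nn.init. A minimal migration sketch for 0.2.1, assuming only the module renames visible in this file list (the transform-level names are inferred from the moved module files and should be verified against the 0.2.1 API):

    import jax.numpy as jnp
    import brainstate

    x = jnp.linspace(-1.0, 1.0, 4)

    # 0.1.10 spelling (removed in 0.2.1):
    #   y = brainstate.functional.softplus(x)
    # 0.2.1 spelling, as exercised in the rewritten tests below:
    y = brainstate.nn.softplus(x)

    # Transformations migrate wholesale:
    #   brainstate.compile.*  -> brainstate.transform.*   (e.g. _jit.py, _conditions.py)
    #   brainstate.augment.*  -> brainstate.transform.*   (e.g. _autograd.py, _mapping.py)

The hunk below is the per-file diff of brainstate/nn/_activations_test.py (+354 −331, matching entry 21 in the list above).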
@@ -1,331 +1,354 @@
- # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # ==============================================================================
-
- """Tests for nn module."""
-
- import itertools
- from functools import partial
-
- import jax
- import jax.numpy as jnp
- import scipy.stats
- from absl.testing import parameterized
- from jax._src import test_util as jtu
- from jax.test_util import check_grads
-
- import brainstate
-
-
- class NNFunctionsTest(jtu.JaxTestCase):
-     @jtu.skip_on_flag("jax_skip_slow_tests", True)
-     def testSoftplusGrad(self):
-         check_grads(brainstate.functional.softplus, (1e-8,), order=4, )
-
-     def testSoftplusGradZero(self):
-         check_grads(brainstate.functional.softplus, (0.,), order=1)
-
-     def testSoftplusGradInf(self):
-         self.assertAllClose(1., jax.grad(brainstate.functional.softplus)(float('inf')))
-
-     def testSoftplusGradNegInf(self):
-         check_grads(brainstate.functional.softplus, (-float('inf'),), order=1)
-
-     def testSoftplusGradNan(self):
-         check_grads(brainstate.functional.softplus, (float('nan'),), order=1)
-
-     @parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
-     def testSoftplusZero(self, dtype):
-         self.assertEqual(jnp.log(dtype(2)), brainstate.functional.softplus(dtype(0)))
-
-     def testSparseplusGradZero(self):
-         check_grads(brainstate.functional.sparse_plus, (-2.,), order=1)
-
-     def testSparseplusGrad(self):
-         check_grads(brainstate.functional.sparse_plus, (0.,), order=1)
-
-     def testSparseplusAndSparseSigmoid(self):
-         self.assertAllClose(
-             jax.grad(brainstate.functional.sparse_plus)(0.),
-             brainstate.functional.sparse_sigmoid(0.),
-             check_dtypes=False)
-         self.assertAllClose(
-             jax.grad(brainstate.functional.sparse_plus)(2.),
-             brainstate.functional.sparse_sigmoid(2.),
-             check_dtypes=False)
-         self.assertAllClose(
-             jax.grad(brainstate.functional.sparse_plus)(-2.),
-             brainstate.functional.sparse_sigmoid(-2.),
-             check_dtypes=False)
-
-     # def testSquareplusGrad(self):
-     #     check_grads(brainstate.functional.squareplus, (1e-8,), order=4,
-     #                 )
-
-     # def testSquareplusGradZero(self):
-     #     check_grads(brainstate.functional.squareplus, (0.,), order=1,
-     #                 )
-
-     # def testSquareplusGradNegInf(self):
-     #     check_grads(brainstate.functional.squareplus, (-float('inf'),), order=1,
-     #                 )
-
-     # def testSquareplusGradNan(self):
-     #     check_grads(brainstate.functional.squareplus, (float('nan'),), order=1,
-     #                 )
-
-     # @parameterized.parameters([float] + jtu.dtypes.floating)
-     # def testSquareplusZero(self, dtype):
-     #     self.assertEqual(dtype(1), brainstate.functional.squareplus(dtype(0), dtype(4)))
-     #
-     # def testMishGrad(self):
-     #     check_grads(brainstate.functional.mish, (1e-8,), order=4,
-     #                 )
-     #
-     # def testMishGradZero(self):
-     #     check_grads(brainstate.functional.mish, (0.,), order=1,
-     #                 )
-     #
-     # def testMishGradNegInf(self):
-     #     check_grads(brainstate.functional.mish, (-float('inf'),), order=1,
-     #                 )
-     #
-     # def testMishGradNan(self):
-     #     check_grads(brainstate.functional.mish, (float('nan'),), order=1,
-     #                 )
-
-     @parameterized.parameters([float] + jtu.dtypes.floating)
-     def testMishZero(self, dtype):
-         self.assertEqual(dtype(0), brainstate.functional.mish(dtype(0)))
-
-     def testReluGrad(self):
-         rtol = None
-         check_grads(brainstate.functional.relu, (1.,), order=3, rtol=rtol)
-         check_grads(brainstate.functional.relu, (-1.,), order=3, rtol=rtol)
-         jaxpr = jax.make_jaxpr(jax.grad(brainstate.functional.relu))(0.)
-         self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
-
-     def testRelu6Grad(self):
-         rtol = None
-         check_grads(brainstate.functional.relu6, (1.,), order=3, rtol=rtol)
-         check_grads(brainstate.functional.relu6, (-1.,), order=3, rtol=rtol)
-         self.assertAllClose(jax.grad(brainstate.functional.relu6)(0.), 0., check_dtypes=False)
-         self.assertAllClose(jax.grad(brainstate.functional.relu6)(6.), 0., check_dtypes=False)
-
-     def testSoftplusValue(self):
-         val = brainstate.functional.softplus(89.)
-         self.assertAllClose(val, 89., check_dtypes=False)
-
-     def testSparseplusValue(self):
-         val = brainstate.functional.sparse_plus(89.)
-         self.assertAllClose(val, 89., check_dtypes=False)
-
-     def testSparsesigmoidValue(self):
-         self.assertAllClose(brainstate.functional.sparse_sigmoid(-2.), 0., check_dtypes=False)
-         self.assertAllClose(brainstate.functional.sparse_sigmoid(2.), 1., check_dtypes=False)
-         self.assertAllClose(brainstate.functional.sparse_sigmoid(0.), .5, check_dtypes=False)
-
-     # def testSquareplusValue(self):
-     #     val = brainstate.functional.squareplus(1e3)
-     #     self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
-
-     def testMishValue(self):
-         val = brainstate.functional.mish(1e3)
-         self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
-
-     def testEluValue(self):
-         val = brainstate.functional.elu(1e4)
-         self.assertAllClose(val, 1e4, check_dtypes=False)
-
-     def testGluValue(self):
-         val = brainstate.functional.glu(jnp.array([1.0, 0.0]), axis=0)
-         self.assertAllClose(val, jnp.array([0.5]))
-
-     @parameterized.parameters(False, True)
-     def testGeluIntType(self, approximate):
-         val_float = brainstate.functional.gelu(jnp.array(-1.0), approximate=approximate)
-         val_int = brainstate.functional.gelu(jnp.array(-1), approximate=approximate)
-         self.assertAllClose(val_float, val_int)
-
-     @parameterized.parameters(False, True)
-     def testGelu(self, approximate):
-         def gelu_reference(x):
-             return x * scipy.stats.norm.cdf(x)
-
-         rng = jtu.rand_default(self.rng())
-         args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
-         self._CheckAgainstNumpy(
-             gelu_reference, partial(brainstate.functional.gelu, approximate=approximate), args_maker,
-             check_dtypes=False, tol=1e-3 if approximate else None)
-
-     @parameterized.parameters(*itertools.product(
-         (jnp.float32, jnp.bfloat16, jnp.float16),
-         (partial(brainstate.functional.gelu, approximate=False),
-          partial(brainstate.functional.gelu, approximate=True),
-          brainstate.functional.relu,
-          brainstate.functional.softplus,
-          brainstate.functional.sparse_plus,
-          brainstate.functional.sigmoid,
-          # brainstate.functional.squareplus,
-          brainstate.functional.mish)))
-     def testDtypeMatchesInput(self, dtype, fn):
-         x = jnp.zeros((), dtype=dtype)
-         out = fn(x)
-         self.assertEqual(out.dtype, dtype)
-
-     def testEluMemory(self):
-         # see https://github.com/google/jax/pull/1640
-         with jax.enable_checks(False):  # With checks we materialize the array
-             jax.make_jaxpr(lambda: brainstate.functional.elu(jnp.ones((10 ** 12,))))  # don't oom
-
-     def testHardTanhMemory(self):
-         # see https://github.com/google/jax/pull/1640
-         with jax.enable_checks(False):  # With checks we materialize the array
-             jax.make_jaxpr(lambda: brainstate.functional.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxEmptyArray(self, fn):
-         x = jnp.array([], dtype=float)
-         self.assertArraysEqual(fn(x), x)
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxEmptyMask(self, fn):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         m = jnp.zeros_like(x, dtype=bool)
-         expected = jnp.full_like(x, 0.0 if fn is brainstate.functional.softmax else -jnp.inf)
-         self.assertArraysEqual(fn(x, where=m), expected)
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxWhereMask(self, fn):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         m = jnp.array([True, False, True, True])
-
-         out = fn(x, where=m)
-         self.assertAllClose(out[m], fn(x[m]))
-
-         probs = out if fn is brainstate.functional.softmax else jnp.exp(out)
-         self.assertAllClose(probs.sum(), 1.0)
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxWhereGrad(self, fn):
-         # regression test for https://github.com/google/jax/issues/19490
-         x = jnp.array([36., 10000.])
-         mask = x < 1000
-
-         f = lambda x, mask: fn(x, where=mask)[0]
-
-         self.assertAllClose(jax.grad(f)(x, mask), jnp.zeros_like(x))
-
-     def testSoftmaxGrad(self):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         jtu.check_grads(brainstate.functional.softmax, (x,), order=2, atol=5e-3)
-
-     def testStandardizeWhereMask(self):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         m = jnp.array([True, False, True, True])
-         x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
-
-         out_masked = jnp.take(brainstate.functional.standardize(x, where=m), jnp.array([0, 2, 3]))
-         out_filtered = brainstate.functional.standardize(x_filtered)
-
-         self.assertAllClose(out_masked, out_filtered)
-
-     def testOneHot(self):
-         actual = brainstate.functional.one_hot(jnp.array([0, 1, 2]), 3)
-         expected = jnp.array([[1., 0., 0.],
-                               [0., 1., 0.],
-                               [0., 0., 1.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3)
-         expected = jnp.array([[0., 1., 0.],
-                               [0., 0., 1.],
-                               [1., 0., 0.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testOneHotOutOfBound(self):
-         actual = brainstate.functional.one_hot(jnp.array([-1, 3]), 3)
-         expected = jnp.array([[0., 0., 0.],
-                               [0., 0., 0.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testOneHotNonArrayInput(self):
-         actual = brainstate.functional.one_hot([0, 1, 2], 3)
-         expected = jnp.array([[1., 0., 0.],
-                               [0., 1., 0.],
-                               [0., 0., 1.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testOneHotCustomDtype(self):
-         actual = brainstate.functional.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
-         expected = jnp.array([[True, False, False],
-                               [False, True, False],
-                               [False, False, True]])
-         self.assertAllClose(actual, expected)
-
-     def testOneHotAxis(self):
-         expected = jnp.array([[0., 1., 0.],
-                               [0., 0., 1.],
-                               [1., 0., 0.]]).T
-
-         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testTanhExists(self):
-         print(brainstate.functional.tanh)  # doesn't crash
-
-     def testCustomJVPLeak(self):
-         # https://github.com/google/jax/issues/8171
-         @jax.jit
-         def fwd():
-             a = jnp.array(1.)
-
-             def f(hx, _):
-                 hx = brainstate.functional.sigmoid(hx + a)
-                 return hx, None
-
-             hx = jnp.array(0.)
-             jax.lax.scan(f, hx, None, length=2)
-
-         with jax.checking_leaks():
-             fwd()  # doesn't crash
-
-     def testCustomJVPLeak2(self):
-         # https://github.com/google/jax/issues/8171
-         # The above test uses jax.brainstate.functional.sigmoid, as in the original #8171, but that
-         # function no longer actually has a custom_jvp! So we inline the old def.
-
-         @jax.custom_jvp
-         def sigmoid(x):
-             one = jnp.float32(1)
-             return jax.lax.div(one, jax.lax.add(one, jax.lax.exp(jax.lax.neg(x))))
-
-         sigmoid.defjvps(lambda g, ans, x: g * ans * (jnp.float32(1) - ans))
-
-         @jax.jit
-         def fwd():
-             a = jnp.array(1., 'float32')
-
-             def f(hx, _):
-                 hx = sigmoid(hx + a)
-                 return hx, None
-
-             hx = jnp.array(0., 'float32')
-             jax.lax.scan(f, hx, None, length=2)
-
-         with jax.checking_leaks():
-             fwd()  # doesn't crash
+ # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+
+ """Tests for nn module."""
+
+ import itertools
+ from functools import partial
+
+ import jax
+ import jax.numpy as jnp
+ import numpy as np
+ import scipy.stats
+ from absl.testing import absltest, parameterized
+ from jax.test_util import check_grads
+
+ import brainstate
+
+
+ class NNFunctionsTest(parameterized.TestCase):
+     def setUp(self):
+         super().setUp()
+         self.rng_key = jax.random.PRNGKey(0)
+
+     def assertAllClose(self, a, b, check_dtypes=True, atol=None, rtol=None):
+         """Helper method for backwards compatibility with JAX test utilities."""
+         a = np.asarray(a)
+         b = np.asarray(b)
+         kw = {}
+         if atol is not None:
+             kw['atol'] = atol
+         if rtol is not None:
+             kw['rtol'] = rtol
+         np.testing.assert_allclose(a, b, **kw)
+         if check_dtypes:
+             self.assertEqual(a.dtype, b.dtype)
+
+     def assertArraysEqual(self, a, b):
+         """Helper method for backwards compatibility with JAX test utilities."""
+         np.testing.assert_array_equal(np.asarray(a), np.asarray(b))
+
+     def testSoftplusGrad(self):
+         check_grads(brainstate.nn.softplus, (1e-8,), order=4, )
+
+     def testSoftplusGradZero(self):
+         check_grads(brainstate.nn.softplus, (0.,), order=1)
+
+     def testSoftplusGradInf(self):
+         self.assertAllClose(1., jax.grad(brainstate.nn.softplus)(float('inf')), check_dtypes=False)
+
+     def testSoftplusGradNegInf(self):
+         check_grads(brainstate.nn.softplus, (-float('inf'),), order=1)
+
+     def testSoftplusGradNan(self):
+         check_grads(brainstate.nn.softplus, (float('nan'),), order=1)
+
+     @parameterized.parameters([int, float, jnp.float32, jnp.float64, jnp.int32, jnp.int64])
+     def testSoftplusZero(self, dtype):
+         self.assertEqual(jnp.log(dtype(2)), brainstate.nn.softplus(dtype(0)))
+
+     def testSparseplusGradZero(self):
+         check_grads(brainstate.nn.sparse_plus, (-2.,), order=1)
+
+     def testSparseplusGrad(self):
+         check_grads(brainstate.nn.sparse_plus, (0.,), order=1)
+
+     def testSparseplusAndSparseSigmoid(self):
+         self.assertAllClose(
+             jax.grad(brainstate.nn.sparse_plus)(0.),
+             brainstate.nn.sparse_sigmoid(0.),
+             check_dtypes=False)
+         self.assertAllClose(
+             jax.grad(brainstate.nn.sparse_plus)(2.),
+             brainstate.nn.sparse_sigmoid(2.),
+             check_dtypes=False)
+         self.assertAllClose(
+             jax.grad(brainstate.nn.sparse_plus)(-2.),
+             brainstate.nn.sparse_sigmoid(-2.),
+             check_dtypes=False)
+
+     # def testSquareplusGrad(self):
+     #     check_grads(brainstate.nn.squareplus, (1e-8,), order=4,
+     #                 )
+
+     # def testSquareplusGradZero(self):
+     #     check_grads(brainstate.nn.squareplus, (0.,), order=1,
+     #                 )
+
+     # def testSquareplusGradNegInf(self):
+     #     check_grads(brainstate.nn.squareplus, (-float('inf'),), order=1,
+     #                 )
+
+     # def testSquareplusGradNan(self):
+     #     check_grads(brainstate.nn.squareplus, (float('nan'),), order=1,
+     #                 )
+
+     # @parameterized.parameters([float, jnp.float32, jnp.float64])
+     # def testSquareplusZero(self, dtype):
+     #     self.assertEqual(dtype(1), brainstate.nn.squareplus(dtype(0), dtype(4)))
+     #
+     # def testMishGrad(self):
+     #     check_grads(brainstate.nn.mish, (1e-8,), order=4,
+     #                 )
+     #
+     # def testMishGradZero(self):
+     #     check_grads(brainstate.nn.mish, (0.,), order=1,
+     #                 )
+     #
+     # def testMishGradNegInf(self):
+     #     check_grads(brainstate.nn.mish, (-float('inf'),), order=1,
+     #                 )
+     #
+     # def testMishGradNan(self):
+     #     check_grads(brainstate.nn.mish, (float('nan'),), order=1,
+     #                 )
+
+     @parameterized.parameters([float, jnp.float32, jnp.float64])
+     def testMishZero(self, dtype):
+         self.assertEqual(dtype(0), brainstate.nn.mish(dtype(0)))
+
+     def testReluGrad(self):
+         rtol = None
+         check_grads(brainstate.nn.relu, (1.,), order=3, rtol=rtol)
+         check_grads(brainstate.nn.relu, (-1.,), order=3, rtol=rtol)
+         jaxpr = jax.make_jaxpr(jax.grad(brainstate.nn.relu))(0.)
+         self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
+
+     def testRelu6Grad(self):
+         rtol = None
+         check_grads(brainstate.nn.relu6, (1.,), order=3, rtol=rtol)
+         check_grads(brainstate.nn.relu6, (-1.,), order=3, rtol=rtol)
+         self.assertAllClose(jax.grad(brainstate.nn.relu6)(0.), 0., check_dtypes=False)
+         self.assertAllClose(jax.grad(brainstate.nn.relu6)(6.), 0., check_dtypes=False)
+
+     def testSoftplusValue(self):
+         val = brainstate.nn.softplus(89.)
+         self.assertAllClose(val, 89., check_dtypes=False)
+
+     def testSparseplusValue(self):
+         val = brainstate.nn.sparse_plus(89.)
+         self.assertAllClose(val, 89., check_dtypes=False)
+
+     def testSparsesigmoidValue(self):
+         self.assertAllClose(brainstate.nn.sparse_sigmoid(-2.), 0., check_dtypes=False)
+         self.assertAllClose(brainstate.nn.sparse_sigmoid(2.), 1., check_dtypes=False)
+         self.assertAllClose(brainstate.nn.sparse_sigmoid(0.), .5, check_dtypes=False)
+
+     # def testSquareplusValue(self):
+     #     val = brainstate.nn.squareplus(1e3)
+     #     self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
+
+     def testMishValue(self):
+         val = brainstate.nn.mish(1e3)
+         self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
+
+     def testEluValue(self):
+         val = brainstate.nn.elu(1e4)
+         self.assertAllClose(val, 1e4, check_dtypes=False)
+
+     def testGluValue(self):
+         val = brainstate.nn.glu(jnp.array([1.0, 0.0]), axis=0)
+         self.assertAllClose(val, jnp.array([0.5]))
+
+     @parameterized.parameters(False, True)
+     def testGeluIntType(self, approximate):
+         val_float = brainstate.nn.gelu(jnp.array(-1.0), approximate=approximate)
+         val_int = brainstate.nn.gelu(jnp.array(-1), approximate=approximate)
+         self.assertAllClose(val_float, val_int)
+
+     @parameterized.parameters(False, True)
+     def testGelu(self, approximate):
+         def gelu_reference(x):
+             return x * scipy.stats.norm.cdf(x)
+
+         x = jax.random.normal(self.rng_key, (4, 5, 6), dtype=jnp.float32)
+         expected = gelu_reference(x)
+         actual = brainstate.nn.gelu(x, approximate=approximate)
+         np.testing.assert_allclose(actual, expected, rtol=1e-2 if approximate else 1e-5, atol=1e-3 if approximate else 1e-5)
+
+     @parameterized.parameters(*itertools.product(
+         (jnp.float32, jnp.bfloat16, jnp.float16),
+         (partial(brainstate.nn.gelu, approximate=False),
+          partial(brainstate.nn.gelu, approximate=True),
+          brainstate.nn.relu,
+          brainstate.nn.softplus,
+          brainstate.nn.sparse_plus,
+          brainstate.nn.sigmoid,
+          # brainstate.nn.squareplus,
+          brainstate.nn.mish)))
+     def testDtypeMatchesInput(self, dtype, fn):
+         x = jnp.zeros((), dtype=dtype)
+         out = fn(x)
+         self.assertEqual(out.dtype, dtype)
+
+     def testEluMemory(self):
+         # see https://github.com/google/jax/pull/1640
+         with jax.enable_checks(False):  # With checks we materialize the array
+             jax.make_jaxpr(lambda: brainstate.nn.elu(jnp.ones((10 ** 12,))))  # don't oom
+
+     def testHardTanhMemory(self):
+         # see https://github.com/google/jax/pull/1640
+         with jax.enable_checks(False):  # With checks we materialize the array
+             jax.make_jaxpr(lambda: brainstate.nn.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
+
+     @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+     def testSoftmaxEmptyArray(self, fn):
+         x = jnp.array([], dtype=float)
+         self.assertArraysEqual(fn(x), x)
+
+     @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+     def testSoftmaxEmptyMask(self, fn):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         m = jnp.zeros_like(x, dtype=bool)
+         expected = jnp.full_like(x, 0.0 if fn is brainstate.nn.softmax else -jnp.inf)
+         self.assertArraysEqual(fn(x, where=m), expected)
+
+     @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+     def testSoftmaxWhereMask(self, fn):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         m = jnp.array([True, False, True, True])
+
+         out = fn(x, where=m)
+         self.assertAllClose(out[m], fn(x[m]))
+
+         probs = out if fn is brainstate.nn.softmax else jnp.exp(out)
+         self.assertAllClose(probs.sum(), 1.0, check_dtypes=False)
+
+     @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+     def testSoftmaxWhereGrad(self, fn):
+         # regression test for https://github.com/google/jax/issues/19490
+         x = jnp.array([36., 10000.])
+         mask = x < 1000
+
+         f = lambda x, mask: fn(x, where=mask)[0]
+
+         self.assertAllClose(jax.grad(f)(x, mask), jnp.zeros_like(x))
+
+     def testSoftmaxGrad(self):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         check_grads(brainstate.nn.softmax, (x,), order=2, atol=5e-3)
+
+     def testStandardizeWhereMask(self):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         m = jnp.array([True, False, True, True])
+         x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
+
+         out_masked = jnp.take(brainstate.nn.standardize(x, where=m), jnp.array([0, 2, 3]))
+         out_filtered = brainstate.nn.standardize(x_filtered)
+
+         self.assertAllClose(out_masked, out_filtered, rtol=1e-6, atol=1e-6)
+
+     def testOneHot(self):
+         actual = brainstate.nn.one_hot(jnp.array([0, 1, 2]), 3)
+         expected = jnp.array([[1., 0., 0.],
+                               [0., 1., 0.],
+                               [0., 0., 1.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+         actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3)
+         expected = jnp.array([[0., 1., 0.],
+                               [0., 0., 1.],
+                               [1., 0., 0.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testOneHotOutOfBound(self):
+         actual = brainstate.nn.one_hot(jnp.array([-1, 3]), 3)
+         expected = jnp.array([[0., 0., 0.],
+                               [0., 0., 0.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testOneHotNonArrayInput(self):
+         actual = brainstate.nn.one_hot([0, 1, 2], 3)
+         expected = jnp.array([[1., 0., 0.],
+                               [0., 1., 0.],
+                               [0., 0., 1.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testOneHotCustomDtype(self):
+         actual = brainstate.nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
+         expected = jnp.array([[True, False, False],
+                               [False, True, False],
+                               [False, False, True]])
+         self.assertAllClose(actual, expected)
+
+     def testOneHotAxis(self):
+         expected = jnp.array([[0., 1., 0.],
+                               [0., 0., 1.],
+                               [1., 0., 0.]]).T
+
+         actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+         actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testTanhExists(self):
+         print(brainstate.nn.tanh)  # doesn't crash
+
+     def testCustomJVPLeak(self):
+         # https://github.com/google/jax/issues/8171
+         @jax.jit
+         def fwd():
+             a = jnp.array(1.)
+
+             def f(hx, _):
+                 hx = brainstate.nn.sigmoid(hx + a)
+                 return hx, None
+
+             hx = jnp.array(0.)
+             jax.lax.scan(f, hx, None, length=2)
+
+         with jax.checking_leaks():
+             fwd()  # doesn't crash
+
+     def testCustomJVPLeak2(self):
+         # https://github.com/google/jax/issues/8171
+         # The above test uses jax.brainstate.nn.sigmoid, as in the original #8171, but that
+         # function no longer actually has a custom_jvp! So we inline the old def.
+
+         @jax.custom_jvp
+         def sigmoid(x):
+             one = jnp.float32(1)
+             return jax.lax.div(one, jax.lax.add(one, jax.lax.exp(jax.lax.neg(x))))
+
+         sigmoid.defjvps(lambda g, ans, x: g * ans * (jnp.float32(1) - ans))
+
+         @jax.jit
+         def fwd():
+             a = jnp.array(1., 'float32')
+
+             def f(hx, _):
+                 hx = sigmoid(hx + a)
+                 return hx, None
+
+             hx = jnp.array(0., 'float32')
+             jax.lax.scan(f, hx, None, length=2)
+
+         with jax.checking_leaks():
+             fwd()  # doesn't crash
+
+
+ if __name__ == '__main__':
+     absltest.main()
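
Beyond the functional → nn rename, the rewritten test no longer reaches into JAX's private test utilities: the `from jax._src import test_util as jtu` import, `jtu.JaxTestCase`, `jtu.rand_default`, and `self._CheckAgainstNumpy` are all replaced by `absl.testing.parameterized.TestCase`, an explicit `jax.random.PRNGKey`, and small numpy-backed assertion shims. A self-contained sketch of that pattern (illustrative only, not a copy of the shipped file):

    import numpy as np
    import jax
    import jax.numpy as jnp
    from absl.testing import absltest, parameterized


    class CompatShimTest(parameterized.TestCase):
        def assertAllClose(self, a, b, check_dtypes=True, atol=None, rtol=None):
            # numpy-backed replacement for jtu.JaxTestCase.assertAllClose
            a, b = np.asarray(a), np.asarray(b)
            kw = {k: v for k, v in (('atol', atol), ('rtol', rtol)) if v is not None}
            np.testing.assert_allclose(a, b, **kw)
            if check_dtypes:
                self.assertEqual(a.dtype, b.dtype)

        def testSigmoidAtZero(self):
            # check_dtypes=False because the Python-float reference becomes float64.
            self.assertAllClose(jax.nn.sigmoid(jnp.array(0.0)), 0.5, check_dtypes=False)


    if __name__ == '__main__':
        absltest.main()

Decoupling the suite from jax._src internals makes it robust to upstream refactors; the trade-off is that tolerances must now be chosen per test rather than inferred per dtype.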