brainstate-0.2.0-py2.py3-none-any.whl → brainstate-0.2.1-py2.py3-none-any.whl

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
Files changed (112)
  1. brainstate/__init__.py +169 -169
  2. brainstate/_compatible_import.py +340 -340
  3. brainstate/_compatible_import_test.py +681 -681
  4. brainstate/_deprecation.py +210 -210
  5. brainstate/_deprecation_test.py +2319 -2319
  6. brainstate/_error.py +45 -45
  7. brainstate/_state.py +1652 -1652
  8. brainstate/_state_test.py +52 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -1495
  11. brainstate/environ_test.py +1223 -1223
  12. brainstate/graph/__init__.py +22 -22
  13. brainstate/graph/_node.py +240 -240
  14. brainstate/graph/_node_test.py +589 -589
  15. brainstate/graph/_operation.py +1624 -1624
  16. brainstate/graph/_operation_test.py +1147 -1147
  17. brainstate/mixin.py +1433 -1433
  18. brainstate/mixin_test.py +1017 -1017
  19. brainstate/nn/__init__.py +137 -137
  20. brainstate/nn/_activations.py +1100 -1100
  21. brainstate/nn/_activations_test.py +354 -354
  22. brainstate/nn/_collective_ops.py +633 -633
  23. brainstate/nn/_collective_ops_test.py +774 -774
  24. brainstate/nn/_common.py +226 -226
  25. brainstate/nn/_common_test.py +154 -154
  26. brainstate/nn/_conv.py +2010 -2010
  27. brainstate/nn/_conv_test.py +849 -849
  28. brainstate/nn/_delay.py +575 -575
  29. brainstate/nn/_delay_test.py +243 -243
  30. brainstate/nn/_dropout.py +618 -618
  31. brainstate/nn/_dropout_test.py +477 -477
  32. brainstate/nn/_dynamics.py +1267 -1267
  33. brainstate/nn/_dynamics_test.py +67 -67
  34. brainstate/nn/_elementwise.py +1298 -1298
  35. brainstate/nn/_elementwise_test.py +829 -829
  36. brainstate/nn/_embedding.py +408 -408
  37. brainstate/nn/_embedding_test.py +156 -156
  38. brainstate/nn/_event_fixedprob.py +233 -233
  39. brainstate/nn/_event_fixedprob_test.py +115 -115
  40. brainstate/nn/_event_linear.py +83 -83
  41. brainstate/nn/_event_linear_test.py +121 -121
  42. brainstate/nn/_exp_euler.py +254 -254
  43. brainstate/nn/_exp_euler_test.py +377 -377
  44. brainstate/nn/_linear.py +744 -744
  45. brainstate/nn/_linear_test.py +475 -475
  46. brainstate/nn/_metrics.py +1070 -1070
  47. brainstate/nn/_metrics_test.py +611 -611
  48. brainstate/nn/_module.py +384 -384
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -1334
  51. brainstate/nn/_normalizations_test.py +699 -699
  52. brainstate/nn/_paddings.py +1020 -1020
  53. brainstate/nn/_paddings_test.py +722 -722
  54. brainstate/nn/_poolings.py +2239 -2239
  55. brainstate/nn/_poolings_test.py +952 -952
  56. brainstate/nn/_rnns.py +946 -946
  57. brainstate/nn/_rnns_test.py +592 -592
  58. brainstate/nn/_utils.py +216 -216
  59. brainstate/nn/_utils_test.py +401 -401
  60. brainstate/nn/init.py +809 -809
  61. brainstate/nn/init_test.py +180 -180
  62. brainstate/random/__init__.py +270 -270
  63. brainstate/random/_rand_funs.py +3938 -3938
  64. brainstate/random/_rand_funs_test.py +640 -640
  65. brainstate/random/_rand_seed.py +675 -675
  66. brainstate/random/_rand_seed_test.py +48 -48
  67. brainstate/random/_rand_state.py +1617 -1617
  68. brainstate/random/_rand_state_test.py +551 -551
  69. brainstate/transform/__init__.py +59 -59
  70. brainstate/transform/_ad_checkpoint.py +176 -176
  71. brainstate/transform/_ad_checkpoint_test.py +49 -49
  72. brainstate/transform/_autograd.py +1025 -1025
  73. brainstate/transform/_autograd_test.py +1289 -1289
  74. brainstate/transform/_conditions.py +316 -316
  75. brainstate/transform/_conditions_test.py +220 -220
  76. brainstate/transform/_error_if.py +94 -94
  77. brainstate/transform/_error_if_test.py +52 -52
  78. brainstate/transform/_eval_shape.py +145 -145
  79. brainstate/transform/_eval_shape_test.py +38 -38
  80. brainstate/transform/_jit.py +399 -399
  81. brainstate/transform/_jit_test.py +143 -143
  82. brainstate/transform/_loop_collect_return.py +675 -675
  83. brainstate/transform/_loop_collect_return_test.py +58 -58
  84. brainstate/transform/_loop_no_collection.py +283 -283
  85. brainstate/transform/_loop_no_collection_test.py +50 -50
  86. brainstate/transform/_make_jaxpr.py +2016 -2016
  87. brainstate/transform/_make_jaxpr_test.py +1510 -1510
  88. brainstate/transform/_mapping.py +529 -529
  89. brainstate/transform/_mapping_test.py +194 -194
  90. brainstate/transform/_progress_bar.py +255 -255
  91. brainstate/transform/_random.py +171 -171
  92. brainstate/transform/_unvmap.py +256 -256
  93. brainstate/transform/_util.py +286 -286
  94. brainstate/typing.py +837 -837
  95. brainstate/typing_test.py +780 -780
  96. brainstate/util/__init__.py +27 -27
  97. brainstate/util/_others.py +1024 -1024
  98. brainstate/util/_others_test.py +962 -962
  99. brainstate/util/_pretty_pytree.py +1301 -1301
  100. brainstate/util/_pretty_pytree_test.py +675 -675
  101. brainstate/util/_pretty_repr.py +462 -462
  102. brainstate/util/_pretty_repr_test.py +696 -696
  103. brainstate/util/filter.py +945 -945
  104. brainstate/util/filter_test.py +911 -911
  105. brainstate/util/struct.py +910 -910
  106. brainstate/util/struct_test.py +602 -602
  107. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/METADATA +108 -108
  108. brainstate-0.2.1.dist-info/RECORD +111 -0
  109. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/licenses/LICENSE +202 -202
  110. brainstate-0.2.0.dist-info/RECORD +0 -111
  111. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/WHEEL +0 -0
  112. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/top_level.txt +0 -0
--- a/brainstate/nn/_activations_test.py
+++ b/brainstate/nn/_activations_test.py
@@ -1,354 +1,354 @@
-# Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-"""Tests for nn module."""
-
-import itertools
-from functools import partial
-
-import jax
-import jax.numpy as jnp
-import numpy as np
-import scipy.stats
-from absl.testing import absltest, parameterized
-from jax.test_util import check_grads
-
-import brainstate
-
-
-class NNFunctionsTest(parameterized.TestCase):
-    def setUp(self):
-        super().setUp()
-        self.rng_key = jax.random.PRNGKey(0)
-
-    def assertAllClose(self, a, b, check_dtypes=True, atol=None, rtol=None):
-        """Helper method for backwards compatibility with JAX test utilities."""
-        a = np.asarray(a)
-        b = np.asarray(b)
-        kw = {}
-        if atol is not None:
-            kw['atol'] = atol
-        if rtol is not None:
-            kw['rtol'] = rtol
-        np.testing.assert_allclose(a, b, **kw)
-        if check_dtypes:
-            self.assertEqual(a.dtype, b.dtype)
-
-    def assertArraysEqual(self, a, b):
-        """Helper method for backwards compatibility with JAX test utilities."""
-        np.testing.assert_array_equal(np.asarray(a), np.asarray(b))
-
-    def testSoftplusGrad(self):
-        check_grads(brainstate.nn.softplus, (1e-8,), order=4)
-
-    def testSoftplusGradZero(self):
-        check_grads(brainstate.nn.softplus, (0.,), order=1)
-
-    def testSoftplusGradInf(self):
-        self.assertAllClose(1., jax.grad(brainstate.nn.softplus)(float('inf')), check_dtypes=False)
-
-    def testSoftplusGradNegInf(self):
-        check_grads(brainstate.nn.softplus, (-float('inf'),), order=1)
-
-    def testSoftplusGradNan(self):
-        check_grads(brainstate.nn.softplus, (float('nan'),), order=1)
-
-    @parameterized.parameters([int, float, jnp.float32, jnp.float64, jnp.int32, jnp.int64])
-    def testSoftplusZero(self, dtype):
-        self.assertEqual(jnp.log(dtype(2)), brainstate.nn.softplus(dtype(0)))
-
-    def testSparseplusGradZero(self):
-        check_grads(brainstate.nn.sparse_plus, (-2.,), order=1)
-
-    def testSparseplusGrad(self):
-        check_grads(brainstate.nn.sparse_plus, (0.,), order=1)
-
-    def testSparseplusAndSparseSigmoid(self):
-        self.assertAllClose(
-            jax.grad(brainstate.nn.sparse_plus)(0.),
-            brainstate.nn.sparse_sigmoid(0.),
-            check_dtypes=False)
-        self.assertAllClose(
-            jax.grad(brainstate.nn.sparse_plus)(2.),
-            brainstate.nn.sparse_sigmoid(2.),
-            check_dtypes=False)
-        self.assertAllClose(
-            jax.grad(brainstate.nn.sparse_plus)(-2.),
-            brainstate.nn.sparse_sigmoid(-2.),
-            check_dtypes=False)
-
-    # def testSquareplusGrad(self):
-    #     check_grads(brainstate.nn.squareplus, (1e-8,), order=4,
-    #                 )
-
-    # def testSquareplusGradZero(self):
-    #     check_grads(brainstate.nn.squareplus, (0.,), order=1,
-    #                 )
-
-    # def testSquareplusGradNegInf(self):
-    #     check_grads(brainstate.nn.squareplus, (-float('inf'),), order=1,
-    #                 )
-
-    # def testSquareplusGradNan(self):
-    #     check_grads(brainstate.nn.squareplus, (float('nan'),), order=1,
-    #                 )
-
-    # @parameterized.parameters([float, jnp.float32, jnp.float64])
-    # def testSquareplusZero(self, dtype):
-    #     self.assertEqual(dtype(1), brainstate.nn.squareplus(dtype(0), dtype(4)))
-    #
-    # def testMishGrad(self):
-    #     check_grads(brainstate.nn.mish, (1e-8,), order=4,
-    #                 )
-    #
-    # def testMishGradZero(self):
-    #     check_grads(brainstate.nn.mish, (0.,), order=1,
-    #                 )
-    #
-    # def testMishGradNegInf(self):
-    #     check_grads(brainstate.nn.mish, (-float('inf'),), order=1,
-    #                 )
-    #
-    # def testMishGradNan(self):
-    #     check_grads(brainstate.nn.mish, (float('nan'),), order=1,
-    #                 )
-
-    @parameterized.parameters([float, jnp.float32, jnp.float64])
-    def testMishZero(self, dtype):
-        self.assertEqual(dtype(0), brainstate.nn.mish(dtype(0)))
-
-    def testReluGrad(self):
-        rtol = None
-        check_grads(brainstate.nn.relu, (1.,), order=3, rtol=rtol)
-        check_grads(brainstate.nn.relu, (-1.,), order=3, rtol=rtol)
-        jaxpr = jax.make_jaxpr(jax.grad(brainstate.nn.relu))(0.)
-        self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
-
-    def testRelu6Grad(self):
-        rtol = None
-        check_grads(brainstate.nn.relu6, (1.,), order=3, rtol=rtol)
-        check_grads(brainstate.nn.relu6, (-1.,), order=3, rtol=rtol)
-        self.assertAllClose(jax.grad(brainstate.nn.relu6)(0.), 0., check_dtypes=False)
-        self.assertAllClose(jax.grad(brainstate.nn.relu6)(6.), 0., check_dtypes=False)
-
-    def testSoftplusValue(self):
-        val = brainstate.nn.softplus(89.)
-        self.assertAllClose(val, 89., check_dtypes=False)
-
-    def testSparseplusValue(self):
-        val = brainstate.nn.sparse_plus(89.)
-        self.assertAllClose(val, 89., check_dtypes=False)
-
-    def testSparsesigmoidValue(self):
-        self.assertAllClose(brainstate.nn.sparse_sigmoid(-2.), 0., check_dtypes=False)
-        self.assertAllClose(brainstate.nn.sparse_sigmoid(2.), 1., check_dtypes=False)
-        self.assertAllClose(brainstate.nn.sparse_sigmoid(0.), .5, check_dtypes=False)
-
-    # def testSquareplusValue(self):
-    #     val = brainstate.nn.squareplus(1e3)
-    #     self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
-
-    def testMishValue(self):
-        val = brainstate.nn.mish(1e3)
-        self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
-
-    def testEluValue(self):
-        val = brainstate.nn.elu(1e4)
-        self.assertAllClose(val, 1e4, check_dtypes=False)
-
-    def testGluValue(self):
-        val = brainstate.nn.glu(jnp.array([1.0, 0.0]), axis=0)
-        self.assertAllClose(val, jnp.array([0.5]))
-
-    @parameterized.parameters(False, True)
-    def testGeluIntType(self, approximate):
-        val_float = brainstate.nn.gelu(jnp.array(-1.0), approximate=approximate)
-        val_int = brainstate.nn.gelu(jnp.array(-1), approximate=approximate)
-        self.assertAllClose(val_float, val_int)
-
-    @parameterized.parameters(False, True)
-    def testGelu(self, approximate):
-        def gelu_reference(x):
-            return x * scipy.stats.norm.cdf(x)
-
-        x = jax.random.normal(self.rng_key, (4, 5, 6), dtype=jnp.float32)
-        expected = gelu_reference(x)
-        actual = brainstate.nn.gelu(x, approximate=approximate)
-        np.testing.assert_allclose(actual, expected, rtol=1e-2 if approximate else 1e-5, atol=1e-3 if approximate else 1e-5)
-
-    @parameterized.parameters(*itertools.product(
-        (jnp.float32, jnp.bfloat16, jnp.float16),
-        (partial(brainstate.nn.gelu, approximate=False),
-         partial(brainstate.nn.gelu, approximate=True),
-         brainstate.nn.relu,
-         brainstate.nn.softplus,
-         brainstate.nn.sparse_plus,
-         brainstate.nn.sigmoid,
-         # brainstate.nn.squareplus,
-         brainstate.nn.mish)))
-    def testDtypeMatchesInput(self, dtype, fn):
-        x = jnp.zeros((), dtype=dtype)
-        out = fn(x)
-        self.assertEqual(out.dtype, dtype)
-
-    def testEluMemory(self):
-        # see https://github.com/google/jax/pull/1640
-        with jax.enable_checks(False):  # With checks we materialize the array
-            jax.make_jaxpr(lambda: brainstate.nn.elu(jnp.ones((10 ** 12,))))  # don't oom
-
-    def testHardTanhMemory(self):
-        # see https://github.com/google/jax/pull/1640
-        with jax.enable_checks(False):  # With checks we materialize the array
-            jax.make_jaxpr(lambda: brainstate.nn.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
-
-    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
-    def testSoftmaxEmptyArray(self, fn):
-        x = jnp.array([], dtype=float)
-        self.assertArraysEqual(fn(x), x)
-
-    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
-    def testSoftmaxEmptyMask(self, fn):
-        x = jnp.array([5.5, 1.3, -4.2, 0.9])
-        m = jnp.zeros_like(x, dtype=bool)
-        expected = jnp.full_like(x, 0.0 if fn is brainstate.nn.softmax else -jnp.inf)
-        self.assertArraysEqual(fn(x, where=m), expected)
-
-    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
-    def testSoftmaxWhereMask(self, fn):
-        x = jnp.array([5.5, 1.3, -4.2, 0.9])
-        m = jnp.array([True, False, True, True])
-
-        out = fn(x, where=m)
-        self.assertAllClose(out[m], fn(x[m]))
-
-        probs = out if fn is brainstate.nn.softmax else jnp.exp(out)
-        self.assertAllClose(probs.sum(), 1.0, check_dtypes=False)
-
-    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
-    def testSoftmaxWhereGrad(self, fn):
-        # regression test for https://github.com/google/jax/issues/19490
-        x = jnp.array([36., 10000.])
-        mask = x < 1000
-
-        f = lambda x, mask: fn(x, where=mask)[0]
-
-        self.assertAllClose(jax.grad(f)(x, mask), jnp.zeros_like(x))
-
-    def testSoftmaxGrad(self):
-        x = jnp.array([5.5, 1.3, -4.2, 0.9])
-        check_grads(brainstate.nn.softmax, (x,), order=2, atol=5e-3)
-
-    def testStandardizeWhereMask(self):
-        x = jnp.array([5.5, 1.3, -4.2, 0.9])
-        m = jnp.array([True, False, True, True])
-        x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
-
-        out_masked = jnp.take(brainstate.nn.standardize(x, where=m), jnp.array([0, 2, 3]))
-        out_filtered = brainstate.nn.standardize(x_filtered)
-
-        self.assertAllClose(out_masked, out_filtered, rtol=1e-6, atol=1e-6)
-
-    def testOneHot(self):
-        actual = brainstate.nn.one_hot(jnp.array([0, 1, 2]), 3)
-        expected = jnp.array([[1., 0., 0.],
-                              [0., 1., 0.],
-                              [0., 0., 1.]])
-        self.assertAllClose(actual, expected, check_dtypes=False)
-
-        actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3)
-        expected = jnp.array([[0., 1., 0.],
-                              [0., 0., 1.],
-                              [1., 0., 0.]])
-        self.assertAllClose(actual, expected, check_dtypes=False)
-
-    def testOneHotOutOfBound(self):
-        actual = brainstate.nn.one_hot(jnp.array([-1, 3]), 3)
-        expected = jnp.array([[0., 0., 0.],
-                              [0., 0., 0.]])
-        self.assertAllClose(actual, expected, check_dtypes=False)
-
-    def testOneHotNonArrayInput(self):
-        actual = brainstate.nn.one_hot([0, 1, 2], 3)
-        expected = jnp.array([[1., 0., 0.],
-                              [0., 1., 0.],
-                              [0., 0., 1.]])
-        self.assertAllClose(actual, expected, check_dtypes=False)
-
-    def testOneHotCustomDtype(self):
-        actual = brainstate.nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
-        expected = jnp.array([[True, False, False],
-                              [False, True, False],
-                              [False, False, True]])
-        self.assertAllClose(actual, expected)
-
-    def testOneHotAxis(self):
-        expected = jnp.array([[0., 1., 0.],
-                              [0., 0., 1.],
-                              [1., 0., 0.]]).T
-
-        actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
-        self.assertAllClose(actual, expected, check_dtypes=False)
-
-        actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
-        self.assertAllClose(actual, expected, check_dtypes=False)
-
-    def testTanhExists(self):
-        print(brainstate.nn.tanh)  # doesn't crash
-
-    def testCustomJVPLeak(self):
-        # https://github.com/google/jax/issues/8171
-        @jax.jit
-        def fwd():
-            a = jnp.array(1.)
-
-            def f(hx, _):
-                hx = brainstate.nn.sigmoid(hx + a)
-                return hx, None
-
-            hx = jnp.array(0.)
-            jax.lax.scan(f, hx, None, length=2)
-
-        with jax.checking_leaks():
-            fwd()  # doesn't crash
-
-    def testCustomJVPLeak2(self):
-        # https://github.com/google/jax/issues/8171
-        # The above test uses brainstate.nn.sigmoid, as in the original #8171, but that
-        # function no longer actually has a custom_jvp! So we inline the old def.
-
-        @jax.custom_jvp
-        def sigmoid(x):
-            one = jnp.float32(1)
-            return jax.lax.div(one, jax.lax.add(one, jax.lax.exp(jax.lax.neg(x))))
-
-        sigmoid.defjvps(lambda g, ans, x: g * ans * (jnp.float32(1) - ans))
-
-        @jax.jit
-        def fwd():
-            a = jnp.array(1., 'float32')
-
-            def f(hx, _):
-                hx = sigmoid(hx + a)
-                return hx, None
-
-            hx = jnp.array(0., 'float32')
-            jax.lax.scan(f, hx, None, length=2)
-
-        with jax.checking_leaks():
-            fwd()  # doesn't crash
-
-
-if __name__ == '__main__':
-    absltest.main()
+# Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for nn module."""
+
+import itertools
+from functools import partial
+
+import jax
+import jax.numpy as jnp
+import numpy as np
+import scipy.stats
+from absl.testing import absltest, parameterized
+from jax.test_util import check_grads
+
+import brainstate
+
+
+class NNFunctionsTest(parameterized.TestCase):
+    def setUp(self):
+        super().setUp()
+        self.rng_key = jax.random.PRNGKey(0)
+
+    def assertAllClose(self, a, b, check_dtypes=True, atol=None, rtol=None):
+        """Helper method for backwards compatibility with JAX test utilities."""
+        a = np.asarray(a)
+        b = np.asarray(b)
+        kw = {}
+        if atol is not None:
+            kw['atol'] = atol
+        if rtol is not None:
+            kw['rtol'] = rtol
+        np.testing.assert_allclose(a, b, **kw)
+        if check_dtypes:
+            self.assertEqual(a.dtype, b.dtype)
+
+    def assertArraysEqual(self, a, b):
+        """Helper method for backwards compatibility with JAX test utilities."""
+        np.testing.assert_array_equal(np.asarray(a), np.asarray(b))
+
+    def testSoftplusGrad(self):
+        check_grads(brainstate.nn.softplus, (1e-8,), order=4)
+
+    def testSoftplusGradZero(self):
+        check_grads(brainstate.nn.softplus, (0.,), order=1)
+
+    def testSoftplusGradInf(self):
+        self.assertAllClose(1., jax.grad(brainstate.nn.softplus)(float('inf')), check_dtypes=False)
+
+    def testSoftplusGradNegInf(self):
+        check_grads(brainstate.nn.softplus, (-float('inf'),), order=1)
+
+    def testSoftplusGradNan(self):
+        check_grads(brainstate.nn.softplus, (float('nan'),), order=1)
+
+    @parameterized.parameters([int, float, jnp.float32, jnp.float64, jnp.int32, jnp.int64])
+    def testSoftplusZero(self, dtype):
+        self.assertEqual(jnp.log(dtype(2)), brainstate.nn.softplus(dtype(0)))
+
+    def testSparseplusGradZero(self):
+        check_grads(brainstate.nn.sparse_plus, (-2.,), order=1)
+
+    def testSparseplusGrad(self):
+        check_grads(brainstate.nn.sparse_plus, (0.,), order=1)
+
+    def testSparseplusAndSparseSigmoid(self):
+        self.assertAllClose(
+            jax.grad(brainstate.nn.sparse_plus)(0.),
+            brainstate.nn.sparse_sigmoid(0.),
+            check_dtypes=False)
+        self.assertAllClose(
+            jax.grad(brainstate.nn.sparse_plus)(2.),
+            brainstate.nn.sparse_sigmoid(2.),
+            check_dtypes=False)
+        self.assertAllClose(
+            jax.grad(brainstate.nn.sparse_plus)(-2.),
+            brainstate.nn.sparse_sigmoid(-2.),
+            check_dtypes=False)
+
+    # def testSquareplusGrad(self):
+    #     check_grads(brainstate.nn.squareplus, (1e-8,), order=4,
+    #                 )
+
+    # def testSquareplusGradZero(self):
+    #     check_grads(brainstate.nn.squareplus, (0.,), order=1,
+    #                 )
+
+    # def testSquareplusGradNegInf(self):
+    #     check_grads(brainstate.nn.squareplus, (-float('inf'),), order=1,
+    #                 )
+
+    # def testSquareplusGradNan(self):
+    #     check_grads(brainstate.nn.squareplus, (float('nan'),), order=1,
+    #                 )
+
+    # @parameterized.parameters([float, jnp.float32, jnp.float64])
+    # def testSquareplusZero(self, dtype):
+    #     self.assertEqual(dtype(1), brainstate.nn.squareplus(dtype(0), dtype(4)))
+    #
+    # def testMishGrad(self):
+    #     check_grads(brainstate.nn.mish, (1e-8,), order=4,
+    #                 )
+    #
+    # def testMishGradZero(self):
+    #     check_grads(brainstate.nn.mish, (0.,), order=1,
+    #                 )
+    #
+    # def testMishGradNegInf(self):
+    #     check_grads(brainstate.nn.mish, (-float('inf'),), order=1,
+    #                 )
+    #
+    # def testMishGradNan(self):
+    #     check_grads(brainstate.nn.mish, (float('nan'),), order=1,
+    #                 )
+
+    @parameterized.parameters([float, jnp.float32, jnp.float64])
+    def testMishZero(self, dtype):
+        self.assertEqual(dtype(0), brainstate.nn.mish(dtype(0)))
+
+    def testReluGrad(self):
+        rtol = None
+        check_grads(brainstate.nn.relu, (1.,), order=3, rtol=rtol)
+        check_grads(brainstate.nn.relu, (-1.,), order=3, rtol=rtol)
+        jaxpr = jax.make_jaxpr(jax.grad(brainstate.nn.relu))(0.)
+        self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
+
+    def testRelu6Grad(self):
+        rtol = None
+        check_grads(brainstate.nn.relu6, (1.,), order=3, rtol=rtol)
+        check_grads(brainstate.nn.relu6, (-1.,), order=3, rtol=rtol)
+        self.assertAllClose(jax.grad(brainstate.nn.relu6)(0.), 0., check_dtypes=False)
+        self.assertAllClose(jax.grad(brainstate.nn.relu6)(6.), 0., check_dtypes=False)
+
+    def testSoftplusValue(self):
+        val = brainstate.nn.softplus(89.)
+        self.assertAllClose(val, 89., check_dtypes=False)
+
+    def testSparseplusValue(self):
+        val = brainstate.nn.sparse_plus(89.)
+        self.assertAllClose(val, 89., check_dtypes=False)
+
+    def testSparsesigmoidValue(self):
+        self.assertAllClose(brainstate.nn.sparse_sigmoid(-2.), 0., check_dtypes=False)
+        self.assertAllClose(brainstate.nn.sparse_sigmoid(2.), 1., check_dtypes=False)
+        self.assertAllClose(brainstate.nn.sparse_sigmoid(0.), .5, check_dtypes=False)
+
+    # def testSquareplusValue(self):
+    #     val = brainstate.nn.squareplus(1e3)
+    #     self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
+
+    def testMishValue(self):
+        val = brainstate.nn.mish(1e3)
+        self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
+
+    def testEluValue(self):
+        val = brainstate.nn.elu(1e4)
+        self.assertAllClose(val, 1e4, check_dtypes=False)
+
+    def testGluValue(self):
+        val = brainstate.nn.glu(jnp.array([1.0, 0.0]), axis=0)
+        self.assertAllClose(val, jnp.array([0.5]))
+
+    @parameterized.parameters(False, True)
+    def testGeluIntType(self, approximate):
+        val_float = brainstate.nn.gelu(jnp.array(-1.0), approximate=approximate)
+        val_int = brainstate.nn.gelu(jnp.array(-1), approximate=approximate)
+        self.assertAllClose(val_float, val_int)
+
+    @parameterized.parameters(False, True)
+    def testGelu(self, approximate):
+        def gelu_reference(x):
+            return x * scipy.stats.norm.cdf(x)
+
+        x = jax.random.normal(self.rng_key, (4, 5, 6), dtype=jnp.float32)
+        expected = gelu_reference(x)
+        actual = brainstate.nn.gelu(x, approximate=approximate)
+        np.testing.assert_allclose(actual, expected, rtol=1e-2 if approximate else 1e-5, atol=1e-3 if approximate else 1e-5)
+
+    @parameterized.parameters(*itertools.product(
+        (jnp.float32, jnp.bfloat16, jnp.float16),
+        (partial(brainstate.nn.gelu, approximate=False),
+         partial(brainstate.nn.gelu, approximate=True),
+         brainstate.nn.relu,
+         brainstate.nn.softplus,
+         brainstate.nn.sparse_plus,
+         brainstate.nn.sigmoid,
+         # brainstate.nn.squareplus,
+         brainstate.nn.mish)))
+    def testDtypeMatchesInput(self, dtype, fn):
+        x = jnp.zeros((), dtype=dtype)
+        out = fn(x)
+        self.assertEqual(out.dtype, dtype)
+
+    def testEluMemory(self):
+        # see https://github.com/google/jax/pull/1640
+        with jax.enable_checks(False):  # With checks we materialize the array
+            jax.make_jaxpr(lambda: brainstate.nn.elu(jnp.ones((10 ** 12,))))  # don't oom
+
+    def testHardTanhMemory(self):
+        # see https://github.com/google/jax/pull/1640
+        with jax.enable_checks(False):  # With checks we materialize the array
+            jax.make_jaxpr(lambda: brainstate.nn.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
+
+    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+    def testSoftmaxEmptyArray(self, fn):
+        x = jnp.array([], dtype=float)
+        self.assertArraysEqual(fn(x), x)
+
+    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+    def testSoftmaxEmptyMask(self, fn):
+        x = jnp.array([5.5, 1.3, -4.2, 0.9])
+        m = jnp.zeros_like(x, dtype=bool)
+        expected = jnp.full_like(x, 0.0 if fn is brainstate.nn.softmax else -jnp.inf)
+        self.assertArraysEqual(fn(x, where=m), expected)
+
+    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+    def testSoftmaxWhereMask(self, fn):
+        x = jnp.array([5.5, 1.3, -4.2, 0.9])
+        m = jnp.array([True, False, True, True])
+
+        out = fn(x, where=m)
+        self.assertAllClose(out[m], fn(x[m]))
+
+        probs = out if fn is brainstate.nn.softmax else jnp.exp(out)
+        self.assertAllClose(probs.sum(), 1.0, check_dtypes=False)
+
+    @parameterized.parameters([brainstate.nn.softmax, brainstate.nn.log_softmax])
+    def testSoftmaxWhereGrad(self, fn):
+        # regression test for https://github.com/google/jax/issues/19490
+        x = jnp.array([36., 10000.])
+        mask = x < 1000
+
+        f = lambda x, mask: fn(x, where=mask)[0]
+
+        self.assertAllClose(jax.grad(f)(x, mask), jnp.zeros_like(x))
+
+    def testSoftmaxGrad(self):
+        x = jnp.array([5.5, 1.3, -4.2, 0.9])
+        check_grads(brainstate.nn.softmax, (x,), order=2, atol=5e-3)
+
+    def testStandardizeWhereMask(self):
+        x = jnp.array([5.5, 1.3, -4.2, 0.9])
+        m = jnp.array([True, False, True, True])
+        x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
+
+        out_masked = jnp.take(brainstate.nn.standardize(x, where=m), jnp.array([0, 2, 3]))
+        out_filtered = brainstate.nn.standardize(x_filtered)
+
+        self.assertAllClose(out_masked, out_filtered, rtol=1e-6, atol=1e-6)
+
+    def testOneHot(self):
+        actual = brainstate.nn.one_hot(jnp.array([0, 1, 2]), 3)
+        expected = jnp.array([[1., 0., 0.],
+                              [0., 1., 0.],
+                              [0., 0., 1.]])
+        self.assertAllClose(actual, expected, check_dtypes=False)
+
+        actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3)
+        expected = jnp.array([[0., 1., 0.],
+                              [0., 0., 1.],
+                              [1., 0., 0.]])
+        self.assertAllClose(actual, expected, check_dtypes=False)
+
+    def testOneHotOutOfBound(self):
+        actual = brainstate.nn.one_hot(jnp.array([-1, 3]), 3)
+        expected = jnp.array([[0., 0., 0.],
+                              [0., 0., 0.]])
+        self.assertAllClose(actual, expected, check_dtypes=False)
+
+    def testOneHotNonArrayInput(self):
+        actual = brainstate.nn.one_hot([0, 1, 2], 3)
+        expected = jnp.array([[1., 0., 0.],
+                              [0., 1., 0.],
+                              [0., 0., 1.]])
+        self.assertAllClose(actual, expected, check_dtypes=False)
+
+    def testOneHotCustomDtype(self):
+        actual = brainstate.nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
+        expected = jnp.array([[True, False, False],
+                              [False, True, False],
+                              [False, False, True]])
+        self.assertAllClose(actual, expected)
+
+    def testOneHotAxis(self):
+        expected = jnp.array([[0., 1., 0.],
+                              [0., 0., 1.],
+                              [1., 0., 0.]]).T
+
+        actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
+        self.assertAllClose(actual, expected, check_dtypes=False)
+
+        actual = brainstate.nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
+        self.assertAllClose(actual, expected, check_dtypes=False)
+
+    def testTanhExists(self):
+        print(brainstate.nn.tanh)  # doesn't crash
+
+    def testCustomJVPLeak(self):
+        # https://github.com/google/jax/issues/8171
+        @jax.jit
+        def fwd():
+            a = jnp.array(1.)
+
+            def f(hx, _):
+                hx = brainstate.nn.sigmoid(hx + a)
+                return hx, None
+
+            hx = jnp.array(0.)
+            jax.lax.scan(f, hx, None, length=2)
+
+        with jax.checking_leaks():
+            fwd()  # doesn't crash
+
+    def testCustomJVPLeak2(self):
+        # https://github.com/google/jax/issues/8171
+        # The above test uses brainstate.nn.sigmoid, as in the original #8171, but that
+        # function no longer actually has a custom_jvp! So we inline the old def.
+
+        @jax.custom_jvp
+        def sigmoid(x):
+            one = jnp.float32(1)
+            return jax.lax.div(one, jax.lax.add(one, jax.lax.exp(jax.lax.neg(x))))
+
+        sigmoid.defjvps(lambda g, ans, x: g * ans * (jnp.float32(1) - ans))
+
+        @jax.jit
+        def fwd():
+            a = jnp.array(1., 'float32')
+
+            def f(hx, _):
+                hx = sigmoid(hx + a)
+                return hx, None
+
+            hx = jnp.array(0., 'float32')
+            jax.lax.scan(f, hx, None, length=2)
+
+        with jax.checking_leaks():
+            fwd()  # doesn't crash
+
+
+if __name__ == '__main__':
+    absltest.main()