brainstate-0.1.7-py2.py3-none-any.whl → brainstate-0.1.9-py2.py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in their public registry.
Files changed (133)
  1. brainstate/__init__.py +58 -51
  2. brainstate/_compatible_import.py +148 -148
  3. brainstate/_state.py +1605 -1663
  4. brainstate/_state_test.py +52 -52
  5. brainstate/_utils.py +47 -47
  6. brainstate/augment/__init__.py +30 -30
  7. brainstate/augment/_autograd.py +778 -778
  8. brainstate/augment/_autograd_test.py +1289 -1289
  9. brainstate/augment/_eval_shape.py +99 -99
  10. brainstate/augment/_eval_shape_test.py +38 -38
  11. brainstate/augment/_mapping.py +1060 -1060
  12. brainstate/augment/_mapping_test.py +597 -597
  13. brainstate/augment/_random.py +151 -151
  14. brainstate/compile/__init__.py +38 -38
  15. brainstate/compile/_ad_checkpoint.py +204 -204
  16. brainstate/compile/_ad_checkpoint_test.py +49 -49
  17. brainstate/compile/_conditions.py +256 -256
  18. brainstate/compile/_conditions_test.py +220 -220
  19. brainstate/compile/_error_if.py +92 -92
  20. brainstate/compile/_error_if_test.py +52 -52
  21. brainstate/compile/_jit.py +346 -346
  22. brainstate/compile/_jit_test.py +143 -143
  23. brainstate/compile/_loop_collect_return.py +536 -536
  24. brainstate/compile/_loop_collect_return_test.py +58 -58
  25. brainstate/compile/_loop_no_collection.py +184 -184
  26. brainstate/compile/_loop_no_collection_test.py +50 -50
  27. brainstate/compile/_make_jaxpr.py +888 -888
  28. brainstate/compile/_make_jaxpr_test.py +156 -146
  29. brainstate/compile/_progress_bar.py +202 -202
  30. brainstate/compile/_unvmap.py +159 -159
  31. brainstate/compile/_util.py +147 -147
  32. brainstate/environ.py +563 -563
  33. brainstate/environ_test.py +62 -62
  34. brainstate/functional/__init__.py +27 -26
  35. brainstate/graph/__init__.py +29 -29
  36. brainstate/graph/_graph_node.py +244 -244
  37. brainstate/graph/_graph_node_test.py +73 -73
  38. brainstate/graph/_graph_operation.py +1738 -1738
  39. brainstate/graph/_graph_operation_test.py +563 -563
  40. brainstate/init/__init__.py +26 -26
  41. brainstate/init/_base.py +52 -52
  42. brainstate/init/_generic.py +244 -244
  43. brainstate/init/_random_inits.py +553 -553
  44. brainstate/init/_random_inits_test.py +149 -149
  45. brainstate/init/_regular_inits.py +105 -105
  46. brainstate/init/_regular_inits_test.py +50 -50
  47. brainstate/mixin.py +365 -363
  48. brainstate/mixin_test.py +77 -73
  49. brainstate/nn/__init__.py +135 -131
  50. brainstate/{functional → nn}/_activations.py +808 -813
  51. brainstate/{functional → nn}/_activations_test.py +331 -331
  52. brainstate/nn/_collective_ops.py +514 -514
  53. brainstate/nn/_collective_ops_test.py +43 -43
  54. brainstate/nn/_common.py +178 -178
  55. brainstate/nn/_conv.py +501 -501
  56. brainstate/nn/_conv_test.py +238 -238
  57. brainstate/nn/_delay.py +509 -470
  58. brainstate/nn/_delay_test.py +238 -0
  59. brainstate/nn/_dropout.py +426 -426
  60. brainstate/nn/_dropout_test.py +100 -100
  61. brainstate/nn/_dynamics.py +1343 -1361
  62. brainstate/nn/_dynamics_test.py +78 -78
  63. brainstate/nn/_elementwise.py +1119 -1120
  64. brainstate/nn/_elementwise_test.py +169 -169
  65. brainstate/nn/_embedding.py +58 -58
  66. brainstate/nn/_exp_euler.py +92 -92
  67. brainstate/nn/_exp_euler_test.py +35 -35
  68. brainstate/nn/_fixedprob.py +239 -239
  69. brainstate/nn/_fixedprob_test.py +114 -114
  70. brainstate/nn/_inputs.py +608 -608
  71. brainstate/nn/_linear.py +424 -424
  72. brainstate/nn/_linear_mv.py +83 -83
  73. brainstate/nn/_linear_mv_test.py +120 -120
  74. brainstate/nn/_linear_test.py +107 -107
  75. brainstate/nn/_ltp.py +28 -28
  76. brainstate/nn/_module.py +377 -377
  77. brainstate/nn/_module_test.py +40 -208
  78. brainstate/nn/_neuron.py +705 -705
  79. brainstate/nn/_neuron_test.py +161 -161
  80. brainstate/nn/_normalizations.py +975 -918
  81. brainstate/nn/_normalizations_test.py +73 -73
  82. brainstate/{functional → nn}/_others.py +46 -46
  83. brainstate/nn/_poolings.py +1177 -1177
  84. brainstate/nn/_poolings_test.py +217 -217
  85. brainstate/nn/_projection.py +486 -486
  86. brainstate/nn/_rate_rnns.py +554 -554
  87. brainstate/nn/_rate_rnns_test.py +63 -63
  88. brainstate/nn/_readout.py +209 -209
  89. brainstate/nn/_readout_test.py +53 -53
  90. brainstate/nn/_stp.py +236 -236
  91. brainstate/nn/_synapse.py +505 -505
  92. brainstate/nn/_synapse_test.py +131 -131
  93. brainstate/nn/_synaptic_projection.py +423 -423
  94. brainstate/nn/_synouts.py +162 -162
  95. brainstate/nn/_synouts_test.py +57 -57
  96. brainstate/nn/_utils.py +89 -89
  97. brainstate/nn/metrics.py +388 -388
  98. brainstate/optim/__init__.py +38 -38
  99. brainstate/optim/_base.py +64 -64
  100. brainstate/optim/_lr_scheduler.py +448 -448
  101. brainstate/optim/_lr_scheduler_test.py +50 -50
  102. brainstate/optim/_optax_optimizer.py +152 -152
  103. brainstate/optim/_optax_optimizer_test.py +53 -53
  104. brainstate/optim/_sgd_optimizer.py +1104 -1104
  105. brainstate/random/__init__.py +24 -24
  106. brainstate/random/_rand_funs.py +3616 -3616
  107. brainstate/random/_rand_funs_test.py +567 -567
  108. brainstate/random/_rand_seed.py +210 -210
  109. brainstate/random/_rand_seed_test.py +48 -48
  110. brainstate/random/_rand_state.py +1409 -1409
  111. brainstate/random/_random_for_unit.py +52 -52
  112. brainstate/surrogate.py +1957 -1957
  113. brainstate/transform.py +23 -23
  114. brainstate/typing.py +304 -304
  115. brainstate/util/__init__.py +50 -50
  116. brainstate/util/caller.py +98 -98
  117. brainstate/util/error.py +55 -55
  118. brainstate/util/filter.py +469 -469
  119. brainstate/util/others.py +540 -540
  120. brainstate/util/pretty_pytree.py +945 -945
  121. brainstate/util/pretty_pytree_test.py +159 -159
  122. brainstate/util/pretty_repr.py +328 -328
  123. brainstate/util/pretty_table.py +2954 -2954
  124. brainstate/util/scaling.py +258 -258
  125. brainstate/util/struct.py +523 -523
  126. {brainstate-0.1.7.dist-info → brainstate-0.1.9.dist-info}/METADATA +91 -99
  127. brainstate-0.1.9.dist-info/RECORD +130 -0
  128. {brainstate-0.1.7.dist-info → brainstate-0.1.9.dist-info}/WHEEL +1 -1
  129. {brainstate-0.1.7.dist-info → brainstate-0.1.9.dist-info/licenses}/LICENSE +202 -202
  130. brainstate/functional/_normalization.py +0 -81
  131. brainstate/functional/_spikes.py +0 -204
  132. brainstate-0.1.7.dist-info/RECORD +0 -131
  133. {brainstate-0.1.7.dist-info → brainstate-0.1.9.dist-info}/top_level.txt +0 -0
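
Note on the file moves above: items 50, 51, and 82 relocate the activation utilities from brainstate/functional/ into brainstate/nn/, while functional/_normalization.py and functional/_spikes.py are deleted (items 130 and 131) and brainstate/functional/__init__.py is retained (item 34). Below is a minimal, hedged sketch of how downstream code might import across this move; which names 0.1.9 actually re-exports from each namespace is an assumption here, not something this diff confirms.

    # Hedged sketch: importing activations across the functional -> nn move
    # (items 50, 51, 82 above). Whether each namespace re-exports these
    # names in 0.1.9 is an assumption, not confirmed by this diff.
    import jax.numpy as jnp

    try:
        from brainstate.nn import relu, softplus  # new location per this diff
    except ImportError:
        # Pre-0.1.9 location; brainstate/functional/__init__.py still ships
        # in 0.1.9 (item 34), so this path may keep working as an alias.
        from brainstate.functional import relu, softplus

    x = jnp.array([-1.0, 0.0, 2.0])
    print(relu(x))      # [0. 0. 2.]
    print(softplus(x))  # elementwise log(1 + exp(x))
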
brainstate/{functional → nn}/_activations_test.py
@@ -1,331 +1,331 @@
- # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # ==============================================================================
-
- """Tests for nn module."""
-
- import itertools
- from functools import partial
-
- import jax
- import jax.numpy as jnp
- import scipy.stats
- from absl.testing import parameterized
- from jax._src import test_util as jtu
- from jax.test_util import check_grads
-
- import brainstate
-
-
- class NNFunctionsTest(jtu.JaxTestCase):
-     @jtu.skip_on_flag("jax_skip_slow_tests", True)
-     def testSoftplusGrad(self):
-         check_grads(brainstate.functional.softplus, (1e-8,), order=4, )
-
-     def testSoftplusGradZero(self):
-         check_grads(brainstate.functional.softplus, (0.,), order=1)
-
-     def testSoftplusGradInf(self):
-         self.assertAllClose(1., jax.grad(brainstate.functional.softplus)(float('inf')))
-
-     def testSoftplusGradNegInf(self):
-         check_grads(brainstate.functional.softplus, (-float('inf'),), order=1)
-
-     def testSoftplusGradNan(self):
-         check_grads(brainstate.functional.softplus, (float('nan'),), order=1)
-
-     @parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
-     def testSoftplusZero(self, dtype):
-         self.assertEqual(jnp.log(dtype(2)), brainstate.functional.softplus(dtype(0)))
-
-     def testSparseplusGradZero(self):
-         check_grads(brainstate.functional.sparse_plus, (-2.,), order=1)
-
-     def testSparseplusGrad(self):
-         check_grads(brainstate.functional.sparse_plus, (0.,), order=1)
-
-     def testSparseplusAndSparseSigmoid(self):
-         self.assertAllClose(
-             jax.grad(brainstate.functional.sparse_plus)(0.),
-             brainstate.functional.sparse_sigmoid(0.),
-             check_dtypes=False)
-         self.assertAllClose(
-             jax.grad(brainstate.functional.sparse_plus)(2.),
-             brainstate.functional.sparse_sigmoid(2.),
-             check_dtypes=False)
-         self.assertAllClose(
-             jax.grad(brainstate.functional.sparse_plus)(-2.),
-             brainstate.functional.sparse_sigmoid(-2.),
-             check_dtypes=False)
-
-     # def testSquareplusGrad(self):
-     #     check_grads(brainstate.functional.squareplus, (1e-8,), order=4,
-     #                 )
-
-     # def testSquareplusGradZero(self):
-     #     check_grads(brainstate.functional.squareplus, (0.,), order=1,
-     #                 )
-
-     # def testSquareplusGradNegInf(self):
-     #     check_grads(brainstate.functional.squareplus, (-float('inf'),), order=1,
-     #                 )
-
-     # def testSquareplusGradNan(self):
-     #     check_grads(brainstate.functional.squareplus, (float('nan'),), order=1,
-     #                 )
-
-     # @parameterized.parameters([float] + jtu.dtypes.floating)
-     # def testSquareplusZero(self, dtype):
-     #     self.assertEqual(dtype(1), brainstate.functional.squareplus(dtype(0), dtype(4)))
-     #
-     # def testMishGrad(self):
-     #     check_grads(brainstate.functional.mish, (1e-8,), order=4,
-     #                 )
-     #
-     # def testMishGradZero(self):
-     #     check_grads(brainstate.functional.mish, (0.,), order=1,
-     #                 )
-     #
-     # def testMishGradNegInf(self):
-     #     check_grads(brainstate.functional.mish, (-float('inf'),), order=1,
-     #                 )
-     #
-     # def testMishGradNan(self):
-     #     check_grads(brainstate.functional.mish, (float('nan'),), order=1,
-     #                 )
-
-     @parameterized.parameters([float] + jtu.dtypes.floating)
-     def testMishZero(self, dtype):
-         self.assertEqual(dtype(0), brainstate.functional.mish(dtype(0)))
-
-     def testReluGrad(self):
-         rtol = None
-         check_grads(brainstate.functional.relu, (1.,), order=3, rtol=rtol)
-         check_grads(brainstate.functional.relu, (-1.,), order=3, rtol=rtol)
-         jaxpr = jax.make_jaxpr(jax.grad(brainstate.functional.relu))(0.)
-         self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
-
-     def testRelu6Grad(self):
-         rtol = None
-         check_grads(brainstate.functional.relu6, (1.,), order=3, rtol=rtol)
-         check_grads(brainstate.functional.relu6, (-1.,), order=3, rtol=rtol)
-         self.assertAllClose(jax.grad(brainstate.functional.relu6)(0.), 0., check_dtypes=False)
-         self.assertAllClose(jax.grad(brainstate.functional.relu6)(6.), 0., check_dtypes=False)
-
-     def testSoftplusValue(self):
-         val = brainstate.functional.softplus(89.)
-         self.assertAllClose(val, 89., check_dtypes=False)
-
-     def testSparseplusValue(self):
-         val = brainstate.functional.sparse_plus(89.)
-         self.assertAllClose(val, 89., check_dtypes=False)
-
-     def testSparsesigmoidValue(self):
-         self.assertAllClose(brainstate.functional.sparse_sigmoid(-2.), 0., check_dtypes=False)
-         self.assertAllClose(brainstate.functional.sparse_sigmoid(2.), 1., check_dtypes=False)
-         self.assertAllClose(brainstate.functional.sparse_sigmoid(0.), .5, check_dtypes=False)
-
-     # def testSquareplusValue(self):
-     #     val = brainstate.functional.squareplus(1e3)
-     #     self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
-
-     def testMishValue(self):
-         val = brainstate.functional.mish(1e3)
-         self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
-
-     def testEluValue(self):
-         val = brainstate.functional.elu(1e4)
-         self.assertAllClose(val, 1e4, check_dtypes=False)
-
-     def testGluValue(self):
-         val = brainstate.functional.glu(jnp.array([1.0, 0.0]), axis=0)
-         self.assertAllClose(val, jnp.array([0.5]))
-
-     @parameterized.parameters(False, True)
-     def testGeluIntType(self, approximate):
-         val_float = brainstate.functional.gelu(jnp.array(-1.0), approximate=approximate)
-         val_int = brainstate.functional.gelu(jnp.array(-1), approximate=approximate)
-         self.assertAllClose(val_float, val_int)
-
-     @parameterized.parameters(False, True)
-     def testGelu(self, approximate):
-         def gelu_reference(x):
-             return x * scipy.stats.norm.cdf(x)
-
-         rng = jtu.rand_default(self.rng())
-         args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
-         self._CheckAgainstNumpy(
-             gelu_reference, partial(brainstate.functional.gelu, approximate=approximate), args_maker,
-             check_dtypes=False, tol=1e-3 if approximate else None)
-
-     @parameterized.parameters(*itertools.product(
-         (jnp.float32, jnp.bfloat16, jnp.float16),
-         (partial(brainstate.functional.gelu, approximate=False),
-          partial(brainstate.functional.gelu, approximate=True),
-          brainstate.functional.relu,
-          brainstate.functional.softplus,
-          brainstate.functional.sparse_plus,
-          brainstate.functional.sigmoid,
-          # brainstate.functional.squareplus,
-          brainstate.functional.mish)))
-     def testDtypeMatchesInput(self, dtype, fn):
-         x = jnp.zeros((), dtype=dtype)
-         out = fn(x)
-         self.assertEqual(out.dtype, dtype)
-
-     def testEluMemory(self):
-         # see https://github.com/google/jax/pull/1640
-         with jax.enable_checks(False):  # With checks we materialize the array
-             jax.make_jaxpr(lambda: brainstate.functional.elu(jnp.ones((10 ** 12,))))  # don't oom
-
-     def testHardTanhMemory(self):
-         # see https://github.com/google/jax/pull/1640
-         with jax.enable_checks(False):  # With checks we materialize the array
-             jax.make_jaxpr(lambda: brainstate.functional.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxEmptyArray(self, fn):
-         x = jnp.array([], dtype=float)
-         self.assertArraysEqual(fn(x), x)
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxEmptyMask(self, fn):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         m = jnp.zeros_like(x, dtype=bool)
-         expected = jnp.full_like(x, 0.0 if fn is brainstate.functional.softmax else -jnp.inf)
-         self.assertArraysEqual(fn(x, where=m), expected)
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxWhereMask(self, fn):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         m = jnp.array([True, False, True, True])
-
-         out = fn(x, where=m)
-         self.assertAllClose(out[m], fn(x[m]))
-
-         probs = out if fn is brainstate.functional.softmax else jnp.exp(out)
-         self.assertAllClose(probs.sum(), 1.0)
-
-     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
-     def testSoftmaxWhereGrad(self, fn):
-         # regression test for https://github.com/google/jax/issues/19490
-         x = jnp.array([36., 10000.])
-         mask = x < 1000
-
-         f = lambda x, mask: fn(x, where=mask)[0]
-
-         self.assertAllClose(jax.grad(f)(x, mask), jnp.zeros_like(x))
-
-     def testSoftmaxGrad(self):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         jtu.check_grads(brainstate.functional.softmax, (x,), order=2, atol=5e-3)
-
-     def testStandardizeWhereMask(self):
-         x = jnp.array([5.5, 1.3, -4.2, 0.9])
-         m = jnp.array([True, False, True, True])
-         x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
-
-         out_masked = jnp.take(brainstate.functional.standardize(x, where=m), jnp.array([0, 2, 3]))
-         out_filtered = brainstate.functional.standardize(x_filtered)
-
-         self.assertAllClose(out_masked, out_filtered)
-
-     def testOneHot(self):
-         actual = brainstate.functional.one_hot(jnp.array([0, 1, 2]), 3)
-         expected = jnp.array([[1., 0., 0.],
-                               [0., 1., 0.],
-                               [0., 0., 1.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3)
-         expected = jnp.array([[0., 1., 0.],
-                               [0., 0., 1.],
-                               [1., 0., 0.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testOneHotOutOfBound(self):
-         actual = brainstate.functional.one_hot(jnp.array([-1, 3]), 3)
-         expected = jnp.array([[0., 0., 0.],
-                               [0., 0., 0.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testOneHotNonArrayInput(self):
-         actual = brainstate.functional.one_hot([0, 1, 2], 3)
-         expected = jnp.array([[1., 0., 0.],
-                               [0., 1., 0.],
-                               [0., 0., 1.]])
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testOneHotCustomDtype(self):
-         actual = brainstate.functional.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
-         expected = jnp.array([[True, False, False],
-                               [False, True, False],
-                               [False, False, True]])
-         self.assertAllClose(actual, expected)
-
-     def testOneHotAxis(self):
-         expected = jnp.array([[0., 1., 0.],
-                               [0., 0., 1.],
-                               [1., 0., 0.]]).T
-
-         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
-         self.assertAllClose(actual, expected, check_dtypes=False)
-
-     def testTanhExists(self):
-         print(brainstate.functional.tanh)  # doesn't crash
-
-     def testCustomJVPLeak(self):
-         # https://github.com/google/jax/issues/8171
-         @jax.jit
-         def fwd():
-             a = jnp.array(1.)
-
-             def f(hx, _):
-                 hx = brainstate.functional.sigmoid(hx + a)
-                 return hx, None
-
-             hx = jnp.array(0.)
-             jax.lax.scan(f, hx, None, length=2)
-
-         with jax.checking_leaks():
-             fwd()  # doesn't crash
-
-     def testCustomJVPLeak2(self):
-         # https://github.com/google/jax/issues/8171
-         # The above test uses jax.brainstate.functional.sigmoid, as in the original #8171, but that
-         # function no longer actually has a custom_jvp! So we inline the old def.
-
-         @jax.custom_jvp
-         def sigmoid(x):
-             one = jnp.float32(1)
-             return jax.lax.div(one, jax.lax.add(one, jax.lax.exp(jax.lax.neg(x))))
-
-         sigmoid.defjvps(lambda g, ans, x: g * ans * (jnp.float32(1) - ans))
-
-         @jax.jit
-         def fwd():
-             a = jnp.array(1., 'float32')
-
-             def f(hx, _):
-                 hx = sigmoid(hx + a)
-                 return hx, None
-
-             hx = jnp.array(0., 'float32')
-             jax.lax.scan(f, hx, None, length=2)
-
-         with jax.checking_leaks():
-             fwd()  # doesn't crash
+ # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+
+ """Tests for nn module."""
+
+ import itertools
+ from functools import partial
+
+ import jax
+ import jax.numpy as jnp
+ import scipy.stats
+ from absl.testing import parameterized
+ from jax._src import test_util as jtu
+ from jax.test_util import check_grads
+
+ import brainstate
+
+
+ class NNFunctionsTest(jtu.JaxTestCase):
+     @jtu.skip_on_flag("jax_skip_slow_tests", True)
+     def testSoftplusGrad(self):
+         check_grads(brainstate.functional.softplus, (1e-8,), order=4, )
+
+     def testSoftplusGradZero(self):
+         check_grads(brainstate.functional.softplus, (0.,), order=1)
+
+     def testSoftplusGradInf(self):
+         self.assertAllClose(1., jax.grad(brainstate.functional.softplus)(float('inf')))
+
+     def testSoftplusGradNegInf(self):
+         check_grads(brainstate.functional.softplus, (-float('inf'),), order=1)
+
+     def testSoftplusGradNan(self):
+         check_grads(brainstate.functional.softplus, (float('nan'),), order=1)
+
+     @parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
+     def testSoftplusZero(self, dtype):
+         self.assertEqual(jnp.log(dtype(2)), brainstate.functional.softplus(dtype(0)))
+
+     def testSparseplusGradZero(self):
+         check_grads(brainstate.functional.sparse_plus, (-2.,), order=1)
+
+     def testSparseplusGrad(self):
+         check_grads(brainstate.functional.sparse_plus, (0.,), order=1)
+
+     def testSparseplusAndSparseSigmoid(self):
+         self.assertAllClose(
+             jax.grad(brainstate.functional.sparse_plus)(0.),
+             brainstate.functional.sparse_sigmoid(0.),
+             check_dtypes=False)
+         self.assertAllClose(
+             jax.grad(brainstate.functional.sparse_plus)(2.),
+             brainstate.functional.sparse_sigmoid(2.),
+             check_dtypes=False)
+         self.assertAllClose(
+             jax.grad(brainstate.functional.sparse_plus)(-2.),
+             brainstate.functional.sparse_sigmoid(-2.),
+             check_dtypes=False)
+
+     # def testSquareplusGrad(self):
+     #     check_grads(brainstate.functional.squareplus, (1e-8,), order=4,
+     #                 )
+
+     # def testSquareplusGradZero(self):
+     #     check_grads(brainstate.functional.squareplus, (0.,), order=1,
+     #                 )
+
+     # def testSquareplusGradNegInf(self):
+     #     check_grads(brainstate.functional.squareplus, (-float('inf'),), order=1,
+     #                 )
+
+     # def testSquareplusGradNan(self):
+     #     check_grads(brainstate.functional.squareplus, (float('nan'),), order=1,
+     #                 )
+
+     # @parameterized.parameters([float] + jtu.dtypes.floating)
+     # def testSquareplusZero(self, dtype):
+     #     self.assertEqual(dtype(1), brainstate.functional.squareplus(dtype(0), dtype(4)))
+     #
+     # def testMishGrad(self):
+     #     check_grads(brainstate.functional.mish, (1e-8,), order=4,
+     #                 )
+     #
+     # def testMishGradZero(self):
+     #     check_grads(brainstate.functional.mish, (0.,), order=1,
+     #                 )
+     #
+     # def testMishGradNegInf(self):
+     #     check_grads(brainstate.functional.mish, (-float('inf'),), order=1,
+     #                 )
+     #
+     # def testMishGradNan(self):
+     #     check_grads(brainstate.functional.mish, (float('nan'),), order=1,
+     #                 )
+
+     @parameterized.parameters([float] + jtu.dtypes.floating)
+     def testMishZero(self, dtype):
+         self.assertEqual(dtype(0), brainstate.functional.mish(dtype(0)))
+
+     def testReluGrad(self):
+         rtol = None
+         check_grads(brainstate.functional.relu, (1.,), order=3, rtol=rtol)
+         check_grads(brainstate.functional.relu, (-1.,), order=3, rtol=rtol)
+         jaxpr = jax.make_jaxpr(jax.grad(brainstate.functional.relu))(0.)
+         self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
+
+     def testRelu6Grad(self):
+         rtol = None
+         check_grads(brainstate.functional.relu6, (1.,), order=3, rtol=rtol)
+         check_grads(brainstate.functional.relu6, (-1.,), order=3, rtol=rtol)
+         self.assertAllClose(jax.grad(brainstate.functional.relu6)(0.), 0., check_dtypes=False)
+         self.assertAllClose(jax.grad(brainstate.functional.relu6)(6.), 0., check_dtypes=False)
+
+     def testSoftplusValue(self):
+         val = brainstate.functional.softplus(89.)
+         self.assertAllClose(val, 89., check_dtypes=False)
+
+     def testSparseplusValue(self):
+         val = brainstate.functional.sparse_plus(89.)
+         self.assertAllClose(val, 89., check_dtypes=False)
+
+     def testSparsesigmoidValue(self):
+         self.assertAllClose(brainstate.functional.sparse_sigmoid(-2.), 0., check_dtypes=False)
+         self.assertAllClose(brainstate.functional.sparse_sigmoid(2.), 1., check_dtypes=False)
+         self.assertAllClose(brainstate.functional.sparse_sigmoid(0.), .5, check_dtypes=False)
+
+     # def testSquareplusValue(self):
+     #     val = brainstate.functional.squareplus(1e3)
+     #     self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
+
+     def testMishValue(self):
+         val = brainstate.functional.mish(1e3)
+         self.assertAllClose(val, 1e3, check_dtypes=False, atol=1e-3)
+
+     def testEluValue(self):
+         val = brainstate.functional.elu(1e4)
+         self.assertAllClose(val, 1e4, check_dtypes=False)
+
+     def testGluValue(self):
+         val = brainstate.functional.glu(jnp.array([1.0, 0.0]), axis=0)
+         self.assertAllClose(val, jnp.array([0.5]))
+
+     @parameterized.parameters(False, True)
+     def testGeluIntType(self, approximate):
+         val_float = brainstate.functional.gelu(jnp.array(-1.0), approximate=approximate)
+         val_int = brainstate.functional.gelu(jnp.array(-1), approximate=approximate)
+         self.assertAllClose(val_float, val_int)
+
+     @parameterized.parameters(False, True)
+     def testGelu(self, approximate):
+         def gelu_reference(x):
+             return x * scipy.stats.norm.cdf(x)
+
+         rng = jtu.rand_default(self.rng())
+         args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
+         self._CheckAgainstNumpy(
+             gelu_reference, partial(brainstate.functional.gelu, approximate=approximate), args_maker,
+             check_dtypes=False, tol=1e-3 if approximate else None)
+
+     @parameterized.parameters(*itertools.product(
+         (jnp.float32, jnp.bfloat16, jnp.float16),
+         (partial(brainstate.functional.gelu, approximate=False),
+          partial(brainstate.functional.gelu, approximate=True),
+          brainstate.functional.relu,
+          brainstate.functional.softplus,
+          brainstate.functional.sparse_plus,
+          brainstate.functional.sigmoid,
+          # brainstate.functional.squareplus,
+          brainstate.functional.mish)))
+     def testDtypeMatchesInput(self, dtype, fn):
+         x = jnp.zeros((), dtype=dtype)
+         out = fn(x)
+         self.assertEqual(out.dtype, dtype)
+
+     def testEluMemory(self):
+         # see https://github.com/google/jax/pull/1640
+         with jax.enable_checks(False):  # With checks we materialize the array
+             jax.make_jaxpr(lambda: brainstate.functional.elu(jnp.ones((10 ** 12,))))  # don't oom
+
+     def testHardTanhMemory(self):
+         # see https://github.com/google/jax/pull/1640
+         with jax.enable_checks(False):  # With checks we materialize the array
+             jax.make_jaxpr(lambda: brainstate.functional.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
+
+     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
+     def testSoftmaxEmptyArray(self, fn):
+         x = jnp.array([], dtype=float)
+         self.assertArraysEqual(fn(x), x)
+
+     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
+     def testSoftmaxEmptyMask(self, fn):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         m = jnp.zeros_like(x, dtype=bool)
+         expected = jnp.full_like(x, 0.0 if fn is brainstate.functional.softmax else -jnp.inf)
+         self.assertArraysEqual(fn(x, where=m), expected)
+
+     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
+     def testSoftmaxWhereMask(self, fn):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         m = jnp.array([True, False, True, True])
+
+         out = fn(x, where=m)
+         self.assertAllClose(out[m], fn(x[m]))
+
+         probs = out if fn is brainstate.functional.softmax else jnp.exp(out)
+         self.assertAllClose(probs.sum(), 1.0)
+
+     @parameterized.parameters([brainstate.functional.softmax, brainstate.functional.log_softmax])
+     def testSoftmaxWhereGrad(self, fn):
+         # regression test for https://github.com/google/jax/issues/19490
+         x = jnp.array([36., 10000.])
+         mask = x < 1000
+
+         f = lambda x, mask: fn(x, where=mask)[0]
+
+         self.assertAllClose(jax.grad(f)(x, mask), jnp.zeros_like(x))
+
+     def testSoftmaxGrad(self):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         jtu.check_grads(brainstate.functional.softmax, (x,), order=2, atol=5e-3)
+
+     def testStandardizeWhereMask(self):
+         x = jnp.array([5.5, 1.3, -4.2, 0.9])
+         m = jnp.array([True, False, True, True])
+         x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
+
+         out_masked = jnp.take(brainstate.functional.standardize(x, where=m), jnp.array([0, 2, 3]))
+         out_filtered = brainstate.functional.standardize(x_filtered)
+
+         self.assertAllClose(out_masked, out_filtered)
+
+     def testOneHot(self):
+         actual = brainstate.functional.one_hot(jnp.array([0, 1, 2]), 3)
+         expected = jnp.array([[1., 0., 0.],
+                               [0., 1., 0.],
+                               [0., 0., 1.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3)
+         expected = jnp.array([[0., 1., 0.],
+                               [0., 0., 1.],
+                               [1., 0., 0.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testOneHotOutOfBound(self):
+         actual = brainstate.functional.one_hot(jnp.array([-1, 3]), 3)
+         expected = jnp.array([[0., 0., 0.],
+                               [0., 0., 0.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testOneHotNonArrayInput(self):
+         actual = brainstate.functional.one_hot([0, 1, 2], 3)
+         expected = jnp.array([[1., 0., 0.],
+                               [0., 1., 0.],
+                               [0., 0., 1.]])
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testOneHotCustomDtype(self):
+         actual = brainstate.functional.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
+         expected = jnp.array([[True, False, False],
+                               [False, True, False],
+                               [False, False, True]])
+         self.assertAllClose(actual, expected)
+
+     def testOneHotAxis(self):
+         expected = jnp.array([[0., 1., 0.],
+                               [0., 0., 1.],
+                               [1., 0., 0.]]).T
+
+         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+         actual = brainstate.functional.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
+         self.assertAllClose(actual, expected, check_dtypes=False)
+
+     def testTanhExists(self):
+         print(brainstate.functional.tanh)  # doesn't crash
+
+     def testCustomJVPLeak(self):
+         # https://github.com/google/jax/issues/8171
+         @jax.jit
+         def fwd():
+             a = jnp.array(1.)
+
+             def f(hx, _):
+                 hx = brainstate.functional.sigmoid(hx + a)
+                 return hx, None
+
+             hx = jnp.array(0.)
+             jax.lax.scan(f, hx, None, length=2)
+
+         with jax.checking_leaks():
+             fwd()  # doesn't crash
+
+     def testCustomJVPLeak2(self):
+         # https://github.com/google/jax/issues/8171
+         # The above test uses jax.brainstate.functional.sigmoid, as in the original #8171, but that
+         # function no longer actually has a custom_jvp! So we inline the old def.
+
+         @jax.custom_jvp
+         def sigmoid(x):
+             one = jnp.float32(1)
+             return jax.lax.div(one, jax.lax.add(one, jax.lax.exp(jax.lax.neg(x))))
+
+         sigmoid.defjvps(lambda g, ans, x: g * ans * (jnp.float32(1) - ans))
+
+         @jax.jit
+         def fwd():
+             a = jnp.array(1., 'float32')
+
+             def f(hx, _):
+                 hx = sigmoid(hx + a)
+                 return hx, None
+
+             hx = jnp.array(0., 'float32')
+             jax.lax.scan(f, hx, None, length=2)
+
+         with jax.checking_leaks():
+             fwd()  # doesn't crash