brainstate 0.1.10__py2.py3-none-any.whl → 0.2.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (163)
  1. brainstate/__init__.py +169 -58
  2. brainstate/_compatible_import.py +340 -148
  3. brainstate/_compatible_import_test.py +681 -0
  4. brainstate/_deprecation.py +210 -0
  5. brainstate/_deprecation_test.py +2319 -0
  6. brainstate/{util/error.py → _error.py} +45 -55
  7. brainstate/_state.py +1652 -1605
  8. brainstate/_state_test.py +52 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -563
  11. brainstate/environ_test.py +1223 -62
  12. brainstate/graph/__init__.py +22 -29
  13. brainstate/graph/_node.py +240 -0
  14. brainstate/graph/_node_test.py +589 -0
  15. brainstate/graph/{_graph_operation.py → _operation.py} +1624 -1738
  16. brainstate/graph/_operation_test.py +1147 -0
  17. brainstate/mixin.py +1433 -365
  18. brainstate/mixin_test.py +1017 -77
  19. brainstate/nn/__init__.py +137 -135
  20. brainstate/nn/_activations.py +1100 -808
  21. brainstate/nn/_activations_test.py +354 -331
  22. brainstate/nn/_collective_ops.py +633 -514
  23. brainstate/nn/_collective_ops_test.py +774 -43
  24. brainstate/nn/_common.py +226 -178
  25. brainstate/nn/_common_test.py +154 -0
  26. brainstate/nn/_conv.py +2010 -501
  27. brainstate/nn/_conv_test.py +849 -238
  28. brainstate/nn/_delay.py +575 -588
  29. brainstate/nn/_delay_test.py +243 -238
  30. brainstate/nn/_dropout.py +618 -426
  31. brainstate/nn/_dropout_test.py +477 -100
  32. brainstate/nn/_dynamics.py +1267 -1343
  33. brainstate/nn/_dynamics_test.py +67 -78
  34. brainstate/nn/_elementwise.py +1298 -1119
  35. brainstate/nn/_elementwise_test.py +830 -169
  36. brainstate/nn/_embedding.py +408 -58
  37. brainstate/nn/_embedding_test.py +156 -0
  38. brainstate/nn/{_fixedprob.py → _event_fixedprob.py} +233 -239
  39. brainstate/nn/{_fixedprob_test.py → _event_fixedprob_test.py} +115 -114
  40. brainstate/nn/{_linear_mv.py → _event_linear.py} +83 -83
  41. brainstate/nn/{_linear_mv_test.py → _event_linear_test.py} +121 -120
  42. brainstate/nn/_exp_euler.py +254 -92
  43. brainstate/nn/_exp_euler_test.py +377 -35
  44. brainstate/nn/_linear.py +744 -424
  45. brainstate/nn/_linear_test.py +475 -107
  46. brainstate/nn/_metrics.py +1070 -0
  47. brainstate/nn/_metrics_test.py +611 -0
  48. brainstate/nn/_module.py +384 -377
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -975
  51. brainstate/nn/_normalizations_test.py +699 -73
  52. brainstate/nn/_paddings.py +1020 -0
  53. brainstate/nn/_paddings_test.py +723 -0
  54. brainstate/nn/_poolings.py +2239 -1177
  55. brainstate/nn/_poolings_test.py +953 -217
  56. brainstate/nn/{_rate_rnns.py → _rnns.py} +946 -554
  57. brainstate/nn/_rnns_test.py +593 -0
  58. brainstate/nn/_utils.py +216 -89
  59. brainstate/nn/_utils_test.py +402 -0
  60. brainstate/{init/_random_inits.py → nn/init.py} +809 -553
  61. brainstate/{init/_random_inits_test.py → nn/init_test.py} +180 -149
  62. brainstate/random/__init__.py +270 -24
  63. brainstate/random/_rand_funs.py +3938 -3616
  64. brainstate/random/_rand_funs_test.py +640 -567
  65. brainstate/random/_rand_seed.py +675 -210
  66. brainstate/random/_rand_seed_test.py +48 -48
  67. brainstate/random/_rand_state.py +1617 -1409
  68. brainstate/random/_rand_state_test.py +551 -0
  69. brainstate/transform/__init__.py +59 -0
  70. brainstate/transform/_ad_checkpoint.py +176 -0
  71. brainstate/{compile → transform}/_ad_checkpoint_test.py +49 -49
  72. brainstate/{augment → transform}/_autograd.py +1025 -778
  73. brainstate/{augment → transform}/_autograd_test.py +1289 -1289
  74. brainstate/transform/_conditions.py +316 -0
  75. brainstate/{compile → transform}/_conditions_test.py +220 -220
  76. brainstate/{compile → transform}/_error_if.py +94 -92
  77. brainstate/{compile → transform}/_error_if_test.py +52 -52
  78. brainstate/transform/_eval_shape.py +145 -0
  79. brainstate/{augment → transform}/_eval_shape_test.py +38 -38
  80. brainstate/{compile → transform}/_jit.py +399 -346
  81. brainstate/{compile → transform}/_jit_test.py +143 -143
  82. brainstate/{compile → transform}/_loop_collect_return.py +675 -536
  83. brainstate/{compile → transform}/_loop_collect_return_test.py +58 -58
  84. brainstate/{compile → transform}/_loop_no_collection.py +283 -184
  85. brainstate/{compile → transform}/_loop_no_collection_test.py +50 -50
  86. brainstate/transform/_make_jaxpr.py +2016 -0
  87. brainstate/transform/_make_jaxpr_test.py +1510 -0
  88. brainstate/transform/_mapping.py +529 -0
  89. brainstate/transform/_mapping_test.py +194 -0
  90. brainstate/{compile → transform}/_progress_bar.py +255 -202
  91. brainstate/{augment → transform}/_random.py +171 -151
  92. brainstate/{compile → transform}/_unvmap.py +256 -159
  93. brainstate/transform/_util.py +286 -0
  94. brainstate/typing.py +837 -304
  95. brainstate/typing_test.py +780 -0
  96. brainstate/util/__init__.py +27 -50
  97. brainstate/util/_others.py +1025 -0
  98. brainstate/util/_others_test.py +962 -0
  99. brainstate/util/_pretty_pytree.py +1301 -0
  100. brainstate/util/_pretty_pytree_test.py +675 -0
  101. brainstate/util/{pretty_repr.py → _pretty_repr.py} +462 -328
  102. brainstate/util/_pretty_repr_test.py +696 -0
  103. brainstate/util/filter.py +945 -469
  104. brainstate/util/filter_test.py +912 -0
  105. brainstate/util/struct.py +910 -523
  106. brainstate/util/struct_test.py +602 -0
  107. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/METADATA +108 -91
  108. brainstate-0.2.1.dist-info/RECORD +111 -0
  109. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/licenses/LICENSE +202 -202
  110. brainstate/augment/__init__.py +0 -30
  111. brainstate/augment/_eval_shape.py +0 -99
  112. brainstate/augment/_mapping.py +0 -1060
  113. brainstate/augment/_mapping_test.py +0 -597
  114. brainstate/compile/__init__.py +0 -38
  115. brainstate/compile/_ad_checkpoint.py +0 -204
  116. brainstate/compile/_conditions.py +0 -256
  117. brainstate/compile/_make_jaxpr.py +0 -888
  118. brainstate/compile/_make_jaxpr_test.py +0 -156
  119. brainstate/compile/_util.py +0 -147
  120. brainstate/functional/__init__.py +0 -27
  121. brainstate/graph/_graph_node.py +0 -244
  122. brainstate/graph/_graph_node_test.py +0 -73
  123. brainstate/graph/_graph_operation_test.py +0 -563
  124. brainstate/init/__init__.py +0 -26
  125. brainstate/init/_base.py +0 -52
  126. brainstate/init/_generic.py +0 -244
  127. brainstate/init/_regular_inits.py +0 -105
  128. brainstate/init/_regular_inits_test.py +0 -50
  129. brainstate/nn/_inputs.py +0 -608
  130. brainstate/nn/_ltp.py +0 -28
  131. brainstate/nn/_neuron.py +0 -705
  132. brainstate/nn/_neuron_test.py +0 -161
  133. brainstate/nn/_others.py +0 -46
  134. brainstate/nn/_projection.py +0 -486
  135. brainstate/nn/_rate_rnns_test.py +0 -63
  136. brainstate/nn/_readout.py +0 -209
  137. brainstate/nn/_readout_test.py +0 -53
  138. brainstate/nn/_stp.py +0 -236
  139. brainstate/nn/_synapse.py +0 -505
  140. brainstate/nn/_synapse_test.py +0 -131
  141. brainstate/nn/_synaptic_projection.py +0 -423
  142. brainstate/nn/_synouts.py +0 -162
  143. brainstate/nn/_synouts_test.py +0 -57
  144. brainstate/nn/metrics.py +0 -388
  145. brainstate/optim/__init__.py +0 -38
  146. brainstate/optim/_base.py +0 -64
  147. brainstate/optim/_lr_scheduler.py +0 -448
  148. brainstate/optim/_lr_scheduler_test.py +0 -50
  149. brainstate/optim/_optax_optimizer.py +0 -152
  150. brainstate/optim/_optax_optimizer_test.py +0 -53
  151. brainstate/optim/_sgd_optimizer.py +0 -1104
  152. brainstate/random/_random_for_unit.py +0 -52
  153. brainstate/surrogate.py +0 -1957
  154. brainstate/transform.py +0 -23
  155. brainstate/util/caller.py +0 -98
  156. brainstate/util/others.py +0 -540
  157. brainstate/util/pretty_pytree.py +0 -945
  158. brainstate/util/pretty_pytree_test.py +0 -159
  159. brainstate/util/pretty_table.py +0 -2954
  160. brainstate/util/scaling.py +0 -258
  161. brainstate-0.1.10.dist-info/RECORD +0 -130
  162. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/WHEEL +0 -0
  163. {brainstate-0.1.10.dist-info → brainstate-0.2.1.dist-info}/top_level.txt +0 -0
@@ -1,114 +1,115 @@
1
- # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- # ==============================================================================
15
-
16
-
17
- import jax.numpy
18
- import jax.numpy as jnp
19
- import pytest
20
-
21
- import brainstate
22
-
23
-
24
- class TestFixedProbCSR:
25
- @pytest.mark.parametrize('allow_multi_conn', [True, False, ])
26
- def test1(self, allow_multi_conn):
27
- x = brainstate.random.rand(20) < 0.1
28
- # x = brainstate.random.rand(20)
29
- m = brainstate.nn.EventFixedProb(20, 40, 0.1, 1.0, seed=123, allow_multi_conn=allow_multi_conn)
30
- y = m(x)
31
- print(y)
32
-
33
- m2 = brainstate.nn.EventFixedProb(20, 40, 0.1, brainstate.init.KaimingUniform(), seed=123)
34
- print(m2(x))
35
-
36
- def test_grad_bool(self):
37
- n_in = 20
38
- n_out = 30
39
- x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
40
- fn = brainstate.nn.EventFixedProb(n_in, n_out, 0.1, brainstate.init.KaimingUniform(), seed=123)
41
-
42
- def f(x):
43
- return fn(x).sum()
44
-
45
- print(jax.grad(f)(x))
46
-
47
- @pytest.mark.parametrize('homo_w', [True, False])
48
- def test_vjp(self, homo_w):
49
- n_in = 20
50
- n_out = 30
51
- x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
52
-
53
- if homo_w:
54
- fn = brainstate.nn.EventFixedProb(n_in, n_out, 0.1, 1.5, seed=123)
55
- else:
56
- fn = brainstate.nn.EventFixedProb(n_in, n_out, 0.1, brainstate.init.KaimingUniform(), seed=123)
57
- w = fn.weight.value
58
-
59
- def f(x, w):
60
- fn.weight.value = w
61
- return fn(x).sum()
62
-
63
- r = brainstate.augment.grad(f, argnums=(0, 1))(x, w)
64
-
65
- # -------------------
66
- # TRUE gradients
67
-
68
- def true_fn(x, w, indices, n_post):
69
- post = jnp.zeros((n_post,))
70
- for i in range(n_in):
71
- post = post.at[indices[i]].add(w * x[i] if homo_w else w[i] * x[i])
72
- return post
73
-
74
- def f2(x, w):
75
- return true_fn(x, w, fn.conn.indices, n_out).sum()
76
-
77
- r2 = jax.grad(f2, argnums=(0, 1))(x, w)
78
- assert (jnp.allclose(r[0], r2[0]))
79
- assert (jnp.allclose(r[1], r2[1]))
80
-
81
- @pytest.mark.parametrize('homo_w', [True, False])
82
- def test_jvp(self, homo_w):
83
- n_in = 20
84
- n_out = 30
85
- x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
86
-
87
- fn = brainstate.nn.EventFixedProb(
88
- n_in, n_out, 0.1, 1.5 if homo_w else brainstate.init.KaimingUniform(),
89
- seed=123,
90
- )
91
- w = fn.weight.value
92
-
93
- def f(x, w):
94
- fn.weight.value = w
95
- return fn(x)
96
-
97
- o1, r1 = jax.jvp(f, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
98
-
99
- # -------------------
100
- # TRUE gradients
101
-
102
- def true_fn(x, w, indices, n_post):
103
- post = jnp.zeros((n_post,))
104
- for i in range(n_in):
105
- post = post.at[indices[i]].add(w * x[i] if homo_w else w[i] * x[i])
106
- return post
107
-
108
- def f2(x, w):
109
- return true_fn(x, w, fn.conn.indices, n_out)
110
-
111
- o2, r2 = jax.jvp(f2, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
112
- assert (jnp.allclose(o1, o2))
113
- # assert jnp.allclose(r1, r2), f'r1={r1}, r2={r2}'
114
- assert (jnp.allclose(r1, r2, rtol=1e-4, atol=1e-4))
1
+ # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+
17
+ import jax.numpy
18
+ import jax.numpy as jnp
19
+ import pytest
20
+
21
+ import brainstate
22
+ import braintools
23
+
24
+
25
+ class TestFixedProbCSR:
26
+ @pytest.mark.parametrize('allow_multi_conn', [True, False, ])
27
+ def test1(self, allow_multi_conn):
28
+ x = brainstate.random.rand(20) < 0.1
29
+ # x = brainstate.random.rand(20)
30
+ m = brainstate.nn.EventFixedProb(20, 40, 0.1, 1.0, seed=123, allow_multi_conn=allow_multi_conn)
31
+ y = m(x)
32
+ print(y)
33
+
34
+ m2 = brainstate.nn.EventFixedProb(20, 40, 0.1, braintools.init.KaimingUniform(), seed=123)
35
+ print(m2(x))
36
+
37
+ def test_grad_bool(self):
38
+ n_in = 20
39
+ n_out = 30
40
+ x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
41
+ fn = brainstate.nn.EventFixedProb(n_in, n_out, 0.1, braintools.init.KaimingUniform(), seed=123)
42
+
43
+ def f(x):
44
+ return fn(x).sum()
45
+
46
+ print(jax.grad(f)(x))
47
+
48
+ @pytest.mark.parametrize('homo_w', [True, False])
49
+ def test_vjp(self, homo_w):
50
+ n_in = 20
51
+ n_out = 30
52
+ x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
53
+
54
+ if homo_w:
55
+ fn = brainstate.nn.EventFixedProb(n_in, n_out, 0.1, 1.5, seed=123)
56
+ else:
57
+ fn = brainstate.nn.EventFixedProb(n_in, n_out, 0.1, braintools.init.KaimingUniform(), seed=123)
58
+ w = fn.weight.value
59
+
60
+ def f(x, w):
61
+ fn.weight.value = w
62
+ return fn(x).sum()
63
+
64
+ r = brainstate.augment.grad(f, argnums=(0, 1))(x, w)
65
+
66
+ # -------------------
67
+ # TRUE gradients
68
+
69
+ def true_fn(x, w, indices, n_post):
70
+ post = jnp.zeros((n_post,))
71
+ for i in range(n_in):
72
+ post = post.at[indices[i]].add(w * x[i] if homo_w else w[i] * x[i])
73
+ return post
74
+
75
+ def f2(x, w):
76
+ return true_fn(x, w, fn.conn.indices, n_out).sum()
77
+
78
+ r2 = jax.grad(f2, argnums=(0, 1))(x, w)
79
+ assert (jnp.allclose(r[0], r2[0]))
80
+ assert (jnp.allclose(r[1], r2[1]))
81
+
82
+ @pytest.mark.parametrize('homo_w', [True, False])
83
+ def test_jvp(self, homo_w):
84
+ n_in = 20
85
+ n_out = 30
86
+ x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
87
+
88
+ fn = brainstate.nn.EventFixedProb(
89
+ n_in, n_out, 0.1, 1.5 if homo_w else braintools.init.KaimingUniform(),
90
+ seed=123,
91
+ )
92
+ w = fn.weight.value
93
+
94
+ def f(x, w):
95
+ fn.weight.value = w
96
+ return fn(x)
97
+
98
+ o1, r1 = jax.jvp(f, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
99
+
100
+ # -------------------
101
+ # TRUE gradients
102
+
103
+ def true_fn(x, w, indices, n_post):
104
+ post = jnp.zeros((n_post,))
105
+ for i in range(n_in):
106
+ post = post.at[indices[i]].add(w * x[i] if homo_w else w[i] * x[i])
107
+ return post
108
+
109
+ def f2(x, w):
110
+ return true_fn(x, w, fn.conn.indices, n_out)
111
+
112
+ o2, r2 = jax.jvp(f2, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
113
+ assert (jnp.allclose(o1, o2))
114
+ # assert jnp.allclose(r1, r2), f'r1={r1}, r2={r2}'
115
+ assert (jnp.allclose(r1, r2, rtol=1e-4, atol=1e-4))
@@ -1,83 +1,83 @@
1
- # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- # ==============================================================================
15
-
16
- from typing import Union, Callable, Optional
17
-
18
- import brainevent
19
- import brainunit as u
20
- import jax
21
-
22
- from brainstate import init
23
- from brainstate._state import ParamState
24
- from brainstate.typing import Size, ArrayLike
25
- from ._module import Module
26
-
27
- __all__ = [
28
- 'EventLinear',
29
- ]
30
-
31
-
32
- class EventLinear(Module):
33
- """
34
-
35
- Parameters
36
- ----------
37
- in_size : Size
38
- Number of pre-synaptic neurons, i.e., input size.
39
- out_size : Size
40
- Number of post-synaptic neurons, i.e., output size.
41
- weight : float or callable or jax.Array or brainunit.Quantity
42
- Maximum synaptic conductance.
43
- block_size : int, optional
44
- Block size for parallel computation.
45
- float_as_event : bool, optional
46
- Whether to treat float as event.
47
- name : str, optional
48
- Name of the module.
49
- """
50
-
51
- __module__ = 'brainstate.nn'
52
-
53
- def __init__(
54
- self,
55
- in_size: Size,
56
- out_size: Size,
57
- weight: Union[Callable, ArrayLike],
58
- float_as_event: bool = True,
59
- block_size: int = 64,
60
- name: Optional[str] = None,
61
- param_type: type = ParamState,
62
- ):
63
- super().__init__(name=name)
64
-
65
- # network parameters
66
- self.in_size = in_size
67
- self.out_size = out_size
68
- self.float_as_event = float_as_event
69
- self.block_size = block_size
70
-
71
- # maximum synaptic conductance
72
- weight = init.param(weight, (self.in_size[-1], self.out_size[-1]), allow_none=False)
73
- self.weight = param_type(weight)
74
-
75
- def update(self, spk: jax.Array) -> Union[jax.Array, u.Quantity]:
76
- weight = self.weight.value
77
- if u.math.size(weight) == 1:
78
- return u.math.ones(self.out_size) * (u.math.sum(spk) * weight)
79
-
80
- if self.float_as_event:
81
- return brainevent.EventArray(spk) @ weight
82
- else:
83
- return spk @ weight
1
+ # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ from typing import Union, Callable, Optional
17
+
18
+ import brainevent
19
+ import brainunit as u
20
+ import jax
21
+
22
+ from brainstate._state import ParamState
23
+ from brainstate.typing import Size, ArrayLike
24
+ from . import init as init
25
+ from ._module import Module
26
+
27
+ __all__ = [
28
+ 'EventLinear',
29
+ ]
30
+
31
+
32
+ class EventLinear(Module):
33
+ """
34
+
35
+ Parameters
36
+ ----------
37
+ in_size : Size
38
+ Number of pre-synaptic neurons, i.e., input size.
39
+ out_size : Size
40
+ Number of post-synaptic neurons, i.e., output size.
41
+ weight : float or callable or jax.Array or brainunit.Quantity
42
+ Maximum synaptic conductance.
43
+ block_size : int, optional
44
+ Block size for parallel computation.
45
+ float_as_event : bool, optional
46
+ Whether to treat float as event.
47
+ name : str, optional
48
+ Name of the module.
49
+ """
50
+
51
+ __module__ = 'brainstate.nn'
52
+
53
+ def __init__(
54
+ self,
55
+ in_size: Size,
56
+ out_size: Size,
57
+ weight: Union[Callable, ArrayLike],
58
+ float_as_event: bool = True,
59
+ block_size: int = 64,
60
+ name: Optional[str] = None,
61
+ param_type: type = ParamState,
62
+ ):
63
+ super().__init__(name=name)
64
+
65
+ # network parameters
66
+ self.in_size = in_size
67
+ self.out_size = out_size
68
+ self.float_as_event = float_as_event
69
+ self.block_size = block_size
70
+
71
+ # maximum synaptic conductance
72
+ weight = init.param(weight, (self.in_size[-1], self.out_size[-1]), allow_none=False)
73
+ self.weight = param_type(weight)
74
+
75
+ def update(self, spk: jax.Array) -> Union[jax.Array, u.Quantity]:
76
+ weight = self.weight.value
77
+ if u.math.size(weight) == 1:
78
+ return u.math.ones(self.out_size) * (u.math.sum(spk) * weight)
79
+
80
+ if self.float_as_event:
81
+ return brainevent.EventArray(spk) @ weight
82
+ else:
83
+ return spk @ weight
@@ -1,120 +1,121 @@
1
- # Copyright 2024 BDP Ecosystem Limited. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- # ==============================================================================
15
-
16
-
17
- import jax
18
- import jax.numpy as jnp
19
- import pytest
20
-
21
- import brainstate
22
-
23
-
24
- class TestEventLinear:
25
- @pytest.mark.parametrize('bool_x', [True, False])
26
- @pytest.mark.parametrize('homo_w', [True, False])
27
- def test1(self, homo_w, bool_x):
28
- x = brainstate.random.rand(20) < 0.1
29
- if not bool_x:
30
- x = jnp.asarray(x, dtype=float)
31
- m = brainstate.nn.EventLinear(
32
- 20, 40,
33
- 1.5 if homo_w else brainstate.init.KaimingUniform(),
34
- float_as_event=bool_x
35
- )
36
- y = m(x)
37
- print(y)
38
-
39
- assert (jnp.allclose(y, (x.sum() * m.weight.value) if homo_w else (x @ m.weight.value)))
40
-
41
- def test_grad_bool(self):
42
- n_in = 20
43
- n_out = 30
44
- x = brainstate.random.rand(n_in) < 0.3
45
- fn = brainstate.nn.EventLinear(n_in, n_out, brainstate.init.KaimingUniform())
46
-
47
- with pytest.raises(TypeError):
48
- print(jax.grad(lambda x: fn(x).sum())(x))
49
-
50
- @pytest.mark.parametrize('bool_x', [True, False])
51
- @pytest.mark.parametrize('homo_w', [True, False])
52
- def test_vjp(self, bool_x, homo_w):
53
- n_in = 20
54
- n_out = 30
55
- if bool_x:
56
- x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
57
- else:
58
- x = brainstate.random.rand(n_in)
59
-
60
- fn = brainstate.nn.EventLinear(
61
- n_in,
62
- n_out,
63
- 1.5 if homo_w else brainstate.init.KaimingUniform(),
64
- float_as_event=bool_x
65
- )
66
- w = fn.weight.value
67
-
68
- def f(x, w):
69
- fn.weight.value = w
70
- return fn(x).sum()
71
-
72
- r1 = jax.grad(f, argnums=(0, 1))(x, w)
73
-
74
- # -------------------
75
- # TRUE gradients
76
-
77
- def f2(x, w):
78
- y = (x @ (jnp.ones([n_in, n_out]) * w)) if homo_w else (x @ w)
79
- return y.sum()
80
-
81
- r2 = jax.grad(f2, argnums=(0, 1))(x, w)
82
- assert (jnp.allclose(r1[0], r2[0]))
83
-
84
- if not jnp.allclose(r1[1], r2[1]):
85
- print(r1[1] - r2[1])
86
-
87
- assert (jnp.allclose(r1[1], r2[1]))
88
-
89
- @pytest.mark.parametrize('bool_x', [True, False])
90
- @pytest.mark.parametrize('homo_w', [True, False])
91
- def test_jvp(self, bool_x, homo_w):
92
- n_in = 20
93
- n_out = 30
94
- if bool_x:
95
- x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
96
- else:
97
- x = brainstate.random.rand(n_in)
98
-
99
- fn = brainstate.nn.EventLinear(
100
- n_in, n_out, 1.5 if homo_w else brainstate.init.KaimingUniform(),
101
- float_as_event=bool_x
102
- )
103
- w = fn.weight.value
104
-
105
- def f(x, w):
106
- fn.weight.value = w
107
- return fn(x)
108
-
109
- o1, r1 = jax.jvp(f, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
110
-
111
- # -------------------
112
- # TRUE gradients
113
-
114
- def f2(x, w):
115
- y = (x @ (jnp.ones([n_in, n_out]) * w)) if homo_w else (x @ w)
116
- return y
117
-
118
- o2, r2 = jax.jvp(f, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
119
- assert (jnp.allclose(o1, o2))
120
- assert (jnp.allclose(r1, r2))
1
+ # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+
17
+ import jax
18
+ import jax.numpy as jnp
19
+ import pytest
20
+
21
+ import braintools
22
+ import brainstate
23
+
24
+
25
+ class TestEventLinear:
26
+ @pytest.mark.parametrize('bool_x', [True, False])
27
+ @pytest.mark.parametrize('homo_w', [True, False])
28
+ def test1(self, homo_w, bool_x):
29
+ x = brainstate.random.rand(20) < 0.1
30
+ if not bool_x:
31
+ x = jnp.asarray(x, dtype=float)
32
+ m = brainstate.nn.EventLinear(
33
+ 20, 40,
34
+ 1.5 if homo_w else braintools.init.KaimingUniform(),
35
+ float_as_event=bool_x
36
+ )
37
+ y = m(x)
38
+ print(y)
39
+
40
+ assert (jnp.allclose(y, (x.sum() * m.weight.value) if homo_w else (x @ m.weight.value)))
41
+
42
+ def test_grad_bool(self):
43
+ n_in = 20
44
+ n_out = 30
45
+ x = brainstate.random.rand(n_in) < 0.3
46
+ fn = brainstate.nn.EventLinear(n_in, n_out, braintools.init.KaimingUniform())
47
+
48
+ with pytest.raises(TypeError):
49
+ print(jax.grad(lambda x: fn(x).sum())(x))
50
+
51
+ @pytest.mark.parametrize('bool_x', [True, False])
52
+ @pytest.mark.parametrize('homo_w', [True, False])
53
+ def test_vjp(self, bool_x, homo_w):
54
+ n_in = 20
55
+ n_out = 30
56
+ if bool_x:
57
+ x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
58
+ else:
59
+ x = brainstate.random.rand(n_in)
60
+
61
+ fn = brainstate.nn.EventLinear(
62
+ n_in,
63
+ n_out,
64
+ 1.5 if homo_w else braintools.init.KaimingUniform(),
65
+ float_as_event=bool_x
66
+ )
67
+ w = fn.weight.value
68
+
69
+ def f(x, w):
70
+ fn.weight.value = w
71
+ return fn(x).sum()
72
+
73
+ r1 = jax.grad(f, argnums=(0, 1))(x, w)
74
+
75
+ # -------------------
76
+ # TRUE gradients
77
+
78
+ def f2(x, w):
79
+ y = (x @ (jnp.ones([n_in, n_out]) * w)) if homo_w else (x @ w)
80
+ return y.sum()
81
+
82
+ r2 = jax.grad(f2, argnums=(0, 1))(x, w)
83
+ assert (jnp.allclose(r1[0], r2[0]))
84
+
85
+ if not jnp.allclose(r1[1], r2[1]):
86
+ print(r1[1] - r2[1])
87
+
88
+ assert (jnp.allclose(r1[1], r2[1]))
89
+
90
+ @pytest.mark.parametrize('bool_x', [True, False])
91
+ @pytest.mark.parametrize('homo_w', [True, False])
92
+ def test_jvp(self, bool_x, homo_w):
93
+ n_in = 20
94
+ n_out = 30
95
+ if bool_x:
96
+ x = jax.numpy.asarray(brainstate.random.rand(n_in) < 0.3, dtype=float)
97
+ else:
98
+ x = brainstate.random.rand(n_in)
99
+
100
+ fn = brainstate.nn.EventLinear(
101
+ n_in, n_out, 1.5 if homo_w else braintools.init.KaimingUniform(),
102
+ float_as_event=bool_x
103
+ )
104
+ w = fn.weight.value
105
+
106
+ def f(x, w):
107
+ fn.weight.value = w
108
+ return fn(x)
109
+
110
+ o1, r1 = jax.jvp(f, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
111
+
112
+ # -------------------
113
+ # TRUE gradients
114
+
115
+ def f2(x, w):
116
+ y = (x @ (jnp.ones([n_in, n_out]) * w)) if homo_w else (x @ w)
117
+ return y
118
+
119
+ o2, r2 = jax.jvp(f, (x, w), (jnp.ones_like(x), jnp.ones_like(w)))
120
+ assert (jnp.allclose(o1, o2))
121
+ assert (jnp.allclose(r1, r2))