brainstate 0.2.1__py2.py3-none-any.whl → 0.2.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. brainstate/__init__.py +167 -169
  2. brainstate/_compatible_import.py +340 -340
  3. brainstate/_compatible_import_test.py +681 -681
  4. brainstate/_deprecation.py +210 -210
  5. brainstate/_deprecation_test.py +2297 -2319
  6. brainstate/_error.py +45 -45
  7. brainstate/_state.py +2157 -1652
  8. brainstate/_state_test.py +1129 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -1495
  11. brainstate/environ_test.py +1223 -1223
  12. brainstate/graph/__init__.py +22 -22
  13. brainstate/graph/_node.py +240 -240
  14. brainstate/graph/_node_test.py +589 -589
  15. brainstate/graph/_operation.py +1620 -1624
  16. brainstate/graph/_operation_test.py +1147 -1147
  17. brainstate/mixin.py +1447 -1433
  18. brainstate/mixin_test.py +1017 -1017
  19. brainstate/nn/__init__.py +146 -137
  20. brainstate/nn/_activations.py +1100 -1100
  21. brainstate/nn/_activations_test.py +354 -354
  22. brainstate/nn/_collective_ops.py +635 -633
  23. brainstate/nn/_collective_ops_test.py +774 -774
  24. brainstate/nn/_common.py +226 -226
  25. brainstate/nn/_common_test.py +134 -154
  26. brainstate/nn/_conv.py +2010 -2010
  27. brainstate/nn/_conv_test.py +849 -849
  28. brainstate/nn/_delay.py +575 -575
  29. brainstate/nn/_delay_test.py +243 -243
  30. brainstate/nn/_dropout.py +618 -618
  31. brainstate/nn/_dropout_test.py +480 -477
  32. brainstate/nn/_dynamics.py +870 -1267
  33. brainstate/nn/_dynamics_test.py +53 -67
  34. brainstate/nn/_elementwise.py +1298 -1298
  35. brainstate/nn/_elementwise_test.py +829 -829
  36. brainstate/nn/_embedding.py +408 -408
  37. brainstate/nn/_embedding_test.py +156 -156
  38. brainstate/nn/_event_fixedprob.py +233 -233
  39. brainstate/nn/_event_fixedprob_test.py +115 -115
  40. brainstate/nn/_event_linear.py +83 -83
  41. brainstate/nn/_event_linear_test.py +121 -121
  42. brainstate/nn/_exp_euler.py +254 -254
  43. brainstate/nn/_exp_euler_test.py +377 -377
  44. brainstate/nn/_linear.py +744 -744
  45. brainstate/nn/_linear_test.py +475 -475
  46. brainstate/nn/_metrics.py +1070 -1070
  47. brainstate/nn/_metrics_test.py +611 -611
  48. brainstate/nn/_module.py +391 -384
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -1334
  51. brainstate/nn/_normalizations_test.py +699 -699
  52. brainstate/nn/_paddings.py +1020 -1020
  53. brainstate/nn/_paddings_test.py +722 -722
  54. brainstate/nn/_poolings.py +2239 -2239
  55. brainstate/nn/_poolings_test.py +952 -952
  56. brainstate/nn/_rnns.py +946 -946
  57. brainstate/nn/_rnns_test.py +592 -592
  58. brainstate/nn/_utils.py +216 -216
  59. brainstate/nn/_utils_test.py +401 -401
  60. brainstate/nn/init.py +809 -809
  61. brainstate/nn/init_test.py +180 -180
  62. brainstate/random/__init__.py +270 -270
  63. brainstate/random/{_rand_funs.py → _fun.py} +3938 -3938
  64. brainstate/random/{_rand_funs_test.py → _fun_test.py} +638 -640
  65. brainstate/random/_impl.py +672 -0
  66. brainstate/random/{_rand_seed.py → _seed.py} +675 -675
  67. brainstate/random/{_rand_seed_test.py → _seed_test.py} +48 -48
  68. brainstate/random/{_rand_state.py → _state.py} +1320 -1617
  69. brainstate/random/{_rand_state_test.py → _state_test.py} +551 -551
  70. brainstate/transform/__init__.py +56 -59
  71. brainstate/transform/_ad_checkpoint.py +176 -176
  72. brainstate/transform/_ad_checkpoint_test.py +49 -49
  73. brainstate/transform/_autograd.py +1025 -1025
  74. brainstate/transform/_autograd_test.py +1289 -1289
  75. brainstate/transform/_conditions.py +316 -316
  76. brainstate/transform/_conditions_test.py +220 -220
  77. brainstate/transform/_error_if.py +94 -94
  78. brainstate/transform/_error_if_test.py +52 -52
  79. brainstate/transform/_find_state.py +200 -0
  80. brainstate/transform/_find_state_test.py +84 -0
  81. brainstate/transform/_jit.py +399 -399
  82. brainstate/transform/_jit_test.py +143 -143
  83. brainstate/transform/_loop_collect_return.py +675 -675
  84. brainstate/transform/_loop_collect_return_test.py +58 -58
  85. brainstate/transform/_loop_no_collection.py +283 -283
  86. brainstate/transform/_loop_no_collection_test.py +50 -50
  87. brainstate/transform/_make_jaxpr.py +2176 -2016
  88. brainstate/transform/_make_jaxpr_test.py +1634 -1510
  89. brainstate/transform/_mapping.py +607 -529
  90. brainstate/transform/_mapping_test.py +104 -194
  91. brainstate/transform/_progress_bar.py +255 -255
  92. brainstate/transform/_unvmap.py +256 -256
  93. brainstate/transform/_util.py +286 -286
  94. brainstate/typing.py +837 -837
  95. brainstate/typing_test.py +780 -780
  96. brainstate/util/__init__.py +27 -27
  97. brainstate/util/_others.py +1024 -1024
  98. brainstate/util/_others_test.py +962 -962
  99. brainstate/util/_pretty_pytree.py +1301 -1301
  100. brainstate/util/_pretty_pytree_test.py +675 -675
  101. brainstate/util/_pretty_repr.py +462 -462
  102. brainstate/util/_pretty_repr_test.py +696 -696
  103. brainstate/util/filter.py +945 -945
  104. brainstate/util/filter_test.py +911 -911
  105. brainstate/util/struct.py +910 -910
  106. brainstate/util/struct_test.py +602 -602
  107. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/METADATA +108 -108
  108. brainstate-0.2.2.dist-info/RECORD +111 -0
  109. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/licenses/LICENSE +202 -202
  110. brainstate/transform/_eval_shape.py +0 -145
  111. brainstate/transform/_eval_shape_test.py +0 -38
  112. brainstate/transform/_random.py +0 -171
  113. brainstate-0.2.1.dist-info/RECORD +0 -111
  114. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/WHEEL +0 -0
  115. {brainstate-0.2.1.dist-info → brainstate-0.2.2.dist-info}/top_level.txt +0 -0
@@ -1,475 +1,475 @@
1
- # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- # ==============================================================================
15
-
16
- import unittest
17
-
18
- import brainunit as u
19
- import jax.numpy as jnp
20
- from absl.testing import parameterized
21
-
22
- import brainstate
23
- import braintools
24
-
25
-
26
- class TestLinear(parameterized.TestCase):
27
- """Test suite for Linear layer."""
28
-
29
- @parameterized.product(
30
- size=[(10,), (20, 10), (5, 8, 10)],
31
- num_out=[20, 5]
32
- )
33
- def test_linear_shapes(self, size, num_out):
34
- """Test output shapes with various input dimensions."""
35
- layer = brainstate.nn.Linear(10, num_out)
36
- x = brainstate.random.random(size)
37
- y = layer(x)
38
- self.assertEqual(y.shape, size[:-1] + (num_out,))
39
-
40
- def test_linear_with_bias(self):
41
- """Test linear layer with bias."""
42
- layer = brainstate.nn.Linear(10, 5)
43
- self.assertIn('bias', layer.weight.value)
44
- x = brainstate.random.random((3, 10))
45
- y = layer(x)
46
- self.assertEqual(y.shape, (3, 5))
47
-
48
- def test_linear_without_bias(self):
49
- """Test linear layer without bias."""
50
- layer = brainstate.nn.Linear(10, 5, b_init=None)
51
- self.assertNotIn('bias', layer.weight.value)
52
- x = brainstate.random.random((3, 10))
53
- y = layer(x)
54
- self.assertEqual(y.shape, (3, 5))
55
-
56
- def test_linear_with_mask(self):
57
- """Test linear layer with weight mask."""
58
- w_mask = jnp.ones((10, 5))
59
- w_mask = w_mask.at[:, 0].set(0) # mask out first output column
60
- layer = brainstate.nn.Linear(10, 5, w_mask=w_mask)
61
- x = jnp.ones((3, 10))
62
- y = layer(x)
63
- self.assertEqual(y.shape, (3, 5))
64
-
65
- def test_linear_weight_initialization(self):
66
- """Test custom weight initialization."""
67
- layer = brainstate.nn.Linear(
68
- 10, 5,
69
- w_init=braintools.init.ZeroInit(),
70
- b_init=braintools.init.Constant(1.0)
71
- )
72
- self.assertTrue(jnp.allclose(layer.weight.value['weight'], 0.0))
73
- self.assertTrue(jnp.allclose(layer.weight.value['bias'], 1.0))
74
-
75
- def test_linear_computation(self):
76
- """Test that computation is correct."""
77
- layer = brainstate.nn.Linear(3, 2, b_init=None)
78
- # Set known weights
79
- layer.weight.value = {'weight': jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])}
80
- x = jnp.array([[1.0, 2.0, 3.0]])
81
- y = layer(x)
82
- expected = jnp.array([[4.0, 5.0]]) # [1*1+2*0+3*1, 1*0+2*1+3*1]
83
- self.assertTrue(jnp.allclose(y, expected))
84
-
85
-
86
- class TestSignedWLinear(parameterized.TestCase):
87
- """Test suite for SignedWLinear layer."""
88
-
89
- @parameterized.product(
90
- in_size=[10, 20],
91
- out_size=[5, 10]
92
- )
93
- def test_signed_linear_shapes(self, in_size, out_size):
94
- """Test output shapes."""
95
- layer = brainstate.nn.SignedWLinear((in_size,), (out_size,))
96
- x = brainstate.random.random((3, in_size))
97
- y = layer(x)
98
- self.assertEqual(y.shape, (3, out_size))
99
-
100
- def test_signed_linear_positive_weights(self):
101
- """Test that weights are positive when w_sign is None."""
102
- layer = brainstate.nn.SignedWLinear((5,), (3,))
103
- # Set weights to negative values
104
- layer.weight.value = jnp.array([[-1.0, -2.0, -3.0]] * 5)
105
- x = jnp.ones((1, 5))
106
- y = layer(x)
107
- # Output should be positive since abs is applied
108
- self.assertTrue(jnp.all(y > 0))
109
-
110
- def test_signed_linear_with_sign_matrix(self):
111
- """Test signed linear with custom sign matrix."""
112
- w_sign = jnp.ones((5, 3)) * -1.0 # all negative
113
- layer = brainstate.nn.SignedWLinear((5,), (3,), w_sign=w_sign)
114
- layer.weight.value = jnp.ones((5, 3))
115
- x = jnp.ones((1, 5))
116
- y = layer(x)
117
- # All outputs should be negative
118
- self.assertTrue(jnp.all(y < 0))
119
-
120
- def test_signed_linear_mixed_signs(self):
121
- """Test with mixed positive and negative signs."""
122
- w_sign = jnp.array([[1.0, -1.0], [1.0, -1.0], [-1.0, 1.0]])
123
- layer = brainstate.nn.SignedWLinear((3,), (2,), w_sign=w_sign)
124
- layer.weight.value = jnp.ones((3, 2))
125
- x = jnp.array([[1.0, 1.0, 1.0]])
126
- y = layer(x)
127
- expected = jnp.array([[1.0, -1.0]]) # [1-1, -1+1]
128
- self.assertTrue(jnp.allclose(y, expected))
129
-
130
-
131
- class TestScaledWSLinear(parameterized.TestCase):
132
- """Test suite for ScaledWSLinear layer."""
133
-
134
- @parameterized.product(
135
- in_size=[10, 20],
136
- out_size=[5, 10],
137
- ws_gain=[True, False]
138
- )
139
- def test_scaled_ws_shapes(self, in_size, out_size, ws_gain):
140
- """Test output shapes with and without gain."""
141
- layer = brainstate.nn.ScaledWSLinear((in_size,), (out_size,), ws_gain=ws_gain)
142
- x = brainstate.random.random((3, in_size))
143
- y = layer(x)
144
- self.assertEqual(y.shape, (3, out_size))
145
-
146
- def test_scaled_ws_with_gain(self):
147
- """Test that gain parameter exists when ws_gain=True."""
148
- layer = brainstate.nn.ScaledWSLinear((10,), (5,), ws_gain=True)
149
- self.assertIn('gain', layer.weight.value)
150
-
151
- def test_scaled_ws_without_gain(self):
152
- """Test that gain parameter is absent when ws_gain=False."""
153
- layer = brainstate.nn.ScaledWSLinear((10,), (5,), ws_gain=False)
154
- self.assertNotIn('gain', layer.weight.value)
155
-
156
- def test_scaled_ws_with_mask(self):
157
- """Test scaled WS linear with weight mask."""
158
- w_mask = jnp.ones((10, 1))
159
- layer = brainstate.nn.ScaledWSLinear((10,), (5,), w_mask=w_mask)
160
- x = brainstate.random.random((3, 10))
161
- y = layer(x)
162
- self.assertEqual(y.shape, (3, 5))
163
-
164
- def test_scaled_ws_without_bias(self):
165
- """Test scaled WS linear without bias."""
166
- layer = brainstate.nn.ScaledWSLinear((10,), (5,), b_init=None)
167
- self.assertNotIn('bias', layer.weight.value)
168
- x = brainstate.random.random((3, 10))
169
- y = layer(x)
170
- self.assertEqual(y.shape, (3, 5))
171
-
172
- def test_scaled_ws_eps_parameter(self):
173
- """Test that eps parameter is stored correctly."""
174
- eps_value = 1e-5
175
- layer = brainstate.nn.ScaledWSLinear((10,), (5,), eps=eps_value)
176
- self.assertEqual(layer.eps, eps_value)
177
-
178
-
179
- class TestSparseLinear(unittest.TestCase):
180
- """Test suite for SparseLinear layer."""
181
-
182
- def test_sparse_csr(self):
183
- """Test SparseLinear with CSR format."""
184
- data = brainstate.random.rand(10, 20)
185
- data = data * (data > 0.9)
186
- layer = brainstate.nn.SparseLinear(u.sparse.CSR.fromdense(data))
187
-
188
- x = brainstate.random.rand(10)
189
- y = layer(x)
190
- self.assertTrue(u.math.allclose(y, x @ data))
191
-
192
- x = brainstate.random.rand(5, 10)
193
- y = layer(x)
194
- self.assertTrue(u.math.allclose(y, x @ data))
195
-
196
- def test_sparse_csc(self):
197
- """Test SparseLinear with CSC format."""
198
- data = brainstate.random.rand(10, 20)
199
- data = data * (data > 0.9)
200
- layer = brainstate.nn.SparseLinear(u.sparse.CSC.fromdense(data))
201
-
202
- x = brainstate.random.rand(10)
203
- y = layer(x)
204
- self.assertTrue(u.math.allclose(y, x @ data))
205
-
206
- x = brainstate.random.rand(5, 10)
207
- y = layer(x)
208
- self.assertTrue(u.math.allclose(y, x @ data))
209
-
210
- def test_sparse_coo(self):
211
- """Test SparseLinear with COO format."""
212
- data = brainstate.random.rand(10, 20)
213
- data = data * (data > 0.9)
214
- layer = brainstate.nn.SparseLinear(u.sparse.COO.fromdense(data))
215
-
216
- x = brainstate.random.rand(10)
217
- y = layer(x)
218
- self.assertTrue(u.math.allclose(y, x @ data))
219
-
220
- x = brainstate.random.rand(5, 10)
221
- y = layer(x)
222
- self.assertTrue(u.math.allclose(y, x @ data))
223
-
224
- def test_sparse_with_bias(self):
225
- """Test SparseLinear with bias."""
226
- data = brainstate.random.rand(10, 20)
227
- data = data * (data > 0.9)
228
- spar_mat = u.sparse.CSR.fromdense(data)
229
- layer = brainstate.nn.SparseLinear(
230
- spar_mat,
231
- b_init=braintools.init.Constant(0.5),
232
- in_size=(10,)
233
- )
234
- self.assertIn('bias', layer.weight.value)
235
- x = brainstate.random.rand(5, 10)
236
- y = layer(x)
237
- expected = x @ data + 0.5
238
- self.assertTrue(u.math.allclose(y, expected))
239
-
240
- def test_sparse_without_bias(self):
241
- """Test SparseLinear without bias."""
242
- data = brainstate.random.rand(10, 20)
243
- data = data * (data > 0.9)
244
- spar_mat = u.sparse.CSR.fromdense(data)
245
- layer = brainstate.nn.SparseLinear(spar_mat, b_init=None)
246
- self.assertNotIn('bias', layer.weight.value)
247
-
248
-
249
- class TestAllToAll(parameterized.TestCase):
250
- """Test suite for AllToAll connection layer."""
251
-
252
- @parameterized.product(
253
- in_size=[10, 20],
254
- out_size=[10, 15],
255
- include_self=[True, False]
256
- )
257
- def test_all_to_all_shapes(self, in_size, out_size, include_self):
258
- """Test output shapes with various configurations."""
259
- layer = brainstate.nn.AllToAll((in_size,), (out_size,), include_self=include_self)
260
- x = brainstate.random.random((3, in_size))
261
- y = layer(x)
262
- self.assertEqual(y.shape, (3, out_size))
263
-
264
- def test_all_to_all_with_self(self):
265
- """Test all-to-all with self-connections."""
266
- layer = brainstate.nn.AllToAll((5,), (5,), include_self=True)
267
- layer.weight.value = {'weight': jnp.eye(5)}
268
- x = jnp.ones((1, 5))
269
- y = layer(x)
270
- expected = jnp.ones((1, 5))
271
- self.assertTrue(jnp.allclose(y, expected))
272
-
273
- def test_all_to_all_without_self(self):
274
- """Test all-to-all without self-connections."""
275
- layer = brainstate.nn.AllToAll((5,), (5,), include_self=False)
276
- layer.weight.value = {'weight': jnp.eye(5)}
277
- x = jnp.ones((1, 5))
278
- y = layer(x)
279
- # Diagonal should be zeroed out
280
- expected = jnp.zeros((1, 5))
281
- self.assertTrue(jnp.allclose(y, expected))
282
-
283
- def test_all_to_all_scalar_weight(self):
284
- """Test all-to-all with scalar weight."""
285
- layer = brainstate.nn.AllToAll((5,), (5,), w_init=braintools.init.Constant(2.0))
286
- # Override with scalar
287
- layer.weight.value = {'weight': 2.0}
288
- x = jnp.ones((1, 5))
289
- y = layer(x)
290
- expected = jnp.ones((1, 5)) * 10.0 # sum of 5 ones * 2
291
- self.assertTrue(jnp.allclose(y, expected))
292
-
293
- def test_all_to_all_with_bias(self):
294
- """Test all-to-all with bias."""
295
- layer = brainstate.nn.AllToAll(
296
- (5,), (5,),
297
- b_init=braintools.init.Constant(1.0)
298
- )
299
- self.assertIn('bias', layer.weight.value)
300
- x = brainstate.random.random((3, 5))
301
- y = layer(x)
302
- self.assertEqual(y.shape, (3, 5))
303
-
304
- def test_all_to_all_with_units(self):
305
- """Test all-to-all with brainunit quantities."""
306
- layer = brainstate.nn.AllToAll((5,), (5,))
307
- layer.weight.value = {'weight': jnp.ones((5, 5)) * u.siemens}
308
- x = jnp.ones((1, 5)) * u.volt
309
- y = layer(x)
310
- # Should have units of siemens * volt
311
- self.assertTrue(hasattr(y, 'unit') or isinstance(y, u.Quantity))
312
-
313
-
314
- class TestOneToOne(parameterized.TestCase):
315
- """Test suite for OneToOne connection layer."""
316
-
317
- @parameterized.parameters(5, 10, 20)
318
- def test_one_to_one_shapes(self, size):
319
- """Test output shapes."""
320
- layer = brainstate.nn.OneToOne((size,))
321
- x = brainstate.random.random((3, size))
322
- y = layer(x)
323
- self.assertEqual(y.shape, (3, size))
324
-
325
- def test_one_to_one_computation(self):
326
- """Test element-wise multiplication."""
327
- layer = brainstate.nn.OneToOne((5,), b_init=None)
328
- layer.weight.value = {'weight': jnp.array([1.0, 2.0, 3.0, 4.0, 5.0])}
329
- x = jnp.ones((1, 5))
330
- y = layer(x)
331
- expected = jnp.array([[1.0, 2.0, 3.0, 4.0, 5.0]])
332
- self.assertTrue(jnp.allclose(y, expected))
333
-
334
- def test_one_to_one_with_bias(self):
335
- """Test one-to-one with bias."""
336
- layer = brainstate.nn.OneToOne((5,), b_init=braintools.init.Constant(1.0))
337
- self.assertIn('bias', layer.weight.value)
338
- layer.weight.value = {
339
- 'weight': jnp.ones(5),
340
- 'bias': jnp.ones(5)
341
- }
342
- x = jnp.ones((1, 5))
343
- y = layer(x)
344
- expected = jnp.ones((1, 5)) * 2.0 # 1*1 + 1
345
- self.assertTrue(jnp.allclose(y, expected))
346
-
347
- def test_one_to_one_without_bias(self):
348
- """Test one-to-one without bias."""
349
- layer = brainstate.nn.OneToOne((5,), b_init=None)
350
- self.assertNotIn('bias', layer.weight.value)
351
-
352
- def test_one_to_one_zero_weights(self):
353
- """Test one-to-one with zero weights."""
354
- layer = brainstate.nn.OneToOne((5,), w_init=braintools.init.ZeroInit(), b_init=None)
355
- x = jnp.ones((1, 5))
356
- y = layer(x)
357
- expected = jnp.zeros((1, 5))
358
- self.assertTrue(jnp.allclose(y, expected))
359
-
360
-
361
- class TestLoRA(parameterized.TestCase):
362
- """Test suite for LoRA layer."""
363
-
364
- @parameterized.product(
365
- in_features=[10, 20],
366
- lora_rank=[2, 4],
367
- out_features=[5, 10]
368
- )
369
- def test_lora_shapes(self, in_features, lora_rank, out_features):
370
- """Test output shapes with various configurations."""
371
- layer = brainstate.nn.LoRA(in_features, lora_rank, out_features)
372
- x = brainstate.random.random((3, in_features))
373
- y = layer(x)
374
- self.assertEqual(y.shape, (3, out_features))
375
-
376
- def test_lora_parameter_count(self):
377
- """Test that LoRA has correct number of parameters."""
378
- in_features, lora_rank, out_features = 10, 2, 5
379
- layer = brainstate.nn.LoRA(in_features, lora_rank, out_features)
380
- # lora_a: 10 x 2, lora_b: 2 x 5
381
- self.assertEqual(layer.weight.value['lora_a'].shape, (10, 2))
382
- self.assertEqual(layer.weight.value['lora_b'].shape, (2, 5))
383
-
384
- def test_lora_standalone(self):
385
- """Test standalone LoRA without base module."""
386
- layer = brainstate.nn.LoRA(5, 2, 3)
387
- layer.weight.value = {
388
- 'lora_a': jnp.ones((5, 2)),
389
- 'lora_b': jnp.ones((2, 3))
390
- }
391
- x = jnp.ones((1, 5))
392
- y = layer(x)
393
- # Each output: sum(5 ones) * 2 = 10
394
- expected = jnp.ones((1, 3)) * 10.0
395
- self.assertTrue(jnp.allclose(y, expected))
396
-
397
- def test_lora_with_base_module(self):
398
- """Test LoRA wrapped around base module."""
399
- base = brainstate.nn.Linear(5, 3, b_init=None)
400
- base.weight.value = {'weight': jnp.ones((5, 3))}
401
- layer = brainstate.nn.LoRA(5, 2, 3, base_module=base)
402
- layer.weight.value = {
403
- 'lora_a': jnp.ones((5, 2)),
404
- 'lora_b': jnp.ones((2, 3))
405
- }
406
- x = jnp.ones((1, 5))
407
- y = layer(x)
408
- # LoRA output: 10, Base output: 5, Total: 15
409
- expected = jnp.ones((1, 3)) * 15.0
410
- self.assertTrue(jnp.allclose(y, expected))
411
-
412
- def test_lora_base_module_attribute(self):
413
- """Test that base_module attribute is set correctly."""
414
- base = brainstate.nn.Linear(5, 3)
415
- layer = brainstate.nn.LoRA(5, 2, 3, base_module=base)
416
- self.assertEqual(layer.base_module, base)
417
-
418
- def test_lora_without_base_module(self):
419
- """Test that base_module is None when not provided."""
420
- layer = brainstate.nn.LoRA(5, 2, 3)
421
- self.assertIsNone(layer.base_module)
422
-
423
- def test_lora_size_attributes(self):
424
- """Test that size attributes are set correctly."""
425
- layer = brainstate.nn.LoRA(10, 3, 5, in_size=(10,))
426
- self.assertEqual(layer.in_features, 10)
427
- self.assertEqual(layer.out_features, 5)
428
- self.assertEqual(layer.in_size[0], 10)
429
- self.assertEqual(layer.out_size[0], 5)
430
-
431
- def test_lora_custom_initialization(self):
432
- """Test LoRA with custom initialization."""
433
- layer = brainstate.nn.LoRA(
434
- 5, 2, 3,
435
- kernel_init=braintools.init.ZeroInit()
436
- )
437
- self.assertTrue(jnp.allclose(layer.weight.value['lora_a'], 0.0))
438
- self.assertTrue(jnp.allclose(layer.weight.value['lora_b'], 0.0))
439
-
440
-
441
- class TestLinearEdgeCases(unittest.TestCase):
442
- """Test edge cases and error conditions for linear layers."""
443
-
444
- def test_linear_size_mismatch(self):
445
- """Test that size mismatch raises error."""
446
- with self.assertRaises(AssertionError):
447
- # Mismatched first dimensions
448
- brainstate.nn.Linear((5, 10), (3, 5))
449
-
450
- def test_linear_1d_sizes(self):
451
- """Test with 1D size specifications."""
452
- layer = brainstate.nn.Linear(10, 5)
453
- x = brainstate.random.random((3, 10))
454
- y = layer(x)
455
- self.assertEqual(y.shape, (3, 5))
456
-
457
- def test_signed_linear_size_mismatch(self):
458
- """Test SignedWLinear with size mismatch."""
459
- with self.assertRaises(AssertionError):
460
- brainstate.nn.SignedWLinear((5, 10), (3, 5))
461
-
462
- def test_all_to_all_size_mismatch(self):
463
- """Test AllToAll with size mismatch."""
464
- with self.assertRaises(AssertionError):
465
- brainstate.nn.AllToAll((5, 10), (3, 5))
466
-
467
- def test_sparse_linear_invalid_input(self):
468
- """Test SparseLinear with invalid sparse matrix."""
469
- with self.assertRaises(AssertionError):
470
- # Not a SparseMatrix
471
- brainstate.nn.SparseLinear(jnp.ones((5, 5)))
472
-
473
-
474
- if __name__ == '__main__':
475
- unittest.main()
1
+ # Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ import unittest
17
+
18
+ import brainunit as u
19
+ import jax.numpy as jnp
20
+ from absl.testing import parameterized
21
+
22
+ import brainstate
23
+ import braintools
24
+
25
+
26
+ class TestLinear(parameterized.TestCase):
27
+ """Test suite for Linear layer."""
28
+
29
+ @parameterized.product(
30
+ size=[(10,), (20, 10), (5, 8, 10)],
31
+ num_out=[20, 5]
32
+ )
33
+ def test_linear_shapes(self, size, num_out):
34
+ """Test output shapes with various input dimensions."""
35
+ layer = brainstate.nn.Linear(10, num_out)
36
+ x = brainstate.random.random(size)
37
+ y = layer(x)
38
+ self.assertEqual(y.shape, size[:-1] + (num_out,))
39
+
40
+ def test_linear_with_bias(self):
41
+ """Test linear layer with bias."""
42
+ layer = brainstate.nn.Linear(10, 5)
43
+ self.assertIn('bias', layer.weight.value)
44
+ x = brainstate.random.random((3, 10))
45
+ y = layer(x)
46
+ self.assertEqual(y.shape, (3, 5))
47
+
48
+ def test_linear_without_bias(self):
49
+ """Test linear layer without bias."""
50
+ layer = brainstate.nn.Linear(10, 5, b_init=None)
51
+ self.assertNotIn('bias', layer.weight.value)
52
+ x = brainstate.random.random((3, 10))
53
+ y = layer(x)
54
+ self.assertEqual(y.shape, (3, 5))
55
+
56
+ def test_linear_with_mask(self):
57
+ """Test linear layer with weight mask."""
58
+ w_mask = jnp.ones((10, 5))
59
+ w_mask = w_mask.at[:, 0].set(0) # mask out first output column
60
+ layer = brainstate.nn.Linear(10, 5, w_mask=w_mask)
61
+ x = jnp.ones((3, 10))
62
+ y = layer(x)
63
+ self.assertEqual(y.shape, (3, 5))
64
+
65
+ def test_linear_weight_initialization(self):
66
+ """Test custom weight initialization."""
67
+ layer = brainstate.nn.Linear(
68
+ 10, 5,
69
+ w_init=braintools.init.ZeroInit(),
70
+ b_init=braintools.init.Constant(1.0)
71
+ )
72
+ self.assertTrue(jnp.allclose(layer.weight.value['weight'], 0.0))
73
+ self.assertTrue(jnp.allclose(layer.weight.value['bias'], 1.0))
74
+
75
+ def test_linear_computation(self):
76
+ """Test that computation is correct."""
77
+ layer = brainstate.nn.Linear(3, 2, b_init=None)
78
+ # Set known weights
79
+ layer.weight.value = {'weight': jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])}
80
+ x = jnp.array([[1.0, 2.0, 3.0]])
81
+ y = layer(x)
82
+ expected = jnp.array([[4.0, 5.0]]) # [1*1+2*0+3*1, 1*0+2*1+3*1]
83
+ self.assertTrue(jnp.allclose(y, expected))
84
+
85
+
86
+ class TestSignedWLinear(parameterized.TestCase):
87
+ """Test suite for SignedWLinear layer."""
88
+
89
+ @parameterized.product(
90
+ in_size=[10, 20],
91
+ out_size=[5, 10]
92
+ )
93
+ def test_signed_linear_shapes(self, in_size, out_size):
94
+ """Test output shapes."""
95
+ layer = brainstate.nn.SignedWLinear((in_size,), (out_size,))
96
+ x = brainstate.random.random((3, in_size))
97
+ y = layer(x)
98
+ self.assertEqual(y.shape, (3, out_size))
99
+
100
+ def test_signed_linear_positive_weights(self):
101
+ """Test that weights are positive when w_sign is None."""
102
+ layer = brainstate.nn.SignedWLinear((5,), (3,))
103
+ # Set weights to negative values
104
+ layer.weight.value = jnp.array([[-1.0, -2.0, -3.0]] * 5)
105
+ x = jnp.ones((1, 5))
106
+ y = layer(x)
107
+ # Output should be positive since abs is applied
108
+ self.assertTrue(jnp.all(y > 0))
109
+
110
+ def test_signed_linear_with_sign_matrix(self):
111
+ """Test signed linear with custom sign matrix."""
112
+ w_sign = jnp.ones((5, 3)) * -1.0 # all negative
113
+ layer = brainstate.nn.SignedWLinear((5,), (3,), w_sign=w_sign)
114
+ layer.weight.value = jnp.ones((5, 3))
115
+ x = jnp.ones((1, 5))
116
+ y = layer(x)
117
+ # All outputs should be negative
118
+ self.assertTrue(jnp.all(y < 0))
119
+
120
+ def test_signed_linear_mixed_signs(self):
121
+ """Test with mixed positive and negative signs."""
122
+ w_sign = jnp.array([[1.0, -1.0], [1.0, -1.0], [-1.0, 1.0]])
123
+ layer = brainstate.nn.SignedWLinear((3,), (2,), w_sign=w_sign)
124
+ layer.weight.value = jnp.ones((3, 2))
125
+ x = jnp.array([[1.0, 1.0, 1.0]])
126
+ y = layer(x)
127
+ expected = jnp.array([[1.0, -1.0]]) # [1-1, -1+1]
128
+ self.assertTrue(jnp.allclose(y, expected))
129
+
130
+
131
+ class TestScaledWSLinear(parameterized.TestCase):
132
+ """Test suite for ScaledWSLinear layer."""
133
+
134
+ @parameterized.product(
135
+ in_size=[10, 20],
136
+ out_size=[5, 10],
137
+ ws_gain=[True, False]
138
+ )
139
+ def test_scaled_ws_shapes(self, in_size, out_size, ws_gain):
140
+ """Test output shapes with and without gain."""
141
+ layer = brainstate.nn.ScaledWSLinear((in_size,), (out_size,), ws_gain=ws_gain)
142
+ x = brainstate.random.random((3, in_size))
143
+ y = layer(x)
144
+ self.assertEqual(y.shape, (3, out_size))
145
+
146
+ def test_scaled_ws_with_gain(self):
147
+ """Test that gain parameter exists when ws_gain=True."""
148
+ layer = brainstate.nn.ScaledWSLinear((10,), (5,), ws_gain=True)
149
+ self.assertIn('gain', layer.weight.value)
150
+
151
+ def test_scaled_ws_without_gain(self):
152
+ """Test that gain parameter is absent when ws_gain=False."""
153
+ layer = brainstate.nn.ScaledWSLinear((10,), (5,), ws_gain=False)
154
+ self.assertNotIn('gain', layer.weight.value)
155
+
156
+ def test_scaled_ws_with_mask(self):
157
+ """Test scaled WS linear with weight mask."""
158
+ w_mask = jnp.ones((10, 1))
159
+ layer = brainstate.nn.ScaledWSLinear((10,), (5,), w_mask=w_mask)
160
+ x = brainstate.random.random((3, 10))
161
+ y = layer(x)
162
+ self.assertEqual(y.shape, (3, 5))
163
+
164
+ def test_scaled_ws_without_bias(self):
165
+ """Test scaled WS linear without bias."""
166
+ layer = brainstate.nn.ScaledWSLinear((10,), (5,), b_init=None)
167
+ self.assertNotIn('bias', layer.weight.value)
168
+ x = brainstate.random.random((3, 10))
169
+ y = layer(x)
170
+ self.assertEqual(y.shape, (3, 5))
171
+
172
+ def test_scaled_ws_eps_parameter(self):
173
+ """Test that eps parameter is stored correctly."""
174
+ eps_value = 1e-5
175
+ layer = brainstate.nn.ScaledWSLinear((10,), (5,), eps=eps_value)
176
+ self.assertEqual(layer.eps, eps_value)
177
+
178
+
179
class TestSparseLinear(unittest.TestCase):
    """Tests for SparseLinear across CSR, CSC and COO sparse formats."""

    @staticmethod
    def _make_dense():
        # Random (10, 20) matrix thinned to roughly 10% nonzeros so the
        # sparse representations are meaningfully sparse.
        dense = brainstate.random.rand(10, 20)
        return dense * (dense > 0.9)

    def _check_matches_dense(self, layer, dense):
        # The sparse layer must reproduce the dense matmul for both a
        # single vector and a batched input.
        vec = brainstate.random.rand(10)
        self.assertTrue(u.math.allclose(layer(vec), vec @ dense))
        batch = brainstate.random.rand(5, 10)
        self.assertTrue(u.math.allclose(layer(batch), batch @ dense))

    def test_sparse_csr(self):
        """SparseLinear backed by a CSR matrix matches dense matmul."""
        dense = self._make_dense()
        layer = brainstate.nn.SparseLinear(u.sparse.CSR.fromdense(dense))
        self._check_matches_dense(layer, dense)

    def test_sparse_csc(self):
        """SparseLinear backed by a CSC matrix matches dense matmul."""
        dense = self._make_dense()
        layer = brainstate.nn.SparseLinear(u.sparse.CSC.fromdense(dense))
        self._check_matches_dense(layer, dense)

    def test_sparse_coo(self):
        """SparseLinear backed by a COO matrix matches dense matmul."""
        dense = self._make_dense()
        layer = brainstate.nn.SparseLinear(u.sparse.COO.fromdense(dense))
        self._check_matches_dense(layer, dense)

    def test_sparse_with_bias(self):
        """A constant bias of 0.5 is added on top of the sparse matmul."""
        dense = self._make_dense()
        layer = brainstate.nn.SparseLinear(
            u.sparse.CSR.fromdense(dense),
            b_init=braintools.init.Constant(0.5),
            in_size=(10,)
        )
        self.assertIn('bias', layer.weight.value)
        batch = brainstate.random.rand(5, 10)
        self.assertTrue(u.math.allclose(layer(batch), batch @ dense + 0.5))

    def test_sparse_without_bias(self):
        """b_init=None must leave the parameter dict without a 'bias' entry."""
        layer = brainstate.nn.SparseLinear(
            u.sparse.CSR.fromdense(self._make_dense()), b_init=None
        )
        self.assertNotIn('bias', layer.weight.value)
249
class TestAllToAll(parameterized.TestCase):
    """Behavioral tests for the AllToAll dense connection layer."""

    @parameterized.product(
        in_size=[10, 20],
        out_size=[10, 15],
        include_self=[True, False]
    )
    def test_all_to_all_shapes(self, in_size, out_size, include_self):
        """A batch of 3 inputs must produce (3, out_size) for every configuration."""
        conn = brainstate.nn.AllToAll((in_size,), (out_size,), include_self=include_self)
        out = conn(brainstate.random.random((3, in_size)))
        self.assertEqual(out.shape, (3, out_size))

    def test_all_to_all_with_self(self):
        """With self-connections and an identity weight, input passes through unchanged."""
        conn = brainstate.nn.AllToAll((5,), (5,), include_self=True)
        conn.weight.value = {'weight': jnp.eye(5)}
        out = conn(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.ones((1, 5))))

    def test_all_to_all_without_self(self):
        """Without self-connections the identity weight's diagonal is masked, giving zeros."""
        conn = brainstate.nn.AllToAll((5,), (5,), include_self=False)
        conn.weight.value = {'weight': jnp.eye(5)}
        out = conn(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.zeros((1, 5))))

    def test_all_to_all_scalar_weight(self):
        """A scalar weight scales the summed input: 5 ones * 2 -> 10 per unit."""
        conn = brainstate.nn.AllToAll((5,), (5,), w_init=braintools.init.Constant(2.0))
        # Replace the initialized kernel with a bare scalar weight.
        conn.weight.value = {'weight': 2.0}
        out = conn(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.ones((1, 5)) * 10.0))

    def test_all_to_all_with_bias(self):
        """Supplying b_init must create a 'bias' entry and keep the output shape."""
        conn = brainstate.nn.AllToAll(
            (5,), (5,),
            b_init=braintools.init.Constant(1.0)
        )
        self.assertIn('bias', conn.weight.value)
        out = conn(brainstate.random.random((3, 5)))
        self.assertEqual(out.shape, (3, 5))

    def test_all_to_all_with_units(self):
        """Physical units should propagate: siemens weights * volt inputs keep a unit."""
        conn = brainstate.nn.AllToAll((5,), (5,))
        conn.weight.value = {'weight': jnp.ones((5, 5)) * u.siemens}
        out = conn(jnp.ones((1, 5)) * u.volt)
        self.assertTrue(hasattr(out, 'unit') or isinstance(out, u.Quantity))
314
class TestOneToOne(parameterized.TestCase):
    """Behavioral tests for the OneToOne element-wise connection layer."""

    @parameterized.parameters(5, 10, 20)
    def test_one_to_one_shapes(self, size):
        """Output shape mirrors the input shape for any layer size."""
        conn = brainstate.nn.OneToOne((size,))
        out = conn(brainstate.random.random((3, size)))
        self.assertEqual(out.shape, (3, size))

    def test_one_to_one_computation(self):
        """Each input element is multiplied by its own dedicated weight."""
        conn = brainstate.nn.OneToOne((5,), b_init=None)
        conn.weight.value = {'weight': jnp.array([1.0, 2.0, 3.0, 4.0, 5.0])}
        out = conn(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.array([[1.0, 2.0, 3.0, 4.0, 5.0]])))

    def test_one_to_one_with_bias(self):
        """With unit weights and unit bias, every output equals 1*1 + 1 = 2."""
        conn = brainstate.nn.OneToOne((5,), b_init=braintools.init.Constant(1.0))
        self.assertIn('bias', conn.weight.value)
        conn.weight.value = {
            'weight': jnp.ones(5),
            'bias': jnp.ones(5)
        }
        out = conn(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.ones((1, 5)) * 2.0))

    def test_one_to_one_without_bias(self):
        """b_init=None must leave the parameter dict without a 'bias' entry."""
        conn = brainstate.nn.OneToOne((5,), b_init=None)
        self.assertNotIn('bias', conn.weight.value)

    def test_one_to_one_zero_weights(self):
        """Zero-initialized weights map every input to zero."""
        conn = brainstate.nn.OneToOne((5,), w_init=braintools.init.ZeroInit(), b_init=None)
        out = conn(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.zeros((1, 5))))
361
class TestLoRA(parameterized.TestCase):
    """Behavioral tests for the LoRA low-rank adaptation layer."""

    @parameterized.product(
        in_features=[10, 20],
        lora_rank=[2, 4],
        out_features=[5, 10]
    )
    def test_lora_shapes(self, in_features, lora_rank, out_features):
        """Output must be (batch, out_features) for every configuration."""
        adapter = brainstate.nn.LoRA(in_features, lora_rank, out_features)
        out = adapter(brainstate.random.random((3, in_features)))
        self.assertEqual(out.shape, (3, out_features))

    def test_lora_parameter_count(self):
        """lora_a has shape (in_features, rank); lora_b has shape (rank, out_features)."""
        adapter = brainstate.nn.LoRA(10, 2, 5)
        self.assertEqual(adapter.weight.value['lora_a'].shape, (10, 2))
        self.assertEqual(adapter.weight.value['lora_b'].shape, (2, 5))

    def test_lora_standalone(self):
        """All-ones factors: each output = sum(5 ones) * rank 2 = 10."""
        adapter = brainstate.nn.LoRA(5, 2, 3)
        adapter.weight.value = {
            'lora_a': jnp.ones((5, 2)),
            'lora_b': jnp.ones((2, 3))
        }
        out = adapter(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.ones((1, 3)) * 10.0))

    def test_lora_with_base_module(self):
        """The LoRA contribution (10) adds onto the base module's output (5) -> 15."""
        base = brainstate.nn.Linear(5, 3, b_init=None)
        base.weight.value = {'weight': jnp.ones((5, 3))}
        adapter = brainstate.nn.LoRA(5, 2, 3, base_module=base)
        adapter.weight.value = {
            'lora_a': jnp.ones((5, 2)),
            'lora_b': jnp.ones((2, 3))
        }
        out = adapter(jnp.ones((1, 5)))
        self.assertTrue(jnp.allclose(out, jnp.ones((1, 3)) * 15.0))

    def test_lora_base_module_attribute(self):
        """A provided base module is stored on the layer as-is."""
        base = brainstate.nn.Linear(5, 3)
        adapter = brainstate.nn.LoRA(5, 2, 3, base_module=base)
        self.assertEqual(adapter.base_module, base)

    def test_lora_without_base_module(self):
        """Omitting base_module leaves the attribute as None."""
        self.assertIsNone(brainstate.nn.LoRA(5, 2, 3).base_module)

    def test_lora_size_attributes(self):
        """Feature counts and size tuples must reflect the constructor arguments."""
        adapter = brainstate.nn.LoRA(10, 3, 5, in_size=(10,))
        self.assertEqual(adapter.in_features, 10)
        self.assertEqual(adapter.out_features, 5)
        self.assertEqual(adapter.in_size[0], 10)
        self.assertEqual(adapter.out_size[0], 5)

    def test_lora_custom_initialization(self):
        """A zero kernel initializer must zero both low-rank factors."""
        adapter = brainstate.nn.LoRA(
            5, 2, 3,
            kernel_init=braintools.init.ZeroInit()
        )
        self.assertTrue(jnp.allclose(adapter.weight.value['lora_a'], 0.0))
        self.assertTrue(jnp.allclose(adapter.weight.value['lora_b'], 0.0))
441
class TestLinearEdgeCases(unittest.TestCase):
    """Error-condition and edge-case tests shared by the linear-layer family."""

    def test_linear_size_mismatch(self):
        """Mismatched leading dimensions must be rejected at construction time."""
        self.assertRaises(AssertionError, brainstate.nn.Linear, (5, 10), (3, 5))

    def test_linear_1d_sizes(self):
        """Plain integers are accepted as 1-D size specifications."""
        model = brainstate.nn.Linear(10, 5)
        out = model(brainstate.random.random((3, 10)))
        self.assertEqual(out.shape, (3, 5))

    def test_signed_linear_size_mismatch(self):
        """SignedWLinear rejects mismatched leading dimensions."""
        self.assertRaises(AssertionError, brainstate.nn.SignedWLinear, (5, 10), (3, 5))

    def test_all_to_all_size_mismatch(self):
        """AllToAll rejects mismatched leading dimensions."""
        self.assertRaises(AssertionError, brainstate.nn.AllToAll, (5, 10), (3, 5))

    def test_sparse_linear_invalid_input(self):
        """A plain dense array is not a SparseMatrix and must be rejected."""
        self.assertRaises(AssertionError, brainstate.nn.SparseLinear, jnp.ones((5, 5)))
474
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()