brainstate-0.2.0-py2.py3-none-any.whl → brainstate-0.2.1-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. brainstate/__init__.py +169 -169
  2. brainstate/_compatible_import.py +340 -340
  3. brainstate/_compatible_import_test.py +681 -681
  4. brainstate/_deprecation.py +210 -210
  5. brainstate/_deprecation_test.py +2319 -2319
  6. brainstate/_error.py +45 -45
  7. brainstate/_state.py +1652 -1652
  8. brainstate/_state_test.py +52 -52
  9. brainstate/_utils.py +47 -47
  10. brainstate/environ.py +1495 -1495
  11. brainstate/environ_test.py +1223 -1223
  12. brainstate/graph/__init__.py +22 -22
  13. brainstate/graph/_node.py +240 -240
  14. brainstate/graph/_node_test.py +589 -589
  15. brainstate/graph/_operation.py +1624 -1624
  16. brainstate/graph/_operation_test.py +1147 -1147
  17. brainstate/mixin.py +1433 -1433
  18. brainstate/mixin_test.py +1017 -1017
  19. brainstate/nn/__init__.py +137 -137
  20. brainstate/nn/_activations.py +1100 -1100
  21. brainstate/nn/_activations_test.py +354 -354
  22. brainstate/nn/_collective_ops.py +633 -633
  23. brainstate/nn/_collective_ops_test.py +774 -774
  24. brainstate/nn/_common.py +226 -226
  25. brainstate/nn/_common_test.py +154 -154
  26. brainstate/nn/_conv.py +2010 -2010
  27. brainstate/nn/_conv_test.py +849 -849
  28. brainstate/nn/_delay.py +575 -575
  29. brainstate/nn/_delay_test.py +243 -243
  30. brainstate/nn/_dropout.py +618 -618
  31. brainstate/nn/_dropout_test.py +477 -477
  32. brainstate/nn/_dynamics.py +1267 -1267
  33. brainstate/nn/_dynamics_test.py +67 -67
  34. brainstate/nn/_elementwise.py +1298 -1298
  35. brainstate/nn/_elementwise_test.py +829 -829
  36. brainstate/nn/_embedding.py +408 -408
  37. brainstate/nn/_embedding_test.py +156 -156
  38. brainstate/nn/_event_fixedprob.py +233 -233
  39. brainstate/nn/_event_fixedprob_test.py +115 -115
  40. brainstate/nn/_event_linear.py +83 -83
  41. brainstate/nn/_event_linear_test.py +121 -121
  42. brainstate/nn/_exp_euler.py +254 -254
  43. brainstate/nn/_exp_euler_test.py +377 -377
  44. brainstate/nn/_linear.py +744 -744
  45. brainstate/nn/_linear_test.py +475 -475
  46. brainstate/nn/_metrics.py +1070 -1070
  47. brainstate/nn/_metrics_test.py +611 -611
  48. brainstate/nn/_module.py +384 -384
  49. brainstate/nn/_module_test.py +40 -40
  50. brainstate/nn/_normalizations.py +1334 -1334
  51. brainstate/nn/_normalizations_test.py +699 -699
  52. brainstate/nn/_paddings.py +1020 -1020
  53. brainstate/nn/_paddings_test.py +722 -722
  54. brainstate/nn/_poolings.py +2239 -2239
  55. brainstate/nn/_poolings_test.py +952 -952
  56. brainstate/nn/_rnns.py +946 -946
  57. brainstate/nn/_rnns_test.py +592 -592
  58. brainstate/nn/_utils.py +216 -216
  59. brainstate/nn/_utils_test.py +401 -401
  60. brainstate/nn/init.py +809 -809
  61. brainstate/nn/init_test.py +180 -180
  62. brainstate/random/__init__.py +270 -270
  63. brainstate/random/_rand_funs.py +3938 -3938
  64. brainstate/random/_rand_funs_test.py +640 -640
  65. brainstate/random/_rand_seed.py +675 -675
  66. brainstate/random/_rand_seed_test.py +48 -48
  67. brainstate/random/_rand_state.py +1617 -1617
  68. brainstate/random/_rand_state_test.py +551 -551
  69. brainstate/transform/__init__.py +59 -59
  70. brainstate/transform/_ad_checkpoint.py +176 -176
  71. brainstate/transform/_ad_checkpoint_test.py +49 -49
  72. brainstate/transform/_autograd.py +1025 -1025
  73. brainstate/transform/_autograd_test.py +1289 -1289
  74. brainstate/transform/_conditions.py +316 -316
  75. brainstate/transform/_conditions_test.py +220 -220
  76. brainstate/transform/_error_if.py +94 -94
  77. brainstate/transform/_error_if_test.py +52 -52
  78. brainstate/transform/_eval_shape.py +145 -145
  79. brainstate/transform/_eval_shape_test.py +38 -38
  80. brainstate/transform/_jit.py +399 -399
  81. brainstate/transform/_jit_test.py +143 -143
  82. brainstate/transform/_loop_collect_return.py +675 -675
  83. brainstate/transform/_loop_collect_return_test.py +58 -58
  84. brainstate/transform/_loop_no_collection.py +283 -283
  85. brainstate/transform/_loop_no_collection_test.py +50 -50
  86. brainstate/transform/_make_jaxpr.py +2016 -2016
  87. brainstate/transform/_make_jaxpr_test.py +1510 -1510
  88. brainstate/transform/_mapping.py +529 -529
  89. brainstate/transform/_mapping_test.py +194 -194
  90. brainstate/transform/_progress_bar.py +255 -255
  91. brainstate/transform/_random.py +171 -171
  92. brainstate/transform/_unvmap.py +256 -256
  93. brainstate/transform/_util.py +286 -286
  94. brainstate/typing.py +837 -837
  95. brainstate/typing_test.py +780 -780
  96. brainstate/util/__init__.py +27 -27
  97. brainstate/util/_others.py +1024 -1024
  98. brainstate/util/_others_test.py +962 -962
  99. brainstate/util/_pretty_pytree.py +1301 -1301
  100. brainstate/util/_pretty_pytree_test.py +675 -675
  101. brainstate/util/_pretty_repr.py +462 -462
  102. brainstate/util/_pretty_repr_test.py +696 -696
  103. brainstate/util/filter.py +945 -945
  104. brainstate/util/filter_test.py +911 -911
  105. brainstate/util/struct.py +910 -910
  106. brainstate/util/struct_test.py +602 -602
  107. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/METADATA +108 -108
  108. brainstate-0.2.1.dist-info/RECORD +111 -0
  109. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/licenses/LICENSE +202 -202
  110. brainstate-0.2.0.dist-info/RECORD +0 -111
  111. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/WHEEL +0 -0
  112. {brainstate-0.2.0.dist-info → brainstate-0.2.1.dist-info}/top_level.txt +0 -0
brainstate/transform/_jit.py (file content identical in 0.2.0 and 0.2.1)
@@ -1,399 +1,399 @@
# Copyright 2024 BrainX Ecosystem Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import functools
from collections.abc import Iterable, Sequence
from typing import (Any, Callable, Union)

import jax
from jax._src import sharding_impls

from brainstate._compatible_import import Device
from brainstate._utils import set_module_as
from brainstate.typing import Missing
from ._make_jaxpr import StatefulFunction, _ensure_index_tuple

__all__ = ['jit']


class JittedFunction(Callable):
    """
    A wrapped version of ``fun``, set up for just-in-time compilation.
    """
    __module__ = 'brainstate.transform'

    origin_fun: Callable # the original function
    stateful_fun: StatefulFunction # the stateful function for extracting states
    jitted_fun: jax.stages.Wrapped # the jitted function
    clear_cache: Callable # clear the cache of the jitted function
    eval_shape: Callable # evaluate the shape of the jitted function
    compile: Callable # lower the jitted function
    trace: Callable # trace the jitted function

    def __call__(self, *args, **kwargs):
        pass


def _get_jitted_fun(
    fun: Callable,
    in_shardings,
    out_shardings,
    static_argnums,
    donate_argnums,
    static_argnames,
    donate_argnames,
    keep_unused,
    device,
    backend,
    inline,
    abstracted_axes,
    **kwargs
) -> JittedFunction:
    static_argnums = tuple() if static_argnums is None else _ensure_index_tuple(static_argnums)
    donate_argnums = tuple() if donate_argnums is None else _ensure_index_tuple(donate_argnums)
    fun = StatefulFunction(
        fun,
        static_argnums=static_argnums,
        static_argnames=static_argnames,
        abstracted_axes=abstracted_axes,
        name='jit',
        return_only_write=True
    )
    jit_fun = jax.jit(
        fun.jaxpr_call,
        static_argnums=tuple(i + 1 for i in static_argnums),
        static_argnames=static_argnames,
        donate_argnums=tuple(i + 1 for i in donate_argnums),
        donate_argnames=donate_argnames,
        keep_unused=keep_unused,
        device=device,
        backend=backend,
        inline=inline,
        in_shardings=in_shardings,
        out_shardings=out_shardings,
        abstracted_axes=abstracted_axes,
        **kwargs
    )

    @functools.wraps(fun.fun)
    def jitted_fun(*args, **params):
        if jax.config.jax_disable_jit:
            return fun.fun(*args, **params)

        # compile the function and get the state trace
        state_trace = fun.get_state_trace(*args, **params, compile_if_miss=True)
        read_state_vals = state_trace.get_read_state_values(True)

        # call the jitted function
        write_state_vals, outs = jit_fun(state_trace.get_state_values(), *args, **params)

        # write the state values back to the states
        state_trace.assign_state_vals_v2(read_state_vals, write_state_vals)
        return outs

    def clear_cache():
        """
        Clear the cache of the jitted function.
        """
        # clear the cache of the stateful function
        fun.clear_cache()
        try:
            # clear the cache of the jitted function
            jit_fun.clear_cache()
        except AttributeError:
            pass

    def eval_shape():
        raise NotImplementedError

    def trace():
        """Trace this function explicitly for the given arguments.

        A traced function is staged out of Python and translated to a jaxpr. It is
        ready for lowering but not yet lowered.

        Returns:
            A ``Traced`` instance representing the tracing.
        """
        raise NotImplementedError

    def compile(*args, **params):
        """Lower this function explicitly for the given arguments.

        A lowered function is staged out of Python and translated to a
        compiler's input language, possibly in a backend-dependent
        manner. It is ready for compilation but not yet compiled.

        Returns:
            A ``Lowered`` instance representing the lowering.
        """
        # compile the function and get the state trace
        state_trace = fun.get_state_trace(*args, **params, compile_if_miss=True)
        read_state_vals = state_trace.get_read_state_values(replace_writen=True)
        write_state_vals = state_trace.get_write_state_values(replace_read=True)

        # compile the model
        ret = jit_fun.lower(state_trace.get_state_values(), *args, **params).compile()

        # write the state values back to the states
        state_trace.assign_state_vals_v2(read_state_vals, write_state_vals)
        return ret

    jitted_fun: JittedFunction

    # the original function
    jitted_fun.origin_fun = fun.fun

    # the stateful function for extracting states
    jitted_fun.stateful_fun = fun

    # the jitted function
    jitted_fun.jitted_fun = jit_fun

    # clear cache
    jitted_fun.clear_cache = clear_cache

    # evaluate the shape of the jitted function
    jitted_fun.eval_shape = eval_shape

    # compile the jitted function
    jitted_fun.compile = compile

    # trace the jitted function
    jitted_fun.trace = trace

    return jitted_fun


@set_module_as('brainstate.transform')
def jit(
    fun: Callable | Missing = Missing(),
    in_shardings=sharding_impls.UNSPECIFIED,
    out_shardings=sharding_impls.UNSPECIFIED,
    static_argnums: int | Sequence[int] | None = None,
    donate_argnums: int | Sequence[int] | None = None,
    static_argnames: str | Sequence[str] | None = None,
    donate_argnames: str | Iterable[str] | None = None,
    keep_unused: bool = False,
    device: Device | None = None,
    backend: str | None = None,
    inline: bool = False,
    abstracted_axes: Any | None = None,
    **kwargs
) -> Union[JittedFunction, Callable[[Callable], JittedFunction]]:
    """
    Sets up ``fun`` for just-in-time compilation with XLA.

    Parameters
    ----------
    fun : callable or Missing, optional
        Function to be jitted.
    in_shardings : pytree, optional
        Pytree of structure matching that of arguments to ``fun``,
        with all actual arguments replaced by resource assignment specifications.
        It is also valid to specify a pytree prefix (e.g. one value in place of a
        whole subtree), in which case the leaves get broadcast to all values in
        that subtree.

        The ``in_shardings`` argument is optional. JAX will infer the shardings
        from the input :py:class:`jax.Array`'s and defaults to replicating the input
        if the sharding cannot be inferred.

        The valid resource assignment specifications are:

        - :py:class:`XLACompatibleSharding`, which will decide how the value
          will be partitioned. With this, using a mesh context manager is not
          required.
        - :py:obj:`None`, will give JAX the freedom to choose whatever sharding
          it wants.
          For in_shardings, JAX will mark it as replicated but this behavior
          can change in the future.
          For out_shardings, we will rely on the XLA GSPMD partitioner to
          determine the output shardings.

        The size of every dimension has to be a multiple of the total number of
        resources assigned to it. This is similar to pjit's in_shardings.
    out_shardings : pytree, optional
        Like ``in_shardings``, but specifies resource
        assignment for function outputs. This is similar to pjit's
        out_shardings.

        The ``out_shardings`` argument is optional. If not specified, :py:func:`jax.jit`
        will use GSPMD's sharding propagation to figure out what the sharding of the
        output(s) should be.
    static_argnums : int or sequence of int, optional
        An optional int or collection of ints that specify which
        positional arguments to treat as static (compile-time constant).
        Operations that only depend on static arguments will be constant-folded in
        Python (during tracing), and so the corresponding argument values can be
        any Python object.

        Static arguments should be hashable, meaning both ``__hash__`` and
        ``__eq__`` are implemented, and immutable. Calling the jitted function
        with different values for these constants will trigger recompilation.
        Arguments that are not arrays or containers thereof must be marked as
        static.

        If neither ``static_argnums`` nor ``static_argnames`` is provided, no
        arguments are treated as static. If ``static_argnums`` is not provided but
        ``static_argnames`` is, or vice versa, JAX uses
        :code:`inspect.signature(fun)` to find any positional arguments that
        correspond to ``static_argnames``
        (or vice versa). If both ``static_argnums`` and ``static_argnames`` are
        provided, ``inspect.signature`` is not used, and only actual
        parameters listed in either ``static_argnums`` or ``static_argnames`` will
        be treated as static.
    donate_argnums : int or sequence of int, optional
        Specify which positional argument buffers are "donated" to
        the computation. It is safe to donate argument buffers if you no longer
        need them once the computation has finished. In some cases XLA can make
        use of donated buffers to reduce the amount of memory needed to perform a
        computation, for example recycling one of your input buffers to store a
        result. You should not reuse buffers that you donate to a computation, JAX
        will raise an error if you try to. By default, no argument buffers are
        donated.

        If neither ``donate_argnums`` nor ``donate_argnames`` is provided, no
        arguments are donated. If ``donate_argnums`` is not provided but
        ``donate_argnames`` is, or vice versa, JAX uses
        :code:`inspect.signature(fun)` to find any positional arguments that
        correspond to ``donate_argnames``
        (or vice versa). If both ``donate_argnums`` and ``donate_argnames`` are
        provided, ``inspect.signature`` is not used, and only actual
        parameters listed in either ``donate_argnums`` or ``donate_argnames`` will
        be donated.

        For more details on buffer donation see the
        `FAQ <https://jax.readthedocs.io/en/latest/faq.html#buffer-donation>`_.
    static_argnames : str or sequence of str, optional
        An optional string or collection of strings specifying
        which named arguments are treated as static (compile-time constant).
        Operations that only depend on static arguments will be constant-folded in
        Python (during tracing), and so the corresponding argument values can be
        any Python object.
    donate_argnames : str or iterable of str, optional
        An optional string or collection of strings specifying
        which named arguments are donated to the computation. See the
        comment on ``donate_argnums`` for details. If not
        provided but ``donate_argnums`` is set, the default is based on calling
        ``inspect.signature(fun)`` to find corresponding named arguments.
    keep_unused : bool, default False
        If `False` (the default), arguments that JAX determines to be
        unused by `fun` *may* be dropped from resulting compiled XLA executables.
        Such arguments will not be transferred to the device nor provided to the
        underlying executable. If `True`, unused arguments will not be pruned.
    device : Device, optional
        This is an experimental feature and the API is likely to change.
        Optional, the Device the jitted function will run on. (Available devices
        can be retrieved via :py:func:`jax.devices`.) The default is inherited
        from XLA's DeviceAssignment logic and is usually to use
        ``jax.devices()[0]``.
    backend : str, optional
        This is an experimental feature and the API is likely to change.
        Optional, a string representing the XLA backend: ``'cpu'``, ``'gpu'``, or
        ``'tpu'``.
    inline : bool, default False
        Specify whether this function should be inlined into enclosing
        jaxprs (rather than being represented as an application of the xla_call
        primitive with its own subjaxpr). Default False.
    abstracted_axes : Any, optional
        Abstracted axes specification.
    **kwargs
        Additional keyword arguments passed to the underlying JAX jit function.

    Returns
    -------
    JittedFunction or callable
        A wrapped version of ``fun``, set up for just-in-time compilation.
        The returned object is a :py:class:`JittedFunction` that can be called with the same arguments
        and has the following attributes and methods:

        - ``stateful_fun`` : the stateful function for extracting states, an instance of :py:class:`StatefulFunction`.
        - ``origin_fun(*args, **kwargs)`` : the original function
        - ``jitted_fun(*args, **kwargs)`` : the jitted function
        - ``clear_cache(*args, **kwargs)`` : clear the cache of the jitted function

    Examples
    --------
    Basic usage with a simple function:

    .. code-block:: python

        >>> import brainstate
        >>> import jax.numpy as jnp
        >>>
        >>> @brainstate.transform.jit
        ... def f(x):
        ...     return x ** 2
        >>>
        >>> result = f(jnp.array([1, 2, 3]))

    Using static arguments:

    .. code-block:: python

        >>> @brainstate.transform.jit(static_argnums=(1,))
        ... def g(x, n):
        ...     return x ** n
        >>>
        >>> result = g(jnp.array([1, 2, 3]), 2)

    Manual jitting:

    .. code-block:: python

        >>> def h(x):
        ...     return x * 2
        >>>
        >>> jitted_h = brainstate.transform.jit(h)
        >>> result = jitted_h(jnp.array([1, 2, 3]))
    """

    if isinstance(fun, Missing):
        def wrapper(fun_again: Callable) -> JittedFunction:
            return _get_jitted_fun(
                fun_again,
                in_shardings=in_shardings,
                out_shardings=out_shardings,
                static_argnums=static_argnums,
                donate_argnums=donate_argnums,
                static_argnames=static_argnames,
                donate_argnames=donate_argnames,
                keep_unused=keep_unused,
                device=device,
                backend=backend,
                inline=inline,
                abstracted_axes=abstracted_axes,
                **kwargs
            )

        return wrapper

    else:
        return _get_jitted_fun(
            fun,
            in_shardings,
            out_shardings,
            static_argnums,
            donate_argnums,
            static_argnames,
            donate_argnames,
            keep_unused,
            device,
            backend,
            inline,
            abstracted_axes,
            **kwargs
        )
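For orientation only (not part of the published diff): the sketch below exercises ``brainstate.transform.jit`` together with the ``JittedFunction`` attributes documented in the Returns section above. The ``counter`` state and the ``step`` function are hypothetical examples, and the ``State.value`` read/write interface is assumed from the rest of the brainstate package.

    # Hypothetical usage sketch for brainstate.transform.jit (not taken from the diff).
    # Assumes brainstate.State(...) exposes the usual `.value` attribute for reads and writes.
    import jax.numpy as jnp
    import brainstate

    counter = brainstate.State(jnp.zeros(()))  # hypothetical mutable state

    @brainstate.transform.jit
    def step(x):
        # reads and writes `counter`; the StatefulFunction wrapper tracks both accesses
        counter.value = counter.value + x.sum()
        return x * 2.0

    out = step(jnp.ones(3))   # first call traces, compiles, and caches the executable
    out = step(jnp.ones(3))   # later calls reuse the cached executable

    # attributes documented in the Returns section of the docstring above
    step.stateful_fun         # the StatefulFunction used to extract states
    step.origin_fun           # the original, un-jitted Python function
    step.clear_cache()        # drop cached traces and compiled executables

Clearing the cache simply discards the stored traces, so the next call retraces and recompiles the function.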