keras-rs-nightly 0.2.2.dev202506170335__tar.gz → 0.2.2.dev202506190335__tar.gz
This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of keras-rs-nightly might be problematic.
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/PKG-INFO +1 -1
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/base_distributed_embedding.py +1 -4
- keras_rs_nightly-0.2.2.dev202506190335/keras_rs/src/layers/embedding/jax/checkpoint_utils.py +104 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/jax/config_conversion.py +75 -5
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/version.py +1 -1
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs_nightly.egg-info/PKG-INFO +1 -1
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs_nightly.egg-info/SOURCES.txt +1 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/README.md +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/api/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/api/layers/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/api/losses/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/api/metrics/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/api_export.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/distributed_embedding.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/distributed_embedding_config.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/embed_reduce.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/jax/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/jax/distributed_embedding.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/jax/embedding_lookup.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/jax/embedding_utils.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/tensorflow/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/tensorflow/config_conversion.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/embedding/tensorflow/distributed_embedding.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/feature_interaction/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/feature_interaction/dot_interaction.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/feature_interaction/feature_cross.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/retrieval/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/retrieval/brute_force_retrieval.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/retrieval/hard_negative_mining.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/retrieval/remove_accidental_hits.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/retrieval/retrieval.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/layers/retrieval/sampling_probability_correction.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/pairwise_hinge_loss.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/pairwise_logistic_loss.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/pairwise_loss.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/pairwise_loss_utils.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/pairwise_mean_squared_error.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/losses/pairwise_soft_zero_one_loss.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/dcg.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/mean_average_precision.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/mean_reciprocal_rank.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/ndcg.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/precision_at_k.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/ranking_metric.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/ranking_metrics_utils.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/recall_at_k.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/metrics/utils.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/types.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/utils/__init__.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/utils/doc_string_utils.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs/src/utils/keras_utils.py +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs_nightly.egg-info/dependency_links.txt +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs_nightly.egg-info/requires.txt +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/keras_rs_nightly.egg-info/top_level.txt +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/pyproject.toml +0 -0
- {keras_rs_nightly-0.2.2.dev202506170335 → keras_rs_nightly-0.2.2.dev202506190335}/setup.cfg +0 -0
keras_rs/src/layers/embedding/base_distributed_embedding.py

@@ -174,12 +174,9 @@ class DistributedEmbedding(keras.layers.Layer):
     supported on all backends and accelerators:
 
     - `keras.optimizers.Adagrad`
-    - `keras.optimizers.SGD`
-
-    The following are additionally available when using the TensorFlow backend:
-
     - `keras.optimizers.Adam`
     - `keras.optimizers.Ftrl`
+    - `keras.optimizers.SGD`
 
     Also, not all parameters of the optimizers are supported (e.g. the
     `nesterov` option of `SGD`). An error is raised when an unsupported
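For reference, a minimal sketch of what the updated support list means in practice: all four optimizers below now use the cross-backend path, and unsupported parameters (such as Nesterov momentum on `SGD`) are rejected when the configuration is converted. The learning-rate values here are arbitrary.

```python
import keras

# Optimizers the docstring now lists as supported on all backends.
supported = [
    keras.optimizers.Adagrad(learning_rate=0.05),
    keras.optimizers.Adam(learning_rate=0.001),
    keras.optimizers.Ftrl(learning_rate=0.05),
    keras.optimizers.SGD(learning_rate=0.01),
]

# Still rejected: optimizer parameters the embedding path cannot express,
# e.g. Nesterov momentum on SGD (raises once the config is converted).
rejected = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True)
```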
keras_rs/src/layers/embedding/jax/checkpoint_utils.py (new file)

@@ -0,0 +1,104 @@
"""A Wrapper over orbax CheckpointManager for Keras3 Jax TPU Embeddings."""

from typing import Any

import keras
import orbax.checkpoint as ocp
from etils import epath


class JaxKeras3CheckpointManager(ocp.CheckpointManager):
    """A wrapper over orbax CheckpointManager for Keras3 Jax TPU Embeddings."""

    def __init__(
        self,
        model: keras.Model,
        checkpoint_dir: epath.PathLike,
        max_to_keep: int,
        steps_per_epoch: int = 1,
        **kwargs: Any,
    ):
        options = ocp.CheckpointManagerOptions(
            max_to_keep=max_to_keep, enable_async_checkpointing=False, **kwargs
        )
        self._model = model
        self._steps_per_epoch = steps_per_epoch
        self._checkpoint_dir = checkpoint_dir
        super().__init__(checkpoint_dir, options=options)

    def _get_state(self) -> tuple[dict[str, Any], Any | None]:
        """Gets the model state and metrics."""
        model_state = self._model.get_state_tree()
        state = {}
        metrics = None
        for k, v in model_state.items():
            if k == "metrics_variables":
                metrics = v
            else:
                state[k] = v
        return state, metrics

    def save_state(self, epoch: int) -> None:
        """Saves the model to the checkpoint directory.

        Args:
            epoch: The epoch number at which the state is saved.
        """
        state, metrics_value = self._get_state()
        self.save(
            epoch * self._steps_per_epoch,
            args=ocp.args.StandardSave(item=state),
            metrics=metrics_value,
        )

    def restore_state(self, step: int | None = None) -> None:
        """Restores the model from the checkpoint directory.

        Args:
            step: The step number to restore the state from. Default=None
                restores the latest step.
        """
        if step is None:
            step = self.latest_step()
        # Restore the model state only, not metrics.
        state, _ = self._get_state()
        restored_state = self.restore(
            step, args=ocp.args.StandardRestore(item=state)
        )
        self._model.set_state_tree(restored_state)


class JaxKeras3CheckpointCallback(keras.callbacks.Callback):
    """A callback for checkpointing and restoring state using Orbax."""

    def __init__(
        self,
        model: keras.Model,
        checkpoint_dir: epath.PathLike,
        max_to_keep: int,
        steps_per_epoch: int = 1,
        **kwargs: Any,
    ):
        if keras.backend.backend() != "jax":
            raise ValueError(
                "`JaxKeras3CheckpointCallback` is only supported on a "
                "`jax` backend."
            )
        self._checkpoint_manager = JaxKeras3CheckpointManager(
            model, checkpoint_dir, max_to_keep, steps_per_epoch, **kwargs
        )

    def on_train_begin(self, logs: dict[str, Any] | None = None) -> None:
        if not self.model.built or not self.model.optimizer.built:
            raise ValueError(
                "To use `JaxKeras3CheckpointCallback`, your model and "
                "optimizer must be built before you call `fit()`."
            )
        latest_epoch = self._checkpoint_manager.latest_step()
        if latest_epoch is not None:
            self._checkpoint_manager.restore_state(step=latest_epoch)

    def on_epoch_end(
        self, epoch: int, logs: dict[str, Any] | None = None
    ) -> None:
        self._checkpoint_manager.save_state(epoch)
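A possible usage sketch for the new callback, assuming the JAX backend, installed `orbax-checkpoint` and `etils`, and a small hypothetical model; the checkpoint directory, shapes, and training data are placeholders.

```python
import keras  # requires KERAS_BACKEND="jax"
from keras_rs.src.layers.embedding.jax.checkpoint_utils import (
    JaxKeras3CheckpointCallback,
)

# Hypothetical model; any Keras model works as long as it is built.
model = keras.Sequential([keras.layers.Dense(8), keras.layers.Dense(1)])
model.compile(optimizer=keras.optimizers.Adagrad(0.05), loss="mse")

# The callback requires both the model and the optimizer to be built
# before `fit()` is called.
model.build((None, 16))
model.optimizer.build(model.trainable_variables)

callback = JaxKeras3CheckpointCallback(
    model,
    checkpoint_dir="/tmp/keras_rs_ckpts",  # placeholder path
    max_to_keep=3,
    steps_per_epoch=100,
)
# model.fit(x, y, epochs=5, callbacks=[callback])
# On a restart, on_train_begin() restores the latest saved step automatically.
```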
keras_rs/src/layers/embedding/jax/config_conversion.py

@@ -229,18 +229,63 @@ def keras_to_jte_optimizer(
     # pylint: disable-next=protected-access
     learning_rate = keras_to_jte_learning_rate(optimizer._learning_rate)
 
-    #
+    # Unsupported keras optimizer general options.
+    if optimizer.clipnorm is not None:
+        raise ValueError("Unsupported optimizer option `clipnorm`.")
+    if optimizer.global_clipnorm is not None:
+        raise ValueError("Unsupported optimizer option `global_clipnorm`.")
+    if optimizer.use_ema:
+        raise ValueError("Unsupported optimizer option `use_ema`.")
+    if optimizer.loss_scale_factor is not None:
+        raise ValueError("Unsupported optimizer option `loss_scale_factor`.")
+
+    # Supported optimizers.
     if isinstance(optimizer, keras.optimizers.SGD):
+        if getattr(optimizer, "nesterov", False):
+            raise ValueError("Unsupported optimizer option `nesterov`.")
+        if getattr(optimizer, "momentum", 0.0) != 0.0:
+            raise ValueError("Unsupported optimizer option `momentum`.")
         return embedding_spec.SGDOptimizerSpec(learning_rate=learning_rate)
     elif isinstance(optimizer, keras.optimizers.Adagrad):
+        if getattr(optimizer, "epsilon", 1e-7) != 1e-7:
+            raise ValueError("Unsupported optimizer option `epsilon`.")
         return embedding_spec.AdagradOptimizerSpec(
             learning_rate=learning_rate,
             initial_accumulator_value=optimizer.initial_accumulator_value,
         )
+    elif isinstance(optimizer, keras.optimizers.Adam):
+        if getattr(optimizer, "amsgrad", False):
+            raise ValueError("Unsupported optimizer option `amsgrad`.")
 
-
-
-
+        return embedding_spec.AdamOptimizerSpec(
+            learning_rate=learning_rate,
+            beta_1=optimizer.beta_1,
+            beta_2=optimizer.beta_2,
+            epsilon=optimizer.epsilon,
+        )
+    elif isinstance(optimizer, keras.optimizers.Ftrl):
+        if (
+            getattr(optimizer, "l2_shrinkage_regularization_strength", 0.0)
+            != 0.0
+        ):
+            raise ValueError(
+                "Unsupported optimizer option "
+                "`l2_shrinkage_regularization_strength`."
+            )
+
+        return embedding_spec.FTRLOptimizerSpec(
+            learning_rate=learning_rate,
+            learning_rate_power=optimizer.learning_rate_power,
+            l1_regularization_strength=optimizer.l1_regularization_strength,
+            l2_regularization_strength=optimizer.l2_regularization_strength,
+            beta=optimizer.beta,
+            initial_accumulator_value=optimizer.initial_accumulator_value,
+        )
+
+    raise ValueError(
+        f"Unsupported optimizer type {type(optimizer)}. Optimizer must be "
+        f"one of [Adagrad, Adam, Ftrl, SGD]."
+    )
 
 
 def jte_to_keras_optimizer(
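Assuming `keras_to_jte_optimizer` is called directly (the `DistributedEmbedding` layer normally does this internally) and that the JAX TPU embedding dependency providing `embedding_spec` is available, the expanded conversion behaves roughly like this sketch:

```python
import keras
from keras_rs.src.layers.embedding.jax import config_conversion

# Adam and Ftrl now map to JTE optimizer specs instead of being rejected.
adam_spec = config_conversion.keras_to_jte_optimizer(
    keras.optimizers.Adam(learning_rate=0.001)
)

# Generic options that the embedding path cannot honour fail fast.
try:
    config_conversion.keras_to_jte_optimizer(
        keras.optimizers.Adam(learning_rate=0.001, clipnorm=1.0)
    )
except ValueError as err:
    print(err)  # Unsupported optimizer option `clipnorm`.
```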
@@ -262,8 +307,33 @@ def jte_to_keras_optimizer(
             learning_rate=learning_rate,
             initial_accumulator_value=optimizer.initial_accumulator_value,
         )
+    elif isinstance(optimizer, embedding_spec.AdamOptimizerSpec):
+        return keras.optimizers.Adam(
+            learning_rate=learning_rate,
+            beta_1=optimizer.beta_1,
+            beta_2=optimizer.beta_2,
+            epsilon=optimizer.epsilon,
+        )
+    elif isinstance(optimizer, embedding_spec.FTRLOptimizerSpec):
+        if getattr(optimizer, "initial_linear_value", 0.0) != 0.0:
+            raise ValueError(
+                "Unsupported optimizer option `initial_linear_value`."
+            )
+        if getattr(optimizer, "multiply_linear_by_learning_rate", False):
+            raise ValueError(
+                "Unsupported optimizer option "
+                "`multiply_linear_by_learning_rate`."
+            )
+        return keras.optimizers.Ftrl(
+            learning_rate=learning_rate,
+            learning_rate_power=optimizer.learning_rate_power,
+            initial_accumulator_value=optimizer.initial_accumulator_value,
+            l1_regularization_strength=optimizer.l1_regularization_strength,
+            l2_regularization_strength=optimizer.l2_regularization_strength,
+            beta=optimizer.beta,
+        )
 
-    raise ValueError(f"Unknown optimizer spec {optimizer}")
+    raise ValueError(f"Unknown optimizer spec {type(optimizer)}.")
 
 
 def _keras_to_jte_table_config(
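With the reverse mapping added as well, a supported Keras optimizer can round-trip through the JTE spec. A minimal sketch under the same assumptions as above:

```python
import keras
from keras_rs.src.layers.embedding.jax import config_conversion

ftrl = keras.optimizers.Ftrl(
    learning_rate=0.05,
    learning_rate_power=-0.5,
    initial_accumulator_value=0.1,
    l1_regularization_strength=0.001,
    l2_regularization_strength=0.001,
    beta=0.0,
)
spec = config_conversion.keras_to_jte_optimizer(ftrl)      # FTRLOptimizerSpec
restored = config_conversion.jte_to_keras_optimizer(spec)  # back to a Keras Ftrl
print(type(restored).__name__)  # Ftrl
```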
keras_rs_nightly.egg-info/SOURCES.txt

@@ -15,6 +15,7 @@ keras_rs/src/layers/embedding/distributed_embedding.py
 keras_rs/src/layers/embedding/distributed_embedding_config.py
 keras_rs/src/layers/embedding/embed_reduce.py
 keras_rs/src/layers/embedding/jax/__init__.py
+keras_rs/src/layers/embedding/jax/checkpoint_utils.py
 keras_rs/src/layers/embedding/jax/config_conversion.py
 keras_rs/src/layers/embedding/jax/distributed_embedding.py
 keras_rs/src/layers/embedding/jax/embedding_lookup.py