keras-rs-nightly 0.2.2.dev202508140338__py3-none-any.whl → 0.2.2.dev202508150339__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in the public registry.
Potentially problematic release: this version of keras-rs-nightly has been flagged as potentially problematic.
- keras_rs/src/layers/embedding/base_distributed_embedding.py +51 -38
- keras_rs/src/version.py +1 -1
- {keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/METADATA +1 -1
- {keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/RECORD +6 -6
- {keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/WHEEL +0 -0
- {keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/top_level.txt +0 -0
keras_rs/src/layers/embedding/base_distributed_embedding.py
CHANGED

@@ -1,4 +1,5 @@
 import collections
+import dataclasses
 import importlib.util
 import typing
 from typing import Any, Sequence
@@ -20,9 +21,10 @@ EmbedReduce = embed_reduce.EmbedReduce
 SUPPORTED_PLACEMENTS = ("auto", "default_device", "sparsecore")
 
 
-[… 3 removed lines elided in this view …]
+@dataclasses.dataclass(eq=True, unsafe_hash=True, order=True)
+class PlacementAndPath:
+    placement: str
+    path: str
 
 
 def _ragged_to_dense_inputs(
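The dataclass flags are what make the new `PlacementAndPath` usable both as a structure leaf and as a dict key: `eq=True` plus `unsafe_hash=True` generate `__hash__`, and `order=True` generates field-by-field comparisons. A minimal standalone sketch of those semantics (the placement and path values below are invented for illustration):

import dataclasses


@dataclasses.dataclass(eq=True, unsafe_hash=True, order=True)
class PlacementAndPath:
    placement: str
    path: str


a = PlacementAndPath("sparsecore", "user_id")
b = PlacementAndPath("default_device", "movie_id")

# unsafe_hash=True generates __hash__, so instances can key a dict.
lookup = {a: "table_a", b: "table_b"}
assert lookup[PlacementAndPath("sparsecore", "user_id")] == "table_a"

# order=True compares field by field: placement first, then path.
assert min(a, b) is b  # "default_device" sorts before "sparsecore"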
@@ -518,12 +520,12 @@ class DistributedEmbedding(keras.layers.Layer):
         With these structures in place, the steps to:
         - go from the deeply nested structure to the two-level structure are:
           - `assert_same_struct` as `self._feature_configs`
-          - `[…]
-[… 1 removed line elided in this view …]
+          - use `self._feature_deeply_nested_placement_and_paths` to map from
+            deeply nested to two-level
         - go from the two-level structure to the deeply nested structure:
-[… 3 removed lines elided in this view …]
+          - `assert_same_struct` as `self._placement_to_path_to_feature_config`
+          - use `self._feature_deeply_nested_placement_and_paths` to locate each
+            output in the two-level dicts
 
         Args:
             feature_configs: The deeply nested structure of `FeatureConfig` or
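A hedged, self-contained sketch of the round trip this docstring describes, using `keras.tree.map_structure` the way the surrounding code does (Keras 3 assumed; the feature structure and values are invented):

import dataclasses

import keras


@dataclasses.dataclass(eq=True, unsafe_hash=True, order=True)
class PlacementAndPath:
    placement: str
    path: str


# Stand-in for self._feature_deeply_nested_placement_and_paths: a deeply
# nested structure whose leaves say where each feature lives.
deeply_nested = {
    "user": [
        PlacementAndPath("sparsecore", "user/0"),
        PlacementAndPath("default_device", "user/1"),
    ],
    "item": PlacementAndPath("sparsecore", "item"),
}
values = {"user": ["u0", "u1"], "item": "i0"}

# Deeply nested -> two-level: visit (leaf, value) pairs and file each
# value under placement -> path.
two_level = {"sparsecore": {}, "default_device": {}}


def populate(pp, value):
    two_level[pp.placement][pp.path] = value


keras.tree.map_structure(populate, deeply_nested, values)
assert two_level == {
    "sparsecore": {"user/0": "u0", "item": "i0"},
    "default_device": {"user/1": "u1"},
}

# Two-level -> deeply nested: look each value up by (placement, path).
restored = keras.tree.map_structure(
    lambda pp: two_level[pp.placement][pp.path], deeply_nested
)
assert restored == values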
@@ -590,14 +592,14 @@ class DistributedEmbedding(keras.layers.Layer):
         ] = collections.defaultdict(dict)
 
         def populate_placement_to_path_to_input_shape(
-[… 1 removed line elided in this view …]
+            pp: PlacementAndPath, input_shape: types.Shape
         ) -> None:
-            placement_to_path_to_input_shape[[…]
-[… 2 removed lines elided in this view …]
+            placement_to_path_to_input_shape[pp.placement][pp.path] = (
+                input_shape
+            )
 
         keras.tree.map_structure_up_to(
-            self.[…]
+            self._feature_deeply_nested_placement_and_paths,
             populate_placement_to_path_to_input_shape,
             self._feature_deeply_nested_placement_and_paths,
             input_shapes,
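The `_up_to` variant matters here because the leaves of `input_shapes` are themselves tuples: a plain `map_structure` would recurse into each shape dimension by dimension, while `map_structure_up_to` stops at the leaves of its first, shallower argument and hands each shape over whole. A small sketch of that behavior (invented structures, Keras 3 assumed):

import keras

# Shallow structure: its leaves define where iteration stops.
shallow = {"user": "user_feature", "item": "item_feature"}

# Deeper structure: each corresponding "leaf" is a whole shape tuple.
shapes = {"user": (32, 8), "item": (32,)}

# Each shape tuple arrives intact instead of element by element.
described = keras.tree.map_structure_up_to(
    shallow,
    lambda name, shape: f"{name}: rank {len(shape)}",
    shallow,
    shapes,
)
assert described == {
    "user": "user_feature: rank 2",
    "item": "item_feature: rank 1",
}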
@@ -645,35 +647,40 @@ class DistributedEmbedding(keras.layers.Layer):
         """
         # Verify input structure.
         keras.tree.assert_same_structure(self._feature_configs, inputs)
+        if weights is not None:
+            keras.tree.assert_same_structure(self._feature_configs, weights)
 
         if not self.built:
-            input_shapes = keras.tree.[…]
-                self._feature_configs,
+            input_shapes = keras.tree.map_structure(
                 lambda array: backend.standardize_shape(array.shape),
                 inputs,
             )
             self.build(input_shapes)
 
-        # Go from deeply nested[…]
-[… 1 removed line elided in this view …]
+        # Go from deeply nested to nested dict placement -> path -> input.
+        def to_placement_to_path(
+            tensors: types.Nested[types.Tensor],
+        ) -> dict[str, dict[str, types.Tensor]]:
+            result: dict[str, dict[str, types.Tensor]] = {
+                p: dict() for p in self._placement_to_path_to_feature_config
+            }
 
-[… 2 removed lines elided in this view …]
-                self._placement_to_path_to_feature_config, flat_inputs
-            )
+            def populate(pp: PlacementAndPath, x: types.Tensor) -> None:
+                result[pp.placement][pp.path] = x
 
-[… 4 removed lines elided in this view …]
-            placement_to_path_to_weights = keras.tree.pack_sequence_as(
-                self._placement_to_path_to_feature_config, flat_weights
+            keras.tree.map_structure(
+                populate,
+                self._feature_deeply_nested_placement_and_paths,
+                tensors,
             )
-[… 5 removed lines elided in this view …]
+            return result
+
+        placement_to_path_to_inputs = to_placement_to_path(inputs)
+
+        # Same for weights if present.
+        placement_to_path_to_weights = (
+            to_placement_to_path(weights) if weights is not None else None
+        )
 
         placement_to_path_to_preprocessed: dict[
             str, dict[str, dict[str, types.Nested[types.Tensor]]]
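The behavioral fix threaded through this and the next two hunks is uniform None propagation: `weights` is optional, so the structure check, the two-level conversion, and each per-placement lookup are all guarded instead of assuming a weights structure exists. The pattern in isolation (a hedged sketch; `to_placement_to_path` is reduced to a stub):

from typing import Optional


def to_placement_to_path(tensors: dict) -> dict:
    # Stub for the real structure conversion.
    return {"sparsecore": dict(tensors)}


def preprocess(inputs: dict, weights: Optional[dict]) -> dict:
    placement_to_path_to_inputs = to_placement_to_path(inputs)
    # Convert weights only when present; otherwise carry None through.
    placement_to_path_to_weights = (
        to_placement_to_path(weights) if weights is not None else None
    )
    return {
        "inputs": placement_to_path_to_inputs["sparsecore"],
        # Guard every lookup so the no-weights path never indexes None.
        "weights": placement_to_path_to_weights["sparsecore"]
        if placement_to_path_to_weights is not None
        else None,
    }


assert preprocess({"a": 1}, None)["weights"] is None
assert preprocess({"a": 1}, {"a": 0.5})["weights"] == {"a": 0.5}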
@@ -684,7 +691,9 @@ class DistributedEmbedding(keras.layers.Layer):
             placement_to_path_to_preprocessed["sparsecore"] = (
                 self._sparsecore_preprocess(
                     placement_to_path_to_inputs["sparsecore"],
-                    placement_to_path_to_weights["sparsecore"],
+                    placement_to_path_to_weights["sparsecore"]
+                    if placement_to_path_to_weights is not None
+                    else None,
                     training,
                 )
             )
@@ -694,7 +703,9 @@ class DistributedEmbedding(keras.layers.Layer):
             placement_to_path_to_preprocessed["default_device"] = (
                 self._default_device_preprocess(
                     placement_to_path_to_inputs["default_device"],
-                    placement_to_path_to_weights["default_device"],
+                    placement_to_path_to_weights["default_device"]
+                    if placement_to_path_to_weights is not None
+                    else None,
                     training,
                 )
             )
@@ -780,11 +791,13 @@ class DistributedEmbedding(keras.layers.Layer):
             placement_to_path_to_outputs,
         )
 
-        # Go from placement -> path -> output to[…]
-[… 1 removed line elided in this view …]
+        # Go from placement -> path -> output to deeply nested structure.
+        def populate_output(pp: PlacementAndPath) -> types.Tensor:
+            return placement_to_path_to_outputs[pp.placement][pp.path]
 
-[… 2 removed lines elided in this view …]
+        return keras.tree.map_structure(
+            populate_output, self._feature_deeply_nested_placement_and_paths
+        )
 
     def get_embedding_tables(self) -> dict[str, types.Tensor]:
         """Return the content of the embedding tables by table name.
keras_rs/src/version.py
CHANGED
(1-line version bump; the diff body is not captured in this view)

{keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/RECORD
CHANGED
@@ -5,10 +5,10 @@ keras_rs/metrics/__init__.py,sha256=Qxpf6OFooIL9TIn2l3WgOea3HFRG0hq02glPAxtMZ9c,
 keras_rs/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_rs/src/api_export.py,sha256=RsmG-DvO-cdFeAF9W6LRzms0kvtm-Yp9BAA_d-952zI,510
 keras_rs/src/types.py,sha256=1A-oLRdX1-f2DsVZBcNl8qNsaH8pM-gnleLT9FWZWBw,1189
-keras_rs/src/version.py,sha256=[…]
+keras_rs/src/version.py,sha256=wvBc0HOPE8xIqLsp6cWz-5TtgtVINmg5amOyutxMFiY,224
 keras_rs/src/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_rs/src/layers/embedding/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-keras_rs/src/layers/embedding/base_distributed_embedding.py,sha256=[…]
+keras_rs/src/layers/embedding/base_distributed_embedding.py,sha256=iJC02gxreAthInO2NrKbemRpz7-2OwCg1yAKSDeOWb0,45461
 keras_rs/src/layers/embedding/distributed_embedding.py,sha256=94jxUHoGK3Gs9yfV0KxFTuqPo7XFnhgCNlO2FEeiSgM,1072
 keras_rs/src/layers/embedding/distributed_embedding_config.py,sha256=nmDqZlXiZGxc2UmEaUULaOvRjwq427ReSltDGEBrv5I,5816
 keras_rs/src/layers/embedding/embed_reduce.py,sha256=c-MnEw1-KWs0jTf0JJ_ZBOY-9hRkiFyu989Dof3DnS8,12343
@@ -50,7 +50,7 @@ keras_rs/src/metrics/utils.py,sha256=fGTo8j0ykVE5Y3yQCS2orSFcHY20Uxt0NazyPsybUsw
 keras_rs/src/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_rs/src/utils/doc_string_utils.py,sha256=CmqomepmaYcvpACpXEXkrJb8DMnvIgmYK-lJ53lYarY,1675
 keras_rs/src/utils/keras_utils.py,sha256=dc-NFzs3a-qmRw0vBDiMslPLfrm9yymGduLWesXPhuY,2123
-keras_rs_nightly-0.2.2.[…]
-keras_rs_nightly-0.2.2.[…]
-keras_rs_nightly-0.2.2.[…]
-keras_rs_nightly-0.2.2.[…]
+keras_rs_nightly-0.2.2.dev202508150339.dist-info/METADATA,sha256=TqXJCEpnATn3zSL0bi-7-MadI5ojXvgUpQN4EzkhKkA,5273
+keras_rs_nightly-0.2.2.dev202508150339.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+keras_rs_nightly-0.2.2.dev202508150339.dist-info/top_level.txt,sha256=pWs8X78Z0cn6lfcIb9VYOW5UeJ-TpoaO9dByzo7_FFo,9
+keras_rs_nightly-0.2.2.dev202508150339.dist-info/RECORD,,
{keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/WHEEL
File without changes

{keras_rs_nightly-0.2.2.dev202508140338.dist-info → keras_rs_nightly-0.2.2.dev202508150339.dist-info}/top_level.txt
File without changes