keras-hub-nightly 0.16.1.dev202410080341__py3-none-any.whl → 0.16.1.dev202410100339__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. keras_hub/api/layers/__init__.py +3 -0
  2. keras_hub/api/models/__init__.py +11 -0
  3. keras_hub/src/layers/preprocessing/image_converter.py +2 -1
  4. keras_hub/src/models/image_to_image.py +411 -0
  5. keras_hub/src/models/inpaint.py +513 -0
  6. keras_hub/src/models/mix_transformer/__init__.py +12 -0
  7. keras_hub/src/models/mix_transformer/mix_transformer_classifier.py +4 -0
  8. keras_hub/src/models/mix_transformer/mix_transformer_classifier_preprocessor.py +16 -0
  9. keras_hub/src/models/mix_transformer/mix_transformer_image_converter.py +8 -0
  10. keras_hub/src/models/mix_transformer/mix_transformer_layers.py +9 -5
  11. keras_hub/src/models/mix_transformer/mix_transformer_presets.py +151 -0
  12. keras_hub/src/models/preprocessor.py +4 -4
  13. keras_hub/src/models/stable_diffusion_3/mmdit.py +308 -177
  14. keras_hub/src/models/stable_diffusion_3/stable_diffusion_3_backbone.py +87 -55
  15. keras_hub/src/models/stable_diffusion_3/stable_diffusion_3_image_to_image.py +171 -0
  16. keras_hub/src/models/stable_diffusion_3/stable_diffusion_3_inpaint.py +194 -0
  17. keras_hub/src/models/stable_diffusion_3/stable_diffusion_3_presets.py +1 -1
  18. keras_hub/src/models/stable_diffusion_3/stable_diffusion_3_text_to_image.py +13 -8
  19. keras_hub/src/models/task.py +1 -1
  20. keras_hub/src/models/text_to_image.py +89 -36
  21. keras_hub/src/tests/test_case.py +3 -1
  22. keras_hub/src/tokenizers/tokenizer.py +7 -7
  23. keras_hub/src/utils/preset_utils.py +7 -7
  24. keras_hub/src/utils/timm/preset_loader.py +1 -3
  25. keras_hub/src/version_utils.py +1 -1
  26. {keras_hub_nightly-0.16.1.dev202410080341.dist-info → keras_hub_nightly-0.16.1.dev202410100339.dist-info}/METADATA +1 -1
  27. {keras_hub_nightly-0.16.1.dev202410080341.dist-info → keras_hub_nightly-0.16.1.dev202410100339.dist-info}/RECORD +29 -22
  28. {keras_hub_nightly-0.16.1.dev202410080341.dist-info → keras_hub_nightly-0.16.1.dev202410100339.dist-info}/WHEEL +0 -0
  29. {keras_hub_nightly-0.16.1.dev202410080341.dist-info → keras_hub_nightly-0.16.1.dev202410100339.dist-info}/top_level.txt +0 -0
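The headline additions in this release are the new ImageToImage and Inpaint task base classes and their Stable Diffusion 3 implementations, alongside a MiT preset registry and image-converter plumbing. Below is a rough usage sketch for the new image-to-image task; the preset name and the dict-style generate() input are assumed from the companion text-to-image task, so the exact call signature should be checked against the shipped docstrings.

import numpy as np
import keras_hub

# Illustrative sketch only: load the newly added SD3 image-to-image task
# from the "stable_diffusion_3_medium" preset and run one guided edit.
# The input format (a dict with "images" and "prompts") is assumed, not
# taken from this diff.
image_to_image = keras_hub.models.ImageToImage.from_preset(
    "stable_diffusion_3_medium",
    height=512,
    width=512,
)
generated = image_to_image.generate(
    {
        "images": np.random.uniform(size=(512, 512, 3)).astype("float32"),
        "prompts": "a watercolor painting of a lighthouse at dusk",
    }
)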
keras_hub/src/models/mix_transformer/mix_transformer_presets.py
@@ -0,0 +1,151 @@
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """MiT model preset configurations."""
+
+ backbone_presets_with_weights = {
+     "mit_b0_ade20k_512": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 8 transformer blocks."
+             ),
+             "params": 3321962,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b0_ade20k_512",
+     },
+     "mit_b1_ade20k_512": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 8 transformer blocks."
+             ),
+             "params": 13156554,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b1_ade20k_512",
+     },
+     "mit_b2_ade20k_512": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 16 transformer blocks."
+             ),
+             "params": 24201418,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b2_ade20k_512",
+     },
+     "mit_b3_ade20k_512": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 28 transformer blocks."
+             ),
+             "params": 44077258,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b3_ade20k_512",
+     },
+     "mit_b4_ade20k_512": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 41 transformer blocks."
+             ),
+             "params": 60847818,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b4_ade20k_512",
+     },
+     "mit_b5_ade20k_640": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 52 transformer blocks."
+             ),
+             "params": 81448138,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b5_ade20k_512",
+     },
+     "mit_b0_cityscapes_1024": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 8 transformer blocks."
+             ),
+             "params": 3321962,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b0_cityscapes_1024",
+     },
+     "mit_b1_cityscapes_1024": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 8 transformer blocks."
+             ),
+             "params": 13156554,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b1_cityscapes_1024",
+     },
+     "mit_b2_cityscapes_1024": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 16 transformer blocks."
+             ),
+             "params": 24201418,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b2_cityscapes_1024",
+     },
+     "mit_b3_cityscapes_1024": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 28 transformer blocks."
+             ),
+             "params": 44077258,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b3_cityscapes_1024",
+     },
+     "mit_b4_cityscapes_1024": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 41 transformer blocks."
+             ),
+             "params": 60847818,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b4_cityscapes_1024",
+     },
+     "mit_b5_cityscapes_1024": {
+         "metadata": {
+             "description": (
+                 "MiT (MixTransformer) model with 52 transformer blocks."
+             ),
+             "params": 81448138,
+             "official_name": "MiT",
+             "path": "mit",
+         },
+         "kaggle_handle": "kaggle://kerashub/mix-transformer/keras/mit_b5_cityscapes_1024",
+     },
+ }
+
+ backbone_presets = {
+     **backbone_presets_with_weights,
+ }
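Once this nightly is installed, the presets registered above should be loadable by name through the standard from_preset constructor. A minimal sketch, assuming the backbone class resolves the preset names via the kaggle_handle entries shown in the diff:

import keras_hub

# Minimal sketch (not from this diff): instantiate one of the new MiT
# backbones by its registered preset name and inspect its architecture.
backbone = keras_hub.models.Backbone.from_preset("mit_b0_ade20k_512")
backbone.summary()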
keras_hub/src/models/preprocessor.py
@@ -32,7 +32,7 @@ class Preprocessor(PreprocessingLayer):
      image_converter_cls = None
 
      def __init__(self, *args, **kwargs):
-         self.config_name = kwargs.pop("config_name", PREPROCESSOR_CONFIG_FILE)
+         self.config_file = kwargs.pop("config_file", PREPROCESSOR_CONFIG_FILE)
          super().__init__(*args, **kwargs)
          self._tokenizer = None
          self._image_converter = None
@@ -85,7 +85,7 @@ class Preprocessor(PreprocessingLayer):
          )
          config.update(
              {
-                 "config_name": self.config_name,
+                 "config_file": self.config_file,
              }
          )
          return config
@@ -117,7 +117,7 @@ class Preprocessor(PreprocessingLayer):
      def from_preset(
          cls,
          preset,
-         config_name=PREPROCESSOR_CONFIG_FILE,
+         config_file=PREPROCESSOR_CONFIG_FILE,
          **kwargs,
      ):
          """Instantiate a `keras_hub.models.Preprocessor` from a model preset.
@@ -167,7 +167,7 @@ class Preprocessor(PreprocessingLayer):
          # Detect the correct subclass if we need to.
          if cls.backbone_cls != backbone_cls:
              cls = find_subclass(preset, cls, backbone_cls)
-         return loader.load_preprocessor(cls, config_name, **kwargs)
+         return loader.load_preprocessor(cls, config_file, **kwargs)
 
      @classmethod
      def _add_missing_kwargs(cls, loader, kwargs):
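The preprocessor change is a straight rename of the config_name keyword to config_file across __init__, get_config, and from_preset. A hedged sketch of the new keyword in use; the BERT preset and the "preprocessor.json" file name are illustrative placeholders, not values from this diff:

import keras_hub

# Illustrative sketch: after this change the keyword is config_file; it is
# assumed here to default to "preprocessor.json" (PREPROCESSOR_CONFIG_FILE).
preprocessor = keras_hub.models.BertTextClassifierPreprocessor.from_preset(
    "bert_base_en",
    config_file="preprocessor.json",
)
tokens = preprocessor(["The quick brown fox."])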