optimum-rbln 0.8.2a4__py3-none-any.whl → 0.9.3rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (167)
  1. optimum/rbln/__init__.py +96 -9
  2. optimum/rbln/__version__.py +16 -3
  3. optimum/rbln/cli.py +660 -0
  4. optimum/rbln/configuration_utils.py +153 -42
  5. optimum/rbln/diffusers/__init__.py +7 -0
  6. optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl.py +3 -3
  7. optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl_cosmos.py +1 -1
  8. optimum/rbln/diffusers/configurations/models/configuration_controlnet.py +3 -3
  9. optimum/rbln/diffusers/configurations/models/configuration_prior_transformer.py +4 -4
  10. optimum/rbln/diffusers/configurations/models/configuration_transformer_cosmos.py +9 -4
  11. optimum/rbln/diffusers/configurations/models/configuration_transformer_sd3.py +9 -4
  12. optimum/rbln/diffusers/configurations/models/configuration_unet_2d_condition.py +3 -3
  13. optimum/rbln/diffusers/configurations/models/configuration_vq_model.py +3 -3
  14. optimum/rbln/diffusers/configurations/pipelines/configuration_controlnet.py +35 -19
  15. optimum/rbln/diffusers/configurations/pipelines/configuration_cosmos.py +14 -11
  16. optimum/rbln/diffusers/configurations/pipelines/configuration_kandinsky2_2.py +30 -20
  17. optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion.py +13 -9
  18. optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_3.py +17 -13
  19. optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_xl.py +17 -10
  20. optimum/rbln/diffusers/modeling_diffusers.py +30 -14
  21. optimum/rbln/diffusers/models/__init__.py +3 -13
  22. optimum/rbln/diffusers/models/autoencoders/autoencoder_kl.py +31 -3
  23. optimum/rbln/diffusers/models/autoencoders/autoencoder_kl_cosmos.py +28 -3
  24. optimum/rbln/diffusers/models/autoencoders/vq_model.py +31 -3
  25. optimum/rbln/diffusers/models/transformers/prior_transformer.py +1 -1
  26. optimum/rbln/diffusers/models/transformers/transformer_cosmos.py +9 -1
  27. optimum/rbln/diffusers/models/transformers/transformer_sd3.py +9 -1
  28. optimum/rbln/diffusers/models/unets/unet_2d_condition.py +6 -3
  29. optimum/rbln/diffusers/pipelines/__init__.py +11 -5
  30. optimum/rbln/diffusers/pipelines/auto_pipeline.py +307 -0
  31. optimum/rbln/diffusers/pipelines/cosmos/configuration_cosmos_guardrail.py +19 -16
  32. optimum/rbln/diffusers/pipelines/cosmos/cosmos_guardrail.py +14 -18
  33. optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_text2world.py +31 -1
  34. optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_video2world.py +31 -1
  35. optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +1 -6
  36. optimum/rbln/modeling.py +71 -19
  37. optimum/rbln/modeling_base.py +99 -21
  38. optimum/rbln/ops/attn.py +158 -0
  39. optimum/rbln/ops/flash_attn.py +166 -0
  40. optimum/rbln/ops/kv_cache_update.py +5 -0
  41. optimum/rbln/ops/linear.py +7 -0
  42. optimum/rbln/transformers/__init__.py +92 -0
  43. optimum/rbln/transformers/configuration_generic.py +9 -7
  44. optimum/rbln/transformers/modeling_attention_utils.py +252 -0
  45. optimum/rbln/transformers/modeling_generic.py +51 -9
  46. optimum/rbln/transformers/modeling_outputs.py +37 -0
  47. optimum/rbln/transformers/models/__init__.py +91 -30
  48. optimum/rbln/transformers/models/auto/__init__.py +2 -0
  49. optimum/rbln/transformers/models/auto/auto_factory.py +92 -17
  50. optimum/rbln/transformers/models/auto/modeling_auto.py +45 -0
  51. optimum/rbln/transformers/models/bart/bart_architecture.py +1 -3
  52. optimum/rbln/transformers/models/bart/configuration_bart.py +2 -0
  53. optimum/rbln/transformers/models/bert/bert_architecture.py +16 -0
  54. optimum/rbln/transformers/models/bert/modeling_bert.py +8 -4
  55. optimum/rbln/transformers/models/blip_2/configuration_blip_2.py +42 -11
  56. optimum/rbln/transformers/models/blip_2/modeling_blip_2.py +94 -30
  57. optimum/rbln/transformers/models/clip/configuration_clip.py +10 -7
  58. optimum/rbln/transformers/models/clip/modeling_clip.py +27 -4
  59. optimum/rbln/transformers/models/colpali/colpali_architecture.py +3 -6
  60. optimum/rbln/transformers/models/colpali/configuration_colpali.py +37 -21
  61. optimum/rbln/transformers/models/colpali/modeling_colpali.py +113 -96
  62. optimum/rbln/transformers/models/colqwen2/__init__.py +2 -0
  63. optimum/rbln/transformers/models/colqwen2/colqwen2_architecture.py +233 -0
  64. optimum/rbln/transformers/models/colqwen2/configuration_colqwen2.py +74 -0
  65. optimum/rbln/transformers/models/colqwen2/modeling_colqwen2.py +446 -0
  66. optimum/rbln/transformers/models/decoderonly/__init__.py +3 -2
  67. optimum/rbln/transformers/models/decoderonly/configuration_decoderonly.py +109 -37
  68. optimum/rbln/transformers/models/decoderonly/configuration_lora.py +411 -0
  69. optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py +318 -309
  70. optimum/rbln/transformers/models/decoderonly/decoderonly_runtime_utils.py +504 -0
  71. optimum/rbln/transformers/models/decoderonly/generation_decoderonly.py +111 -0
  72. optimum/rbln/transformers/models/decoderonly/lora_architecture.py +204 -0
  73. optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +453 -897
  74. optimum/rbln/transformers/models/depth_anything/__init__.py +16 -0
  75. optimum/rbln/transformers/models/depth_anything/configuration_depth_anything.py +24 -0
  76. optimum/rbln/transformers/models/depth_anything/modeling_depth_anything.py +25 -0
  77. optimum/rbln/transformers/models/exaone/modeling_exaone.py +42 -4
  78. optimum/rbln/transformers/models/gemma/__init__.py +2 -2
  79. optimum/rbln/transformers/models/gemma/configuration_gemma.py +9 -1
  80. optimum/rbln/transformers/models/gemma/gemma_architecture.py +1 -4
  81. optimum/rbln/transformers/models/gemma/modeling_gemma.py +22 -1
  82. optimum/rbln/transformers/models/gemma3/configuration_gemma3.py +49 -13
  83. optimum/rbln/transformers/models/gemma3/gemma3_architecture.py +12 -2
  84. optimum/rbln/transformers/models/gemma3/gemma3_runtime_utils.py +245 -0
  85. optimum/rbln/transformers/models/gemma3/modeling_gemma3.py +201 -349
  86. optimum/rbln/transformers/models/gpt2/__init__.py +2 -2
  87. optimum/rbln/transformers/models/gpt2/configuration_gpt2.py +31 -3
  88. optimum/rbln/transformers/models/gpt2/gpt2_architecture.py +10 -8
  89. optimum/rbln/transformers/models/gpt2/modeling_gpt2.py +18 -1
  90. optimum/rbln/transformers/models/grounding_dino/__init__.py +10 -0
  91. optimum/rbln/transformers/models/grounding_dino/configuration_grounding_dino.py +92 -0
  92. optimum/rbln/transformers/models/grounding_dino/grounding_dino_architecture.py +599 -0
  93. optimum/rbln/transformers/models/grounding_dino/modeling_grounding_dino.py +1032 -0
  94. optimum/rbln/transformers/models/idefics3/configuration_idefics3.py +35 -7
  95. optimum/rbln/transformers/models/idefics3/modeling_idefics3.py +26 -27
  96. optimum/rbln/transformers/models/llama/__init__.py +2 -2
  97. optimum/rbln/transformers/models/llama/configuration_llama.py +9 -1
  98. optimum/rbln/transformers/models/llama/modeling_llama.py +22 -1
  99. optimum/rbln/transformers/models/llava/__init__.py +16 -0
  100. optimum/rbln/transformers/models/llava/configuration_llava.py +72 -0
  101. optimum/rbln/transformers/models/llava/modeling_llava.py +478 -0
  102. optimum/rbln/transformers/models/llava_next/configuration_llava_next.py +15 -17
  103. optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +235 -375
  104. optimum/rbln/transformers/models/midm/midm_architecture.py +4 -1
  105. optimum/rbln/transformers/models/midm/modeling_midm.py +42 -4
  106. optimum/rbln/transformers/models/mistral/__init__.py +2 -2
  107. optimum/rbln/transformers/models/mistral/configuration_mistral.py +9 -1
  108. optimum/rbln/transformers/models/mistral/mistral_architecture.py +1 -1
  109. optimum/rbln/transformers/models/mistral/modeling_mistral.py +26 -3
  110. optimum/rbln/transformers/models/opt/__init__.py +2 -2
  111. optimum/rbln/transformers/models/opt/configuration_opt.py +8 -1
  112. optimum/rbln/transformers/models/opt/modeling_opt.py +28 -16
  113. optimum/rbln/transformers/models/opt/opt_architecture.py +4 -4
  114. optimum/rbln/transformers/models/pegasus/__init__.py +17 -0
  115. optimum/rbln/transformers/models/pegasus/configuration_pegasus.py +38 -0
  116. optimum/rbln/transformers/models/pegasus/modeling_pegasus.py +71 -0
  117. optimum/rbln/transformers/models/pegasus/pegasus_architecture.py +161 -0
  118. optimum/rbln/transformers/models/phi/__init__.py +2 -2
  119. optimum/rbln/transformers/models/phi/configuration_phi.py +9 -1
  120. optimum/rbln/transformers/models/phi/modeling_phi.py +10 -1
  121. optimum/rbln/transformers/models/phi/phi_architecture.py +11 -7
  122. optimum/rbln/transformers/models/pixtral/__init__.py +16 -0
  123. optimum/rbln/transformers/models/pixtral/configuration_pixtral.py +43 -0
  124. optimum/rbln/transformers/models/pixtral/modeling_pixtral.py +310 -0
  125. optimum/rbln/transformers/models/pixtral/pixtral_architecture.py +73 -0
  126. optimum/rbln/transformers/models/qwen2/__init__.py +2 -2
  127. optimum/rbln/transformers/models/qwen2/configuration_qwen2.py +9 -1
  128. optimum/rbln/transformers/models/qwen2/modeling_qwen2.py +27 -1
  129. optimum/rbln/transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py +21 -6
  130. optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py +15 -21
  131. optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py +28 -7
  132. optimum/rbln/transformers/models/qwen2_vl/__init__.py +19 -0
  133. optimum/rbln/transformers/models/qwen2_vl/configuration_qwen2_vl.py +88 -0
  134. optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py +514 -0
  135. optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py +165 -0
  136. optimum/rbln/transformers/models/qwen3/configuration_qwen3.py +2 -2
  137. optimum/rbln/transformers/models/qwen3/modeling_qwen3.py +86 -330
  138. optimum/rbln/transformers/models/qwen3/qwen3_architecture.py +1 -245
  139. optimum/rbln/transformers/models/seq2seq/configuration_seq2seq.py +20 -13
  140. optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py +24 -3
  141. optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py +2 -2
  142. optimum/rbln/transformers/models/siglip/__init__.py +2 -6
  143. optimum/rbln/transformers/models/siglip/configuration_siglip.py +1 -1
  144. optimum/rbln/transformers/models/siglip/modeling_siglip.py +5 -16
  145. optimum/rbln/transformers/models/swin/__init__.py +16 -0
  146. optimum/rbln/transformers/models/swin/configuration_swin.py +42 -0
  147. optimum/rbln/transformers/models/swin/modeling_swin.py +341 -0
  148. optimum/rbln/transformers/models/t5/configuration_t5.py +2 -0
  149. optimum/rbln/transformers/models/t5/t5_architecture.py +8 -1
  150. optimum/rbln/transformers/models/time_series_transformer/configuration_time_series_transformer.py +3 -3
  151. optimum/rbln/transformers/models/time_series_transformer/modeling_time_series_transformer.py +4 -14
  152. optimum/rbln/transformers/models/time_series_transformer/time_series_transformers_architecture.py +7 -1
  153. optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py +1 -0
  154. optimum/rbln/transformers/models/whisper/configuration_whisper.py +12 -13
  155. optimum/rbln/transformers/models/whisper/generation_whisper.py +28 -6
  156. optimum/rbln/transformers/models/whisper/modeling_whisper.py +28 -3
  157. optimum/rbln/transformers/models/xlm_roberta/__init__.py +2 -8
  158. optimum/rbln/transformers/utils/rbln_quantization.py +391 -75
  159. optimum/rbln/transformers/utils/rbln_runtime_wrapper.py +79 -0
  160. optimum/rbln/utils/depreacate_utils.py +16 -0
  161. optimum/rbln/utils/runtime_utils.py +28 -18
  162. optimum/rbln/utils/submodule.py +31 -9
  163. {optimum_rbln-0.8.2a4.dist-info → optimum_rbln-0.9.3rc0.dist-info}/METADATA +8 -7
  164. {optimum_rbln-0.8.2a4.dist-info → optimum_rbln-0.9.3rc0.dist-info}/RECORD +167 -125
  165. optimum_rbln-0.9.3rc0.dist-info/entry_points.txt +2 -0
  166. {optimum_rbln-0.8.2a4.dist-info → optimum_rbln-0.9.3rc0.dist-info}/WHEEL +0 -0
  167. {optimum_rbln-0.8.2a4.dist-info → optimum_rbln-0.9.3rc0.dist-info}/licenses/LICENSE +0 -0
optimum/rbln/utils/depreacate_utils.py (new file)
@@ -0,0 +1,16 @@
+from typing import Optional
+
+import rebel
+
+from .logging import get_logger
+
+
+logger = get_logger(__name__)
+
+
+def warn_deprecated_npu(npu: Optional[str] = None):
+    npu = npu or rebel.get_npu_name()
+    if npu == "RBLN-CA02":
+        logger.warning_once(
+            "Support for the RBLN-CA02 device is provided only up to optimum-rbln v0.8.0 and has reached end of life.",
+        )
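The new `warn_deprecated_npu` helper above falls back to `rebel.get_npu_name()` when no NPU name is passed and emits a one-time warning for RBLN-CA02. A minimal usage sketch, assuming the `rebel` runtime package is installed; the explicit `npu=` argument below is only for illustration:

```python
# Sketch only: exercises the helper added in optimum/rbln/utils/depreacate_utils.py.
from optimum.rbln.utils.depreacate_utils import warn_deprecated_npu

# Logs a single end-of-life warning, since RBLN-CA02 is only supported up to v0.8.0.
warn_deprecated_npu(npu="RBLN-CA02")

# With no argument, the helper queries rebel.get_npu_name() and warns only if
# the detected device is RBLN-CA02.
warn_deprecated_npu()
```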
optimum/rbln/utils/runtime_utils.py
@@ -14,7 +14,7 @@
 
 import re
 import threading
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, List, Optional, Union
 
 import rebel
 import torch
@@ -94,7 +94,7 @@ class RBLNPytorchRuntime:
     def __call__(self, *args: Any, **kwds: Any) -> Any:
         return self.forward(*args, **kwds)
 
-    def forward(self, *args: List["torch.Tensor"], **kwargs: Dict[str, "torch.Tensor"]):
+    def forward(self, *args: List["torch.Tensor"], **kwargs: "torch.Tensor"):
         # filtering useless args or kwarg such as None.
         args = list(filter(lambda arg: isinstance(arg, torch.Tensor), args))
         kwargs = dict(filter(lambda kwarg: isinstance(kwarg[1], torch.Tensor) or kwarg[0] == "out", kwargs.items()))
@@ -142,7 +142,7 @@ class UnavailableRuntime:
         """Returns an iterator with self as the only item."""
         return iter([self])
 
-    def forward(self, *args: List["torch.Tensor"], **kwargs: Dict[str, "torch.Tensor"]):
+    def forward(self, *args: List["torch.Tensor"], **kwargs: "torch.Tensor"):
         """Raises a detailed RuntimeError explaining why inference cannot be performed."""
         raise RuntimeError(
             "Cannot perform inference: RBLN runtime is not available.\n\n"
@@ -167,33 +167,44 @@ class ContextRblnConfig:
         device=None,
         device_map=None,
         create_runtimes=None,
-        optimize_host_mem=None,
         activate_profiler=None,
         timeout=None,
     ):
         self.device = device
         self.device_map = device_map
         self.create_runtimes = create_runtimes
-        self.optimize_host_mem = optimize_host_mem
         self.activate_profiler = activate_profiler
         self.timeout = timeout
+        self._previous_context = None
 
     def __enter__(self):
-        self._local.device = self.device
-        self._local.device_map = self.device_map
-        self._local.create_runtimes = self.create_runtimes
-        self._local.optimize_host_memory = self.optimize_host_mem
-        self._local.activate_profiler = self.activate_profiler
-        self._local.timeout = self.timeout
+        self._previous_context = {
+            "device": getattr(self._local, "device", None),
+            "device_map": getattr(self._local, "device_map", None),
+            "create_runtimes": getattr(self._local, "create_runtimes", None),
+            "activate_profiler": getattr(self._local, "activate_profiler", None),
+            "timeout": getattr(self._local, "timeout", None),
+        }
+
+        if self.device is not None:
+            self._local.device = self.device
+        if self.device_map is not None:
+            self._local.device_map = self.device_map
+        if self.create_runtimes is not None:
+            self._local.create_runtimes = self.create_runtimes
+        if self.activate_profiler is not None:
+            self._local.activate_profiler = self.activate_profiler
+        if self.timeout is not None:
+            self._local.timeout = self.timeout
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        self._local.device = None
-        self._local.device_map = None
-        self._local.create_runtimes = None
-        self._local.optimize_host_memory = None
-        self._local.activate_profiler = None
-        self._local.timeout = None
+        if self._previous_context is not None:
+            self._local.device = self._previous_context["device"]
+            self._local.device_map = self._previous_context["device_map"]
+            self._local.create_runtimes = self._previous_context["create_runtimes"]
+            self._local.activate_profiler = self._previous_context["activate_profiler"]
+            self._local.timeout = self._previous_context["timeout"]
 
     @classmethod
     def get_current_context(cls):
@@ -201,7 +212,6 @@ class ContextRblnConfig:
             "device": getattr(cls._local, "device", None),
             "device_map": getattr(cls._local, "device_map", None),
             "create_runtimes": getattr(cls._local, "create_runtimes", None),
-            "optimize_host_memory": getattr(cls._local, "optimize_host_memory", None),
            "activate_profiler": getattr(cls._local, "activate_profiler", None),
            "timeout": getattr(cls._local, "timeout", None),
        }
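The net effect of the `ContextRblnConfig` rework above: `optimize_host_mem` is dropped, entering a context now overrides only the fields that were explicitly set, and exiting restores the enclosing context's values instead of clearing everything to `None`. A minimal sketch of the new nesting behavior, assuming the class is imported from `optimum/rbln/utils/runtime_utils.py` as listed above and that the `rebel` runtime is installed; `device=0` is a placeholder value:

```python
# Sketch of the reworked context nesting; not library documentation, just the
# behavior implied by the diff above.
from optimum.rbln.utils.runtime_utils import ContextRblnConfig

with ContextRblnConfig(device=0, create_runtimes=True):
    # The inner context sets only activate_profiler; device stays 0 because
    # unset (None) arguments no longer overwrite the outer values.
    with ContextRblnConfig(activate_profiler=True):
        ctx = ContextRblnConfig.get_current_context()
        assert ctx["device"] == 0 and ctx["activate_profiler"] is True

    # On exit, the previous (outer) values are restored rather than reset to None.
    ctx = ContextRblnConfig.get_current_context()
    assert ctx["device"] == 0 and ctx["activate_profiler"] is None
```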
optimum/rbln/utils/submodule.py
@@ -13,16 +13,16 @@
 # limitations under the License.
 
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Type
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union
 
 from transformers import PretrainedConfig
 
-from ..configuration_utils import RBLNModelConfig
+from ..configuration_utils import RBLNModelConfig, get_rbln_config_class
 from ..utils.model_utils import get_rbln_model_cls
 
 
 if TYPE_CHECKING:
-    from transformers import PreTrainedModel
+    from transformers import AutoFeatureExtractor, AutoProcessor, AutoTokenizer, PreTrainedModel
 
     from ..modeling import RBLNModel
 
@@ -42,7 +42,21 @@ class SubModulesMixin:
         setattr(self, submodule_meta["name"], submodule)
 
     @classmethod
-    def _update_submodule_config(cls, model: "PreTrainedModel", rbln_config: RBLNModelConfig):
+    def _get_submodule_config_class(
+        cls, cls_name: str, submodule_rbln_config: Dict[str, Any]
+    ) -> Type[RBLNModelConfig]:
+        if isinstance(submodule_rbln_config, dict) and "cls_name" in submodule_rbln_config:
+            config_cls_name = submodule_rbln_config["cls_name"]
+            return get_rbln_config_class(config_cls_name)
+        return get_rbln_config_class(f"RBLN{cls_name}Config")
+
+    @classmethod
+    def _update_submodule_config(
+        cls,
+        model: "PreTrainedModel",
+        rbln_config: RBLNModelConfig,
+        preprocessors: Optional[Union["AutoFeatureExtractor", "AutoProcessor", "AutoTokenizer"]],
+    ):
         return rbln_config
 
     @classmethod
@@ -51,6 +65,7 @@ class SubModulesMixin:
     ) -> List["RBLNModel"]:
         rbln_submodules = []
         submodule_prefix = getattr(cls, "_rbln_submodule_prefix", None)
+        preprocessors = kwargs.pop("preprocessors", [])
 
         for submodule in cls._rbln_submodules:
             submodule_name = submodule["name"]
@@ -63,13 +78,20 @@ class SubModulesMixin:
             cls_name = torch_submodule.__class__.__name__
             submodule_cls: Type["RBLNModel"] = get_rbln_model_cls(f"RBLN{cls_name}")
             submodule_rbln_config = getattr(rbln_config, submodule_name) or {}
+            submodule_config_cls = cls._get_submodule_config_class(cls_name, submodule_rbln_config)
 
             if isinstance(submodule_rbln_config, dict):
-                submodule_rbln_config_class = submodule_cls.get_rbln_config_class()
-                submodule_rbln_config = submodule_rbln_config_class(**submodule_rbln_config)
-                setattr(rbln_config, submodule_name, submodule_rbln_config)
-
-            submodule_rbln_config = submodule_cls._update_submodule_config(model, submodule_rbln_config)
+                filtered_kwargs = rbln_config.filter_parameters(submodule_config_cls, submodule_rbln_config)
+                filtered_kwargs["cls_name"] = submodule_config_cls.__name__
+                submodule_rbln_config = submodule_config_cls(**filtered_kwargs)
+            elif not isinstance(submodule_rbln_config, submodule_config_cls):
+                config_dict = {k: v for k, v in submodule_rbln_config.__dict__.items() if not k.startswith("_")}
+                filtered_kwargs = rbln_config.filter_parameters(submodule_config_cls, config_dict)
+                filtered_kwargs["cls_name"] = submodule_config_cls.__name__
+                submodule_rbln_config = submodule_config_cls(**filtered_kwargs)
+
+            setattr(rbln_config, submodule_name, submodule_rbln_config)
+            submodule_rbln_config = submodule_cls._update_submodule_config(model, submodule_rbln_config, preprocessors)
 
             rbln_submodule = submodule_cls.from_model(
                 model=torch_submodule,
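The practical change in `SubModulesMixin` above is the resolution order for a submodule's RBLN config class: an explicit `cls_name` entry in the submodule's config dict wins; otherwise the class name is derived from the torch submodule's class as `RBLN{cls_name}Config` and looked up via `get_rbln_config_class`. A standalone sketch that mirrors only that resolution order; the model and config class names in the asserts are illustrative, not taken from this diff:

```python
# Standalone mimic of the resolution order implemented by
# _get_submodule_config_class; this mirrors the logic, it is not the library code.
from typing import Any, Dict


def resolve_config_cls_name(torch_cls_name: str, submodule_rbln_config: Dict[str, Any]) -> str:
    # An explicit "cls_name" key in the submodule config dict takes priority.
    if isinstance(submodule_rbln_config, dict) and "cls_name" in submodule_rbln_config:
        return submodule_rbln_config["cls_name"]
    # Otherwise the name is derived from the torch submodule's class name.
    return f"RBLN{torch_cls_name}Config"


# Illustrative names only (hypothetical for this sketch):
assert resolve_config_cls_name("CLIPTextModel", {}) == "RBLNCLIPTextModelConfig"
assert (
    resolve_config_cls_name("CLIPTextModel", {"cls_name": "RBLNCLIPTextModelWithProjectionConfig"})
    == "RBLNCLIPTextModelWithProjectionConfig"
)
```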
{optimum_rbln-0.8.2a4.dist-info → optimum_rbln-0.9.3rc0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: optimum-rbln
-Version: 0.8.2a4
+Version: 0.9.3rc0
 Summary: Optimum RBLN is the interface between the HuggingFace Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
 Project-URL: Homepage, https://rebellions.ai
 Project-URL: Documentation, https://docs.rbln.ai
@@ -20,15 +20,16 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
-Requires-Python: <3.13,>=3.9
+Requires-Python: <3.14,>=3.9
 Requires-Dist: accelerate>=1.0.1
-Requires-Dist: diffusers==0.34.0
+Requires-Dist: diffusers==0.35.1
 Requires-Dist: packaging>=24.1
-Requires-Dist: torch==2.7.0
-Requires-Dist: torchaudio<=2.7.0
-Requires-Dist: torchvision<=0.22.0
-Requires-Dist: transformers==4.51.3
+Requires-Dist: torch==2.8.0
+Requires-Dist: torchaudio<=2.8.0
+Requires-Dist: torchvision<=0.23.0
+Requires-Dist: transformers==4.57.1
 Description-Content-Type: text/markdown
 