optimum-rbln 0.9.1__py3-none-any.whl → 0.9.2a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of optimum-rbln might be problematic.

Files changed (36)
  1. optimum/rbln/__version__.py +2 -2
  2. optimum/rbln/configuration_utils.py +54 -7
  3. optimum/rbln/diffusers/configurations/pipelines/configuration_controlnet.py +30 -14
  4. optimum/rbln/diffusers/configurations/pipelines/configuration_cosmos.py +11 -8
  5. optimum/rbln/diffusers/configurations/pipelines/configuration_kandinsky2_2.py +23 -13
  6. optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion.py +10 -6
  7. optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_3.py +14 -10
  8. optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_xl.py +14 -7
  9. optimum/rbln/diffusers/pipelines/cosmos/configuration_cosmos_guardrail.py +9 -11
  10. optimum/rbln/transformers/models/blip_2/configuration_blip_2.py +35 -3
  11. optimum/rbln/transformers/models/blip_2/modeling_blip_2.py +21 -22
  12. optimum/rbln/transformers/models/clip/modeling_clip.py +4 -0
  13. optimum/rbln/transformers/models/colpali/colpali_architecture.py +2 -2
  14. optimum/rbln/transformers/models/colpali/configuration_colpali.py +17 -1
  15. optimum/rbln/transformers/models/colpali/modeling_colpali.py +72 -79
  16. optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +2 -2
  17. optimum/rbln/transformers/models/gemma3/configuration_gemma3.py +11 -3
  18. optimum/rbln/transformers/models/gemma3/modeling_gemma3.py +58 -43
  19. optimum/rbln/transformers/models/idefics3/configuration_idefics3.py +27 -3
  20. optimum/rbln/transformers/models/idefics3/modeling_idefics3.py +22 -15
  21. optimum/rbln/transformers/models/llava/configuration_llava.py +16 -2
  22. optimum/rbln/transformers/models/llava/modeling_llava.py +106 -49
  23. optimum/rbln/transformers/models/llava_next/configuration_llava_next.py +11 -13
  24. optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +232 -342
  25. optimum/rbln/transformers/models/pixtral/modeling_pixtral.py +6 -11
  26. optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py +11 -1
  27. optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py +22 -0
  28. optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py +11 -1
  29. optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py +22 -0
  30. optimum/rbln/transformers/models/siglip/modeling_siglip.py +3 -14
  31. optimum/rbln/transformers/utils/rbln_runtime_wrapper.py +79 -0
  32. optimum/rbln/utils/submodule.py +21 -5
  33. {optimum_rbln-0.9.1.dist-info → optimum_rbln-0.9.2a0.dist-info}/METADATA +2 -2
  34. {optimum_rbln-0.9.1.dist-info → optimum_rbln-0.9.2a0.dist-info}/RECORD +36 -35
  35. {optimum_rbln-0.9.1.dist-info → optimum_rbln-0.9.2a0.dist-info}/WHEEL +0 -0
  36. {optimum_rbln-0.9.1.dist-info → optimum_rbln-0.9.2a0.dist-info}/licenses/LICENSE +0 -0

optimum/rbln/transformers/models/pixtral/modeling_pixtral.py
@@ -36,8 +36,6 @@ logger = get_logger(__name__)
 if TYPE_CHECKING:
     from transformers import AutoFeatureExtractor, AutoProcessor, AutoTokenizer, PreTrainedModel
 
-    from ....diffusers.modeling_diffusers import RBLNDiffusionMixin, RBLNDiffusionMixinConfig
-
 
 class RBLNRuntimePixtralVisionModel(RBLNPytorchRuntime):
     mandatory_members = ["main_input_name"]
@@ -128,8 +126,11 @@ class RBLNRuntimePixtralVisionModel(RBLNPytorchRuntime):
                 (1, patch_embed_seq.shape[-2]), fill_value=torch.finfo(patch_embed_seq.dtype).min
             )
             attention_mask[:, : h_patched_original * w_patched_original] = 0
-
-            transformer_output = super().forward(patch_embed_seq, attention_mask, cos, sin)
+            if "out" in kwargs:
+                super().forward(patch_embed_seq, attention_mask, cos, sin, **kwargs)
+                transformer_output = kwargs["out"]
+            else:
+                transformer_output = super().forward(patch_embed_seq, attention_mask, cos, sin, **kwargs)
 
             last_hidden_state_list.append(transformer_output[0][:, : h_patched_original * w_patched_original, :])
             hidden_states = transformer_output[1:]
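
The new branch lets a caller hand the wrapped runtime a preallocated output buffer: when an `out` entry is present in `kwargs`, the runtime call writes into it and the result is read back from `kwargs["out"]` instead of the return value. A minimal sketch of the two calling styles this dispatch supports, with a hypothetical `runtime` callable standing in for the compiled vision runtime (not the actual RBLN runtime API):

```python
# Illustrative only: `runtime` stands in for a compiled vision runtime that can
# either return its outputs or write them into a caller-provided buffer.
def run_patches(runtime, patch_embeds, attention_mask, cos, sin, out=None):
    if out is not None:
        # Write-into-buffer style: the runtime fills `out` in place.
        runtime(patch_embeds, attention_mask, cos, sin, out=out)
        return out
    # Allocate-and-return style.
    return runtime(patch_embeds, attention_mask, cos, sin)
```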
@@ -236,12 +237,6 @@ class RBLNPixtralVisionModel(RBLNModel):
         }
         return _PixtralVisionModel(model, **wrapper_cfg).eval()
 
-    @classmethod
-    def update_rbln_config_using_pipe(
-        cls, pipe: "RBLNDiffusionMixin", rbln_config: "RBLNDiffusionMixinConfig", submodule_name: str
-    ) -> "RBLNDiffusionMixinConfig":
-        return rbln_config
-
     @classmethod
     def _update_rbln_config(
         cls,
@@ -309,7 +304,7 @@ class RBLNPixtralVisionModel(RBLNModel):
         )
 
         output = self.model(
-            pixel_values, image_sizes, output_hidden_states=output_hidden_states, return_dict=return_dict
+            pixel_values, image_sizes, output_hidden_states=output_hidden_states, return_dict=return_dict, **kwargs
         )
 
         return output

optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py
@@ -23,6 +23,7 @@ from transformers.models.qwen2_5_vl.modeling_qwen2_5_vl import (
     Qwen2_5_VisionPatchEmbed,
     Qwen2_5_VisionRotaryEmbedding,
     Qwen2_5_VisionTransformerPretrainedModel,
+    Qwen2_5_VLModel,
     Qwen2_5_VLRotaryEmbedding,
 )
 
@@ -391,6 +392,14 @@ class RBLNQwen2_5_VLForConditionalGeneration(RBLNDecoderOnlyModelForCausalLM):
     def can_generate(self):
         return True
 
+    @classmethod
+    def get_pytorch_model(cls, *args, **kwargs):
+        model = super().get_pytorch_model(*args, **kwargs)
+        model.model.lm_head = model.lm_head
+        model.lm_head = None
+        del model.lm_head
+        return model
+
     @classmethod
     def update_kwargs(cls, kwargs):
         kwargs.update(
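
The new `get_pytorch_model` override (the same change appears below for Qwen2-VL) relocates `lm_head` from the top-level `*ForConditionalGeneration` module onto its inner `.model`, presumably so that code built around the inner module can still reach the head; in transformers the head normally lives on the outer class. A stand-alone sketch of the relocation pattern using plain `torch.nn` stand-ins, not the real Qwen classes:

```python
import torch.nn as nn

# Stand-in modules illustrating the relocation; these are not the real transformers classes.
class Inner(nn.Module):
    def __init__(self):
        super().__init__()
        self.embed = nn.Embedding(10, 4)

class Outer(nn.Module):
    def __init__(self):
        super().__init__()
        self.model = Inner()
        self.lm_head = nn.Linear(4, 10, bias=False)

outer = Outer()
# Move the head under the inner module so code that only sees `outer.model`
# can still project hidden states to logits.
outer.model.lm_head = outer.lm_head
outer.lm_head = None
del outer.lm_head
assert hasattr(outer.model, "lm_head") and not hasattr(outer, "lm_head")
```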
@@ -532,7 +541,8 @@ class RBLNQwen2_5_VLForConditionalGeneration(RBLNDecoderOnlyModelForCausalLM):
             vision_tokens = input_id[0][vision_start_indices + 1]
             image_nums = (vision_tokens == image_token_id).sum()
             video_nums = (vision_tokens == video_token_id).sum()
-            position_ids, rope_deltas = self.get_rope_index(
+            position_ids, rope_deltas = Qwen2_5_VLModel.get_rope_index(
+                self,
                 input_id,
                 image_grid_thw[image_idx : image_idx + image_nums] if image_grid_thw is not None else None,
                 video_grid_thw[video_idx : video_idx + video_nums] if video_grid_thw is not None else None,

optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py
@@ -3,6 +3,7 @@ from typing import Tuple
 
 import torch
 import torch.nn as nn
+from transformers import PreTrainedModel
 
 from ..decoderonly.decoderonly_architecture import DecoderOnlyWrapper, apply_rotary_pos_emb
 
@@ -194,3 +195,24 @@ class Qwen2_5_VL_LanguageModelWrapper(DecoderOnlyWrapper):
             past_key_values,
             position_embeds,
         )
+
+    def convert_to_rbln_class(self, model: PreTrainedModel, max_seq_len: int):
+        new_layers = []
+
+        for layer_idx, layer in enumerate(model.model.language_model.layers):
+            is_sliding = layer_idx in self.rbln_config.sliding_window_layers
+            new_self_attn = self.get_rbln_attn_class()(
+                self.get_attn_layer(layer), self.rbln_config, is_sliding=is_sliding
+            )
+            new_layer = self.get_rbln_layer_class()(layer, new_self_attn)
+            new_layers.append(new_layer)
+
+        new_model = self.get_rbln_model_class()(
+            model.model.language_model,
+            new_layers,
+            self.rbln_config,
+            use_learned_pos_emb=self.__class__._use_learned_pos_emb,
+        )
+
+        new_model = self.get_rbln_causal_lm_class()(model.model, new_model)
+        return new_model

optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py
@@ -27,6 +27,7 @@ from transformers.modeling_utils import no_init_weights
 from transformers.models.qwen2_vl.modeling_qwen2_vl import (
     PatchEmbed,
     Qwen2VisionTransformerPretrainedModel,
+    Qwen2VLModel,
     Qwen2VLRotaryEmbedding,
     VisionRotaryEmbedding,
 )
@@ -280,6 +281,14 @@ class RBLNQwen2VLForConditionalGeneration(RBLNDecoderOnlyModelForCausalLM):
     def can_generate(self):
         return True
 
+    @classmethod
+    def get_pytorch_model(cls, *args, **kwargs):
+        model = super().get_pytorch_model(*args, **kwargs)
+        model.model.lm_head = model.lm_head
+        model.lm_head = None
+        del model.lm_head
+        return model
+
     @classmethod
     def get_input_info(
         cls,
@@ -402,7 +411,8 @@ class RBLNQwen2VLForConditionalGeneration(RBLNDecoderOnlyModelForCausalLM):
             vision_tokens = input_id[0][vision_start_indices + 1]
             image_nums = (vision_tokens == image_token_id).sum()
             video_nums = (vision_tokens == video_token_id).sum()
-            position_ids, rope_deltas = self.get_rope_index(
+            position_ids, rope_deltas = Qwen2VLModel.get_rope_index(
+                self,
                 input_id,
                 image_grid_thw[image_idx : image_idx + image_nums] if image_grid_thw is not None else None,
                 video_grid_thw[video_idx : video_idx + video_nums] if video_grid_thw is not None else None,

optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py
@@ -3,6 +3,7 @@ from typing import Tuple
 
 import torch
 import torch.nn as nn
+from transformers import PreTrainedModel
 
 from ..decoderonly.decoderonly_architecture import (
     DecoderOnlyWrapper,
@@ -139,3 +140,24 @@ class Qwen2VL_LanguageModelWrapper(DecoderOnlyWrapper):
             past_key_values,
             position_embeds,
         )
+
+    def convert_to_rbln_class(self, model: PreTrainedModel, max_seq_len: int):
+        new_layers = []
+
+        for layer_idx, layer in enumerate(model.model.language_model.layers):
+            is_sliding = layer_idx in self.rbln_config.sliding_window_layers
+            new_self_attn = self.get_rbln_attn_class()(
+                self.get_attn_layer(layer), self.rbln_config, is_sliding=is_sliding
+            )
+            new_layer = self.get_rbln_layer_class()(layer, new_self_attn)
+            new_layers.append(new_layer)
+
+        new_model = self.get_rbln_model_class()(
+            model.model.language_model,
+            new_layers,
+            self.rbln_config,
+            use_learned_pos_emb=self.__class__._use_learned_pos_emb,
+        )
+
+        new_model = self.get_rbln_causal_lm_class()(model.model, new_model)
+        return new_model

optimum/rbln/transformers/models/siglip/modeling_siglip.py
@@ -29,8 +29,6 @@ logger = get_logger(__name__)
 if TYPE_CHECKING:
     from transformers import AutoFeatureExtractor, AutoProcessor, AutoTokenizer, PreTrainedModel
 
-    from ....diffusers.modeling_diffusers import RBLNDiffusionMixin, RBLNDiffusionMixinConfig
-
 
 class _SiglipVisionModel(torch.nn.Module):
     def __init__(
@@ -65,6 +63,8 @@ class RBLNSiglipVisionModel(RBLNModel):
     on RBLN devices, supporting image encoding for multimodal vision-language tasks.
     """
 
+    _tp_support = False
+
     @classmethod
     def wrap_model_if_needed(cls, model: torch.nn.Module, rbln_config: RBLNSiglipVisionModelConfig) -> torch.nn.Module:
         wrapper_cfg = {
@@ -74,12 +74,6 @@ class RBLNSiglipVisionModel(RBLNModel):
         }
         return _SiglipVisionModel(model, **wrapper_cfg).eval()
 
-    @classmethod
-    def update_rbln_config_using_pipe(
-        cls, pipe: "RBLNDiffusionMixin", rbln_config: "RBLNDiffusionMixinConfig", submodule_name: str
-    ) -> "RBLNDiffusionMixinConfig":
-        return rbln_config
-
     @classmethod
     def _update_rbln_config(
         cls,
@@ -128,11 +122,6 @@ class RBLNSiglipVisionModel(RBLNModel):
         interpolate_pos_encoding: bool = False,
         **kwargs: Any,
     ) -> Union[Tuple, BaseModelOutputWithPooling]:
-        if len(kwargs) > 0 and any(value is not None for value in kwargs.values()):
-            logger.warning(
-                f"Currently, optimum-rbln does not support kwargs {kwargs.keys()} for {self.__class__.__name__}."
-            )
-
         output_attentions = output_attentions if output_attentions is not None else self.rbln_config.output_attentions
         output_hidden_states = (
             output_hidden_states if output_hidden_states is not None else self.rbln_config.output_hidden_states
@@ -156,7 +145,7 @@ class RBLNSiglipVisionModel(RBLNModel):
                 f"Please compile again with the correct argument."
             )
 
-        output = super().forward(pixel_values, return_dict=return_dict)
+        output = super().forward(pixel_values, return_dict=return_dict, **kwargs)
         return output
 
     def _prepare_output(self, output, return_dict):

optimum/rbln/transformers/utils/rbln_runtime_wrapper.py (new file)
@@ -0,0 +1,79 @@
+# Copyright 2025 Rebellions Inc. All rights reserved.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Union
+
+from torch.nn import Module
+
+from ...modeling import RBLNModel
+
+
+if TYPE_CHECKING:
+    import rebel
+
+
+class LoopProcessor(Module, ABC):
+    def __init__(self, model: Union[RBLNModel, "rebel.Runtime"]):
+        super().__init__()
+        self.model = model
+
+    def __repr__(self) -> str:
+        return repr(self.model)
+
+    def _is_batch_implemented(self) -> bool:
+        return self._forward_batch.__func__ is not LoopProcessor._forward_batch
+
+    def forward(self, *args, force_loop: bool = False, **kwargs) -> Any:
+        if not force_loop and self._is_batch_implemented():
+            return self._forward_batch(*args, **kwargs)
+        else:
+            return self._forward_loop(*args, **kwargs)
+
+    def _forward_loop(self, *args, **kwargs) -> Any:
+        batch_size = self._get_batch_size(*args, **kwargs)
+
+        if not isinstance(batch_size, int) or batch_size == 0:
+            return self._process_outputs([])
+
+        common_inputs = self._prepare_inputs_before_loop(*args, **kwargs)
+
+        outputs = []
+        for i in range(batch_size):
+            item_args, item_kwargs = self._prepare_inputs_for_iteration(i, common_inputs, *args, **kwargs)
+            item_output = self.model(*item_args, **item_kwargs)
+            outputs.append(item_output)
+
+        return self._process_outputs(outputs, **kwargs)
+
+    def _forward_batch(self, *args, **kwargs) -> Any:
+        raise NotImplementedError("The batch processing logic (_forward_batch) is not implemented in this class.")
+
+    @abstractmethod
+    def _get_batch_size(self, *args, **kwargs) -> int:
+        pass
+
+    @abstractmethod
+    def _prepare_inputs_for_iteration(
+        self, index: int, common_inputs: Dict[str, Any], *args, **kwargs
+    ) -> Tuple[List[Any], Dict[str, Any]]:
+        pass
+
+    def _prepare_inputs_before_loop(self, *args, **kwargs) -> Dict[str, Any]:
+        pass
+
+    @abstractmethod
+    def _process_outputs(self, outputs: List[Any], **kwargs) -> Any:
+        pass
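
The new `LoopProcessor` base class factors out the "run the wrapped runtime once per batch item" pattern: `forward` dispatches to `_forward_batch` when a subclass provides one, and otherwise falls back to `_forward_loop`, which drives the three abstract hooks. A hypothetical minimal subclass (not part of the package) showing how the hooks fit together:

```python
import torch

from optimum.rbln.transformers.utils.rbln_runtime_wrapper import LoopProcessor


class PerImageEncoder(LoopProcessor):
    """Toy subclass: encodes a batch of images one item at a time."""

    def _get_batch_size(self, pixel_values: torch.Tensor) -> int:
        return pixel_values.shape[0]

    def _prepare_inputs_before_loop(self, pixel_values: torch.Tensor):
        return {}  # nothing is shared across iterations in this toy example

    def _prepare_inputs_for_iteration(self, index, common_inputs, pixel_values: torch.Tensor):
        # Keep the batch dimension so the wrapped runtime always sees batch size 1.
        return [pixel_values[index : index + 1]], {}

    def _process_outputs(self, outputs, **kwargs):
        return torch.cat(outputs, dim=0) if outputs else torch.empty(0)


# encoder = PerImageEncoder(compiled_vision_runtime)  # an RBLNModel or rebel.Runtime
# embeddings = encoder(pixel_values)                  # no _forward_batch override, so this loops
```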

optimum/rbln/utils/submodule.py
@@ -17,7 +17,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union
 
 from transformers import PretrainedConfig
 
-from ..configuration_utils import RBLNModelConfig
+from ..configuration_utils import RBLNModelConfig, get_rbln_config_class
 from ..utils.model_utils import get_rbln_model_cls
 
 
@@ -41,6 +41,15 @@ class SubModulesMixin:
         for submodule_meta, submodule in zip(self._rbln_submodules, rbln_submodules):
             setattr(self, submodule_meta["name"], submodule)
 
+    @classmethod
+    def _get_submodule_config_class(
+        cls, cls_name: str, submodule_rbln_config: Dict[str, Any]
+    ) -> Type[RBLNModelConfig]:
+        if isinstance(submodule_rbln_config, dict) and "cls_name" in submodule_rbln_config:
+            config_cls_name = submodule_rbln_config["cls_name"]
+            return get_rbln_config_class(config_cls_name)
+        return get_rbln_config_class(f"RBLN{cls_name}Config")
+
     @classmethod
     def _update_submodule_config(
         cls,
@@ -69,12 +78,19 @@ class SubModulesMixin:
             cls_name = torch_submodule.__class__.__name__
             submodule_cls: Type["RBLNModel"] = get_rbln_model_cls(f"RBLN{cls_name}")
             submodule_rbln_config = getattr(rbln_config, submodule_name) or {}
+            submodule_config_cls = cls._get_submodule_config_class(cls_name, submodule_rbln_config)
 
             if isinstance(submodule_rbln_config, dict):
-                submodule_rbln_config_class = submodule_cls.get_rbln_config_class()
-                submodule_rbln_config = submodule_rbln_config_class(**submodule_rbln_config)
-                setattr(rbln_config, submodule_name, submodule_rbln_config)
-
+                filtered_kwargs = rbln_config.filter_parameters(submodule_config_cls, submodule_rbln_config)
+                filtered_kwargs["cls_name"] = submodule_config_cls.__name__
+                submodule_rbln_config = submodule_config_cls(**filtered_kwargs)
+            elif not isinstance(submodule_rbln_config, submodule_config_cls):
+                config_dict = {k: v for k, v in submodule_rbln_config.__dict__.items() if not k.startswith("_")}
+                filtered_kwargs = rbln_config.filter_parameters(submodule_config_cls, config_dict)
+                filtered_kwargs["cls_name"] = submodule_config_cls.__name__
+                submodule_rbln_config = submodule_config_cls(**filtered_kwargs)
+
+            setattr(rbln_config, submodule_name, submodule_rbln_config)
             submodule_rbln_config = submodule_cls._update_submodule_config(model, submodule_rbln_config, preprocessors)
 
             rbln_submodule = submodule_cls.from_model(
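
With this change a submodule's config class is resolved explicitly: if the user-supplied dict already names one via `cls_name`, that class is looked up with `get_rbln_config_class`; otherwise the `RBLN{ModelClass}Config` naming convention is used, and the remaining keys are filtered against the chosen class before it is instantiated. Roughly, for a hypothetical SigLIP vision submodule (submodule name and values are illustrative):

```python
# Illustrative only: how a submodule entry in rbln_config selects its config class.

# 1) Explicit: the dict names the config class via "cls_name".
rbln_config = {
    "vision_tower": {"cls_name": "RBLNSiglipVisionModelConfig", "output_hidden_states": True},
}

# 2) Implicit: no "cls_name", so the class falls back to the RBLN{ModelClass}Config
#    convention, e.g. RBLNSiglipVisionModelConfig for a SiglipVisionModel submodule.
rbln_config = {
    "vision_tower": {"output_hidden_states": True},
}
```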

{optimum_rbln-0.9.1.dist-info → optimum_rbln-0.9.2a0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: optimum-rbln
-Version: 0.9.1
+Version: 0.9.2a0
 Summary: Optimum RBLN is the interface between the HuggingFace Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
 Project-URL: Homepage, https://rebellions.ai
 Project-URL: Documentation, https://docs.rbln.ai
@@ -29,7 +29,7 @@ Requires-Dist: packaging>=24.1
 Requires-Dist: torch==2.7.0
 Requires-Dist: torchaudio<=2.7.0
 Requires-Dist: torchvision<=0.22.0
-Requires-Dist: transformers==4.51.3
+Requires-Dist: transformers==4.53.1
 Description-Content-Type: text/markdown
 
 

{optimum_rbln-0.9.1.dist-info → optimum_rbln-0.9.2a0.dist-info}/RECORD
@@ -1,6 +1,6 @@
 optimum/rbln/__init__.py,sha256=DAJM5PWAYFiWVlyxVXUvj3CaFOEhX1yhEfhIt1LxL-A,18714
-optimum/rbln/__version__.py,sha256=LwGndsRSpclYq-j3wgRr2nzOXwUYj0Jtg7Kof7R0BEw,704
-optimum/rbln/configuration_utils.py,sha256=XYXqbriu7DZkoCqicM1iXzqrSd7BVO-e2vA-8WSGgN0,36248
+optimum/rbln/__version__.py,sha256=N_fc72G3EaGWKHzHnNEtPoHLzurnsMp0Or2eVdiYVTM,712
+optimum/rbln/configuration_utils.py,sha256=nAzDwa_2-X32bikEJ2zbRZIk68s50hAP7gTmKN07Ig0,38080
 optimum/rbln/modeling.py,sha256=IZ8loagxm--2BcqTl16KRHUR3hkccpeaY2grOWOtwqk,14473
 optimum/rbln/modeling_base.py,sha256=poXfHZCAlFd28MY9dvMi7tC2RytLx77Lee2XGS_KeZg,27684
 optimum/rbln/diffusers/__init__.py,sha256=1tgU_xWA42BmInqu9bBz_5R_E9TGhhK3mI06YlaiTLg,7232
@@ -16,12 +16,12 @@ optimum/rbln/diffusers/configurations/models/configuration_transformer_sd3.py,sh
 optimum/rbln/diffusers/configurations/models/configuration_unet_2d_condition.py,sha256=11kOnB-o5MjCJHDQvEhGxXpBNZLC1r3HU3-wiZ_3bL0,3611
 optimum/rbln/diffusers/configurations/models/configuration_vq_model.py,sha256=GbJKrTOUdvpcGQ786IIKF0rqkRrvZh_E5n3o9bLN8uY,3240
 optimum/rbln/diffusers/configurations/pipelines/__init__.py,sha256=RfJXQiYvgGc3Rp7JYk5s0AQd0XB5JCAb37_riGWQAYg,1268
-optimum/rbln/diffusers/configurations/pipelines/configuration_controlnet.py,sha256=6ObO1b3ZAwf9e6oQOeHh0sk8NzFXayDCXmxDBx-uNwc,14183
-optimum/rbln/diffusers/configurations/pipelines/configuration_cosmos.py,sha256=eirI9X0YubVQ85hd-BEH77I8SZ9hlnPJGGNpj-MO3eQ,4596
-optimum/rbln/diffusers/configurations/pipelines/configuration_kandinsky2_2.py,sha256=3vDGOgd3sNhh3D5mXVAljXXiwiUyZz7h9Z5752uB880,16341
-optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion.py,sha256=6kvLFH1vN59Pc_rHAyEcT3KXoqulxFiaFcZakyBv9hs,6572
-optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_3.py,sha256=qkbSG6eNhDQK7z-4FOBKf-zSsEdjtQH9PHkvlAuk1zE,7904
-optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_xl.py,sha256=hZOr0nVApS-HrwVim6S2_5gO0Zb8fburnaJXe8Xvqe4,7020
+optimum/rbln/diffusers/configurations/pipelines/configuration_controlnet.py,sha256=ALwMwJret_KQKXld-Y-ZRJh_Wp4qza1xZsbOEUJutg0,14525
+optimum/rbln/diffusers/configurations/pipelines/configuration_cosmos.py,sha256=niblhj4brobRCHF0NoVfdIhChUsup2O9IvFddnopIUc,4701
+optimum/rbln/diffusers/configurations/pipelines/configuration_kandinsky2_2.py,sha256=_lNqy0OFA4sIsn3tOvYwR6X_-y3JTN67ePTWBkt2pFc,16598
+optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion.py,sha256=NnisLdUkW3QQfzn8ZtWUbBwXZF6JF3vj9UI0MthH0EM,6625
+optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_3.py,sha256=vDb7vqH2XycB86L5JH-ofXfdngU2BseJvVw7YXrzmgg,8039
+optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_xl.py,sha256=qX6-HvOt8SBstEeigWWcor-z2bmyoqAucjRirNfma5o,7161
 optimum/rbln/diffusers/models/__init__.py,sha256=4xHGuJ0HmZXBt6Xc8rVmGKvvQ366yxJarV5Vg_4-cso,1541
 optimum/rbln/diffusers/models/controlnet.py,sha256=6owledPe9BXhbZOG8lbuuYvpBU0UrQV7zmat6SoMXOM,10585
 optimum/rbln/diffusers/models/autoencoders/__init__.py,sha256=kpINW4bWwy-Q2doPME4nZ8gXRmkK2DRv2kDdbZuQ3m8,738
@@ -44,7 +44,7 @@ optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py,sha25
 optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py,sha256=2w6dmGQuBWqVoocn27z2yMkG7fL7_MVDBcQNJPJsRXU,45300
 optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py,sha256=HX56itORMqXLjZcwv25C-_z3JyZn3v6BpfIjsrDO3mE,46640
 optimum/rbln/diffusers/pipelines/cosmos/__init__.py,sha256=h2j6S8IJPVHeNU8qmW9vyXMgHBw0d7kQcuMAA5YoHPU,795
-optimum/rbln/diffusers/pipelines/cosmos/configuration_cosmos_guardrail.py,sha256=NsEaRYgiddopL9OE_9CsZqzbikkzoxUoRQaRML-Jqrw,3709
+optimum/rbln/diffusers/pipelines/cosmos/configuration_cosmos_guardrail.py,sha256=BRHWXCxRO5l0YTFTBSkS4sd5Hx8YeuHB14Fr2copYFY,3747
 optimum/rbln/diffusers/pipelines/cosmos/cosmos_guardrail.py,sha256=Hmklj-sGNtUst6sTLLDXyQzt7zFoNIHytZpPQ-V1CPw,16676
 optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_text2world.py,sha256=biveGIpJVGL0rH_YLgynvt47QQA41QdjRtY7G32jDXY,5638
 optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_video2world.py,sha256=96dHWMIe1x_Q3G64fsNhulfjmd_PrSXRt0GwgNjYMlg,5645
@@ -94,21 +94,21 @@ optimum/rbln/transformers/models/bert/bert_architecture.py,sha256=cZgf-B-FV8qbeJ
 optimum/rbln/transformers/models/bert/configuration_bert.py,sha256=nEZnX6LXpLKWaoPEd4pWSysw9h-PLb2ld0ibC3dcJ7w,1611
 optimum/rbln/transformers/models/bert/modeling_bert.py,sha256=7MQZS11k4__oyeni5ek2SzRf-gtD3_hMKl_oOzN7_XQ,2263
 optimum/rbln/transformers/models/blip_2/__init__.py,sha256=L01gPXcUCa8Vg-bcng20vZvBIN_jlqCzwUSFuq0QOag,855
-optimum/rbln/transformers/models/blip_2/configuration_blip_2.py,sha256=2pr8yPaV_GeQuOlTLceS7Ph18x4Kd7D6hbsLu75Hta8,3355
-optimum/rbln/transformers/models/blip_2/modeling_blip_2.py,sha256=eJEKk0TRHT9kqoG80xFlGo3PGamzoBDoQSZJPXmvgPE,16283
+optimum/rbln/transformers/models/blip_2/configuration_blip_2.py,sha256=8eSilBwcPWQhBg-oilCmDPo-DN6V5lpLMlTB7WPknII,4630
+optimum/rbln/transformers/models/blip_2/modeling_blip_2.py,sha256=_IGDGS7VJfw5Qi5AJYCjduRVlWHGADhSAiTY42Jppys,16389
 optimum/rbln/transformers/models/clip/__init__.py,sha256=TLeXDqcFK6M6v9x7Xr64kBbqGu3hFHM7p754dQ8UVQc,938
 optimum/rbln/transformers/models/clip/configuration_clip.py,sha256=Ea8TCVmMayydfw9p4kTP3UdtvoaPWf4Z4claB61JuE4,4175
-optimum/rbln/transformers/models/clip/modeling_clip.py,sha256=sIjZz1HtINZFDFyimbUVgbdUzxmoXHG3mqA5ROTir9w,13148
+optimum/rbln/transformers/models/clip/modeling_clip.py,sha256=BLAYJAtv_2ZnKOlZ8iDBr2Su3bKM_eMWeUSK9MOaj7I,13198
 optimum/rbln/transformers/models/colpali/__init__.py,sha256=n3rueXT_oC0N8myoZiic0YkVK24CW5hZBPa-0L8so6Y,119
-optimum/rbln/transformers/models/colpali/colpali_architecture.py,sha256=Tl3_fA7faM3odc-bYp-zGjvBSsDFcrJ1m1d8ogFnk7s,8052
-optimum/rbln/transformers/models/colpali/configuration_colpali.py,sha256=U_UeaBnD_sQoDQznnbO1_SCHwz3yt3bnVJF1My0krxQ,2617
-optimum/rbln/transformers/models/colpali/modeling_colpali.py,sha256=Qw6REaRFFj0_BYGsUn-rLQY7eEi8QAioYwEqLKUdTXE,17542
+optimum/rbln/transformers/models/colpali/colpali_architecture.py,sha256=TCOW3v5l9fIt1uIFtWa8ZAxq1cdCER8gXWjmbLQD20M,8079
+optimum/rbln/transformers/models/colpali/configuration_colpali.py,sha256=_HuZBVV-ponml95UapkYpRhffZy53-9jSZknx7hID7o,3348
+optimum/rbln/transformers/models/colpali/modeling_colpali.py,sha256=YnAUMTbcX2idRLMkZ3Qiv-GZFodfCun5-3sDfiIz77s,17955
 optimum/rbln/transformers/models/decoderonly/__init__.py,sha256=w3VZOIBYaHXVdnuhK4y0zWAj0IAv7_5LGTJYaz9oYmI,1056
 optimum/rbln/transformers/models/decoderonly/configuration_decoderonly.py,sha256=yUhB9yMFVbq7B9WtAM6m8cQ4K0U3a2N-TU34N5zvjRM,15180
 optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py,sha256=ySsiU0Acj5enJW-SqtFMAfBeH0HeqlhCd78QlpKJNQw,42780
 optimum/rbln/transformers/models/decoderonly/decoderonly_runtime_utils.py,sha256=v3mfIlQImQkYYr-rPn7rQR3GYdVUhALRttEduLI7H9c,20012
 optimum/rbln/transformers/models/decoderonly/generation_decoderonly.py,sha256=zabSgQd2VzHhkpbhUFW5Z-CjYB1JvSJOb5yXKjXCQV0,4326
-optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py,sha256=dAHV9NgdpXHyTJGT0lieXOB3Pzi_NPlR4rqmRtmAWzM,32412
+optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py,sha256=1umcuZg_ifIyVBfp_6oHQJvZsp_y2UCiv45-rv89_VA,32434
 optimum/rbln/transformers/models/depth_anything/__init__.py,sha256=xvPSIriMJWyNeVYoVB1Z7YqB4kkHOIkaHq7loNps-dk,756
 optimum/rbln/transformers/models/depth_anything/configuration_depth_anything.py,sha256=JujBVEUa_zZDXNPr1y-B_PhK5SgFFcY8Ib4EoGjjtmE,989
 optimum/rbln/transformers/models/depth_anything/modeling_depth_anything.py,sha256=tTmsVaW9Wb2WD3nKRLwp7swn3hbMvgwUEJwwVIfNYEc,1008
@@ -127,10 +127,10 @@ optimum/rbln/transformers/models/gemma/configuration_gemma.py,sha256=H1nVp8HBJxx
 optimum/rbln/transformers/models/gemma/gemma_architecture.py,sha256=4Ry2pFfWg0sVijgTP9SYt1vwJr8DsCVcusg-z0ePX5c,943
 optimum/rbln/transformers/models/gemma/modeling_gemma.py,sha256=AsuFnrwZcRCKtF39BpHHNea0S34N2lNWKV4qZimmY8I,4170
 optimum/rbln/transformers/models/gemma3/__init__.py,sha256=6rugk3615SEt4lh7gduo_J9VyGiSReuEIvL0Uno0eaI,790
-optimum/rbln/transformers/models/gemma3/configuration_gemma3.py,sha256=Hn9WKxTOwciNvBYkoY2y-icQJ6MttMapSyWbNpIxw78,4522
+optimum/rbln/transformers/models/gemma3/configuration_gemma3.py,sha256=NJJfarzbWJc3pm0XvICN7D0FFF9nqidagIEoOvYLixQ,4696
 optimum/rbln/transformers/models/gemma3/gemma3_architecture.py,sha256=fpLDAXCe5paWVsfc0tL59JkRQMRF-WNgIzOIb_QpSLU,6191
 optimum/rbln/transformers/models/gemma3/gemma3_runtime_utils.py,sha256=vYQ9sjRlkfamxZca_hVMQI0ylKeExsV02gOWaYVMjyg,9640
-optimum/rbln/transformers/models/gemma3/modeling_gemma3.py,sha256=7-JlJg9GeMSbtutjSYEY3ZyRJIxz-zqHk7Shshr8Pv4,24228
+optimum/rbln/transformers/models/gemma3/modeling_gemma3.py,sha256=WQ5qRZcM2EYLNqogjy-I04abwZXLZilCGFM4vO-MH4c,25381
 optimum/rbln/transformers/models/gpt2/__init__.py,sha256=SsawHMStE3wYRtqkH5EvdTFkCdX0LLmp-QSKFhEBrHo,740
 optimum/rbln/transformers/models/gpt2/configuration_gpt2.py,sha256=iGdHfzG7plekZcIz-Z5U8lRE4SB8gbJJNcFQJ9l8Myg,1533
 optimum/rbln/transformers/models/gpt2/gpt2_architecture.py,sha256=MyAWReXmyuHnDpW5HI_TI7psyJZxLujZ9KT5XnNm7nA,2802
@@ -140,18 +140,18 @@ optimum/rbln/transformers/models/grounding_dino/configuration_grounding_dino.py,
 optimum/rbln/transformers/models/grounding_dino/grounding_dino_architecture.py,sha256=2BGhyKa7x6fiiZPaLy_S7zKr2NOdJnMLFMf6CEcegGE,26674
 optimum/rbln/transformers/models/grounding_dino/modeling_grounding_dino.py,sha256=bXAOs2QH4sy2UFoFLUSM6u1_VHouUT5COERLQX20F6Y,46897
 optimum/rbln/transformers/models/idefics3/__init__.py,sha256=ulxE7HEfXsNJhd25J9Fvi6vggo9aZH9sLKJjWB6LlzQ,814
-optimum/rbln/transformers/models/idefics3/configuration_idefics3.py,sha256=U3aDw2InB4BIHoujCaK3Q3pzyT_V6RepSdDhBU31WJ0,2773
-optimum/rbln/transformers/models/idefics3/modeling_idefics3.py,sha256=UqKUVZ6pZjP2VMfBa3-dJkLNPDqr3H1wHiOo9LPucjs,19636
+optimum/rbln/transformers/models/idefics3/configuration_idefics3.py,sha256=7IENNxflZL8ZH3YRqtCXfYdKs-RdUeGiPzq-C03te_s,3679
+optimum/rbln/transformers/models/idefics3/modeling_idefics3.py,sha256=ajgLsQsU4n5NwcCqZvG7gswXhjPTHeyTjwjhje5Z7xI,19936
 optimum/rbln/transformers/models/llama/__init__.py,sha256=6tgx9-qlM5r9ouoeZEouVRNLs3r6Sku-cuXNkyfeFHc,746
 optimum/rbln/transformers/models/llama/configuration_llama.py,sha256=_uxfH5kaGbeJTMJfESYn0Vg3OEkINS2ShGtVQTeOcs4,1578
 optimum/rbln/transformers/models/llama/llama_architecture.py,sha256=S7MCPfyjG5eUqgaS-QNBB0ApUD6wnb5fR0RHq7k7-pA,728
 optimum/rbln/transformers/models/llama/modeling_llama.py,sha256=uRxEXYhHOuEwPjBo_Ps3eFU1uwScasla6P8HwsQgAu0,4214
 optimum/rbln/transformers/models/llava/__init__.py,sha256=FaVLgBIqKGjT_nvwYO9k9BVqrzH_Ym3DfjGRCSUhG2s,734
-optimum/rbln/transformers/models/llava/configuration_llava.py,sha256=CmvuNvBjb27FNLPYlZRWfPXFOFP7jltP9ucGK_YAeHA,2544
-optimum/rbln/transformers/models/llava/modeling_llava.py,sha256=3k6lF3k0gjy4zptWH_fiKM7p7kUE1tzhBpwcitm5M1s,17268
+optimum/rbln/transformers/models/llava/configuration_llava.py,sha256=c1rie8LCypxlsT7SNjZJE07_xCLAasV4EBs97o1757Q,2998
+optimum/rbln/transformers/models/llava/modeling_llava.py,sha256=R_wDTJDNVweivDIsZ0eDd5Z21NEcH9O3xdTV9z8mpjQ,20239
 optimum/rbln/transformers/models/llava_next/__init__.py,sha256=kDXKr7wMkp1XqE__DER2B8kQF_NYMxhzsQS5ytGg56I,752
-optimum/rbln/transformers/models/llava_next/configuration_llava_next.py,sha256=cJSncotAJYLMgKUXir7FDVB5avZW_uM65cXtku2pDRw,2759
-optimum/rbln/transformers/models/llava_next/modeling_llava_next.py,sha256=E3w3-kTBzmcG0Xa3LjlSMlO8vFOFhNGoNIWCeQq-_G0,27092
+optimum/rbln/transformers/models/llava_next/configuration_llava_next.py,sha256=Sz8L8p_23T7xw7pkUmW5pyK_wZclph1p_kQYbslc8m8,2708
+optimum/rbln/transformers/models/llava_next/modeling_llava_next.py,sha256=ZGVTOaAOpUOvyVLfSsHpfje4W8FDR_PV6MhS9QNj-Uk,21230
 optimum/rbln/transformers/models/midm/__init__.py,sha256=IC3FETwgYinbp3wDj7tp4zIHJhbqM-c6GfTRdYcMNj8,913
 optimum/rbln/transformers/models/midm/configuration_midm.py,sha256=DxhcSJlApxfi00XxYmSkKZ6bY9vfLXT0zh-oMKkZot0,1365
 optimum/rbln/transformers/models/midm/midm_architecture.py,sha256=RlkmNhaWE5h_awt9aTtR8VZfshNTah0IoUfD2Z9vfxI,5055
@@ -174,7 +174,7 @@ optimum/rbln/transformers/models/phi/modeling_phi.py,sha256=r7B0NlqwIGjm-MmE-h5_
 optimum/rbln/transformers/models/phi/phi_architecture.py,sha256=ygJIJvn20bnxE9nHKo4CBW9_1FJsz7MEVolB5asTmI0,3684
 optimum/rbln/transformers/models/pixtral/__init__.py,sha256=fhclVAWnIDsfMfC-TW6mYrJXxgyehlLaadK64LOShH4,716
 optimum/rbln/transformers/models/pixtral/configuration_pixtral.py,sha256=b79zkJB1jzHx4S1wTe-Ju_Yel_PS5Q8bfmlQPzkchKU,1677
-optimum/rbln/transformers/models/pixtral/modeling_pixtral.py,sha256=4H-Mz8OdDgRUW30N5PnOf6vFqtez2iKgoJIXpdEYvXU,12361
+optimum/rbln/transformers/models/pixtral/modeling_pixtral.py,sha256=P1lzi6JOTB43nBfCOonUDYhIXoMq6DnQpcvGfOO7ZP8,12252
 optimum/rbln/transformers/models/pixtral/pixtral_architecture.py,sha256=s-6C9DtHmSZEGJXo5b95RwZE2A5aR6ELMHlj7aK6CIg,2950
 optimum/rbln/transformers/models/qwen2/__init__.py,sha256=h9dWJ3HX4xspMLt44g7r3UGU8QL03Ynmz_Mi3Vlu6UA,746
 optimum/rbln/transformers/models/qwen2/configuration_qwen2.py,sha256=tTWcPOk_ycZvdSPlal9S5elTmWZAX2BbpZP5Ok2ySwI,1567
@@ -182,12 +182,12 @@ optimum/rbln/transformers/models/qwen2/modeling_qwen2.py,sha256=VOboPJF1rvvSVWkH
 optimum/rbln/transformers/models/qwen2/qwen2_architecture.py,sha256=XlNAMYAcDLohnSAhIFGKOPuCB5XLgzYs5ABWdeQSaZs,720
 optimum/rbln/transformers/models/qwen2_5_vl/__init__.py,sha256=rAW3DKQUzGL6EMwa5r1iLu94yhpiZpk6zfoD7TtYXrc,865
 optimum/rbln/transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py,sha256=WHLH72i7Pe16Ee1waMixMsR3eD6TsMGN08QD82qdVvw,6162
-optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py,sha256=hRvA37sPFC9xH1FqnFbtHS9rQOPwAvLYg4zl4oEyK-w,26639
-optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py,sha256=i_UUWhKoFjJ5CCpgeWicqABM23TxMEKPQ354LoZ6iUU,7445
+optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py,sha256=pI1TNDMO-ZiqXtoHboxLlqfplZbRh22lT1gxhqy6Jtg,26939
+optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py,sha256=y0W5qxrke7JbFNkPTkEOC8TvKnAYZP9bNsQK_IYvxnA,8340
 optimum/rbln/transformers/models/qwen2_vl/__init__.py,sha256=O3t6zKda92CnZDzEnz_dcisMOQ71-OOJxElXzKCH5e0,849
 optimum/rbln/transformers/models/qwen2_vl/configuration_qwen2_vl.py,sha256=mi5CqSKZ77G5Fib3g8a86_4CEB6lb-qJOhDnSqslvNk,4714
-optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py,sha256=10NR0LPJFICpPUrINpyMDWVx19HN8sXPpeAiVhE_k2k,20130
-optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py,sha256=EZlCuSRTIpSAGEjtDi4SY1V9RRdtgg76ie5jqec1UuI,4833
+optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py,sha256=YO8cKBEb7dU9D--gidYsPyhS2arOwgVqDe3tLlGHdx4,20424
+optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py,sha256=_5Erjk7udq7YFVVtx4XeWaIzpDlEZKJPu2bBSVBGfKE,5728
 optimum/rbln/transformers/models/qwen3/__init__.py,sha256=tI4KwvXpD35dUUaa8aLUXpWoU9gJGcmKXeywOlH14ZE,746
 optimum/rbln/transformers/models/qwen3/configuration_qwen3.py,sha256=BFRPggnH4VlsXlOa19C6KAID-bPgQ8ooQ29dvogh5zk,2102
 optimum/rbln/transformers/models/qwen3/modeling_qwen3.py,sha256=S05efusxjXJhMMYztstGes6ZbqkSr5I4fHFaLSYVG8c,5760
@@ -204,7 +204,7 @@ optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py,sha256=G7Rkx4paSDlS
 optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py,sha256=jmBgj7BkUS_S-T-9DI53rE3KXUHSCoIofr7k5JDVPrU,20024
 optimum/rbln/transformers/models/siglip/__init__.py,sha256=X1Fc1GUnJ2EIxFx45nbeoW-T2t0OyP3W73C0HD8Vowo,712
 optimum/rbln/transformers/models/siglip/configuration_siglip.py,sha256=Fy-ANF91bQno_QVd4ZpyRs-uNgC_XRyBRScBg2uKM6w,3029
-optimum/rbln/transformers/models/siglip/modeling_siglip.py,sha256=1TyRaxmhp6mg6UfhQTbZhW26013TE3nVnroYG7EROcU,8033
+optimum/rbln/transformers/models/siglip/modeling_siglip.py,sha256=9_CeyL9Pgd-ZU7XfqMyoK-mP-ZYL-tia7YyI4wxgKMo,7509
 optimum/rbln/transformers/models/swin/__init__.py,sha256=gUsLDB8ceNxt53Cf69OT32JuZoRdmmIsRfjRdHTLDd0,698
 optimum/rbln/transformers/models/swin/configuration_swin.py,sha256=JE4oMdPhJmRwXxKUWQ3KHccthDLEcDiXEzjMcFx71K0,1690
 optimum/rbln/transformers/models/swin/modeling_swin.py,sha256=npQgTCEkonG41HzHzEk-a13NFLJHA-K82HFW2VyR0xc,13968
@@ -232,6 +232,7 @@ optimum/rbln/transformers/models/xlm_roberta/configuration_xlm_roberta.py,sha256
 optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=EZd3flRUEE38DYtdqEnG70LV7fHhkamRZV51xrVyjYI,1093
 optimum/rbln/transformers/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 optimum/rbln/transformers/utils/rbln_quantization.py,sha256=pORshQUgTInNaibUtd0HL-T8bKW5wuulZs2q0Oshppc,21659
+optimum/rbln/transformers/utils/rbln_runtime_wrapper.py,sha256=l_-zWpRrp6hp-tDANTrEbspIZH-AUSi_jNJICns_QgE,2672
 optimum/rbln/utils/__init__.py,sha256=ieDBT2VFTt2E0M4v_POLBpuGW9LxSydpb_DuPd6PQqc,712
 optimum/rbln/utils/decorator_utils.py,sha256=xu-TrsNi33SRC2a7DBsyoo6-pEQxWKZPZSmM9QlDe2Y,3745
 optimum/rbln/utils/depreacate_utils.py,sha256=uKxl3ENUCNaZXPnaDQvNxrH8hUIWdBWfZH6BM7ZV__4,385
@@ -241,8 +242,8 @@ optimum/rbln/utils/logging.py,sha256=VKKBmlQSdg6iZCGmAXaWYiW67K84jyp1QJhLQSSjPPE
 optimum/rbln/utils/model_utils.py,sha256=4k5879Kh75m3x_vS4-qOGfqsOiAvc2kdNFFfvsFvz3k,1748
 optimum/rbln/utils/runtime_utils.py,sha256=R6uXDbeJP03-FWdd4vthNe2D4aCra5n12E3WB1ifiGM,7933
 optimum/rbln/utils/save_utils.py,sha256=hG5uOtYmecSXZuGTvCXsTM-SiyZpr5q3InUGCCq_jzQ,3619
-optimum/rbln/utils/submodule.py,sha256=60NGLFvnhjP1DJg1opdb-FVQDsthcLCwWjW_1WQaasU,5280
-optimum_rbln-0.9.1.dist-info/METADATA,sha256=AtptvW3UA4d23uu7EilHiZjQjo2FZOIqmlc7lBsAsho,5348
-optimum_rbln-0.9.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-optimum_rbln-0.9.1.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-optimum_rbln-0.9.1.dist-info/RECORD,,
+optimum/rbln/utils/submodule.py,sha256=SKLnM3KsX8_rv3HauO4oB2-JSjzuadQjRwo_BhMUzLI,6362
+optimum_rbln-0.9.2a0.dist-info/METADATA,sha256=WxK3HKWyo0Ye9NAY7BsxDuPuy0Ykkt4Z90RB7XlCeU8,5350
+optimum_rbln-0.9.2a0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+optimum_rbln-0.9.2a0.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+optimum_rbln-0.9.2a0.dist-info/RECORD,,