optimum-rbln 0.8.0.post2__py3-none-any.whl → 0.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- optimum/rbln/__init__.py +24 -0
- optimum/rbln/__version__.py +2 -2
- optimum/rbln/configuration_utils.py +45 -33
- optimum/rbln/diffusers/__init__.py +21 -1
- optimum/rbln/diffusers/configurations/__init__.py +4 -0
- optimum/rbln/diffusers/configurations/models/__init__.py +2 -0
- optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl.py +9 -2
- optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl_cosmos.py +84 -0
- optimum/rbln/diffusers/configurations/models/configuration_controlnet.py +4 -2
- optimum/rbln/diffusers/configurations/models/configuration_prior_transformer.py +9 -2
- optimum/rbln/diffusers/configurations/models/configuration_transformer_cosmos.py +70 -0
- optimum/rbln/diffusers/configurations/models/configuration_transformer_sd3.py +4 -2
- optimum/rbln/diffusers/configurations/models/configuration_unet_2d_condition.py +9 -2
- optimum/rbln/diffusers/configurations/models/configuration_vq_model.py +9 -2
- optimum/rbln/diffusers/configurations/pipelines/__init__.py +1 -0
- optimum/rbln/diffusers/configurations/pipelines/configuration_controlnet.py +29 -9
- optimum/rbln/diffusers/configurations/pipelines/configuration_cosmos.py +114 -0
- optimum/rbln/diffusers/configurations/pipelines/configuration_kandinsky2_2.py +28 -12
- optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion.py +18 -6
- optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_3.py +13 -6
- optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_xl.py +12 -6
- optimum/rbln/diffusers/modeling_diffusers.py +72 -65
- optimum/rbln/diffusers/models/__init__.py +4 -0
- optimum/rbln/diffusers/models/autoencoders/__init__.py +1 -0
- optimum/rbln/diffusers/models/autoencoders/autoencoder_kl.py +17 -1
- optimum/rbln/diffusers/models/autoencoders/autoencoder_kl_cosmos.py +219 -0
- optimum/rbln/diffusers/models/autoencoders/vae.py +45 -8
- optimum/rbln/diffusers/models/autoencoders/vq_model.py +17 -1
- optimum/rbln/diffusers/models/controlnet.py +14 -8
- optimum/rbln/diffusers/models/transformers/__init__.py +1 -0
- optimum/rbln/diffusers/models/transformers/prior_transformer.py +10 -0
- optimum/rbln/diffusers/models/transformers/transformer_cosmos.py +321 -0
- optimum/rbln/diffusers/models/transformers/transformer_sd3.py +2 -0
- optimum/rbln/diffusers/models/unets/unet_2d_condition.py +11 -1
- optimum/rbln/diffusers/pipelines/__init__.py +10 -0
- optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py +1 -4
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py +7 -0
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py +7 -0
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py +7 -0
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py +7 -0
- optimum/rbln/diffusers/pipelines/cosmos/__init__.py +17 -0
- optimum/rbln/diffusers/pipelines/cosmos/configuration_cosmos_guardrail.py +102 -0
- optimum/rbln/diffusers/pipelines/cosmos/cosmos_guardrail.py +455 -0
- optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_text2world.py +98 -0
- optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_video2world.py +98 -0
- optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2.py +7 -0
- optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +48 -27
- optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_img2img.py +7 -0
- optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_inpaint.py +7 -0
- optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_prior.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_img2img.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_inpaint.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py +7 -0
- optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_inpaint.py +7 -0
- optimum/rbln/modeling.py +71 -37
- optimum/rbln/modeling_base.py +63 -109
- optimum/rbln/transformers/__init__.py +41 -47
- optimum/rbln/transformers/configuration_generic.py +16 -13
- optimum/rbln/transformers/modeling_generic.py +21 -22
- optimum/rbln/transformers/modeling_rope_utils.py +5 -2
- optimum/rbln/transformers/models/__init__.py +54 -4
- optimum/rbln/transformers/models/{wav2vec2/configuration_wav2vec.py → audio_spectrogram_transformer/__init__.py} +2 -4
- optimum/rbln/transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py +21 -0
- optimum/rbln/transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py +28 -0
- optimum/rbln/transformers/models/auto/auto_factory.py +35 -12
- optimum/rbln/transformers/models/bart/bart_architecture.py +14 -1
- optimum/rbln/transformers/models/bart/configuration_bart.py +12 -2
- optimum/rbln/transformers/models/bart/modeling_bart.py +16 -7
- optimum/rbln/transformers/models/bert/configuration_bert.py +18 -3
- optimum/rbln/transformers/models/bert/modeling_bert.py +24 -0
- optimum/rbln/transformers/models/blip_2/configuration_blip_2.py +15 -3
- optimum/rbln/transformers/models/blip_2/modeling_blip_2.py +50 -4
- optimum/rbln/transformers/models/clip/configuration_clip.py +15 -5
- optimum/rbln/transformers/models/clip/modeling_clip.py +38 -13
- optimum/rbln/transformers/models/colpali/__init__.py +2 -0
- optimum/rbln/transformers/models/colpali/colpali_architecture.py +221 -0
- optimum/rbln/transformers/models/colpali/configuration_colpali.py +68 -0
- optimum/rbln/transformers/models/colpali/modeling_colpali.py +383 -0
- optimum/rbln/transformers/models/decoderonly/configuration_decoderonly.py +111 -14
- optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py +102 -35
- optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +253 -195
- optimum/rbln/transformers/models/distilbert/__init__.py +19 -0
- optimum/rbln/transformers/models/distilbert/configuration_distilbert.py +24 -0
- optimum/rbln/transformers/models/distilbert/modeling_distilbert.py +27 -0
- optimum/rbln/transformers/models/dpt/configuration_dpt.py +6 -1
- optimum/rbln/transformers/models/dpt/modeling_dpt.py +6 -1
- optimum/rbln/transformers/models/exaone/configuration_exaone.py +24 -1
- optimum/rbln/transformers/models/exaone/exaone_architecture.py +5 -1
- optimum/rbln/transformers/models/exaone/modeling_exaone.py +66 -5
- optimum/rbln/transformers/models/gemma/configuration_gemma.py +24 -1
- optimum/rbln/transformers/models/gemma/gemma_architecture.py +5 -1
- optimum/rbln/transformers/models/gemma/modeling_gemma.py +49 -0
- optimum/rbln/transformers/models/gemma3/configuration_gemma3.py +3 -3
- optimum/rbln/transformers/models/gemma3/gemma3_architecture.py +18 -250
- optimum/rbln/transformers/models/gemma3/modeling_gemma3.py +89 -244
- optimum/rbln/transformers/models/gpt2/configuration_gpt2.py +4 -1
- optimum/rbln/transformers/models/gpt2/gpt2_architecture.py +6 -1
- optimum/rbln/transformers/models/idefics3/configuration_idefics3.py +12 -2
- optimum/rbln/transformers/models/idefics3/modeling_idefics3.py +41 -4
- optimum/rbln/transformers/models/llama/configuration_llama.py +24 -1
- optimum/rbln/transformers/models/llama/modeling_llama.py +49 -0
- optimum/rbln/transformers/models/llava_next/configuration_llava_next.py +10 -2
- optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +32 -4
- optimum/rbln/transformers/models/midm/configuration_midm.py +24 -1
- optimum/rbln/transformers/models/midm/midm_architecture.py +6 -1
- optimum/rbln/transformers/models/midm/modeling_midm.py +66 -5
- optimum/rbln/transformers/models/mistral/configuration_mistral.py +24 -1
- optimum/rbln/transformers/models/mistral/modeling_mistral.py +62 -4
- optimum/rbln/transformers/models/opt/configuration_opt.py +4 -1
- optimum/rbln/transformers/models/opt/modeling_opt.py +10 -0
- optimum/rbln/transformers/models/opt/opt_architecture.py +7 -1
- optimum/rbln/transformers/models/phi/configuration_phi.py +24 -1
- optimum/rbln/transformers/models/phi/modeling_phi.py +49 -0
- optimum/rbln/transformers/models/phi/phi_architecture.py +1 -1
- optimum/rbln/transformers/models/qwen2/configuration_qwen2.py +24 -1
- optimum/rbln/transformers/models/qwen2/modeling_qwen2.py +67 -4
- optimum/rbln/transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py +31 -3
- optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py +54 -25
- optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py +6 -4
- optimum/rbln/transformers/models/resnet/__init__.py +23 -0
- optimum/rbln/transformers/models/resnet/configuration_resnet.py +25 -0
- optimum/rbln/transformers/models/resnet/modeling_resnet.py +26 -0
- optimum/rbln/transformers/models/roberta/__init__.py +24 -0
- optimum/rbln/transformers/{configuration_alias.py → models/roberta/configuration_roberta.py} +12 -28
- optimum/rbln/transformers/{modeling_alias.py → models/roberta/modeling_roberta.py} +14 -28
- optimum/rbln/transformers/models/seq2seq/__init__.py +1 -1
- optimum/rbln/transformers/models/seq2seq/{configuration_seq2seq2.py → configuration_seq2seq.py} +2 -2
- optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py +7 -3
- optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py +41 -3
- optimum/rbln/transformers/models/siglip/configuration_siglip.py +10 -0
- optimum/rbln/transformers/models/siglip/modeling_siglip.py +69 -21
- optimum/rbln/transformers/models/t5/configuration_t5.py +12 -2
- optimum/rbln/transformers/models/t5/modeling_t5.py +56 -8
- optimum/rbln/transformers/models/t5/t5_architecture.py +5 -1
- optimum/rbln/transformers/models/{time_series_transformers → time_series_transformer}/__init__.py +1 -1
- optimum/rbln/transformers/models/{time_series_transformers → time_series_transformer}/configuration_time_series_transformer.py +9 -2
- optimum/rbln/transformers/models/{time_series_transformers/modeling_time_series_transformers.py → time_series_transformer/modeling_time_series_transformer.py} +20 -11
- optimum/rbln/transformers/models/vit/__init__.py +19 -0
- optimum/rbln/transformers/models/vit/configuration_vit.py +24 -0
- optimum/rbln/transformers/models/vit/modeling_vit.py +25 -0
- optimum/rbln/transformers/models/wav2vec2/__init__.py +1 -1
- optimum/rbln/transformers/models/wav2vec2/configuration_wav2vec2.py +26 -0
- optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py +1 -1
- optimum/rbln/transformers/models/whisper/configuration_whisper.py +10 -1
- optimum/rbln/transformers/models/whisper/modeling_whisper.py +41 -17
- optimum/rbln/transformers/models/xlm_roberta/__init__.py +16 -2
- optimum/rbln/transformers/models/xlm_roberta/configuration_xlm_roberta.py +15 -2
- optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py +12 -3
- optimum/rbln/utils/model_utils.py +20 -0
- optimum/rbln/utils/runtime_utils.py +49 -1
- optimum/rbln/utils/submodule.py +6 -8
- {optimum_rbln-0.8.0.post2.dist-info → optimum_rbln-0.8.1.dist-info}/METADATA +6 -6
- optimum_rbln-0.8.1.dist-info/RECORD +211 -0
- optimum_rbln-0.8.0.post2.dist-info/RECORD +0 -184
- /optimum/rbln/transformers/models/{time_series_transformers → time_series_transformer}/time_series_transformers_architecture.py +0 -0
- {optimum_rbln-0.8.0.post2.dist-info → optimum_rbln-0.8.1.dist-info}/WHEEL +0 -0
- {optimum_rbln-0.8.0.post2.dist-info → optimum_rbln-0.8.1.dist-info}/licenses/LICENSE +0 -0
@@ -104,13 +104,44 @@ class RBLNRuntimeDecoder(RBLNPytorchRuntime):
 
 class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin):
     """
-
-    This model inherits from [`RBLNDecoderOnlyModelForCausalLM`]. Check the superclass documentation for the generic methods the library implements for all its models.
+    Whisper model for speech recognition and transcription optimized for RBLN NPU.
 
-
-
+    This model inherits from [`RBLNModel`]. It implements the methods to convert and run
+    pre-trained transformers based Whisper model on RBLN devices by:
     - transferring the checkpoint weights of the original into an optimized RBLN graph,
     - compiling the resulting graph using the RBLN compiler.
+
+    Example (Short form):
+        ```python
+        import torch
+        from transformers import AutoProcessor
+        from datasets import load_dataset
+        from optimum.rbln import RBLNWhisperForConditionalGeneration
+
+        # Load processor and dataset
+        model_id = "openai/whisper-tiny"
+        processor = AutoProcessor.from_pretrained(model_id)
+        ds = load_dataset("hf-internal-testing/librispeech_asr_dummy", "clean", split="validation")
+
+        # Prepare input features
+        input_features = processor(
+            ds[0]["audio"]["array"],
+            sampling_rate=ds[0]["audio"]["sampling_rate"],
+            return_tensors="pt"
+        ).input_features
+
+        # Load and compile model (or load pre-compiled model)
+        model = RBLNWhisperForConditionalGeneration.from_pretrained(
+            model_id=model_id,
+            export=True,
+            rbln_batch_size=1
+        )
+
+        # Generate transcription
+        outputs = model.generate(input_features=input_features, return_timestamps=True)
+        transcription = processor.batch_decode(outputs, skip_special_tokens=True)[0]
+        print(f"Transcription: {transcription}")
+        ```
     """
 
     auto_model_class = AutoModelForSpeechSeq2Seq
@@ -153,11 +184,6 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
         return self.decoder
 
     def __getattr__(self, __name: str) -> Any:
-        """This is the key method to implement RBLN-Whisper.
-        Returns:
-            Any: Whisper's corresponding method
-        """
-
         def redirect(func):
             return lambda *pargs, **kwargs: func(self, *pargs, **kwargs)
 
@@ -204,15 +230,19 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
             if "key_value_states" in name:
                 context.mark_static_address(tensor)
 
-        compiled_encoder =
+        compiled_encoder = cls.compile(
             wrapped_model.encoder,
             enc_compile_config,
+            create_runtimes=rbln_config.create_runtimes,
+            device=rbln_config.device,
             example_inputs=enc_example_inputs,
             compile_context=context,
         )
-        compiled_decoder =
+        compiled_decoder = cls.compile(
             wrapped_model.decoder,
             dec_compile_config,
+            create_runtimes=rbln_config.create_runtimes,
+            device=rbln_config.device,
             example_inputs=dec_example_inputs,
             compile_context=context,
         )
@@ -331,12 +361,6 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
         attention_mask: Optional[torch.Tensor] = None,  # need for support transformers>=4.45.0
         **kwargs,
     ):
-        """
-        whisper don't use attention_mask,
-        attention_mask (`torch.Tensor`)`, *optional*):
-            Whisper does not support masking of the `input_features`, this argument is preserved for compatibility,
-            but it is not used. By default the silence in the input log mel spectrogram are ignored.
-        """
         return {
             "input_ids": input_ids,
             "cache_position": cache_position,
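In the compile hunk above, `cls.compile(...)` now receives `create_runtimes` and `device` from the model's `RBLNModelConfig` when building the Whisper encoder and decoder graphs. A minimal sketch of how these options might be supplied at export time, reusing the `rbln_`-prefixed keyword style from the docstring example above (the exact names `rbln_create_runtimes` and `rbln_device` are assumptions, not confirmed by this diff):

```python
# Hypothetical sketch: the rbln_* kwargs populate the RBLNModelConfig whose
# create_runtimes / device attributes are forwarded to cls.compile() above.
from optimum.rbln import RBLNWhisperForConditionalGeneration

model = RBLNWhisperForConditionalGeneration.from_pretrained(
    model_id="openai/whisper-tiny",
    export=True,                 # compile from the original checkpoint
    rbln_batch_size=1,
    rbln_device=0,               # assumed kwarg: target NPU device id
    rbln_create_runtimes=False,  # assumed kwarg: compile without instantiating runtimes
)
```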
optimum/rbln/transformers/models/xlm_roberta/__init__.py
CHANGED
@@ -12,5 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .configuration_xlm_roberta import
-
+from .configuration_xlm_roberta import (
+    RBLNXLMRobertaForSequenceClassificationConfig,
+    RBLNXLMRobertaModelConfig,
+)
+from .modeling_xlm_roberta import (
+    RBLNXLMRobertaForSequenceClassification,
+    RBLNXLMRobertaModel,
+)
+
+
+__all__ = [
+    "RBLNXLMRobertaModelConfig",
+    "RBLNXLMRobertaForSequenceClassificationConfig",
+    "RBLNXLMRobertaModel",
+    "RBLNXLMRobertaForSequenceClassification",
+]
optimum/rbln/transformers/models/xlm_roberta/configuration_xlm_roberta.py
CHANGED
@@ -12,8 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ...configuration_generic import
+from ...configuration_generic import (
+    RBLNModelForSequenceClassificationConfig,
+    RBLNTransformerEncoderForFeatureExtractionConfig,
+)
 
 
 class RBLNXLMRobertaModelConfig(RBLNTransformerEncoderForFeatureExtractionConfig):
-
+    """
+    Configuration class for XLM-RoBERTa model.
+    Inherits from RBLNTransformerEncoderForFeatureExtractionConfig with no additional parameters.
+    """
+
+
+class RBLNXLMRobertaForSequenceClassificationConfig(RBLNModelForSequenceClassificationConfig):
+    """
+    Configuration class for XLM-RoBERTa sequence classification model.
+    Inherits from RBLNModelForSequenceClassificationConfig with no additional parameters.
+    """
optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py
CHANGED
@@ -12,9 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-from ...modeling_generic import RBLNTransformerEncoderForFeatureExtraction
+from ...modeling_generic import RBLNModelForSequenceClassification, RBLNTransformerEncoderForFeatureExtraction
 
 
 class RBLNXLMRobertaModel(RBLNTransformerEncoderForFeatureExtraction):
-
+    """
+    XLM-RoBERTa base model optimized for RBLN NPU.
+    """
+
+
+class RBLNXLMRobertaForSequenceClassification(RBLNModelForSequenceClassification):
+    """
+    XLM-RoBERTa model for sequence classification tasks optimized for RBLN NPU.
+    """
+
+    rbln_model_input_names = ["input_ids", "attention_mask"]
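The three XLM-RoBERTa hunks above add dedicated `RBLNXLMRobertaModel` / `RBLNXLMRobertaForSequenceClassification` classes (plus their configuration classes) and export them from the package `__init__`. A minimal usage sketch, assuming the same `from_pretrained(..., export=True)` flow used by the other RBLN models in this release; the checkpoint id and `rbln_batch_size` value are placeholders:

```python
from transformers import AutoTokenizer
from optimum.rbln import RBLNXLMRobertaForSequenceClassification

model_id = "cardiffnlp/twitter-xlm-roberta-base-sentiment"  # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Compile the classification model for the RBLN NPU (or load an already-compiled one).
model = RBLNXLMRobertaForSequenceClassification.from_pretrained(
    model_id=model_id,
    export=True,
    rbln_batch_size=1,
)

# rbln_model_input_names in the diff above pins the runtime inputs to input_ids and attention_mask.
inputs = tokenizer("optimum-rbln 0.8.1 adds XLM-RoBERTa classes.", return_tensors="pt")
outputs = model(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"])
print(outputs)
```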
optimum/rbln/utils/model_utils.py
CHANGED
@@ -12,10 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import importlib
+from typing import TYPE_CHECKING, Type
+
+
+if TYPE_CHECKING:
+    from ..modeling import RBLNModel
+
 # Prefix used for RBLN model class names
 RBLN_PREFIX = "RBLN"
 
 
+MODEL_MAPPING = {}
+
+
 def convert_hf_to_rbln_model_name(hf_model_name: str):
     """
     Convert HuggingFace model name to RBLN model name.
@@ -41,3 +51,13 @@ def convert_rbln_to_hf_model_name(rbln_model_name: str):
     """
 
     return rbln_model_name.removeprefix(RBLN_PREFIX)
+
+
+def get_rbln_model_cls(cls_name: str) -> Type["RBLNModel"]:
+    cls = getattr(importlib.import_module("optimum.rbln"), cls_name, None)
+    if cls is None:
+        if cls_name in MODEL_MAPPING:
+            cls = MODEL_MAPPING[cls_name]
+        else:
+            raise AttributeError(f"RBLNModel for {cls_name} not found.")
+    return cls
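`get_rbln_model_cls` resolves an RBLN class by name: it first looks the name up in the `optimum.rbln` package and then falls back to the module-level `MODEL_MAPPING` registry, raising `AttributeError` when neither knows it. A short usage sketch based directly on the code added above:

```python
from optimum.rbln.utils.model_utils import get_rbln_model_cls

# Resolve a class that optimum.rbln exports by its RBLN-prefixed name.
cls = get_rbln_model_cls("RBLNWhisperForConditionalGeneration")
print(cls.__name__)

# Names that are neither exported nor registered in MODEL_MAPPING raise AttributeError.
try:
    get_rbln_model_cls("RBLNNotARealModel")
except AttributeError as err:
    print(err)
```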
optimum/rbln/utils/runtime_utils.py
CHANGED
@@ -13,12 +13,57 @@
 # limitations under the License.
 
 import threading
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Optional, Union
 
 import rebel
 import torch
 
 
+def tp_and_devices_are_ok(
+    tensor_parallel_size: Optional[int] = None,
+    device: Optional[Union[int, List[int]]] = None,
+    npu: Optional[str] = None,
+) -> Optional[str]:
+    if tensor_parallel_size is None:
+        tensor_parallel_size = 1
+
+    if rebel.device_count() < tensor_parallel_size:
+        return (
+            f"Tensor parallel size {tensor_parallel_size} is greater than "
+            f"the number of available devices {rebel.device_count()}."
+        )
+
+    if device is None:
+        device = list(range(tensor_parallel_size))
+    elif isinstance(device, int):
+        device = [device]
+    elif isinstance(device, list):
+        if any(not isinstance(d, int) for d in device):
+            return "Device must be a(n) (list of) integer(s)."
+        if len(device) != tensor_parallel_size:
+            return (
+                f"The number of devices ({len(device)}) does not match tensor parallel size ({tensor_parallel_size})."
+            )
+    else:
+        return f"Invalid device: {device}"
+
+    for device_id in device:
+        if device_id < 0:  # if any device is dummy device, skip it
+            return None
+        if rebel.get_npu_name(device_id) is None:
+            return (
+                f"Device {device_id} is not a valid NPU device. Please check your NPU status with 'rbln-stat' command."
+            )
+
+    if npu is not None:
+        for device_id in device:
+            npu_name = rebel.get_npu_name(device_id)
+            if npu_name != npu:
+                return f"Device {device_id} ({npu_name}) is not on the same NPU as {npu}."
+
+    return None
+
+
 class RBLNPytorchRuntime:
     mandatory_members = []
 
@@ -43,6 +88,9 @@ class RBLNPytorchRuntime:
     def __repr__(self) -> str:
         return repr(self.runtime)
 
+    def parameters(self):
+        yield torch.tensor([1.0], dtype=torch.float32, device=torch.device("cpu"))
+
 
 class UnavailableRuntime:
     """
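`tp_and_devices_are_ok` validates a tensor-parallel size against the requested device list and the NPUs visible to the `rebel` runtime; it returns an error string describing the first problem it finds, or `None` when the combination is usable. A short caller-side sketch (illustrative only; it needs the `rebel` package and attached NPUs to run):

```python
from optimum.rbln.utils.runtime_utils import tp_and_devices_are_ok

# Check that 2-way tensor parallelism can run on NPU devices 0 and 1.
error = tp_and_devices_are_ok(tensor_parallel_size=2, device=[0, 1])
if error is not None:
    # e.g. fewer NPUs than the tensor parallel size, or an invalid device id
    raise ValueError(error)
```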
optimum/rbln/utils/submodule.py
CHANGED
@@ -12,19 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import importlib
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict, List, Type
 
 from transformers import PretrainedConfig
 
 from ..configuration_utils import RBLNModelConfig
+from ..utils.model_utils import get_rbln_model_cls
 
 
 if TYPE_CHECKING:
     from transformers import PreTrainedModel
 
-    from ..
+    from ..modeling import RBLNModel
 
 
 class SubModulesMixin:
@@ -37,7 +37,7 @@ class SubModulesMixin:
 
     _rbln_submodules: List[Dict[str, Any]] = []
 
-    def __init__(self, *, rbln_submodules: List["
+    def __init__(self, *, rbln_submodules: List["RBLNModel"] = [], **kwargs) -> None:
         for submodule_meta, submodule in zip(self._rbln_submodules, rbln_submodules):
             setattr(self, submodule_meta["name"], submodule)
 
@@ -48,7 +48,7 @@ class SubModulesMixin:
     @classmethod
     def _export_submodules_from_model(
         cls, model: "PreTrainedModel", model_save_dir: str, rbln_config: RBLNModelConfig, **kwargs
-    ) -> List["
+    ) -> List["RBLNModel"]:
         rbln_submodules = []
         submodule_prefix = getattr(cls, "_rbln_submodule_prefix", None)
 
@@ -61,7 +61,7 @@ class SubModulesMixin:
         torch_submodule: PreTrainedModel = getattr(model, submodule_name)
 
         cls_name = torch_submodule.__class__.__name__
-        submodule_cls: Type["
+        submodule_cls: Type["RBLNModel"] = get_rbln_model_cls(f"RBLN{cls_name}")
         submodule_rbln_config = getattr(rbln_config, submodule_name) or {}
 
         if isinstance(submodule_rbln_config, dict):
@@ -95,9 +95,7 @@ class SubModulesMixin:
             submodule_rbln_config = getattr(rbln_config, submodule_name)
 
             # RBLNModelConfig -> RBLNModel
-            submodule_cls
-                importlib.import_module("optimum.rbln"), submodule_rbln_config.rbln_model_cls_name
-            )
+            submodule_cls = get_rbln_model_cls(submodule_rbln_config.rbln_model_cls_name)
 
             json_file_path = Path(model_save_dir) / submodule_name / "config.json"
             config = PretrainedConfig.from_json_file(json_file_path)
{optimum_rbln-0.8.0.post2.dist-info → optimum_rbln-0.8.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: optimum-rbln
-Version: 0.8.0.post2
+Version: 0.8.1
 Summary: Optimum RBLN is the interface between the HuggingFace Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
 Project-URL: Homepage, https://rebellions.ai
 Project-URL: Documentation, https://docs.rbln.ai
@@ -23,12 +23,12 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Requires-Python: <3.13,>=3.9
 Requires-Dist: accelerate>=1.0.1
-Requires-Dist: diffusers
+Requires-Dist: diffusers==0.34.0
 Requires-Dist: packaging>=24.1
-Requires-Dist: torch==2.
-Requires-Dist: torchaudio<=2.
-Requires-Dist: torchvision<=0.
-Requires-Dist: transformers==4.
+Requires-Dist: torch==2.7.0
+Requires-Dist: torchaudio<=2.7.0
+Requires-Dist: torchvision<=0.22.0
+Requires-Dist: transformers==4.51.3
 Description-Content-Type: text/markdown
 
 
optimum_rbln-0.8.1.dist-info/RECORD
ADDED
@@ -0,0 +1,211 @@
+optimum/rbln/__init__.py,sha256=MZCYmY4Y_Zfk0TGo3xK52osHDLZHz4cSdduXZt6RfSI,15316
+optimum/rbln/__version__.py,sha256=Qh9qKQ7_wMS7wH5nRvhQyYwD9deQawM2RC-sVdT7RyM,511
+optimum/rbln/configuration_utils.py,sha256=o5oer7fBdE-MHLGNXoP35FjmuQbMmjEIDv0QE_k3kpo,32336
+optimum/rbln/modeling.py,sha256=bsvK6GQtoH9vx72Ea59kvv61jguOk9XDTzVjsY1ugkk,14248
+optimum/rbln/modeling_base.py,sha256=QpNkU_Do__JKmnHjaPzv47OhQwgGfVohisip1jqXa7A,23871
+optimum/rbln/diffusers/__init__.py,sha256=_3FoA0uxCdFd6YK9PMaptFmR9XvdB_PcvYR1MkbGpN8,6957
+optimum/rbln/diffusers/modeling_diffusers.py,sha256=RjZNcYMU5daUIj-PAxyAwVoo2a9hnbR81qfcwUYJBPI,19844
+optimum/rbln/diffusers/configurations/__init__.py,sha256=vMRnPY4s-Uju43xP038D2EA18X_mhy2YfsZVpSU-VoA,1322
+optimum/rbln/diffusers/configurations/models/__init__.py,sha256=7q95gtgDzCeIBogGw8SLQoHT4Wch7vpLJVF2UQovuoo,567
+optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl.py,sha256=ySetuNq6koleFIZ542zZLTzEEyl_CTul9l12ufWlQ_Y,3218
+optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl_cosmos.py,sha256=SJfgbUz1LlRVuKQ_sHwPS262oOHF2TliKqM2z13wjEw,4172
+optimum/rbln/diffusers/configurations/models/configuration_controlnet.py,sha256=VDO_YFS_QhcHhuRIXQL53JZXEO27yoKHtecq5hd2la8,2637
+optimum/rbln/diffusers/configurations/models/configuration_prior_transformer.py,sha256=vE8RsXc27Z4-9k0KEM_vP7AWd5UUYvDgfX1g6nUrPp4,2224
+optimum/rbln/diffusers/configurations/models/configuration_transformer_cosmos.py,sha256=tqzBWzkl5PX60v8REGHuUC1WdJuIQv_2BGUOne5UYL8,3127
+optimum/rbln/diffusers/configurations/models/configuration_transformer_sd3.py,sha256=TAwHUyVy_9HSEZdXIuFCtrBfNIuYIedklJaCut5wEys,2412
+optimum/rbln/diffusers/configurations/models/configuration_unet_2d_condition.py,sha256=mxcbrOqLMnPpP-jnjSeRWPj2zwPMsgeQSq6LzhG2btA,3630
+optimum/rbln/diffusers/configurations/models/configuration_vq_model.py,sha256=dslGcfCZL_hNeVyjV-4FnCT1POmXuiaLbr6NcQSKgHg,3259
+optimum/rbln/diffusers/configurations/pipelines/__init__.py,sha256=RfJXQiYvgGc3Rp7JYk5s0AQd0XB5JCAb37_riGWQAYg,1268
+optimum/rbln/diffusers/configurations/pipelines/configuration_controlnet.py,sha256=nTtr2vqyr3zNSJXI0kiTAhOnVNhA-cVyaSnKOwBBZIo,14215
+optimum/rbln/diffusers/configurations/pipelines/configuration_cosmos.py,sha256=tncXVraSYfrezqL9cT4kg5nuoifzYVfP0qHbgg0QUjA,4615
+optimum/rbln/diffusers/configurations/pipelines/configuration_kandinsky2_2.py,sha256=1ve6o4OEpjPzTXWHXy_T5MAI0V-F08PMv2W6nBFfeKU,16386
+optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion.py,sha256=kR8dV_RsmoDxhK5bAfv3PbtS5LpN5g-O-snAX1sP6Fo,6591
+optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_3.py,sha256=f2VOwvCd-9kDnUpwhb0LaMWgfwdmBzUKMpmCdhUv2sc,7923
+optimum/rbln/diffusers/configurations/pipelines/configuration_stable_diffusion_xl.py,sha256=-r8DMdiCWv6wUqAgvkWh03NuYlHXH2sj-i1chOfNaJk,7039
+optimum/rbln/diffusers/models/__init__.py,sha256=TgrKGOPDJyEtDLiF22L1q1M8tCal4D9wBcrAtE5Zs5Y,1624
+optimum/rbln/diffusers/models/controlnet.py,sha256=6owledPe9BXhbZOG8lbuuYvpBU0UrQV7zmat6SoMXOM,10585
+optimum/rbln/diffusers/models/autoencoders/__init__.py,sha256=kpINW4bWwy-Q2doPME4nZ8gXRmkK2DRv2kDdbZuQ3m8,738
+optimum/rbln/diffusers/models/autoencoders/autoencoder_kl.py,sha256=W5dAVtl7y3Ho63Zq9g-CG16_odku3uZ8fsD8IOePpgI,8534
+optimum/rbln/diffusers/models/autoencoders/autoencoder_kl_cosmos.py,sha256=aHkpiMemMU9Yc0dKCt90RBEGlcGzdkO3pWip-w1itXM,8669
+optimum/rbln/diffusers/models/autoencoders/vae.py,sha256=ZX6XH9eZxexSSN8sSKFDcvEK9mMuEQNyoalSpOOqQrE,5419
+optimum/rbln/diffusers/models/autoencoders/vq_model.py,sha256=IA7D9c1SMnsZpDJ-xO85cbmXznA-vKhZkWwHZP0b3sw,6739
+optimum/rbln/diffusers/models/transformers/__init__.py,sha256=3oTqAOok-dUR2KealC41CKt36dpKq3IT4kabmHkrCpg,767
+optimum/rbln/diffusers/models/transformers/prior_transformer.py,sha256=SWoeVK--BRMwuXVABNVtonmzJDusx0iz4Q3EAvJ9uN8,5395
+optimum/rbln/diffusers/models/transformers/transformer_cosmos.py,sha256=_YwjOqIGluIt-Nvax8_YJb7BtyZcSf9OAJoYSZsVB2I,12704
+optimum/rbln/diffusers/models/transformers/transformer_sd3.py,sha256=yF7sS0QvawowpV9hR5GeT8DaE8CCp3mj1njHHd9cKTc,6630
+optimum/rbln/diffusers/models/unets/__init__.py,sha256=MaICuK9CWjgzejXy8y2NDrphuEq1rkzanF8u45k6O5I,655
+optimum/rbln/diffusers/models/unets/unet_2d_condition.py,sha256=v3WS9EGKROE_QClXrxC7rmRko1BspAvAbeIfh83LK88,15832
+optimum/rbln/diffusers/pipelines/__init__.py,sha256=n41EZwEw23tVpe_cgDnuflST1lPiX1y8XQ-3ktTqBIo,3425
+optimum/rbln/diffusers/pipelines/controlnet/__init__.py,sha256=n1Ef22TSeax-kENi_d8K6wGGHSNEo9QkUeygELHgcao,983
+optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py,sha256=3S9dogIHW8Bqg5kIlCudhCQG-4g3FcdOPEWhBOf7CJA,4059
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py,sha256=G96bh4D9Cu-w4F9gZBQF6wNzhJQv9kvI34ZFsuEDjSw,35714
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py,sha256=deGtaqgNumcvCKzKoHZrS-3UZxxWBP0ESizdfvCJlBE,34186
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py,sha256=2w6dmGQuBWqVoocn27z2yMkG7fL7_MVDBcQNJPJsRXU,45300
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py,sha256=HX56itORMqXLjZcwv25C-_z3JyZn3v6BpfIjsrDO3mE,46640
+optimum/rbln/diffusers/pipelines/cosmos/__init__.py,sha256=h2j6S8IJPVHeNU8qmW9vyXMgHBw0d7kQcuMAA5YoHPU,795
+optimum/rbln/diffusers/pipelines/cosmos/configuration_cosmos_guardrail.py,sha256=kDVnUBBGdumpDj2DaOpo5MSsFvlFIGY6BU1LZaFVqao,3327
+optimum/rbln/diffusers/pipelines/cosmos/cosmos_guardrail.py,sha256=EAt2UICPRTaUz4SNsQYOa9aoW0USj2qamqdhlf2ajrA,18261
+optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_text2world.py,sha256=TfhgAWVHUHvxsagBGLAVYKBoSMvuH7rg_xP5ZZ0rVU0,3910
+optimum/rbln/diffusers/pipelines/cosmos/pipeline_cosmos_video2world.py,sha256=-dl8AMwSuorIOxRNfyu1XhkJfmNVbSo3_Wkb2gAmUpo,3917
+optimum/rbln/diffusers/pipelines/kandinsky2_2/__init__.py,sha256=I4YQq2HfA3xONbWsdJ870IEJPyLWeCDDG-UCJsu9YO8,1035
+optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2.py,sha256=2vgZZt0JrZlbHbUlBHrT0zKvYhuX1a4vwy3mxTPHisM,1335
+optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py,sha256=cuN7w5LsnkSy0WUaCBPxVmYOJiBMihdh-KM7iV0tHL0,8149
+optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_img2img.py,sha256=PxZTZFEYv7oUIQVjX33JbO46WBgT31iDJKAqKTyAVOc,1387
+optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_inpaint.py,sha256=LKz28uSB1W4PDl0xffTuvs743e6thLKiXyzjYJnkl08,1379
+optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_prior.py,sha256=ugZ0_zolIakQ_-YVkYK_5i-s1RJifxvAprX2J5xp2mc,1355
+optimum/rbln/diffusers/pipelines/stable_diffusion/__init__.py,sha256=gz6CbP4T6w8XH3PIGRIJXTmKFsChJIkwcAEAsiR5Ydg,830
+optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py,sha256=ACH5ueYgZVPu_s5mfLJ-2DAoQKoXKbPH2cX0xHY32qc,1321
+optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py,sha256=oWPiEGbLCmCESfOU-xojNypQNmqmRsb5irIn_M3nD-A,1370
+optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py,sha256=1oBsC-V-Eum6zza5-RTP1_WAl-_YXY-bvT0G4UKWChY,1356
+optimum/rbln/diffusers/pipelines/stable_diffusion_3/__init__.py,sha256=Hh-JePj7nBsm81ioqdt8gfpS_I0sEHBinsAOEdraUno,839
+optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py,sha256=RdMh3KTBNH7mtGgRk-8RJwRwOpitcvS8nNhPkYA8dzM,1387
+optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_img2img.py,sha256=ZwmaqVpuLc8iGQDwOehP2KSEB1sZe_JuK0YRvKGK0FU,1433
+optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_inpaint.py,sha256=NrfdKXXzM2_yePf0olKNz3FtT6Nkeb8y55-KxmSO0_0,1425
+optimum/rbln/diffusers/pipelines/stable_diffusion_xl/__init__.py,sha256=9iIMZYvpWEiLRrMEduhwVTE5IUix61OSLj7kd1e1FzY,845
+optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py,sha256=0ymBsxu6nBCUUv9dye-Vvd_hzvfmqrGZFFMMpUTsBd4,1375
+optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py,sha256=fAYd9blytd9o3PmTEM4wo_XVtgB7xojCJBiK4QI8GJU,1408
+optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_inpaint.py,sha256=XUchrMMrm70KKOheSUM6mmzoGWtTyCKd9vkAcd5kvEo,1407
+optimum/rbln/ops/__init__.py,sha256=SPepB2VbmvEgathWAs_oCbDOPVyBhPey3wZX2X6dIBM,738
+optimum/rbln/ops/attn.py,sha256=lFAypLsboLq9p74y3n-x7iwYTjaKdCZGTKLgvFWkXTI,9481
+optimum/rbln/ops/flash_attn.py,sha256=z39DJZSk94630ueoOCkiybxR5gzvNR-SRADHs0F6pzU,4346
+optimum/rbln/ops/kv_cache_update.py,sha256=HjnHBR-oFrJQibsVnkYb0P5_-wEma8jl0mkjkylwakU,1270
+optimum/rbln/ops/linear.py,sha256=1_7Hg-9wXxhu97fqPobotLQx17k7VPeSSL91_9Z7EDg,1018
+optimum/rbln/ops/sliding_window_attn.py,sha256=EQrV_yRGc5z6kvwEsAcLP028bJWkQg2UPI3xubt9skU,3487
+optimum/rbln/transformers/__init__.py,sha256=MF7OaGf-KI9rz4EOzejxHTDYUB3RO2L02BquTe0PXmI,9107
+optimum/rbln/transformers/configuration_generic.py,sha256=kNhPWtzF0IovUnrsXfxXdXITqgpfCAAedjfB6jSAhEg,5131
+optimum/rbln/transformers/modeling_generic.py,sha256=SXsZghRDsPolNnG5FFPRtXzIEmPQDnz0iRy6PIZvFVI,12225
+optimum/rbln/transformers/modeling_rope_utils.py,sha256=6Zg3r-TeUk4WQAlr95pqfhuoAD_RQ4njT1rbO9uPL0Q,14379
+optimum/rbln/transformers/models/__init__.py,sha256=VVQJgpUUnN4MPAQlOsxsw63w7WPK05ggFfRkGYuZFJQ,10266
+optimum/rbln/transformers/models/audio_spectrogram_transformer/__init__.py,sha256=I2vL4lrzbT5p4eJcH-EKHzEfcPkj_XVsie7jb9q6yic,775
+optimum/rbln/transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py,sha256=z7LJiVJPmnlCM3mcyhPJP8AufSrxO_dsPeJ51onq-Nc,833
+optimum/rbln/transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py,sha256=FIKEVWpIt6-JQX9B_rAfCrAPqdUHtR2i8D_X2k7639E,1498
+optimum/rbln/transformers/models/auto/__init__.py,sha256=34Xghf1ogG4u-jhBMlj134nHdgnR3JEHSeZTPuy3MpY,1071
+optimum/rbln/transformers/models/auto/auto_factory.py,sha256=1CA52xV2dS1Uzumcgqe4zobdpoi-Xt2oNjP3uLFtm08,8020
+optimum/rbln/transformers/models/auto/modeling_auto.py,sha256=Ex2ARRRIt3LtKhazr0UWy67R6WFAwSKRoi5n6B8TvoI,4213
+optimum/rbln/transformers/models/bart/__init__.py,sha256=fVo-gZEmJ0yxkIxEX6ciuRAGgXNyuvaXE2s88bhbjAE,830
+optimum/rbln/transformers/models/bart/bart_architecture.py,sha256=oxb3l0lw0qrnbeQxT-R3c0OH4IEp7UzEp2Iz_jhZe_Q,6373
+optimum/rbln/transformers/models/bart/configuration_bart.py,sha256=L466cRy4egEbu_lTb_v1sK6t3Lbi4Bra6Jjkj3FjdQw,1315
+optimum/rbln/transformers/models/bart/modeling_bart.py,sha256=H4MmQZbofb9kJq5WKqoFVjmj3HVtgns3t2F3QdSU-QQ,2337
+optimum/rbln/transformers/models/bert/__init__.py,sha256=86FuGRBLw315_Roa9D5OUx6Ku2PM0DqSPZ-YSqbF-io,806
+optimum/rbln/transformers/models/bert/configuration_bert.py,sha256=nEZnX6LXpLKWaoPEd4pWSysw9h-PLb2ld0ibC3dcJ7w,1611
+optimum/rbln/transformers/models/bert/modeling_bert.py,sha256=zR0US2laTT0yUkL6yyvrR5STQNJcYqtG98ez4SUYQAY,2040
+optimum/rbln/transformers/models/blip_2/__init__.py,sha256=L01gPXcUCa8Vg-bcng20vZvBIN_jlqCzwUSFuq0QOag,855
+optimum/rbln/transformers/models/blip_2/configuration_blip_2.py,sha256=ke75GqPU139dNOY1nm6QE661LepbD_0V9Bx1QbtHhKA,3210
+optimum/rbln/transformers/models/blip_2/modeling_blip_2.py,sha256=2sIVGrIn1B2nUZ8hw1sgW3VbJ2vxrlBRN37GgDiw0GU,16191
+optimum/rbln/transformers/models/clip/__init__.py,sha256=TLeXDqcFK6M6v9x7Xr64kBbqGu3hFHM7p754dQ8UVQc,938
+optimum/rbln/transformers/models/clip/configuration_clip.py,sha256=D7CIWpbMhXUrGv-CnhxRtSS3vAYb427-w7zSkfuJHEU,3455
+optimum/rbln/transformers/models/clip/modeling_clip.py,sha256=QbYrt7pUWNal-p93fxmuKrHa2CPlCaq8F16qOfMAst0,8090
+optimum/rbln/transformers/models/colpali/__init__.py,sha256=n3rueXT_oC0N8myoZiic0YkVK24CW5hZBPa-0L8so6Y,119
+optimum/rbln/transformers/models/colpali/colpali_architecture.py,sha256=bWG7TehWRZkTh2y6mGkpd85_onWAyiyKdaQC9TFsy3E,8065
+optimum/rbln/transformers/models/colpali/configuration_colpali.py,sha256=ieY-tuyDPObFUIJ5sfpcfuCsJ_HTAizN7ZGqirqeFRU,2636
+optimum/rbln/transformers/models/colpali/modeling_colpali.py,sha256=jzvJCBrrCXSpjfmJ3O-VvPNFGWGaNbpOV09JwLPAZWs,15757
+optimum/rbln/transformers/models/decoderonly/__init__.py,sha256=vQYZDDdoddwA7yKc5zzrq2Zs9sax-0p8rNF_aYfF4bk,1006
+optimum/rbln/transformers/models/decoderonly/configuration_decoderonly.py,sha256=cakn8RGo8gS3nmXdEqOfC2xUBOMGInROgLEbCOoLFR0,13398
+optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py,sha256=YAn8J_lIq4IS-HM_gbi5Qov8_osxhWtBr5z_28QRbGM,49667
+optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py,sha256=NmWdodIcXXChI61-Ej7StTe52iQvalMYRUDuNtcQVEI,53342
+optimum/rbln/transformers/models/distilbert/__init__.py,sha256=zXL78SOEORTnUN_wrdoaDaYpntG8lcFHvPobM6jC0CI,841
+optimum/rbln/transformers/models/distilbert/configuration_distilbert.py,sha256=O3BW9JjyYk9PLyiofvOKEgTdMZ_jpIuPfot281pSsyg,984
+optimum/rbln/transformers/models/distilbert/modeling_distilbert.py,sha256=LUh6zYGa8AR3Yxaj3gtyJRc-czBN3qnHTc-JTAhuqY0,1099
+optimum/rbln/transformers/models/dpt/__init__.py,sha256=Nzep9mlzKyL1kV726IBqY8DnLp1DkH9JzFeknWSRhok,714
+optimum/rbln/transformers/models/dpt/configuration_dpt.py,sha256=3Bb_K0sKI6TKeoHjikxUgT1tqbXhdBvVsk9bPVEID1g,984
+optimum/rbln/transformers/models/dpt/modeling_dpt.py,sha256=uIwdHAhGgSyj_ljwJsRv6i5nUr9lTzB2Ss0iz0HplfY,978
+optimum/rbln/transformers/models/exaone/__init__.py,sha256=eUL0mq3yGVzCQfjLlOtVF2MecIN3DQWm07EmXubGSTs,921
+optimum/rbln/transformers/models/exaone/configuration_exaone.py,sha256=S4s4kJemPbmn-otYv-XNHE40DJaEYY6cmzaWV6MTGsY,1388
+optimum/rbln/transformers/models/exaone/exaone_architecture.py,sha256=-gCUDIJ1SJqNgIALJejJ75XPtAkd83oyomBke9xGRsc,3610
+optimum/rbln/transformers/models/exaone/modeling_exaone.py,sha256=sr_ICK-rw_fYmLY5r0IOc-vDtSZEcSwFIQp3Gn92zqE,3929
+optimum/rbln/transformers/models/gemma/__init__.py,sha256=VqPIlokw3kjn_ZoLXINCLXw3vaysQFo5oPGGy6bnt4Q,708
+optimum/rbln/transformers/models/gemma/configuration_gemma.py,sha256=3hAxl7LL9vFpCHrs-g3BwVDdVjnnJ-fzSO88wdfyGDQ,1361
+optimum/rbln/transformers/models/gemma/gemma_architecture.py,sha256=I9EyRIEtMw9u7HhSa8PqUco4uNe3gl6_lx0r1uDT8hA,2546
+optimum/rbln/transformers/models/gemma/modeling_gemma.py,sha256=Ojvum34EhDHWfMB4D6S1BrwoTNwuBSZuBzwdnAgvq38,3095
+optimum/rbln/transformers/models/gemma3/__init__.py,sha256=6rugk3615SEt4lh7gduo_J9VyGiSReuEIvL0Uno0eaI,790
+optimum/rbln/transformers/models/gemma3/configuration_gemma3.py,sha256=eupMGTHJGJNNrAZ3GE6M6GQBAQzBb7KFJvalyDmbM-A,3063
+optimum/rbln/transformers/models/gemma3/gemma3_architecture.py,sha256=sgFQQbvEr15tb2Sxk_tgcgQFcjhKGbNSW6fm2u7-Vck,8609
+optimum/rbln/transformers/models/gemma3/modeling_gemma3.py,sha256=-cpU0ot46VFUZ6PtfwN9VJ-E44n6mP1E3dKwB99MtBM,38389
+optimum/rbln/transformers/models/gpt2/__init__.py,sha256=socBMIBZSiLbrVN12rQ4nL9gFeT0axMgz6SWaCaD4Ac,704
+optimum/rbln/transformers/models/gpt2/configuration_gpt2.py,sha256=9sS6-EGapmow3rG9ViejK9qwrqy_X86VBxQ7u9x0Yqk,923
+optimum/rbln/transformers/models/gpt2/gpt2_architecture.py,sha256=pnGgixjgjW7HULbs5211cC2guw_4e4-MlS69vdCRMMg,3206
+optimum/rbln/transformers/models/gpt2/modeling_gpt2.py,sha256=qBDanUk_O-HtOIVCA4IE3FYyCsnL9xIDK00vft-0caw,1490
+optimum/rbln/transformers/models/idefics3/__init__.py,sha256=ulxE7HEfXsNJhd25J9Fvi6vggo9aZH9sLKJjWB6LlzQ,814
+optimum/rbln/transformers/models/idefics3/configuration_idefics3.py,sha256=wKroy3m65zS41G80QXssbndHoHU8wtHTteGU2Q6qbws,2390
+optimum/rbln/transformers/models/idefics3/modeling_idefics3.py,sha256=qOJmf9VSJmNCxZ1OfRmVkDWYcVXGG7kN-UWWVIK1k4w,19693
+optimum/rbln/transformers/models/llama/__init__.py,sha256=knxvRkPx8x6-WOxqSq_PlaKYD-9F9Q8dh7r095Esey0,708
+optimum/rbln/transformers/models/llama/configuration_llama.py,sha256=X6SXtRXGBC8dtEfX6HJREfLYIidoneJauPgiH7KwDGY,1370
+optimum/rbln/transformers/models/llama/llama_architecture.py,sha256=S7MCPfyjG5eUqgaS-QNBB0ApUD6wnb5fR0RHq7k7-pA,728
+optimum/rbln/transformers/models/llama/modeling_llama.py,sha256=G91Yd8BfhiD8GT5bF46ZIHuc-ajGC-PO-mOQN3BhE1A,3122
+optimum/rbln/transformers/models/llava_next/__init__.py,sha256=kDXKr7wMkp1XqE__DER2B8kQF_NYMxhzsQS5ytGg56I,752
+optimum/rbln/transformers/models/llava_next/configuration_llava_next.py,sha256=b3roSXoAApUXk8dI4X__f3mWUnHcZDI1GAY34ckyYc4,2170
+optimum/rbln/transformers/models/llava_next/modeling_llava_next.py,sha256=paYtCk58--FSZp8xjVrfZAxkJxO02X-jxaVPqL-l7ZU,27421
+optimum/rbln/transformers/models/midm/__init__.py,sha256=IC3FETwgYinbp3wDj7tp4zIHJhbqM-c6GfTRdYcMNj8,913
+optimum/rbln/transformers/models/midm/configuration_midm.py,sha256=DxhcSJlApxfi00XxYmSkKZ6bY9vfLXT0zh-oMKkZot0,1365
+optimum/rbln/transformers/models/midm/midm_architecture.py,sha256=XXY_uDGkXeVQnKpmSWrgljgxtSdTgLLFLfMqtZdRJdM,5642
+optimum/rbln/transformers/models/midm/modeling_midm.py,sha256=zbziYZ3f_dX_MOLwORTfJn22psZ1g3FFeQffM_TIh7A,3876
+optimum/rbln/transformers/models/mistral/__init__.py,sha256=9FE64bCYfSIyrBkRcwlqF8QyacSJFWvwEufHFi1ZIrM,716
+optimum/rbln/transformers/models/mistral/configuration_mistral.py,sha256=pMYJSwqmtx0uD2uExHx4S-JXal9rqQ5A2ulT2IoglTg,1383
+optimum/rbln/transformers/models/mistral/mistral_architecture.py,sha256=_aU8TE_tdvfo0K7QpgTlz_d0qwk4O82dl9268lPL16E,733
+optimum/rbln/transformers/models/mistral/modeling_mistral.py,sha256=SGzmn9EJeM27eHRqSAljlBOZgAdSnWJJxurasa_s9Bc,3651
+optimum/rbln/transformers/models/opt/__init__.py,sha256=w0v8GzbzlR5_4yL851njGDSJgX89TrYxrHnpNfMHZEI,700
+optimum/rbln/transformers/models/opt/configuration_opt.py,sha256=HgNCxnuoyZZwPoDMU41nvXG5DU9UHHSG8gvUSsm-r34,920
+optimum/rbln/transformers/models/opt/modeling_opt.py,sha256=aDijHHFOWBAjCJ_YrI7dcmuVuY69S1QD0115MQO9YFU,3667
+optimum/rbln/transformers/models/opt/opt_architecture.py,sha256=xRN0nNoZB4ZxKOmliFkI0xFQ1jy0hs42dv-hMyGOZ_Q,2802
+optimum/rbln/transformers/models/phi/__init__.py,sha256=uqQb-sO1HXuaju2hfo7qJHk_IWhnptY-qFjNjK_uOc0,700
+optimum/rbln/transformers/models/phi/configuration_phi.py,sha256=58jv3bIo_BcPcS9wU6NVgh67mGpHafdoQzStLKmfuU4,1349
+optimum/rbln/transformers/models/phi/modeling_phi.py,sha256=sd8XYKJkpZM7pWqN0DE7B-dJuTpF9b2_ebZgJK1AuJ8,3061
+optimum/rbln/transformers/models/phi/phi_architecture.py,sha256=nv3jx0zWeExYDSr9xHzgG8pssxC5qe6QGy6HmZKXYas,4241
+optimum/rbln/transformers/models/qwen2/__init__.py,sha256=Tu4_AXy3ktTvxGwxED3kewiv62S75HgDWD6-TeC1DfA,708
+optimum/rbln/transformers/models/qwen2/configuration_qwen2.py,sha256=Jc7qTFQgB9tbhJ-aPDN_lfyz9u0omNL84HWYBQ5fvcs,1359
+optimum/rbln/transformers/models/qwen2/modeling_qwen2.py,sha256=OKd7SXQLLtzPVolr26P1TvCV7Gf0XG7k6BjzjuvrL4s,3885
+optimum/rbln/transformers/models/qwen2/qwen2_architecture.py,sha256=XlNAMYAcDLohnSAhIFGKOPuCB5XLgzYs5ABWdeQSaZs,720
+optimum/rbln/transformers/models/qwen2_5_vl/__init__.py,sha256=rAW3DKQUzGL6EMwa5r1iLu94yhpiZpk6zfoD7TtYXrc,865
+optimum/rbln/transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py,sha256=lyT0U3Em7l3Y3ZETVEniDbEliVI6xGbYO0LSeqXSJNA,4763
+optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py,sha256=APlEF_YAvhJdsTnYcmZuWe47lUZ_3fozhQA_bmCIYV4,26638
+optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py,sha256=oU4MyNeDHzqD3dl1DgwrMev07yvMFhl_hXvV6tRdXCo,7422
+optimum/rbln/transformers/models/resnet/__init__.py,sha256=0QqtEQF1IMYgEmmfXMGarCDS8kJB5tzODfwTEzDVZRg,837
+optimum/rbln/transformers/models/resnet/configuration_resnet.py,sha256=aOHNDpSi630H3LhDx_8aWh6cSR_zA7rgoMdp6MiiPl8,983
+optimum/rbln/transformers/models/resnet/modeling_resnet.py,sha256=6xKvD8HQCL-e-NtfvMrlL1fBWJBQtR_GK9zKTg2OJPQ,1021
+optimum/rbln/transformers/models/roberta/__init__.py,sha256=SDoN6iKO6gZk2Wg-nfzEzxNe4jVsd9G4RsduFoMZkYo,974
+optimum/rbln/transformers/models/roberta/configuration_roberta.py,sha256=6KhO-xBsDrYv5XFr6_JmOCFwpklpjB2fcA1V1nJVemo,1310
+optimum/rbln/transformers/models/roberta/modeling_roberta.py,sha256=74Pswb5JJNtctvrQHlo2zYocKZN0npWhjAaKMUDVBUU,1535
+optimum/rbln/transformers/models/seq2seq/__init__.py,sha256=HiSyWFcKeZ8okfo-s-_Mf_upyvAoZwraUIJyGNLNurY,714
+optimum/rbln/transformers/models/seq2seq/configuration_seq2seq.py,sha256=lOLCGd9sv4N2fYsJmbf4eFPMpI5NFtA656qCEjpfulE,2651
+optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py,sha256=a9VNsbUtFEOTnvs6OIEH0-3O2OQC7CIZrJStG5ub2bA,17163
+optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py,sha256=4MupGjhe_DpBLl43VVYY72WWiVdAr0mRPp8cHalQh2w,20028
+optimum/rbln/transformers/models/siglip/__init__.py,sha256=39MdhvWLZU0_9julQtJYVEiWI4csPrryS9krMauEA3s,730
+optimum/rbln/transformers/models/siglip/configuration_siglip.py,sha256=m1h8iDx_X9VmHdJi0sc1a2KsAO3OnpMb4cd9jW2Ic-U,3031
+optimum/rbln/transformers/models/siglip/modeling_siglip.py,sha256=ACNEYxEc_FH7CbQUHTKeHjWG6a2m9z5T7MQe_oejANk,8050
+optimum/rbln/transformers/models/t5/__init__.py,sha256=R1Q8Z1vaIdx4rDjeCmm_ZMSgewWaqaI0l93AHwewtew,818
+optimum/rbln/transformers/models/t5/configuration_t5.py,sha256=kjYBlho7c5-kUiPcQQTbpflZ75aK-nWWo4UudFRWmBw,1325
+optimum/rbln/transformers/models/t5/modeling_t5.py,sha256=pdAWBLVknTzbma0Ij-VQ2Qve-frPjxL-AwMyU-zouPY,5123
+optimum/rbln/transformers/models/t5/t5_architecture.py,sha256=X_9X4QRhkiiMrwFHv3mzER3yGmF9oQ2U-HdH6jbwVmw,9824
+optimum/rbln/transformers/models/time_series_transformer/__init__.py,sha256=xJaFWQawlwtv4H5tVFcY1pxLYzjHtMAlLq6nXysdkN8,1243
+optimum/rbln/transformers/models/time_series_transformer/configuration_time_series_transformer.py,sha256=9tomhFoGtLoWyrsAwbS6ArHZ9TcJy2M3Bm2PqR457_Y,1638
+optimum/rbln/transformers/models/time_series_transformer/modeling_time_series_transformer.py,sha256=_03leZBn9OsmnW_H-D40s7CR4Pwqkh7u0cQW3GZqDOE,17242
+optimum/rbln/transformers/models/time_series_transformer/time_series_transformers_architecture.py,sha256=XJDjQGbWXUq4ZimNojlcbm3mTDpxUMCl6tkFSzfYFl4,13769
+optimum/rbln/transformers/models/vit/__init__.py,sha256=CrrkHehfCe3U-_rUS00aMBY7Tncdeh43sNUgVI9Dt_g,807
+optimum/rbln/transformers/models/vit/configuration_vit.py,sha256=x98CxKR1cpKAG7Eh43uuPeGeGn4gS3HcKLPoDL3SWJo,994
+optimum/rbln/transformers/models/vit/modeling_vit.py,sha256=Q8xvX2oG2dC2RYM4ocaS0H70a2q_vQ9DZK2mCdyvxa0,1058
+optimum/rbln/transformers/models/wav2vec2/__init__.py,sha256=rI8yXNb0iV03o-DIn2or2bCCFAxKpZZgE51T4pH9lzU,710
+optimum/rbln/transformers/models/wav2vec2/configuration_wav2vec2.py,sha256=24sXig0EaNp5enDB7uSMCK9d-qLwgUnoLcHN1NNnu_o,1004
+optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=vp5l0zEjeEBHq9ISeWUDkero2pUPZkhNPa_lh2DvU_s,1944
+optimum/rbln/transformers/models/whisper/__init__.py,sha256=ErquiUlYycSYPsDcq9IwwmbZXoYLn1MVZ8VikWY5gQo,792
+optimum/rbln/transformers/models/whisper/configuration_whisper.py,sha256=KjM11IRvlwXR1hs811onLi8dxm1-N59ajxe9X7wvfr0,2900
+optimum/rbln/transformers/models/whisper/generation_whisper.py,sha256=GIHTca3b1VtW81kp7BzKQ7f77c2t9OsEsbZetripgDo,4582
+optimum/rbln/transformers/models/whisper/modeling_whisper.py,sha256=MAcc9fYjpqywD5IS7y5ayXWbeJTij0XQoag4wakj2xg,18058
+optimum/rbln/transformers/models/whisper/whisper_architecture.py,sha256=fKUbAMIl20o6EBMVcLg9TDSsJ1FDp8NKcl4jT9RWCEM,13981
+optimum/rbln/transformers/models/xlm_roberta/__init__.py,sha256=S-ksBzS3Jg4lJlBjagU1qrmKsJ9xOBXKJU3OrGBF_po,1008
+optimum/rbln/transformers/models/xlm_roberta/configuration_xlm_roberta.py,sha256=wHRpGTXL9khYqSkKL1IgA7__6_lt9QpOz9tHumjK7fo,1260
+optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=EZd3flRUEE38DYtdqEnG70LV7fHhkamRZV51xrVyjYI,1093
+optimum/rbln/transformers/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+optimum/rbln/transformers/utils/rbln_quantization.py,sha256=um0N2ZruU_lNibo6rlzgwHAI2_8QOrYE7W7pA1qfXKM,9396
+optimum/rbln/utils/__init__.py,sha256=ieDBT2VFTt2E0M4v_POLBpuGW9LxSydpb_DuPd6PQqc,712
+optimum/rbln/utils/decorator_utils.py,sha256=xu-TrsNi33SRC2a7DBsyoo6-pEQxWKZPZSmM9QlDe2Y,3745
+optimum/rbln/utils/hub.py,sha256=Z_R9Ic9VAew8bUmlaAlxZf5JGMDBivHvvFRI557pILY,4196
+optimum/rbln/utils/import_utils.py,sha256=fpOERIIxXm-cDYGn1NN6c7aWDPQYVitPQW2MiyZ9NEY,5471
+optimum/rbln/utils/logging.py,sha256=VKKBmlQSdg6iZCGmAXaWYiW67K84jyp1QJhLQSSjPPE,3453
+optimum/rbln/utils/model_utils.py,sha256=4k5879Kh75m3x_vS4-qOGfqsOiAvc2kdNFFfvsFvz3k,1748
+optimum/rbln/utils/runtime_utils.py,sha256=D9PS8hfH1NBf8yH8cAu-XfdC9fxKzPbt4LFBVpADbbs,7180
+optimum/rbln/utils/save_utils.py,sha256=hG5uOtYmecSXZuGTvCXsTM-SiyZpr5q3InUGCCq_jzQ,3619
+optimum/rbln/utils/submodule.py,sha256=w5mgPgncI740gVKMu3S-69DGNdUSI0bTZxegQGcZ98Y,5011
+optimum_rbln-0.8.1.dist-info/METADATA,sha256=woHDVZXR4IIQywAEruvtjr_MaVurcMFI28PJe-pYxHM,5297
+optimum_rbln-0.8.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+optimum_rbln-0.8.1.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+optimum_rbln-0.8.1.dist-info/RECORD,,