optimum-rbln 0.1.11__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- optimum/rbln/__init__.py +10 -7
- optimum/rbln/__version__.py +1 -1
- optimum/rbln/diffusers/models/autoencoder_kl.py +0 -2
- optimum/rbln/diffusers/models/controlnet.py +0 -6
- optimum/rbln/diffusers/models/unet_2d_condition.py +0 -3
- optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py +4 -0
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py +18 -20
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py +18 -20
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py +19 -34
- optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py +20 -35
- optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py +12 -13
- optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py +12 -14
- optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py +13 -14
- optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py +13 -14
- optimum/rbln/modeling_alias.py +4 -9
- optimum/rbln/modeling_base.py +105 -139
- optimum/rbln/modeling_config.py +51 -0
- optimum/rbln/transformers/__init__.py +8 -0
- optimum/rbln/transformers/models/__init__.py +4 -1
- optimum/rbln/transformers/models/auto/modeling_auto.py +1 -0
- optimum/rbln/transformers/models/bart/__init__.py +1 -1
- optimum/rbln/transformers/models/bart/bart_architecture.py +18 -12
- optimum/rbln/transformers/models/bart/modeling_bart.py +25 -6
- optimum/rbln/transformers/models/bert/modeling_bert.py +1 -2
- optimum/rbln/transformers/models/clip/modeling_clip.py +0 -1
- optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +172 -100
- optimum/rbln/transformers/models/dpt/modeling_dpt.py +0 -1
- optimum/rbln/transformers/models/exaone/__init__.py +32 -0
- optimum/rbln/transformers/models/exaone/exaone_architecture.py +72 -0
- optimum/rbln/transformers/models/exaone/hf_hub_cached/configuration_exaone.py +181 -0
- optimum/rbln/transformers/models/exaone/hf_hub_cached/modeling_exaone.py +1725 -0
- optimum/rbln/transformers/models/exaone/modeling_exaone.py +78 -0
- optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +148 -152
- optimum/rbln/transformers/models/midm/modeling_midm.py +5 -0
- optimum/rbln/transformers/models/qwen2/__init__.py +24 -0
- optimum/rbln/transformers/models/qwen2/modeling_qwen2.py +67 -0
- optimum/rbln/transformers/models/qwen2/qwen2_architecture.py +29 -0
- optimum/rbln/transformers/models/seq2seq/__init__.py +24 -0
- optimum/rbln/{modeling_seq2seq.py → transformers/models/seq2seq/modeling_seq2seq.py} +107 -166
- optimum/rbln/transformers/models/t5/__init__.py +1 -0
- optimum/rbln/transformers/models/t5/modeling_t5.py +55 -0
- optimum/rbln/transformers/models/t5/t5_architecture.py +46 -32
- optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py +0 -1
- optimum/rbln/transformers/models/whisper/modeling_whisper.py +37 -12
- optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py +1 -2
- optimum/rbln/utils/import_utils.py +14 -0
- optimum/rbln/utils/logging.py +1 -1
- optimum/rbln/utils/runtime_utils.py +1 -1
- optimum/rbln/utils/timer_utils.py +26 -2
- {optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/METADATA +4 -3
- {optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/RECORD +54 -44
- {optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/WHEEL +1 -1
- {optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/entry_points.txt +0 -0
- {optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/licenses/LICENSE +0 -0
@@ -59,14 +59,16 @@ if TYPE_CHECKING:
 class RBLNRuntimeEncoder(RBLNPytorchRuntime):
     mandatory_members = ["main_input_name"]
 
-    def forward(self,
+    def forward(self, input_features: torch.Tensor = None):
         # backward compatibility transformers==4.40.2
         # https://github.com/huggingface/transformers/blob/4fdf58afb72b0754da30037fc800b6044e7d9c99/src/transformers/pipelines/automatic_speech_recognition.py#L494
-
-
-
+
+        n_pad_to_batch = self.batch_size - input_features.shape[0]
+        if n_pad_to_batch > 0:
+            input_features = torch.nn.functional.pad(input_features, (0, 0, 0, 0, 0, n_pad_to_batch))
 
         _ = super().forward(input_features=input_features)
+
         # dummy output for generation
         return BaseModelOutput(last_hidden_state=torch.tensor([[-1.0]]))
@@ -74,12 +76,27 @@ class RBLNRuntimeEncoder(RBLNPytorchRuntime):
 class RBLNRuntimeDecoder(RBLNPytorchRuntime):
     mandatory_members = ["main_input_name"]
 
-    def forward(
-
+    def forward(
+        self,
+        decoder_input_ids: torch.Tensor = None,
+        decoder_attention_mask: torch.Tensor = None,
+        cache_position: torch.Tensor = None,
+    ):
+        inputs_bsz = decoder_input_ids.shape[0]
+        padded_bsz = self.batch_size - inputs_bsz
+        if padded_bsz > 0:
+            decoder_input_ids = torch.nn.functional.pad(decoder_input_ids, (0, 0, 0, padded_bsz))
+
+        outputs = super().forward(
+            decoder_input_ids=decoder_input_ids,
+            decoder_attention_mask=decoder_attention_mask,
+            cache_position=cache_position,
+        )
+
         if isinstance(outputs, torch.Tensor):
-            return Seq2SeqLMOutput(logits=outputs, cross_attentions=None)
+            return Seq2SeqLMOutput(logits=outputs[:inputs_bsz], cross_attentions=None)
         else:
-            return Seq2SeqLMOutput(logits=outputs[0], cross_attentions=outputs[1])
+            return Seq2SeqLMOutput(logits=outputs[0][:inputs_bsz], cross_attentions=outputs[1][:, :inputs_bsz])
 
 
 class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin):
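The decoder change above pads a smaller incoming batch up to the compiled batch size before dispatching to the RBLN runtime, then slices the padded rows back off the returned logits. A minimal standalone sketch of that pattern, using an illustrative batch size and stand-in tensors rather than optimum-rbln code:

    import torch
    from torch.nn import functional as F

    COMPILED_BATCH_SIZE = 4  # illustrative; the real value comes from the compiled RBLN config

    def pad_to_compiled_batch(decoder_input_ids: torch.Tensor):
        # The runtime is compiled for a fixed batch size, so smaller requests are
        # zero-padded along the batch (second-to-last) dimension before dispatch.
        inputs_bsz = decoder_input_ids.shape[0]
        n_pad = COMPILED_BATCH_SIZE - inputs_bsz
        if n_pad > 0:
            decoder_input_ids = F.pad(decoder_input_ids, (0, 0, 0, n_pad))
        return decoder_input_ids, inputs_bsz

    ids = torch.ones(2, 1, dtype=torch.long)          # a 2-sample request
    padded, real_bsz = pad_to_compiled_batch(ids)     # shape becomes (4, 1)
    logits = torch.randn(COMPILED_BATCH_SIZE, 1, 32)  # stand-in for the runtime output
    logits = logits[:real_bsz]                        # padding rows are sliced off again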
@@ -93,7 +110,6 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
     - compiling the resulting graph using the RBLN compiler.
     """
 
-    model_type = "rbln_model"
     auto_model_class = AutoModelForSpeechSeq2Seq
     main_input_name = "input_ids"
@@ -104,8 +120,12 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
         self.dec_max_seq_len = self.rbln_config.model_cfg["dec_max_seq_len"]
         self.rbln_token_timestamps = self.rbln_config.model_cfg["token_timestamps"]
 
-        self.encoder = RBLNRuntimeEncoder(
-
+        self.encoder = RBLNRuntimeEncoder(
+            runtime=self.model[0], main_input_name="input_features", batch_size=self.batch_size
+        )
+        self.decoder = RBLNRuntimeDecoder(
+            runtime=self.model[1], main_input_name="input_ids", batch_size=self.batch_size
+        )
 
         # skip encoder & first decoder when language detected
         self.is_language_detected = False
@@ -200,7 +220,11 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
         expected_seq_len = model_config.max_source_positions * 2
         num_mel_bins = model_config.num_mel_bins
         enc_max_seq_len = model_config.max_source_positions
-
+
+        # 'whisper-large-v3-turbo' doesn't have 'max_length', but PretrainedConfig have default value for the key 'max_length'
+        rbln_dec_max_seq_len = getattr(model_config, "max_target_positions", None)
+        if rbln_dec_max_seq_len is None:
+            rbln_dec_max_seq_len = model_config.max_length
 
         # model input info
         enc_input_info = [("input_features", [rbln_batch_size, num_mel_bins, expected_seq_len], "float32")]
@@ -273,6 +297,7 @@ class RBLNWhisperForConditionalGeneration(RBLNModel, RBLNWhisperGenerationMixin)
         self,
         input_ids,
         cache_position: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,  # need for support transformers>=4.45.0
         **kwargs,
     ):
         """
@@ -25,7 +25,7 @@ import logging
 from typing import TYPE_CHECKING, Any, Dict, Optional, Union
 
 import torch
-from transformers import
+from transformers import PretrainedConfig, PreTrainedModel, XLMRobertaConfig, XLMRobertaModel
 
 from ....modeling_base import RBLNModel
 from ....modeling_config import RBLNCompileConfig, RBLNConfig
@@ -38,7 +38,6 @@ if TYPE_CHECKING:
 
 
 class RBLNXLMRobertaModel(RBLNModel):
-    auto_model_class = AutoModel  # feature extraction
     original_model_class = XLMRobertaModel
     original_config_class = XLMRobertaConfig
@@ -37,6 +37,20 @@ class VersionCompat:
 
 
 RBLN_VERSION_COMPATS = {
+    "0.1.12": [
+        VersionCompat(
+            package_name="rebel-compiler",
+            min_version="0.5.12",
+            max_version="0.5.13",
+        ),
+    ],
+    "0.1.11": [
+        VersionCompat(
+            package_name="rebel-compiler",
+            min_version="0.5.10",
+            max_version="0.5.11",
+        ),
+    ],
     "0.1.10": [
         VersionCompat(
             package_name="rebel-compiler",
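The two new RBLN_VERSION_COMPATS entries pin each optimum-rbln release to a rebel-compiler version range. A hedged sketch of how such a (min_version, max_version) pair can be evaluated against an installed package with the packaging library; the helper below is illustrative and not the actual check in import_utils.py:

    from importlib.metadata import version
    from packaging.version import Version

    def is_compatible(package_name: str, min_version: str, max_version: str) -> bool:
        # Half-open range check is an assumption; the real comparison may differ.
        installed = Version(version(package_name))
        return Version(min_version) <= installed < Version(max_version)

    # e.g. is_compatible("rebel-compiler", "0.5.12", "0.5.13") for optimum-rbln 0.1.12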
optimum/rbln/utils/logging.py CHANGED
@@ -76,7 +76,7 @@ class UnavailableRuntime:
 class ContextRblnConfig:
     _local = threading.local()
 
-    def __init__(self, device, device_map, create_runtimes, optimze_host_mem):
+    def __init__(self, device=None, device_map=None, create_runtimes=None, optimze_host_mem=None):
         self.device = device
         self.device_map = device_map
         self.create_runtimes = create_runtimes
@@ -1,5 +1,8 @@
+import os
 from datetime import datetime
 
+from halo import Halo
+
 from .logging import get_logger
 
 
@@ -9,11 +12,32 @@ logger = get_logger()
 def rbln_timer(print_name):
     def decorator(function):
         def wrapper(*args, **kwargs):
+            disable = os.getenv("OPTIMUM_RBLN_DISABLE_SPIN", 'False').lower() in ('true', '1', 't')
+            if disable:
+                logger.info(f"{print_name} ...")
+
+            spinner = Halo(text=f"{print_name} ...", spinner='dots', color='green', enabled=(not disable))
+            spinner.start()
+
+            # Start timer
             tick = datetime.now()
-
-
+            try:
+                result = function(*args, **kwargs)
+            except Exception as e:
+                spinner.fail(f"{print_name} failed.")
+                raise e
+
+            # Print elapsed time.
+            if disable:
+                logger.info(f"{print_name} done. Elasped time: {format_elapsed_time(tick)}")
+
+            spinner.stop()
+            spinner.succeed(text=f"{print_name} done. Elasped time: {format_elapsed_time(tick)}")
             return result
 
         return wrapper
 
+    def format_elapsed_time(start_time: datetime) -> str:
+        return str(datetime.now() - start_time)[:7]
+
     return decorator
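For reference, a small usage sketch of the reworked decorator: the decorated function below is hypothetical, while rbln_timer and the OPTIMUM_RBLN_DISABLE_SPIN switch come from the diff above.

    import os
    import time

    from optimum.rbln.utils.timer_utils import rbln_timer

    # When set, progress is logged as plain messages instead of a Halo spinner.
    os.environ["OPTIMUM_RBLN_DISABLE_SPIN"] = "1"

    @rbln_timer("Compiling model")
    def compile_model():
        time.sleep(0.5)  # stand-in for real compilation work

    compile_model()  # reports "Compiling model ..." and a done message with the elapsed time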
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: optimum-rbln
-Version: 0.1.11
+Version: 0.1.12
 Summary: Optimum RBLN is the interface between the Hugging Face Transformers and Diffusers libraries and RBLN accelerators.
 It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
 
@@ -24,12 +24,13 @@ Requires-Python: <3.11,>=3.8
 Requires-Dist: torch<=2.2.1
 Requires-Dist: torchvision<=0.17.1
 Requires-Dist: torchaudio<=2.2.1
-Requires-Dist: optimum
+Requires-Dist: optimum==1.23.1
 Requires-Dist: accelerate>=0.28.0
-Requires-Dist: transformers
+Requires-Dist: transformers==4.45.2
 Requires-Dist: diffusers<=0.30.3
 Requires-Dist: einops>=0.8.0
 Requires-Dist: packaging>=24.1
+Requires-Dist: halo
 Provides-Extra: tests
 Requires-Dist: pytest>=8.1.1; extra == "tests"
 Requires-Dist: psutil>=5.9.8; extra == "tests"
@@ -1,49 +1,53 @@
-optimum/rbln/__init__.py,sha256=
-optimum/rbln/__version__.py,sha256=
+optimum/rbln/__init__.py,sha256=z_WB5rnZ7t1Q_B1DeyMOHzXREX-i67NxHa1pJ249PIU,6295
+optimum/rbln/__version__.py,sha256=8yPskJaNfjtFCSb_-mBfyIbWJj31afL2gxVQ6POgv8A,22
 optimum/rbln/diffusers/__init__.py,sha256=w4W7Wy-Mmh8CQZ5M9JnrrE5bN0UsfNehZI41QadE-hk,2605
 optimum/rbln/diffusers/models/__init__.py,sha256=aY6Llq_31dZjdB9HPBDvi7sXVtdQT9r11gokXG5ffxA,1139
-optimum/rbln/diffusers/models/autoencoder_kl.py,sha256=
-optimum/rbln/diffusers/models/controlnet.py,sha256=
-optimum/rbln/diffusers/models/unet_2d_condition.py,sha256=
+optimum/rbln/diffusers/models/autoencoder_kl.py,sha256=mKf1f29lRveMFupo-_hC3XB5TQgeMNm_D9PxkvIQ9t4,9744
+optimum/rbln/diffusers/models/controlnet.py,sha256=e94x-zFgXBmTAhGpVcevy222qvCNOYYV0-g5-1Hic7E,9648
+optimum/rbln/diffusers/models/unet_2d_condition.py,sha256=gNuusFEsijFZatCprMS-348BKvutxZQtndPeYGojh_A,14946
 optimum/rbln/diffusers/pipelines/__init__.py,sha256=Xr_bQbpbC5HbJB2NuUcVQu2BGebDkc2bhsGJmL6jgps,1449
 optimum/rbln/diffusers/pipelines/controlnet/__init__.py,sha256=k0govvSBxBUR5qpxUGxRMHuQCMX7hXHVZ4EqVRw1LWk,1377
-optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py,sha256=
-optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py,sha256=
-optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py,sha256=
-optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py,sha256=
-optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py,sha256=
+optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py,sha256=XXHiXzE5Gyd1zLhLdc9O2cRJkgPeCTVzqF3-scn9jmM,5212
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py,sha256=Zl5lqnsB9xFJroBakyeK1c1HIsdajESMryhnYjdfmus,42709
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py,sha256=kMqt7hrHm2CjZVdEJc3KTmTkLr_jtPAROzaA1OTrL4Y,41303
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py,sha256=NqPidjFC0cW7_-vkj1VHlhNAqoXcg8ZZcd9EGWmjvqw,52634
+optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py,sha256=1Hqu7Bm6f2lHjJrkVXtxx-s_hQ7yxdJ_O4WMZShbSHs,53968
 optimum/rbln/diffusers/pipelines/stable_diffusion/__init__.py,sha256=qf_uMWSwD-CyRMRC73y1QsTMyl_qCMreIdg0a8rhJuA,1142
-optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py,sha256=
-optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py,sha256=
+optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py,sha256=_pNJwtjSzDpJgE6-_E5SW6m7DTPBepGCCcxPnhSyf4U,5711
+optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py,sha256=W098xtly5IGx-ieZurGbR0wWecql2lPbD0NsTUCQgcc,5990
 optimum/rbln/diffusers/pipelines/stable_diffusion_xl/__init__.py,sha256=8MDMHIVsDrM6lZAyvpjFtWOFwiY_IoSxzCQe-gJYTPI,159
-optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py,sha256=
-optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py,sha256=
+optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py,sha256=FcezpMdeszOby-dCbRKLPktfxIGFlxKNFC5RlUnuPH0,5953
+optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py,sha256=NRRSwT3NV9KO5RMBxzIvQ7aFFJ3ob1exdxOPBDsiY2k,6129
 optimum/rbln/modeling.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-optimum/rbln/modeling_alias.py,sha256=
-optimum/rbln/modeling_base.py,sha256=
-optimum/rbln/modeling_config.py,sha256=
-optimum/rbln/
-optimum/rbln/transformers/__init__.py,sha256=9NiMF-uKmqWJyV27xerBbJMf_04QasI-75kn5ue6Myo,3507
+optimum/rbln/modeling_alias.py,sha256=lvyUxIrrXWgV2o0ymbeFkhz_ou8S9_CRRfQ_EfAvQZU,2133
+optimum/rbln/modeling_base.py,sha256=lOC2qoL6D68pzekFOtKwbrGcWnZ_ANUHRjrIjL7Q7eI,43802
+optimum/rbln/modeling_config.py,sha256=va58Gpbn3rolqKu9y2u3vYVT6kynBGpox_jod6cs-j0,10612
+optimum/rbln/transformers/__init__.py,sha256=qoswlx1hgsdNxjLv5RnOZQThi5aN4dGiPd4x-axuyaA,3801
 optimum/rbln/transformers/cache_utils.py,sha256=VfArIkAJn3zPXdu-6RXiCWlU-uVwxvhgoMiGxrPtk40,3835
 optimum/rbln/transformers/generation/__init__.py,sha256=6MmqS9D21ir4dcH6_fq8kRsX1VK3QspSn6Qw83F4ORE,1081
 optimum/rbln/transformers/generation/streamers.py,sha256=X-dEmL1L_0Oy0QSFj2RNdamb_xbDWLXd-Ms8ckx6OZ4,5778
 optimum/rbln/transformers/generation/utils.py,sha256=XqPbYQCe6cEuWssG9iHpbbL-wgSQTcyKHabGwKj7lnE,19462
-optimum/rbln/transformers/models/__init__.py,sha256=
+optimum/rbln/transformers/models/__init__.py,sha256=bXKob99DNgkqCho6NTvzOHf5wMdLNeOmIl5e80ni_Nc,2221
 optimum/rbln/transformers/models/auto/__init__.py,sha256=zMqaMIdGwuZJq4gLjRC-69M2mGUKrd0aRpmb4Rpm6-g,435
 optimum/rbln/transformers/models/auto/auto_factory.py,sha256=Up052AXID12uqa5UgLRW89EkYXDpuXR70Bt-xNHEZto,3159
-optimum/rbln/transformers/models/auto/modeling_auto.py,sha256=
-optimum/rbln/transformers/models/bart/__init__.py,sha256
-optimum/rbln/transformers/models/bart/bart_architecture.py,sha256=
-optimum/rbln/transformers/models/bart/modeling_bart.py,sha256
+optimum/rbln/transformers/models/auto/modeling_auto.py,sha256=zET3k5GiqREvPuc_d9Sauea7rITlP6Wl32xdlCFqdhY,3165
+optimum/rbln/transformers/models/bart/__init__.py,sha256=-mrB4kmPpEIVk713yEIRtC57AZ7kZ23g4bsOKcvBFNE,1138
+optimum/rbln/transformers/models/bart/bart_architecture.py,sha256=W6XeumvuKv1i7p4YzKM0NgpO3NCnc1qwGXknZZrPlP0,21298
+optimum/rbln/transformers/models/bart/modeling_bart.py,sha256=-ulamfBSlXDL67Q1Bzo4Q8sGuuzJBjut9XPRTeQhCbA,5261
 optimum/rbln/transformers/models/bert/__init__.py,sha256=divBpVNrRAdNAPgnQkGiEZI4oJHCJtLuwdYpMbD3dMM,1034
-optimum/rbln/transformers/models/bert/modeling_bert.py,sha256=
+optimum/rbln/transformers/models/bert/modeling_bert.py,sha256=F5FKVgiIBdDFq-Ql8AmE0WT_mjL0gFfr1AGE_frTexs,4228
 optimum/rbln/transformers/models/clip/__init__.py,sha256=iXZfPPIztzMDOkY3fbEzx9dCkFKKtWYXCpLGfjEUeZE,1092
-optimum/rbln/transformers/models/clip/modeling_clip.py,sha256=
+optimum/rbln/transformers/models/clip/modeling_clip.py,sha256=NpQgw6fJLFz746iF9hH2-k-6V8wdg0v22y0ZWji77sU,7114
 optimum/rbln/transformers/models/decoderonly/__init__.py,sha256=AG3ib8iZAEDAvVTNhieCyojWZtA67voPB0dI8lbCXTQ,1371
 optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py,sha256=25YMgIGYCNSawLf9Gg0HLncb7sqi2FI6sAbt4nitWJI,20047
-optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py,sha256=
+optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py,sha256=0H3U0AnQZ3mDEoSsIebd1e7jdP4qfxSVDEziJClV5d0,27381
 optimum/rbln/transformers/models/dpt/__init__.py,sha256=R8OHDxOAYPjkk5t8osaPqRh85Pf1Cg1BtzqesqFRcTI,1045
-optimum/rbln/transformers/models/dpt/modeling_dpt.py,sha256=
+optimum/rbln/transformers/models/dpt/modeling_dpt.py,sha256=pKQ2vmR50GyEyKK0aV7p33PqTuksiorDhC0cH5qUYiw,3810
+optimum/rbln/transformers/models/exaone/__init__.py,sha256=CuWNwnZrbd_cLU7jDDPqC0kZIqx1ii_BYyQ98CKDag0,1253
+optimum/rbln/transformers/models/exaone/exaone_architecture.py,sha256=w7hi8gcjOgWwgQdinJ5aMkmwEfERTkWYsAezk5kfvD8,2669
+optimum/rbln/transformers/models/exaone/hf_hub_cached/configuration_exaone.py,sha256=cKtUHugxwnbR6JOtD2a0912a2iepRpX9dEAWDeSu194,10537
+optimum/rbln/transformers/models/exaone/hf_hub_cached/modeling_exaone.py,sha256=CUd-z7f-BMIe8WPRVx-u5A60ljW2C6u8CzrAuw__d5M,80983
+optimum/rbln/transformers/models/exaone/modeling_exaone.py,sha256=nvhNo071DDGmCFQjSleByJLQPqGjpnagYErentuYj9I,2958
 optimum/rbln/transformers/models/gemma/__init__.py,sha256=L1Qfr6dufWtoUZND_ofwxXPSxivEvPR8exy16a_nM2o,1042
 optimum/rbln/transformers/models/gemma/gemma_architecture.py,sha256=hT0CqL_jhKWi8cDa1zFcAFPyli844wkliJ3bL5OyEdQ,4376
 optimum/rbln/transformers/models/gemma/modeling_gemma.py,sha256=ErAa3NlsNhy7ocSMjGrDaNLmJsn74NeU_OZQQNRpMvY,2643
@@ -54,40 +58,46 @@ optimum/rbln/transformers/models/llama/__init__.py,sha256=5mX-MuKzVBj6WQeVxyPhtv
 optimum/rbln/transformers/models/llama/llama_architecture.py,sha256=j4mifSOaIk7wwV9fL9wQSt5kR3rpnvjtxd3VzhMNdgY,1123
 optimum/rbln/transformers/models/llama/modeling_llama.py,sha256=ZMKigYHGlRhi6asAWpC5tayb4l1Rslt2quJUjL_lgxw,2643
 optimum/rbln/transformers/models/llava_next/__init__.py,sha256=3vi2rmTeKBydGRFOtxELhxWixZggFMpGex6xqfMgi-I,1064
-optimum/rbln/transformers/models/llava_next/modeling_llava_next.py,sha256=
+optimum/rbln/transformers/models/llava_next/modeling_llava_next.py,sha256=AIpIk7EExj9VNxFWzDALDzMNqfZ4ke0IlRF3Dsi9r9I,27582
 optimum/rbln/transformers/models/midm/__init__.py,sha256=_6kYchy47frGMZ8uoUspZ9IwrmCBQJ-8kVfXM7xOMew,1249
 optimum/rbln/transformers/models/midm/hf_hub_cached/configuration_midm.py,sha256=P5JqTTcx56HOccxKbR14ZjA67BI0RNnJycG738JMaJ4,833
 optimum/rbln/transformers/models/midm/hf_hub_cached/midm_bitext_tokenization.py,sha256=5lhMXfqnIak1PJ9YL-vUxIdY_3DUr3IBXzTqf3ofpmI,12835
 optimum/rbln/transformers/models/midm/hf_hub_cached/modeling_midm.py,sha256=54__wd9EXwGxmaHDksTTcUD2aWl6WoszYsR8dlL1wfE,61031
 optimum/rbln/transformers/models/midm/hf_hub_cached/rotary_position_embedding.py,sha256=5ywaUVKTvqO8GRsHOSXOOGlbiEn-DbGkpJs59_dFb18,4059
 optimum/rbln/transformers/models/midm/midm_architecture.py,sha256=CYtFrFQ2L3u2_81TrTbEwBqgGHHQBh1sTs3vjF0xbp8,11505
-optimum/rbln/transformers/models/midm/modeling_midm.py,sha256=
+optimum/rbln/transformers/models/midm/modeling_midm.py,sha256=WGbzrusYg9BU2pTOvdCMCS6D129_2oD9i166bYURYw4,2953
 optimum/rbln/transformers/models/mistral/__init__.py,sha256=XtuOmzBITjj-H1yctXobJjHF908x1Wlxr_p4hi06v8I,1046
 optimum/rbln/transformers/models/mistral/mistral_architecture.py,sha256=LCvY4L0Wq1VruKhZ3JTSiuZJqQRJlTae5A2bKsUBGAg,1128
 optimum/rbln/transformers/models/mistral/modeling_mistral.py,sha256=TB6Ju-yJt57xx4YSYSvPTvLg51s7JeRtHiAA61gsewA,2678
 optimum/rbln/transformers/models/phi/__init__.py,sha256=LrGFTUo1oQnsPSTlxJqAJVVNUdUwq4u_Bf60RUgjLz4,1038
 optimum/rbln/transformers/models/phi/modeling_phi.py,sha256=Qh1YkWMVREIpYiq8_z5IEepLeyY-yTxmNjHHYrpez18,2639
 optimum/rbln/transformers/models/phi/phi_architecture.py,sha256=kgUqXnZvFiIB87-_5fdz29JwtrDAmzifbWTNN5aY1ks,15725
-optimum/rbln/transformers/models/
-optimum/rbln/transformers/models/
+optimum/rbln/transformers/models/qwen2/__init__.py,sha256=1PLl1rlF14C6eSk3EZaDfyEHPaC4DZ2vwVlrklTkOYg,1042
+optimum/rbln/transformers/models/qwen2/modeling_qwen2.py,sha256=Mf0v-SQEuDSQ1GXgYw0C4KfeInLgYngusdCg3eibkao,2635
+optimum/rbln/transformers/models/qwen2/qwen2_architecture.py,sha256=-X9OZ4HUCYDtwKnvidkWzCMPh_Xuu1wj-wRXIsQ9Pjg,1115
+optimum/rbln/transformers/models/seq2seq/__init__.py,sha256=Oa11lBWDNQWahqvDco3JIsZldYS-lO8qjpnaGKSfR00,1045
+optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py,sha256=ytDTNTTW221ShVRXClfQQBQV96NW-oYWwRjlbv9aXZU,18403
+optimum/rbln/transformers/models/t5/__init__.py,sha256=BeLpy0izLHIpqkTCA1q0P7DynEjgRqwOZrGc-8MXQGI,1113
+optimum/rbln/transformers/models/t5/modeling_t5.py,sha256=yjzvRGUME4LYUebUODrJRUkKHhI9rhcS5v8U3j8kMHc,1927
+optimum/rbln/transformers/models/t5/t5_architecture.py,sha256=k3ROGNSGGuF1gFNV-LxoFFgfxo7ab5GSQA4GIi5MLsI,21074
 optimum/rbln/transformers/models/wav2vec2/__init__.py,sha256=mz4cXqG9b0tDpTAw3qYn3FaJuolX601VmKBE3gohLSw,1043
-optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=
+optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=lTtLGKuAxuVNguqSc2y0D0MsE6eHCraDS7-l2-0QJEY,4236
 optimum/rbln/transformers/models/whisper/__init__.py,sha256=PZ8qeAAFMas2MizwVYFxlpFWd5k1Pe1x-0IJfYAMhT8,1059
 optimum/rbln/transformers/models/whisper/generation_whisper.py,sha256=Kwwskbp48wJxEkFGQLlm0L252rO7tx_YLYmOA-_IPwI,3387
-optimum/rbln/transformers/models/whisper/modeling_whisper.py,sha256=
+optimum/rbln/transformers/models/whisper/modeling_whisper.py,sha256=Yp5I1_cFClqzKr57X68Sz7Z9s5ri5cn_s9dLpRF3jpc,15343
 optimum/rbln/transformers/models/whisper/whisper_architecture.py,sha256=OQzkGa2WSUn3OVQ1DYVOY49N46QvxO1hdEbQ7Ke-o_c,17203
 optimum/rbln/transformers/models/xlm_roberta/__init__.py,sha256=NTj4hCpd8L2_i5DZuV5wp-h8OlTLYVUqTrJxzY_Dg9g,1047
-optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=
+optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=8xLhJvuFSCmURyKWpB3O1GLLUn00ewwdAdbzJCV7B78,4929
 optimum/rbln/transformers/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 optimum/rbln/transformers/utils/rbln_quantization.py,sha256=aXlPjD17oi_rFgqv2yabkQZz6xFX7pHBLNJYJ2O95Qc,3530
 optimum/rbln/utils/__init__.py,sha256=F6hJP00eV1_hT_IVwqqYwLWcLQAvZbmmrNMJTia3mjI,1106
-optimum/rbln/utils/import_utils.py,sha256=
-optimum/rbln/utils/logging.py,sha256=
-optimum/rbln/utils/runtime_utils.py,sha256=
+optimum/rbln/utils/import_utils.py,sha256=ochkue99SxwaG5WdNg3GMyh1Cbn0JnYX1nnLyqfDOFg,3789
+optimum/rbln/utils/logging.py,sha256=xIcLmUQoIJoBj3owkXN5_WQkQljcos6J6KSdX35IApw,2271
+optimum/rbln/utils/runtime_utils.py,sha256=RgZzyUo-RfVCf3IRmEim1ZzJzuZ-VNB98LK1NQjBrUA,3802
 optimum/rbln/utils/save_utils.py,sha256=eFIPtmiblCJ3MvtxEPxmAR3iuLEUrzpyzwtVotDauhw,3283
-optimum/rbln/utils/timer_utils.py,sha256=
-optimum_rbln-0.1.
-optimum_rbln-0.1.
-optimum_rbln-0.1.
-optimum_rbln-0.1.
-optimum_rbln-0.1.
+optimum/rbln/utils/timer_utils.py,sha256=9FtBJpqCcDWmilgP67IZqnj1UGZag4WO7XflEms-DB8,1229
+optimum_rbln-0.1.12.dist-info/METADATA,sha256=RbGOydoke74MtRlasivGwjRNabuhuHonc8CFRV4PUVE,4601
+optimum_rbln-0.1.12.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+optimum_rbln-0.1.12.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+optimum_rbln-0.1.12.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+optimum_rbln-0.1.12.dist-info/RECORD,,
{optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/entry_points.txt: file without changes
{optimum_rbln-0.1.11.dist-info → optimum_rbln-0.1.12.dist-info}/licenses/LICENSE: file without changes