optimum-rbln 0.8.4a0__py3-none-any.whl → 0.8.4a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of optimum-rbln might be problematic. Click here for more details.
- optimum/rbln/__init__.py +8 -0
- optimum/rbln/__version__.py +2 -2
- optimum/rbln/configuration_utils.py +10 -3
- optimum/rbln/diffusers/modeling_diffusers.py +15 -1
- optimum/rbln/diffusers/pipelines/auto_pipeline.py +37 -4
- optimum/rbln/modeling_base.py +40 -2
- optimum/rbln/transformers/__init__.py +8 -0
- optimum/rbln/transformers/models/__init__.py +12 -0
- optimum/rbln/transformers/models/auto/auto_factory.py +28 -13
- optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py +1 -1
- optimum/rbln/transformers/models/grounding_dino/grounding_dino_architecture.py +110 -18
- optimum/rbln/transformers/models/qwen2_vl/__init__.py +19 -0
- optimum/rbln/transformers/models/qwen2_vl/configuration_qwen2_vl.py +88 -0
- optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py +506 -0
- optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py +141 -0
- optimum/rbln/transformers/models/time_series_transformer/time_series_transformers_architecture.py +7 -1
- {optimum_rbln-0.8.4a0.dist-info → optimum_rbln-0.8.4a2.dist-info}/METADATA +1 -1
- {optimum_rbln-0.8.4a0.dist-info → optimum_rbln-0.8.4a2.dist-info}/RECORD +20 -16
- {optimum_rbln-0.8.4a0.dist-info → optimum_rbln-0.8.4a2.dist-info}/WHEEL +0 -0
- {optimum_rbln-0.8.4a0.dist-info → optimum_rbln-0.8.4a2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import math
|
|
2
|
+
from typing import Tuple
|
|
3
|
+
|
|
4
|
+
import torch
|
|
5
|
+
import torch.nn as nn
|
|
6
|
+
|
|
7
|
+
from ..decoderonly.decoderonly_architecture import (
|
|
8
|
+
DecoderOnlyWrapper,
|
|
9
|
+
apply_rotary_pos_emb,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class Qwen2VisionTransformerWrapper(nn.Module):
    """Traceable wrapper around the Qwen2-VL vision transformer.

    Keeps a reference to the original HF module, reuses its ``merger``
    projection head, and re-wraps every vision block with
    :class:`Qwen2VLVisionBlock` so attention masks and rotary embeddings
    can be passed explicitly at compile time.
    """

    def __init__(self, model: torch.nn.Module):
        super().__init__()
        self._original_mod = model
        self.merger = model.merger
        self.blocks = self.wrap_vision_blocks(model.blocks)

    def wrap_vision_blocks(self, blocks: torch.nn.ModuleList) -> nn.ModuleList:
        """Wrap each original vision block; returns a new ``nn.ModuleList``."""
        # Comprehension replaces the original append loop whose enumerate()
        # index was never used.
        return nn.ModuleList(Qwen2VLVisionBlock(block) for block in blocks)

    def forward(
        self,
        hidden_states: torch.Tensor,
        full_attn_masks: torch.Tensor,
        cos: torch.Tensor,
        sin: torch.Tensor,
    ) -> torch.Tensor:
        # Convert a {0, 1} keep-mask into an additive bias:
        # 1 (attend) -> 0, 0 (masked) -> float32 minimum (~ -inf).
        full_attn_masks = (1 - full_attn_masks) * torch.finfo(torch.float32).min

        for block in self.blocks:
            hidden_states = block(hidden_states, full_attn_masks, [cos, sin])

        return self.merger(hidden_states)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class Qwen2VLVisionBlock(torch.nn.Module):
    """One pre-norm vision transformer block: attention + MLP, each with a
    residual connection. Normalization and MLP submodules are reused from the
    original block; only the attention is re-wrapped."""

    def __init__(self, model: torch.nn.Module):
        super().__init__()
        self._origin_model = model
        self.norm1 = model.norm1
        self.norm2 = model.norm2

        self.attn = VisionAttention(model.attn)
        self.mlp = model.mlp

    def forward(
        self,
        hidden_states: torch.Tensor,
        attn_masks: torch.Tensor,
        position_embeddings: Tuple[torch.Tensor, torch.Tensor],
    ) -> torch.Tensor:
        # Residual around attention on the norm1-normalized input.
        attn_out = self.attn(self.norm1(hidden_states), attn_masks, position_embeddings)
        hidden_states = hidden_states + attn_out
        # Residual around the MLP on the norm2-normalized input.
        mlp_out = self.mlp(self.norm2(hidden_states))
        return hidden_states + mlp_out
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class VisionAttention(nn.Module):
    """Multi-head self-attention over a flat ``(seq_len, hidden)`` vision
    sequence, reusing the original module's fused ``qkv`` and output ``proj``
    linear layers."""

    def __init__(self, model: nn.Module) -> None:
        super().__init__()
        self._origin_model = model
        self.num_heads = model.num_heads
        # Some checkpoints do not expose head_dim; derive it from the
        # projection width when missing.
        self.head_dim = getattr(model, "head_dim", model.proj.in_features // model.num_heads)
        self.qkv = model.qkv
        self.proj = model.proj

    def forward(
        self,
        hidden_states: torch.Tensor,
        attn_masks: torch.Tensor,
        position_embeddings: Tuple[torch.Tensor, torch.Tensor],
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]
        hidden_states = hidden_states.unsqueeze(0)

        # Fused projection: (1, S, 3, H, D) -> (3, 1, H, S, D), then split
        # into q/k/v, each shaped (1, H, S, D).
        fused = self.qkv(hidden_states).reshape(1, seq_length, 3, self.num_heads, -1)
        q, k, v = fused.permute(2, 0, 3, 1, 4).unbind(0)

        cos, sin = position_embeddings
        q, k = apply_rotary_pos_emb(q, k, cos, sin)

        # Scaled dot-product attention with an additive mask.
        scores = torch.matmul(q, k.transpose(2, 3)) / math.sqrt(self.head_dim)
        scores = scores + attn_masks
        probs = nn.functional.softmax(scores, dim=-1, dtype=torch.float32)

        context = torch.matmul(probs, v)
        context = context.transpose(1, 2).reshape(1, seq_length, -1)
        return self.proj(context).squeeze(0)
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class Qwen2VL_LanguageModelWrapper(DecoderOnlyWrapper):
    """Decoder wrapper that unpacks the flat runtime argument list produced
    for the Qwen2-VL language model."""

    def prepare_forward_args(self, *args):
        """Split the positional ``args`` into the named decoder inputs.

        The layout depends on the rbln config: input ids OR inputs_embeds,
        then cache_position, global block tables, position embeddings,
        optionally query_position (prefill phase only) and attention_mask,
        followed by the flattened [key, value] * n_layer cache tensors.
        """
        remaining = list(args)
        use_embeds = self.rbln_config.use_inputs_embeds

        input_ids = None if use_embeds else remaining.pop(0)
        inputs_embeds = remaining.pop(0) if use_embeds else None
        cache_position = remaining.pop(0)
        global_block_tables = remaining.pop(0)
        local_block_tables = None
        position_embeds = remaining.pop(0)
        query_position = remaining.pop(0) if self.phase == "prefill" else None
        position_ids = None
        attention_mask = remaining.pop(0) if self.rbln_config.use_attention_mask else None
        past_key_values = remaining

        if len(past_key_values) != 2 * self.num_hidden_layers:
            raise ValueError(
                f"Different past_key_values to model's config. {len(past_key_values)} != {2 * self.num_hidden_layers}"
            )

        # [key, value] * n_layer -> ( (key, value) ) * n_layer
        # cache shape : batch, n_heads, 1, max_seq_len, head_dim
        past_key_values = [
            [past_key_values[2 * layer], past_key_values[2 * layer + 1]]
            for layer in range(self.config.num_hidden_layers)
        ]

        return (
            input_ids,
            inputs_embeds,
            cache_position,
            global_block_tables,
            local_block_tables,
            query_position,
            attention_mask,
            position_ids,
            past_key_values,
            position_embeds,
        )
|
optimum/rbln/transformers/models/time_series_transformer/time_series_transformers_architecture.py
CHANGED
|
@@ -162,7 +162,13 @@ class TimeSeriesTransformersDecoder(nn.Module):
|
|
|
162
162
|
attention_mask = _prepare_4d_causal_attention_mask(attention_mask, input_shape, inputs_embeds, cache_position)
|
|
163
163
|
|
|
164
164
|
hidden_states = self.value_embedding(inputs_embeds)
|
|
165
|
-
|
|
165
|
+
embed_idx = cache_position + self.config.context_length
|
|
166
|
+
if torch.compiler.is_exporting():
|
|
167
|
+
embed_idx = embed_idx.item()
|
|
168
|
+
torch._check_is_size(embed_idx)
|
|
169
|
+
torch._check(embed_idx >= 0)
|
|
170
|
+
torch._check(embed_idx < len(self.embed_positions.weight))
|
|
171
|
+
embed_pos = self.embed_positions.weight[embed_idx]
|
|
166
172
|
hidden_states = self.layernorm_embedding(hidden_states + embed_pos)
|
|
167
173
|
|
|
168
174
|
# iterate decoder_layer
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: optimum-rbln
|
|
3
|
-
Version: 0.8.4a0
|
|
3
|
+
Version: 0.8.4a2
|
|
4
4
|
Summary: Optimum RBLN is the interface between the HuggingFace Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
|
|
5
5
|
Project-URL: Homepage, https://rebellions.ai
|
|
6
6
|
Project-URL: Documentation, https://docs.rbln.ai
|
|
@@ -1,10 +1,10 @@
|
|
|
1
|
-
optimum/rbln/__init__.py,sha256=
|
|
2
|
-
optimum/rbln/__version__.py,sha256=
|
|
3
|
-
optimum/rbln/configuration_utils.py,sha256=
|
|
1
|
+
optimum/rbln/__init__.py,sha256=DAJM5PWAYFiWVlyxVXUvj3CaFOEhX1yhEfhIt1LxL-A,18714
|
|
2
|
+
optimum/rbln/__version__.py,sha256=6hrMyGZLwiE6FQKcZWu0sAdnavT4qOGFRA8YsAHksXI,712
|
|
3
|
+
optimum/rbln/configuration_utils.py,sha256=WNubd8EJIrdBkLOGT2UJJorgNL3lzhjg3a4bihAIptY,34761
|
|
4
4
|
optimum/rbln/modeling.py,sha256=cAIPWEw5DGzUWeqjCbocRhU6OO3jyhVGW60AmBLh1Nw,14134
|
|
5
|
-
optimum/rbln/modeling_base.py,sha256=
|
|
5
|
+
optimum/rbln/modeling_base.py,sha256=97ju0uHJXB7PaorKaspf-FbLfsaHy0HwRVLJqtVscXA,27574
|
|
6
6
|
optimum/rbln/diffusers/__init__.py,sha256=1tgU_xWA42BmInqu9bBz_5R_E9TGhhK3mI06YlaiTLg,7232
|
|
7
|
-
optimum/rbln/diffusers/modeling_diffusers.py,sha256=
|
|
7
|
+
optimum/rbln/diffusers/modeling_diffusers.py,sha256=3bzL0ZH7XyS8nGMWRSMIGjl9H3H2fhiZgmPaIF50mwg,20464
|
|
8
8
|
optimum/rbln/diffusers/configurations/__init__.py,sha256=vMRnPY4s-Uju43xP038D2EA18X_mhy2YfsZVpSU-VoA,1322
|
|
9
9
|
optimum/rbln/diffusers/configurations/models/__init__.py,sha256=7q95gtgDzCeIBogGw8SLQoHT4Wch7vpLJVF2UQovuoo,567
|
|
10
10
|
optimum/rbln/diffusers/configurations/models/configuration_autoencoder_kl.py,sha256=ADS4SGZbwY6fy3SVNhgo3Zg4KxzAAGq5_zsJ97Dezh4,3201
|
|
@@ -36,7 +36,7 @@ optimum/rbln/diffusers/models/transformers/transformer_sd3.py,sha256=yF7sS0Qvawo
|
|
|
36
36
|
optimum/rbln/diffusers/models/unets/__init__.py,sha256=MaICuK9CWjgzejXy8y2NDrphuEq1rkzanF8u45k6O5I,655
|
|
37
37
|
optimum/rbln/diffusers/models/unets/unet_2d_condition.py,sha256=v3WS9EGKROE_QClXrxC7rmRko1BspAvAbeIfh83LK88,15832
|
|
38
38
|
optimum/rbln/diffusers/pipelines/__init__.py,sha256=r8mu21102cKXdkG1II9tpfpUS6wuyren2oK9y_MptZY,3703
|
|
39
|
-
optimum/rbln/diffusers/pipelines/auto_pipeline.py,sha256=
|
|
39
|
+
optimum/rbln/diffusers/pipelines/auto_pipeline.py,sha256=DaDWla59LhKGv7h8sdnJrwYaxvcwnO3-qFc47NHvx20,10644
|
|
40
40
|
optimum/rbln/diffusers/pipelines/controlnet/__init__.py,sha256=n1Ef22TSeax-kENi_d8K6wGGHSNEo9QkUeygELHgcao,983
|
|
41
41
|
optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py,sha256=3S9dogIHW8Bqg5kIlCudhCQG-4g3FcdOPEWhBOf7CJA,4059
|
|
42
42
|
optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py,sha256=G96bh4D9Cu-w4F9gZBQF6wNzhJQv9kvI34ZFsuEDjSw,35714
|
|
@@ -72,18 +72,18 @@ optimum/rbln/ops/flash_attn.py,sha256=yTCdYQVqm_1rHMHWjrMQaIR8WTuG_xA6t033x1IVvT
|
|
|
72
72
|
optimum/rbln/ops/kv_cache_update.py,sha256=aIvK2Sp7M3EfJzJgNvIvAJv4emoN6QOhmgaWj-VboLs,1440
|
|
73
73
|
optimum/rbln/ops/linear.py,sha256=5K3pcrrUHu_p8LrMIU-jX2TnafksveFjjZSCsYSp_yw,1328
|
|
74
74
|
optimum/rbln/ops/sliding_window_attn.py,sha256=EQrV_yRGc5z6kvwEsAcLP028bJWkQg2UPI3xubt9skU,3487
|
|
75
|
-
optimum/rbln/transformers/__init__.py,sha256=
|
|
75
|
+
optimum/rbln/transformers/__init__.py,sha256=g5G6Eqk80NzS0tMmwghFI2DMKgPaOpoafv1m0Euhw6A,12459
|
|
76
76
|
optimum/rbln/transformers/configuration_generic.py,sha256=jrehv1oONOS-iBTY5gj2TKUfWjDTnukNJt6cZfNMylU,5213
|
|
77
77
|
optimum/rbln/transformers/modeling_attention_utils.py,sha256=aLyOaq4me1m-JMmnKbuyNQageDxNU2jjEhGE_ew2P5o,11465
|
|
78
78
|
optimum/rbln/transformers/modeling_generic.py,sha256=82Wi2K6zAp5tjef05lzYIEqbK93h0_OkPDbElB-VMMs,12568
|
|
79
79
|
optimum/rbln/transformers/modeling_outputs.py,sha256=cd8ZlhHAGq7S6i5-QK6TJCxgORvoPMnZpqPBlUc_pMY,1177
|
|
80
80
|
optimum/rbln/transformers/modeling_rope_utils.py,sha256=6Zg3r-TeUk4WQAlr95pqfhuoAD_RQ4njT1rbO9uPL0Q,14379
|
|
81
|
-
optimum/rbln/transformers/models/__init__.py,sha256=
|
|
81
|
+
optimum/rbln/transformers/models/__init__.py,sha256=9gAXrYeYPdLbQH8KlRG4eSOFQ8h4kyWzXPM1grHvpDQ,13418
|
|
82
82
|
optimum/rbln/transformers/models/audio_spectrogram_transformer/__init__.py,sha256=I2vL4lrzbT5p4eJcH-EKHzEfcPkj_XVsie7jb9q6yic,775
|
|
83
83
|
optimum/rbln/transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py,sha256=z7LJiVJPmnlCM3mcyhPJP8AufSrxO_dsPeJ51onq-Nc,833
|
|
84
84
|
optimum/rbln/transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py,sha256=FIKEVWpIt6-JQX9B_rAfCrAPqdUHtR2i8D_X2k7639E,1498
|
|
85
85
|
optimum/rbln/transformers/models/auto/__init__.py,sha256=tdYqXkg9xBGNr4fZjH7_O3qRVbHvpEVjrJ6wtNUMMJM,1150
|
|
86
|
-
optimum/rbln/transformers/models/auto/auto_factory.py,sha256=
|
|
86
|
+
optimum/rbln/transformers/models/auto/auto_factory.py,sha256=9oaynN5f6aL6BTgDu5xF3b-5lz9eFuzLOdfVaZwIwvc,8834
|
|
87
87
|
optimum/rbln/transformers/models/auto/modeling_auto.py,sha256=SMsWnD8f7VhKmh7h_S2voksEWlNccfF4fQ7AmwLYq6U,4790
|
|
88
88
|
optimum/rbln/transformers/models/bart/__init__.py,sha256=fVo-gZEmJ0yxkIxEX6ciuRAGgXNyuvaXE2s88bhbjAE,830
|
|
89
89
|
optimum/rbln/transformers/models/bart/bart_architecture.py,sha256=mAepjL0paPMK180vGTTCxXQ-hVZ1DD6JR-GvVNGJLqY,6268
|
|
@@ -105,7 +105,7 @@ optimum/rbln/transformers/models/colpali/configuration_colpali.py,sha256=eDWPVlo
|
|
|
105
105
|
optimum/rbln/transformers/models/colpali/modeling_colpali.py,sha256=v9rPLmNx-BQZhDFhKnr2kmARElTtKdFZCgFIU4m-HPw,15703
|
|
106
106
|
optimum/rbln/transformers/models/decoderonly/__init__.py,sha256=w3VZOIBYaHXVdnuhK4y0zWAj0IAv7_5LGTJYaz9oYmI,1056
|
|
107
107
|
optimum/rbln/transformers/models/decoderonly/configuration_decoderonly.py,sha256=H2i9Iefy-q5X-0BLWQ-CrxK8ZoT3p9t0lt_3r4TFSCY,15182
|
|
108
|
-
optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py,sha256=
|
|
108
|
+
optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py,sha256=ySsiU0Acj5enJW-SqtFMAfBeH0HeqlhCd78QlpKJNQw,42780
|
|
109
109
|
optimum/rbln/transformers/models/decoderonly/decoderonly_runtime_utils.py,sha256=v3mfIlQImQkYYr-rPn7rQR3GYdVUhALRttEduLI7H9c,20012
|
|
110
110
|
optimum/rbln/transformers/models/decoderonly/generation_decoderonly.py,sha256=4D89IF0yQju_Dp_vLJN_dBkpe2U_LMWaUciYx57D-0M,3379
|
|
111
111
|
optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py,sha256=dAHV9NgdpXHyTJGT0lieXOB3Pzi_NPlR4rqmRtmAWzM,32412
|
|
@@ -137,7 +137,7 @@ optimum/rbln/transformers/models/gpt2/gpt2_architecture.py,sha256=MyAWReXmyuHnDp
|
|
|
137
137
|
optimum/rbln/transformers/models/gpt2/modeling_gpt2.py,sha256=DhF6hU3oCYGbZ7UijKCsRfTx-VCkTqqqNwqqMSrjqRE,2230
|
|
138
138
|
optimum/rbln/transformers/models/grounding_dino/__init__.py,sha256=DE7DipZGvrKC6b1T77k4I4X3G70ss8mlr-PrZCaohto,307
|
|
139
139
|
optimum/rbln/transformers/models/grounding_dino/configuration_grounding_dino.py,sha256=b6aeAlAMf0aOoTKAqe5nnBfontu_H3zvIHgOiCNMJ1I,3127
|
|
140
|
-
optimum/rbln/transformers/models/grounding_dino/grounding_dino_architecture.py,sha256=
|
|
140
|
+
optimum/rbln/transformers/models/grounding_dino/grounding_dino_architecture.py,sha256=2BGhyKa7x6fiiZPaLy_S7zKr2NOdJnMLFMf6CEcegGE,26674
|
|
141
141
|
optimum/rbln/transformers/models/grounding_dino/modeling_grounding_dino.py,sha256=bXAOs2QH4sy2UFoFLUSM6u1_VHouUT5COERLQX20F6Y,46897
|
|
142
142
|
optimum/rbln/transformers/models/idefics3/__init__.py,sha256=ulxE7HEfXsNJhd25J9Fvi6vggo9aZH9sLKJjWB6LlzQ,814
|
|
143
143
|
optimum/rbln/transformers/models/idefics3/configuration_idefics3.py,sha256=8BhPLkfE1_ZU0eSm2iTbWQOnVe1q0g99srYHWZM6VJ4,2373
|
|
@@ -184,6 +184,10 @@ optimum/rbln/transformers/models/qwen2_5_vl/__init__.py,sha256=rAW3DKQUzGL6EMwa5
|
|
|
184
184
|
optimum/rbln/transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py,sha256=1yyMFxh1SKsKR7rOjuotPvpSneN2_4a89bYfNk42370,4735
|
|
185
185
|
optimum/rbln/transformers/models/qwen2_5_vl/modeling_qwen2_5_vl.py,sha256=hRvA37sPFC9xH1FqnFbtHS9rQOPwAvLYg4zl4oEyK-w,26639
|
|
186
186
|
optimum/rbln/transformers/models/qwen2_5_vl/qwen2_5_vl_architecture.py,sha256=i_UUWhKoFjJ5CCpgeWicqABM23TxMEKPQ354LoZ6iUU,7445
|
|
187
|
+
optimum/rbln/transformers/models/qwen2_vl/__init__.py,sha256=O3t6zKda92CnZDzEnz_dcisMOQ71-OOJxElXzKCH5e0,849
|
|
188
|
+
optimum/rbln/transformers/models/qwen2_vl/configuration_qwen2_vl.py,sha256=OGIlUHWNymBTOxnwit-1gm2Gpl8bbGV0i076Sa4RgCw,4718
|
|
189
|
+
optimum/rbln/transformers/models/qwen2_vl/modeling_qwen2_vl.py,sha256=OKjhwWe0UDczmauCNQA838BF3n1BIz8c7oM5gaBVUz8,20286
|
|
190
|
+
optimum/rbln/transformers/models/qwen2_vl/qwen2_vl_architecture.py,sha256=EZlCuSRTIpSAGEjtDi4SY1V9RRdtgg76ie5jqec1UuI,4833
|
|
187
191
|
optimum/rbln/transformers/models/qwen3/__init__.py,sha256=tI4KwvXpD35dUUaa8aLUXpWoU9gJGcmKXeywOlH14ZE,746
|
|
188
192
|
optimum/rbln/transformers/models/qwen3/configuration_qwen3.py,sha256=BFRPggnH4VlsXlOa19C6KAID-bPgQ8ooQ29dvogh5zk,2102
|
|
189
193
|
optimum/rbln/transformers/models/qwen3/modeling_qwen3.py,sha256=S05efusxjXJhMMYztstGes6ZbqkSr5I4fHFaLSYVG8c,5760
|
|
@@ -211,7 +215,7 @@ optimum/rbln/transformers/models/t5/t5_architecture.py,sha256=DlJNrGk35NTBhcp76P
|
|
|
211
215
|
optimum/rbln/transformers/models/time_series_transformer/__init__.py,sha256=xJaFWQawlwtv4H5tVFcY1pxLYzjHtMAlLq6nXysdkN8,1243
|
|
212
216
|
optimum/rbln/transformers/models/time_series_transformer/configuration_time_series_transformer.py,sha256=MO-T4pcsea4EOmYeeg0tosUH6w76azqIPyV8Em8CMqw,1621
|
|
213
217
|
optimum/rbln/transformers/models/time_series_transformer/modeling_time_series_transformer.py,sha256=8orxM-LbShCt2jC8Uyx43cSxWN1CGxamS58pKPjvzxs,17167
|
|
214
|
-
optimum/rbln/transformers/models/time_series_transformer/time_series_transformers_architecture.py,sha256=
|
|
218
|
+
optimum/rbln/transformers/models/time_series_transformer/time_series_transformers_architecture.py,sha256=hAZXyXxzSDJMdkI883eefzpjz2L9KTVTRBeOVU8e92k,14038
|
|
215
219
|
optimum/rbln/transformers/models/vit/__init__.py,sha256=CrrkHehfCe3U-_rUS00aMBY7Tncdeh43sNUgVI9Dt_g,807
|
|
216
220
|
optimum/rbln/transformers/models/vit/configuration_vit.py,sha256=x98CxKR1cpKAG7Eh43uuPeGeGn4gS3HcKLPoDL3SWJo,994
|
|
217
221
|
optimum/rbln/transformers/models/vit/modeling_vit.py,sha256=Q8xvX2oG2dC2RYM4ocaS0H70a2q_vQ9DZK2mCdyvxa0,1058
|
|
@@ -238,7 +242,7 @@ optimum/rbln/utils/model_utils.py,sha256=4k5879Kh75m3x_vS4-qOGfqsOiAvc2kdNFFfvsF
|
|
|
238
242
|
optimum/rbln/utils/runtime_utils.py,sha256=R6uXDbeJP03-FWdd4vthNe2D4aCra5n12E3WB1ifiGM,7933
|
|
239
243
|
optimum/rbln/utils/save_utils.py,sha256=hG5uOtYmecSXZuGTvCXsTM-SiyZpr5q3InUGCCq_jzQ,3619
|
|
240
244
|
optimum/rbln/utils/submodule.py,sha256=60NGLFvnhjP1DJg1opdb-FVQDsthcLCwWjW_1WQaasU,5280
|
|
241
|
-
optimum_rbln-0.8.4a0.dist-info/METADATA
|
|
242
|
-
optimum_rbln-0.8.4a0.dist-info/WHEEL
|
|
243
|
-
optimum_rbln-0.8.4a0.dist-info/licenses/LICENSE
|
|
244
|
-
optimum_rbln-0.8.4a0.dist-info/RECORD
|
|
245
|
+
optimum_rbln-0.8.4a2.dist-info/METADATA,sha256=edliyp3YVFP4epCAMZjuPPSxo4LZyyLtux9uAa7EJH4,5299
|
|
246
|
+
optimum_rbln-0.8.4a2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
247
|
+
optimum_rbln-0.8.4a2.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
|
|
248
|
+
optimum_rbln-0.8.4a2.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|