optimum-rbln 0.8.4a7__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of optimum-rbln might be problematic. Consult the registry's advisory page for this release for more details.

@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.8.4a7'
32
- __version_tuple__ = version_tuple = (0, 8, 4, 'a7')
31
+ __version__ = version = '0.9.1'
32
+ __version_tuple__ = version_tuple = (0, 9, 1)
33
33
 
34
34
  __commit_id__ = commit_id = None
@@ -14,11 +14,13 @@
14
14
 
15
15
 
16
16
  import inspect
17
- from typing import Any, Callable
17
+ from pathlib import Path
18
+ from typing import Any, Callable, Dict, Optional, Union
18
19
 
19
20
  from transformers import AutoModelForCausalLM
20
21
  from transformers.generation.utils import GenerationMixin
21
22
 
23
+ from ....configuration_utils import RBLNModelConfig
22
24
  from ....utils import logging
23
25
  from ..decoderonly import RBLNDecoderOnlyModelForCausalLM
24
26
  from .exaone_architecture import ExaoneForCausalLMWrapper
@@ -92,9 +94,45 @@ class RBLNExaoneForCausalLM(RBLNDecoderOnlyModelForCausalLM):
92
94
  _supports_cache_class = True
93
95
 
94
96
  @classmethod
95
- def from_pretrained(cls, *args, **kwargs):
96
- kwargs.setdefault("trust_remote_code", True)
97
- return super().from_pretrained(*args, **kwargs)
97
+ def from_pretrained(
98
+ cls,
99
+ model_id: Union[str, Path],
100
+ *,
101
+ export: Optional[bool] = None,
102
+ rbln_config: Optional[Union[Dict, RBLNModelConfig]] = None,
103
+ trust_remote_code: Optional[bool] = None,
104
+ **kwargs: Any,
105
+ ):
106
+ """
107
+ The `from_pretrained()` function is utilized in its standard form as in the HuggingFace transformers library.
108
+ User can use this function to load a pre-trained model from the HuggingFace library and convert it to a RBLN model to be run on RBLN NPUs.
109
+
110
+ Args:
111
+ model_id (Union[str, Path]): The model id of the pre-trained model to be loaded.
112
+ It can be downloaded from the HuggingFace model hub or a local path, or a model id of a compiled model using the RBLN Compiler.
113
+ export (Optional[bool]): A boolean flag to indicate whether the model should be compiled.
114
+ If None, it will be determined based on the existence of the compiled model files in the model_id.
115
+ rbln_config (Optional[Union[Dict, RBLNModelConfig]]): Configuration for RBLN model compilation and runtime.
116
+ This can be provided as a dictionary or an instance of the model's configuration class (e.g., `RBLNExaoneForCausalLMConfig` for EXAONE models).
117
+ For detailed configuration options, see the specific model's configuration class documentation.
118
+ trust_remote_code (bool): Whether or not to trust the remote code when loading a model from the Hub.
119
+ kwargs: Additional keyword arguments. Arguments with the prefix `rbln_` are passed to rbln_config, while the remaining arguments are passed to the HuggingFace library.
120
+
121
+ Returns:
122
+ (RBLNModel): A RBLN model instance ready for inference on RBLN NPU devices.
123
+ """
124
+
125
+ if trust_remote_code is not None:
126
+ kwargs["trust_remote_code"] = trust_remote_code
127
+ elif "trust_remote_code" not in kwargs:
128
+ kwargs["trust_remote_code"] = True
129
+
130
+ return super().from_pretrained(
131
+ model_id=model_id,
132
+ export=export,
133
+ rbln_config=rbln_config,
134
+ **kwargs,
135
+ )
98
136
 
99
137
  def __getattr__(self, __name: str) -> Any:
100
138
  def redirect(func):
@@ -13,11 +13,13 @@
13
13
  # limitations under the License.
14
14
 
15
15
  import inspect
16
- from typing import Any, Callable
16
+ from pathlib import Path
17
+ from typing import Any, Callable, Dict, Optional, Union
17
18
 
18
19
  from transformers import AutoModelForCausalLM
19
20
  from transformers.generation.utils import GenerationMixin
20
21
 
22
+ from ....configuration_utils import RBLNModelConfig
21
23
  from ....utils import logging
22
24
  from ..decoderonly import RBLNDecoderOnlyModelForCausalLM
23
25
  from .midm_architecture import MidmLMHeadModelWrapper
@@ -91,9 +93,45 @@ class RBLNMidmLMHeadModel(RBLNDecoderOnlyModelForCausalLM):
91
93
  _supports_cache_class = True
92
94
 
93
95
  @classmethod
94
- def from_pretrained(cls, *args, **kwargs):
95
- kwargs.setdefault("trust_remote_code", True)
96
- return super().from_pretrained(*args, **kwargs)
96
+ def from_pretrained(
97
+ cls,
98
+ model_id: Union[str, Path],
99
+ *,
100
+ export: Optional[bool] = None,
101
+ rbln_config: Optional[Union[Dict, RBLNModelConfig]] = None,
102
+ trust_remote_code: Optional[bool] = None,
103
+ **kwargs: Any,
104
+ ):
105
+ """
106
+ The `from_pretrained()` function is utilized in its standard form as in the HuggingFace transformers library.
107
+ User can use this function to load a pre-trained model from the HuggingFace library and convert it to a RBLN model to be run on RBLN NPUs.
108
+
109
+ Args:
110
+ model_id (Union[str, Path]): The model id of the pre-trained model to be loaded.
111
+ It can be downloaded from the HuggingFace model hub or a local path, or a model id of a compiled model using the RBLN Compiler.
112
+ export (Optional[bool]): A boolean flag to indicate whether the model should be compiled.
113
+ If None, it will be determined based on the existence of the compiled model files in the model_id.
114
+ rbln_config (Optional[Union[Dict, RBLNModelConfig]]): Configuration for RBLN model compilation and runtime.
115
+ This can be provided as a dictionary or an instance of the model's configuration class (e.g., `RBLNMidmLMHeadModelConfig` for Mi:dm models).
116
+ For detailed configuration options, see the specific model's configuration class documentation.
117
+ trust_remote_code (bool): Whether or not to trust the remote code when loading a model from the Hub.
118
+ kwargs: Additional keyword arguments. Arguments with the prefix `rbln_` are passed to rbln_config, while the remaining arguments are passed to the HuggingFace library.
119
+
120
+ Returns:
121
+ (RBLNModel): A RBLN model instance ready for inference on RBLN NPU devices.
122
+ """
123
+
124
+ if trust_remote_code is not None:
125
+ kwargs["trust_remote_code"] = trust_remote_code
126
+ elif "trust_remote_code" not in kwargs:
127
+ kwargs["trust_remote_code"] = True
128
+
129
+ return super().from_pretrained(
130
+ model_id=model_id,
131
+ export=export,
132
+ rbln_config=rbln_config,
133
+ **kwargs,
134
+ )
97
135
 
98
136
  def __getattr__(self, __name: str) -> Any:
99
137
  def redirect(func):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: optimum-rbln
3
- Version: 0.8.4a7
3
+ Version: 0.9.1
4
4
  Summary: Optimum RBLN is the interface between the HuggingFace Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
5
5
  Project-URL: Homepage, https://rebellions.ai
6
6
  Project-URL: Documentation, https://docs.rbln.ai
@@ -20,8 +20,9 @@ Classifier: Programming Language :: Python :: 3.9
20
20
  Classifier: Programming Language :: Python :: 3.10
21
21
  Classifier: Programming Language :: Python :: 3.11
22
22
  Classifier: Programming Language :: Python :: 3.12
23
+ Classifier: Programming Language :: Python :: 3.13
23
24
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
24
- Requires-Python: <3.13,>=3.9
25
+ Requires-Python: <3.14,>=3.9
25
26
  Requires-Dist: accelerate>=1.0.1
26
27
  Requires-Dist: diffusers==0.35.1
27
28
  Requires-Dist: packaging>=24.1
@@ -1,5 +1,5 @@
1
1
  optimum/rbln/__init__.py,sha256=DAJM5PWAYFiWVlyxVXUvj3CaFOEhX1yhEfhIt1LxL-A,18714
2
- optimum/rbln/__version__.py,sha256=eIYY814fo_sJ3kHmEYxeoREyyzG88opg1WTjombSMUU,712
2
+ optimum/rbln/__version__.py,sha256=LwGndsRSpclYq-j3wgRr2nzOXwUYj0Jtg7Kof7R0BEw,704
3
3
  optimum/rbln/configuration_utils.py,sha256=XYXqbriu7DZkoCqicM1iXzqrSd7BVO-e2vA-8WSGgN0,36248
4
4
  optimum/rbln/modeling.py,sha256=IZ8loagxm--2BcqTl16KRHUR3hkccpeaY2grOWOtwqk,14473
5
5
  optimum/rbln/modeling_base.py,sha256=poXfHZCAlFd28MY9dvMi7tC2RytLx77Lee2XGS_KeZg,27684
@@ -121,7 +121,7 @@ optimum/rbln/transformers/models/dpt/modeling_dpt.py,sha256=uIwdHAhGgSyj_ljwJsRv
121
121
  optimum/rbln/transformers/models/exaone/__init__.py,sha256=eUL0mq3yGVzCQfjLlOtVF2MecIN3DQWm07EmXubGSTs,921
122
122
  optimum/rbln/transformers/models/exaone/configuration_exaone.py,sha256=S4s4kJemPbmn-otYv-XNHE40DJaEYY6cmzaWV6MTGsY,1388
123
123
  optimum/rbln/transformers/models/exaone/exaone_architecture.py,sha256=lY4FwH2EZn_OY6sBIHlwxbfaEOEJ1eueUQJGB6Js62M,2306
124
- optimum/rbln/transformers/models/exaone/modeling_exaone.py,sha256=sr_ICK-rw_fYmLY5r0IOc-vDtSZEcSwFIQp3Gn92zqE,3929
124
+ optimum/rbln/transformers/models/exaone/modeling_exaone.py,sha256=yW0Hws35v_70OGn0nmDNMXAsnAzniyxUZ78VmjIbpJg,6060
125
125
  optimum/rbln/transformers/models/gemma/__init__.py,sha256=HQISZaSrhwGtsvGuHqkiyMhoS1QAVhd0tXXCaCruq4U,746
126
126
  optimum/rbln/transformers/models/gemma/configuration_gemma.py,sha256=H1nVp8HBJxxN__VFyDpBuhoqf_RZxgvfE2cVOSvxBIg,1569
127
127
  optimum/rbln/transformers/models/gemma/gemma_architecture.py,sha256=4Ry2pFfWg0sVijgTP9SYt1vwJr8DsCVcusg-z0ePX5c,943
@@ -155,7 +155,7 @@ optimum/rbln/transformers/models/llava_next/modeling_llava_next.py,sha256=E3w3-k
155
155
  optimum/rbln/transformers/models/midm/__init__.py,sha256=IC3FETwgYinbp3wDj7tp4zIHJhbqM-c6GfTRdYcMNj8,913
156
156
  optimum/rbln/transformers/models/midm/configuration_midm.py,sha256=DxhcSJlApxfi00XxYmSkKZ6bY9vfLXT0zh-oMKkZot0,1365
157
157
  optimum/rbln/transformers/models/midm/midm_architecture.py,sha256=RlkmNhaWE5h_awt9aTtR8VZfshNTah0IoUfD2Z9vfxI,5055
158
- optimum/rbln/transformers/models/midm/modeling_midm.py,sha256=zbziYZ3f_dX_MOLwORTfJn22psZ1g3FFeQffM_TIh7A,3876
158
+ optimum/rbln/transformers/models/midm/modeling_midm.py,sha256=EXTBFaOWco2h3NL8jUACeDmPFJuAjFrtQcgO_BTMuCk,6004
159
159
  optimum/rbln/transformers/models/mistral/__init__.py,sha256=bYPqrkmqXmhNDqRgKFaL9iH7piGLSHKzsVrGl_0qs1Q,758
160
160
  optimum/rbln/transformers/models/mistral/configuration_mistral.py,sha256=mIfz8J8GZV9ojCMuNj9Zeky_PNu1Ir34DQ7FDZrGkP8,1595
161
161
  optimum/rbln/transformers/models/mistral/mistral_architecture.py,sha256=gpQTcP83F4zYrCFXRFT_FAF66k5BSSfcYsaAr4eW9jI,722
@@ -242,7 +242,7 @@ optimum/rbln/utils/model_utils.py,sha256=4k5879Kh75m3x_vS4-qOGfqsOiAvc2kdNFFfvsF
242
242
  optimum/rbln/utils/runtime_utils.py,sha256=R6uXDbeJP03-FWdd4vthNe2D4aCra5n12E3WB1ifiGM,7933
243
243
  optimum/rbln/utils/save_utils.py,sha256=hG5uOtYmecSXZuGTvCXsTM-SiyZpr5q3InUGCCq_jzQ,3619
244
244
  optimum/rbln/utils/submodule.py,sha256=60NGLFvnhjP1DJg1opdb-FVQDsthcLCwWjW_1WQaasU,5280
245
- optimum_rbln-0.8.4a7.dist-info/METADATA,sha256=uXVaIXXVfB2tR6hYo6kjgfAJzVcN2R8JAr_chuG7_9A,5299
246
- optimum_rbln-0.8.4a7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
247
- optimum_rbln-0.8.4a7.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
248
- optimum_rbln-0.8.4a7.dist-info/RECORD,,
245
+ optimum_rbln-0.9.1.dist-info/METADATA,sha256=AtptvW3UA4d23uu7EilHiZjQjo2FZOIqmlc7lBsAsho,5348
246
+ optimum_rbln-0.9.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
247
+ optimum_rbln-0.9.1.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
248
+ optimum_rbln-0.9.1.dist-info/RECORD,,