mb-rag 1.1.20-py3-none-any.whl → 1.1.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

mb_rag/chatbot/basic.py CHANGED
@@ -70,6 +70,7 @@ class ModelFactory:
             'groq': self.create_groq,
             'deepseek': self.create_deepseek,
             'qwen' : self.create_qwen,
+            'hugging_face': self.create_hugging_face
         }
 
         model_data = creators.get(model_type)
@@ -201,6 +202,57 @@ class ModelFactory:
         kwargs["model"] = model_name
         return ChatTongyi(streaming=True,**kwargs)
 
+    @classmethod
+    def create_hugging_face(cls, model_name: str = "Qwen/Qwen2.5-VL-7B-Instruct", model_function: str = "image-text-to-text",
+                            device='cpu', **kwargs) -> Any:
+        """
+        Create and load a Hugging Face model.
+        Args:
+            model_name (str): Name of the model
+            model_function (str): Transformers pipeline task to run
+            device (str): Device to use. Default is cpu
+            **kwargs: Additional arguments
+        Returns:
+            HuggingFacePipeline: Chatbot model
+        """
+        if not check_package("transformers"):
+            raise ImportError("Transformers package not found. Please install it using: pip install transformers")
+        if not check_package("langchain_huggingface"):
+            raise ImportError("langchain_huggingface package not found. Please install it using: pip install langchain_huggingface")
+        if not check_package("torch"):
+            raise ImportError("Torch package not found. Please install it using: pip install torch")
+
+        from langchain_huggingface import HuggingFacePipeline
+        from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+        import torch
+
+        device = torch.device(device) if torch.cuda.is_available() else torch.device("cpu")
+
+        temperature = kwargs.pop("temperature", 0.7)
+        max_length = kwargs.pop("max_length", 1024)
+
+        tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+        model = AutoModelForCausalLM.from_pretrained(
+            model_name,
+            torch_dtype=torch.float16 if device.type == "cuda" else torch.float32,
+            device_map=device,
+            trust_remote_code=True,
+            **kwargs
+        )
+
+        # Create pipeline
+        pipe = pipeline(
+            model_function,
+            model=model,
+            tokenizer=tokenizer,
+            max_length=max_length,
+            temperature=temperature,
+            device=device
+        )
+
+        # Create and return LangChain HuggingFacePipeline
+        return HuggingFacePipeline(pipeline=pipe)
+
     def _reset_model(self):
         """Reset the model"""
         self.model = self.model.reset()
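
Taken together, the change registers a 'hugging_face' key in the factory's creators map and adds the create_hugging_face classmethod above, which wraps a transformers pipeline in a LangChain HuggingFacePipeline. The sketch below shows how the new entry point might be called; the classmethod name, its defaults, and the popped temperature/max_length kwargs come from the diff, while the import usage and the .invoke() call assume the standard LangChain runnable interface and are not part of this diff.

    # Usage sketch (assumptions noted above): construct the new Hugging Face
    # backend directly through the classmethod added in 1.1.22.
    from mb_rag.chatbot.basic import ModelFactory

    llm = ModelFactory.create_hugging_face(
        model_name="Qwen/Qwen2.5-VL-7B-Instruct",  # default model in the diff
        model_function="image-text-to-text",       # transformers pipeline task
        device="cpu",                              # forced to CPU when CUDA is unavailable
        temperature=0.7,                           # popped from **kwargs before model loading
        max_length=1024,                           # popped from **kwargs, passed to the pipeline
    )

    # create_hugging_face returns a LangChain HuggingFacePipeline, so it can be
    # invoked like any other LangChain runnable.
    print(llm.invoke("Summarize what a RAG pipeline does in one sentence."))

The same backend should also be reachable through the factory's model_type dispatch via the new 'hugging_face' key, though the dispatching method itself lies outside the hunks shown here.
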
mb_rag/version.py CHANGED
@@ -1,5 +1,5 @@
 MAJOR_VERSION = 1
 MINOR_VERSION = 1
-PATCH_VERSION = 20
+PATCH_VERSION = 22
 version = '{}.{}.{}'.format(MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION)
 __all__ = ['MAJOR_VERSION', 'MINOR_VERSION', 'PATCH_VERSION', 'version']
mb_rag-1.1.22.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: mb_rag
-Version: 1.1.20
+Version: 1.1.22
 Summary: RAG function file
 Author: ['Malav Bateriwala']
 Requires-Python: >=3.8
mb_rag-1.1.22.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
 mb_rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-mb_rag/version.py,sha256=nd1rf9Hb6HiPmEnAU3-NN_-pISzzUaujEjZCrxm0Xnc,207
+mb_rag/version.py,sha256=MdYlquliW4HTyGehBHyYrWkbKgMYvz-ggotKwZvsUxE,207
 mb_rag/chatbot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-mb_rag/chatbot/basic.py,sha256=0uk0UraJpqur46y5pRgif8uf0sluQXhHrNmhpd1bYZQ,20415
+mb_rag/chatbot/basic.py,sha256=Bb3fAoYfkTPiB5vlQ9K4TfEO3m-TqukLhLzMbmdSOkc,22585
 mb_rag/chatbot/chains.py,sha256=vDbLX5R29sWN1pcFqJ5fyxJEgMCM81JAikunAEvMC9A,7223
 mb_rag/chatbot/prompts.py,sha256=n1PyiLbU-5fkslRv6aVOzt0dDlwya_cEdQ7kRnRhMuY,1749
 mb_rag/rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -9,7 +9,7 @@ mb_rag/rag/embeddings.py,sha256=KjBdekFDb5M3dRMco4r3dDMXMsG5dxdzKImuVIipsd0,2709
 mb_rag/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mb_rag/utils/bounding_box.py,sha256=G0hdDam8QmYtD9lfwMeDHGm-TTo6KZg-yK5ESFL9zaM,8366
 mb_rag/utils/extra.py,sha256=spbFrGgdruNyYQ5PzgvpSIa6Nm0rn9bb4qc8W9g582o,2492
-mb_rag-1.1.20.dist-info/METADATA,sha256=3izFuvYT5hU9jfXfkhuyB3t_RsTLpzcBKNo05lEYXU4,234
-mb_rag-1.1.20.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-mb_rag-1.1.20.dist-info/top_level.txt,sha256=FIK1eAa5uYnurgXZquBG-s3PIy-HDTC5yJBW4lTH_pM,7
-mb_rag-1.1.20.dist-info/RECORD,,
+mb_rag-1.1.22.dist-info/METADATA,sha256=cghtqSh0h2irAQ3MZvHzRIiYwrPRyl5BPvMdzD34M2I,234
+mb_rag-1.1.22.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+mb_rag-1.1.22.dist-info/top_level.txt,sha256=FIK1eAa5uYnurgXZquBG-s3PIy-HDTC5yJBW4lTH_pM,7
+mb_rag-1.1.22.dist-info/RECORD,,
mb_rag-1.1.22.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.8.0)
+Generator: setuptools (75.8.2)
 Root-Is-Purelib: true
 Tag: py3-none-any
 