mb-rag 1.1.39.tar.gz → 1.1.40.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mb-rag has been flagged as possibly problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mb_rag
-Version: 1.1.39
+Version: 1.1.40
 Summary: RAG function file
 Author: ['Malav Bateriwala']
 Requires-Python: >=3.8
@@ -292,7 +292,7 @@ class ModelFactory:
         except Exception as e:
             raise ValueError(f"Error with pydantic_model: {e}")
         if images:
-            res = self._model_invoke_images(images=images,prompt=query,pydantic_model=pydantic_model)
+            res = self._model_invoke_images(images=images,prompt=query,pydantic_model=pydantic_model,get_content_only=get_content_only)
         else:
             res = self.model.invoke(query)
         if get_content_only:
@@ -306,7 +306,7 @@ class ModelFactory:
         with open(image, "rb") as f:
             return base64.b64encode(f.read()).decode('utf-8')
 
-    def _model_invoke_images(self,images: list, prompt: str,pydantic_model = None):
+    def _model_invoke_images(self,images: list, prompt: str,pydantic_model = None,get_content_only: bool = True) -> str:
        """
        Function to invoke the model with images
        Args:
@@ -328,11 +328,27 @@ class ModelFactory:
             print("Continuing without structured output")
         message= HumanMessage(content=prompt_new,)
         response = self.model.invoke([message])
-        try:
-            return response.content
-        except Exception:
+
+        if get_content_only:
+            try:
+                return response.content
+            except Exception:
+                print("Failed to get content from response. Returning response object")
+                return response
+        else:
             return response
 
+    def _get_llm_metadata(self):
+        """
+        Returns Basic metadata about the LLM
+        """
+        print("Model Name: ", self.model)
+        print("Model Temperature: ", self.temperature)
+        print("Model Max Tokens: ", self.max_output_tokens)
+        print("Model Top P: ", self.top_p)
+        print("Model Top K: ", self.top_k)
+        print("Model Input Schema:",self.input_schema)
+
 class ConversationModel:
     """
     A class to handle conversation with AI models
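The change above threads a get_content_only flag through to _model_invoke_images: when it is True (the default) the method returns response.content, otherwise it returns the whole response object. A minimal, standalone sketch of that pattern is shown below; Response and fake_invoke are invented stand-ins for illustration only and are not mb_rag APIs.

# Standalone sketch of the get_content_only pattern introduced in this release.
# `Response` and `fake_invoke` are hypothetical stand-ins, not mb_rag code.
from dataclasses import dataclass

@dataclass
class Response:
    content: str
    metadata: dict

def fake_invoke(prompt: str) -> Response:
    # Stand-in for a model call that returns a rich response object.
    return Response(content=f"echo: {prompt}", metadata={"tokens": 3})

def invoke(prompt: str, get_content_only: bool = True):
    response = fake_invoke(prompt)
    if get_content_only:
        try:
            return response.content   # plain string, as in the new default path
        except Exception:
            print("Failed to get content from response. Returning response object")
            return response
    return response                   # full object when the flag is False

print(invoke("hello"))                          # -> "echo: hello"
print(invoke("hello", get_content_only=False))  # -> Response(content=..., metadata=...)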
@@ -1,5 +1,5 @@
 MAJOR_VERSION = 1
 MINOR_VERSION = 1
-PATCH_VERSION = 39
+PATCH_VERSION = 40
 version = '{}.{}.{}'.format(MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION)
 __all__ = ['MAJOR_VERSION', 'MINOR_VERSION', 'PATCH_VERSION', 'version']
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mb_rag
-Version: 1.1.39
+Version: 1.1.40
 Summary: RAG function file
 Author: ['Malav Bateriwala']
 Requires-Python: >=3.8
7 files without changes