mb-rag 1.1.6.tar.gz → 1.1.7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mb_rag-1.1.6 → mb_rag-1.1.7}/PKG-INFO +1 -1
- {mb_rag-1.1.6 → mb_rag-1.1.7}/README.md +1 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/chatbot/basic.py +20 -2
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/version.py +1 -1
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag.egg-info/PKG-INFO +1 -1
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/__init__.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/chatbot/__init__.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/chatbot/chains.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/chatbot/prompts.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/rag/__init__.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/rag/embeddings.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/utils/__init__.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/utils/bounding_box.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag/utils/extra.py +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag.egg-info/SOURCES.txt +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag.egg-info/dependency_links.txt +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag.egg-info/requires.txt +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/mb_rag.egg-info/top_level.txt +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/pyproject.toml +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/setup.cfg +0 -0
- {mb_rag-1.1.6 → mb_rag-1.1.7}/setup.py +0 -0
```diff
--- mb_rag-1.1.6/mb_rag/chatbot/basic.py
+++ mb_rag-1.1.7/mb_rag/chatbot/basic.py
@@ -54,7 +54,7 @@ class ModelFactory:
         """
         Factory method to create any type of model
         Args:
-            model_type (str): Type of model to create. Default is OpenAI. Options are openai, anthropic, google, ollama
+            model_type (str): Type of model to create. Default is OpenAI. Options are openai, anthropic, google, ollama , groq
             model_name (str): Name of the model
             **kwargs: Additional arguments
         Returns:
@@ -64,7 +64,8 @@ class ModelFactory:
             'openai': self.create_openai,
             'anthropic': self.create_anthropic,
             'google': self.create_google,
-            'ollama': self.create_ollama
+            'ollama': self.create_ollama,
+            'groq': self.create_groq
         }
 
         model_data = creators.get(model_type)
@@ -144,6 +145,23 @@ class ModelFactory:
         kwargs["model"] = model_name
         return Ollama(**kwargs)
 
+    @classmethod
+    def create_groq(cls, model_name: str = "llama-3.3-70b-versatile", **kwargs) -> Any:
+        """
+        Create Groq chatbot model
+        Args:
+            model_name (str): Name of the model
+            **kwargs: Additional arguments. Options are: temperature, groq_api_key, model_name
+        Returns:
+            ChatGroq: Chatbot model
+        """
+        if not check_package("langchain-groq"):
+            raise ImportError("Langchain Groq package not found. Please install it using: pip install langchain-groq")
+
+        from langchain_groq import ChatGroq
+        kwargs["model"] = model_name
+        return ChatGroq(**kwargs)
+
     def invoke_query(self,query: str,get_content_only: bool = True,images: list = None,pydantic_model = None) -> str:
         """
         Invoke the model
```
All remaining files listed above are unchanged between 1.1.6 and 1.1.7.
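For readers who want to try the new backend, here is a minimal usage sketch. The import path `mb_rag.chatbot.basic` is inferred from the file listing above and is an assumption, as is the final `.invoke()` call: the diff shows only that `create_groq` returns a LangChain `ChatGroq` instance, so the standard Runnable interface is assumed for invocation. A `GROQ_API_KEY` environment variable (or an explicit `groq_api_key` kwarg) is required.

```python
# Sketch only: module path and invocation style inferred from the diff, not verified against the package.
# Prerequisites: pip install mb-rag==1.1.7 langchain-groq, and GROQ_API_KEY set in the environment.
from mb_rag.chatbot.basic import ModelFactory  # assumed import path, based on the file listing above

# create_groq is the classmethod added in 1.1.7; extra kwargs are forwarded to ChatGroq.
chat_model = ModelFactory.create_groq(
    model_name="llama-3.3-70b-versatile",  # default value shown in the diff
    temperature=0.2,                        # one of the kwarg options named in the docstring
)

# The factory returns a langchain_groq.ChatGroq instance, which follows LangChain's
# Runnable interface, so a plain .invoke() handles a single-turn query.
response = chat_model.invoke("Summarize retrieval-augmented generation in one sentence.")
print(response.content)
```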