mb-rag 1.0.136__tar.gz → 1.1.2__tar.gz
This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
Potentially problematic release: this version of mb-rag has been flagged as possibly problematic. Click here for more details.
- {mb_rag-1.0.136 → mb_rag-1.1.2}/PKG-INFO +1 -1
- {mb_rag-1.0.136 → mb_rag-1.1.2}/README.md +4 -4
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/version.py +2 -2
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag.egg-info/PKG-INFO +1 -1
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/__init__.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/chatbot/__init__.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/chatbot/basic.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/chatbot/chains.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/chatbot/prompts.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/rag/__init__.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/rag/embeddings.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/utils/__init__.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/utils/bounding_box.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag/utils/extra.py +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag.egg-info/SOURCES.txt +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag.egg-info/dependency_links.txt +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag.egg-info/requires.txt +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/mb_rag.egg-info/top_level.txt +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/pyproject.toml +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/setup.cfg +0 -0
- {mb_rag-1.0.136 → mb_rag-1.1.2}/setup.py +0 -0
|
@@ -37,12 +37,12 @@ pip install mb_rag
|
|
|
37
37
|
from mb_rag.chatbot.basic import ModelFactory, ConversationModel
|
|
38
38
|
|
|
39
39
|
# 1. Simple Query with ModelFactory
|
|
40
|
-
model = ModelFactory(model_type="openai", model_name="gpt-
|
|
40
|
+
model = ModelFactory(model_type="openai", model_name="gpt-4o")
|
|
41
41
|
response = model.invoke_query("What is artificial intelligence?")
|
|
42
42
|
print(response)
|
|
43
43
|
|
|
44
44
|
# 2. Image Analysis
|
|
45
|
-
model = ModelFactory(model_type="openai", model_name="gpt-
|
|
45
|
+
model = ModelFactory(model_type="openai", model_name="gpt-4o")
|
|
46
46
|
response = model.invoke_query(
|
|
47
47
|
"What's in these images?",
|
|
48
48
|
images=["image1.jpg", "image2.jpg"]
|
|
@@ -51,7 +51,7 @@ print(response)
|
|
|
51
51
|
|
|
52
52
|
# 3. Conversation with Context
|
|
53
53
|
conversation = ConversationModel(
|
|
54
|
-
model_name="gpt-
|
|
54
|
+
model_name="gpt-4o",
|
|
55
55
|
model_type="openai"
|
|
56
56
|
)
|
|
57
57
|
|
|
@@ -147,7 +147,7 @@ em_gen.add_data(
|
|
|
147
147
|
|
|
148
148
|
# Web scraping and embedding
|
|
149
149
|
db = em_gen.firecrawl_web(
|
|
150
|
-
website="https://
|
|
150
|
+
website="https://github.com",
|
|
151
151
|
mode="scrape",
|
|
152
152
|
file_to_save='./web_embeddings'
|
|
153
153
|
)
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|