mb-rag 1.1.56.post0-py3-none-any.whl → 1.1.58-py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of mb-rag might be problematic.
- mb_rag/basic.py +375 -306
- mb_rag/chatbot/chains.py +206 -206
- mb_rag/chatbot/conversation.py +185 -185
- mb_rag/chatbot/prompts.py +58 -58
- mb_rag/rag/embeddings.py +810 -810
- mb_rag/utils/all_data_extract.py +64 -64
- mb_rag/utils/bounding_box.py +231 -231
- mb_rag/utils/document_extract.py +354 -354
- mb_rag/utils/extra.py +73 -73
- mb_rag/utils/pdf_extract.py +428 -428
- mb_rag/version.py +1 -1
- {mb_rag-1.1.56.post0.dist-info → mb_rag-1.1.58.dist-info}/METADATA +11 -11
- mb_rag-1.1.58.dist-info/RECORD +19 -0
- mb_rag-1.1.56.post0.dist-info/RECORD +0 -19
- {mb_rag-1.1.56.post0.dist-info → mb_rag-1.1.58.dist-info}/WHEEL +0 -0
- {mb_rag-1.1.56.post0.dist-info → mb_rag-1.1.58.dist-info}/top_level.txt +0 -0
mb_rag/chatbot/conversation.py
CHANGED
@@ -1,185 +1,185 @@
All 185 lines are removed and re-added with identical visible content, so the change appears to be whitespace or line-ending only. The file in both versions reads:

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from typing import Optional, List, Any, Union

__all__ = [
    'ConversationModel'
]

class ConversationModel:
    """
    A class to handle conversation with AI models

    Attributes:
        chatbot: The AI model for conversation
        message_list (List): List of conversation messages
        file_path (str): Path to save/load conversations. Can be local or S3
    """

    def __init__(self,
                 llm: Any,
                 message_list: Optional[List[Any]] = None,
                 file_path: Optional[str] = None,
                 **kwargs) -> None:
        """Initialize conversation model"""
        self.chatbot = llm
        if message_list:
            self.message_list = message_list
        else:
            self.message_list = []
        if file_path:
            self.file_path = file_path
        else:
            self.file_path = None

    def initialize_conversation(self, context_message: str = "") -> None:
        """Initialize conversation state.
        Getting the content from file_path if provided"""
        if self.file_path:
            self.load_conversation()

        if context_message:
            self.message_list.append(SystemMessage(content=context_message))
        else:
            self.message_list.append(SystemMessage(content="""This is conversation model.
            Look into the conversation history and answer the question if provided.
            Give a brief introduction of the conversation history."""))
        message_list_content = "".join(self.all_messages_content)
        return self.add_message(message_list_content, get_content_only=True)

    def _ask_question(self, query: str, images: list = None,
                      get_content_only: bool = True) -> str:
        """
        Ask a question and get response
        Args:
            query: Question to ask
            get_content_only: Whether to return only content
        Returns:
            str: Response from the model
        """
        if images:
            res = self.chatbot.invoke_query(query, images=images, get_content_only=get_content_only)
        else:
            res = self.chatbot.invoke_query(query, get_content_only=get_content_only)
        return res

    def add_message(self, query: str, images: list = None, get_content_only: bool = True) -> str:
        """
        Add a message to the conversation
        Args:
            query (str): Question to ask
            images (list): List of images to send to the model
            get_content_only (bool): Whether to return only content
        Returns:
            str: Response from the chatbot
        """
        self.message_list.append(HumanMessage(content=query))
        res = self._ask_question(query, images=images, get_content_only=get_content_only)
        self.message_list.append(AIMessage(content=res))
        return res

    @property
    def all_messages(self) -> List[Union[SystemMessage, HumanMessage, AIMessage]]:
        """Get all messages"""
        return self.message_list

    @property
    def last_message(self) -> str:
        """Get the last message"""
        return self.message_list[-1].content

    @property
    def all_messages_content(self) -> List[str]:
        """Get content of all messages"""
        return [message.content for message in self.message_list]

    def _is_s3_path(self, path: str) -> bool:
        """
        Check if path is an S3 path
        Args:
            path (str): Path to check
        Returns:
            bool: True if S3 path
        """
        return path.startswith("s3://")

    def save_conversation(self, file_path: Optional[str] = None, **kwargs) -> bool:
        """
        Save the conversation
        Args:
            file_path: Path to save the conversation
            **kwargs: Additional arguments for S3
        Returns:
            bool: Success status
        """
        if self._is_s3_path(file_path or self.file_path):
            print("Saving conversation to S3.")
            self.save_file_path = file_path
            return self._save_to_s3(self.file_path, **kwargs)
        return self._save_to_file(file_path or self.file_path)

    def _save_to_s3(self, **kwargs) -> bool:
        """Save conversation to S3"""
        try:
            client = kwargs.get('client', self.client)
            bucket = kwargs.get('bucket', self.bucket)
            client.put_object(
                Body=str(self.message_list),
                Bucket=bucket,
                Key=self.save_file_path
            )
            print(f"Conversation saved to s3_path: {self.s3_path}")
            return True
        except Exception as e:
            raise ValueError(f"Error saving conversation to s3: {e}")

    def _save_to_file(self, file_path: str) -> bool:
        """Save conversation to file"""
        try:
            with open(file_path, 'w') as f:
                for message in self.message_list:
                    f.write(f"{message.content}\n")
            print(f"Conversation saved to file: {file_path}")
            return True
        except Exception as e:
            raise ValueError(f"Error saving conversation to file: {e}")

    def load_conversation(self, file_path: Optional[str] = None, **kwargs) -> List[Any]:
        """
        Load a conversation
        Args:
            file_path: Path to load from
            **kwargs: Additional arguments for S3
        Returns:
            List: Loaded messages
        """
        self.message_list = []
        if self._is_s3_path(file_path or self.file_path):
            print("Loading conversation from S3.")
            self.file_path = file_path
            return self._load_from_s3(**kwargs)
        return self._load_from_file(file_path or self.file_path)

    def _load_from_s3(self, **kwargs) -> List[Any]:
        """Load conversation from S3"""
        try:
            client = kwargs.get('client', self.client)
            bucket = kwargs.get('bucket', self.bucket)
            res = client.get_response(client, bucket, self.s3_path)
            res_str = eval(res['Body'].read().decode('utf-8'))
            self.message_list = [SystemMessage(content=res_str)]
            print(f"Conversation loaded from s3_path: {self.file_path}")
            return self.message_list
        except Exception as e:
            raise ValueError(f"Error loading conversation from s3: {e}")

    def _load_from_file(self, file_path: str) -> List[Any]:
        """Load conversation from file"""
        try:
            with open(file_path, 'r') as f:
                lines = f.readlines()
                for line in lines:
                    self.message_list.append(SystemMessage(content=line))
            print(f"Conversation loaded from file: {file_path}")
            return self.message_list
        except Exception as e:
            raise ValueError(f"Error loading conversation from file: {e}")
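For context, a minimal usage sketch of the ConversationModel shipped above, assuming an mb_rag chatbot wrapper that exposes invoke_query(query, images=None, get_content_only=True), since that is the only interface the class calls; the stand-in EchoLLM and the file name below are illustrative, not taken from the diff. Only the local-file save path is exercised here: the S3 branches reference self.client, self.bucket, and self.s3_path, which __init__ never sets, so they would need to be supplied separately.

from mb_rag.chatbot.conversation import ConversationModel

class EchoLLM:
    """Hypothetical stand-in for an mb_rag model wrapper; any object with invoke_query() works."""
    def invoke_query(self, query, images=None, get_content_only=True):
        return f"echo: {query[:60]}"

# Build a conversation backed by a local file and exchange one message.
conv = ConversationModel(llm=EchoLLM(), file_path="conversation.txt")
print(conv.add_message("What does this package do?"))  # appends HumanMessage + AIMessage
print(conv.last_message)                               # content of the latest message
conv.save_conversation()                               # writes each message's content to conversation.txt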
mb_rag/chatbot/prompts.py
CHANGED
@@ -1,59 +1,59 @@
Lines 1-58 are removed and re-added with identical visible content (line 59, return prompt, is unchanged context), so here too the change appears to be whitespace or line-ending only. The file in both versions reads:

## file for storing basic prompts template
from langchain.prompts import ChatPromptTemplate

__all__ = ["prompts", "invoke_prompt"]

class prompts:
    """
    Class to get different prompts example for chatbot and templates
    """

    def get_code_prompts(self):
        """
        Get code prompts
        Returns:
            str: Code prompt
        """
        list_code_prompts = {'coding_python ': """You are a Python developer.
        Human: {}"""}

    def get_text_prompts(self):
        """
        Get text prompts
        Returns:
            str: Text prompt
        """
        list_text_prompts = {
            'multiple_placeholders': """You are a helpful assistant.
            Human: Tell me a more about {adjective1} and its relation to {adjective2}.
            Assistant:"""
        }

    def get_image_prompts(self):
        """
        Get image prompts
        Returns:
            str: Image prompt
        """
        list_image_prompts = {'map_function': "*map(lambda x: image_url, baseframes_list)"}  # for passing multiple images from a video or a list of images

    def get_assistant_prompts(self):
        """
        Get assistant prompts
        Returns:
            str: Assistant prompt
        """
        list_assistant_prompts = {}

def invoke_prompt(template: str, input_dict: dict = None):
    """
    Invoke a prompt
    Args:
        template (str): Template for the prompt
        input_dict (dict): Input dictionary for the prompt
    Returns:
        str: Prompt
    """
    prompt_multiple = ChatPromptTemplate.from_template(template)
    prompt = prompt_multiple.invoke(input_dict)
    return prompt
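A brief sketch of how the module-level invoke_prompt pairs with the multiple_placeholders template above; the input keys simply match the {adjective1}/{adjective2} placeholders, and the topic values are illustrative. Note that ChatPromptTemplate.invoke returns a ChatPromptValue rather than the plain str the docstring suggests.

from mb_rag.chatbot.prompts import invoke_prompt

template = """You are a helpful assistant.
Human: Tell me a more about {adjective1} and its relation to {adjective2}.
Assistant:"""

# Fills both placeholders and returns a ChatPromptValue.
prompt = invoke_prompt(template, {"adjective1": "embeddings", "adjective2": "vector search"})
print(prompt.to_string())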