hjxdl 0.2.8__py3-none-any.whl → 0.2.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hdl/_version.py +2 -2
- hdl/utils/llm/chatgr.py +2 -1
- hdl/utils/llm/visrag.py +10 -10
- {hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/METADATA +1 -1
- {hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/RECORD +7 -7
- {hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/WHEEL +0 -0
- {hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/top_level.txt +0 -0
hdl/_version.py
CHANGED
hdl/utils/llm/chatgr.py
CHANGED
@@ -9,7 +9,8 @@ def chat_with_llm(user_input, chat_history=[]):
 
     bot_message = ""  # Bot 消息初始化为空
     resp = llm.stream(
-        "你的身份是VIVIBIT
+        "你的身份是VIVIBIT人工智能小助手,由芯途异构公司(ICTrek)研发,请回答如下问题,并保证回答所采用的语言与用户问题的语言保持一致。\n"
+        "Your identity is VIVIBIT AI Assistant, developed by ICTrek. Please answer the following question and ensure that the language used in the response matches the language of the user's question.\n Question: "
         + user_input
     )  # 获取流式响应
 
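Note on the hunk above: the change prepends a fixed bilingual identity preamble to the user's message before it is streamed to the model. A minimal sketch of the resulting prompt assembly, with a hypothetical `build_prompt` helper standing in for the inline concatenation inside `chat_with_llm`:

```python
def build_prompt(user_input: str) -> str:
    """Assemble the same prompt text that the new chat_with_llm passes to llm.stream()."""
    return (
        "你的身份是VIVIBIT人工智能小助手,由芯途异构公司(ICTrek)研发,"
        "请回答如下问题,并保证回答所采用的语言与用户问题的语言保持一致。\n"
        "Your identity is VIVIBIT AI Assistant, developed by ICTrek. "
        "Please answer the following question and ensure that the language used "
        "in the response matches the language of the user's question.\n Question: "
        + user_input
    )

if __name__ == "__main__":
    # The concatenated string is what llm.stream(...) receives in 0.2.10.
    print(build_prompt("What can you do?"))
```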
hdl/utils/llm/visrag.py
CHANGED
@@ -138,14 +138,14 @@ def downvote(knowledge_base, query, cache_dir):
         f.write(json.dumps(data, indent=4, ensure_ascii=False))
 
 if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description="RAG-PDFQA Script")
-    parser.add_argument('--cache_dir', type=str, required=True, help='Cache directory path')
-    parser.add_argument('--device', type=str, default='cuda:0', help='Device for model inference')
-    parser.add_argument('--model_path', type=str, required=True, help='Path to the embedding model')
-    parser.add_argument('--llm_host', type=str, default='127.0.0.0', help='LLM server IP address')
-    parser.add_argument('--llm_port', type=int, default=22299, help='LLM server port')
-    parser.add_argument('--server_name', type=str, default='0.0.0.0', help='Gradio server name')
-    parser.add_argument('--server_port', type=int, default=10077, help='Gradio server port')
+    parser = argparse.ArgumentParser(description="MiniCPMV-RAG-PDFQA Script")
+    parser.add_argument('--cache-dir', dest='cache_dir', type=str, required=True, help='Cache directory path')
+    parser.add_argument('--device', dest='device', type=str, default='cuda:0', help='Device for model inference')
+    parser.add_argument('--model-path', dest='model_path', type=str, required=True, help='Path to the embedding model')
+    parser.add_argument('--llm-host', dest='llm_host', type=str, default='127.0.0.0', help='LLM server IP address')
+    parser.add_argument('--llm-port', dest='llm_port', type=int, default=22299, help='LLM server port')
+    parser.add_argument('--server-name', dest='server_name', type=str, default='0.0.0.0', help='Gradio server name')
+    parser.add_argument('--server-port', dest='server_port', type=int, default=10077, help='Gradio server port')
 
     args = parser.parse_args()
 
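Note on the flag rename: each option now uses a hyphenated name but pins `dest` to the old underscore attribute, so code that reads `args.cache_dir`, `args.llm_port`, etc. keeps working (argparse would derive the same dest from the hyphenated flag on its own; the explicit `dest` only documents the mapping). A small self-contained check, using two of the options from the hunk and made-up sample values:

```python
import argparse

# Two of the renamed options from the hunk above; the parsed values are sample input.
parser = argparse.ArgumentParser(description="MiniCPMV-RAG-PDFQA Script")
parser.add_argument('--cache-dir', dest='cache_dir', type=str, required=True,
                    help='Cache directory path')
parser.add_argument('--llm-port', dest='llm_port', type=int, default=22299,
                    help='LLM server port')

args = parser.parse_args(['--cache-dir', '/tmp/visrag_cache'])
print(args.cache_dir)   # /tmp/visrag_cache  (same attribute name as before the rename)
print(args.llm_port)    # 22299              (default unchanged)
```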
@@ -162,7 +162,7 @@ if __name__ == '__main__':
     )
 
     with gr.Blocks() as app:
-        gr.Markdown("# Vision Language Models Enable End-to-End RAG")
+        gr.Markdown("# MiniCPMV-RAG-PDFQA: Two Vision Language Models Enable End-to-End RAG")
 
         file_input = gr.File(type="binary", label="Step 1: Upload PDF")
         file_result = gr.Text(label="Knowledge Base ID")
@@ -181,7 +181,7 @@ if __name__ == '__main__':
             _kwargs={'cache_dir': args.cache_dir, 'model': model, 'tokenizer': tokenizer})
 
         button = gr.Button("Answer Question")
-        gen_model_response = gr.Textbox(label="
+        gen_model_response = gr.Textbox(label="MiniCPM-V-2.6's Answer")
 
         button.click(answer_question, inputs=[images_output, query_input], outputs=gen_model_response, _kwargs={'gen_model': gen_model})
 
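The two UI hunks only retitle existing widgets. For orientation, here is a stripped-down sketch of how such a `gr.Blocks` app is wired; the widget names follow the diff, but component types not visible in these hunks (e.g. `gr.Gallery` for `images_output`) and the echoing `answer_question` stub are assumptions, not the package's real implementation:

```python
import gradio as gr

def answer_question(images, query):
    # Stub standing in for the real retrieval + MiniCPM-V generation step.
    return f"(demo) received {len(images or [])} page image(s); question was: {query}"

with gr.Blocks() as app:
    gr.Markdown("# MiniCPMV-RAG-PDFQA: Two Vision Language Models Enable End-to-End RAG")
    file_input = gr.File(type="binary", label="Step 1: Upload PDF")
    file_result = gr.Text(label="Knowledge Base ID")
    images_output = gr.Gallery(label="Retrieved Pages")       # component type assumed
    query_input = gr.Textbox(label="Step 2: Ask a Question")   # label assumed
    button = gr.Button("Answer Question")
    gen_model_response = gr.Textbox(label="MiniCPM-V-2.6's Answer")
    button.click(answer_question, inputs=[images_output, query_input],
                 outputs=gen_model_response)

if __name__ == "__main__":
    app.launch(server_name="0.0.0.0", server_port=10077)
```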
{hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 hdl/__init__.py,sha256=GffnD0jLJdhkd-vo989v40N90sQbofkayRBwxc6TVhQ,72
-hdl/_version.py,sha256=
+hdl/_version.py,sha256=YP9diSdvTdaLkQhlynYIFRmW3yUbT-ZgorqIhrsMGf8,413
 hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
 hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -128,17 +128,17 @@ hdl/utils/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
 hdl/utils/general/glob.py,sha256=8-RCnt6L297wMIfn34ZAMCsGCZUjHG3MGglGZI1cX0g,491
 hdl/utils/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/llm/chat.py,sha256=OzyY9xACOOocx9zZigtq9YAPvHtDUo8v2fvf1Tyjg_U,14891
-hdl/utils/llm/chatgr.py,sha256
+hdl/utils/llm/chatgr.py,sha256=GO2G7g6YybduA5VCUuGjvEsJfC_6L7rycSnPeHMcxyM,2820
 hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
 hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
 hdl/utils/llm/llama_chat.py,sha256=watcHGOaz-bv3x-yDucYlGk5f8FiqfFhwWogrl334fk,4387
 hdl/utils/llm/vis.py,sha256=2pI0439GWi_BEVfQJtY29Y72FkUa8jEvBeqMlwy7xkc,15716
-hdl/utils/llm/visrag.py,sha256=
+hdl/utils/llm/visrag.py,sha256=iceouI1OeGyA--zEeHjx7Zx0cfGmIV02A8RzYx7s9ro,7723
 hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
 hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/weather/weather.py,sha256=k11o6wM15kF8b9NMlEfrg68ak-SfSYLN3nOOflFUv-I,4381
-hjxdl-0.2.
-hjxdl-0.2.
-hjxdl-0.2.
-hjxdl-0.2.
+hjxdl-0.2.10.dist-info/METADATA,sha256=M_Gb9sipw6cV22bS6d7tHBVwc0sYaAaZeP4Mh7vkaf0,836
+hjxdl-0.2.10.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+hjxdl-0.2.10.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+hjxdl-0.2.10.dist-info/RECORD,,
{hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/WHEEL
File without changes
{hjxdl-0.2.8.dist-info → hjxdl-0.2.10.dist-info}/top_level.txt
File without changes