hjxdl 0.2.9__py3-none-any.whl → 0.2.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hdl/_version.py +2 -2
- hdl/utils/llm/visrag.py +20 -17
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/METADATA +1 -1
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/RECORD +6 -6
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/WHEEL +0 -0
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/top_level.txt +0 -0
hdl/_version.py
CHANGED
hdl/utils/llm/visrag.py
CHANGED
@@ -138,14 +138,14 @@ def downvote(knowledge_base, query, cache_dir):
         f.write(json.dumps(data, indent=4, ensure_ascii=False))
 
 if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description="RAG-PDFQA Script")
-    parser.add_argument('--cache_dir', type=str, required=True, help='Cache directory path')
-    parser.add_argument('--device', type=str, default='cuda:0', help='Device for model inference')
-    parser.add_argument('--model_path', type=str, required=True, help='Path to the embedding model')
-    parser.add_argument('--llm_host', type=str, default='127.0.0.…
-    parser.add_argument('--llm_port', type=int, default=22299, help='LLM server port')
-    parser.add_argument('--server_name', type=str, default='0.0.0.0', help='Gradio server name')
-    parser.add_argument('--server_port', type=int, default=10077, help='Gradio server port')
+    parser = argparse.ArgumentParser(description="MiniCPMV-RAG-PDFQA Script")
+    parser.add_argument('--cache-dir', dest='cache_dir', type=str, required=True, help='Cache directory path')
+    parser.add_argument('--device', dest='device', type=str, default='cuda:0', help='Device for model inference')
+    parser.add_argument('--model-path', dest='model_path', type=str, required=True, help='Path to the embedding model')
+    parser.add_argument('--llm-host', dest='llm_host', type=str, default='127.0.0.1', help='LLM server IP address')
+    parser.add_argument('--llm-port', dest='llm_port', type=int, default=22299, help='LLM server port')
+    parser.add_argument('--server-name', dest='server_name', type=str, default='0.0.0.0', help='Gradio server name')
+    parser.add_argument('--server-port', dest='server_port', type=int, default=10077, help='Gradio server port')
 
     args = parser.parse_args()
 
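The flag renames above keep the old attribute names by setting `dest=` explicitly, so code that reads `args.cache_dir`, `args.model_path`, and so on is unaffected; only the command-line spelling changes from underscores to dashes. A minimal, self-contained sketch of that equivalence (the sample values are illustrative, not from the package):

    import argparse

    parser = argparse.ArgumentParser(description="MiniCPMV-RAG-PDFQA Script")
    parser.add_argument('--cache-dir', dest='cache_dir', type=str, required=True, help='Cache directory path')
    parser.add_argument('--model-path', dest='model_path', type=str, required=True, help='Path to the embedding model')

    # Dashed flags on the command line, underscore attributes in code:
    args = parser.parse_args(['--cache-dir', './cache', '--model-path', './embedding-model'])
    print(args.cache_dir, args.model_path)

(argparse would derive the same underscore `dest` automatically from a dashed long option, so the explicit `dest=` mainly documents the mapping.)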
@@ -162,14 +162,14 @@ if __name__ == '__main__':
     )
 
     with gr.Blocks() as app:
-        gr.Markdown("# Vision Language Models Enable End-to-End RAG")
+        gr.Markdown("# MiniCPMV-RAG-PDFQA: Two Vision Language Models Enable End-to-End RAG")
 
         file_input = gr.File(type="binary", label="Step 1: Upload PDF")
         file_result = gr.Text(label="Knowledge Base ID")
         process_button = gr.Button("Process PDF")
 
-        process_button.click(add_pdf_gradio,
-            …
+        process_button.click(lambda pdf: add_pdf_gradio(pdf, cache_dir=args.cache_dir, model=model, tokenizer=tokenizer),
+                             inputs=file_input, outputs=file_result)
 
         kb_id_input = gr.Text(label="Knowledge Base ID")
         query_input = gr.Text(label="Your Question")
@@ -177,18 +177,21 @@ if __name__ == '__main__':
         retrieve_button = gr.Button("Retrieve Pages")
         images_output = gr.Gallery(label="Retrieved Pages")
 
-        retrieve_button.click(retrieve_gradio,
-            …
+        retrieve_button.click(lambda kb, query, topk: retrieve_gradio(kb, query, topk, cache_dir=args.cache_dir, model=model, tokenizer=tokenizer),
+                              inputs=[kb_id_input, query_input, topk_input], outputs=images_output)
 
         button = gr.Button("Answer Question")
-        gen_model_response = gr.Textbox(label="…
+        gen_model_response = gr.Textbox(label="MiniCPM-V-2.6's Answer")
 
-        button.click(…
+        button.click(lambda images, question: answer_question(images, question, gen_model),
+                     inputs=[images_output, query_input], outputs=gen_model_response)
 
         upvote_button = gr.Button("🤗 Upvote")
         downvote_button = gr.Button("🤣 Downvote")
 
-        upvote_button.click(…
-            …
+        upvote_button.click(lambda kb, query: upvote(kb, query, cache_dir=args.cache_dir),
+                            inputs=[kb_id_input, query_input], outputs=None)
+        downvote_button.click(lambda kb, query: downvote(kb, query, cache_dir=args.cache_dir),
+                              inputs=[kb_id_input, query_input], outputs=None)
 
     app.launch(server_name=args.server_name, server_port=args.server_port)
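The wiring changes above follow a common Gradio pattern: `Button.click()` passes only the values of its `inputs` components to the callback, so non-component state (`args.cache_dir`, the embedding `model` and `tokenizer`, and `gen_model`) is bound by wrapping the real handlers in lambdas. A standalone sketch of that pattern, assuming only the public Gradio Blocks API and a placeholder handler (not code from the package):

    import gradio as gr

    MODEL_NAME = "demo-model"  # placeholder for a model object loaded outside the UI

    def answer_question(question, model_name):
        # Stand-in for the real inference call.
        return f"[{model_name}] you asked: {question}"

    with gr.Blocks() as app:
        question = gr.Text(label="Your Question")
        answer = gr.Textbox(label="Answer")
        ask = gr.Button("Answer Question")
        # The lambda receives only the component value; extra state comes from the closure.
        ask.click(lambda q: answer_question(q, MODEL_NAME), inputs=question, outputs=answer)

    if __name__ == '__main__':
        app.launch()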
{hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 hdl/__init__.py,sha256=GffnD0jLJdhkd-vo989v40N90sQbofkayRBwxc6TVhQ,72
-hdl/_version.py,sha256=…
+hdl/_version.py,sha256=zHgAwrkDTK_-ggjmpfUbyN4DWYMgD-GquJ0wB1j66Z4,413
 hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
 hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -133,12 +133,12 @@ hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
 hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
 hdl/utils/llm/llama_chat.py,sha256=watcHGOaz-bv3x-yDucYlGk5f8FiqfFhwWogrl334fk,4387
 hdl/utils/llm/vis.py,sha256=2pI0439GWi_BEVfQJtY29Y72FkUa8jEvBeqMlwy7xkc,15716
-hdl/utils/llm/visrag.py,sha256=…
+hdl/utils/llm/visrag.py,sha256=Nkt39OvltWvrYrHBw2eNgCMkArYtJI_RPttMi2se-Ns,7872
 hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
 hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/weather/weather.py,sha256=k11o6wM15kF8b9NMlEfrg68ak-SfSYLN3nOOflFUv-I,4381
-hjxdl-0.2.9.dist-info/METADATA,sha256=…
-hjxdl-0.2.9.dist-info/WHEEL,sha256=…
-hjxdl-0.2.9.dist-info/top_level.txt,sha256=…
-hjxdl-0.2.9.dist-info/RECORD,,
+hjxdl-0.2.11.dist-info/METADATA,sha256=Vllm_K5eqawd7WpTCegnb3KWBEKV1igVAj3FALI1-K0,836
+hjxdl-0.2.11.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+hjxdl-0.2.11.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+hjxdl-0.2.11.dist-info/RECORD,,
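Each RECORD line is `path,sha256=<urlsafe-base64 SHA-256 digest with padding stripped>,<size in bytes>`, and the RECORD file itself is listed with empty hash and size fields. A small sketch of how such an entry is computed, assuming only the standard wheel RECORD convention (the example path is illustrative):

    import base64
    import hashlib
    from pathlib import Path

    def record_entry(path):
        # Build a wheel-RECORD style line: path,sha256=<digest>,<size>.
        data = Path(path).read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
        return f"{path},sha256={digest},{len(data)}"

    # Example: print(record_entry("hdl/_version.py"))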
{hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/WHEEL
File without changes
{hjxdl-0.2.9.dist-info → hjxdl-0.2.11.dist-info}/top_level.txt
File without changes