hjxdl 0.2.9__py3-none-any.whl → 0.2.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hdl/_version.py +2 -2
- hdl/utils/llm/visrag.py +10 -10
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/METADATA +1 -1
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/RECORD +6 -6
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/WHEEL +0 -0
- {hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/top_level.txt +0 -0
hdl/_version.py
CHANGED
hdl/utils/llm/visrag.py
CHANGED
@@ -138,14 +138,14 @@ def downvote(knowledge_base, query, cache_dir):
         f.write(json.dumps(data, indent=4, ensure_ascii=False))

 if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description="RAG-PDFQA Script")
-    parser.add_argument('--cache_dir', type=str, required=True, help='Cache directory path')
-    parser.add_argument('--device', type=str, default='cuda:0', help='Device for model inference')
-    parser.add_argument('--model_path', type=str, required=True, help='Path to the embedding model')
-    parser.add_argument('--llm_host', type=str, default='127.0.0.0', help='LLM server IP address')
-    parser.add_argument('--llm_port', type=int, default=22299, help='LLM server port')
-    parser.add_argument('--server_name', type=str, default='0.0.0.0', help='Gradio server name')
-    parser.add_argument('--server_port', type=int, default=10077, help='Gradio server port')
+    parser = argparse.ArgumentParser(description="MiniCPMV-RAG-PDFQA Script")
+    parser.add_argument('--cache-dir', dest='cache_dir', type=str, required=True, help='Cache directory path')
+    parser.add_argument('--device', dest='device', type=str, default='cuda:0', help='Device for model inference')
+    parser.add_argument('--model-path', dest='model_path', type=str, required=True, help='Path to the embedding model')
+    parser.add_argument('--llm-host', dest='llm_host', type=str, default='127.0.0.0', help='LLM server IP address')
+    parser.add_argument('--llm-port', dest='llm_port', type=int, default=22299, help='LLM server port')
+    parser.add_argument('--server-name', dest='server_name', type=str, default='0.0.0.0', help='Gradio server name')
+    parser.add_argument('--server-port', dest='server_port', type=int, default=10077, help='Gradio server port')

     args = parser.parse_args()

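Note on the renamed flags above: each new flag passes `dest=`, so the parsed attribute names (`args.cache_dir`, `args.model_path`, ...) are unchanged and only the command-line spelling moves from underscores to hyphens. argparse would in fact derive the same dest automatically by replacing hyphens with underscores, so the explicit `dest=` mainly documents the mapping. A minimal, self-contained sketch of the pattern (the example paths are illustrative, not from the package):

```python
import argparse

# Hyphenated CLI flags mapped back to underscore attribute names via `dest`,
# as in the visrag.py change above.
parser = argparse.ArgumentParser(description="demo")
parser.add_argument('--cache-dir', dest='cache_dir', type=str, required=True)
parser.add_argument('--model-path', dest='model_path', type=str, required=True)

# Simulated invocation: hjxdl_demo --cache-dir /tmp/cache --model-path /models/emb
args = parser.parse_args(['--cache-dir', '/tmp/cache', '--model-path', '/models/emb'])
print(args.cache_dir, args.model_path)  # attribute names stay cache_dir / model_path
```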
@@ -162,7 +162,7 @@ if __name__ == '__main__':
     )

     with gr.Blocks() as app:
-        gr.Markdown("# Vision Language Models Enable End-to-End RAG")
+        gr.Markdown("# MiniCPMV-RAG-PDFQA: Two Vision Language Models Enable End-to-End RAG")

         file_input = gr.File(type="binary", label="Step 1: Upload PDF")
         file_result = gr.Text(label="Knowledge Base ID")
@@ -181,7 +181,7 @@ if __name__ == '__main__':
                      _kwargs={'cache_dir': args.cache_dir, 'model': model, 'tokenizer': tokenizer})

         button = gr.Button("Answer Question")
-        gen_model_response = gr.Textbox(label="
+        gen_model_response = gr.Textbox(label="MiniCPM-V-2.6's Answer")

         button.click(answer_question, inputs=[images_output, query_input], outputs=gen_model_response, _kwargs={'gen_model': gen_model})

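For orientation (not part of the diff): the unchanged context lines above wire a Gradio `Button` to the answering function, and the `_kwargs=` argument appears to be a project-specific wrapper convention rather than stock Gradio. With plain Gradio, the same binding is usually done by pre-filling extra keyword arguments, e.g. via `functools.partial`. A hedged sketch under that assumption (the function body and component choices are placeholders):

```python
import functools
import gradio as gr

def answer_question(images, query, gen_model=None):
    # Placeholder for the real PDF-QA call; gen_model is bound below.
    return f"[{gen_model}] answer to: {query}"

with gr.Blocks() as app:
    gr.Markdown("# MiniCPMV-RAG-PDFQA: Two Vision Language Models Enable End-to-End RAG")
    images_output = gr.Gallery(label="Retrieved Pages")
    query_input = gr.Textbox(label="Question")
    button = gr.Button("Answer Question")
    gen_model_response = gr.Textbox(label="MiniCPM-V-2.6's Answer")
    # Stock Gradio wiring: bind the extra keyword argument up front,
    # then pass only UI components through inputs/outputs.
    button.click(functools.partial(answer_question, gen_model="minicpm-v-2.6"),
                 inputs=[images_output, query_input],
                 outputs=gen_model_response)

if __name__ == "__main__":
    app.launch()
```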
{hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 hdl/__init__.py,sha256=GffnD0jLJdhkd-vo989v40N90sQbofkayRBwxc6TVhQ,72
-hdl/_version.py,sha256=
+hdl/_version.py,sha256=YP9diSdvTdaLkQhlynYIFRmW3yUbT-ZgorqIhrsMGf8,413
 hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
 hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -133,12 +133,12 @@ hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
 hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
 hdl/utils/llm/llama_chat.py,sha256=watcHGOaz-bv3x-yDucYlGk5f8FiqfFhwWogrl334fk,4387
 hdl/utils/llm/vis.py,sha256=2pI0439GWi_BEVfQJtY29Y72FkUa8jEvBeqMlwy7xkc,15716
-hdl/utils/llm/visrag.py,sha256=
+hdl/utils/llm/visrag.py,sha256=iceouI1OeGyA--zEeHjx7Zx0cfGmIV02A8RzYx7s9ro,7723
 hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
 hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/weather/weather.py,sha256=k11o6wM15kF8b9NMlEfrg68ak-SfSYLN3nOOflFUv-I,4381
-hjxdl-0.2.
-hjxdl-0.2.
-hjxdl-0.2.
-hjxdl-0.2.
+hjxdl-0.2.10.dist-info/METADATA,sha256=M_Gb9sipw6cV22bS6d7tHBVwc0sYaAaZeP4Mh7vkaf0,836
+hjxdl-0.2.10.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+hjxdl-0.2.10.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+hjxdl-0.2.10.dist-info/RECORD,,
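For context on the RECORD entries above: each line has the form `path,sha256=<digest>,<size>`, where the digest is the file's SHA-256 hash encoded as URL-safe base64 with the trailing `=` padding stripped, and the RECORD file itself carries no hash or size (hence the trailing `,,`). A small sketch of how such an entry could be recomputed for verification (hypothetical helper, not part of hjxdl):

```python
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    """Build a wheel RECORD-style line: path,sha256=<urlsafe-b64 digest>,<size>."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
    return f"{path},sha256={digest},{len(data)}"

# Example: recompute the entry for the changed module and compare with RECORD.
print(record_entry("hdl/utils/llm/visrag.py"))
```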
{hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/WHEEL
File without changes
{hjxdl-0.2.9.dist-info → hjxdl-0.2.10.dist-info}/top_level.txt
File without changes