lollms-client 0.19.8__py3-none-any.whl → 0.19.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lollms-client might be problematic.

lollms_client/__init__.py CHANGED
@@ -7,7 +7,7 @@ from lollms_client.lollms_utilities import PromptReshaper # Keep general utiliti
  from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager


- __version__ = "0.19.8" # Updated version
+ __version__ = "0.19.9" # Updated version


  # Optionally, you could define __all__ if you want to be explicit about exports
  __all__ = [
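
The only change in __init__.py is the version bump. A minimal way to confirm which version is installed after upgrading, assuming the package is importable in the current environment:

```python
# Quick sanity check that the installed package matches the bumped version.
import lollms_client

assert lollms_client.__version__ == "0.19.9", lollms_client.__version__
print("lollms_client version:", lollms_client.__version__)
```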
lollms_client/lollms_core.py CHANGED
@@ -923,27 +923,34 @@ Respond with a JSON object containing ONE of the following structures:
  if streaming_callback:
      streaming_callback(f"Starting RAG Hop {hop_count + 1}", MSG_TYPE.MSG_TYPE_STEP, {"type": "rag_hop_start", "hop": hop_count + 1}, turn_rag_history_for_callback)

- # Generate refined query for multi-hop
- if hop_count > 0:
-     # build system prompt and history...
-     # (same as before, omitted for brevity)
-     # result => current_query_for_rag
-     pass
- elif current_query_for_rag is None:
-     current_query_for_rag = prompt
-
- if not current_query_for_rag:
-     rag_hops_details_list.append({
-         "query": "EMPTY_QUERY_STOPPED_HOPS",
-         "retrieved_chunks_details": [],
-         "status": "Stopped: empty query."
-     })
-     break
+ txt_previous_queries = f"Previous queries:\n"+'\n'.join(previous_queries)+"\n\n" if len(previous_queries)>0 else ""
+ txt_informations = f"Information:\n"+'\n'.join([f"(from {chunk['document']}):{chunk['content']}" for _, chunk in all_unique_retrieved_chunks_map.items()]) if len(all_unique_retrieved_chunks_map)>0 else "This is the first request. No data received yet. Build a new query."
+ txt_sp = "Your objective is to analyze the provided chunks of information, then decise if they are sufficient to reach the objective. If you need more information, formulate a new query to extract more data."
+ txt_formatting = """The output format must be in form of json placed inside a json markdown tag. Here is the schema to use:
+ ```json
+ {
+     "decision": A boolean depicting your decision (true: more data is needed, false: there is enough data to reach objective),
+     "query": (optional, only if decision is true). A new query to recover more information from the data source (do not use previous queries as they have already been used)
+ }
+ ```
+ """
+ p = f"Objective:\n{objectives_text}\n\n{txt_previous_queries}\n\n{txt_informations}\n\n{txt_formatting}\n\n"
+ response = self.generate_code(p,system_prompt=txt_sp)
+ try:
+     answer = json.loads(response)
+     decision = answer["decision"]
+     if not decision:
+         break
+     else:
+         current_query_for_rag = answer["query"]
+ except Exception as ex:
+     trace_exception(ex)

  # Retrieve chunks
  try:
      retrieved = rag_query_function(current_query_for_rag, rag_vectorizer_name, rag_top_k, rag_min_similarity_percent)
  except Exception as e:
+     trace_exception(e)
      return {"final_answer": "", "rag_hops_history": rag_hops_details_list, "all_retrieved_sources": list(all_unique_retrieved_chunks_map.values()), "error": str(e)}

  hop_details = {"query": current_query_for_rag, "retrieved_chunks_details": [], "status": ""}
@@ -965,30 +972,7 @@ Respond with a JSON object containing ONE of the following structures:
  hop_details["status"] = "No *new* unique chunks retrieved"
  rag_hops_details_list.append(hop_details)

- # reset for next hop
- if hop_count < max_rag_hops:
-     txt_previous_queries = f"Previous queries:\n"+'\n'.join(previous_queries)+"\n\n" if len(previous_queries)>0 else ""
-     txt_informations = f"Information:\n"+'\n'.join([f"(from {chunk['document']}):{chunk['content']}" for _, chunk in all_unique_retrieved_chunks_map.items()])
-     txt_sp = "Your objective is to analyze the provided chunks of information, then decise if they are sufficient to reach the objective. If you need more information, formulate a new query to extract more data."
-     txt_formatting = """The output format must be in form of json placed inside a json markdown tag. Here is the schema to use:
-     ```json
-     {
-         "decision": A boolean depicting your decision (true: more data is needed, false: there is enough data to reach objective),
-         "query": (optional, only if decision is true). A new query to recover more information from the data source (do not use previous queries as they have already been used)
-     }
-     ```
-     """
-     p = f"Objective:\n{objectives_text}\n\n{txt_previous_queries}\n\n{txt_informations}\n\n{txt_formatting}\n\n"
-     response = self.generate_code(p,system_prompt=txt_sp)
-     try:
-         answer = json.loads(response)
-         decision = answer["decision"]
-         if not decision:
-             break
-         else:
-             current_query_for_rag = answer["query"]
-     except Exception as ex:
-         trace_exception(ex)
+

  # 2. Prepare & Summarize Context
  sorted_chunks = sorted(all_unique_retrieved_chunks_map.values(),
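
The net effect of the two hunks above is that the hop-continuation decision (ask the model whether the accumulated chunks are sufficient, and if not, get a fresh query) now runs at the top of each hop instead of after retrieval. A minimal standalone sketch of that decision step is shown below; the helper name decide_next_hop and the generate_code callable passed in are illustrative stand-ins, not the lollms_client API:

```python
import json

def decide_next_hop(generate_code, objectives_text, previous_queries, chunks):
    """Ask the model whether more retrieval is needed.

    Returns (need_more_data, next_query). `generate_code` is assumed to be a
    callable that returns the model's JSON answer as plain text.
    """
    txt_previous = ("Previous queries:\n" + "\n".join(previous_queries) + "\n\n") if previous_queries else ""
    txt_info = ("Information:\n" + "\n".join(f"(from {c['document']}):{c['content']}" for c in chunks)
                if chunks else "This is the first request. No data received yet. Build a new query.")
    system_prompt = ("Analyze the provided chunks of information, then decide if they are sufficient "
                     "to reach the objective. If not, formulate a new query to extract more data.")
    formatting = ('Answer with JSON inside a json markdown tag: '
                  '{"decision": true|false, "query": "next query, only if decision is true"}')
    prompt = f"Objective:\n{objectives_text}\n\n{txt_previous}{txt_info}\n\n{formatting}\n"
    try:
        answer = json.loads(generate_code(prompt, system_prompt=system_prompt))
        return bool(answer["decision"]), answer.get("query")
    except (json.JSONDecodeError, KeyError, TypeError):
        # Mirror the diff's behaviour on parse errors: stop asking for more data.
        return False, None
```

In the actual loop, a true decision replaces current_query_for_rag with the new query before retrieval, while a false decision breaks out of the hop loop and moves on to context summarization.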
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lollms_client
- Version: 0.19.8
+ Version: 0.19.9
  Summary: A client library for LoLLMs generate endpoint
  Author-email: ParisNeo <parisneoai@gmail.com>
  License: Apache Software License
@@ -20,9 +20,9 @@ examples/personality_test/chat_test.py,sha256=o2jlpoddFc-T592iqAiA29xk3x27KsdK5D
  examples/personality_test/chat_with_aristotle.py,sha256=4X_fwubMpd0Eq2rCReS2bgVlUoAqJprjkLXk2Jz6pXU,1774
  examples/personality_test/tesks_test.py,sha256=7LIiwrEbva9WWZOLi34fsmCBN__RZbPpxoUOKA_AtYk,1924
  examples/test_local_models/local_chat.py,sha256=slakja2zaHOEAUsn2tn_VmI4kLx6luLBrPqAeaNsix8,456
- lollms_client/__init__.py,sha256=4hRrTRCQTe3p2BdddF-8fJyG0nXyLOe_Imfini-BgtQ,910
+ lollms_client/__init__.py,sha256=ZuMTyKsGxnpozXbTiKEBlP7iMSdWHqlU2mAw_Jp1NY8,910
  lollms_client/lollms_config.py,sha256=goEseDwDxYJf3WkYJ4IrLXwg3Tfw73CXV2Avg45M_hE,21876
- lollms_client/lollms_core.py,sha256=B1swe9E024JZigsQSXynuFHpJi-1dIEcIVN7EGGXZqk,113509
+ lollms_client/lollms_core.py,sha256=3SxNX4cUgP3zN8x0TYv-G5XeS8WhoSiyss69qmjweRE,112862
  lollms_client/lollms_discussion.py,sha256=EV90dIgw8a-f-82vB2GspR60RniYz7WnBmAWSIg5mW0,2158
  lollms_client/lollms_js_analyzer.py,sha256=01zUvuO2F_lnUe_0NLxe1MF5aHE1hO8RZi48mNPv-aw,8361
  lollms_client/lollms_llm_binding.py,sha256=bdElz_IBx0zZ-85YTT1fyY_mSoHo46tKIMiHYJlKCkM,9809
@@ -70,8 +70,8 @@ lollms_client/tts_bindings/piper_tts/__init__.py,sha256=0IEWG4zH3_sOkSb9WbZzkeV5
  lollms_client/tts_bindings/xtts/__init__.py,sha256=FgcdUH06X6ZR806WQe5ixaYx0QoxtAcOgYo87a2qxYc,18266
  lollms_client/ttv_bindings/__init__.py,sha256=UZ8o2izQOJLQgtZ1D1cXoNST7rzqW22rL2Vufc7ddRc,3141
  lollms_client/ttv_bindings/lollms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lollms_client-0.19.8.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
- lollms_client-0.19.8.dist-info/METADATA,sha256=uHDmoes5veI0cBqErMnJbDS8TvadIZb-xvMnOXhLclc,13374
- lollms_client-0.19.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- lollms_client-0.19.8.dist-info/top_level.txt,sha256=NI_W8S4OYZvJjb0QWMZMSIpOrYzpqwPGYaklhyWKH2w,23
- lollms_client-0.19.8.dist-info/RECORD,,
+ lollms_client-0.19.9.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+ lollms_client-0.19.9.dist-info/METADATA,sha256=ODlUMX37ZeZ1tJEPgJyc2yk40Dac-iivpGAM5IjPxSI,13374
+ lollms_client-0.19.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ lollms_client-0.19.9.dist-info/top_level.txt,sha256=NI_W8S4OYZvJjb0QWMZMSIpOrYzpqwPGYaklhyWKH2w,23
+ lollms_client-0.19.9.dist-info/RECORD,,
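
To check the RECORD entries above against an unpacked wheel: each line is path,hash,size, where the hash is a sha256 digest encoded as unpadded urlsafe base64 (per the wheel RECORD format). A small verification sketch; the script name and command-line usage are illustrative:

```python
import base64
import hashlib
import os
import sys

def record_entry(path: str) -> str:
    """Build a RECORD-style line for a file: sha256 digest, urlsafe base64 without padding, then size."""
    data = open(path, "rb").read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
    return f"{path},sha256={digest},{os.path.getsize(path)}"

if __name__ == "__main__":
    # Example: python record_entry.py lollms_client/__init__.py
    print(record_entry(sys.argv[1]))
```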