lollms-client 1.5.9__py3-none-any.whl → 1.6.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of lollms-client might be problematic.

lollms_client/__init__.py CHANGED
@@ -8,7 +8,7 @@ from lollms_client.lollms_utilities import PromptReshaper # Keep general utiliti
  from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager
  from lollms_client.lollms_llm_binding import LollmsLLMBindingManager

- __version__ = "1.5.9" # Updated version
+ __version__ = "1.6.1" # Updated version

  # Optionally, you could define __all__ if you want to be explicit about exports
  __all__ = [
lollms_client/lollms_core.py CHANGED

@@ -5736,16 +5736,19 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  self,
  text_to_process: str,
  contextual_prompt: Optional[str] = None,
- system_prompt: str= None,
+ system_prompt: str | None = None,
  context_fill_percentage: float = 0.75,
- overlap_tokens: int = 0,
+ overlap_tokens: int = 150, # Added a default for better context continuity
  expected_generation_tokens: int = 1500,
  streaming_callback: Optional[Callable] = None,
  return_scratchpad_only: bool = False,
  debug: bool = True,
  **kwargs
  ) -> str:
- """Enhanced long context processing with Moby Dick literary analysis optimization."""
+ """
+ Processes long text by breaking it down into chunks, analyzing each one incrementally,
+ and synthesizing the results into a comprehensive final response based on a user-defined objective.
+ """

  if debug:
  print(f"\n🔧 DEBUG: Starting processing with {len(text_to_process):,} characters")
@@ -5756,9 +5759,9 @@ Provide the final aggregated answer in {output_format} format, directly addressi

  # Get context size
  try:
- context_size = self.llm.get_context_size() or 4096
+ context_size = self.llm.get_context_size() or 8192 # Using a more modern default
  except:
- context_size = 4096
+ context_size = 8192

  if debug:
  print(f"🔧 DEBUG: Context size: {context_size}, Fill %: {context_fill_percentage}")
@@ -5767,32 +5770,33 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  if not text_to_process:
  return ""

- # Use word-based tokenization
+ # Use a simple word-based split for token estimation
  tokens = text_to_process.split()
  if debug:
  print(f"🔧 DEBUG: Tokenized into {len(tokens):,} word tokens")

  # Dynamic token budget calculation
  def calculate_token_budgets(scratchpad_content: str = "", step_num: int = 0) -> dict:
- base_system_tokens = 250 # Increased for literary-specific prompts
- user_template_tokens = 300 # Increased for detailed instructions
+ # Generic prompt templates are more concise
+ base_system_tokens = 150
+ user_template_tokens = 250
  scratchpad_tokens = len(scratchpad_content.split()) * 1.3 if scratchpad_content else 0

  used_tokens = base_system_tokens + user_template_tokens + scratchpad_tokens + expected_generation_tokens
  total_budget = int(context_size * context_fill_percentage)
- available_for_chunk = max(400, int(total_budget - used_tokens)) # Increased minimum for better context
+ available_for_chunk = max(500, int(total_budget - used_tokens)) # Ensure a reasonable minimum chunk size

  budget_info = {
  "total_budget": total_budget,
  "chunk_budget": available_for_chunk,
- "efficiency_ratio": available_for_chunk / total_budget,
+ "efficiency_ratio": available_for_chunk / total_budget if total_budget > 0 else 0,
  "scratchpad_tokens": int(scratchpad_tokens),
  "used_tokens": int(used_tokens)
  }

  if debug:
  print(f"🔧 DEBUG Step {step_num}: Budget = {available_for_chunk}/{total_budget} tokens, "
- f"Scratchpad = {int(scratchpad_tokens)} tokens")
+ f"Scratchpad = {int(scratchpad_tokens)} tokens")

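The revised helper shrinks the template overhead (150 + 250 tokens), raises the minimum chunk budget to 500 tokens, and guards the efficiency ratio against division by zero. A standalone sketch of the same arithmetic, assuming the new 8192-token default context at 75% fill:

```python
def chunk_budget(context_size=8192, fill=0.75,
                 scratchpad_words=1000, expected_generation_tokens=1500):
    # Mirrors calculate_token_budgets with the new constants.
    used = 150 + 250 + scratchpad_words * 1.3 + expected_generation_tokens
    total_budget = int(context_size * fill)        # 6144 with the defaults
    return max(500, int(total_budget - used))      # 500-token floor

print(chunk_budget())                        # 6144 - 3200 = 2944
print(chunk_budget(scratchpad_words=5000))   # budget exhausted -> floor of 500
```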
@@ -5814,22 +5818,21 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  # Single pass for short content
  if len(tokens) <= chunk_size_tokens:
  if debug:
- print("🔧 DEBUG: Using single-pass processing")
+ print("🔧 DEBUG: Content is short enough for single-pass processing")

  if streaming_callback:
- streaming_callback("Content fits in single pass", MSG_TYPE.MSG_TYPE_STEP, {})
+ streaming_callback("Content fits in a single pass", MSG_TYPE.MSG_TYPE_STEP, {})

- # FIXED: Moby Dick-specific single-pass system prompt
+ # Generic single-pass system prompt
  system_prompt = (
- "You are a literary analysis expert specializing in Herman Melville's works. "
- "Analyze the provided Moby Dick text with deep understanding that Melville's "
- "detailed realism, technical descriptions, and cultural documentation are "
- "integral literary techniques. Focus on themes, narrative methods, symbolism, "
- "and cultural commentary."
+ "You are an expert AI assistant for text analysis and summarization. "
+ "Your task is to carefully analyze the provided text and generate a comprehensive, "
+ "accurate, and well-structured response that directly addresses the user's objective. "
+ "Focus on extracting key information, identifying main themes, and synthesizing the content effectively."
  )

- prompt_objective = contextual_prompt or "Provide comprehensive Moby Dick literary analysis."
- final_prompt = f"{prompt_objective}\n\n--- Moby Dick Content ---\n{text_to_process}"
+ prompt_objective = contextual_prompt or "Provide a comprehensive summary and analysis of the provided text."
+ final_prompt = f"Objective: {prompt_objective}\n\n--- Full Text Content ---\n{text_to_process}"

  try:
  result = self.remove_thinking_blocks(self.llm.generate_text(final_prompt, system_prompt=system_prompt, **kwargs))
@@ -5838,37 +5841,37 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  return result
  except Exception as e:
  if debug:
- print(f"🔧 DEBUG: Single-pass failed: {e}")
+ print(f"🔧 DEBUG: Single-pass processing failed: {e}")
  return f"Error in single-pass processing: {e}"

- # Multi-chunk processing with FIXED prompts
+ # Multi-chunk processing for long content
  if debug:
- print("🔧 DEBUG: Using multi-chunk processing with Moby Dick-optimized prompts")
+ print("🔧 DEBUG: Using multi-chunk processing for long content")

  chunk_summaries = []
  current_position = 0
  step_number = 1

  while current_position < len(tokens):
- # Recalculate budget
- current_scratchpad = "\n\n---\n\n".join(chunk_summaries) if chunk_summaries else "[Empty]"
+ # Recalculate budget for each step for dynamic adaptation
+ current_scratchpad = "\n\n---\n\n".join(chunk_summaries)
  current_budget = calculate_token_budgets(current_scratchpad, step_number)
- adaptive_chunk_size = max(400, current_budget["chunk_budget"]) # Increased minimum
+ adaptive_chunk_size = max(500, current_budget["chunk_budget"])

- # Extract chunk
+ # Extract the next chunk of text
  chunk_end = min(current_position + adaptive_chunk_size, len(tokens))
  chunk_tokens = tokens[current_position:chunk_end]
  chunk_text = " ".join(chunk_tokens)

  if debug:
- print(f"\n🔧 DEBUG Step {step_number}: Processing chunk {current_position}:{chunk_end} "
- f"({len(chunk_tokens)} tokens, {len(chunk_text)} chars)")
+ print(f"\n🔧 DEBUG Step {step_number}: Processing chunk from {current_position} to {chunk_end} "
+ f"({len(chunk_tokens)} tokens)")

  # Progress calculation
  remaining_tokens = len(tokens) - current_position
- estimated_remaining_steps = max(1, remaining_tokens // adaptive_chunk_size)
- total_estimated_steps = step_number + estimated_remaining_steps - 1
- progress = (current_position / len(tokens)) * 90
+ estimated_remaining_steps = max(1, -(-remaining_tokens // adaptive_chunk_size)) # Ceiling division
+ total_estimated_steps = step_number + estimated_remaining_steps -1
+ progress = (current_position / len(tokens)) * 90 if len(tokens) > 0 else 0

  if streaming_callback:
  streaming_callback(
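The step estimate now rounds up rather than down, via the `-(-a // b)` idiom. A quick illustrative check of its equivalence with `math.ceil` for positive divisors:

```python
import math

def ceil_div(a, b):
    # Floor-dividing the negated numerator yields the ceiling for b > 0.
    return -(-a // b)

for remaining, chunk in [(10_000, 3_000), (9_000, 3_000), (1, 3_000)]:
    assert ceil_div(remaining, chunk) == math.ceil(remaining / chunk)

print(ceil_div(10_000, 3_000))  # 4; the old floor division reported 3
```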
@@ -5879,86 +5882,58 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  )

  try:
- # FIXED: Moby Dick-specific system prompt that prevents false filtering
+ # Generic, state-aware system prompt
  system_prompt = (
- f"You are analyzing Herman Melville's \"Moby Dick\" - a complex literary work where EVERY passage contains literary value.\n\n"
- f"**Critical Understanding:**\n"
- f"- Melville's detailed descriptions of whaling culture ARE literary techniques\n"
- f"- Technical passages reveal themes about knowledge, obsession, and human industry\n"
- f"- Social customs and maritime protocols reflect broader themes of hierarchy and civilization\n"
- f"- Even seemingly mundane details contribute to Melville's encyclopedic narrative style\n\n"
- f"**Current Status:** Step {step_number} of ~{total_estimated_steps} | Progress: {progress:.1f}%\n\n"
+ f"You are a component in a multi-step text processing pipeline. Your role is to analyze a chunk of text and extract key information relevant to a global objective.\n\n"
+ f"**Current Status:** You are on step {step_number} of approximately {total_estimated_steps} steps. Progress is at {progress:.1f}%.\n\n"
  f"**Your Task:**\n"
- f"Extract literary insights from this text chunk, focusing on:\n"
- f"1. **Themes** (obsession, knowledge, nature vs civilization, social hierarchy)\n"
- f"2. **Narrative Technique** (Melville's encyclopedic style, detailed realism)\n"
- f"3. **Cultural Commentary** (maritime society, American industry, social structures)\n"
- f"4. **Character Insights** (authority, dignity, social roles)\n"
- f"5. **Symbolic Elements** (ships, sea, whaling practices as metaphors)\n\n"
- f"**CRITICAL:** The scratchpad shows '{current_scratchpad[:20]}...' - if it shows '[Empty]', you are analyzing early content and everything you find is 'new' information. "
- f"Do NOT say '[No new information]' unless the chunk is literally empty or corrupted.\n\n"
- f"Be specific and extract concrete insights. Melville's detailed realism IS his literary technique."
+ f"Analyze the 'New Text Chunk' provided below. Extract and summarize any information, data points, or key ideas that are relevant to the 'Global Objective'.\n"
+ f"Review the 'Existing Scratchpad Content' to understand what has already been found. Your goal is to add *new* insights that are not already captured.\n\n"
+ f"**CRITICAL:** Do NOT repeat information already present in the scratchpad. Focus only on new, relevant details from the current chunk. If the chunk contains no new relevant information, respond with '[No new information found in this chunk.]'."
  )

- # FIXED: Moby Dick-specific user prompt with clear instructions
- summarization_objective = contextual_prompt or "Create comprehensive literary analysis of Moby-Dick focusing on themes, character development, narrative techniques, and symbolism"
-
- # Determine scratchpad status for better context
- scratchpad_status = "The analysis is just beginning - this is among the first substantial content to be processed." if current_scratchpad == "[Empty]" else f"Building on existing analysis with {len(chunk_summaries)} sections already completed."
+ # Generic, context-aware user prompt
+ summarization_objective = contextual_prompt or "Create a comprehensive summary by extracting all key facts, concepts, and conclusions from the text."
+ scratchpad_status = "The analysis is just beginning; this is the first chunk." if not chunk_summaries else f"Building on existing analysis with {len(chunk_summaries)} sections already completed."

  user_prompt = (
  f"--- Global Objective ---\n{summarization_objective}\n\n"
  f"--- Current Progress ---\n"
- f"Step {step_number} of ~{total_estimated_steps} | Progress: {progress:.1f}% | Token Budget: {adaptive_chunk_size:,}\n\n"
- f"--- Current Analysis State ---\n{scratchpad_status}\n\n"
- f"--- Existing Scratchpad Content ---\n{current_scratchpad}\n\n"
- f"--- New Text Chunk from Moby Dick ---\n{chunk_text}\n\n"
- f"--- Analysis Instructions ---\n"
- f"This is Melville's \"Moby Dick\" - extract literary insights from this passage. Consider:\n\n"
- f" **What themes** does this passage develop? (obsession with knowledge, social hierarchy, maritime culture)\n"
- f"• **What narrative techniques** does Melville use? (detailed realism, encyclopedic style, technical precision)\n"
- f"• **What cultural commentary** is present? (whaling society, American industry, social protocols)\n"
- f"• **What character insights** emerge? (authority, dignity, social roles and expectations)\n"
- f"• **What symbolic elements** appear? (ships, maritime customs, hierarchical structures)\n\n"
- f"**Remember:** In Moby Dick, even technical descriptions serve literary purposes. Melville's detailed realism and cultural documentation ARE his narrative techniques.\n\n"
- f"Provide specific, concrete analysis with examples from the text. Extract insights that are not already captured in the scratchpad above."
+ f"{scratchpad_status} (Step {step_number}/{total_estimated_steps})\n\n"
+ f"--- Existing Scratchpad Content (for context) ---\n{current_scratchpad}\n\n"
+ f"--- New Text Chunk to Analyze ---\n{chunk_text}\n\n"
+ f"--- Your Instructions ---\n"
+ f"Extract key information from the 'New Text Chunk' that aligns with the 'Global Objective'. "
+ f"Provide a concise summary of the new findings. Do not repeat what is already in the scratchpad. "
+ f"If no new relevant information is found, state that clearly."
  )

  if debug:
  print(f"🔧 DEBUG: Sending {len(user_prompt)} char prompt to LLM")
- print(f"🔧 DEBUG: Scratchpad status: {scratchpad_status}")

  chunk_summary = self.remove_thinking_blocks(self.llm.generate_text(user_prompt, system_prompt=system_prompt, **kwargs))

  if debug:
- print(f"🔧 DEBUG: Received {len(chunk_summary)} char response")
- print(f"🔧 DEBUG: Response preview: {chunk_summary[:200]}...")
+ print(f"🔧 DEBUG: Received {len(chunk_summary)} char response preview: {chunk_summary[:200]}...")

- # FIXED: More intelligent content filtering specifically for literary analysis
+ # Generic content filtering
  filter_out = False
+ filter_reason = "content accepted"

  # Check for explicit rejection signals
- if (chunk_summary.strip().startswith('[No new insights]') or
- chunk_summary.strip().startswith('[No new information]') or
- chunk_summary.strip().startswith('[No significant') or
- 'cannot provide' in chunk_summary.lower()[:100] or
- 'unable to analyze' in chunk_summary.lower()[:100]):
+ if (chunk_summary.strip().lower().startswith('[no new') or
+ chunk_summary.strip().lower().startswith('no new information')):
  filter_out = True
  filter_reason = "explicit rejection signal"
-
- # Check for too short responses
- elif len(chunk_summary.strip()) < 50:
- filter_out = True
- filter_reason = "response too short"
-
- # Check for error responses
+ # Check for overly short or generic refusal responses
+ elif len(chunk_summary.strip()) < 25:
+ filter_out = True
+ filter_reason = "response too short to be useful"
+ # Check for common error phrases
  elif any(error_phrase in chunk_summary.lower()[:150] for error_phrase in [
- 'error', 'failed', 'cannot', 'unable', 'not possible', 'insufficient']):
+ 'error', 'failed', 'cannot provide', 'unable to analyze', 'not possible', 'insufficient information']):
  filter_out = True
- filter_reason = "error response detected"
-
- else:
- filter_reason = "content accepted"
+ filter_reason = "error or refusal response detected"

  if not filter_out:
  chunk_summaries.append(chunk_summary.strip())
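The rewritten filter reduces to three ordered checks: rejection prefix, length floor, then error phrases. This standalone re-statement of the hunk's logic is for illustration only, using the same thresholds and phrases as above:

```python
ERROR_PHRASES = ['error', 'failed', 'cannot provide', 'unable to analyze',
                 'not possible', 'insufficient information']

def should_filter(chunk_summary):
    # Same ordered checks as the diff above: rejection prefix,
    # then a 25-character length floor, then error phrases.
    s = chunk_summary.strip()
    if s.lower().startswith('[no new') or s.lower().startswith('no new information'):
        return True, "explicit rejection signal"
    if len(s) < 25:
        return True, "response too short to be useful"
    if any(p in s.lower()[:150] for p in ERROR_PHRASES):
        return True, "error or refusal response detected"
    return False, "content accepted"

print(should_filter("[No new information found in this chunk.]"))  # filtered
print(should_filter("The chapter introduces the crew hierarchy and its rules."))  # accepted
```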
@@ -5970,22 +5945,15 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  if debug:
  print(f"🔧 DEBUG: ❌ Content filtered out - {filter_reason}: {chunk_summary[:100]}...")

- # Update progress
+ # Update progress via callback
  if streaming_callback:
  updated_scratchpad = "\n\n---\n\n".join(chunk_summaries)
  streaming_callback(
  updated_scratchpad,
  MSG_TYPE.MSG_TYPE_SCRATCHPAD,
- {
- "step": step_number,
- "sections": len(chunk_summaries),
- "content_added": content_added,
- "filter_reason": filter_reason
- }
+ {"step": step_number, "sections": len(chunk_summaries), "content_added": content_added, "filter_reason": filter_reason}
  )
-
- progress_after = ((current_position + len(chunk_tokens)) / len(tokens)) * 90
- if streaming_callback:
+ progress_after = ((current_position + len(chunk_tokens)) / len(tokens)) * 90 if len(tokens) > 0 else 90
  streaming_callback(
  f"Step {step_number} completed - {'Content added' if content_added else f'Filtered: {filter_reason}'}",
  MSG_TYPE.MSG_TYPE_STEP_END,
@@ -5999,87 +5967,79 @@ Provide the final aggregated answer in {output_format} format, directly addressi
  self.trace_exception(e)
  if streaming_callback:
  streaming_callback(error_msg, MSG_TYPE.MSG_TYPE_EXCEPTION)
- chunk_summaries.append(f"[Error in step {step_number}: {str(e)[:100]}]")
+ chunk_summaries.append(f"[Error processing chunk at step {step_number}: {str(e)[:150]}]")

- # Move to next chunk
+ # Move to the next chunk, allowing for overlap
  current_position += max(1, adaptive_chunk_size - overlap_tokens)
  step_number += 1
-
- # Safety break
- if step_number > 50:
- if debug:
- print(f"🔧 DEBUG: Breaking after {step_number-1} steps for safety")
+
+ # Safety break for excessively long documents
+ if step_number > 200:
+ if debug: print(f"🔧 DEBUG: Safety break after {step_number-1} steps.")
+ chunk_summaries.append("[Processing halted due to exceeding maximum step limit.]")
  break

  if debug:
- print(f"\n🔧 DEBUG: Completed chunking. Total sections: {len(chunk_summaries)}")
+ print(f"\n🔧 DEBUG: Chunk processing complete. Total sections gathered: {len(chunk_summaries)}")

- # Return scratchpad if requested
+ # Return only the scratchpad content if requested
  if return_scratchpad_only:
  final_scratchpad = "\n\n---\n\n".join(chunk_summaries)
  if streaming_callback:
- streaming_callback("Returning scratchpad only", MSG_TYPE.MSG_TYPE_STEP, {})
+ streaming_callback("Returning scratchpad content as final output.", MSG_TYPE.MSG_TYPE_STEP, {})
  return final_scratchpad.strip()

- # Final synthesis
+ # Final Synthesis Step
  if streaming_callback:
- streaming_callback("Synthesizing final comprehensive analysis...", MSG_TYPE.MSG_TYPE_STEP_START, {"progress": 90})
+ streaming_callback("Synthesizing final comprehensive response...", MSG_TYPE.MSG_TYPE_STEP_START, {"progress": 95})

  if not chunk_summaries:
- error_msg = "No content was successfully processed. The text may not contain recognizable literary elements, or there may be an issue with the processing."
+ error_msg = "No content was successfully processed or extracted from the document. The input might be empty or an issue occurred during processing."
  if debug:
  print(f"🔧 DEBUG: ❌ {error_msg}")
  return error_msg

  combined_scratchpad = "\n\n---\n\n".join(chunk_summaries)
- synthesis_objective = contextual_prompt or "Create comprehensive literary analysis of Moby-Dick."
+ synthesis_objective = contextual_prompt or "Provide a comprehensive, well-structured summary and analysis of the provided text."

  if debug:
- print(f"🔧 DEBUG: Synthesizing from {len(combined_scratchpad):,} char scratchpad with {len(chunk_summaries)} sections")
+ print(f"🔧 DEBUG: Synthesizing from {len(combined_scratchpad):,} char scratchpad with {len(chunk_summaries)} sections.")

- # FIXED: Moby Dick-specific synthesis prompts
+ # Generic synthesis prompts
  synthesis_system_prompt = (
- "You are a literary analysis expert creating a final comprehensive analysis of Herman Melville's Moby Dick.\n"
- "Synthesize all the insights from the analysis sections into a coherent, scholarly response.\n"
- "Create clear sections with markdown headers, eliminate redundancy, and provide a thorough analysis.\n"
- "Focus on Melville's major themes, narrative techniques, cultural commentary, and symbolic elements.\n"
- "Use specific examples from the text and maintain academic rigor throughout."
+ "You are an expert AI assistant specializing in synthesizing information. "
+ "Your task is to consolidate a series of text analysis sections from a scratchpad into a single, coherent, and well-structured final response. "
+ "Eliminate redundancy, organize the content logically, and ensure the final output directly and comprehensively addresses the user's primary objective. "
+ "Use markdown for clear formatting (e.g., headers, lists, bold text)."
  )

  synthesis_user_prompt = (
- f"--- Analysis Objective ---\n{synthesis_objective}\n\n"
- f"--- Processing Summary ---\n"
- f"Successfully analyzed {len(chunk_summaries)} sections of Moby Dick through incremental literary analysis.\n"
- f"Total scratchpad content: {len(combined_scratchpad):,} characters of literary insights.\n\n"
- f"--- Collected Literary Analysis Sections ---\n{combined_scratchpad}\n\n"
- f"--- Final Synthesis Task ---\n"
- f"Create a comprehensive, well-structured literary analysis of Moby Dick using ALL the insights above. "
- f"Organize into clear sections with markdown headers (## Theme Analysis, ## Narrative Techniques, ## Cultural Commentary, ## Symbolism, etc.). "
- f"Eliminate redundancy and create a coherent, scholarly analysis that demonstrates understanding of Melville's complex literary achievement. "
- f"Include specific textual examples and maintain academic depth throughout."
+ f"--- Final Objective ---\n{synthesis_objective}\n\n"
+ f"--- Collected Analysis Sections (Scratchpad) ---\n{combined_scratchpad}\n\n"
+ f"--- Your Final Task ---\n"
+ f"Synthesize all the information from the 'Collected Analysis Sections' into a single, high-quality, and comprehensive response. "
+ f"Your response must directly address the 'Final Objective'. "
+ f"Organize your answer logically with clear sections using markdown headers. "
+ f"Ensure all key information is included, remove any repetitive statements, and produce a polished, final document."
  )

  try:
- final_answer = self.remove_thinking_blocks(self.llm.generate_text(synthesis_user_prompt, system_prompt=synthesis_system_prompt, **kwargs))
-
+ final_answer = self.remove_thinking_blocks(self.llm.generate_text(synthesis_user_prompt, system_prompt=synthesis_system_prompt, **kwargs))
  if debug:
- print(f"🔧 DEBUG: Final analysis: {len(final_answer):,} characters")
-
+ print(f"🔧 DEBUG: Final synthesis generated: {len(final_answer):,} characters")
  if streaming_callback:
- streaming_callback(f"Final synthesis completed - {len(final_answer):,} characters generated", MSG_TYPE.MSG_TYPE_STEP_END, {"progress": 100})
-
+ streaming_callback("Final synthesis complete.", MSG_TYPE.MSG_TYPE_STEP_END, {"progress": 100})
  return final_answer.strip()

  except Exception as e:
- error_msg = f"Synthesis failed: {str(e)}. Returning organized scratchpad content."
- if debug:
- print(f"🔧 DEBUG: ❌ {error_msg}")
-
- # Return organized scratchpad as fallback
+ error_msg = f"The final synthesis step failed: {str(e)}. Returning the organized scratchpad content as a fallback."
+ if debug: print(f"🔧 DEBUG: ❌ {error_msg}")
+
+ # Fallback to returning the organized scratchpad
  organized_scratchpad = (
- f"# Literary Analysis of Moby Dick\n\n"
- f"*Note: Synthesis process encountered issues, presenting organized analysis sections:*\n\n"
- f"## Analysis Sections\n\n"
+ f"# Analysis Summary\n\n"
+ f"*Note: The final synthesis process encountered an error. The raw, organized analysis sections are provided below.*\n\n"
+ f"## Collected Sections\n\n"
  f"{combined_scratchpad}"
  )
  return organized_scratchpad
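With the new defaults, each loop iteration advances by `adaptive_chunk_size - overlap_tokens`, so consecutive windows share roughly 150 words, and the safety break now allows up to 200 steps. A minimal sketch of the window arithmetic (illustrative; indices are word positions, matching the diff's `split()`-based tokenization):

```python
def chunk_windows(n_tokens, chunk_size, overlap_tokens=150):
    # Walks the token list the way the processing loop does:
    # each step advances by chunk_size - overlap_tokens (at least 1).
    position = 0
    while position < n_tokens:
        yield position, min(position + chunk_size, n_tokens)
        position += max(1, chunk_size - overlap_tokens)

print(list(chunk_windows(10_000, 2_944))[:3])
# [(0, 2944), (2794, 5738), (5588, 8532)] -- each window overlaps the last by 150
```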
lollms_client-1.5.9.dist-info/METADATA → lollms_client-1.6.1.dist-info/METADATA CHANGED

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lollms_client
- Version: 1.5.9
+ Version: 1.6.1
  Summary: A client library for LoLLMs generate endpoint
  Author-email: ParisNeo <parisneoai@gmail.com>
  License: Apache License
lollms_client-1.5.9.dist-info/RECORD → lollms_client-1.6.1.dist-info/RECORD CHANGED

@@ -1,7 +1,7 @@
- lollms_client/__init__.py,sha256=pBf8IT3SoW2Y-kWsYgmN-bq3gC70uJf6m9ZjNqzg4IE,1146
+ lollms_client/__init__.py,sha256=lRGg7_mob01b8ZxJtc7S4djDcL7d8kA6nJfKHSIuOWY,1146
  lollms_client/lollms_agentic.py,sha256=pQiMEuB_XkG29-SW6u4KTaMFPr6eKqacInggcCuCW3k,13914
  lollms_client/lollms_config.py,sha256=goEseDwDxYJf3WkYJ4IrLXwg3Tfw73CXV2Avg45M_hE,21876
- lollms_client/lollms_core.py,sha256=Gc2KMyxnKWti-ljI7rf3XDb6a7NM2RUpktN8VnlWcyk,323791
+ lollms_client/lollms_core.py,sha256=lYsDsVr_SWhFhMiFxNzzY_ZTvNvFZAUdvbxQRpO9EcI,321464
  lollms_client/lollms_discussion.py,sha256=LZc9jYbUMRTovehiFJKEp-NXuCl_WnrqUtT3t4Nzayk,123922
  lollms_client/lollms_js_analyzer.py,sha256=01zUvuO2F_lnUe_0NLxe1MF5aHE1hO8RZi48mNPv-aw,8361
  lollms_client/lollms_llm_binding.py,sha256=tXuc3gxe6UrP36OBGsR-ESvQ9LpsB_nqtqL-GsEj6Uc,25019
@@ -80,8 +80,8 @@ lollms_client/tts_bindings/xtts/server/main.py,sha256=T-Kn5NM-u1FJMygeV8rOoZKlqn
  lollms_client/tts_bindings/xtts/server/setup_voices.py,sha256=UdHaPa5aNcw8dR-aRGkZr2OfSFFejH79lXgfwT0P3ss,1964
  lollms_client/ttv_bindings/__init__.py,sha256=UZ8o2izQOJLQgtZ1D1cXoNST7rzqW22rL2Vufc7ddRc,3141
  lollms_client/ttv_bindings/lollms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lollms_client-1.5.9.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
- lollms_client-1.5.9.dist-info/METADATA,sha256=LZ3XLpnSfsEKZHgVa3HpIX3Didujw4Eo5AMIIwi_khc,76825
- lollms_client-1.5.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- lollms_client-1.5.9.dist-info/top_level.txt,sha256=Bk_kz-ri6Arwsk7YG-T5VsRorV66uVhcHGvb_g2WqgE,14
- lollms_client-1.5.9.dist-info/RECORD,,
+ lollms_client-1.6.1.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+ lollms_client-1.6.1.dist-info/METADATA,sha256=wRlmSzWMv4IsGALozvPDg6CIiEzqeQb9qrgj99hMvyo,76825
+ lollms_client-1.6.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ lollms_client-1.6.1.dist-info/top_level.txt,sha256=Bk_kz-ri6Arwsk7YG-T5VsRorV66uVhcHGvb_g2WqgE,14
+ lollms_client-1.6.1.dist-info/RECORD,,