local-deep-research 0.1.0__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {local_deep_research-0.1.0/src/local_deep_research.egg-info → local_deep_research-0.1.1}/PKG-INFO +16 -4
  2. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/README.md +15 -3
  3. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/pyproject.toml +1 -1
  4. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/defaults/main.toml +5 -0
  5. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/search_system.py +98 -38
  6. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/app.py +360 -117
  7. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/static/css/styles.css +28 -2
  8. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/static/js/app.js +640 -197
  9. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/index.html +3 -1
  10. local_deep_research-0.1.1/src/local_deep_research/web_search_engines/engines/search_engine_searxng.py +454 -0
  11. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/search_engine_factory.py +20 -1
  12. {local_deep_research-0.1.0 → local_deep_research-0.1.1/src/local_deep_research.egg-info}/PKG-INFO +16 -4
  13. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research.egg-info/SOURCES.txt +1 -0
  14. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/LICENSE +0 -0
  15. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/MANIFEST.in +0 -0
  16. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/requirements.txt +0 -0
  17. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/setup.cfg +0 -0
  18. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/__init__.py +0 -0
  19. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/citation_handler.py +0 -0
  20. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/config.py +0 -0
  21. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/defaults/__init__.py +0 -0
  22. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/defaults/llm_config.py +0 -0
  23. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/defaults/local_collections.toml +0 -0
  24. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/defaults/search_engines.toml +0 -0
  25. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/local_collections.py +0 -0
  26. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/main.py +0 -0
  27. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/report_generator.py +0 -0
  28. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/utilties/__init__.py +0 -0
  29. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/utilties/enums.py +0 -0
  30. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/utilties/llm_utils.py +0 -0
  31. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/utilties/search_utilities.py +0 -0
  32. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/utilties/setup_utils.py +0 -0
  33. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/__init__.py +0 -0
  34. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/api_keys_config.html +0 -0
  35. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/collections_config.html +0 -0
  36. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/llm_config.html +0 -0
  37. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/main_config.html +0 -0
  38. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/search_engines_config.html +0 -0
  39. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/settings.html +0 -0
  40. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web/templates/settings_dashboard.html +0 -0
  41. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/__init__.py +0 -0
  42. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/__init__.py +0 -0
  43. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/full_search.py +0 -0
  44. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/meta_search_engine.py +0 -0
  45. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_arxiv.py +0 -0
  46. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_brave.py +0 -0
  47. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_ddg.py +0 -0
  48. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_github.py +0 -0
  49. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_google_pse.py +0 -0
  50. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_guardian.py +0 -0
  51. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_local.py +0 -0
  52. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_local_all.py +0 -0
  53. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_medrxiv.py +0 -0
  54. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_pubmed.py +0 -0
  55. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_serpapi.py +0 -0
  56. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_wayback.py +0 -0
  57. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/engines/search_engine_wikipedia.py +0 -0
  58. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/full_search.py +0 -0
  59. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/search_engine_base.py +0 -0
  60. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/web_search_engines/search_engines_config.py +0 -0
  61. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research.egg-info/dependency_links.txt +0 -0
  62. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research.egg-info/entry_points.txt +0 -0
  63. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research.egg-info/requires.txt +0 -0
  64. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research.egg-info/top_level.txt +0 -0
  65. {local_deep_research-0.1.0 → local_deep_research-0.1.1}/tests/test_google_pse.py +0 -0
{local_deep_research-0.1.0/src/local_deep_research.egg-info → local_deep_research-0.1.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: local-deep-research
-Version: 0.1.0
+Version: 0.1.1
 Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
 Author-email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
 License: MIT License
@@ -91,12 +91,13 @@ A powerful AI-powered research assistant that performs deep, iterative analysis
 
 - 🌐 **Enhanced Search Integration**
   - **Auto-selection of search sources**: The "auto" search engine intelligently analyzes your query and selects the most appropriate search engine based on the query content
+  - **SearXNG** integration for local web-search engine, great for privacy, no API key required (requires a searxng server)
   - Wikipedia integration for factual knowledge
   - arXiv integration for scientific papers and academic research
   - PubMed integration for biomedical literature and medical research
   - DuckDuckGo integration for web searches (may experience rate limiting)
   - SerpAPI integration for Google search results (requires API key)
-  - **Google Programmable Search Engine** integration for custom search experiences (requires API key)
+  - Google Programmable Search Engine integration for custom search experiences (requires API key)
   - The Guardian integration for news articles and journalism (requires API key)
   - **Local RAG search for private documents** - search your own documents with vector embeddings
   - Full webpage content retrieval
@@ -127,10 +128,10 @@ This example showcases the system's ability to perform multiple research iterati
 
 1. Clone the repository:
 ```bash
-git clone https://github.com/yourusername/local-deep-research.git
+git clone https://github.com/LearningCircuit/local-deep-research.git
 cd local-deep-research
 ```
-
+(experimental pip install with new features (but not so well tested yet): **pip install local-deep-research** )
 2. Install dependencies:
 ```bash
 pip install -r requirements.txt
@@ -147,6 +148,15 @@ ollama pull mistral # Default model - many work really well choose best for you
 ```bash
 # Copy the template
 cp .env.template .env
+```
+
+## Experimental install
+```bash
+#experimental pip install with new features (but not so well tested yet):
+pip install local-deep-research
+playwright install
+ollama pull mistral
+```
 
 # Edit .env with your API keys (if using cloud LLMs)
 ANTHROPIC_API_KEY=your-api-key-here # For Claude
@@ -276,6 +286,7 @@ You can use local search in several ways:
 The system supports multiple search engines that can be selected by changing the `search_tool` variable in `config.py`:
 
 - **Auto** (`auto`): Intelligent search engine selector that analyzes your query and chooses the most appropriate source (Wikipedia, arXiv, local collections, etc.)
+- **SearXNG** (`searxng`): Local web-search engine, great for privacy, no API key required (requires a searxng server)
 - **Wikipedia** (`wiki`): Best for general knowledge, facts, and overview information
 - **arXiv** (`arxiv`): Great for scientific and academic research, accessing preprints and papers
 - **PubMed** (`pubmed`): Excellent for biomedical literature, medical research, and health information
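
The new `searxng` option above pairs with the new engine file `search_engine_searxng.py` (+454 lines, file 10 in the list), whose contents are not shown in this diff. For orientation only, a self-hosted SearXNG instance is typically queried over its JSON API; the sketch below is an illustration under that assumption — it presumes a server at `http://localhost:8080` with the JSON output format enabled in its `settings.yml`, and is not the package's actual implementation.

```python
# Illustrative sketch only — not code from search_engine_searxng.py.
# Assumes a local SearXNG server with "json" enabled under search.formats.
import requests

def searxng_search(query, max_results=10):
    """Query a self-hosted SearXNG instance and return simplified results."""
    response = requests.get(
        "http://localhost:8080/search",
        params={"q": query, "format": "json"},
        timeout=10,
    )
    response.raise_for_status()
    results = response.json().get("results", [])
    # Keep only the fields a search-engine wrapper typically needs.
    return [
        {"title": r.get("title"), "url": r.get("url"), "snippet": r.get("content")}
        for r in results[:max_results]
    ]
```

Because the server runs locally, no API key leaves your machine — which is the privacy argument the README makes for this engine.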
@@ -307,6 +318,7 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
 - [DuckDuckGo](https://duckduckgo.com) for web search
 - [The Guardian](https://www.theguardian.com/) for quality journalism
 - [SerpAPI](https://serpapi.com) for Google search results (requires API key)
+- [SearXNG](https://searxng.org/) for local web-search engine
 - Built on [LangChain](https://github.com/hwchase17/langchain) framework
 - Uses [justext](https://github.com/miso-belica/justext) for content extraction
 - [Playwright](https://playwright.dev) for web content retrieval
{local_deep_research-0.1.0 → local_deep_research-0.1.1}/README.md

@@ -29,12 +29,13 @@ A powerful AI-powered research assistant that performs deep, iterative analysis
 
 - 🌐 **Enhanced Search Integration**
   - **Auto-selection of search sources**: The "auto" search engine intelligently analyzes your query and selects the most appropriate search engine based on the query content
+  - **SearXNG** integration for local web-search engine, great for privacy, no API key required (requires a searxng server)
   - Wikipedia integration for factual knowledge
   - arXiv integration for scientific papers and academic research
   - PubMed integration for biomedical literature and medical research
   - DuckDuckGo integration for web searches (may experience rate limiting)
   - SerpAPI integration for Google search results (requires API key)
-  - **Google Programmable Search Engine** integration for custom search experiences (requires API key)
+  - Google Programmable Search Engine integration for custom search experiences (requires API key)
   - The Guardian integration for news articles and journalism (requires API key)
   - **Local RAG search for private documents** - search your own documents with vector embeddings
   - Full webpage content retrieval
@@ -65,10 +66,10 @@ This example showcases the system's ability to perform multiple research iterati
 
 1. Clone the repository:
 ```bash
-git clone https://github.com/yourusername/local-deep-research.git
+git clone https://github.com/LearningCircuit/local-deep-research.git
 cd local-deep-research
 ```
-
+(experimental pip install with new features (but not so well tested yet): **pip install local-deep-research** )
 2. Install dependencies:
 ```bash
 pip install -r requirements.txt
@@ -85,6 +86,15 @@ ollama pull mistral # Default model - many work really well choose best for you
 ```bash
 # Copy the template
 cp .env.template .env
+```
+
+## Experimental install
+```bash
+#experimental pip install with new features (but not so well tested yet):
+pip install local-deep-research
+playwright install
+ollama pull mistral
+```
 
 # Edit .env with your API keys (if using cloud LLMs)
 ANTHROPIC_API_KEY=your-api-key-here # For Claude
@@ -214,6 +224,7 @@ You can use local search in several ways:
 The system supports multiple search engines that can be selected by changing the `search_tool` variable in `config.py`:
 
 - **Auto** (`auto`): Intelligent search engine selector that analyzes your query and chooses the most appropriate source (Wikipedia, arXiv, local collections, etc.)
+- **SearXNG** (`searxng`): Local web-search engine, great for privacy, no API key required (requires a searxng server)
 - **Wikipedia** (`wiki`): Best for general knowledge, facts, and overview information
 - **arXiv** (`arxiv`): Great for scientific and academic research, accessing preprints and papers
 - **PubMed** (`pubmed`): Excellent for biomedical literature, medical research, and health information
@@ -245,6 +256,7 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
 - [DuckDuckGo](https://duckduckgo.com) for web search
 - [The Guardian](https://www.theguardian.com/) for quality journalism
 - [SerpAPI](https://serpapi.com) for Google search results (requires API key)
+- [SearXNG](https://searxng.org/) for local web-search engine
 - Built on [LangChain](https://github.com/hwchase17/langchain) framework
 - Uses [justext](https://github.com/miso-belica/justext) for content extraction
 - [Playwright](https://playwright.dev) for web content retrieval
{local_deep_research-0.1.0 → local_deep_research-0.1.1}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "local-deep-research"
-version = "0.1.0"
+version = "0.1.1"
 description = "AI-powered research assistant with deep, iterative analysis using LLMs and web searches"
 readme = "README.md"
 requires-python = ">=3.8"
{local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/defaults/main.toml

@@ -14,6 +14,11 @@ knowledge_accumulation_context_limit = 2000000
 # Enable fact checking (experimental, works better with large LLMs)
 enable_fact_checking = false
 
+[web]
+port = 5000
+host = "0.0.0.0"
+debug = true
+
 [search]
 # Search tool to use (auto, wikipedia, arxiv, duckduckgo, serp, google_pse, etc.)
 # "auto" intelligently selects based on query content (recommended)
{local_deep_research-0.1.0 → local_deep_research-0.1.1}/src/local_deep_research/search_system.py

@@ -25,6 +25,11 @@ class AdvancedSearchSystem:
         self.citation_handler = CitationHandler(self.model)
         self.progress_callback = None
         self.all_links_of_system = list()
+
+        # Check if search is available, log warning if not
+        if self.search is None:
+            print("WARNING: Search system initialized with no search engine! Research will not be effective.")
+            self._update_progress("WARNING: No search engine available", None, {"error": "No search engine configured properly"})
 
 
 
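
The calls in this hunk fix the shape of `_update_progress`: a message, a percentage (or `None`), and a metadata dict carrying a `phase` and an optional `error` key. A hedged sketch of a callback that could sit behind the `self.progress_callback` attribute seen above — the registration mechanism itself is assumed, not shown in this diff:

```python
# Illustrative consumer of the (message, percent, metadata) progress calls
# used throughout this diff; only the attribute name progress_callback is
# taken from the context lines above, everything else is an assumption.
from typing import Optional

def on_progress(message: str, percent: Optional[int], metadata: dict) -> None:
    phase = metadata.get("phase", "unknown")
    if "error" in metadata:
        print(f"[{phase}] ERROR: {message}")
    elif percent is not None:
        print(f"[{phase}] {percent:3d}% - {message}")
    else:
        print(f"[{phase}] {message}")

# hypothetical wiring: system.progress_callback = on_progress
```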
@@ -114,6 +119,23 @@ class AdvancedSearchSystem:
             "phase": "init",
             "iterations_planned": total_iterations
         })
+
+        # Check if search engine is available
+        if self.search is None:
+            error_msg = "Error: No search engine available. Please check your configuration."
+            self._update_progress(error_msg, 100, {
+                "phase": "error",
+                "error": "No search engine available",
+                "status": "failed"
+            })
+            return {
+                "findings": [],
+                "iterations": 0,
+                "questions": {},
+                "formatted_findings": "Error: Unable to conduct research without a search engine.",
+                "current_knowledge": "",
+                "error": error_msg
+            }
 
         while iteration < self.max_iterations:
             iteration_progress_base = (iteration / total_iterations) * 100
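
The guard above returns a normal result dict with an `error` key instead of raising, so existing callers keep working and can branch on that key. A caller-side sketch — the entry-point name `analyze_topic` is a placeholder, since the diff does not show the method's signature:

```python
# Hypothetical caller handling the structured error result introduced above.
from local_deep_research.search_system import AdvancedSearchSystem

system = AdvancedSearchSystem()
result = system.analyze_topic("impact of solid-state batteries on EV range")  # method name assumed

if result.get("error"):
    # The early return ships empty findings plus an "error" message, so a
    # front end can fail fast instead of spinning through empty iterations.
    print(f"Research failed: {result['error']}")
else:
    print(result["formatted_findings"])
```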
@@ -133,7 +155,21 @@ class AdvancedSearchSystem:
                                       int(question_progress_base),
                                       {"phase": "search", "iteration": iteration + 1, "question_index": q_idx + 1})
 
-                search_results = self.search.run(question)
+                try:
+                    if self.search is None:
+                        self._update_progress(f"Search engine unavailable, skipping search for: {question}",
+                                              int(question_progress_base + 2),
+                                              {"phase": "search_error", "error": "No search engine available"})
+                        search_results = []
+                    else:
+                        search_results = self.search.run(question)
+                except Exception as e:
+                    error_msg = f"Error during search: {str(e)}"
+                    print(f"SEARCH ERROR: {error_msg}")
+                    self._update_progress(error_msg,
+                                          int(question_progress_base + 2),
+                                          {"phase": "search_error", "error": str(e)})
+                    search_results = []
 
                 if search_results is None:
                     self._update_progress(f"No search results found for question: {question}",
@@ -155,57 +191,81 @@ class AdvancedSearchSystem:
                                       int(question_progress_base + 5),
                                       {"phase": "analysis"})
                 print("NR OF SOURCES: ", len(self.all_links_of_system))
-                result = self.citation_handler.analyze_followup(
-                    question, search_results, current_knowledge, nr_of_links=len(self.all_links_of_system)
-                )
-                links = extract_links_from_search_results(search_results)
-                self.all_links_of_system.extend(links)
-                section_links.extend(links)
-                formatted_links = ""
-                if links:
-                    formatted_links=format_links(links=links)
-
-                logger.debug(f"Generated questions: {formatted_links}")
-                if result is not None:
-                    results_with_links = str(result["content"])
-                    findings.append(
-                        {
-                            "phase": f"Follow-up {iteration}.{questions.index(question) + 1}",
-                            "content": results_with_links,
-                            "question": question,
-                            "search_results": search_results,
-                            "documents": result["documents"],
-                        }
-                    )
 
-                    if settings.general.knowledge_accumulation != KnowledgeAccumulationApproach.NO_KNOWLEDGE:
-                        current_knowledge = current_knowledge + "\n\n\n New: \n" + results_with_links
-
-                    print(current_knowledge)
-                    if settings.general.knowledge_accumulation == KnowledgeAccumulationApproach.QUESTION:
-                        self._update_progress(f"Compress Knowledge for: {question}",
-                                              int(question_progress_base + 0),
-                                              {"phase": "analysis"})
-                        current_knowledge = self._compress_knowledge(current_knowledge , query, section_links)
+                try:
+                    result = self.citation_handler.analyze_followup(
+                        question, search_results, current_knowledge, nr_of_links=len(self.all_links_of_system)
+                    )
+                    links = extract_links_from_search_results(search_results)
+                    self.all_links_of_system.extend(links)
+                    section_links.extend(links)
+                    formatted_links = ""
+                    if links:
+                        formatted_links=format_links(links=links)
 
-                self._update_progress(f"Analysis complete for question: {question}",
-                                      int(question_progress_base + 10),
-                                      {"phase": "analysis_complete"})
+                    logger.debug(f"Generated questions: {formatted_links}")
+                    if result is not None:
+                        results_with_links = str(result["content"])
+                        findings.append(
+                            {
+                                "phase": f"Follow-up {iteration}.{questions.index(question) + 1}",
+                                "content": results_with_links,
+                                "question": question,
+                                "search_results": search_results,
+                                "documents": result["documents"],
+                            }
+                        )
 
+                        if settings.general.knowledge_accumulation != KnowledgeAccumulationApproach.NO_KNOWLEDGE:
+                            current_knowledge = current_knowledge + "\n\n\n New: \n" + results_with_links
+
+                        print(current_knowledge)
+                        if settings.general.knowledge_accumulation == KnowledgeAccumulationApproach.QUESTION:
+                            self._update_progress(f"Compress Knowledge for: {question}",
+                                                  int(question_progress_base + 0),
+                                                  {"phase": "analysis"})
+                            current_knowledge = self._compress_knowledge(current_knowledge , query, section_links)
+
+                    self._update_progress(f"Analysis complete for question: {question}",
+                                          int(question_progress_base + 10),
+                                          {"phase": "analysis_complete"})
+                except Exception as e:
+                    error_msg = f"Error analyzing results: {str(e)}"
+                    print(f"ANALYSIS ERROR: {error_msg}")
+                    self._update_progress(error_msg,
+                                          int(question_progress_base + 10),
+                                          {"phase": "analysis_error", "error": str(e)})
             iteration += 1
 
             self._update_progress(f"Compressing knowledge after iteration {iteration}",
                                   int((iteration / total_iterations) * 100 - 5),
                                   {"phase": "knowledge_compression"})
+
            if settings.general.knowledge_accumulation == KnowledgeAccumulationApproach.ITERATION:
-                current_knowledge = self._compress_knowledge(current_knowledge , query, section_links)
+                try:
+                    current_knowledge = self._compress_knowledge(current_knowledge , query, section_links)
+                except Exception as e:
+                    error_msg = f"Error compressing knowledge: {str(e)}"
+                    print(f"COMPRESSION ERROR: {error_msg}")
+                    self._update_progress(error_msg,
+                                          int((iteration / total_iterations) * 100 - 3),
+                                          {"phase": "compression_error", "error": str(e)})
+
 
 
             self._update_progress(f"Iteration {iteration} complete",
                                   int((iteration / total_iterations) * 100),
                                   {"phase": "iteration_complete", "iteration": iteration})
 
-        formatted_findings = self._save_findings(findings, current_knowledge, query)
+        try:
+            formatted_findings = self._save_findings(findings, current_knowledge, query)
+        except Exception as e:
+            error_msg = f"Error saving findings: {str(e)}"
+            print(f"SAVE ERROR: {error_msg}")
+            self._update_progress(error_msg,
+                                  int((iteration / total_iterations) * 100),
+                                  {"phase": "save_error", "error": str(e)})
+            formatted_findings = "Error: Could not format findings due to an error."
 
         self._update_progress("Research complete", 95, {"phase": "complete"})
 
@@ -213,7 +273,7 @@ class AdvancedSearchSystem:
             "findings": findings,
             "iterations": iteration,
             "questions": self.questions_by_iteration,
-            "formatted_findings": formatted_findings,
+            "formatted_findings": formatted_findings if 'formatted_findings' in locals() else "Error: Findings not available.",
             "current_knowledge": current_knowledge
         }
 
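
The `'formatted_findings' in locals()` guard in this last hunk is belt-and-suspenders: the new try/except around `_save_findings` already assigns a fallback string, but the guard also covers any path where the name was never bound at all. A self-contained sketch of the idiom it relies on:

```python
# Demonstrates the locals() guard from the hunk above with throwaway names.
def may_fail() -> str:
    raise RuntimeError("formatting failed")

def build_result() -> str:
    try:
        formatted = may_fail()
    except RuntimeError:
        pass  # the assignment never ran, so the local name stays unbound
    # Same guard as the diff: check locals() rather than risk a NameError.
    return formatted if 'formatted' in locals() else "Error: Findings not available."

print(build_result())  # -> Error: Findings not available.
```

An equally valid (and arguably clearer) alternative is to initialize `formatted_findings` to the fallback string before the `try` block, which makes the `locals()` check unnecessary.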