local-deep-research 0.1.14__tar.gz → 0.1.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. {local_deep_research-0.1.14/src/local_deep_research.egg-info → local_deep_research-0.1.16}/PKG-INFO +1 -1
  2. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/pyproject.toml +1 -1
  3. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/defaults/search_engines.toml +2 -2
  4. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/search_system.py +9 -10
  5. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/app.py +6 -22
  6. local_deep_research-0.1.16/src/local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py +569 -0
  7. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/search_engine_base.py +5 -14
  8. {local_deep_research-0.1.14 → local_deep_research-0.1.16/src/local_deep_research.egg-info}/PKG-INFO +1 -1
  9. local_deep_research-0.1.14/src/local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py +0 -1128
  10. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/LICENSE +0 -0
  11. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/MANIFEST.in +0 -0
  12. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/README.md +0 -0
  13. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/requirements.txt +0 -0
  14. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/setup.cfg +0 -0
  15. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/__init__.py +0 -0
  16. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/citation_handler.py +0 -0
  17. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/config.py +0 -0
  18. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/defaults/__init__.py +0 -0
  19. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/defaults/llm_config.py +0 -0
  20. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/defaults/local_collections.toml +0 -0
  21. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/defaults/main.toml +0 -0
  22. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/local_collections.py +0 -0
  23. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/main.py +0 -0
  24. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/report_generator.py +0 -0
  25. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/utilties/__init__.py +0 -0
  26. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/utilties/enums.py +0 -0
  27. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/utilties/llm_utils.py +0 -0
  28. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/utilties/search_utilities.py +0 -0
  29. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/utilties/setup_utils.py +0 -0
  30. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/__init__.py +0 -0
  31. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/static/css/styles.css +0 -0
  32. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/static/js/app.js +0 -0
  33. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/api_keys_config.html +0 -0
  34. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/collections_config.html +0 -0
  35. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/index.html +0 -0
  36. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/llm_config.html +0 -0
  37. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/main_config.html +0 -0
  38. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/search_engines_config.html +0 -0
  39. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/settings.html +0 -0
  40. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web/templates/settings_dashboard.html +0 -0
  41. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/__init__.py +0 -0
  42. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/__init__.py +0 -0
  43. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/full_search.py +0 -0
  44. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/meta_search_engine.py +0 -0
  45. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_arxiv.py +0 -0
  46. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_brave.py +0 -0
  47. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_ddg.py +0 -0
  48. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_github.py +0 -0
  49. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_google_pse.py +0 -0
  50. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_guardian.py +0 -0
  51. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_local.py +0 -0
  52. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_local_all.py +0 -0
  53. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_pubmed.py +0 -0
  54. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_searxng.py +0 -0
  55. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_serpapi.py +0 -0
  56. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_wayback.py +0 -0
  57. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/engines/search_engine_wikipedia.py +0 -0
  58. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/full_search.py +0 -0
  59. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/search_engine_factory.py +0 -0
  60. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research/web_search_engines/search_engines_config.py +0 -0
  61. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research.egg-info/SOURCES.txt +0 -0
  62. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research.egg-info/dependency_links.txt +0 -0
  63. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research.egg-info/entry_points.txt +0 -0
  64. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research.egg-info/requires.txt +0 -0
  65. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/src/local_deep_research.egg-info/top_level.txt +0 -0
  66. {local_deep_research-0.1.14 → local_deep_research-0.1.16}/tests/test_google_pse.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: local-deep-research
- Version: 0.1.14
+ Version: 0.1.16
  Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
  Author-email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
  License: MIT License
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "local-deep-research"
- version = "0.1.14"
+ version = "0.1.16"
  description = "AI-powered research assistant with deep, iterative analysis using LLMs and web searches"
  readme = "README.md"
  requires-python = ">=3.8"
src/local_deep_research/defaults/search_engines.toml
@@ -37,7 +37,7 @@ module_path = "local_deep_research.web_search_engines.engines.search_engine_pubm
  class_name = "PubMedSearchEngine"
  requires_api_key = false
  api_key_env = "NCBI_API_KEY"
- reliability = 0.95
+ reliability = 0.98
  strengths = [
  "biomedical literature", "medical research", "clinical studies",
  "life sciences", "health information", "scientific papers"
@@ -191,7 +191,7 @@ module_path = "local_deep_research.web_search_engines.engines.search_engine_sema
  class_name = "SemanticScholarSearchEngine"
  requires_api_key = false
  api_key_env = "S2_API_KEY"
- reliability = 0.95
+ reliability = 0.87
  strengths = [
  "comprehensive scientific literature",
  "extensive citation network",
src/local_deep_research/search_system.py
@@ -28,7 +28,7 @@ class AdvancedSearchSystem:

  # Check if search is available, log warning if not
  if self.search is None:
- print("WARNING: Search system initialized with no search engine! Research will not be effective.")
+ logger.info("WARNING: Search system initialized with no search engine! Research will not be effective.")
  self._update_progress("WARNING: No search engine available", None, {"error": "No search engine configured properly"})


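This hunk, and the search_system.py hunks that follow, route what used to be bare print() calls through the module logger. A minimal sketch of the logger setup this pattern assumes is shown below; logging.getLogger(__name__) is the conventional way to obtain a module-level logger, and the final warning-level call is shown only as the conventional alternative severity, not as what the diff itself does.

    import logging

    logger = logging.getLogger(__name__)  # assumed module-level logger in search_system.py

    # What this release does: emit the message through the logger at INFO level.
    logger.info("WARNING: Search system initialized with no search engine! Research will not be effective.")

    # Conventional alternative: use the WARNING level rather than embedding "WARNING" in the text.
    logger.warning("Search system initialized with no search engine! Research will not be effective.")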
@@ -101,7 +101,7 @@ class AdvancedSearchSystem:
  self._update_progress("Knowledge compression complete", None)
  response = remove_think_tags(response.content)
  response = str(response) #+ "\n\n" + str(formatted_links)
- print(response)
+
  return response

  def analyze_topic(self, query: str) -> Dict:
@@ -165,7 +165,7 @@ class AdvancedSearchSystem:
  search_results = self.search.run(question)
  except Exception as e:
  error_msg = f"Error during search: {str(e)}"
- print(f"SEARCH ERROR: {error_msg}")
+ logger.info(f"SEARCH ERROR: {error_msg}")
  self._update_progress(error_msg,
  int(question_progress_base + 2),
  {"phase": "search_error", "error": str(e)})
@@ -190,7 +190,7 @@ class AdvancedSearchSystem:
  self._update_progress(f"Analyzing results for: {question}",
  int(question_progress_base + 5),
  {"phase": "analysis"})
- print("NR OF SOURCES: ", len(self.all_links_of_system))
+

  try:
  result = self.citation_handler.analyze_followup(
@@ -203,7 +203,7 @@ class AdvancedSearchSystem:
  if links:
  formatted_links=format_links(links=links)

- logger.debug(f"Generated questions: {formatted_links}")
+ logger.info(f"Generated questions: {formatted_links}")
  if result is not None:
  results_with_links = str(result["content"])
  findings.append(
@@ -219,7 +219,6 @@ class AdvancedSearchSystem:
  if settings.general.knowledge_accumulation != str(KnowledgeAccumulationApproach.NO_KNOWLEDGE.value):
  current_knowledge = current_knowledge + "\n\n\n New: \n" + results_with_links

- logger.info(settings.general.knowledge_accumulation)
  if settings.general.knowledge_accumulation == str(KnowledgeAccumulationApproach.QUESTION.value):
  logger.info("Compressing knowledge")
  self._update_progress(f"Compress Knowledge for: {question}",
@@ -232,7 +231,7 @@ class AdvancedSearchSystem:
  {"phase": "analysis_complete"})
  except Exception as e:
  error_msg = f"Error analyzing results: {str(e)}"
- print(f"ANALYSIS ERROR: {error_msg}")
+ logger.info(f"ANALYSIS ERROR: {error_msg}")
  self._update_progress(error_msg,
  int(question_progress_base + 10),
  {"phase": "analysis_error", "error": str(e)})
@@ -251,7 +250,7 @@ class AdvancedSearchSystem:
  logger.info("FINISHED ITERATION - Compressing Knowledge")
  except Exception as e:
  error_msg = f"Error compressing knowledge: {str(e)}"
- print(f"COMPRESSION ERROR: {error_msg}")
+ logger.info(f"COMPRESSION ERROR: {error_msg}")
  self._update_progress(error_msg,
  int((iteration / total_iterations) * 100 - 3),
  {"phase": "compression_error", "error": str(e)})
@@ -266,7 +265,7 @@ class AdvancedSearchSystem:
  formatted_findings = self._save_findings(findings, current_knowledge, query)
  except Exception as e:
  error_msg = f"Error saving findings: {str(e)}"
- print(f"SAVE ERROR: {error_msg}")
+ logger.info(f"SAVE ERROR: {error_msg}")
  self._update_progress(error_msg,
  int((iteration / total_iterations) * 100),
  {"phase": "save_error", "error": str(e)})
@@ -278,7 +277,7 @@ class AdvancedSearchSystem:
  "findings": findings,
  "iterations": iteration,
  "questions": self.questions_by_iteration,
- "formatted_findings": formatted_findings if 'formatted_findings' in locals() else "Error: Findings not available.",
+ "formatted_findings": formatted_findings,
  "current_knowledge": current_knowledge
  }

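The hunk above drops the 'formatted_findings' in locals() guard from the returned dictionary. A common way to keep that lookup safe without the guard is to bind a fallback before the try block that might fail. The sketch below is a generic illustration of that idiom, not code from this package; build_result and its save_findings parameter are hypothetical names.

    import logging

    def build_result(findings, current_knowledge, query, save_findings):
        # Bind a fallback first so the dict construction below never raises NameError,
        # even when save_findings() (a hypothetical callable) throws.
        formatted_findings = "Error: Findings not available."
        try:
            formatted_findings = save_findings(findings, current_knowledge, query)
        except Exception as exc:
            logging.getLogger(__name__).info(f"SAVE ERROR: {exc}")
        return {"formatted_findings": formatted_findings, "findings": findings}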
src/local_deep_research/web/app.py
@@ -1001,27 +1001,14 @@ def run_research_process(research_id, query, mode):
  if mode == 'quick':
  # Quick Summary
  if results.get('findings'):
- #initial_analysis = [finding['content'] for finding in results['findings']]
- summary = ""
-
- # Safer access to formatted_findings with logging
- print(f"Results keys: {list(results.keys())}")
-
- # Check if formatted_findings exists in results
- if 'formatted_findings' not in results:
- logger.info("WARNING: 'formatted_findings' not found in results, using fallback")
- # Create fallback formatted findings from available data
- raw_formatted_findings = "# Research Findings\n\n"
- raw_formatted_findings = raw_formatted_findings + str(results.get('current_knowledge'))
- for i, finding in enumerate(results.get('findings', [])):
- raw_formatted_findings += f"## Finding {i+1}\n\n{finding.get('content', '')}\n\n"
- else:
- raw_formatted_findings = results['formatted_findings']
- logger.info(f"Found formatted_findings of length: {len(str(raw_formatted_findings))}")
+
+ raw_formatted_findings = results['formatted_findings']
+ logger.info(f"Found formatted_findings of length: {len(str(raw_formatted_findings))}")

  try:
+ clean_markdown = raw_formatted_findings
  # ADDED CODE: Convert debug output to clean markdown
- clean_markdown = convert_debug_to_markdown(raw_formatted_findings, query)
+ #clean_markdown = convert_debug_to_markdown(raw_formatted_findings, query)
  print(f"Successfully converted to clean markdown of length: {len(clean_markdown)}")

  # First send a progress update for generating the summary
@@ -1693,10 +1680,7 @@ def convert_debug_to_markdown(raw_text, query):
  lines_after = len(content.split("\n"))
  print(f"Removed {lines_before - lines_after} divider lines")

- # If COMPLETE RESEARCH OUTPUT exists, remove that section
- if "COMPLETE RESEARCH OUTPUT" in content:
- print("Found and removing COMPLETE RESEARCH OUTPUT section")
- content = content.split("COMPLETE RESEARCH OUTPUT")[0].strip()
+

  # Remove SEARCH QUESTIONS BY ITERATION section
  if "SEARCH QUESTIONS BY ITERATION:" in content: