local-deep-research 0.3.1.tar.gz → 0.3.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. local_deep_research-0.3.3/PKG-INFO +349 -0
  2. local_deep_research-0.3.3/README.md +278 -0
  3. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/pyproject.toml +1 -1
  4. local_deep_research-0.3.3/src/local_deep_research/__version__.py +1 -0
  5. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/filters/cross_engine_filter.py +1 -1
  6. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/config/llm_config.py +2 -3
  7. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/defaults/default_settings.json +1 -1
  8. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/routes/settings_routes.py +2 -4
  9. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/settings.js +8 -4
  10. local_deep_research-0.3.1/PKG-INFO +0 -549
  11. local_deep_research-0.3.1/README.md +0 -478
  12. local_deep_research-0.3.1/src/local_deep_research/__version__.py +0 -1
  13. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/LICENSE +0 -0
  14. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/__init__.py +0 -0
  15. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/__main__.py +0 -0
  16. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/__init__.py +0 -0
  17. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/filters/__init__.py +0 -0
  18. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/filters/base_filter.py +0 -0
  19. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/findings/base_findings.py +0 -0
  20. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/findings/repository.py +0 -0
  21. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/knowledge/__init__.py +0 -0
  22. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/knowledge/base_knowledge.py +0 -0
  23. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/knowledge/standard_knowledge.py +0 -0
  24. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/questions/__init__.py +0 -0
  25. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/questions/base_question.py +0 -0
  26. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/questions/decomposition_question.py +0 -0
  27. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/questions/standard_question.py +0 -0
  28. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/repositories/__init__.py +0 -0
  29. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/__init__.py +0 -0
  30. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/base_strategy.py +0 -0
  31. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/iterdrag_strategy.py +0 -0
  32. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/parallel_search_strategy.py +0 -0
  33. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/rapid_search_strategy.py +0 -0
  34. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/source_based_strategy.py +0 -0
  35. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/strategies/standard_strategy.py +0 -0
  36. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/tools/__init__.py +0 -0
  37. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/tools/base_tool.py +0 -0
  38. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/tools/knowledge_tools/__init__.py +0 -0
  39. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/tools/question_tools/__init__.py +0 -0
  40. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/advanced_search_system/tools/search_tools/__init__.py +0 -0
  41. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/api/__init__.py +0 -0
  42. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/api/research_functions.py +0 -0
  43. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/app.py +0 -0
  44. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/citation_handler.py +0 -0
  45. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/config/__init__.py +0 -0
  46. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/config/search_config.py +0 -0
  47. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/defaults/.env.template +0 -0
  48. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/defaults/__init__.py +0 -0
  49. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/main.py +0 -0
  50. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/migrate_db.py +0 -0
  51. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/report_generator.py +0 -0
  52. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/search_system.py +0 -0
  53. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/setup_data_dir.py +0 -0
  54. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/test_migration.py +0 -0
  55. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/utilities/__init__.py +0 -0
  56. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/utilities/db_utils.py +0 -0
  57. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/utilities/enums.py +0 -0
  58. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/utilities/llm_utils.py +0 -0
  59. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/utilities/search_utilities.py +0 -0
  60. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/utilities/setup_utils.py +0 -0
  61. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/__init__.py +0 -0
  62. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/app.py +0 -0
  63. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/app_factory.py +0 -0
  64. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/database/README.md +0 -0
  65. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/database/migrate_to_ldr_db.py +0 -0
  66. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/database/migrations.py +0 -0
  67. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/database/models.py +0 -0
  68. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/database/schema_upgrade.py +0 -0
  69. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/models/database.py +0 -0
  70. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/models/settings.py +0 -0
  71. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/routes/api_routes.py +0 -0
  72. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/routes/history_routes.py +0 -0
  73. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/routes/research_routes.py +0 -0
  74. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/services/research_service.py +0 -0
  75. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/services/resource_service.py +0 -0
  76. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/services/settings_manager.py +0 -0
  77. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/services/settings_service.py +0 -0
  78. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/services/socket_service.py +0 -0
  79. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/css/custom_dropdown.css +0 -0
  80. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/css/settings.css +0 -0
  81. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/css/styles.css +0 -0
  82. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/custom_dropdown.js +0 -0
  83. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/detail.js +0 -0
  84. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/fallback/formatting.js +0 -0
  85. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/fallback/ui.js +0 -0
  86. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/history.js +0 -0
  87. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/logpanel.js +0 -0
  88. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/progress.js +0 -0
  89. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/research.js +0 -0
  90. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/results.js +0 -0
  91. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/components/settings_sync.js +0 -0
  92. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/main.js +0 -0
  93. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/research_form.js +0 -0
  94. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/services/api.js +0 -0
  95. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/services/audio.js +0 -0
  96. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/services/formatting.js +0 -0
  97. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/services/pdf.js +0 -0
  98. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/services/socket.js +0 -0
  99. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/js/services/ui.js +0 -0
  100. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/sounds/README.md +0 -0
  101. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/sounds/error.mp3 +0 -0
  102. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/static/sounds/success.mp3 +0 -0
  103. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/base.html +0 -0
  104. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/components/custom_dropdown.html +0 -0
  105. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/components/log_panel.html +0 -0
  106. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/components/mobile_nav.html +0 -0
  107. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/components/settings_form.html +0 -0
  108. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/components/sidebar.html +0 -0
  109. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/pages/details.html +0 -0
  110. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/pages/history.html +0 -0
  111. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/pages/progress.html +0 -0
  112. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/pages/research.html +0 -0
  113. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/pages/results.html +0 -0
  114. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/templates/settings_dashboard.html +0 -0
  115. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/utils/__init__.py +0 -0
  116. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web/utils/formatters.py +0 -0
  117. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/__init__.py +0 -0
  118. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/__init__.py +0 -0
  119. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/full_search.py +0 -0
  120. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/meta_search_engine.py +0 -0
  121. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_arxiv.py +0 -0
  122. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_brave.py +0 -0
  123. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_ddg.py +0 -0
  124. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_github.py +0 -0
  125. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_google_pse.py +0 -0
  126. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_guardian.py +0 -0
  127. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_local.py +0 -0
  128. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_local_all.py +0 -0
  129. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_pubmed.py +0 -0
  130. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_searxng.py +0 -0
  131. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py +0 -0
  132. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_serpapi.py +0 -0
  133. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_wayback.py +0 -0
  134. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/engines/search_engine_wikipedia.py +0 -0
  135. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/search_engine_base.py +0 -0
  136. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/search_engine_factory.py +0 -0
  137. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/src/local_deep_research/web_search_engines/search_engines_config.py +0 -0
  138. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/tests/__init__.py +0 -0
  139. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/tests/download_stuff_for_local_test.py +0 -0
  140. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/tests/searxng/test_searxng_instance.py +0 -0
  141. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/tests/searxng/test_searxng_integration.py +0 -0
  142. {local_deep_research-0.3.1 → local_deep_research-0.3.3}/tests/test_google_pse.py +0 -0
@@ -0,0 +1,349 @@
Metadata-Version: 2.1
Name: local-deep-research
Version: 0.3.3
Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
Author-Email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
License: MIT License

Copyright (c) 2025 LearningCircuit

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Project-URL: Homepage, https://github.com/LearningCircuit/local-deep-research
Project-URL: Bug Tracker, https://github.com/LearningCircuit/local-deep-research/issues
Requires-Python: >=3.10
Requires-Dist: langchain>=0.3.18
Requires-Dist: langchain-community>=0.3.17
Requires-Dist: langchain-core>=0.3.34
Requires-Dist: langchain-ollama>=0.2.3
Requires-Dist: langchain-openai>=0.3.5
Requires-Dist: langchain_anthropic>=0.3.7
Requires-Dist: duckduckgo_search>=7.3.2
Requires-Dist: python-dateutil>=2.9.0
Requires-Dist: typing_extensions>=4.12.2
Requires-Dist: justext
Requires-Dist: playwright
Requires-Dist: beautifulsoup4
Requires-Dist: flask>=3.1.0
Requires-Dist: flask-cors>=3.0.10
Requires-Dist: flask-socketio>=5.1.1
Requires-Dist: sqlalchemy>=1.4.23
Requires-Dist: wikipedia
Requires-Dist: arxiv>=1.4.3
Requires-Dist: pypdf
Requires-Dist: sentence-transformers
Requires-Dist: faiss-cpu
Requires-Dist: pydantic>=2.0.0
Requires-Dist: pydantic-settings>=2.0.0
Requires-Dist: toml>=0.10.2
Requires-Dist: platformdirs>=3.0.0
Requires-Dist: dynaconf
Requires-Dist: requests>=2.28.0
Requires-Dist: tiktoken>=0.4.0
Requires-Dist: xmltodict>=0.13.0
Requires-Dist: lxml>=4.9.2
Requires-Dist: pdfplumber>=0.9.0
Requires-Dist: unstructured>=0.10.0
Requires-Dist: google-search-results
Requires-Dist: importlib-resources>=6.5.2
Requires-Dist: setuptools>=78.1.0
Requires-Dist: flask-wtf>=1.2.2
Description-Content-Type: text/markdown

# Local Deep Research

<div align="center">

[![GitHub stars](https://img.shields.io/github/stars/LearningCircuit/local-deep-research?style=for-the-badge)](https://github.com/LearningCircuit/local-deep-research/stargazers)
[![License](https://img.shields.io/badge/License-MIT-green.svg?style=for-the-badge)](LICENSE)
[![Discord](https://img.shields.io/discord/1352043059562680370?style=for-the-badge&logo=discord)](https://discord.gg/ttcqQeFcJ3)
[![Reddit](https://img.shields.io/badge/Reddit-r/LocalDeepResearch-FF4500?style=for-the-badge&logo=reddit)](https://www.reddit.com/r/LocalDeepResearch/)

*AI-powered research assistant that performs deep, iterative analysis using multiple LLMs and web searches*

<a href="https://www.youtube.com/watch?v=0ISreg9q0p0">
  <img src="https://img.youtube.com/vi/0ISreg9q0p0/0.jpg" alt="Local Deep Research Demo" width="500">
</a>

</div>

## 📋 Overview

Local Deep Research is a powerful AI research assistant that:

1. **Performs iterative, multi-source research** on any topic
2. **Creates comprehensive reports or quick summaries** with proper citations
3. **Runs locally** for complete privacy when using local LLMs
4. **Searches across multiple sources** including academic databases & the web
5. **Processes your own documents** with vector search (RAG)
6. **Optimized for speed** with parallel search processing

Local Deep Research combines the power of large language models with intelligent search strategies to provide well-researched, properly cited answers to complex questions. It can process queries in just seconds with the Quick Summary option, or create detailed reports with proper section organization for more comprehensive analysis.

## ⚡ Quick Start (Recommended)

```bash
# 1. Install
pip install local-deep-research

# 2. Set up SearXNG for best results
docker pull searxng/searxng
docker run -d -p 8080:8080 --name searxng searxng/searxng
docker start searxng   # required after every reboot

# 3. Install Ollama and pull a model
# Download from https://ollama.ai and run:
ollama pull gemma3:12b

# 4. Start the web interface
python -m local_deep_research.web.app
```

Then visit `http://127.0.0.1:5000` to start researching!

### Alternative Installation Options

**Windows Installer**: Download the [Windows Installer](https://github.com/LearningCircuit/local-deep-research/releases/download/v0.1.0/LocalDeepResearch_Setup.exe) for one-click setup.

**Docker**: Run with Docker using:
```bash
docker run --network=host \
    local-deep-research
```

**Command Line**: Alternatively, use the CLI version with:
```bash
python -m local_deep_research.main
```

## 🔍 Research Capabilities

### Two Research Modes

- **Quick Summary**: Fast results (30s-3min) with key information and proper citations
  - Perfect for rapid exploration and answering straightforward questions
  - Supports multiple search engines in parallel for maximum efficiency
  - Tables and structured information can be included when relevant

- **Detailed Report**: Comprehensive analysis with structured sections, table of contents, and in-depth exploration
  - Creates professional-grade reports with proper organization
  - Conducts separate research for each section to ensure comprehensive coverage
  - Integrates information across sections for a cohesive analysis
  - Includes proper citations and reference tracking

### Performance Optimization

- **Use Direct SearXNG**: For maximum speed (bypasses LLM calls needed for engine selection)
- **Adjust Iteration Depth**:
  - 1 iteration: Quick factual questions (~30 seconds)
  - 2-3 iterations: Complex topics requiring deeper exploration (2-3 minutes)
  - 3-5 iterations: Comprehensive research with follow-up investigation (5+ minutes)
- **Choose Appropriate Models**:
  - 12B-30B parameter models offer good balance of quality and speed
  - For complex research, larger models may provide better synthesis
- **For Detailed Reports**: Expect multiple research cycles (one per section) and longer processing times

### Multi-Source Integration

- **Auto-Engine Selection**: The system intelligently selects the most appropriate search engines for your query
- **Academic Sources**: Direct access to Wikipedia, arXiv, PubMed, Semantic Scholar, and more
- **Web Search**: Via SearXNG, Brave Search, SerpAPI (for Google results), and more
- **Local Document Search**: Search through your private document collections with vector embeddings
- **Cross-Engine Filtering**: Smart result ranking across search engines for better information quality

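If you would rather pin a run to one of those engines than rely on auto-selection, the `search_tool` parameter of the Python API (shown in full under Programmatic Access below) accepts an engine name. A minimal sketch; the query and engine choice here are only illustrative:

```python
from local_deep_research import quick_summary

# Pin the run to a single engine instead of letting "auto" choose.
# "wikipedia" is one of the engine names listed in the Search Engines section.
results = quick_summary(
    query="history of fusion energy research",
    search_tool="wikipedia",   # or "searxng", "arxiv", "pubmed", ...
    iterations=1,
)
print(results["summary"])
```
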
## 🤖 LLM Support

Local Deep Research works with both local and cloud LLMs:

### Local Models (via Ollama)

Local models provide complete privacy and don't require API keys or internet connection for the LLM component (only search queries go online).

```bash
# Install Ollama from https://ollama.ai
ollama pull gemma3:12b # Recommended model
```

Recommended local models:
- **Gemma 3 (12B)** - Great balance of quality and speed
- **Mistral (7B/8x7B)** - Fast performance on most hardware
- **Llama 3 (8B/70B)** - Good performance across various tasks

### Cloud Models

Cloud models can provide higher quality results for complex research tasks:

API keys can be configured directly through the web interface in the settings panel or via environment variables:

```bash
# Cloud LLM providers - add to your .env file if not using the web UI
LDR_LLM_ANTHROPIC_API_KEY=your-api-key-here # For Claude models
LDR_LLM_OPENAI_API_KEY=your-openai-key-here # For GPT models
LDR_LLM_OPENAI_ENDPOINT_API_KEY=your-key-here # For OpenRouter or similar services

# Set your preferred provider and model
LDR_LLM_PROVIDER=ollama # Options: ollama, openai, anthropic, etc.
LDR_LLM_MODEL=gemma3:12b # Model name to use
```

### Supported Providers

| Provider | Type | Setup | Models |
|----------|------|---------|--------|
| `OLLAMA` | Local | Install from [ollama.ai](https://ollama.ai) | Mistral, Llama, Gemma, etc. |
| `OPENAI` | Cloud | API key required | GPT-3.5, GPT-4, GPT-4o |
| `ANTHROPIC` | Cloud | API key required | Claude 3 Opus, Sonnet, Haiku |
| `OPENAI_ENDPOINT` | Cloud | API key required | Any OpenAI-compatible API |
| `VLLM` | Local | Requires GPU setup | Any supported by vLLM |
| `LMSTUDIO` | Local | Use LM Studio server | Models from LM Studio |
| `LLAMACPP` | Local | Configure model path | GGUF model formats |

You can easily switch between models in the web interface or via environment variables without reinstalling.

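Switching the whole pipeline to a cloud provider is therefore just a matter of changing the variables documented above. A sketch of a `.env` for Anthropic; the model identifier is only an example, substitute whichever model your key has access to:

```bash
# .env — swap the local Ollama default for Anthropic without reinstalling
LDR_LLM_PROVIDER=anthropic
LDR_LLM_MODEL=claude-3-haiku-20240307   # example model name
LDR_LLM_ANTHROPIC_API_KEY=your-api-key-here
```
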
## 🌐 Search Engines

The system leverages multiple search engines to find the most relevant information for your queries.

### Core Free Engines (No API Key Required)

- **`auto`**: Intelligently selects the best engines based on your query (recommended)
- **`wikipedia`**: General knowledge, facts, and encyclopedic information
- **`arxiv`**: Scientific papers and academic research
- **`pubmed`**: Medical and biomedical research and journals
- **`semantic_scholar`**: Academic literature across all fields
- **`github`**: Code repositories, documentation, and technical discussions
- **`searxng`**: Comprehensive web search via local SearXNG instance
- **`wayback`**: Historical web content from Internet Archive

### Paid Engines (API Key Required)

For enhanced web search capabilities, you can configure these additional engines through the settings interface or via environment variables:

```bash
# Search API keys (if not using the web UI)
SERP_API_KEY=your-key-here # Google results via SerpAPI
GOOGLE_PSE_API_KEY=your-key-here # Google Programmable Search
BRAVE_API_KEY=your-key-here # Brave Search
```

### Search Engine Comparison

| Engine | Specialization | Privacy | Speed | Results Quality |
|--------|----------------|---------|-------|-----------------|
| SearXNG | General web | ★★★★★ | ★★★★★ | ★★★★½ |
| Wikipedia | Facts & concepts | ★★★★★ | ★★★★☆ | ★★★★☆ |
| arXiv | Scientific research | ★★★★★ | ★★★★☆ | ★★★★★ |
| PubMed | Medical research | ★★★★★ | ★★★★☆ | ★★★★★ |
| GitHub | Code & tech | ★★★★★ | ★★★☆☆ | ★★★★☆ |
| SerpAPI | Web (Google) | ★★☆☆☆ | ★★★★☆ | ★★★★★ |
| Brave | Web (privacy-focused) | ★★★★☆ | ★★★★☆ | ★★★★☆ |

## 📚 Local Document Search (RAG)

Local Deep Research includes powerful Retrieval Augmented Generation (RAG) capabilities, allowing you to search and analyze your own private documents using vector embeddings:

### Supported Document Types

- PDF files
- Markdown (.md)
- Plain text (.txt)
- Microsoft Word (.docx, .doc)
- Excel spreadsheets (.xlsx, .xls)
- CSV files
- And more

### Using Document Collections

You can use your documents in research via:
- Auto-selection (when relevant to query)
- Direct collection selection: `tool = "project_docs"`
- All collections: `tool = "local_all"`
- Query syntax: `collection:project_docs your query`

This allows you to integrate your private knowledge base with web search results for comprehensive research that includes your own documents and data.

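As a rough sketch of the options above, assuming a collection named `project_docs` has already been created and indexed in the settings, a local-only query through the Python API might look like this (routing the collection name through `search_tool` mirrors the Programmatic Access example below and is an assumption, not a documented signature):

```python
from local_deep_research import quick_summary

# Hypothetical example: search every configured local collection instead of the web.
# "local_all" is the collection option listed above; passing it via `search_tool`
# is an assumption based on the quick_summary example later in this README.
results = quick_summary(
    query="what do our design documents say about caching?",
    search_tool="local_all",
    iterations=1,
)
print(results["summary"])
```
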
## 🛠️ Advanced Configuration

### Web Interface

The easiest way to configure Local Deep Research is through the web interface, which provides:
- Complete settings management
- Model selection
- Search engine configuration
- Research parameter adjustment
- Local document collection setup

### Configuration Documentation

For detailed configuration options, see our guides:
- [Environment Variables Guide](https://github.com/LearningCircuit/local-deep-research/blob/main/docs/env_configuration.md)
- [SearXNG Setup Guide](https://github.com/LearningCircuit/local-deep-research/blob/main/docs/SearXNG-Setup.md)
- [Docker Usage Guide](https://github.com/LearningCircuit/local-deep-research/blob/main/docs/docker-usage-readme.md)
- [Docker Compose Guide](https://github.com/LearningCircuit/local-deep-research/blob/main/docs/docker-compose-guide.md)

### Programmatic Access

Use the Python API for integration with other tools or scripts:

```python
from local_deep_research import quick_summary, generate_report

# Quick research with custom parameters
results = quick_summary(
    query="advances in fusion energy",
    search_tool="auto",
    iterations=1,
    questions_per_iteration=2,
    max_results=30,
    temperature=0.7
)
print(results["summary"])
```

For more examples, see the [programmatic access tutorial](https://github.com/LearningCircuit/local-deep-research/blob/main/examples/programmatic_access.ipynb).

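`generate_report` is imported above but not demonstrated; the sketch below assumes it takes roughly the same parameters as `quick_summary` and returns printable report content. Both the signature and the return shape are assumptions, so check the linked tutorial for the real interface:

```python
from local_deep_research import generate_report

# Hypothetical usage: the parameter names mirror quick_summary and are assumptions,
# as is treating the return value as printable report text.
report = generate_report(
    query="advances in fusion energy",
    search_tool="auto",
    iterations=2,
)
print(report)
```
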
## 📊 Examples & Documentation

For more information and examples of what Local Deep Research can produce:

- [Example Outputs](https://github.com/LearningCircuit/local-deep-research/tree/main/examples)
- [Documentation](https://github.com/LearningCircuit/local-deep-research/tree/main/docs)
- [Wiki](https://github.com/LearningCircuit/local-deep-research/wiki)

## 🤝 Community & Support

- [Discord](https://discord.gg/ttcqQeFcJ3): Discuss features, get help, and share research techniques
- [Reddit](https://www.reddit.com/r/LocalDeepResearch/): Announcements, updates, and community showcase
- [GitHub Issues](https://github.com/LearningCircuit/local-deep-research/issues): Bug reports and feature requests

## 📄 License & Acknowledgments

This project is licensed under the MIT License.

Built with powerful open-source tools:
- [LangChain](https://github.com/hwchase17/langchain) framework for LLM integration
- [Ollama](https://ollama.ai) for local AI model management
- [SearXNG](https://searxng.org/) for privacy-focused web search
- [FAISS](https://github.com/facebookresearch/faiss) for vector similarity search
- [justext](https://github.com/miso-belica/justext) and [Playwright](https://playwright.dev) for web content analysis

> **Support Free Knowledge:** If you frequently use the search engines in this tool, please consider making a donation to organizations like [Wikipedia](https://donate.wikimedia.org), [arXiv](https://arxiv.org/about/give), or [PubMed](https://www.nlm.nih.gov/pubs/donations/donations.html).

@@ -57,7 +57,7 @@ dependencies = [
      "setuptools>=78.1.0",
      "flask-wtf>=1.2.2",
  ]
- version = "0.3.1"
+ version = "0.3.3"

  [project.license]
  file = "LICENSE"
@@ -0,0 +1 @@
+ __version__ = "0.3.3"
@@ -31,7 +31,7 @@ class CrossEngineFilter(BaseFilter):
          super().__init__(model)
          # Get max_results from database settings if not provided
          if max_results is None:
-             max_results = get_db_setting("search.cross_engine_max_results", 100)
+             max_results = int(get_db_setting("search.cross_engine_max_results", 100))
          self.max_results = max_results
          self.default_reorder = default_reorder
          self.default_reindex = default_reindex
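
The only functional change in this hunk is wrapping the settings lookup in `int()`. The likely reason (an inference from the cast itself, not stated in the diff) is that values read back from the settings database can arrive as strings or floats, and a non-integer `max_results` breaks later slicing and comparisons. A tiny illustration with a stand-in for `get_db_setting`:

```python
# Stand-in for get_db_setting: settings UIs frequently persist numbers as strings.
def get_db_setting(key, default):
    return "100"  # hypothetical stored value

results = list(range(500))

max_results = get_db_setting("search.cross_engine_max_results", 100)
# results[:max_results] -> TypeError: slice indices must be integers or None

max_results = int(get_db_setting("search.cross_engine_max_results", 100))
print(len(results[:max_results]))  # 100 — the cast restores the expected behaviour
```
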
@@ -3,6 +3,7 @@ import os

  from langchain_anthropic import ChatAnthropic
  from langchain_community.llms import VLLM
+ from langchain_core.language_models import FakeListChatModel
  from langchain_ollama import ChatOllama
  from langchain_openai import ChatOpenAI

@@ -248,9 +249,7 @@ def get_llm(model_name=None, temperature=None, provider=None, openai_endpoint_ur

  def get_fallback_model(temperature=None):
      """Create a dummy model for when no providers are available"""
-     from langchain_community.llms.fake import FakeListLLM
-
-     return FakeListLLM(
+     return FakeListChatModel(
          responses=[
              "No language models are available. Please install Ollama or set up API keys."
          ]
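
Here the fallback switches from `FakeListLLM` (a plain text-completion stub) to `FakeListChatModel`, the chat-model stub exported by `langchain_core.language_models`, so the dummy object exposes the same chat interface as `ChatOllama`, `ChatOpenAI`, and `ChatAnthropic`. A quick sketch of the difference:

```python
from langchain_core.language_models import FakeListChatModel

fallback = FakeListChatModel(
    responses=[
        "No language models are available. Please install Ollama or set up API keys."
    ]
)

# Chat models return message objects, matching what callers of get_llm() expect
# from the real providers; FakeListLLM.invoke() returned a bare string instead.
message = fallback.invoke("any prompt")
print(message.content)
```
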
@@ -10,7 +10,7 @@
      "step": null,
      "type": "APP",
      "ui_element": "text",
-     "value": "0.3.0",
+     "value": "0.3.3",
      "visible": false
  },
  "app.debug": {