local-deep-research 0.3.10__tar.gz → 0.3.12__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
Files changed (145)
  1. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/PKG-INFO +42 -17
  2. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/README.md +41 -16
  3. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/pyproject.toml +10 -1
  4. local_deep_research-0.3.12/src/local_deep_research/__version__.py +1 -0
  5. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/config/llm_config.py +11 -2
  6. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/config/search_config.py +1 -1
  7. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/defaults/default_settings.json +18 -5
  8. local_deep_research-0.3.12/src/local_deep_research/utilities/url_utils.py +57 -0
  9. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/routes/api_routes.py +18 -9
  10. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/routes/settings_routes.py +15 -5
  11. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/services/research_service.py +2 -2
  12. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/full_search.py +1 -1
  13. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_local.py +12 -3
  14. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_searxng.py +23 -8
  15. local_deep_research-0.3.12/tests/test_url_utils.py +55 -0
  16. local_deep_research-0.3.12/tests/test_url_utils_debug.py +33 -0
  17. local_deep_research-0.3.12/tests/test_url_utils_simple.py +39 -0
  18. local_deep_research-0.3.10/src/local_deep_research/__version__.py +0 -1
  19. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/LICENSE +0 -0
  20. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/__init__.py +0 -0
  21. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/__init__.py +0 -0
  22. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/filters/__init__.py +0 -0
  23. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/filters/base_filter.py +0 -0
  24. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/filters/cross_engine_filter.py +0 -0
  25. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/findings/base_findings.py +0 -0
  26. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/findings/repository.py +0 -0
  27. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/knowledge/__init__.py +0 -0
  28. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/knowledge/base_knowledge.py +0 -0
  29. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/knowledge/standard_knowledge.py +0 -0
  30. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/questions/__init__.py +0 -0
  31. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/questions/base_question.py +0 -0
  32. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/questions/decomposition_question.py +0 -0
  33. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/questions/standard_question.py +0 -0
  34. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/repositories/__init__.py +0 -0
  35. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/__init__.py +0 -0
  36. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/base_strategy.py +0 -0
  37. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/iterdrag_strategy.py +0 -0
  38. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/parallel_search_strategy.py +0 -0
  39. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/rapid_search_strategy.py +0 -0
  40. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/source_based_strategy.py +0 -0
  41. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/strategies/standard_strategy.py +0 -0
  42. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/tools/__init__.py +0 -0
  43. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/tools/base_tool.py +0 -0
  44. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/tools/knowledge_tools/__init__.py +0 -0
  45. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/tools/question_tools/__init__.py +0 -0
  46. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/advanced_search_system/tools/search_tools/__init__.py +0 -0
  47. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/api/__init__.py +0 -0
  48. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/api/research_functions.py +0 -0
  49. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/app.py +0 -0
  50. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/citation_handler.py +0 -0
  51. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/config/__init__.py +0 -0
  52. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/defaults/.env.template +0 -0
  53. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/defaults/__init__.py +0 -0
  54. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/migrate_db.py +0 -0
  55. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/report_generator.py +0 -0
  56. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/search_system.py +0 -0
  57. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/setup_data_dir.py +0 -0
  58. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/test_migration.py +0 -0
  59. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/utilities/__init__.py +0 -0
  60. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/utilities/db_utils.py +0 -0
  61. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/utilities/enums.py +0 -0
  62. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/utilities/llm_utils.py +0 -0
  63. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/utilities/search_utilities.py +0 -0
  64. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/utilities/setup_utils.py +0 -0
  65. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/__init__.py +0 -0
  66. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/app.py +0 -0
  67. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/app_factory.py +0 -0
  68. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/database/README.md +0 -0
  69. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/database/migrate_to_ldr_db.py +0 -0
  70. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/database/migrations.py +0 -0
  71. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/database/models.py +0 -0
  72. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/database/schema_upgrade.py +0 -0
  73. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/models/database.py +0 -0
  74. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/models/settings.py +0 -0
  75. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/routes/history_routes.py +0 -0
  76. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/routes/research_routes.py +0 -0
  77. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/services/resource_service.py +0 -0
  78. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/services/settings_manager.py +0 -0
  79. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/services/settings_service.py +0 -0
  80. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/services/socket_service.py +0 -0
  81. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/css/custom_dropdown.css +0 -0
  82. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/css/settings.css +0 -0
  83. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/css/styles.css +0 -0
  84. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/custom_dropdown.js +0 -0
  85. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/detail.js +0 -0
  86. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/fallback/formatting.js +0 -0
  87. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/fallback/ui.js +0 -0
  88. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/history.js +0 -0
  89. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/logpanel.js +0 -0
  90. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/progress.js +0 -0
  91. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/research.js +0 -0
  92. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/results.js +0 -0
  93. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/settings.js +0 -0
  94. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/components/settings_sync.js +0 -0
  95. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/main.js +0 -0
  96. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/research_form.js +0 -0
  97. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/services/api.js +0 -0
  98. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/services/audio.js +0 -0
  99. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/services/formatting.js +0 -0
  100. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/services/pdf.js +0 -0
  101. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/services/socket.js +0 -0
  102. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/js/services/ui.js +0 -0
  103. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/sounds/README.md +0 -0
  104. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/sounds/error.mp3 +0 -0
  105. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/static/sounds/success.mp3 +0 -0
  106. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/base.html +0 -0
  107. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/components/custom_dropdown.html +0 -0
  108. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/components/log_panel.html +0 -0
  109. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/components/mobile_nav.html +0 -0
  110. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/components/settings_form.html +0 -0
  111. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/components/sidebar.html +0 -0
  112. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/pages/details.html +0 -0
  113. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/pages/history.html +0 -0
  114. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/pages/progress.html +0 -0
  115. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/pages/research.html +0 -0
  116. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/pages/results.html +0 -0
  117. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/templates/settings_dashboard.html +0 -0
  118. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/utils/__init__.py +0 -0
  119. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/utils/formatters.py +0 -0
  120. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web/utils/templates.py +0 -0
  121. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/__init__.py +0 -0
  122. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/__init__.py +0 -0
  123. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/meta_search_engine.py +0 -0
  124. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_arxiv.py +0 -0
  125. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_brave.py +0 -0
  126. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_ddg.py +0 -0
  127. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_github.py +0 -0
  128. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_google_pse.py +0 -0
  129. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_guardian.py +0 -0
  130. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_local_all.py +0 -0
  131. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_pubmed.py +0 -0
  132. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py +0 -0
  133. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_serpapi.py +0 -0
  134. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_wayback.py +0 -0
  135. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/engines/search_engine_wikipedia.py +0 -0
  136. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/search_engine_base.py +0 -0
  137. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/search_engine_factory.py +0 -0
  138. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/src/local_deep_research/web_search_engines/search_engines_config.py +0 -0
  139. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/__init__.py +0 -0
  140. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/download_stuff_for_local_test.py +0 -0
  141. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/fix_tests/README.md +0 -0
  142. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/fix_tests/test_duplicate_links_fix.py +0 -0
  143. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/searxng/test_searxng_instance.py +0 -0
  144. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/searxng/test_searxng_integration.py +0 -0
  145. {local_deep_research-0.3.10 → local_deep_research-0.3.12}/tests/test_google_pse.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: local-deep-research
- Version: 0.3.10
+ Version: 0.3.12
  Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
  Author-Email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
  License: MIT License
@@ -106,7 +106,7 @@ Local Deep Research combines the power of large language models with intelligent

  ## ⚡ Quick Start

- ### Option 1: Docker (Recommended)
+ ### Option 1: Docker (Quickstart no MAC/ARM)

  ```bash
  # Step 1: Pull and run SearXNG for optimal search results
@@ -115,29 +115,54 @@ docker run -d -p 8080:8080 --name searxng searxng/searxng

  # Step 2: Pull and run Local Deep Research (Please build your own docker on ARM)
  docker pull localdeepresearch/local-deep-research
- docker run -d -p 5000:5000 --name local-deep-research localdeepresearch/local-deep-research
+ docker run -d -p 5000:5000 --network host --name local-deep-research localdeepresearch/local-deep-research

- # Optional 3a: For connecting to already installed local Ollama (https://ollama.com/download) or other local services
- # docker run -d -p 5000:5000 --network host --name local-deep-research localdeepresearch/local-deep-research
-
- # Optional 3b (recommended): Pull and run Ollama for local LLM capabilities
- # docker pull ollama/ollama
- # docker run -d -p 11434:11434 --name ollama ollama/ollama
- # docker exec -it ollama ollama pull gemma3:12b
-
- # Start containers - Required after each reboot (can be automated with this flag --restart unless-stopped in run)
+ # Start containers - Required after each reboot (can be automated with this flag in run command --restart unless-stopped)
  docker start searxng
  docker start local-deep-research
- # docker start ollama
+
+ ```
+
+ ### Option 2: Docker Compose (Recommended)
+
+ LDR uses Docker compose to bundle the web app and all it's dependencies so
+ you can get up and running quickly.
+
+ ### Prerequisites
+
+ - [Docker](https://docs.docker.com/engine/install/)
+ - [Docker Compose](https://docs.docker.com/compose/install/)
+ - `cookiecutter`: Run `pip install --user cookiecutter`
+
+ Clone the repository:
+
+ ```bash
+ git clone https://github.com/LearningCircuit/local-deep-research.git
+ cd local-deep-research
+ ```
+
+ ### Configuring with Docker Compose
+
+ In the LDR repository, run the following command
+ to do generate the compose file:
+
+ ```bash
+ cookiecutter cookiecutter-docker/
+ ```
+
+ This will prompt you to answer a series of questions. Hit Enter repeatedly
+ to accept the default values. It should generate a file in the repository called `docker-compose.default.yml`. To run LDR, use the following command:
+
+ ```bash
+ docker compose -f docker-compose.default.yml up
  ```

  Then visit `http://127.0.0.1:5000` to start researching!

- > **Note**: If you need to connect to local services (like Ollama), add `--network host` to the command.
- >
- > **Don't have Docker? It's installed in a few clicks: [Install Docker here](https://www.docker.com/get-started/)**
+ See [here](https://github.com/LearningCircuit/local-deep-research/wiki/Installation#docker-installation-recommended) for more information about
+ using Docker.

- ### Option 2: Python Package (mostly for programmatic access)
+ ### Option 3: Python Package (mostly for programmatic access)

  ```bash
  # Install the package

README.md
@@ -35,7 +35,7 @@ Local Deep Research combines the power of large language models with intelligent

  ## ⚡ Quick Start

- ### Option 1: Docker (Recommended)
+ ### Option 1: Docker (Quickstart no MAC/ARM)

  ```bash
  # Step 1: Pull and run SearXNG for optimal search results
@@ -44,29 +44,54 @@ docker run -d -p 8080:8080 --name searxng searxng/searxng

  # Step 2: Pull and run Local Deep Research (Please build your own docker on ARM)
  docker pull localdeepresearch/local-deep-research
- docker run -d -p 5000:5000 --name local-deep-research localdeepresearch/local-deep-research
+ docker run -d -p 5000:5000 --network host --name local-deep-research localdeepresearch/local-deep-research

- # Optional 3a: For connecting to already installed local Ollama (https://ollama.com/download) or other local services
- # docker run -d -p 5000:5000 --network host --name local-deep-research localdeepresearch/local-deep-research
-
- # Optional 3b (recommended): Pull and run Ollama for local LLM capabilities
- # docker pull ollama/ollama
- # docker run -d -p 11434:11434 --name ollama ollama/ollama
- # docker exec -it ollama ollama pull gemma3:12b
-
- # Start containers - Required after each reboot (can be automated with this flag --restart unless-stopped in run)
+ # Start containers - Required after each reboot (can be automated with this flag in run command --restart unless-stopped)
  docker start searxng
  docker start local-deep-research
- # docker start ollama
+
+ ```
+
+ ### Option 2: Docker Compose (Recommended)
+
+ LDR uses Docker compose to bundle the web app and all it's dependencies so
+ you can get up and running quickly.
+
+ ### Prerequisites
+
+ - [Docker](https://docs.docker.com/engine/install/)
+ - [Docker Compose](https://docs.docker.com/compose/install/)
+ - `cookiecutter`: Run `pip install --user cookiecutter`
+
+ Clone the repository:
+
+ ```bash
+ git clone https://github.com/LearningCircuit/local-deep-research.git
+ cd local-deep-research
+ ```
+
+ ### Configuring with Docker Compose
+
+ In the LDR repository, run the following command
+ to do generate the compose file:
+
+ ```bash
+ cookiecutter cookiecutter-docker/
+ ```
+
+ This will prompt you to answer a series of questions. Hit Enter repeatedly
+ to accept the default values. It should generate a file in the repository called `docker-compose.default.yml`. To run LDR, use the following command:
+
+ ```bash
+ docker compose -f docker-compose.default.yml up
  ```

  Then visit `http://127.0.0.1:5000` to start researching!

- > **Note**: If you need to connect to local services (like Ollama), add `--network host` to the command.
- >
- > **Don't have Docker? It's installed in a few clicks: [Install Docker here](https://www.docker.com/get-started/)**
+ See [here](https://github.com/LearningCircuit/local-deep-research/wiki/Installation#docker-installation-recommended) for more information about
+ using Docker.

- ### Option 2: Python Package (mostly for programmatic access)
+ ### Option 3: Python Package (mostly for programmatic access)

  ```bash
  # Install the package

pyproject.toml
@@ -57,7 +57,7 @@ dependencies = [
  "setuptools>=78.1.0",
  "flask-wtf>=1.2.2",
  ]
- version = "0.3.10"
+ version = "0.3.12"

  [project.license]
  file = "LICENSE"
@@ -93,6 +93,14 @@ distribution = true
  source = "file"
  path = "src/local_deep_research/__version__.py"

+ [[tool.pdm.source]]
+ url = "https://download.pytorch.org/whl/cpu"
+ name = "torch"
+ include_packages = [
+ "torch",
+ "torch*",
+ ]
+
  [dependency-groups]
  dev = [
  "isort>=6.0.1",
@@ -100,4 +108,5 @@ dev = [
  "pre-commit>=4.2.0",
  "flake8>=7.1.2",
  "jupyter>=1.1.1",
+ "cookiecutter>=2.6.0",
  ]

src/local_deep_research/__version__.py
@@ -0,0 +1 @@
+ __version__ = "0.3.12"

src/local_deep_research/config/llm_config.py
@@ -9,6 +9,7 @@ from langchain_openai import ChatOpenAI

  from ..utilities.db_utils import get_db_setting
  from ..utilities.search_utilities import remove_think_tags
+ from ..utilities.url_utils import normalize_url

  # Setup logging
  logger = logging.getLogger(__name__)
@@ -141,7 +142,12 @@ def get_llm(model_name=None, temperature=None, provider=None, openai_endpoint_ur
  elif provider == "ollama":
  try:
  # Use the configurable Ollama base URL
- base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
+ raw_base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
+ base_url = (
+ normalize_url(raw_base_url)
+ if raw_base_url
+ else "http://localhost:11434"
+ )

  # Check if Ollama is available before trying to use it
  if not is_ollama_available():
@@ -371,7 +377,10 @@ def is_ollama_available():
  try:
  import requests

- base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
+ raw_base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
+ base_url = (
+ normalize_url(raw_base_url) if raw_base_url else "http://localhost:11434"
+ )
  logger.info(f"Checking Ollama availability at {base_url}/api/tags")

  try:
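
Every call site touched in this release follows the same pattern: read `llm.ollama.url` from the settings database, normalize it, and fall back to the default when the setting is empty. A minimal sketch of that pattern; the `resolve_ollama_url` helper and `DEFAULT_OLLAMA_URL` constant are illustrative only, not names from the package:

```python
from typing import Optional

from local_deep_research.utilities.url_utils import normalize_url

DEFAULT_OLLAMA_URL = "http://localhost:11434"  # default used throughout this diff


def resolve_ollama_url(raw_setting: Optional[str]) -> str:
    """Return a usable base URL even when the stored setting has no scheme."""
    return normalize_url(raw_setting) if raw_setting else DEFAULT_OLLAMA_URL


print(resolve_ollama_url("localhost:11434"))  # -> http://localhost:11434
print(resolve_ollama_url(None))               # -> http://localhost:11434
```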

src/local_deep_research/config/search_config.py
@@ -26,7 +26,7 @@ def get_search(search_tool=None, llm_instance=None):
  """

  # Use specified tool or default from settings
- tool = search_tool or get_db_setting("search.tool", "auto")
+ tool = search_tool or get_db_setting("search.tool", "searxng")
  logger.info(f"Creating search engine with tool: {tool}")

  # Get LLM instance (use provided or get fresh one)

src/local_deep_research/defaults/default_settings.json
@@ -733,7 +733,7 @@
  "step": null,
  "type": "SEARCH",
  "ui_element": "select",
- "value": "auto",
+ "value": "searxng",
  "visible": true
  },
  "search.engine.web.arxiv.display_name": {
@@ -3029,16 +3029,29 @@
  },
  "search.engine.web.searxng.default_params.safe_search": {
  "category": "searxng",
- "description": "Setting for searxng.default_params.safe_search",
+ "description": "Configure the safe search level",
  "editable": true,
  "max_value": null,
  "min_value": null,
  "name": "Safe Search",
- "options": null,
+ "options": [
+ {
+ "label": "Off",
+ "value": "OFF"
+ },
+ {
+ "label": "Moderate",
+ "value": "MODERATE"
+ },
+ {
+ "label": "Strict",
+ "value": "STRICT"
+ }
+ ],
  "step": null,
  "type": "SEARCH",
- "ui_element": "checkbox",
- "value": 1,
+ "ui_element": "select",
+ "value": "OFF",
  "visible": true
  },
  "search.engine.web.searxng.full_search_class": {

src/local_deep_research/utilities/url_utils.py
@@ -0,0 +1,57 @@
+ """URL utility functions for the local deep research application."""
+
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ def normalize_url(raw_url: str) -> str:
+ """
+ Normalize a URL to ensure it has a proper scheme and format.
+
+ Args:
+ raw_url: The raw URL string to normalize
+
+ Returns:
+ A properly formatted URL string
+
+ Examples:
+ >>> normalize_url("localhost:11434")
+ 'http://localhost:11434'
+ >>> normalize_url("https://example.com:11434")
+ 'https://example.com:11434'
+ >>> normalize_url("http:example.com")
+ 'http://example.com'
+ """
+ if not raw_url:
+ raise ValueError("URL cannot be empty")
+
+ # Clean up the URL
+ raw_url = raw_url.strip()
+
+ # First check if the URL already has a proper scheme
+ if raw_url.startswith(("http://", "https://")):
+ return raw_url
+
+ # Handle case where URL is malformed like "http:hostname" (missing //)
+ if raw_url.startswith(("http:", "https:")) and not raw_url.startswith(
+ ("http://", "https://")
+ ):
+ scheme = raw_url.split(":", 1)[0]
+ rest = raw_url.split(":", 1)[1]
+ return f"{scheme}://{rest}"
+
+ # Handle URLs that start with //
+ if raw_url.startswith("//"):
+ # Remove the // and process
+ raw_url = raw_url[2:]
+
+ # At this point, we should have hostname:port or just hostname
+ # Determine if this is localhost or an external host
+ hostname = raw_url.split(":")[0].split("/")[0]
+ is_localhost = hostname in ("localhost", "127.0.0.1", "[::1]", "0.0.0.0")
+
+ # Use http for localhost, https for external hosts
+ scheme = "http" if is_localhost else "https"
+
+ return f"{scheme}://{raw_url}"
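
The new `normalize_url` helper is the core of this release: it repairs scheme-less or malformed Ollama URLs before they are handed to `requests` or LangChain. A short usage sketch; the expected results come straight from the docstring above and the tests added in `tests/test_url_utils.py`:

```python
from local_deep_research.utilities.url_utils import normalize_url

# A bare host:port gets a scheme; localhost defaults to http, external hosts to https.
assert normalize_url("localhost:11434") == "http://localhost:11434"
assert normalize_url("example.com:11434") == "https://example.com:11434"

# A malformed "http:host" (missing //) is repaired; well-formed URLs pass through unchanged.
assert normalize_url("http:localhost:11434") == "http://localhost:11434"
assert normalize_url("https://example.com:11434") == "https://example.com:11434"
```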

src/local_deep_research/web/routes/api_routes.py
@@ -1,10 +1,10 @@
  import json
  import logging
- import os

  import requests
  from flask import Blueprint, current_app, jsonify, request

+ from ...utilities.url_utils import normalize_url
  from ..models.database import get_db_connection
  from ..routes.research_routes import active_research, termination_flags
  from ..services.research_service import (
@@ -49,7 +49,7 @@ def api_start_research():
  research_settings = {
  "model_provider": "OLLAMA", # Default
  "model": "llama2", # Default
- "search_engine": "auto", # Default
+ "search_engine": "searxng", # Default
  }

  cursor.execute(
@@ -255,10 +255,14 @@ def check_ollama_status():
  {"running": True, "message": f"Using provider: {provider}, not Ollama"}
  )

- # Get Ollama API URL
- ollama_base_url = os.getenv(
- "OLLAMA_BASE_URL",
- llm_config.get("ollama_base_url", "http://localhost:11434"),
+ # Get Ollama API URL from LLM config
+ raw_ollama_base_url = llm_config.get(
+ "ollama_base_url", "http://localhost:11434"
+ )
+ ollama_base_url = (
+ normalize_url(raw_ollama_base_url)
+ if raw_ollama_base_url
+ else "http://localhost:11434"
  )

  logger.info(f"Checking Ollama status at: {ollama_base_url}")
@@ -380,9 +384,14 @@ def check_ollama_model():
  # Log which model we're checking for debugging
  logger.info(f"Checking availability of Ollama model: {model_name}")

- ollama_base_url = os.getenv(
- "OLLAMA_BASE_URL",
- llm_config.get("ollama_base_url", "http://localhost:11434"),
+ # Get Ollama API URL from LLM config
+ raw_ollama_base_url = llm_config.get(
+ "ollama_base_url", "http://localhost:11434"
+ )
+ ollama_base_url = (
+ normalize_url(raw_ollama_base_url)
+ if raw_ollama_base_url
+ else "http://localhost:11434"
  )

  # Check if the model is available

src/local_deep_research/web/routes/settings_routes.py
@@ -19,6 +19,7 @@ from flask_wtf.csrf import generate_csrf
  from sqlalchemy.orm import Session

  from ...utilities.db_utils import get_db_setting
+ from ...utilities.url_utils import normalize_url
  from ..database.models import Setting, SettingType
  from ..services.settings_service import (
  create_or_update_setting,
@@ -667,7 +668,15 @@ def api_get_available_models():
  try:
  current_app.logger.info("Attempting to connect to Ollama API")

- base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
+ raw_base_url = get_db_setting(
+ "llm.ollama.url", "http://localhost:11434"
+ )
+ base_url = (
+ normalize_url(raw_base_url)
+ if raw_base_url
+ else "http://localhost:11434"
+ )
+
  ollama_response = requests.get(f"{base_url}/api/tags", timeout=5)

  current_app.logger.debug(
@@ -1269,11 +1278,12 @@ def fix_corrupted_settings():
  def check_ollama_status():
  """Check if Ollama is running and available"""
  try:
- # Set a shorter timeout for the request
- base_url = os.getenv(
- "OLLAMA_BASE_URL",
- "http://localhost:11434",
+ # Get Ollama URL from settings
+ raw_base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
+ base_url = (
+ normalize_url(raw_base_url) if raw_base_url else "http://localhost:11434"
  )
+
  response = requests.get(f"{base_url}/api/version", timeout=2.0)

  if response.status_code == 200:

src/local_deep_research/web/services/research_service.py
@@ -530,7 +530,7 @@ def run_research_process(
  report_path = os.path.join(
  OUTPUT_DIR,
  f"quick_summary_{safe_query}_"
- f"{datetime.now().isoformat()}.md",
+ f"{int(datetime.now().timestamp())}.md",
  )

  # Send progress update for writing to file
@@ -643,7 +643,7 @@ def run_research_process(
  safe_query = safe_query.replace(" ", "_").lower()
  report_path = os.path.join(
  OUTPUT_DIR,
- f"detailed_report_{safe_query}_{datetime.now().isoformat()}.md",
+ f"detailed_report_{safe_query}_{int(datetime.now().timestamp())}.md",
  )

  with open(report_path, "w", encoding="utf-8") as f:
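
Both report filenames switch from `datetime.now().isoformat()` to `int(datetime.now().timestamp())`. An ISO-8601 timestamp contains `:` characters, which are not valid in filenames on Windows, so an integer Unix timestamp is the more portable choice. A quick illustration (not code from the package):

```python
from datetime import datetime

now = datetime(2025, 5, 12, 14, 30, 0)

# Old form: contains ':' characters, which Windows rejects in filenames.
print(f"quick_summary_example_{now.isoformat()}.md")       # quick_summary_example_2025-05-12T14:30:00.md
# New form: a plain integer, portable across filesystems.
print(f"quick_summary_example_{int(now.timestamp())}.md")  # e.g. quick_summary_example_1747060200.md
```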

src/local_deep_research/web_search_engines/engines/full_search.py
@@ -25,7 +25,7 @@ class FullSearchResults:
  max_results: int = 10,
  region: str = "wt-wt",
  time: str = "y",
- safesearch: str = "Moderate",
+ safesearch: str | int = "Moderate",
  ):
  self.llm = llm
  self.output_format = output_format

src/local_deep_research/web_search_engines/engines/search_engine_local.py
@@ -32,6 +32,7 @@ from langchain_text_splitters import RecursiveCharacterTextSplitter

  from ...config import search_config
  from ...utilities.db_utils import get_db_setting
+ from ...utilities.url_utils import normalize_url
  from ..search_engine_base import BaseSearchEngine

  # Setup logging
@@ -169,12 +170,20 @@ class LocalEmbeddingManager:
  if self.embedding_model_type == "ollama":
  # Use Ollama for embeddings
  if not self.ollama_base_url:
- self.ollama_base_url = get_db_setting(
+ raw_ollama_base_url = get_db_setting(
  "llm.ollama.url", "http://localhost:11434"
  )
+ self.ollama_base_url = (
+ normalize_url(raw_ollama_base_url)
+ if raw_ollama_base_url
+ else "http://localhost:11434"
+ )
+ else:
+ # Ensure scheme is present if ollama_base_url was passed in constructor
+ self.ollama_base_url = normalize_url(self.ollama_base_url)

  logger.info(
- f"Initializing Ollama embeddings with model {self.embedding_model}"
+ f"Initializing Ollama embeddings with model {self.embedding_model} and base_url {self.ollama_base_url}"
  )
  return OllamaEmbeddings(
  model=self.embedding_model, base_url=self.ollama_base_url
@@ -563,7 +572,7 @@ class LocalEmbeddingManager:
  str(index_path),
  self.embeddings,
  allow_dangerous_deserialization=True,
- nomalize_L2=True,
+ normalize_L2=True,
  )
  except Exception as e:
  logger.error(f"Error loading index for {folder_path}: {e}")

src/local_deep_research/web_search_engines/engines/search_engine_searxng.py
@@ -1,3 +1,4 @@
+ import enum
  import logging
  import os
  import time
@@ -15,6 +16,17 @@ logging.basicConfig(level=logging.INFO)
  logger = logging.getLogger(__name__)


+ @enum.unique
+ class SafeSearchSetting(enum.IntEnum):
+ """
+ Acceptable settings for safe search.
+ """
+
+ OFF = 0
+ MODERATE = 1
+ STRICT = 2
+
+
  class SearXNGSearchEngine(BaseSearchEngine):
  """
  SearXNG search engine implementation that requires an instance URL provided via
@@ -29,7 +41,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
  categories: Optional[List[str]] = None,
  engines: Optional[List[str]] = None,
  language: str = "en",
- safe_search: int = 1,
+ safe_search: str = SafeSearchSetting.OFF.name,
  time_range: Optional[str] = None,
  delay_between_requests: float = 0.0,
  llm: Optional[BaseLLM] = None,
@@ -89,7 +101,14 @@ class SearXNGSearchEngine(BaseSearchEngine):
  self.categories = categories or ["general"]
  self.engines = engines
  self.language = language
- self.safe_search = safe_search
+ try:
+ self.safe_search = SafeSearchSetting[safe_search]
+ except ValueError:
+ logger.error(
+ "'{}' is not a valid safe search setting. Disabling safe search",
+ safe_search,
+ )
+ self.safe_search = SafeSearchSetting.OFF
  self.time_range = time_range

  self.delay_between_requests = float(
@@ -114,11 +133,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
  max_results=max_results,
  region="wt-wt",
  time="y",
- safesearch=(
- "Moderate"
- if safe_search == 1
- else "Off" if safe_search == 0 else "Strict"
- ),
+ safesearch=self.safe_search.value,
  )

  self.last_request_time = 0
@@ -177,7 +192,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
  "language": self.language,
  "format": "html", # Use HTML format instead of JSON
  "pageno": 1,
- "safesearch": self.safe_search,
+ "safesearch": self.safe_search.value,
  "count": self.max_results,
  }

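The net effect of the SearXNG changes: the settings UI now stores safe search as one of the strings `OFF`, `MODERATE`, or `STRICT`, the engine resolves that name through the new `SafeSearchSetting` enum, and the integer value is what gets sent to SearXNG. A standalone sketch of that mapping (the `choice`, `level`, and `params` names are illustrative):

```python
import enum


@enum.unique
class SafeSearchSetting(enum.IntEnum):
    """Mirror of the enum added in search_engine_searxng.py."""

    OFF = 0
    MODERATE = 1
    STRICT = 2


choice = "MODERATE"                    # string stored by the settings UI
level = SafeSearchSetting[choice]      # lookup by name, as in the new __init__
params = {"safesearch": level.value}   # integer passed in the SearXNG request
print(params)                          # {'safesearch': 1}
```

Note that a failed name lookup such as `SafeSearchSetting["BOGUS"]` raises `KeyError` rather than `ValueError`, so the `except ValueError` guard in the new constructor will not catch an unrecognized setting value.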

tests/test_url_utils.py
@@ -0,0 +1,55 @@
+ """Test URL utility functions."""
+
+ import pytest
+
+ from local_deep_research.utilities.url_utils import normalize_url
+
+
+ class TestNormalizeUrl:
+ """Test cases for the normalize_url function."""
+
+ def test_localhost_without_scheme(self):
+ """Test that localhost addresses get http:// prefix."""
+ assert normalize_url("localhost:11434") == "http://localhost:11434"
+ assert normalize_url("127.0.0.1:11434") == "http://127.0.0.1:11434"
+ assert normalize_url("[::1]:11434") == "http://[::1]:11434"
+ assert normalize_url("0.0.0.0:11434") == "http://0.0.0.0:11434"
+
+ def test_external_host_without_scheme(self):
+ """Test that external hosts get https:// prefix."""
+ assert normalize_url("example.com:11434") == "https://example.com:11434"
+ assert normalize_url("api.example.com") == "https://api.example.com"
+
+ def test_malformed_url_with_scheme(self):
+ """Test correction of malformed URLs like 'http:hostname'."""
+ assert normalize_url("http:localhost:11434") == "http://localhost:11434"
+ assert normalize_url("https:example.com:11434") == "https://example.com:11434"
+
+ def test_well_formed_urls(self):
+ """Test that well-formed URLs are unchanged."""
+ assert normalize_url("http://localhost:11434") == "http://localhost:11434"
+ assert normalize_url("https://example.com:11434") == "https://example.com:11434"
+ assert (
+ normalize_url("http://192.168.1.100:11434") == "http://192.168.1.100:11434"
+ )
+
+ def test_urls_with_double_slash_prefix(self):
+ """Test URLs that start with //."""
+ assert normalize_url("//localhost:11434") == "http://localhost:11434"
+ assert normalize_url("//example.com:11434") == "https://example.com:11434"
+
+ def test_empty_or_none_url(self):
+ """Test handling of empty or None URLs."""
+ with pytest.raises(ValueError):
+ normalize_url("")
+ with pytest.raises(ValueError):
+ normalize_url(None)
+
+ def test_url_with_path(self):
+ """Test URLs with paths."""
+ assert normalize_url("localhost:11434/api") == "http://localhost:11434/api"
+ assert normalize_url("example.com/api/v1") == "https://example.com/api/v1"
+
+
+ if __name__ == "__main__":
+ pytest.main([__file__, "-v"])