local-deep-research 0.3.8__py3-none-any.whl → 0.3.10__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
local_deep_research/__version__.py
@@ -1 +1 @@
- __version__ = "0.3.8"
+ __version__ = "0.3.10"
local_deep_research-0.3.8.dist-info/METADATA → local_deep_research-0.3.10.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: local-deep-research
- Version: 0.3.8
+ Version: 0.3.10
  Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
  Author-Email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
  License: MIT License
@@ -113,7 +113,7 @@ Local Deep Research combines the power of large language models with intelligent
  docker pull searxng/searxng
  docker run -d -p 8080:8080 --name searxng searxng/searxng

- # Step 2: Pull and run Local Deep Research
+ # Step 2: Pull and run Local Deep Research (Please build your own docker on ARM)
  docker pull localdeepresearch/local-deep-research
  docker run -d -p 5000:5000 --name local-deep-research localdeepresearch/local-deep-research

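The ARM note added above tells ARM users to build the image themselves but does not show the commands. A minimal sketch, assuming the source lives in the LearningCircuit GitHub repository implied by the author metadata and that its Dockerfile sits at the repository root (both assumptions, not confirmed by this diff):

    # Hypothetical local build for ARM hosts; adjust the clone URL and Dockerfile path if they differ.
    git clone https://github.com/LearningCircuit/local-deep-research.git
    cd local-deep-research
    docker build -t local-deep-research .
    docker run -d -p 5000:5000 --name local-deep-research local-deep-research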
local_deep_research-0.3.8.dist-info/RECORD → local_deep_research-0.3.10.dist-info/RECORD
@@ -1,10 +1,9 @@
- local_deep_research-0.3.8.dist-info/METADATA,sha256=JbBUBL45YxHNBx5eXFfTgMWmNhjo7MmCMTFgZdgFLlo,16628
- local_deep_research-0.3.8.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
- local_deep_research-0.3.8.dist-info/entry_points.txt,sha256=GcXS501Rjh-P80S8db7hnrQ23mS_Jg27PwpVQVO77as,113
- local_deep_research-0.3.8.dist-info/licenses/LICENSE,sha256=Qg2CaTdu6SWnSqk1_JtgBPp_Da-LdqJDhT1Vt1MUc5s,1072
+ local_deep_research-0.3.10.dist-info/METADATA,sha256=qefTHFrq0UMfrQ3lsj77XlOvdLYM8_Exm1boN_IJAOk,16667
+ local_deep_research-0.3.10.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+ local_deep_research-0.3.10.dist-info/entry_points.txt,sha256=GcXS501Rjh-P80S8db7hnrQ23mS_Jg27PwpVQVO77as,113
+ local_deep_research-0.3.10.dist-info/licenses/LICENSE,sha256=Qg2CaTdu6SWnSqk1_JtgBPp_Da-LdqJDhT1Vt1MUc5s,1072
  local_deep_research/__init__.py,sha256=9wV3oonZMEHsE_JhyZU9P0hW2Uwv47zotGlbAB_gQiA,885
- local_deep_research/__main__.py,sha256=LIxK5iS6aLAKMFBDpUS3V-jDcxchqi3eSUsI2jAZUXk,371
- local_deep_research/__version__.py,sha256=7dTW0A5-FkrEuNOotvR8oW59M2lvIwYouVqfJzvXpKk,22
+ local_deep_research/__version__.py,sha256=h9TycTJK2pK49s87IMbNRq4lTqRt3xctcJl2jxCe3sU,23
  local_deep_research/advanced_search_system/__init__.py,sha256=sGusMj4eFIrhXR6QbOM16UDKB6aI-iS4IFivKWpMlh0,234
  local_deep_research/advanced_search_system/filters/__init__.py,sha256=2dXrV4skcVHI2Lb3BSL2Ajq0rnLeSw7kc1MbIynMxa4,190
  local_deep_research/advanced_search_system/filters/base_filter.py,sha256=dFNQ7U2dj4bf3voT73YhcG-w9eW-BTlc4F9kstFcETY,969
@@ -41,7 +40,6 @@ local_deep_research/config/search_config.py,sha256=ruryPSS4Wy9-xi_02c-98KLKaELeL
  local_deep_research/defaults/.env.template,sha256=_eVCy4d_XwpGXy8n50CG3wH9xx2oqJCFKS7IbqgInDk,491
  local_deep_research/defaults/__init__.py,sha256=C_0t0uZmtrVB4rM9NM9Wx8PJU5kFcT-qOHvws5W2iOg,1352
  local_deep_research/defaults/default_settings.json,sha256=OcRS16WAP4zKvU0UAbXlvBcitt0wv_Z7hq93x1OZBdA,120559
- local_deep_research/main.py,sha256=umGmaQmW7bpx27wUAgSNjNr4oSHV6mDX5hoyfb22HEY,7033
  local_deep_research/migrate_db.py,sha256=S1h6Bv0OJdRW4BaH7MIMrUXBRV_yqgH2T6LVOZKTQjI,4634
  local_deep_research/report_generator.py,sha256=-G3KDEbsuU3PdxDfuo5v28DIX7RE1yJCCBU2KgRbNzI,9084
  local_deep_research/search_system.py,sha256=dq9US9zoB7TSiMorsrFFrSHlR6MSqE0IP3NBKB3fP8U,7830
@@ -133,4 +131,4 @@ local_deep_research/web_search_engines/engines/search_engine_wikipedia.py,sha256
  local_deep_research/web_search_engines/search_engine_base.py,sha256=PLU_sAWhWKTOQWcv32GINuhLdIwB0sEQy-pp9oG9Ggo,9835
  local_deep_research/web_search_engines/search_engine_factory.py,sha256=DghAkQvLKRJYl5xb9AUjUv7ydAQ4rPi-TvzrmqdyGxE,10890
  local_deep_research/web_search_engines/search_engines_config.py,sha256=UAE6TfxFXrt-RvSfGQ_FRsOGGrsSs8VI3n1i-0Lfo2s,4929
- local_deep_research-0.3.8.dist-info/RECORD,,
+ local_deep_research-0.3.10.dist-info/RECORD,,
local_deep_research/__main__.py (removed)
@@ -1,16 +0,0 @@
- """
- Main entry point when running the package with `python -m local_deep_research`.
- This avoids circular imports by directly importing the main function after
- the package is fully loaded.
- """
-
-
- def main():
-     # Only import main after the whole package has been initialized
-     from .main import main as main_func
-
-     main_func()
-
-
- if __name__ == "__main__":
-     main()
local_deep_research/main.py (removed)
@@ -1,215 +0,0 @@
- import logging
- import sys
- from typing import Dict
-
- from . import get_advanced_search_system, get_report_generator
- from .config.config_files import settings
- from .utilities.db_utils import get_db_setting
-
-
- def print_report(report: Dict):
-     """Print and save the report in a readable format"""
-
-     # Print to console in readable format
-     print("\n=== GENERATED REPORT ===\n")
-
-     # Print content
-     print(report["content"])
-
-     # Save to file in markdown format
-     with open("report.md", "w", encoding="utf-8") as markdown_file:
-         # Write content
-         markdown_file.write(report["content"])
-
-         # Write metadata at the end of the file
-         markdown_file.write("\n\n---\n\n")
-         markdown_file.write("## Report Metadata\n")
-
-         markdown_file.write(f"- Query: {report['metadata']['query']}\n")
-
-     print("\nReport has been saved to report.md")
-
-
- # Create the report generator lazily to avoid circular imports
- def get_report_generator_instance():
-     return get_report_generator()
-
-
- # report_generator = IntegratedReportGenerator()
- report_generator = None # Will be initialized when needed
-
-
- def main():
-     import logging
-
-     from .utilities.setup_utils import setup_user_directories
-
-     # Configure logging
-     logging.basicConfig(level=logging.INFO)
-     logger = logging.getLogger(__name__)
-     search_iterations = get_db_setting("search.iterations", settings.search.iterations)
-     questions_per_iteration = get_db_setting(
-         "search.questions_per_iteration", settings.search.questions_per_iteration
-     )
-     logger.info(
-         f"Starting with settings: iterations={search_iterations}, "
-         f"questions_per_iteration={questions_per_iteration}"
-     )
-
-     # Explicitly run setup
-     logger.info("Initializing configuration...")
-     setup_user_directories()
-
-     system = get_advanced_search_system()
-
-     print("Welcome to the Advanced Research System")
-     print("Type 'quit' to exit")
-
-     while True:
-         print("\nSelect output type:")
-         print("1) Quick Summary (Generated in a few minutes)")
-         print(
-             "2) Detailed Research Report (Recommended for deeper analysis - may take several hours)"
-         )
-         choice = input("Enter number (1 or 2): ").strip()
-
-         while choice not in ["1", "2"]:
-             print("\nInvalid input. Please enter 1 or 2:")
-             print("1) Quick Summary (Generated in a few minutes)")
-             print(
-                 "2) Detailed Research Report (Recommended for deeper analysis - may take several hours)"
-             )
-             choice = input("Enter number (1 or 2): ").strip()
-
-         query = input("\nEnter your research query: ").strip()
-
-         if query.lower() == "quit":
-             break
-
-         # System will automatically use updated configuration
-         # through the automatic reloading in get_llm() and get_search()
-
-         if choice == "1":
-             print("\nResearching... This may take a few minutes.\n")
-         else:
-             print(
-                 "\nGenerating detailed report... This may take several hours. Please be patient as this enables deeper analysis.\n"
-             )
-
-         results = system.analyze_topic(query)
-         if results:
-             if choice == "1":
-                 # Quick Summary
-                 print("\n=== QUICK SUMMARY ===")
-                 if results["findings"] and len(results["findings"]) > 0:
-                     initial_analysis = [
-                         finding["content"] for finding in results["findings"]
-                     ]
-                     print(initial_analysis)
-
-             else:
-                 # Full Report
-                 # Initialize report_generator if not already done
-                 global report_generator
-                 if report_generator is None:
-                     report_generator = get_report_generator()
-
-                 final_report = report_generator.generate_report(results, query)
-                 print("\n=== RESEARCH REPORT ===")
-                 print_report(final_report)
-
-             print("\n=== RESEARCH METRICS ===")
-             print(f"Search Iterations: {results['iterations']}")
-
-         else:
-             print("Research failed. Please try again.")
-
-
- # Add command for database migration
- if __name__ == "__main__":
-     import argparse
-
-     parser = argparse.ArgumentParser(description="Local Deep Research")
-     parser.add_argument("--web", action="store_true", help="Start the web server")
-     parser.add_argument(
-         "--migrate-db", action="store_true", help="Migrate legacy databases to ldr.db"
-     )
-     parser.add_argument("--debug", action="store_true", help="Enable debug mode")
-     parser.add_argument(
-         "--test-migration",
-         action="store_true",
-         help="Test migration by checking database contents",
-     )
-     parser.add_argument(
-         "--schema-upgrade",
-         action="store_true",
-         help="Run schema upgrades on the database (e.g., remove redundant tables)",
-     )
-
-     args = parser.parse_args()
-
-     if args.debug:
-         logging.basicConfig(level=logging.DEBUG)
-     else:
-         logging.basicConfig(level=logging.INFO)
-
-     if args.migrate_db:
-         try:
-             # First ensure data directory exists
-             from src.local_deep_research.setup_data_dir import setup_data_dir
-
-             setup_data_dir()
-
-             # Then run the migration
-             from src.local_deep_research.web.database.migrate_to_ldr_db import (
-                 migrate_to_ldr_db,
-             )
-
-             print("Starting database migration...")
-             success = migrate_to_ldr_db()
-             if success:
-                 print("Database migration completed successfully")
-                 sys.exit(0)
-             else:
-                 print("Database migration failed")
-                 sys.exit(1)
-         except Exception as e:
-             print(f"Error running database migration: {e}")
-             sys.exit(1)
-
-     if args.test_migration:
-         try:
-             from src.local_deep_research.test_migration import main as test_main
-
-             sys.exit(test_main())
-         except Exception as e:
-             print(f"Error running migration test: {e}")
-             sys.exit(1)
-
-     if args.schema_upgrade:
-         try:
-             from src.local_deep_research.web.database.schema_upgrade import (
-                 run_schema_upgrades,
-             )
-
-             print("Running database schema upgrades...")
-             success = run_schema_upgrades()
-             if success:
-                 print("Schema upgrades completed successfully")
-                 sys.exit(0)
-             else:
-                 print("Schema upgrades failed")
-                 sys.exit(1)
-         except Exception as e:
-             print(f"Error running schema upgrades: {e}")
-             sys.exit(1)
-
-     if args.web:
-         from src.local_deep_research.web.app import main as web_main
-
-         web_main()
-     else:
-         # Default to web if no command specified
-         from src.local_deep_research.web.app import main as web_main
-
-         web_main()
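The removed main.py above carried the --web, --migrate-db, --test-migration, and --schema-upgrade flags, so they are no longer reachable through this module in 0.3.10. A minimal sketch of starting the web server directly, assuming local_deep_research.web.app still exposes the main() function that the removed code imported (an assumption; the src. prefix in those imports also suggests they targeted a source checkout rather than the installed wheel):

    # Hypothetical: launch the web UI without the removed CLI wrapper; verify the module path against 0.3.10.
    python -c "from local_deep_research.web.app import main; main()"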