local_deep_research-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. local_deep_research/__init__.py +24 -0
  2. local_deep_research/citation_handler.py +113 -0
  3. local_deep_research/config.py +166 -0
  4. local_deep_research/defaults/__init__.py +44 -0
  5. local_deep_research/defaults/llm_config.py +269 -0
  6. local_deep_research/defaults/local_collections.toml +47 -0
  7. local_deep_research/defaults/main.toml +57 -0
  8. local_deep_research/defaults/search_engines.toml +244 -0
  9. local_deep_research/local_collections.py +141 -0
  10. local_deep_research/main.py +113 -0
  11. local_deep_research/report_generator.py +206 -0
  12. local_deep_research/search_system.py +241 -0
  13. local_deep_research/utilties/__init__.py +0 -0
  14. local_deep_research/utilties/enums.py +9 -0
  15. local_deep_research/utilties/llm_utils.py +116 -0
  16. local_deep_research/utilties/search_utilities.py +115 -0
  17. local_deep_research/utilties/setup_utils.py +6 -0
  18. local_deep_research/web/__init__.py +2 -0
  19. local_deep_research/web/app.py +1209 -0
  20. local_deep_research/web/static/css/styles.css +1008 -0
  21. local_deep_research/web/static/js/app.js +2078 -0
  22. local_deep_research/web/templates/api_keys_config.html +82 -0
  23. local_deep_research/web/templates/collections_config.html +90 -0
  24. local_deep_research/web/templates/index.html +312 -0
  25. local_deep_research/web/templates/llm_config.html +120 -0
  26. local_deep_research/web/templates/main_config.html +89 -0
  27. local_deep_research/web/templates/search_engines_config.html +154 -0
  28. local_deep_research/web/templates/settings.html +519 -0
  29. local_deep_research/web/templates/settings_dashboard.html +207 -0
  30. local_deep_research/web_search_engines/__init__.py +0 -0
  31. local_deep_research/web_search_engines/engines/__init__.py +0 -0
  32. local_deep_research/web_search_engines/engines/full_search.py +128 -0
  33. local_deep_research/web_search_engines/engines/meta_search_engine.py +274 -0
  34. local_deep_research/web_search_engines/engines/search_engine_arxiv.py +367 -0
  35. local_deep_research/web_search_engines/engines/search_engine_brave.py +245 -0
  36. local_deep_research/web_search_engines/engines/search_engine_ddg.py +123 -0
  37. local_deep_research/web_search_engines/engines/search_engine_github.py +663 -0
  38. local_deep_research/web_search_engines/engines/search_engine_google_pse.py +283 -0
  39. local_deep_research/web_search_engines/engines/search_engine_guardian.py +337 -0
  40. local_deep_research/web_search_engines/engines/search_engine_local.py +901 -0
  41. local_deep_research/web_search_engines/engines/search_engine_local_all.py +153 -0
  42. local_deep_research/web_search_engines/engines/search_engine_medrxiv.py +623 -0
  43. local_deep_research/web_search_engines/engines/search_engine_pubmed.py +992 -0
  44. local_deep_research/web_search_engines/engines/search_engine_serpapi.py +230 -0
  45. local_deep_research/web_search_engines/engines/search_engine_wayback.py +474 -0
  46. local_deep_research/web_search_engines/engines/search_engine_wikipedia.py +242 -0
  47. local_deep_research/web_search_engines/full_search.py +254 -0
  48. local_deep_research/web_search_engines/search_engine_base.py +197 -0
  49. local_deep_research/web_search_engines/search_engine_factory.py +233 -0
  50. local_deep_research/web_search_engines/search_engines_config.py +54 -0
  51. local_deep_research-0.1.0.dist-info/LICENSE +21 -0
  52. local_deep_research-0.1.0.dist-info/METADATA +328 -0
  53. local_deep_research-0.1.0.dist-info/RECORD +56 -0
  54. local_deep_research-0.1.0.dist-info/WHEEL +5 -0
  55. local_deep_research-0.1.0.dist-info/entry_points.txt +3 -0
  56. local_deep_research-0.1.0.dist-info/top_level.txt +1 -0
local_deep_research/defaults/main.toml
@@ -0,0 +1,57 @@
+
+ # Main configuration for Local Deep Research
+
+ [general]
+ # Directory for research outputs (relative to user data directory)
+ output_dir = "research_outputs"
+
+ # Knowledge accumulation approach (NONE, QUESTION, or ITERATION)
+ knowledge_accumulation = "ITERATION"
+
+ # Maximum context size for knowledge accumulation
+ knowledge_accumulation_context_limit = 2000000
+
+ # Enable fact checking (experimental, works better with large LLMs)
+ enable_fact_checking = false
+
+ [search]
+ # Search tool to use (auto, wikipedia, arxiv, duckduckgo, serp, google_pse, etc.)
+ # "auto" intelligently selects based on query content (recommended)
+ # "local_all" searches only local document collections
+ tool = "auto"
+
+ # Number of research cycles
+ iterations = 3
+
+ # Questions generated per cycle
+ questions_per_iteration = 3
+
+ # Searches per report section
+ searches_per_section = 3
+
+ # Results per search query
+ max_results = 50
+
+ # Results after relevance filtering
+ max_filtered_results = 5
+
+ # Search region
+ region = "us"
+
+ # Time period (d=day, w=week, m=month, y=year)
+ time_period = "y"
+
+ # Enable safe search
+ safe_search = true
+
+ # Search language
+ search_language = "English"
+
+ # Return only snippets, not full content (faster but less detailed)
+ snippets_only = false
+
+ # Skip relevance filtering (return all results)
+ skip_relevance_filter = false
+
+ # Check URL quality
+ quality_check_urls = true
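
Note: main.py (diffed below) reads these values through the package's settings object, e.g. settings.search.iterations. As a standalone illustration only, here is a minimal sketch of parsing this file with Python's stdlib tomllib; loading from a local file path is an assumption for the sketch, not how the package does it:

import tomllib  # standard library since Python 3.11

# Parse a local copy of main.toml and read back the defaults shown above.
with open("main.toml", "rb") as f:
    cfg = tomllib.load(f)

print(cfg["general"]["output_dir"])   # research_outputs
print(cfg["search"]["tool"])          # auto
print(cfg["search"]["iterations"])    # 3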
local_deep_research/defaults/search_engines.toml
@@ -0,0 +1,244 @@
+ # Search Engines Configuration for Local Deep Research
+ # This file defines all available search engines and their properties
+
+ [wikipedia]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_wikipedia"
+ class_name = "WikipediaSearchEngine"
+ requires_api_key = false
+ reliability = 0.95
+ strengths = [
+     "factual information", "general knowledge", "definitions",
+     "historical facts", "biographies", "overview information"
+ ]
+ weaknesses = ["recent events", "specialized academic topics", "product comparisons"]
+
+ [wikipedia.default_params]
+ max_results = 20
+ include_content = true
+
+ [arxiv]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_arxiv"
+ class_name = "ArXivSearchEngine"
+ requires_api_key = false
+ reliability = 0.9
+ strengths = [
+     "scientific papers", "academic research", "physics", "computer science",
+     "mathematics", "statistics", "machine learning", "preprints"
+ ]
+ weaknesses = ["non-academic topics", "consumer products", "news", "general information"]
+
+ [arxiv.default_params]
+ max_results = 20
+ sort_by = "relevance"
+ sort_order = "descending"
+
+ [pubmed]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_pubmed"
+ class_name = "PubMedSearchEngine"
+ requires_api_key = false
+ api_key_env = "NCBI_API_KEY"
+ reliability = 0.95
+ strengths = [
+     "biomedical literature", "medical research", "clinical studies",
+     "life sciences", "health information", "scientific papers"
+ ]
+ weaknesses = [
+     "non-medical topics", "very recent papers may be missing",
+     "limited to published research"
+ ]
+ requires_llm = true
+
+ [pubmed.default_params]
+ max_results = 20
+ get_abstracts = true
+ get_full_text = false
+ full_text_limit = 3
+ days_limit = 0
+ optimize_queries = true
+
+ [github]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_github"
+ class_name = "GitHubSearchEngine"
+ requires_api_key = false
+ reliability = 0.99
+ strengths = [
+     "code repositories", "software documentation", "open source projects",
+     "programming issues", "developer information", "technical documentation"
+ ]
+ weaknesses = ["non-technical content", "content outside GitHub", "rate limits without API key"]
+ supports_full_search = true
+
+ [github.default_params]
+ max_results = 15
+ search_type = "repositories"
+ include_readme = true
+ include_issues = false
+
+ [serpapi]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_serpapi"
+ class_name = "SerpAPISearchEngine"
+ requires_api_key = true
+ api_key_env = "SERP_API_KEY"
+ reliability = 0.6
+ strengths = [
+     "comprehensive web search", "product information", "reviews",
+     "recent content", "news", "broad coverage"
+ ]
+ weaknesses = ["requires API key with usage limits", "not specialized for academic content"]
+ supports_full_search = true
+ full_search_module = "local_deep_research.web_search_engines.engines.full_serp_search_results_old"
+ full_search_class = "FullSerpAPISearchResults"
+
+ [serpapi.default_params]
+ region = "us"
+ time_period = "y"
+ safe_search = true
+ search_language = "English"
+
+ [google_pse]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_google_pse"
+ class_name = "GooglePSESearchEngine"
+ requires_api_key = true
+ api_key_env = "GOOGLE_PSE_API_KEY"
+ reliability = 0.9
+ strengths = [
+     "custom search scope", "high-quality results", "domain-specific search",
+     "configurable search experience", "control over search index"
+ ]
+ weaknesses = [
+     "requires API key with usage limits",
+     "limited to 10,000 queries/day on free tier",
+     "requires search engine configuration in Google Control Panel"
+ ]
+ supports_full_search = true
+ full_search_module = "local_deep_research.web_search_engines.engines.full_search"
+ full_search_class = "FullSearchResults"
+
+ [google_pse.default_params]
+ region = "us"
+ safe_search = true
+ search_language = "English"
+
+ [brave]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_brave"
+ class_name = "BraveSearchEngine"
+ requires_api_key = true
+ api_key_env = "BRAVE_API_KEY"
+ reliability = 0.7
+ strengths = [
+     "privacy-focused web search", "product information", "reviews",
+     "recent content", "news", "broad coverage"
+ ]
+ weaknesses = ["requires API key with usage limits", "smaller index than Google"]
+ supports_full_search = true
+ full_search_module = "local_deep_research.web_search_engines.engines.full_search"
+ full_search_class = "FullSearchResults"
+
+ [brave.default_params]
+ region = "US"
+ time_period = "y"
+ safe_search = true
+ search_language = "English"
+
+ [wayback]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_wayback"
+ class_name = "WaybackSearchEngine"
+ requires_api_key = false
+ reliability = 0.5
+ strengths = [
+     "historical web content", "archived websites", "content verification",
+     "deleted or changed web pages", "website evolution tracking"
+ ]
+ weaknesses = [
+     "limited to previously archived content", "may miss recent changes",
+     "archiving quality varies"
+ ]
+ supports_full_search = true
+
+ [wayback.default_params]
+ max_results = 15
+ max_snapshots_per_url = 3
+ closest_only = false
+ language = "English"
+
+ [auto]
+ module_path = "local_deep_research.web_search_engines.engines.meta_search_engine"
+ class_name = "MetaSearchEngine"
+ requires_api_key = false
+ reliability = 0.85
+ strengths = [
+     "intelligent engine selection", "adaptable to query type",
+     "fallback capabilities"
+ ]
+ weaknesses = ["slightly slower due to LLM analysis"]
+ requires_llm = true
+
+ [auto.default_params]
+ use_api_key_services = true
+ max_engines_to_try = 3
+
+ [local_all]
+ module_path = "local_deep_research.web_search_engines.engines.search_engine_local_all"
+ class_name = "LocalAllSearchEngine"
+ requires_api_key = false
+ reliability = 0.85
+ strengths = ["searches all local collections", "personal documents", "offline access"]
+ weaknesses = ["may return too many results", "requires indexing"]
+ requires_llm = true
+
+ # Default search engine to use if none specified
+ DEFAULT_SEARCH_ENGINE = "wikipedia"
+
+ # Additional search engines can be added below
+ # Uncomment and modify these templates as needed
+
+ # [duckduckgo]
+ # module_path = "local_deep_research.web_search_engines.engines.search_engine_ddg"
+ # class_name = "DuckDuckGoSearchEngine"
+ # requires_api_key = false
+ # reliability = 0.4
+ # strengths = [
+ #     "web search", "product information", "reviews", "recent information",
+ #     "news", "general queries", "broad coverage"
+ # ]
+ # weaknesses = ["inconsistent due to rate limits", "not specialized for academic content"]
+ # supports_full_search = true
+ # full_search_module = "local_deep_research.web_search_engines.engines.full_search"
+ # full_search_class = "FullSearchResults"
+ #
+ # [duckduckgo.default_params]
+ # region = "us"
+ # safe_search = true
+
+ # [guardian]
+ # module_path = "local_deep_research.web_search_engines.engines.search_engine_guardian"
+ # class_name = "GuardianSearchEngine"
+ # requires_api_key = true
+ # api_key_env = "GUARDIAN_API_KEY"
+ # reliability = 0.5
+ # strengths = [
+ #     "news articles", "current events", "opinion pieces", "journalism",
+ #     "UK and global news", "political analysis"
+ # ]
+ # weaknesses = ["primarily focused on news", "limited historical content pre-1999"]
+ #
+ # [guardian.default_params]
+ # order_by = "relevance"
+
+ # [medrxiv]
+ # module_path = "local_deep_research.web_search_engines.engines.search_engine_medrxiv"
+ # class_name = "MedRxivSearchEngine"
+ # requires_api_key = false
+ # reliability = 0.85
+ # strengths = [
+ #     "medical preprints", "health research", "covid-19 research",
+ #     "clinical studies", "medical sciences", "preliminary results"
+ # ]
+ # weaknesses = ["not peer-reviewed", "preliminary findings", "limited to medical research"]
+ # requires_llm = true
+ #
+ # [medrxiv.default_params]
+ # sort_by = "relevance_score"
+ # sort_order = "desc"
+ # include_full_text = false
+ # optimize_queries = true
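
Note: each entry above pairs a module_path with a class_name, and the package ships a search_engine_factory.py (file 49), which suggests engines are imported dynamically from this config. A hypothetical sketch of that lookup, assuming the engine constructors accept default_params as keyword arguments; the actual factory code may differ:

import importlib
from typing import Any, Dict

def load_engine(entry: Dict[str, Any], **overrides: Any) -> Any:
    # Import the configured module and fetch the engine class by name.
    module = importlib.import_module(entry["module_path"])
    engine_cls = getattr(module, entry["class_name"])
    # Combine config defaults with caller overrides (overrides win).
    params = {**entry.get("default_params", {}), **overrides}
    return engine_cls(**params)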
local_deep_research/local_collections.py
@@ -0,0 +1,141 @@
+ # local_collections.py
+ """
+ Configuration file for local document collections.
+ Each collection functions as an independent search engine.
+ """
+
+ import os
+ from typing import Dict, Any
+
+ # Registry of local document collections
+ # Each collection appears as a separate search engine in the main configuration
+ LOCAL_COLLECTIONS = {
+     # Project Documents Collection
+     "project_docs": {
+         "name": "Project Documents",
+         "description": "Project documentation and specifications",
+         "paths": [os.path.abspath("./local_search_files/project_documents")],
+         "enabled": True,
+         "embedding_model": "all-MiniLM-L6-v2",
+         "embedding_device": "cpu",
+         "embedding_model_type": "sentence_transformers",
+         "max_results": 20,
+         "max_filtered_results": 5,
+         "chunk_size": 1000,
+         "chunk_overlap": 200,
+         "cache_dir": ".cache/local_search/project_docs"
+     },
+
+     # Research Papers Collection
+     "research_papers": {
+         "name": "Research Papers",
+         "description": "Academic research papers and articles",
+         "paths": [os.path.abspath("local_search_files/research_papers")],
+         "enabled": True,
+         "embedding_model": "all-MiniLM-L6-v2",
+         "embedding_device": "cpu",
+         "embedding_model_type": "sentence_transformers",
+         "max_results": 20,
+         "max_filtered_results": 5,
+         "chunk_size": 800,  # Smaller chunks for academic content
+         "chunk_overlap": 150,
+         "cache_dir": ".cache/local_search/research_papers"
+     },
+
+     # Personal Notes Collection
+     "personal_notes": {
+         "name": "Personal Notes",
+         "description": "Personal notes and documents",
+         "paths": [os.path.abspath("./local_search_files/personal_notes")],
+         "enabled": True,
+         "embedding_model": "all-MiniLM-L6-v2",
+         "embedding_device": "cpu",
+         "embedding_model_type": "sentence_transformers",
+         "max_results": 30,
+         "max_filtered_results": 10,
+         "chunk_size": 500,  # Smaller chunks for notes
+         "chunk_overlap": 100,
+         "cache_dir": ".cache/local_search/personal_notes"
+     }
+ }
+
+ # Configuration for local search integration
+ LOCAL_SEARCH_CONFIG = {
+     # General embedding options
+     "DEFAULT_EMBEDDING_MODEL": "all-MiniLM-L6-v2",
+     "DEFAULT_EMBEDDING_DEVICE": "cpu",  # "cpu" or "cuda" for GPU acceleration
+     "DEFAULT_EMBEDDING_MODEL_TYPE": "sentence_transformers",  # or "ollama"
+
+     # Ollama settings (only used if model type is "ollama")
+     # Note: You must run 'ollama pull nomic-embed-text' first if using Ollama for embeddings
+     "OLLAMA_BASE_URL": "http://localhost:11434",
+     "OLLAMA_EMBEDDING_MODEL": "nomic-embed-text",
+
+     # Default indexing options
+     "FORCE_REINDEX": True,  # Force reindexing on startup
+     "CACHE_DIR": ".cache/local_search",  # Base directory for cache
+ }
+
+ def register_local_collections(search_engines_dict: Dict[str, Any]) -> None:
+     """
+     Register all enabled local collections as search engines.
+
+     Args:
+         search_engines_dict: The main search engines dictionary to update
+     """
+     for collection_id, collection in LOCAL_COLLECTIONS.items():
+         print(collection_id, collection)
+         if collection.get("enabled", True):
+             # Skip if already defined (don't override)
+             if collection_id in search_engines_dict:
+                 continue
+
+             # Validate paths exist
+             paths = collection.get("paths", [])
+             valid_paths = []
+             for path in paths:
+                 if os.path.exists(path) and os.path.isdir(path):
+                     valid_paths.append(path)
+                 else:
+                     print(f"Warning: Collection '{collection_id}' contains non-existent folder: {path}")
+
+             # Log warning if no valid paths
+             if not valid_paths and paths:
+                 print(f"Warning: Collection '{collection_id}' has no valid folders. It will be registered but won't return results.")
+
+             # Create a search engine entry for this collection
+             search_engines_dict[collection_id] = {
+                 "module_path": "local_deep_research.web_search_engines.engines.search_engine_local",
+                 "class_name": "LocalSearchEngine",
+                 "requires_api_key": False,
+                 "reliability": 0.9,  # High reliability for local documents
+                 "strengths": ["personal documents", "offline access",
+                               collection.get("description", "local documents")],
+                 "weaknesses": ["requires indexing", "limited to specific folders"],
+                 "default_params": {
+                     "folder_paths": collection.get("paths", []),
+                     "embedding_model": collection.get(
+                         "embedding_model",
+                         LOCAL_SEARCH_CONFIG["DEFAULT_EMBEDDING_MODEL"]
+                     ),
+                     "embedding_device": collection.get(
+                         "embedding_device",
+                         LOCAL_SEARCH_CONFIG["DEFAULT_EMBEDDING_DEVICE"]
+                     ),
+                     "embedding_model_type": collection.get(
+                         "embedding_model_type",
+                         LOCAL_SEARCH_CONFIG["DEFAULT_EMBEDDING_MODEL_TYPE"]
+                     ),
+                     "chunk_size": collection.get("chunk_size", 1000),
+                     "chunk_overlap": collection.get("chunk_overlap", 200),
+                     "cache_dir": collection.get(
+                         "cache_dir",
+                         f"{LOCAL_SEARCH_CONFIG['CACHE_DIR']}/{collection_id}"
+                     ),
+                     "max_results": collection.get("max_results", 20),
+                     "max_filtered_results": collection.get("max_filtered_results", 5),
+                     "collection_name": collection.get("name", collection_id),
+                     "collection_description": collection.get("description", "")
+                 },
+                 "requires_llm": True
+             }
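
Note: register_local_collections mutates the passed-in registry and skips keys that already exist, so local collections never shadow engines defined in search_engines.toml. A short usage sketch (standalone; in the package the registry is normally built from the TOML config first):

from local_deep_research.local_collections import register_local_collections

search_engines = {}  # hypothetical empty registry for illustration
register_local_collections(search_engines)
print(sorted(search_engines))  # ['personal_notes', 'project_docs', 'research_papers']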
local_deep_research/main.py
@@ -0,0 +1,113 @@
+ from .search_system import AdvancedSearchSystem
+ from typing import Dict
+ from .config import settings
+
+ def print_report(report: Dict):
+     """Print and save the report in a readable format"""
+
+     # Print to console in readable format
+     print("\n=== GENERATED REPORT ===\n")
+
+     # Print content
+     print(report["content"])
+
+
+
+     # Save to file in markdown format
+     with open("report.md", "w", encoding="utf-8") as markdown_file:
+         # Write content
+         markdown_file.write(report["content"])
+
+         # Write metadata at the end of the file
+         markdown_file.write("\n\n---\n\n")
+         markdown_file.write("## Report Metadata\n")
+
+         markdown_file.write(f"- Query: {report['metadata']['query']}\n")
+
+     print(f"\nReport has been saved to report.md")
+
+
+ from .report_generator import IntegratedReportGenerator
+
+ report_generator = IntegratedReportGenerator()
+
+
+
+ def main():
+     import os
+     import logging
+     from .utilties.setup_utils import setup_user_directories
+
+     # Configure logging
+     logging.basicConfig(level=logging.INFO)
+     logger = logging.getLogger(__name__)
+     logger.info(f"Starting with settings: iterations={settings.search.iterations}, "
+                 f"questions_per_iteration={settings.search.questions_per_iteration}")
+
+     # Explicitly run setup
+     logger.info("Initializing configuration...")
+     setup_user_directories()
+
+     system = AdvancedSearchSystem()
+
+     print("Welcome to the Advanced Research System")
+     print("Type 'quit' to exit")
+
+     while True:
+         print("\nSelect output type:")
+         print("1) Quick Summary (Generated in a few minutes)")
+         print(
+             "2) Detailed Research Report (Recommended for deeper analysis - may take several hours)"
+         )
+         choice = input("Enter number (1 or 2): ").strip()
+
+         while choice not in ["1", "2"]:
+             print("\nInvalid input. Please enter 1 or 2:")
+             print("1) Quick Summary (Generated in a few minutes)")
+             print(
+                 "2) Detailed Research Report (Recommended for deeper analysis - may take several hours)"
+             )
+             choice = input("Enter number (1 or 2): ").strip()
+
+         query = input("\nEnter your research query: ").strip()
+
+         if query.lower() == "quit":
+             break
+
+         # System will automatically use updated configuration
+         # through the automatic reloading in get_llm() and get_search()
+
+         if choice == "1":
+             print("\nResearching... This may take a few minutes.\n")
+         else:
+             print(
+                 "\nGenerating detailed report... This may take several hours. Please be patient as this enables deeper analysis.\n"
+             )
+
+         results = system.analyze_topic(query)
+         if results:
+             if choice == "1":
+                 # Quick Summary
+                 print("\n=== QUICK SUMMARY ===")
+                 if results["findings"] and len(results["findings"]) > 0:
+                     initial_analysis = [
+                         finding["content"] for finding in results["findings"]
+                     ]
+                     print(initial_analysis)
+
+             else:
+                 # Full Report
+                 final_report = report_generator.generate_report(
+                     results, query
+                 )
+                 print("\n=== RESEARCH REPORT ===")
+                 print_report(final_report)
+
+             print("\n=== RESEARCH METRICS ===")
+             print(f"Search Iterations: {results['iterations']}")
+
+         else:
+             print("Research failed. Please try again.")
+
+ if __name__ == "__main__":
+     main()
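
Note: the wheel also ships an entry_points.txt (file 55, contents not shown in this diff), so the CLI is presumably exposed as a console script as well. Independently of that, because main() is guarded by __name__ == "__main__" and the module uses package-relative imports, it should also be runnable with the interpreter's -m switch once the wheel is installed (an untested assumption):

python -m local_deep_research.main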