local-deep-research 0.1.26__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- local_deep_research/__init__.py +23 -22
- local_deep_research/__main__.py +16 -0
- local_deep_research/advanced_search_system/__init__.py +7 -0
- local_deep_research/advanced_search_system/filters/__init__.py +8 -0
- local_deep_research/advanced_search_system/filters/base_filter.py +38 -0
- local_deep_research/advanced_search_system/filters/cross_engine_filter.py +200 -0
- local_deep_research/advanced_search_system/findings/base_findings.py +81 -0
- local_deep_research/advanced_search_system/findings/repository.py +452 -0
- local_deep_research/advanced_search_system/knowledge/__init__.py +1 -0
- local_deep_research/advanced_search_system/knowledge/base_knowledge.py +151 -0
- local_deep_research/advanced_search_system/knowledge/standard_knowledge.py +159 -0
- local_deep_research/advanced_search_system/questions/__init__.py +1 -0
- local_deep_research/advanced_search_system/questions/base_question.py +64 -0
- local_deep_research/advanced_search_system/questions/decomposition_question.py +445 -0
- local_deep_research/advanced_search_system/questions/standard_question.py +119 -0
- local_deep_research/advanced_search_system/repositories/__init__.py +7 -0
- local_deep_research/advanced_search_system/strategies/__init__.py +1 -0
- local_deep_research/advanced_search_system/strategies/base_strategy.py +118 -0
- local_deep_research/advanced_search_system/strategies/iterdrag_strategy.py +450 -0
- local_deep_research/advanced_search_system/strategies/parallel_search_strategy.py +312 -0
- local_deep_research/advanced_search_system/strategies/rapid_search_strategy.py +270 -0
- local_deep_research/advanced_search_system/strategies/standard_strategy.py +300 -0
- local_deep_research/advanced_search_system/tools/__init__.py +1 -0
- local_deep_research/advanced_search_system/tools/base_tool.py +100 -0
- local_deep_research/advanced_search_system/tools/knowledge_tools/__init__.py +1 -0
- local_deep_research/advanced_search_system/tools/question_tools/__init__.py +1 -0
- local_deep_research/advanced_search_system/tools/search_tools/__init__.py +1 -0
- local_deep_research/api/__init__.py +5 -5
- local_deep_research/api/research_functions.py +154 -160
- local_deep_research/app.py +8 -0
- local_deep_research/citation_handler.py +25 -16
- local_deep_research/{config.py → config/config_files.py} +102 -110
- local_deep_research/config/llm_config.py +472 -0
- local_deep_research/config/search_config.py +77 -0
- local_deep_research/defaults/__init__.py +10 -5
- local_deep_research/defaults/main.toml +2 -2
- local_deep_research/defaults/search_engines.toml +60 -34
- local_deep_research/main.py +121 -19
- local_deep_research/migrate_db.py +147 -0
- local_deep_research/report_generator.py +87 -45
- local_deep_research/search_system.py +153 -283
- local_deep_research/setup_data_dir.py +35 -0
- local_deep_research/test_migration.py +178 -0
- local_deep_research/utilities/__init__.py +0 -0
- local_deep_research/utilities/db_utils.py +49 -0
- local_deep_research/{utilties → utilities}/enums.py +2 -2
- local_deep_research/{utilties → utilities}/llm_utils.py +63 -29
- local_deep_research/utilities/search_utilities.py +242 -0
- local_deep_research/{utilties → utilities}/setup_utils.py +4 -2
- local_deep_research/web/__init__.py +0 -1
- local_deep_research/web/app.py +86 -1709
- local_deep_research/web/app_factory.py +289 -0
- local_deep_research/web/database/README.md +70 -0
- local_deep_research/web/database/migrate_to_ldr_db.py +289 -0
- local_deep_research/web/database/migrations.py +447 -0
- local_deep_research/web/database/models.py +117 -0
- local_deep_research/web/database/schema_upgrade.py +107 -0
- local_deep_research/web/models/database.py +294 -0
- local_deep_research/web/models/settings.py +94 -0
- local_deep_research/web/routes/api_routes.py +559 -0
- local_deep_research/web/routes/history_routes.py +354 -0
- local_deep_research/web/routes/research_routes.py +715 -0
- local_deep_research/web/routes/settings_routes.py +1583 -0
- local_deep_research/web/services/research_service.py +947 -0
- local_deep_research/web/services/resource_service.py +149 -0
- local_deep_research/web/services/settings_manager.py +669 -0
- local_deep_research/web/services/settings_service.py +187 -0
- local_deep_research/web/services/socket_service.py +210 -0
- local_deep_research/web/static/css/custom_dropdown.css +277 -0
- local_deep_research/web/static/css/settings.css +1223 -0
- local_deep_research/web/static/css/styles.css +525 -48
- local_deep_research/web/static/js/components/custom_dropdown.js +428 -0
- local_deep_research/web/static/js/components/detail.js +348 -0
- local_deep_research/web/static/js/components/fallback/formatting.js +122 -0
- local_deep_research/web/static/js/components/fallback/ui.js +215 -0
- local_deep_research/web/static/js/components/history.js +487 -0
- local_deep_research/web/static/js/components/logpanel.js +949 -0
- local_deep_research/web/static/js/components/progress.js +1107 -0
- local_deep_research/web/static/js/components/research.js +1865 -0
- local_deep_research/web/static/js/components/results.js +766 -0
- local_deep_research/web/static/js/components/settings.js +3981 -0
- local_deep_research/web/static/js/components/settings_sync.js +106 -0
- local_deep_research/web/static/js/main.js +226 -0
- local_deep_research/web/static/js/services/api.js +253 -0
- local_deep_research/web/static/js/services/audio.js +31 -0
- local_deep_research/web/static/js/services/formatting.js +119 -0
- local_deep_research/web/static/js/services/pdf.js +622 -0
- local_deep_research/web/static/js/services/socket.js +882 -0
- local_deep_research/web/static/js/services/ui.js +546 -0
- local_deep_research/web/templates/base.html +72 -0
- local_deep_research/web/templates/components/custom_dropdown.html +47 -0
- local_deep_research/web/templates/components/log_panel.html +32 -0
- local_deep_research/web/templates/components/mobile_nav.html +22 -0
- local_deep_research/web/templates/components/settings_form.html +299 -0
- local_deep_research/web/templates/components/sidebar.html +21 -0
- local_deep_research/web/templates/pages/details.html +73 -0
- local_deep_research/web/templates/pages/history.html +51 -0
- local_deep_research/web/templates/pages/progress.html +57 -0
- local_deep_research/web/templates/pages/research.html +139 -0
- local_deep_research/web/templates/pages/results.html +59 -0
- local_deep_research/web/templates/settings_dashboard.html +78 -192
- local_deep_research/web/utils/__init__.py +0 -0
- local_deep_research/web/utils/formatters.py +76 -0
- local_deep_research/web_search_engines/engines/full_search.py +18 -16
- local_deep_research/web_search_engines/engines/meta_search_engine.py +182 -131
- local_deep_research/web_search_engines/engines/search_engine_arxiv.py +224 -139
- local_deep_research/web_search_engines/engines/search_engine_brave.py +88 -71
- local_deep_research/web_search_engines/engines/search_engine_ddg.py +48 -39
- local_deep_research/web_search_engines/engines/search_engine_github.py +415 -204
- local_deep_research/web_search_engines/engines/search_engine_google_pse.py +123 -90
- local_deep_research/web_search_engines/engines/search_engine_guardian.py +210 -157
- local_deep_research/web_search_engines/engines/search_engine_local.py +532 -369
- local_deep_research/web_search_engines/engines/search_engine_local_all.py +42 -36
- local_deep_research/web_search_engines/engines/search_engine_pubmed.py +358 -266
- local_deep_research/web_search_engines/engines/search_engine_searxng.py +212 -160
- local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py +213 -170
- local_deep_research/web_search_engines/engines/search_engine_serpapi.py +84 -68
- local_deep_research/web_search_engines/engines/search_engine_wayback.py +186 -154
- local_deep_research/web_search_engines/engines/search_engine_wikipedia.py +115 -77
- local_deep_research/web_search_engines/search_engine_base.py +174 -99
- local_deep_research/web_search_engines/search_engine_factory.py +192 -102
- local_deep_research/web_search_engines/search_engines_config.py +22 -15
- {local_deep_research-0.1.26.dist-info → local_deep_research-0.2.2.dist-info}/METADATA +177 -97
- local_deep_research-0.2.2.dist-info/RECORD +135 -0
- {local_deep_research-0.1.26.dist-info → local_deep_research-0.2.2.dist-info}/WHEEL +1 -2
- {local_deep_research-0.1.26.dist-info → local_deep_research-0.2.2.dist-info}/entry_points.txt +3 -0
- local_deep_research/defaults/llm_config.py +0 -338
- local_deep_research/utilties/search_utilities.py +0 -114
- local_deep_research/web/static/js/app.js +0 -3763
- local_deep_research/web/templates/api_keys_config.html +0 -82
- local_deep_research/web/templates/collections_config.html +0 -90
- local_deep_research/web/templates/index.html +0 -348
- local_deep_research/web/templates/llm_config.html +0 -120
- local_deep_research/web/templates/main_config.html +0 -89
- local_deep_research/web/templates/search_engines_config.html +0 -154
- local_deep_research/web/templates/settings.html +0 -519
- local_deep_research-0.1.26.dist-info/RECORD +0 -61
- local_deep_research-0.1.26.dist-info/top_level.txt +0 -1
- /local_deep_research/{utilties → config}/__init__.py +0 -0
- {local_deep_research-0.1.26.dist-info → local_deep_research-0.2.2.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,715 @@
|
|
1
|
+
import json
|
2
|
+
import logging
|
3
|
+
import os
|
4
|
+
import platform
|
5
|
+
import subprocess
|
6
|
+
from datetime import datetime
|
7
|
+
|
8
|
+
from flask import (
|
9
|
+
Blueprint,
|
10
|
+
current_app,
|
11
|
+
jsonify,
|
12
|
+
redirect,
|
13
|
+
render_template,
|
14
|
+
request,
|
15
|
+
send_from_directory,
|
16
|
+
url_for,
|
17
|
+
)
|
18
|
+
|
19
|
+
from ..models.database import add_log_to_db, calculate_duration, get_db_connection
|
20
|
+
from ..services.research_service import run_research_process, start_research_process
|
21
|
+
|
22
|
+
# Initialize logger
logger = logging.getLogger(__name__)

# Create a Blueprint for the research application
research_bp = Blueprint("research", __name__, url_prefix="/research")

# Active research processes and socket subscriptions.
# active_research maps research_id -> in-memory state dict; handlers below
# read/write its "thread", "progress" and "log" entries.
active_research = {}
# socket_subscriptions is exposed via get_globals() for other modules.
socket_subscriptions = {}

# Add termination flags dictionary.
# termination_flags maps research_id -> True once a user requests termination;
# presumably polled by the worker thread — confirm in research_service.
termination_flags = {}

# Output directory for research results
OUTPUT_DIR = "research_outputs"
|
37
|
+
|
38
|
+
|
39
|
+
# Shared-state accessor so other modules can reach this module's globals.
def get_globals():
    """Return the module's shared mutable state dicts.

    Other modules use this to observe or update the in-memory research
    bookkeeping without importing the globals directly.
    """
    shared_state = {
        "active_research": active_research,
        "socket_subscriptions": socket_subscriptions,
        "termination_flags": termination_flags,
    }
    return shared_state
|
46
|
+
|
47
|
+
|
48
|
+
# Route for index page - redirection
@research_bp.route("/")
def index():
    """Render the main research landing page."""
    home_template = "pages/research.html"
    return render_template(home_template)
|
52
|
+
|
53
|
+
|
54
|
+
# Add the missing static file serving route
@research_bp.route("/static/<path:path>")
def serve_static(path):
    """Serve a file from this package's ``static`` directory.

    The directory is resolved relative to this module's parent package so
    the route works regardless of the current working directory.
    """
    package_dir = os.path.dirname(os.path.dirname(__file__))
    static_dir = os.path.join(package_dir, "static")
    return send_from_directory(static_dir, path)
|
61
|
+
|
62
|
+
|
63
|
+
# Add static route at the root level
@research_bp.route("/redirect-static/<path:path>")
def redirect_static(path):
    """Redirect old static URLs to new static URLs."""
    target = url_for("static", filename=path)
    return redirect(target)
|
68
|
+
|
69
|
+
|
70
|
+
@research_bp.route("/progress/<int:research_id>")
def progress_page(research_id):
    """Render the page that tracks an in-flight research run.

    ``research_id`` is not used server-side; the template reads it from
    the URL on the client.
    """
    template_name = "pages/progress.html"
    return render_template(template_name)
|
74
|
+
|
75
|
+
|
76
|
+
@research_bp.route("/details/<int:research_id>")
def research_details_page(research_id):
    """Render the research details page.

    ``research_id`` is resolved client-side from the URL; the server just
    serves the static page shell.
    """
    template_name = "pages/details.html"
    return render_template(template_name)
|
80
|
+
|
81
|
+
|
82
|
+
@research_bp.route("/results/<int:research_id>")
def results_page(research_id):
    """Render the research results page.

    ``research_id`` is resolved client-side from the URL; the server just
    serves the static page shell.
    """
    template_name = "pages/results.html"
    return render_template(template_name)
|
86
|
+
|
87
|
+
|
88
|
+
@research_bp.route("/history")
def history_page():
    """Render the research history listing page."""
    template_name = "pages/history.html"
    return render_template(template_name)
|
92
|
+
|
93
|
+
|
94
|
+
# Add missing settings routes
@research_bp.route("/settings", methods=["GET"])
def settings_page():
    """Render the settings dashboard page."""
    template_name = "settings_dashboard.html"
    return render_template(template_name)
|
99
|
+
|
100
|
+
|
101
|
+
@research_bp.route("/settings/main", methods=["GET"])
def main_config_page():
    """Render the main settings config page"""
    # NOTE(review): "main_config.html" appears in this release's removed-files
    # list — this route may render a missing template and 500; confirm whether
    # it is superseded by routes/settings_routes.py.
    return render_template("main_config.html")
|
105
|
+
|
106
|
+
|
107
|
+
@research_bp.route("/settings/collections", methods=["GET"])
def collections_config_page():
    """Render the collections config page"""
    # NOTE(review): "collections_config.html" appears in this release's
    # removed-files list — this route may render a missing template; confirm
    # whether it is superseded by routes/settings_routes.py.
    return render_template("collections_config.html")
|
111
|
+
|
112
|
+
|
113
|
+
@research_bp.route("/settings/api_keys", methods=["GET"])
def api_keys_config_page():
    """Render the API keys config page"""
    # NOTE(review): "api_keys_config.html" appears in this release's
    # removed-files list — this route may render a missing template; confirm
    # whether it is superseded by routes/settings_routes.py.
    return render_template("api_keys_config.html")
|
117
|
+
|
118
|
+
|
119
|
+
@research_bp.route("/settings/search_engines", methods=["GET"])
def search_engines_config_page():
    """Render the search engines config page"""
    # NOTE(review): "search_engines_config.html" appears in this release's
    # removed-files list — this route may render a missing template; confirm
    # whether it is superseded by routes/settings_routes.py.
    return render_template("search_engines_config.html")
|
123
|
+
|
124
|
+
|
125
|
+
@research_bp.route("/settings/llm", methods=["GET"])
def llm_config_page():
    """Render the LLM config page"""
    # NOTE(review): "llm_config.html" appears in this release's removed-files
    # list — this route may render a missing template; confirm whether it is
    # superseded by routes/settings_routes.py.
    return render_template("llm_config.html")
|
129
|
+
|
130
|
+
|
131
|
+
def _cleanup_stale_research():
    """Drop active_research entries whose DB row or worker thread is gone.

    A single DB connection is reused for the whole scan; the original code
    opened and closed one connection per tracked research.
    """
    if not active_research:
        return

    stale_research_ids = []
    conn = get_db_connection()
    try:
        cursor = conn.cursor()
        for research_id, research_data in list(active_research.items()):
            # Check database status
            cursor.execute(
                "SELECT status FROM research_history WHERE id = ?", (research_id,)
            )
            result = cursor.fetchone()

            # Stale when the row is missing / no longer in_progress, or when
            # the worker thread has died.
            thread = research_data.get("thread")
            if not result or result[0] != "in_progress":
                stale_research_ids.append(research_id)
            elif not thread or not thread.is_alive():
                stale_research_ids.append(research_id)
    finally:
        conn.close()

    for stale_id in stale_research_ids:
        # Was print(); route through the module logger for consistency.
        logger.info("Cleaning up stale research process: %s", stale_id)
        active_research.pop(stale_id, None)
        termination_flags.pop(stale_id, None)


@research_bp.route("/api/start_research", methods=["POST"])
def start_research():
    """Validate a research request, record it in the DB, and launch a worker.

    Expects a JSON body with at least ``query`` and ``model``; optional keys
    choose the provider, search engine, and tuning parameters.  Returns the
    new ``research_id`` on success, 400 on validation failure, and 409 when
    another research run is already in progress.
    """
    data = request.json
    query = data.get("query")
    mode = data.get("mode", "quick")

    # Get model provider and model selections
    model_provider = data.get("model_provider", "OLLAMA")
    model = data.get("model")
    custom_endpoint = data.get("custom_endpoint")
    # Accept both key names for backward compatibility with older clients.
    search_engine = data.get("search_engine") or data.get("search_tool")
    max_results = data.get("max_results")
    time_period = data.get("time_period")
    iterations = data.get("iterations")
    questions_per_iteration = data.get("questions_per_iteration")

    # Log the selections for troubleshooting
    logger.info(
        f"Starting research with provider: {model_provider}, model: {model}, search engine: {search_engine}"
    )
    logger.info(
        f"Additional parameters: max_results={max_results}, time_period={time_period}, iterations={iterations}, questions={questions_per_iteration}"
    )

    if not query:
        return jsonify({"status": "error", "message": "Query is required"}), 400

    # Validate required parameters based on provider
    if model_provider == "OPENAI_ENDPOINT" and not custom_endpoint:
        return (
            jsonify(
                {
                    "status": "error",
                    "message": "Custom endpoint URL is required for OpenAI endpoint provider",
                }
            ),
            400,
        )

    if not model:
        return jsonify({"status": "error", "message": "Model is required"}), 400

    # Drop any tracked research that is no longer actually running.
    _cleanup_stale_research()

    # After cleanup, refuse to start a second concurrent research run.
    if active_research:
        return (
            jsonify(
                {
                    "status": "error",
                    "message": "Another research is already in progress. Please wait for it to complete.",
                }
            ),
            409,
        )

    # Create a record in the database with explicit UTC timestamp
    created_at = datetime.utcnow().isoformat()
    conn = get_db_connection()
    cursor = conn.cursor()

    # Save research settings in the metadata field
    research_settings = {
        "model_provider": model_provider,
        "model": model,
        "custom_endpoint": custom_endpoint,
        "search_engine": search_engine,
        "max_results": max_results,
        "time_period": time_period,
        "iterations": iterations,
        "questions_per_iteration": questions_per_iteration,
    }

    cursor.execute(
        "INSERT INTO research_history (query, mode, status, created_at, progress_log, metadata) VALUES (?, ?, ?, ?, ?, ?)",
        (
            query,
            mode,
            "in_progress",
            created_at,
            json.dumps(
                [{"time": created_at, "message": "Research started", "progress": 0}]
            ),
            json.dumps(research_settings),
        ),
    )
    research_id = cursor.lastrowid
    conn.commit()
    conn.close()

    # Start the research process with the selected parameters
    research_thread = start_research_process(
        research_id,
        query,
        mode,
        active_research,
        termination_flags,
        run_research_process,
        model_provider=model_provider,
        model=model,
        custom_endpoint=custom_endpoint,
        search_engine=search_engine,
        max_results=max_results,
        time_period=time_period,
        iterations=iterations,
        questions_per_iteration=questions_per_iteration,
    )

    # Store the thread reference in active_research (the entry is expected to
    # have been registered by start_research_process).
    active_research[research_id]["thread"] = research_thread

    return jsonify({"status": "success", "research_id": research_id})
|
273
|
+
|
274
|
+
|
275
|
+
@research_bp.route("/api/terminate/<int:research_id>", methods=["POST"])
def terminate_research(research_id):
    """Terminate an in-progress research process.

    Sets the in-memory termination flag, records the request in both the
    per-research log table and the legacy ``progress_log`` column, flips the
    DB status to ``suspended`` and notifies socket subscribers.  Returns 404
    when the research does not exist and 400 when it is not in progress.
    """

    # Check if the research exists and is in progress
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("SELECT status FROM research_history WHERE id = ?", (research_id,))
    result = cursor.fetchone()

    if not result:
        conn.close()
        return jsonify({"status": "error", "message": "Research not found"}), 404

    status = result[0]

    # If it's not in progress, return an error
    if status != "in_progress":
        conn.close()
        return (
            jsonify({"status": "error", "message": "Research is not in progress"}),
            400,
        )

    # Not tracked in memory (e.g. the server restarted): just mark the DB row.
    if research_id not in active_research:
        cursor.execute(
            "UPDATE research_history SET status = ? WHERE id = ?",
            ("suspended", research_id),
        )
        conn.commit()
        conn.close()
        return jsonify({"status": "success", "message": "Research terminated"})

    # Set the termination flag
    termination_flags[research_id] = True

    # Log the termination request - using UTC timestamp
    timestamp = datetime.utcnow().isoformat()
    termination_message = "Research termination requested by user"
    # Robustness fix: the worker may not have populated "progress" yet, so
    # default to 0 instead of raising KeyError.
    current_progress = active_research[research_id].get("progress", 0)

    # Create log entry
    log_entry = {
        "time": timestamp,
        "message": termination_message,
        "progress": current_progress,
        "metadata": {"phase": "termination"},
    }

    # Add to in-memory log (create it lazily if the worker has not yet).
    active_research[research_id].setdefault("log", []).append(log_entry)

    # Add to database log
    add_log_to_db(
        research_id,
        termination_message,
        log_type="milestone",
        progress=current_progress,
        metadata={"phase": "termination"},
    )

    # Update the log in the database (old way for backward compatibility)
    cursor.execute(
        "SELECT progress_log FROM research_history WHERE id = ?", (research_id,)
    )
    log_result = cursor.fetchone()
    if log_result:
        try:
            current_log = json.loads(log_result[0])
        except Exception:
            current_log = []
        current_log.append(log_entry)
        cursor.execute(
            "UPDATE research_history SET progress_log = ? WHERE id = ?",
            (json.dumps(current_log), research_id),
        )

    # IMMEDIATELY update the status to 'suspended' to avoid race conditions
    cursor.execute(
        "UPDATE research_history SET status = ? WHERE id = ?",
        ("suspended", research_id),
    )
    conn.commit()
    conn.close()

    # Emit a socket event for the termination request
    try:
        event_data = {
            "status": "suspended",  # Changed from 'terminating' to 'suspended'
            "message": "Research was suspended by user request",
        }

        from ..services.socket_service import emit_socket_event

        emit_socket_event(f"research_progress_{research_id}", event_data)

    except Exception as socket_error:
        # Was print(); route through the module logger for consistency.
        logger.error(f"Socket emit error (non-critical): {str(socket_error)}")

    return jsonify({"status": "success", "message": "Research termination requested"})
|
377
|
+
|
378
|
+
|
379
|
+
@research_bp.route("/api/delete/<int:research_id>", methods=["DELETE"])
def delete_research(research_id):
    """Delete a research record and its on-disk report, if any.

    Refuses to delete a research run that is still tracked as in progress.
    Returns 404 when the record does not exist.
    """
    conn = get_db_connection()
    cursor = conn.cursor()

    # First check if the research exists and is not in progress
    cursor.execute(
        "SELECT status, report_path FROM research_history WHERE id = ?", (research_id,)
    )
    result = cursor.fetchone()

    if not result:
        conn.close()
        return jsonify({"status": "error", "message": "Research not found"}), 404

    status, report_path = result

    # Don't allow deleting research in progress
    if status == "in_progress" and research_id in active_research:
        conn.close()
        return (
            jsonify(
                {
                    "status": "error",
                    "message": "Cannot delete research that is in progress",
                }
            ),
            400,
        )

    # Delete report file if it exists (best-effort; a failure here should
    # not block removal of the DB record).
    if report_path and os.path.exists(report_path):
        try:
            os.remove(report_path)
        except Exception as e:
            # Was print(); route through the module logger for consistency.
            logger.error(f"Error removing report file: {str(e)}")

    # Delete the database record
    cursor.execute("DELETE FROM research_history WHERE id = ?", (research_id,))
    conn.commit()
    conn.close()

    return jsonify({"status": "success"})
|
423
|
+
|
424
|
+
|
425
|
+
@research_bp.route("/api/clear_history", methods=["POST"])
def clear_history():
    """Clear all research history except runs that are currently active.

    Report files are removed best-effort; records belonging to active
    research are preserved.
    """
    try:
        conn = get_db_connection()
        try:
            cursor = conn.cursor()

            # Get all research IDs first to clean up files
            cursor.execute("SELECT id, report_path FROM research_history")
            research_records = cursor.fetchall()

            # Clean up report files
            for research_id, report_path in research_records:
                # Skip active research
                if research_id in active_research:
                    continue

                # Delete report file if it exists
                if report_path and os.path.exists(report_path):
                    try:
                        os.remove(report_path)
                    except Exception as e:
                        # Was print(); use the module logger for consistency.
                        logger.error(f"Error removing report file: {str(e)}")

            # Delete records from the database, except active research.
            # (placeholders is now only built when it is actually needed.)
            if active_research:
                placeholders = ", ".join(["?"] * len(active_research))
                cursor.execute(
                    f"DELETE FROM research_history WHERE id NOT IN ({placeholders})",
                    list(active_research.keys()),
                )
            else:
                cursor.execute("DELETE FROM research_history")

            conn.commit()
        finally:
            # Fix: the original leaked the connection when an error occurred.
            conn.close()

        return jsonify({"status": "success"})
    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
|
465
|
+
|
466
|
+
|
467
|
+
@research_bp.route("/open_file_location", methods=["POST"])
def open_file_location():
    """Open a file location in the system file explorer.

    Windows selects the file in Explorer; macOS opens the path with ``open``;
    other platforms open the containing directory with ``xdg-open``.
    """
    data = request.json
    file_path = data.get("path")

    if not file_path:
        return jsonify({"status": "error", "message": "Path is required"}), 400

    # Convert to absolute path if needed
    if not os.path.isabs(file_path):
        file_path = os.path.abspath(file_path)

    # Check if path exists
    if not os.path.exists(file_path):
        return jsonify({"status": "error", "message": "Path does not exist"}), 404

    try:
        if platform.system() == "Windows":
            # On Windows, open the folder and select the file
            if os.path.isfile(file_path):
                subprocess.run(["explorer", "/select,", file_path], check=True)
            else:
                # If it's a directory, just open it
                subprocess.run(["explorer", file_path], check=True)
        elif platform.system() == "Darwin":  # macOS
            subprocess.run(["open", file_path], check=True)
        else:  # Linux and others
            # Bug fix: the original always opened dirname(file_path), which
            # for a directory argument opened its *parent*. Open the
            # directory itself to match the Windows/macOS branches.
            target = (
                file_path if os.path.isdir(file_path) else os.path.dirname(file_path)
            )
            subprocess.run(["xdg-open", target], check=True)

        return jsonify({"status": "success"})
    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
|
500
|
+
|
501
|
+
|
502
|
+
@research_bp.route("/api/save_raw_config", methods=["POST"])
def save_raw_config():
    """Persist a raw configuration string to ``~/.local_deep_research/config.toml``."""
    payload = request.json
    raw_config = payload.get("raw_config")

    # Reject empty bodies before touching the filesystem.
    if not raw_config:
        return (
            jsonify({"success": False, "error": "Raw configuration is required"}),
            400,
        )

    try:
        # Resolve (and create if needed) the per-user config directory.
        home_dir = os.path.expanduser("~")
        config_dir = os.path.join(home_dir, ".local_deep_research")
        os.makedirs(config_dir, exist_ok=True)
        config_path = os.path.join(config_dir, "config.toml")

        # Write the configuration to file
        with open(config_path, "w", encoding="utf-8") as config_file:
            config_file.write(raw_config)
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500

    return jsonify({"success": True})
|
527
|
+
|
528
|
+
|
529
|
+
@research_bp.route("/api/history", methods=["GET"])
def get_history():
    """Return all research history records, newest first.

    The optional ``title`` column is included only when it exists in the
    schema (older databases may predate it).
    """
    try:
        conn = get_db_connection()
        try:
            cursor = conn.cursor()

            # Check if title column exists in the database
            cursor.execute("PRAGMA table_info(research_history)")
            columns = [column[1] for column in cursor.fetchall()]

            # Build query based on existing columns
            select_columns = [
                "id",
                "query",
                "mode",
                "status",
                "created_at",
                "completed_at",
                "report_path",
            ]

            # Optionally include title if it exists
            if "title" in columns:
                select_columns.append("title")

            # Construct and execute query
            select_query = f"SELECT {', '.join(select_columns)} FROM research_history ORDER BY created_at DESC"
            cursor.execute(select_query)

            history_items = []
            for row in cursor.fetchall():
                row_data = dict(zip(select_columns, row))

                # Calculate duration if completed
                duration_seconds = None
                if row_data["completed_at"] and row_data["created_at"]:
                    try:
                        duration_seconds = calculate_duration(
                            row_data["created_at"], row_data["completed_at"]
                        )
                    except Exception:
                        # Was print(); logger.exception keeps the traceback.
                        logger.exception("Error calculating duration")

                # Create a history item
                item = {
                    "id": row_data["id"],
                    "query": row_data["query"],
                    "mode": row_data["mode"],
                    "status": row_data["status"],
                    "created_at": row_data["created_at"],
                    "completed_at": row_data["completed_at"],
                    "duration_seconds": duration_seconds,
                    "report_path": row_data["report_path"],
                }

                # Add title only when the column exists and has a value
                title = row_data.get("title")
                if title is not None:
                    item["title"] = title

                history_items.append(item)
        finally:
            # Fix: the original leaked the connection when an error occurred.
            conn.close()

        return jsonify({"status": "success", "items": history_items})
    except Exception as e:
        # Was print() + traceback.format_exc(); logger.exception records both.
        logger.exception("Error getting history")
        return jsonify({"status": "error", "message": str(e)}), 500
|
611
|
+
|
612
|
+
|
613
|
+
def _classify_error(error_msg, metadata):
    """Map a stored error message to a structured error_info dict for the UI.

    Pure function: inspects the message text for known failure signatures
    (timeout, token limit, LLM/Ollama/connection problems) and falls back to
    the metadata-provided solution or a generic suggestion.
    """
    lower_msg = error_msg.lower()
    if "timeout" in lower_msg:
        return {
            "type": "timeout",
            "message": "LLM service timed out during synthesis. This may be due to high server load or connectivity issues.",
            "suggestion": "Try again later or use a smaller query scope.",
        }
    if "token limit" in lower_msg or "context length" in lower_msg:
        return {
            "type": "token_limit",
            "message": "The research query exceeded the AI model's token limit during synthesis.",
            "suggestion": "Try using a more specific query or reduce the research scope.",
        }
    if "final answer synthesis fail" in lower_msg or "llm error" in lower_msg:
        return {
            "type": "llm_error",
            "message": "The AI model encountered an error during final answer synthesis.",
            "suggestion": "Check that your LLM service is running correctly or try a different model.",
        }
    if "ollama" in lower_msg:
        return {
            "type": "ollama_error",
            "message": "The Ollama service is not responding properly.",
            "suggestion": "Make sure Ollama is running with 'ollama serve' and the model is downloaded.",
        }
    if "connection" in lower_msg:
        return {
            "type": "connection",
            "message": "Connection error with the AI service.",
            "suggestion": "Check your internet connection and AI service status.",
        }
    if metadata.get("solution"):
        # Use the solution provided in metadata if available
        return {
            "type": "unknown",
            "message": error_msg,
            "suggestion": metadata.get("solution"),
        }
    # Generic error with the original message
    return {
        "type": "unknown",
        "message": error_msg,
        "suggestion": "Try again with a different query or check the application logs.",
    }


@research_bp.route("/api/research/<research_id>/status")
def get_research_status(research_id):
    """Get the status of a research process.

    Returns status, progress, completion time, report path and parsed
    metadata; when the metadata records an error, a structured
    ``error_info`` entry is added for the UI.
    """
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute(
        "SELECT status, progress, completed_at, report_path, metadata FROM research_history WHERE id = ?",
        (research_id,),
    )
    result = cursor.fetchone()

    if result is None:
        conn.close()
        return jsonify({"error": "Research not found"}), 404

    status, progress, completed_at, report_path, metadata_str = result
    # DB work is done; close before the in-memory post-processing so the
    # connection is never held (or leaked) across it.
    conn.close()

    # Parse metadata if it exists
    metadata = {}
    if metadata_str:
        try:
            metadata = json.loads(metadata_str)
        except json.JSONDecodeError:
            current_app.logger.warning(
                f"Invalid JSON in metadata for research {research_id}"
            )

    # Extract and format error information for better UI display
    if metadata and "error" in metadata:
        metadata["error_info"] = _classify_error(metadata["error"], metadata)

    return jsonify(
        {
            "status": status,
            "progress": progress,
            "completed_at": completed_at,
            "report_path": report_path,
            "metadata": metadata,
        }
    )