local-deep-research 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- local_deep_research/api/research_functions.py +72 -90
- local_deep_research/defaults/search_engines.toml +1 -1
- local_deep_research/report_generator.py +19 -5
- local_deep_research/search_system.py +8 -2
- local_deep_research/web/routes/settings_routes.py +0 -9
- local_deep_research/web_search_engines/engines/search_engine_searxng.py +1 -1
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/METADATA +2 -2
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/RECORD +11 -11
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/WHEEL +0 -0
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/entry_points.txt +0 -0
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/licenses/LICENSE +0 -0
local_deep_research/api/research_functions.py
@@ -9,78 +9,100 @@ from typing import Any, Callable, Dict, Optional
 
 import toml
 
-from .. import get_report_generator  # Use the lazy import function
 from ..config.llm_config import get_llm
 from ..config.search_config import get_search
+from ..report_generator import IntegratedReportGenerator
 from ..search_system import AdvancedSearchSystem
 from ..utilities.search_utilities import remove_think_tags
 
 logger = logging.getLogger(__name__)
 
 
-def quick_summary(
-
+def _init_search_system(
+    model_name: str | None = None,
+    temperature: float = 0.7,
+    provider: str | None = None,
+    openai_endpoint_url: str | None = None,
+    progress_callback: Callable[[str, int, dict], None] | None = None,
     search_tool: Optional[str] = None,
     iterations: int = 1,
     questions_per_iteration: int = 1,
-
-    max_filtered_results: int = 5,
-    region: str = "us",
-    time_period: str = "y",
-    safe_search: bool = True,
-    temperature: float = 0.7,
-    progress_callback: Optional[Callable] = None,
-) -> Dict[str, Any]:
+) -> AdvancedSearchSystem:
     """
-
+    Initializes the advanced search system with specified parameters. This function sets up
+    and returns an instance of the AdvancedSearchSystem using the provided configuration
+    options such as model name, temperature for randomness in responses, provider service
+    details, endpoint URL, and an optional search tool.
 
     Args:
-
+        model_name: Name of the model to use (if None, uses database setting)
+        temperature: LLM temperature for generation
+        provider: Provider to use (if None, uses database setting)
+        openai_endpoint_url: Custom endpoint URL to use (if None, uses database
+            setting)
+        progress_callback: Optional callback function to receive progress updates
         search_tool: Search engine to use (auto, wikipedia, arxiv, etc.). If None, uses default
         iterations: Number of research cycles to perform
        questions_per_iteration: Number of questions to generate per cycle
-        max_results: Maximum number of search results to consider
-        max_filtered_results: Maximum results after relevance filtering
-        region: Search region/locale
-        time_period: Time period for search results (d=day, w=week, m=month, y=year)
-        safe_search: Whether to enable safe search
-        temperature: LLM temperature for generation
-        progress_callback: Optional callback function to receive progress updates
 
     Returns:
-
-        - 'summary': The generated summary text
-        - 'findings': List of detailed findings from each search
-        - 'iterations': Number of iterations performed
-        - 'questions': Questions generated during research
-    """
-    logger.info("Generating quick summary for query: %s", query)
+        AdvancedSearchSystem: An instance of the configured AdvancedSearchSystem.
 
+    """
     # Get language model with custom temperature
-    llm = get_llm(
-
-
-
-
-
-    system.max_iterations = iterations
-    system.questions_per_iteration = questions_per_iteration
-    system.model = llm  # Ensure the model is directly attached to the system
+    llm = get_llm(
+        temperature=temperature,
+        openai_endpoint_url=openai_endpoint_url,
+        model_name=model_name,
+        provider=provider,
+    )
 
     # Set the search engine if specified
+    search_engine = None
     if search_tool:
-        search_engine = get_search(search_tool)
-        if search_engine:
-            system.search = search_engine
-        else:
+        search_engine = get_search(search_tool, llm_instance=llm)
+        if search_engine is None:
             logger.warning(
                 f"Could not create search engine '{search_tool}', using default."
             )
 
+    # Create search system with custom parameters
+    system = AdvancedSearchSystem(llm=llm, search=search_engine)
+
+    # Override default settings with user-provided values
+    system.max_iterations = iterations
+    system.questions_per_iteration = questions_per_iteration
+
     # Set progress callback if provided
     if progress_callback:
         system.set_progress_callback(progress_callback)
 
+    return system
+
+
+def quick_summary(
+    query: str,
+    **kwargs: Any,
+) -> Dict[str, Any]:
+    """
+    Generate a quick research summary for a given query.
+
+    Args:
+        query: The research query to analyze
+        **kwargs: Configuration for the search system. Will be forwarded to
+            `_init_search_system()`.
+
+    Returns:
+        Dictionary containing the research results with keys:
+        - 'summary': The generated summary text
+        - 'findings': List of detailed findings from each search
+        - 'iterations': Number of iterations performed
+        - 'questions': Questions generated during research
+    """
+    logger.info("Generating quick summary for query: %s", query)
+
+    system = _init_search_system(**kwargs)
+
     # Perform the search and analysis
     results = system.analyze_topic(query)
 
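Taken together, the new `_init_search_system()` helper means the public API functions now take their LLM and search configuration as keyword arguments and forward them along. A minimal usage sketch, assuming only the parameter names visible in this hunk (the provider and model values are placeholders):

```python
from local_deep_research.api.research_functions import quick_summary

# All keyword arguments are forwarded to _init_search_system(); the 0.2.0
# parameters removed above (max_results, region, time_period, safe_search,
# ...) are no longer part of this signature.
results = quick_summary(
    "How does retrieval-augmented generation work?",
    provider="ollama",        # placeholder; falls back to the database setting if omitted
    model_name="mistral",     # placeholder model name
    temperature=0.7,
    search_tool="wikipedia",
    iterations=1,
    questions_per_iteration=1,
)
print(results["summary"])
```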
@@ -103,36 +125,20 @@ def quick_summary(
 
 def generate_report(
     query: str,
-    search_tool: Optional[str] = None,
-    iterations: int = 2,
-    questions_per_iteration: int = 2,
-    searches_per_section: int = 2,
-    max_results: int = 50,
-    max_filtered_results: int = 5,
-    region: str = "us",
-    time_period: str = "y",
-    safe_search: bool = True,
-    temperature: float = 0.7,
     output_file: Optional[str] = None,
     progress_callback: Optional[Callable] = None,
+    searches_per_section: int = 2,
+    **kwargs: Any,
 ) -> Dict[str, Any]:
     """
     Generate a comprehensive, structured research report for a given query.
 
     Args:
         query: The research query to analyze
-        search_tool: Search engine to use (auto, wikipedia, arxiv, etc.). If None, uses default
-        iterations: Number of research cycles to perform
-        questions_per_iteration: Number of questions to generate per cycle
-        searches_per_section: Number of searches to perform per report section
-        max_results: Maximum number of search results to consider
-        max_filtered_results: Maximum results after relevance filtering
-        region: Search region/locale
-        time_period: Time period for search results (d=day, w=week, m=month, y=year)
-        safe_search: Whether to enable safe search
-        temperature: LLM temperature for generation
         output_file: Optional path to save report markdown file
         progress_callback: Optional callback function to receive progress updates
+        searches_per_section: The number of searches to perform for each
+            section in the report.
 
     Returns:
         Dictionary containing the research report with keys:
@@ -141,34 +147,7 @@ def generate_report(
     """
     logger.info("Generating comprehensive research report for query: %s", query)
 
-
-    llm = get_llm(temperature=temperature)
-
-    # Create search system with custom parameters
-    system = AdvancedSearchSystem()
-
-    # Override default settings with user-provided values
-    system.max_iterations = iterations
-    system.questions_per_iteration = questions_per_iteration
-    system.model = llm  # Ensure the model is directly attached to the system
-
-    # Set the search engine if specified
-    if search_tool:
-        search_engine = get_search(
-            search_tool,
-            llm_instance=llm,
-            max_results=max_results,
-            max_filtered_results=max_filtered_results,
-            region=region,
-            time_period=time_period,
-            safe_search=safe_search,
-        )
-        if search_engine:
-            system.search = search_engine
-        else:
-            logger.warning(
-                f"Could not create search engine '{search_tool}', using default."
-            )
+    system = _init_search_system(**kwargs)
 
     # Set progress callback if provided
     if progress_callback:
@@ -178,8 +157,11 @@ def generate_report(
     initial_findings = system.analyze_topic(query)
 
     # Generate the structured report
-    report_generator =
-
+    report_generator = IntegratedReportGenerator(
+        search_system=system,
+        llm=system.model,
+        searches_per_section=searches_per_section,
+    )
     report = report_generator.generate_report(initial_findings, query)
 
     # Save report to file if path is provided
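`generate_report()` follows the same pattern: it keeps only the report-specific options explicitly and hands everything else to `_init_search_system()` via `**kwargs`. A sketch based on the signature above, with illustrative values:

```python
from local_deep_research.api.research_functions import generate_report

report = generate_report(
    "State of open-source LLM inference engines",
    output_file="report.md",    # optional path for the rendered markdown
    searches_per_section=2,     # passed on to IntegratedReportGenerator
    search_tool="auto",         # forwarded to _init_search_system()
    iterations=2,
    questions_per_iteration=2,
)
```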
local_deep_research/defaults/search_engines.toml
@@ -98,7 +98,7 @@ search_language = "English"
 [searxng]
 module_path = ".engines.search_engine_searxng"
 class_name = "SearXNGSearchEngine"
-requires_api_key =
+requires_api_key = false
 api_key_env = "SEARXNG_INSTANCE"
 reliability = 0.9
 strengths = [
local_deep_research/report_generator.py
@@ -1,11 +1,11 @@
 import importlib
 from typing import Dict, List
 
+from langchain_core.language_models import BaseChatModel
+
 # Fix circular import by importing directly from source modules
 from .config.llm_config import get_llm
 from .search_system import AdvancedSearchSystem
-
-# from . import utilities
 from .utilities import search_utilities
 
 
@@ -19,10 +19,24 @@ def get_report_generator(search_system=None):
 
 
 class IntegratedReportGenerator:
-    def __init__(
-        self
+    def __init__(
+        self,
+        searches_per_section: int = 2,
+        search_system=None,
+        llm: BaseChatModel | None = None,
+    ):
+        """
+        Args:
+            searches_per_section: Number of searches to perform for each
+                section in the report.
+            search_system: Custom search system to use, otherwise just uses
+                the default.
+            llm: Custom LLM to use, otherwise just uses the default.
+
+        """
+        self.model = llm or get_llm()
         # Use provided search_system or create a new one
-        self.search_system = search_system or AdvancedSearchSystem()
+        self.search_system = search_system or AdvancedSearchSystem(llm=self.model)
         self.searches_per_section = (
             searches_per_section  # Control search depth per section
         )
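The constructor can also be used directly when one LLM should be shared between the search system and the report generator; this sketch relies only on the signatures shown in this diff:

```python
from local_deep_research.config.llm_config import get_llm
from local_deep_research.report_generator import IntegratedReportGenerator
from local_deep_research.search_system import AdvancedSearchSystem

llm = get_llm(temperature=0.5)           # same helper the package uses internally
system = AdvancedSearchSystem(llm=llm)   # search engine falls back to the default
generator = IntegratedReportGenerator(
    searches_per_section=3,
    search_system=system,
    llm=llm,                             # defaults to get_llm() when omitted
)
```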
local_deep_research/search_system.py
@@ -19,6 +19,7 @@ from .config.config_files import settings
 from .config.llm_config import get_llm
 from .config.search_config import get_search
 from .utilities.db_utils import get_db_setting
+from .web_search_engines.search_engine_base import BaseSearchEngine
 
 logger = logging.getLogger(__name__)
 
@@ -34,6 +35,7 @@ class AdvancedSearchSystem:
         include_text_content: bool = True,
         use_cross_engine_filter: bool = True,
         llm: BaseChatModel | None = None,
+        search: BaseSearchEngine | None = None,
     ):
         """Initialize the advanced search system.
 
@@ -43,12 +45,16 @@ class AdvancedSearchSystem:
             use_cross_engine_filter: Whether to filter results across search
                 engines.
             llm: LLM to use. If not provided, it will use the default one.
+            search: Search engine to use. If not provided, it will use the
+                default one.
         """
         # Get configuration
-        self.search = get_search()
         self.model = llm
         if llm is None:
             self.model = get_llm()
+        self.search = search
+        if search is None:
+            self.search = get_search(llm_instance=self.model)
         self.max_iterations = get_db_setting(
             "search.iterations", settings.search.iterations
         )
@@ -90,7 +96,7 @@ class AdvancedSearchSystem:
 
         # For backward compatibility
         self.questions_by_iteration = {}
-        self.progress_callback = None
+        self.progress_callback = lambda _1, _2, _3: None
         self.all_links_of_system = list()
 
         # Configure the strategy with our attributes
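With the added `search` parameter, both dependencies of `AdvancedSearchSystem` can now be injected, and each falls back to the package default when left as `None`. A sketch using only the calls visible in this diff:

```python
from local_deep_research.config.llm_config import get_llm
from local_deep_research.config.search_config import get_search
from local_deep_research.search_system import AdvancedSearchSystem

llm = get_llm()
search = get_search("wikipedia", llm_instance=llm)  # may be None if the engine cannot be built
system = AdvancedSearchSystem(llm=llm, search=search)

# When search is None the constructor now calls
# get_search(llm_instance=self.model) itself instead of the old bare get_search().
results = system.analyze_topic("history of the transformer architecture")
```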
local_deep_research/web/routes/settings_routes.py
@@ -349,10 +349,6 @@ def save_all_settings():
                 400,
             )
 
-        # Export settings to file for each type
-        for setting_type in settings_by_type:
-            get_settings_manager(db_session).export_to_file(setting_type)
-
         # Get all settings to return to the client for proper state update
         all_settings = []
         for setting in db_session.query(Setting).all():
@@ -525,11 +521,6 @@ def reset_to_defaults():
 
         setup_settings(db_session)
 
-        # Also export the settings to file for consistency
-        settings_mgr = get_settings_manager(db_session)
-        for setting_type in SettingType:
-            settings_mgr.export_to_file(setting_type)
-
         # Return success
         return jsonify(
             {
local_deep_research/web_search_engines/engines/search_engine_searxng.py
@@ -65,7 +65,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
         # 2. SEARXNG_INSTANCE environment variable
         # 3. instance_url parameter
         # 4. Default to None, which will disable the engine
-        self.instance_url = api_key or os.getenv("SEARXNG_INSTANCE") or instance_url
+        self.instance_url = api_key or os.getenv("SEARXNG_INSTANCE") or instance_url or "http://localhost:8080"
 
         # Add debug logging for instance URL
         logger.info(
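Because of the added fallback, a SearXNG engine created without an API key, environment variable, or explicit `instance_url` now targets `http://localhost:8080` instead of ending up disabled. A rough sketch, assuming a local SearXNG instance is already listening on that port:

```python
from local_deep_research.config.llm_config import get_llm
from local_deep_research.config.search_config import get_search

# No SEARXNG_INSTANCE in the environment and no explicit URL: as of 0.2.2 the
# engine defaults to http://localhost:8080 rather than being disabled.
engine = get_search("searxng", llm_instance=get_llm())
```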
{local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: local-deep-research
-Version: 0.2.0
+Version: 0.2.2
 Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
 Author-Email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
 License: MIT License
@@ -124,7 +124,7 @@ A powerful AI-powered research assistant that performs deep, iterative analysis
 
 ## Windows Installation
 
-Download the [Windows Installer](https://github.com/LearningCircuit/local-deep-research/releases/download/v0.
+Download the [Windows Installer](https://github.com/LearningCircuit/local-deep-research/releases/download/v0.1.0/LocalDeepResearch_Setup.exe) for easy one-click installation.
 
 **Requires Ollama (or other model provider configured in .env).**
 Download from https://ollama.ai and then pull a model
{local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/RECORD
@@ -1,7 +1,7 @@
-local_deep_research-0.2.
-local_deep_research-0.2.
-local_deep_research-0.2.
-local_deep_research-0.2.
+local_deep_research-0.2.2.dist-info/METADATA,sha256=MgFc30qd-f-kk07M_jDRZ7HAq8MzL92pDLxQ34YYQMU,19797
+local_deep_research-0.2.2.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+local_deep_research-0.2.2.dist-info/entry_points.txt,sha256=GcXS501Rjh-P80S8db7hnrQ23mS_Jg27PwpVQVO77as,113
+local_deep_research-0.2.2.dist-info/licenses/LICENSE,sha256=Qg2CaTdu6SWnSqk1_JtgBPp_Da-LdqJDhT1Vt1MUc5s,1072
 local_deep_research/__init__.py,sha256=tczbsYNZQqfPAuVtz6OFyo-uUqjNQLelEIT2G7mPTwA,870
 local_deep_research/__main__.py,sha256=LIxK5iS6aLAKMFBDpUS3V-jDcxchqi3eSUsI2jAZUXk,371
 local_deep_research/advanced_search_system/__init__.py,sha256=sGusMj4eFIrhXR6QbOM16UDKB6aI-iS4IFivKWpMlh0,234
@@ -30,7 +30,7 @@ local_deep_research/advanced_search_system/tools/knowledge_tools/__init__.py,sha
 local_deep_research/advanced_search_system/tools/question_tools/__init__.py,sha256=73jLuCKigwc9lJQ0uD3_F16dgCg4pL-F2cwC6tk9-oc,30
 local_deep_research/advanced_search_system/tools/search_tools/__init__.py,sha256=73jLuCKigwc9lJQ0uD3_F16dgCg4pL-F2cwC6tk9-oc,30
 local_deep_research/api/__init__.py,sha256=-tJQp7Qm1aPg6fgfuw-w9dfNo8GzrJLOy2i3dG8Drl8,441
-local_deep_research/api/research_functions.py,sha256=
+local_deep_research/api/research_functions.py,sha256=8Q_Rzfc0Qj2oLxzvFJIA4ms10uQC0a5SBHkIkSoPcw4,10908
 local_deep_research/app.py,sha256=U_92UX0dpVAQoaXciVNy_By_AyDEWGlXSeTwFpohALQ,155
 local_deep_research/citation_handler.py,sha256=KdfwHqSewPyP2OrxEGu9o15pJtFDYLUsLwOTHkQe8I8,4564
 local_deep_research/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -41,11 +41,11 @@ local_deep_research/defaults/.env.template,sha256=_eVCy4d_XwpGXy8n50CG3wH9xx2oqJ
 local_deep_research/defaults/__init__.py,sha256=C_0t0uZmtrVB4rM9NM9Wx8PJU5kFcT-qOHvws5W2iOg,1352
 local_deep_research/defaults/local_collections.toml,sha256=zNa03PVnFrZ757JdZOuW6QDxkOc6ep5tG8baGBrMmXM,1778
 local_deep_research/defaults/main.toml,sha256=4PfSKHXzPjesdh7IzLprJ_oaCxqum9PvuKEaxr_-iJI,1940
-local_deep_research/defaults/search_engines.toml,sha256=
+local_deep_research/defaults/search_engines.toml,sha256=XBnqCxzFvXa1HoKLcb_Jg4EGXMlgYOw1sm_CicSdYDM,8285
 local_deep_research/main.py,sha256=umGmaQmW7bpx27wUAgSNjNr4oSHV6mDX5hoyfb22HEY,7033
 local_deep_research/migrate_db.py,sha256=S1h6Bv0OJdRW4BaH7MIMrUXBRV_yqgH2T6LVOZKTQjI,4634
-local_deep_research/report_generator.py,sha256=
-local_deep_research/search_system.py,sha256=
+local_deep_research/report_generator.py,sha256=-G3KDEbsuU3PdxDfuo5v28DIX7RE1yJCCBU2KgRbNzI,9084
+local_deep_research/search_system.py,sha256=MqaG435RzllyHlVuT7eCc_wC8_rCA4RLW7F5NDp9kxE,7108
 local_deep_research/setup_data_dir.py,sha256=7MJa2MMdDUnktJVHwMpyNL2079-qylpIyyLpVbF5AUY,1134
 local_deep_research/test_migration.py,sha256=cXY9WbpxLslNEa1vFwLMvcvKBbUe7Wosm--AqmPIPYM,6459
 local_deep_research/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -67,7 +67,7 @@ local_deep_research/web/models/settings.py,sha256=rXBI9vY5k3ndR8dPd3fZJy-6HwYltQ
 local_deep_research/web/routes/api_routes.py,sha256=S0UdCmfm0v1GEM4UiSbI0PE3xUOxiGaYFR2ZOE0256U,19075
 local_deep_research/web/routes/history_routes.py,sha256=6a_8nX349viuvi1zP5S7BaPPpAh133eTi1NVWO545A8,12622
 local_deep_research/web/routes/research_routes.py,sha256=JlzaP1z-7XAP3E0nkEjLIfYj_NKf5qDcrjxBmUouAhM,23492
-local_deep_research/web/routes/settings_routes.py,sha256=
+local_deep_research/web/routes/settings_routes.py,sha256=rEvvFCVWJ80zchnzXBv9SAnDXMvDPLGDjSUfLRlCCi0,60012
 local_deep_research/web/services/research_service.py,sha256=sxvW4oNLiiKgQ8w0SblefzMmk8EEaNNOGd8oC96j85E,39556
 local_deep_research/web/services/resource_service.py,sha256=yKgOC6GEOmHqRoGzwf52e19UaGCCS1DbDbOIXgWGvGc,4378
 local_deep_research/web/services/settings_manager.py,sha256=ybnhSlByuKA2oJPElN2WI8bh-ZzC6lP08x0Gsz8Ycbk,24310
@@ -124,7 +124,7 @@ local_deep_research/web_search_engines/engines/search_engine_guardian.py,sha256=
 local_deep_research/web_search_engines/engines/search_engine_local.py,sha256=7s2qcyslMNiwXQynYjm_9t8nL_MDfue8wuDrBhfpcEg,40506
 local_deep_research/web_search_engines/engines/search_engine_local_all.py,sha256=zg963qnwg8XwUqc9DeBrFaDSEHVr-j7Bv76WhaEuyi8,5785
 local_deep_research/web_search_engines/engines/search_engine_pubmed.py,sha256=O99qfbSz7RHqinAP_C0iod-ZaEGE5tyBbh1DJi2-VhQ,38495
-local_deep_research/web_search_engines/engines/search_engine_searxng.py,sha256=
+local_deep_research/web_search_engines/engines/search_engine_searxng.py,sha256=wPYIl22SgXjIDYzcq62glGH2JZywCnZR31he1i7U1cE,18053
 local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py,sha256=jYs_TRM0izMfldsZ8NkCQsP-o6vCPXUjyxt0nIsxOVI,22799
 local_deep_research/web_search_engines/engines/search_engine_serpapi.py,sha256=OnoYL89WX1qWC6mOosSdgbJ-rXcIFmCVdrd6-qg7xes,8711
 local_deep_research/web_search_engines/engines/search_engine_wayback.py,sha256=rfRs7WJxa-H1DXSyduFHBMfpFwWEVRXLd8s_78iU8gU,17894
@@ -132,4 +132,4 @@ local_deep_research/web_search_engines/engines/search_engine_wikipedia.py,sha256
 local_deep_research/web_search_engines/search_engine_base.py,sha256=PLU_sAWhWKTOQWcv32GINuhLdIwB0sEQy-pp9oG9Ggo,9835
 local_deep_research/web_search_engines/search_engine_factory.py,sha256=mkIf6F-8-aooS47iqb8SanJ9shnl0UOVia8hr2xX0b0,12751
 local_deep_research/web_search_engines/search_engines_config.py,sha256=GmwpCT6vfeq1wrdr1R-zu6WRQ5XxyE7921HPsgGm3gI,2771
-local_deep_research-0.2.
+local_deep_research-0.2.2.dist-info/RECORD,,
Renamed only, file contents unchanged:
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/WHEEL
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/entry_points.txt
- {local_deep_research-0.2.0.dist-info → local_deep_research-0.2.2.dist-info}/licenses/LICENSE