local-deep-research 0.3.10__py3-none-any.whl → 0.3.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- __version__ = "0.3.10"
1
+ __version__ = "0.3.12"
@@ -9,6 +9,7 @@ from langchain_openai import ChatOpenAI
9
9
 
10
10
  from ..utilities.db_utils import get_db_setting
11
11
  from ..utilities.search_utilities import remove_think_tags
12
+ from ..utilities.url_utils import normalize_url
12
13
 
13
14
  # Setup logging
14
15
  logger = logging.getLogger(__name__)
@@ -141,7 +142,12 @@ def get_llm(model_name=None, temperature=None, provider=None, openai_endpoint_ur
141
142
  elif provider == "ollama":
142
143
  try:
143
144
  # Use the configurable Ollama base URL
144
- base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
145
+ raw_base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
146
+ base_url = (
147
+ normalize_url(raw_base_url)
148
+ if raw_base_url
149
+ else "http://localhost:11434"
150
+ )
145
151
 
146
152
  # Check if Ollama is available before trying to use it
147
153
  if not is_ollama_available():
@@ -371,7 +377,10 @@ def is_ollama_available():
371
377
  try:
372
378
  import requests
373
379
 
374
- base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
380
+ raw_base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
381
+ base_url = (
382
+ normalize_url(raw_base_url) if raw_base_url else "http://localhost:11434"
383
+ )
375
384
  logger.info(f"Checking Ollama availability at {base_url}/api/tags")
376
385
 
377
386
  try:
@@ -26,7 +26,7 @@ def get_search(search_tool=None, llm_instance=None):
26
26
  """
27
27
 
28
28
  # Use specified tool or default from settings
29
- tool = search_tool or get_db_setting("search.tool", "auto")
29
+ tool = search_tool or get_db_setting("search.tool", "searxng")
30
30
  logger.info(f"Creating search engine with tool: {tool}")
31
31
 
32
32
  # Get LLM instance (use provided or get fresh one)
@@ -733,7 +733,7 @@
733
733
  "step": null,
734
734
  "type": "SEARCH",
735
735
  "ui_element": "select",
736
- "value": "auto",
736
+ "value": "searxng",
737
737
  "visible": true
738
738
  },
739
739
  "search.engine.web.arxiv.display_name": {
@@ -3029,16 +3029,29 @@
3029
3029
  },
3030
3030
  "search.engine.web.searxng.default_params.safe_search": {
3031
3031
  "category": "searxng",
3032
- "description": "Setting for searxng.default_params.safe_search",
3032
+ "description": "Configure the safe search level",
3033
3033
  "editable": true,
3034
3034
  "max_value": null,
3035
3035
  "min_value": null,
3036
3036
  "name": "Safe Search",
3037
- "options": null,
3037
+ "options": [
3038
+ {
3039
+ "label": "Off",
3040
+ "value": "OFF"
3041
+ },
3042
+ {
3043
+ "label": "Moderate",
3044
+ "value": "MODERATE"
3045
+ },
3046
+ {
3047
+ "label": "Strict",
3048
+ "value": "STRICT"
3049
+ }
3050
+ ],
3038
3051
  "step": null,
3039
3052
  "type": "SEARCH",
3040
- "ui_element": "checkbox",
3041
- "value": 1,
3053
+ "ui_element": "select",
3054
+ "value": "OFF",
3042
3055
  "visible": true
3043
3056
  },
3044
3057
  "search.engine.web.searxng.full_search_class": {
@@ -0,0 +1,57 @@
1
+ """URL utility functions for the local deep research application."""
2
+
3
+ import logging
4
+
5
+ logger = logging.getLogger(__name__)
6
+
7
+
8
def normalize_url(raw_url: str) -> str:
    """
    Normalize a URL to ensure it has a proper scheme and format.

    Args:
        raw_url: The raw URL string to normalize

    Returns:
        A properly formatted URL string

    Raises:
        ValueError: If raw_url is empty or None.

    Examples:
        >>> normalize_url("localhost:11434")
        'http://localhost:11434'
        >>> normalize_url("https://example.com:11434")
        'https://example.com:11434'
        >>> normalize_url("http:example.com")
        'http://example.com'
    """
    if not raw_url:
        raise ValueError("URL cannot be empty")

    # Clean up the URL
    raw_url = raw_url.strip()

    # Already well-formed: nothing to do.
    if raw_url.startswith(("http://", "https://")):
        return raw_url

    # Repair a malformed scheme like "http:hostname" (missing "//").
    # The well-formed case returned above, so no extra guard is needed.
    if raw_url.startswith(("http:", "https:")):
        scheme, _, rest = raw_url.partition(":")
        return f"{scheme}://{rest}"

    # Scheme-relative URLs ("//host:port"): drop the slashes and fall
    # through to scheme selection below.
    if raw_url.startswith("//"):
        raw_url = raw_url[2:]

    # At this point we should have hostname[:port][/path].
    # Bracketed IPv6 literals ("[::1]:11434") must be handled before
    # splitting on ":", since the address itself contains colons.
    if raw_url.startswith("["):
        closing = raw_url.find("]")
        hostname = raw_url[: closing + 1] if closing != -1 else raw_url
    else:
        hostname = raw_url.split(":")[0].split("/")[0]

    # Use plain http for loopback/local hosts, https for external hosts.
    is_localhost = hostname in ("localhost", "127.0.0.1", "[::1]", "0.0.0.0")
    scheme = "http" if is_localhost else "https"

    return f"{scheme}://{raw_url}"
@@ -1,10 +1,10 @@
1
1
  import json
2
2
  import logging
3
- import os
4
3
 
5
4
  import requests
6
5
  from flask import Blueprint, current_app, jsonify, request
7
6
 
7
+ from ...utilities.url_utils import normalize_url
8
8
  from ..models.database import get_db_connection
9
9
  from ..routes.research_routes import active_research, termination_flags
10
10
  from ..services.research_service import (
@@ -49,7 +49,7 @@ def api_start_research():
49
49
  research_settings = {
50
50
  "model_provider": "OLLAMA", # Default
51
51
  "model": "llama2", # Default
52
- "search_engine": "auto", # Default
52
+ "search_engine": "searxng", # Default
53
53
  }
54
54
 
55
55
  cursor.execute(
@@ -255,10 +255,14 @@ def check_ollama_status():
255
255
  {"running": True, "message": f"Using provider: {provider}, not Ollama"}
256
256
  )
257
257
 
258
- # Get Ollama API URL
259
- ollama_base_url = os.getenv(
260
- "OLLAMA_BASE_URL",
261
- llm_config.get("ollama_base_url", "http://localhost:11434"),
258
+ # Get Ollama API URL from LLM config
259
+ raw_ollama_base_url = llm_config.get(
260
+ "ollama_base_url", "http://localhost:11434"
261
+ )
262
+ ollama_base_url = (
263
+ normalize_url(raw_ollama_base_url)
264
+ if raw_ollama_base_url
265
+ else "http://localhost:11434"
262
266
  )
263
267
 
264
268
  logger.info(f"Checking Ollama status at: {ollama_base_url}")
@@ -380,9 +384,14 @@ def check_ollama_model():
380
384
  # Log which model we're checking for debugging
381
385
  logger.info(f"Checking availability of Ollama model: {model_name}")
382
386
 
383
- ollama_base_url = os.getenv(
384
- "OLLAMA_BASE_URL",
385
- llm_config.get("ollama_base_url", "http://localhost:11434"),
387
+ # Get Ollama API URL from LLM config
388
+ raw_ollama_base_url = llm_config.get(
389
+ "ollama_base_url", "http://localhost:11434"
390
+ )
391
+ ollama_base_url = (
392
+ normalize_url(raw_ollama_base_url)
393
+ if raw_ollama_base_url
394
+ else "http://localhost:11434"
386
395
  )
387
396
 
388
397
  # Check if the model is available
@@ -19,6 +19,7 @@ from flask_wtf.csrf import generate_csrf
19
19
  from sqlalchemy.orm import Session
20
20
 
21
21
  from ...utilities.db_utils import get_db_setting
22
+ from ...utilities.url_utils import normalize_url
22
23
  from ..database.models import Setting, SettingType
23
24
  from ..services.settings_service import (
24
25
  create_or_update_setting,
@@ -667,7 +668,15 @@ def api_get_available_models():
667
668
  try:
668
669
  current_app.logger.info("Attempting to connect to Ollama API")
669
670
 
670
- base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
671
+ raw_base_url = get_db_setting(
672
+ "llm.ollama.url", "http://localhost:11434"
673
+ )
674
+ base_url = (
675
+ normalize_url(raw_base_url)
676
+ if raw_base_url
677
+ else "http://localhost:11434"
678
+ )
679
+
671
680
  ollama_response = requests.get(f"{base_url}/api/tags", timeout=5)
672
681
 
673
682
  current_app.logger.debug(
@@ -1269,11 +1278,12 @@ def fix_corrupted_settings():
1269
1278
  def check_ollama_status():
1270
1279
  """Check if Ollama is running and available"""
1271
1280
  try:
1272
- # Set a shorter timeout for the request
1273
- base_url = os.getenv(
1274
- "OLLAMA_BASE_URL",
1275
- "http://localhost:11434",
1281
+ # Get Ollama URL from settings
1282
+ raw_base_url = get_db_setting("llm.ollama.url", "http://localhost:11434")
1283
+ base_url = (
1284
+ normalize_url(raw_base_url) if raw_base_url else "http://localhost:11434"
1276
1285
  )
1286
+
1277
1287
  response = requests.get(f"{base_url}/api/version", timeout=2.0)
1278
1288
 
1279
1289
  if response.status_code == 200:
@@ -530,7 +530,7 @@ def run_research_process(
530
530
  report_path = os.path.join(
531
531
  OUTPUT_DIR,
532
532
  f"quick_summary_{safe_query}_"
533
- f"{datetime.now().isoformat()}.md",
533
+ f"{int(datetime.now().timestamp())}.md",
534
534
  )
535
535
 
536
536
  # Send progress update for writing to file
@@ -643,7 +643,7 @@ def run_research_process(
643
643
  safe_query = safe_query.replace(" ", "_").lower()
644
644
  report_path = os.path.join(
645
645
  OUTPUT_DIR,
646
- f"detailed_report_{safe_query}_{datetime.now().isoformat()}.md",
646
+ f"detailed_report_{safe_query}_{int(datetime.now().timestamp())}.md",
647
647
  )
648
648
 
649
649
  with open(report_path, "w", encoding="utf-8") as f:
@@ -25,7 +25,7 @@ class FullSearchResults:
25
25
  max_results: int = 10,
26
26
  region: str = "wt-wt",
27
27
  time: str = "y",
28
- safesearch: str = "Moderate",
28
+ safesearch: str | int = "Moderate",
29
29
  ):
30
30
  self.llm = llm
31
31
  self.output_format = output_format
@@ -32,6 +32,7 @@ from langchain_text_splitters import RecursiveCharacterTextSplitter
32
32
 
33
33
  from ...config import search_config
34
34
  from ...utilities.db_utils import get_db_setting
35
+ from ...utilities.url_utils import normalize_url
35
36
  from ..search_engine_base import BaseSearchEngine
36
37
 
37
38
  # Setup logging
@@ -169,12 +170,20 @@ class LocalEmbeddingManager:
169
170
  if self.embedding_model_type == "ollama":
170
171
  # Use Ollama for embeddings
171
172
  if not self.ollama_base_url:
172
- self.ollama_base_url = get_db_setting(
173
+ raw_ollama_base_url = get_db_setting(
173
174
  "llm.ollama.url", "http://localhost:11434"
174
175
  )
176
+ self.ollama_base_url = (
177
+ normalize_url(raw_ollama_base_url)
178
+ if raw_ollama_base_url
179
+ else "http://localhost:11434"
180
+ )
181
+ else:
182
+ # Ensure scheme is present if ollama_base_url was passed in constructor
183
+ self.ollama_base_url = normalize_url(self.ollama_base_url)
175
184
 
176
185
  logger.info(
177
- f"Initializing Ollama embeddings with model {self.embedding_model}"
186
+ f"Initializing Ollama embeddings with model {self.embedding_model} and base_url {self.ollama_base_url}"
178
187
  )
179
188
  return OllamaEmbeddings(
180
189
  model=self.embedding_model, base_url=self.ollama_base_url
@@ -563,7 +572,7 @@ class LocalEmbeddingManager:
563
572
  str(index_path),
564
573
  self.embeddings,
565
574
  allow_dangerous_deserialization=True,
566
- nomalize_L2=True,
575
+ normalize_L2=True,
567
576
  )
568
577
  except Exception as e:
569
578
  logger.error(f"Error loading index for {folder_path}: {e}")
@@ -1,3 +1,4 @@
1
+ import enum
1
2
  import logging
2
3
  import os
3
4
  import time
@@ -15,6 +16,17 @@ logging.basicConfig(level=logging.INFO)
15
16
  logger = logging.getLogger(__name__)
16
17
 
17
18
 
19
@enum.unique
class SafeSearchSetting(enum.IntEnum):
    """
    Acceptable settings for safe search.

    The integer value of each member is what gets sent to SearXNG as the
    ``safesearch`` request parameter (0 = off, 1 = moderate, 2 = strict).
    """

    OFF = 0  # no filtering
    MODERATE = 1  # default-style filtering
    STRICT = 2  # strongest filtering
28
+
29
+
18
30
  class SearXNGSearchEngine(BaseSearchEngine):
19
31
  """
20
32
  SearXNG search engine implementation that requires an instance URL provided via
@@ -29,7 +41,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
29
41
  categories: Optional[List[str]] = None,
30
42
  engines: Optional[List[str]] = None,
31
43
  language: str = "en",
32
- safe_search: int = 1,
44
+ safe_search: str = SafeSearchSetting.OFF.name,
33
45
  time_range: Optional[str] = None,
34
46
  delay_between_requests: float = 0.0,
35
47
  llm: Optional[BaseLLM] = None,
@@ -89,7 +101,14 @@ class SearXNGSearchEngine(BaseSearchEngine):
89
101
  self.categories = categories or ["general"]
90
102
  self.engines = engines
91
103
  self.language = language
92
- self.safe_search = safe_search
104
+ try:
105
+ self.safe_search = SafeSearchSetting[safe_search]
106
+ except ValueError:
107
+ logger.error(
108
+ "'{}' is not a valid safe search setting. Disabling safe search",
109
+ safe_search,
110
+ )
111
+ self.safe_search = SafeSearchSetting.OFF
93
112
  self.time_range = time_range
94
113
 
95
114
  self.delay_between_requests = float(
@@ -114,11 +133,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
114
133
  max_results=max_results,
115
134
  region="wt-wt",
116
135
  time="y",
117
- safesearch=(
118
- "Moderate"
119
- if safe_search == 1
120
- else "Off" if safe_search == 0 else "Strict"
121
- ),
136
+ safesearch=self.safe_search.value,
122
137
  )
123
138
 
124
139
  self.last_request_time = 0
@@ -177,7 +192,7 @@ class SearXNGSearchEngine(BaseSearchEngine):
177
192
  "language": self.language,
178
193
  "format": "html", # Use HTML format instead of JSON
179
194
  "pageno": 1,
180
- "safesearch": self.safe_search,
195
+ "safesearch": self.safe_search.value,
181
196
  "count": self.max_results,
182
197
  }
183
198
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: local-deep-research
3
- Version: 0.3.10
3
+ Version: 0.3.12
4
4
  Summary: AI-powered research assistant with deep, iterative analysis using LLMs and web searches
5
5
  Author-Email: LearningCircuit <185559241+LearningCircuit@users.noreply.github.com>, HashedViking <6432677+HashedViking@users.noreply.github.com>
6
6
  License: MIT License
@@ -106,7 +106,7 @@ Local Deep Research combines the power of large language models with intelligent
106
106
 
107
107
  ## ⚡ Quick Start
108
108
 
109
- ### Option 1: Docker (Recommended)
109
+ ### Option 1: Docker (Quickstart — not for Mac/ARM)
110
110
 
111
111
  ```bash
112
112
  # Step 1: Pull and run SearXNG for optimal search results
@@ -115,29 +115,54 @@ docker run -d -p 8080:8080 --name searxng searxng/searxng
115
115
 
116
116
  # Step 2: Pull and run Local Deep Research (Please build your own docker on ARM)
117
117
  docker pull localdeepresearch/local-deep-research
118
- docker run -d -p 5000:5000 --name local-deep-research localdeepresearch/local-deep-research
118
+ docker run -d -p 5000:5000 --network host --name local-deep-research localdeepresearch/local-deep-research
119
119
 
120
- # Optional 3a: For connecting to already installed local Ollama (https://ollama.com/download) or other local services
121
- # docker run -d -p 5000:5000 --network host --name local-deep-research localdeepresearch/local-deep-research
122
-
123
- # Optional 3b (recommended): Pull and run Ollama for local LLM capabilities
124
- # docker pull ollama/ollama
125
- # docker run -d -p 11434:11434 --name ollama ollama/ollama
126
- # docker exec -it ollama ollama pull gemma3:12b
127
-
128
- # Start containers - Required after each reboot (can be automated with this flag --restart unless-stopped in run)
120
+ # Start containers - Required after each reboot (can be automated with this flag in run command --restart unless-stopped)
129
121
  docker start searxng
130
122
  docker start local-deep-research
131
- # docker start ollama
123
+
124
+ ```
125
+
126
+ ### Option 2: Docker Compose (Recommended)
127
+
128
+ LDR uses Docker compose to bundle the web app and all its dependencies so
129
+ you can get up and running quickly.
130
+
131
+ ### Prerequisites
132
+
133
+ - [Docker](https://docs.docker.com/engine/install/)
134
+ - [Docker Compose](https://docs.docker.com/compose/install/)
135
+ - `cookiecutter`: Run `pip install --user cookiecutter`
136
+
137
+ Clone the repository:
138
+
139
+ ```bash
140
+ git clone https://github.com/LearningCircuit/local-deep-research.git
141
+ cd local-deep-research
142
+ ```
143
+
144
+ ### Configuring with Docker Compose
145
+
146
+ In the LDR repository, run the following command
147
+ to generate the compose file:
148
+
149
+ ```bash
150
+ cookiecutter cookiecutter-docker/
151
+ ```
152
+
153
+ This will prompt you to answer a series of questions. Hit Enter repeatedly
154
+ to accept the default values. It should generate a file in the repository called `docker-compose.default.yml`. To run LDR, use the following command:
155
+
156
+ ```bash
157
+ docker compose -f docker-compose.default.yml up
132
158
  ```
133
159
 
134
160
  Then visit `http://127.0.0.1:5000` to start researching!
135
161
 
136
- > **Note**: If you need to connect to local services (like Ollama), add `--network host` to the command.
137
- >
138
- > **Don't have Docker? It's installed in a few clicks: [Install Docker here](https://www.docker.com/get-started/)**
162
+ See [here](https://github.com/LearningCircuit/local-deep-research/wiki/Installation#docker-installation-recommended) for more information about
163
+ using Docker.
139
164
 
140
- ### Option 2: Python Package (mostly for programmatic access)
165
+ ### Option 3: Python Package (mostly for programmatic access)
141
166
 
142
167
  ```bash
143
168
  # Install the package
@@ -1,9 +1,9 @@
1
- local_deep_research-0.3.10.dist-info/METADATA,sha256=qefTHFrq0UMfrQ3lsj77XlOvdLYM8_Exm1boN_IJAOk,16667
2
- local_deep_research-0.3.10.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
3
- local_deep_research-0.3.10.dist-info/entry_points.txt,sha256=GcXS501Rjh-P80S8db7hnrQ23mS_Jg27PwpVQVO77as,113
4
- local_deep_research-0.3.10.dist-info/licenses/LICENSE,sha256=Qg2CaTdu6SWnSqk1_JtgBPp_Da-LdqJDhT1Vt1MUc5s,1072
1
+ local_deep_research-0.3.12.dist-info/METADATA,sha256=uKHXsYHDDDmmrMp_LyUo6xStbKJYfoACmBwA0_kTX8s,17101
2
+ local_deep_research-0.3.12.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
3
+ local_deep_research-0.3.12.dist-info/entry_points.txt,sha256=GcXS501Rjh-P80S8db7hnrQ23mS_Jg27PwpVQVO77as,113
4
+ local_deep_research-0.3.12.dist-info/licenses/LICENSE,sha256=Qg2CaTdu6SWnSqk1_JtgBPp_Da-LdqJDhT1Vt1MUc5s,1072
5
5
  local_deep_research/__init__.py,sha256=9wV3oonZMEHsE_JhyZU9P0hW2Uwv47zotGlbAB_gQiA,885
6
- local_deep_research/__version__.py,sha256=h9TycTJK2pK49s87IMbNRq4lTqRt3xctcJl2jxCe3sU,23
6
+ local_deep_research/__version__.py,sha256=uzDtUmN3o6XGGV0Pkoi7vJCijlJJCOYCmLl8900CcXg,23
7
7
  local_deep_research/advanced_search_system/__init__.py,sha256=sGusMj4eFIrhXR6QbOM16UDKB6aI-iS4IFivKWpMlh0,234
8
8
  local_deep_research/advanced_search_system/filters/__init__.py,sha256=2dXrV4skcVHI2Lb3BSL2Ajq0rnLeSw7kc1MbIynMxa4,190
9
9
  local_deep_research/advanced_search_system/filters/base_filter.py,sha256=dFNQ7U2dj4bf3voT73YhcG-w9eW-BTlc4F9kstFcETY,969
@@ -35,11 +35,11 @@ local_deep_research/api/research_functions.py,sha256=SItLEuib94AXrhMsgmYDtykGrVm
35
35
  local_deep_research/app.py,sha256=U_92UX0dpVAQoaXciVNy_By_AyDEWGlXSeTwFpohALQ,155
36
36
  local_deep_research/citation_handler.py,sha256=MZVd6xl7g3xrWauFBPuVIC36z8onc-zQb8xI4dQXxsU,4307
37
37
  local_deep_research/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
38
- local_deep_research/config/llm_config.py,sha256=bYxhwyjkdBlap832aWvWgHWjjPq45Oh2NtDOLJFf4go,15560
39
- local_deep_research/config/search_config.py,sha256=ruryPSS4Wy9-xi_02c-98KLKaELeLnZ10pnCpc0-ogg,2171
38
+ local_deep_research/config/llm_config.py,sha256=swUQF_YgZnXZv0TcA0SVnrbwqKWxyvMmpXSLEVf4dTA,15894
39
+ local_deep_research/config/search_config.py,sha256=JpG2kMj67IiowComaJE5b5hK-Ze-clvoequccUWuTUo,2174
40
40
  local_deep_research/defaults/.env.template,sha256=_eVCy4d_XwpGXy8n50CG3wH9xx2oqJCFKS7IbqgInDk,491
41
41
  local_deep_research/defaults/__init__.py,sha256=C_0t0uZmtrVB4rM9NM9Wx8PJU5kFcT-qOHvws5W2iOg,1352
42
- local_deep_research/defaults/default_settings.json,sha256=OcRS16WAP4zKvU0UAbXlvBcitt0wv_Z7hq93x1OZBdA,120559
42
+ local_deep_research/defaults/default_settings.json,sha256=kVcfmw4n9eQLVLqf1CSdSehacMaKpeEpFmQ7TgpKzzk,120847
43
43
  local_deep_research/migrate_db.py,sha256=S1h6Bv0OJdRW4BaH7MIMrUXBRV_yqgH2T6LVOZKTQjI,4634
44
44
  local_deep_research/report_generator.py,sha256=-G3KDEbsuU3PdxDfuo5v28DIX7RE1yJCCBU2KgRbNzI,9084
45
45
  local_deep_research/search_system.py,sha256=dq9US9zoB7TSiMorsrFFrSHlR6MSqE0IP3NBKB3fP8U,7830
@@ -51,6 +51,7 @@ local_deep_research/utilities/enums.py,sha256=yFwmodt93uETdQd7qyW4vOUhiAzZF-BHBb
51
51
  local_deep_research/utilities/llm_utils.py,sha256=1O8faskPSnyave15cxOVXQcdcFrDybQA445m0OjnD9g,4877
52
52
  local_deep_research/utilities/search_utilities.py,sha256=k4Eag-XJOZl4cRd4GO1z2WpvVPLLhZ-HuBD7RswhaQM,9604
53
53
  local_deep_research/utilities/setup_utils.py,sha256=0Us6doQ6xQtKzgtnN1C4L7sSgxzFKJ35FpmZdj1tCDM,213
54
+ local_deep_research/utilities/url_utils.py,sha256=8tvLoar3Mq7xHFM_b5kp8Nf2Kdykz4C7wTejjUcg4vA,1714
54
55
  local_deep_research/web/__init__.py,sha256=CynnuRxCf9mB0AHeylhF5yVZwdj0H6bxVrZtniw3xmE,44
55
56
  local_deep_research/web/app.py,sha256=tl3PAf41NBYYGq0oeIKAZw_hVSB8JmjyF8jzvYPeAU0,3414
56
57
  local_deep_research/web/app_factory.py,sha256=rk5QCwpxrub6ptKsvjSDTNtq8d2RWbKo2LZLvL__fPo,8107
@@ -61,11 +62,11 @@ local_deep_research/web/database/models.py,sha256=MIxYWIQIovkrR65rFMxlDXn2iZXf5S
61
62
  local_deep_research/web/database/schema_upgrade.py,sha256=u3tx_tlsuzJw-NhvhKvQG6dqzwUckQkic0D3taZAt-k,2924
62
63
  local_deep_research/web/models/database.py,sha256=NV4h0RU0ta408SkI_ZmquCplMTNH1Q4zxYJ4SHwmiGY,9489
63
64
  local_deep_research/web/models/settings.py,sha256=rXBI9vY5k3ndR8dPd3fZJy-6HwYltQihhSBRq-sZutw,2314
64
- local_deep_research/web/routes/api_routes.py,sha256=S0UdCmfm0v1GEM4UiSbI0PE3xUOxiGaYFR2ZOE0256U,19075
65
+ local_deep_research/web/routes/api_routes.py,sha256=7eWX5igAxtLJ2vcfcyRsJjApsdYX6zjpIp-ukthgf8M,19424
65
66
  local_deep_research/web/routes/history_routes.py,sha256=6a_8nX349viuvi1zP5S7BaPPpAh133eTi1NVWO545A8,12622
66
67
  local_deep_research/web/routes/research_routes.py,sha256=zSU21oAkZnADnuhJniShd8US8hpPDiYqQxUhalJwQeU,23685
67
- local_deep_research/web/routes/settings_routes.py,sha256=fkYLwDgcHfiHVml3ux6qCc5qFMjfnKfPcwisqhg995s,49280
68
- local_deep_research/web/services/research_service.py,sha256=RyZ4cCePV9n--wm-8-c0wpLGwA1aQIiuTpnRlLuU8-I,39646
68
+ local_deep_research/web/routes/settings_routes.py,sha256=uvDEGQ4TtgGhoQBfz_Zbcd-tv2Rq_1hL3j0Jx_novJM,49635
69
+ local_deep_research/web/services/research_service.py,sha256=gsveymwt4hKKwVGU2XqFYT27GeEsmSU7EOWrvDsUtnk,39656
69
70
  local_deep_research/web/services/resource_service.py,sha256=yKgOC6GEOmHqRoGzwf52e19UaGCCS1DbDbOIXgWGvGc,4378
70
71
  local_deep_research/web/services/settings_manager.py,sha256=CHz_nd49BVRJiLALAjTHfmkKNy_Vr3ogCm5P-_633bk,17281
71
72
  local_deep_research/web/services/settings_service.py,sha256=SgmjhMvGZjJE63hKKaqY7kPGphnUyXcQG8NFN5rTizs,3550
@@ -112,7 +113,7 @@ local_deep_research/web/utils/formatters.py,sha256=Gj_a0oFveNXHtvkiFe1rwlEtzYerM
112
113
  local_deep_research/web/utils/templates.py,sha256=scBPbjUJqaFltFX37ZLsdcgPycPY7kMSew5mZWCG1H0,535
113
114
  local_deep_research/web_search_engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
114
115
  local_deep_research/web_search_engines/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
115
- local_deep_research/web_search_engines/engines/full_search.py,sha256=6Pi_wj9oAtDHAyLsIbWGBeS8QBv6yCJEJ87LN68Cp-k,4703
116
+ local_deep_research/web_search_engines/engines/full_search.py,sha256=GoBWvFh1eJS6esGNuFoMbq7GHaDMI_pqKX2RY136IXo,4709
116
117
  local_deep_research/web_search_engines/engines/meta_search_engine.py,sha256=qUFl8yw5l7sfH-BRpXXrNQ2KrQ9LsaslhG1glb2AOIM,14715
117
118
  local_deep_research/web_search_engines/engines/search_engine_arxiv.py,sha256=3k8R4pyqIZf0RDMqXDw08xIGsfkp4ZR9kePDbmeuaH0,16603
118
119
  local_deep_research/web_search_engines/engines/search_engine_brave.py,sha256=y1j4CSLM0Ujw1LSBiWg1ZBnc2BvrkhDCorrQLnUBVtM,9149
@@ -120,10 +121,10 @@ local_deep_research/web_search_engines/engines/search_engine_ddg.py,sha256=w9vRD
120
121
  local_deep_research/web_search_engines/engines/search_engine_github.py,sha256=bojmx-R36eT_s20DGspAopkwqt6vKy4q_jH4foBt3Kk,31934
121
122
  local_deep_research/web_search_engines/engines/search_engine_google_pse.py,sha256=DuFtSUZgBR7nFBLZrbFMEuG-Rnv0cb-upHeGSDo7xRY,11177
122
123
  local_deep_research/web_search_engines/engines/search_engine_guardian.py,sha256=bNCppKJNNvkmw-LR5vfpRABhdhsUwOJqpcRHVjcziNU,23390
123
- local_deep_research/web_search_engines/engines/search_engine_local.py,sha256=ephjkDrQbvil6GnceW31qSt70k11REOJ9o7y-bl69-A,40857
124
+ local_deep_research/web_search_engines/engines/search_engine_local.py,sha256=qTXfWqdqTq_MJXwVVuSM45sQoFcoXyrwEsJ97pbpVVY,41362
124
125
  local_deep_research/web_search_engines/engines/search_engine_local_all.py,sha256=vznpusmCBY9bLjD8EPrVhCb_8RZ8e9Wa8x386zv0pcM,5681
125
126
  local_deep_research/web_search_engines/engines/search_engine_pubmed.py,sha256=O99qfbSz7RHqinAP_C0iod-ZaEGE5tyBbh1DJi2-VhQ,38495
126
- local_deep_research/web_search_engines/engines/search_engine_searxng.py,sha256=LjArsD5ICgfsaFupF3O31oqb60ONgwqwWu-UDt7eA68,17710
127
+ local_deep_research/web_search_engines/engines/search_engine_searxng.py,sha256=viCD1CVSym8als_o7LHbzwYlJ4jQIUPmmCxcXLjW4P4,18043
127
128
  local_deep_research/web_search_engines/engines/search_engine_semantic_scholar.py,sha256=jYs_TRM0izMfldsZ8NkCQsP-o6vCPXUjyxt0nIsxOVI,22799
128
129
  local_deep_research/web_search_engines/engines/search_engine_serpapi.py,sha256=OnoYL89WX1qWC6mOosSdgbJ-rXcIFmCVdrd6-qg7xes,8711
129
130
  local_deep_research/web_search_engines/engines/search_engine_wayback.py,sha256=rfRs7WJxa-H1DXSyduFHBMfpFwWEVRXLd8s_78iU8gU,17894
@@ -131,4 +132,4 @@ local_deep_research/web_search_engines/engines/search_engine_wikipedia.py,sha256
131
132
  local_deep_research/web_search_engines/search_engine_base.py,sha256=PLU_sAWhWKTOQWcv32GINuhLdIwB0sEQy-pp9oG9Ggo,9835
132
133
  local_deep_research/web_search_engines/search_engine_factory.py,sha256=DghAkQvLKRJYl5xb9AUjUv7ydAQ4rPi-TvzrmqdyGxE,10890
133
134
  local_deep_research/web_search_engines/search_engines_config.py,sha256=UAE6TfxFXrt-RvSfGQ_FRsOGGrsSs8VI3n1i-0Lfo2s,4929
134
- local_deep_research-0.3.10.dist-info/RECORD,,
135
+ local_deep_research-0.3.12.dist-info/RECORD,,