webscout-7.7-py3-none-any.whl → webscout-7.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- webscout/AIutel.py +2 -1
- webscout/Bard.py +12 -29
- webscout/DWEBS.py +477 -461
- webscout/Extra/__init__.py +2 -0
- webscout/Extra/autocoder/__init__.py +9 -9
- webscout/Extra/autocoder/{rawdog.py → autocoder.py} +849 -790
- webscout/Extra/autocoder/autocoder_utiles.py +332 -194
- webscout/Extra/gguf.py +682 -682
- webscout/Extra/tempmail/__init__.py +26 -0
- webscout/Extra/tempmail/async_utils.py +141 -0
- webscout/Extra/tempmail/base.py +156 -0
- webscout/Extra/tempmail/cli.py +187 -0
- webscout/Extra/tempmail/mail_tm.py +361 -0
- webscout/Extra/tempmail/temp_mail_io.py +292 -0
- webscout/Provider/AI21.py +1 -1
- webscout/Provider/AISEARCH/DeepFind.py +2 -2
- webscout/Provider/AISEARCH/ISou.py +2 -2
- webscout/Provider/AISEARCH/felo_search.py +6 -6
- webscout/Provider/AISEARCH/genspark_search.py +1 -1
- webscout/Provider/Aitopia.py +292 -0
- webscout/Provider/AllenAI.py +1 -1
- webscout/Provider/Andi.py +3 -3
- webscout/Provider/C4ai.py +1 -1
- webscout/Provider/ChatGPTES.py +3 -5
- webscout/Provider/ChatGPTGratis.py +4 -4
- webscout/Provider/Chatify.py +2 -2
- webscout/Provider/Cloudflare.py +3 -2
- webscout/Provider/DeepSeek.py +2 -2
- webscout/Provider/Deepinfra.py +288 -286
- webscout/Provider/ElectronHub.py +709 -634
- webscout/Provider/ExaChat.py +325 -0
- webscout/Provider/Free2GPT.py +2 -2
- webscout/Provider/Gemini.py +167 -179
- webscout/Provider/GithubChat.py +1 -1
- webscout/Provider/Glider.py +4 -4
- webscout/Provider/Groq.py +41 -27
- webscout/Provider/HF_space/qwen_qwen2.py +1 -1
- webscout/Provider/HeckAI.py +1 -1
- webscout/Provider/HuggingFaceChat.py +1 -1
- webscout/Provider/Hunyuan.py +1 -1
- webscout/Provider/Jadve.py +3 -3
- webscout/Provider/Koboldai.py +3 -3
- webscout/Provider/LambdaChat.py +3 -2
- webscout/Provider/Llama.py +3 -5
- webscout/Provider/Llama3.py +4 -12
- webscout/Provider/Marcus.py +3 -3
- webscout/Provider/OLLAMA.py +8 -8
- webscout/Provider/Openai.py +7 -3
- webscout/Provider/PI.py +1 -1
- webscout/Provider/Perplexitylabs.py +1 -1
- webscout/Provider/Phind.py +1 -1
- webscout/Provider/PizzaGPT.py +1 -1
- webscout/Provider/QwenLM.py +4 -7
- webscout/Provider/TTI/FreeAIPlayground/async_freeaiplayground.py +3 -1
- webscout/Provider/TTI/FreeAIPlayground/sync_freeaiplayground.py +3 -3
- webscout/Provider/TTI/ImgSys/__init__.py +23 -0
- webscout/Provider/TTI/ImgSys/async_imgsys.py +202 -0
- webscout/Provider/TTI/ImgSys/sync_imgsys.py +195 -0
- webscout/Provider/TTI/__init__.py +3 -1
- webscout/Provider/TTI/artbit/async_artbit.py +1 -1
- webscout/Provider/TTI/artbit/sync_artbit.py +1 -1
- webscout/Provider/TTI/huggingface/async_huggingface.py +1 -1
- webscout/Provider/TTI/huggingface/sync_huggingface.py +1 -1
- webscout/Provider/TTI/piclumen/__init__.py +22 -22
- webscout/Provider/TTI/piclumen/sync_piclumen.py +232 -232
- webscout/Provider/TTI/pixelmuse/__init__.py +4 -0
- webscout/Provider/TTI/pixelmuse/async_pixelmuse.py +249 -0
- webscout/Provider/TTI/pixelmuse/sync_pixelmuse.py +182 -0
- webscout/Provider/TTI/talkai/sync_talkai.py +1 -1
- webscout/Provider/TTS/utils.py +1 -1
- webscout/Provider/TeachAnything.py +1 -1
- webscout/Provider/TextPollinationsAI.py +232 -230
- webscout/Provider/TwoAI.py +1 -2
- webscout/Provider/Venice.py +4 -2
- webscout/Provider/VercelAI.py +234 -0
- webscout/Provider/WebSim.py +3 -2
- webscout/Provider/WiseCat.py +10 -12
- webscout/Provider/Youchat.py +1 -1
- webscout/Provider/__init__.py +10 -4
- webscout/Provider/ai4chat.py +1 -1
- webscout/Provider/aimathgpt.py +2 -6
- webscout/Provider/akashgpt.py +1 -1
- webscout/Provider/askmyai.py +4 -4
- webscout/Provider/{DARKAI.py → asksteve.py} +56 -77
- webscout/Provider/bagoodex.py +2 -2
- webscout/Provider/cerebras.py +1 -1
- webscout/Provider/chatglm.py +4 -4
- webscout/Provider/cleeai.py +1 -0
- webscout/Provider/copilot.py +21 -9
- webscout/Provider/elmo.py +1 -1
- webscout/Provider/flowith.py +1 -1
- webscout/Provider/freeaichat.py +64 -31
- webscout/Provider/gaurish.py +3 -5
- webscout/Provider/geminiprorealtime.py +1 -1
- webscout/Provider/granite.py +4 -4
- webscout/Provider/hermes.py +5 -5
- webscout/Provider/julius.py +1 -1
- webscout/Provider/koala.py +1 -1
- webscout/Provider/lepton.py +1 -1
- webscout/Provider/llama3mitril.py +4 -4
- webscout/Provider/llamatutor.py +1 -1
- webscout/Provider/llmchat.py +3 -3
- webscout/Provider/meta.py +1 -1
- webscout/Provider/multichat.py +10 -10
- webscout/Provider/promptrefine.py +1 -1
- webscout/Provider/searchchat.py +293 -0
- webscout/Provider/sonus.py +2 -2
- webscout/Provider/talkai.py +2 -2
- webscout/Provider/turboseek.py +1 -1
- webscout/Provider/tutorai.py +1 -1
- webscout/Provider/typegpt.py +5 -42
- webscout/Provider/uncovr.py +312 -297
- webscout/Provider/x0gpt.py +1 -1
- webscout/Provider/yep.py +64 -12
- webscout/__init__.py +3 -1
- webscout/cli.py +59 -98
- webscout/conversation.py +350 -17
- webscout/litprinter/__init__.py +59 -667
- webscout/optimizers.py +419 -419
- webscout/tempid.py +11 -11
- webscout/update_checker.py +14 -12
- webscout/utils.py +2 -2
- webscout/version.py +1 -1
- webscout/webscout_search.py +146 -87
- webscout/webscout_search_async.py +148 -27
- {webscout-7.7.dist-info → webscout-7.9.dist-info}/METADATA +92 -66
- webscout-7.9.dist-info/RECORD +248 -0
- webscout/Provider/EDITEE.py +0 -192
- webscout/litprinter/colors.py +0 -54
- webscout-7.7.dist-info/RECORD +0 -234
- {webscout-7.7.dist-info → webscout-7.9.dist-info}/LICENSE.md +0 -0
- {webscout-7.7.dist-info → webscout-7.9.dist-info}/WHEEL +0 -0
- {webscout-7.7.dist-info → webscout-7.9.dist-info}/entry_points.txt +0 -0
- {webscout-7.7.dist-info → webscout-7.9.dist-info}/top_level.txt +0 -0
webscout/Provider/yep.py
CHANGED
@@ -1,18 +1,17 @@
-import time
 import uuid
 import cloudscraper
 import json

-from typing import Any, Dict, Optional, Generator, Union
-from dataclasses import dataclass, asdict
-from datetime import date
+from typing import Any, Dict, Optional, Generator, Union, List, TypeVar

 from webscout.AIutel import Optimizers
-from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
 from webscout.AIbase import Provider
-from webscout import
+from webscout import exceptions
 from webscout.litagent import LitAgent
+from webscout.conversation import Conversation, Fn
+
+T = TypeVar('T')


 class YEPCHAT(Provider):
@@ -39,7 +38,8 @@ class YEPCHAT(Provider):
         model: str = "DeepSeek-R1-Distill-Qwen-32B",
         temperature: float = 0.6,
         top_p: float = 0.7,
-        browser: str = "chrome"
+        browser: str = "chrome",
+        tools: Optional[List[Fn]] = None
     ):
         """
         Initializes the YEPCHAT provider with the specified parameters.
@@ -51,6 +51,11 @@ class YEPCHAT(Provider):

         >>> ai.chat("Tell me a joke", stream=True)
         Initiates a chat with the Yep API using the provided prompt.
+
+        >>> weather_tool = Fn(name="get_weather", description="Get the current weather", parameters={"location": "string"})
+        >>> ai = YEPCHAT(tools=[weather_tool])
+        >>> ai.chat("What's the weather in New York?")
+        Uses the weather tool to provide weather information.
         """
         if model not in self.AVAILABLE_MODELS:
             raise ValueError(
@@ -103,7 +108,7 @@ class YEPCHAT(Provider):
             else intro or Conversation.intro
         )
         self.conversation = Conversation(
-            is_conversation, self.max_tokens_to_sample, filepath, update_file
+            is_conversation, self.max_tokens_to_sample, filepath, update_file, tools=tools
         )
         self.conversation.history_offset = history_offset
         self.session.proxies = proxies
@@ -150,6 +155,7 @@ class YEPCHAT(Provider):
     ) -> Union[Dict[str, Any], Generator]:
         """
         Sends a prompt to the Yep API and returns the response.
+        Now supports tool calling functionality.

         Examples:
         >>> ai = YEPCHAT()
@@ -158,6 +164,11 @@ class YEPCHAT(Provider):

         >>> ai.ask("Tell me a joke", stream=True)
         Streams the response from the Yep API.
+
+        >>> weather_tool = Fn(name="get_weather", description="Get the current weather", parameters={"location": "string"})
+        >>> ai = YEPCHAT(tools=[weather_tool])
+        >>> ai.ask("What's the weather in New York?")
+        Will use the weather tool to provide response.
         """
         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
@@ -219,7 +230,25 @@ class YEPCHAT(Provider):
                            yield resp if raw else resp
                        except json.JSONDecodeError:
                            pass
-
+
+                # Check if the response contains a tool call
+                response_data = self.conversation.handle_tool_response(streaming_text)
+
+                if response_data["is_tool_call"]:
+                    # Handle tool call results
+                    if response_data["success"]:
+                        for tool_call in response_data.get("tool_calls", []):
+                            tool_name = tool_call.get("name", "unknown_tool")
+                            result = response_data["result"]
+                            self.conversation.update_chat_history_with_tool(prompt, tool_name, result)
+                    else:
+                        # If tool call failed, update history with error
+                        self.conversation.update_chat_history(prompt,
+                            f"Error executing tool call: {response_data['result']}")
+                else:
+                    # Normal response handling
+                    self.conversation.update_chat_history(prompt, streaming_text)
+
             except Exception as e:
                 raise exceptions.FailedToGenerateResponseError(f"Request failed: {e}")

@@ -242,12 +271,35 @@ class YEPCHAT(Provider):
                 response_data = response.json()
                 if 'choices' in response_data and len(response_data['choices']) > 0:
                     content = response_data['choices'][0].get('message', {}).get('content', '')
-
-
+
+                    # Check if the response contains a tool call
+                    tool_response = self.conversation.handle_tool_response(content)
+
+                    if tool_response["is_tool_call"]:
+                        # Process tool call
+                        if tool_response["success"]:
+                            # Get the first tool call for simplicity
+                            if "tool_calls" in tool_response and len(tool_response["tool_calls"]) > 0:
+                                tool_call = tool_response["tool_calls"][0]
+                                tool_name = tool_call.get("name", "unknown_tool")
+                                tool_result = tool_response["result"]
+
+                                # Update chat history with tool call
+                                self.conversation.update_chat_history_with_tool(prompt, tool_name, tool_result)
+
+                                # Return tool result
+                                return {"text": tool_result, "is_tool_call": True, "tool_name": tool_name}
+
+                        # If tool call processing failed
+                        return {"text": tool_response["result"], "is_tool_call": True, "error": True}
+                    else:
+                        # Normal response handling
+                        self.conversation.update_chat_history(prompt, content)
+                        return {"text": content}
                 else:
                     raise exceptions.FailedToGenerateResponseError("No response content found")
             except Exception as e:
-                raise exceptions.FailedToGenerateResponseError(f"Request failed:
+                raise exceptions.FailedToGenerateResponseError(f"Request failed: e")

         return for_stream() if stream else for_non_stream()

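The yep.py change above threads a new tools parameter into the provider and routes both the streaming and non-streaming paths through Conversation.handle_tool_response. Below is a minimal usage sketch, assuming the Fn constructor fields and the {"text", "is_tool_call", "tool_name"} return keys shown in the diff; the import paths are the ones the diff itself uses and are not independently verified.

    # Sketch only: mirrors the docstring examples added in this diff.
    from webscout.conversation import Fn
    from webscout.Provider.yep import YEPCHAT

    # Declare a tool the model may call; fields copy the docstring example above.
    weather_tool = Fn(
        name="get_weather",
        description="Get the current weather",
        parameters={"location": "string"},
    )

    ai = YEPCHAT(tools=[weather_tool])

    # Non-streaming ask() now returns either a plain completion or a tool result.
    result = ai.ask("What's the weather in New York?")
    if result.get("is_tool_call"):
        print(f"tool {result.get('tool_name', '?')} -> {result['text']}")
    else:
        print(result["text"])

Note that the new non-streaming error path raises FailedToGenerateResponseError(f"Request failed: e") with a literal e rather than an interpolated {e}, so failures on that path will not carry the underlying exception message.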
webscout/__init__.py
CHANGED
@@ -18,6 +18,8 @@ from .litagent import LitAgent
 from .scout import *
 from .zeroart import *
 from .yep_search import *
+# # Import litprinter components for direct access from webscout
+# from .litprinter import lit, litprint, ic, install, uninstall
 agent = LitAgent()

 __repo__ = "https://github.com/OE-LUCIFER/Webscout"
@@ -33,4 +35,4 @@ import logging
 logging.getLogger("webscout").addHandler(logging.NullHandler())

 # Import models for easy access
-from
+from .models import model
webscout/cli.py
CHANGED
@@ -2,112 +2,61 @@ import sys
 from .swiftcli import CLI, option
 from .webscout_search import WEBS
 from .version import __version__
-from rich.console import Console
-from rich.panel import Panel
-from rich.table import Table
-from rich.text import Text


-COLORS = {
-    0: "black",
-    1: "red",
-    2: "green",
-    3: "yellow",
-    4: "blue",
-    5: "magenta",
-    6: "cyan",
-    7: "bright_black",
-    8: "bright_red",
-    9: "bright_green",
-    10: "bright_yellow",
-    11: "bright_blue",
-    12: "bright_magenta",
-    13: "bright_cyan",
-    14: "white",
-    15: "bright_white",
-}
-
 def _print_data(data):
-    """Prints data
-    console = Console()
+    """Prints data in a simple formatted way."""
     if data:
         for i, e in enumerate(data, start=1):
-
-
-
-
-            for j, (k, v) in enumerate(e.items(), start=1):
+            print(f"\nResult {i}:")
+            print("-" * 50)
+            for k, v in e.items():
                 if v:
-                    width = 300 if k in ("content", "href", "image", "source", "thumbnail", "url") else 78
                     k = "language" if k == "detected_language" else k
-
-
-                    else:
-                        text = Text(str(v), style="white")
-                    table.add_row(k, text)
-
-            console.print(Panel(table, title=f"Result {i}", expand=False, style="green on black"))
-            console.print("\n")
+                    print(f"{k:15}: {v}")
+            print("-" * 50)

 def _print_weather(data):
-    """Prints weather data in a clean
-    console = Console()
-
-    # Current weather panel
+    """Prints weather data in a clean format."""
     current = data["current"]
-    current_table = Table(show_header=False, show_lines=True, expand=True, box=None)
-    current_table.add_column("Metric", style="cyan", no_wrap=True, width=15)
-    current_table.add_column("Value", style="white")

-
-
-
-
-
+    print(f"\nCurrent Weather in {data['location']}:")
+    print("-" * 50)
+    print(f"Temperature: {current['temperature_c']}°C")
+    print(f"Feels Like: {current['feels_like_c']}°C")
+    print(f"Humidity: {current['humidity']}%")
+    print(f"Wind: {current['wind_speed_ms']} m/s")
+    print(f"Direction: {current['wind_direction']}°")
+    print("-" * 50)

-
-
-
-
-    daily_table = Table(show_header=True, show_lines=True, expand=True, box=None)
-    daily_table.add_column("Date", style="cyan")
-    daily_table.add_column("Condition", style="white")
-    daily_table.add_column("High", style="red")
-    daily_table.add_column("Low", style="blue")
-
-    for day in data["daily_forecast"][:5]:  # Show next 5 days
-        daily_table.add_row(
-            day["date"],
-            day["condition"],
-            f"{day['max_temp_c']}°C",
-            f"{day['min_temp_c']}°C"
-        )
+    print("\n5-Day Forecast:")
+    print("-" * 50)
+    print(f"{'Date':10} {'Condition':15} {'High':8} {'Low':8}")
+    print("-" * 50)

-
+    for day in data["daily_forecast"][:5]:
+        print(f"{day['date']:10} {day['condition']:15} {day['max_temp_c']:8.1f}°C {day['min_temp_c']:8.1f}°C")
+    print("-" * 50)

 # Initialize CLI app
-app = CLI(name="webscout", help="Search the web with a
+app = CLI(name="webscout", help="Search the web with a simple UI", version=__version__)

 @app.command()
 def version():
     """Show the version of webscout."""
-
-    console.print(f"[bold green]webscout[/bold green] version: {__version__}")
+    print(f"webscout version: {__version__}")

 @app.command()
 @option("--proxy", help="Proxy URL to use for requests")
 @option("--model", "-m", help="AI model to use", default="gpt-4o-mini", type=str)
-
+@option("--timeout", "-t", help="Timeout value for requests", type=int, default=10)
+def chat(proxy: str = None, model: str = "gpt-4o-mini", timeout: int = 10):
     """Interactive AI chat using DuckDuckGo's AI."""
-    webs = WEBS(proxy=proxy)
-    console = Console()
+    webs = WEBS(proxy=proxy, timeout=timeout)

-
-
-    console.print(f"[bold green]Using model:[/] {model}\n")
-    console.print("[cyan]Type your message and press Enter. Press Ctrl+C or type 'exit' to quit.[/]\n")
+    print(f"Using model: {model}")
+    print("Type your message and press Enter. Press Ctrl+C or type 'exit' to quit.\n")

-    # Start chat loop
     try:
         while True:
             try:
@@ -116,13 +65,13 @@ def chat(proxy: str = None, model: str = "gpt-4o-mini"):
                     break

                 response = webs.chat(keywords=user_input, model=model)
-
+                print(f"\nAI: {response}\n")

             except Exception as e:
-
+                print(f"Error: {str(e)}\n")

     except KeyboardInterrupt:
-
+        print("\nChat session interrupted. Exiting...")

 @app.command()
 @option("--keywords", "-k", help="Search keywords", required=True)
@@ -132,9 +81,10 @@ def chat(proxy: str = None, model: str = "gpt-4o-mini"):
 @option("--backend", "-b", help="Search backend to use", default="api")
 @option("--max-results", "-m", help="Maximum number of results", type=int, default=25)
 @option("--proxy", "-p", help="Proxy URL to use for requests")
-
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
+def text(keywords: str, region: str, safesearch: str, timelimit: str, backend: str, max_results: int, proxy: str = None, timeout: int = 10):
     """Perform a text search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.text(keywords, region, safesearch, timelimit, backend, max_results)
         _print_data(results)
@@ -144,9 +94,10 @@ def text(keywords: str, region: str, safesearch: str, timelimit: str, backend: s
 @app.command()
 @option("--keywords", "-k", help="Search keywords", required=True)
 @option("--proxy", "-p", help="Proxy URL to use for requests")
-
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
+def answers(keywords: str, proxy: str = None, timeout: int = 10):
     """Perform an answers search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.answers(keywords)
         _print_data(results)
@@ -165,6 +116,7 @@ def answers(keywords: str, proxy: str = None):
 @option("--license", "-lic", help="Image license", default=None)
 @option("--max-results", "-m", help="Maximum number of results", type=int, default=90)
 @option("--proxy", "-p", help="Proxy URL to use for requests")
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
 def images(
     keywords: str,
     region: str,
@@ -177,9 +129,10 @@ def images(
     license: str,
     max_results: int,
     proxy: str = None,
+    timeout: int = 10,
 ):
     """Perform an images search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.images(keywords, region, safesearch, timelimit, size, color, type, layout, license, max_results)
         _print_data(results)
@@ -196,6 +149,7 @@ def images(
 @option("--license", "-lic", help="Video license", default=None)
 @option("--max-results", "-m", help="Maximum number of results", type=int, default=50)
 @option("--proxy", "-p", help="Proxy URL to use for requests")
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
 def videos(
     keywords: str,
     region: str,
@@ -206,9 +160,10 @@ def videos(
     license: str,
     max_results: int,
     proxy: str = None,
+    timeout: int = 10,
 ):
     """Perform a videos search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.videos(keywords, region, safesearch, timelimit, resolution, duration, license, max_results)
         _print_data(results)
@@ -222,9 +177,10 @@ def videos(
 @option("--timelimit", "-t", help="Time limit for results", default=None)
 @option("--max-results", "-m", help="Maximum number of results", type=int, default=25)
 @option("--proxy", "-p", help="Proxy URL to use for requests")
-
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
+def news(keywords: str, region: str, safesearch: str, timelimit: str, max_results: int, proxy: str = None, timeout: int = 10):
     """Perform a news search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.news(keywords, region, safesearch, timelimit, max_results)
         _print_data(results)
@@ -245,6 +201,7 @@ def news(keywords: str, region: str, safesearch: str, timelimit: str, max_result
 @option("--radius", "-r", help="Expand the search square by the distance in kilometers", type=int, default=0)
 @option("--max-results", "-m", help="Number of results", type=int, default=50)
 @option("--proxy", "-p", help="Proxy URL to use for requests")
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
 def maps(
     keywords: str,
     place: str,
@@ -259,9 +216,10 @@ def maps(
     radius: int,
     max_results: int,
     proxy: str = None,
+    timeout: int = 10,
 ):
     """Perform a maps search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.maps(
             keywords,
@@ -286,9 +244,10 @@ def maps(
 @option("--from", "-f", help="Language to translate from (defaults automatically)")
 @option("--to", "-t", help="Language to translate to (default: 'en')", default="en")
 @option("--proxy", "-p", help="Proxy URL to use for requests")
-
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
+def translate(keywords: str, from_: str, to: str, proxy: str = None, timeout: int = 10):
     """Perform translation using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.translate(keywords, from_, to)
         _print_data(results)
@@ -299,9 +258,10 @@ def translate(keywords: str, from_: str, to: str, proxy: str = None):
 @option("--keywords", "-k", help="Search keywords", required=True)
 @option("--region", "-r", help="Region for search results", default="wt-wt")
 @option("--proxy", "-p", help="Proxy URL to use for requests")
-
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
+def suggestions(keywords: str, region: str, proxy: str = None, timeout: int = 10):
     """Perform a suggestions search using DuckDuckGo API."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.suggestions(keywords, region)
         _print_data(results)
@@ -312,9 +272,10 @@ def suggestions(keywords: str, region: str, proxy: str = None):
 @option("--location", "-l", help="Location to get weather for", required=True)
 @option("--language", "-lang", help="Language code (e.g. 'en', 'es')", default="en")
 @option("--proxy", "-p", help="Proxy URL to use for requests")
-
+@option("--timeout", "-timeout", help="Timeout value for requests", type=int, default=10)
+def weather(location: str, language: str, proxy: str = None, timeout: int = 10):
     """Get weather information for a location from DuckDuckGo."""
-    webs = WEBS(proxy=proxy)
+    webs = WEBS(proxy=proxy, timeout=timeout)
     try:
         results = webs.weather(location, language)
         _print_weather(results)