webscout 1.4.4__tar.gz → 1.4.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. Click here for more details.

Files changed (44)
  1. {webscout-1.4.4 → webscout-1.4.5}/PKG-INFO +1 -1
  2. {webscout-1.4.4 → webscout-1.4.5}/setup.py +1 -1
  3. webscout-1.4.5/webscout/version.py +2 -0
  4. {webscout-1.4.4 → webscout-1.4.5}/webscout/webscout_search.py +13 -10
  5. {webscout-1.4.4 → webscout-1.4.5}/webscout/webscout_search_async.py +132 -76
  6. {webscout-1.4.4 → webscout-1.4.5}/webscout.egg-info/PKG-INFO +1 -1
  7. webscout-1.4.4/webscout/version.py +0 -2
  8. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/__init__.py +0 -0
  9. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/documents/__init__.py +0 -0
  10. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/documents/query_results_extractor.py +0 -0
  11. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/documents/webpage_content_extractor.py +0 -0
  12. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/networks/__init__.py +0 -0
  13. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/networks/filepath_converter.py +0 -0
  14. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/networks/google_searcher.py +0 -0
  15. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/networks/network_configs.py +0 -0
  16. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/networks/webpage_fetcher.py +0 -0
  17. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/utilsdw/__init__.py +0 -0
  18. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/utilsdw/enver.py +0 -0
  19. {webscout-1.4.4 → webscout-1.4.5}/DeepWEBS/utilsdw/logger.py +0 -0
  20. {webscout-1.4.4 → webscout-1.4.5}/LICENSE.md +0 -0
  21. {webscout-1.4.4 → webscout-1.4.5}/README.md +0 -0
  22. {webscout-1.4.4 → webscout-1.4.5}/setup.cfg +0 -0
  23. {webscout-1.4.4 → webscout-1.4.5}/webscout/AI.py +0 -0
  24. {webscout-1.4.4 → webscout-1.4.5}/webscout/AIbase.py +0 -0
  25. {webscout-1.4.4 → webscout-1.4.5}/webscout/AIutel.py +0 -0
  26. {webscout-1.4.4 → webscout-1.4.5}/webscout/DWEBS.py +0 -0
  27. {webscout-1.4.4 → webscout-1.4.5}/webscout/LLM.py +0 -0
  28. {webscout-1.4.4 → webscout-1.4.5}/webscout/__init__.py +0 -0
  29. {webscout-1.4.4 → webscout-1.4.5}/webscout/__main__.py +0 -0
  30. {webscout-1.4.4 → webscout-1.4.5}/webscout/async_providers.py +0 -0
  31. {webscout-1.4.4 → webscout-1.4.5}/webscout/cli.py +0 -0
  32. {webscout-1.4.4 → webscout-1.4.5}/webscout/exceptions.py +0 -0
  33. {webscout-1.4.4 → webscout-1.4.5}/webscout/g4f.py +0 -0
  34. {webscout-1.4.4 → webscout-1.4.5}/webscout/models.py +0 -0
  35. {webscout-1.4.4 → webscout-1.4.5}/webscout/tempid.py +0 -0
  36. {webscout-1.4.4 → webscout-1.4.5}/webscout/transcriber.py +0 -0
  37. {webscout-1.4.4 → webscout-1.4.5}/webscout/utils.py +0 -0
  38. {webscout-1.4.4 → webscout-1.4.5}/webscout/voice.py +0 -0
  39. {webscout-1.4.4 → webscout-1.4.5}/webscout/webai.py +0 -0
  40. {webscout-1.4.4 → webscout-1.4.5}/webscout.egg-info/SOURCES.txt +0 -0
  41. {webscout-1.4.4 → webscout-1.4.5}/webscout.egg-info/dependency_links.txt +0 -0
  42. {webscout-1.4.4 → webscout-1.4.5}/webscout.egg-info/entry_points.txt +0 -0
  43. {webscout-1.4.4 → webscout-1.4.5}/webscout.egg-info/requires.txt +0 -0
  44. {webscout-1.4.4 → webscout-1.4.5}/webscout.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 1.4.4
3
+ Version: 1.4.5
4
4
  Summary: Search for anything using the Google, DuckDuckGo, phind.com. Also contains AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpreter)
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
@@ -5,7 +5,7 @@ with open("README.md", encoding="utf-8") as f:
5
5
 
6
6
  setup(
7
7
  name="webscout",
8
- version="1.4.4",
8
+ version="1.4.5",
9
9
  description="Search for anything using the Google, DuckDuckGo, phind.com. Also contains AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpreter)",
10
10
  long_description=README,
11
11
  long_description_content_type="text/markdown",
@@ -0,0 +1,2 @@
1
+ __version__ = "1.4.5"
2
+
@@ -20,11 +20,19 @@ class WEBS(AsyncWEBS):
20
20
  def __init__(
21
21
  self,
22
22
  headers: Optional[Dict[str, str]] = None,
23
- proxies: Union[Dict[str, str], str, None] = None,
23
+ proxy: Optional[str] = None,
24
+ proxies: Union[Dict[str, str], str, None] = None, # deprecated
24
25
  timeout: Optional[int] = 10,
25
26
  ) -> None:
26
- super().__init__(headers=headers, proxies=proxies, timeout=timeout)
27
- self._exit_done = False
27
+ """Initialize the DDGS object.
28
+
29
+ Args:
30
+ headers (dict, optional): Dictionary of headers for the HTTP client. Defaults to None.
31
+ proxy (str, optional): proxy for the HTTP client, supports http/https/socks5 protocols.
32
+ example: "http://user:pass@example.com:3128". Defaults to None.
33
+ timeout (int, optional): Timeout value for the HTTP client. Defaults to 10.
34
+ """
35
+ super().__init__(headers=headers, proxy=proxy, proxies=proxies, timeout=timeout)
28
36
 
29
37
  def __enter__(self) -> "WEBS":
30
38
  return self
@@ -42,13 +50,8 @@ class WEBS(AsyncWEBS):
42
50
 
43
51
  def _close_session(self) -> None:
44
52
  """Close the curl-cffi async session."""
45
- if self._exit_done is False:
46
- # Ensure self._asession.close() is a coroutine
47
- coro = self._asession.close()
48
- # Check if coro is a coroutine object
49
- if asyncio.iscoroutine(coro):
50
- self._run_async_in_thread(coro)
51
- self._exit_done = True
53
+ if hasattr(self, "_asession") and self._asession._closed is False:
54
+ self._run_async_in_thread(self._asession.close()) # type: ignore
52
55
 
53
56
  def _run_async_in_thread(self, coro: Awaitable[Any]) -> Any:
54
57
  """Runs an async coroutine in a separate thread."""
@@ -5,10 +5,10 @@ from concurrent.futures import ThreadPoolExecutor
5
5
  from contextlib import suppress
6
6
  from datetime import datetime, timezone
7
7
  from decimal import Decimal
8
- from functools import partial
8
+ from functools import cached_property, partial
9
9
  from itertools import cycle, islice
10
10
  from types import TracebackType
11
- from typing import Dict, List, Optional, Tuple, Union
11
+ from typing import Dict, List, Optional, Tuple, Type, Union, cast
12
12
 
13
13
  from curl_cffi import requests
14
14
 
@@ -34,68 +34,71 @@ logger = logging.getLogger("webscout_search.AsyncWEBS")
34
34
 
35
35
 
36
36
  class AsyncWEBS:
37
- """Webscout async class to get search results from duckduckgo.com."""
37
+ """webscout_search async class to get search results from duckduckgo.com."""
38
38
 
39
39
  _executor: Optional[ThreadPoolExecutor] = None
40
40
 
41
41
  def __init__(
42
42
  self,
43
43
  headers: Optional[Dict[str, str]] = None,
44
- proxies: Union[Dict[str, str], str, None] = None,
44
+ proxy: Optional[str] = None,
45
+ proxies: Union[Dict[str, str], str, None] = None, # deprecated
45
46
  timeout: Optional[int] = 10,
46
47
  ) -> None:
47
48
  """Initialize the AsyncWEBS object.
48
49
 
49
50
  Args:
50
51
  headers (dict, optional): Dictionary of headers for the HTTP client. Defaults to None.
51
- proxies (Union[dict, str], optional): Proxies for the HTTP client (can be dict or str). Defaults to None.
52
+ proxy (str, optional): proxy for the HTTP client, supports http/https/socks5 protocols.
53
+ example: "http://user:pass@example.com:3128". Defaults to None.
52
54
  timeout (int, optional): Timeout value for the HTTP client. Defaults to 10.
53
55
  """
54
- self.proxies = {"all": proxies} if isinstance(proxies, str) else proxies
56
+ self.proxy: Optional[str] = proxy
57
+ assert self.proxy is None or isinstance(self.proxy, str), "proxy must be a str"
58
+ if not proxy and proxies:
59
+ warnings.warn("'proxies' is deprecated, use 'proxy' instead.", stacklevel=1)
60
+ self.proxy = proxies.get("http") or proxies.get("https") if isinstance(proxies, dict) else proxies
55
61
  self._asession = requests.AsyncSession(
56
62
  headers=headers,
57
- proxies=self.proxies,
63
+ proxy=self.proxy,
58
64
  timeout=timeout,
59
65
  impersonate="chrome",
60
66
  allow_redirects=False,
61
67
  )
62
68
  self._asession.headers["Referer"] = "https://duckduckgo.com/"
63
- self._parser: Optional[LHTMLParser] = None
64
69
  self._exception_event = asyncio.Event()
65
- self._exit_done = False
66
70
 
67
71
  async def __aenter__(self) -> "AsyncWEBS":
68
72
  return self
69
73
 
70
74
  async def __aexit__(
71
75
  self,
72
- exc_type: Optional[BaseException] = None,
76
+ exc_type: Optional[Type[BaseException]] = None,
73
77
  exc_val: Optional[BaseException] = None,
74
78
  exc_tb: Optional[TracebackType] = None,
75
79
  ) -> None:
76
- await self._session_close()
80
+ await self._asession.__aexit__(exc_type, exc_val, exc_tb) # type: ignore
77
81
 
78
82
  def __del__(self) -> None:
79
- if self._exit_done is False:
80
- asyncio.create_task(self._session_close())
83
+ if hasattr(self, "_asession") and self._asession._closed is False:
84
+ with suppress(RuntimeError, RuntimeWarning):
85
+ asyncio.create_task(self._asession.close()) # type: ignore
81
86
 
82
- async def _session_close(self) -> None:
83
- """Close the curl-cffi async session."""
84
- if self._exit_done is False:
85
- await self._asession.close()
86
- self._exit_done = True
87
-
88
- def _get_parser(self) -> "LHTMLParser":
87
+ @cached_property
88
+ def parser(self) -> Optional["LHTMLParser"]:
89
89
  """Get HTML parser."""
90
- if self._parser is None:
91
- self._parser = LHTMLParser(remove_blank_text=True, remove_comments=True, remove_pis=True, collect_ids=False)
92
- return self._parser
90
+ return LHTMLParser(remove_blank_text=True, remove_comments=True, remove_pis=True, collect_ids=False)
93
91
 
94
- def _get_executor(self, max_workers: int = 1) -> ThreadPoolExecutor:
92
+ @classmethod
93
+ def _get_executor(cls, max_workers: int = 1) -> ThreadPoolExecutor:
95
94
  """Get ThreadPoolExecutor. Default max_workers=1, because >=2 leads to a big overhead"""
96
- if AsyncWEBS._executor is None:
97
- AsyncWEBS._executor = ThreadPoolExecutor(max_workers=max_workers)
98
- return AsyncWEBS._executor
95
+ if cls._executor is None:
96
+ cls._executor = ThreadPoolExecutor(max_workers=max_workers)
97
+ return cls._executor
98
+
99
+ @property
100
+ def executor(cls) -> Optional[ThreadPoolExecutor]:
101
+ return cls._get_executor()
99
102
 
100
103
  async def _aget_url(
101
104
  self,
@@ -107,19 +110,18 @@ class AsyncWEBS:
107
110
  if self._exception_event.is_set():
108
111
  raise WebscoutE("Exception occurred in previous call.")
109
112
  try:
110
- resp = await self._asession.request(method, url, data=data, params=params, stream=True)
111
- resp_content: bytes = await resp.acontent()
113
+ resp = await self._asession.request(method, url, data=data, params=params)
112
114
  except Exception as ex:
113
115
  self._exception_event.set()
114
116
  if "time" in str(ex).lower():
115
117
  raise TimeoutE(f"{url} {type(ex).__name__}: {ex}") from ex
116
118
  raise WebscoutE(f"{url} {type(ex).__name__}: {ex}") from ex
117
- logger.debug(f"_aget_url() {resp.url} {resp.status_code} {resp.elapsed:.2f} {len(resp_content)}")
119
+ logger.debug(f"_aget_url() {resp.url} {resp.status_code} {resp.elapsed:.2f} {len(resp.content)}")
118
120
  if resp.status_code == 200:
119
- return resp_content
121
+ return cast(bytes, resp.content)
120
122
  self._exception_event.set()
121
123
  if resp.status_code in (202, 301, 403):
122
- raise RatelimitE(f"{resp.url} {resp.status_code}")
124
+ raise RatelimitE(f"{resp.url} {resp.status_code} Ratelimit")
123
125
  raise WebscoutE(f"{resp.url} return None. {params=} {data=}")
124
126
 
125
127
  async def _aget_vqd(self, keywords: str) -> str:
@@ -136,7 +138,7 @@ class AsyncWEBS:
136
138
  backend: str = "api",
137
139
  max_results: Optional[int] = None,
138
140
  ) -> List[Dict[str, str]]:
139
- """Webscout text search generator. Query params: https://duckduckgo.com/params.
141
+ """webscout text search generator. Query params: https://duckduckgo.com/params.
140
142
 
141
143
  Args:
142
144
  keywords: keywords for query.
@@ -153,7 +155,7 @@ class AsyncWEBS:
153
155
  List of dictionaries with search results, or None if there was an error.
154
156
 
155
157
  Raises:
156
- WebscoutE: Base exception for Webscout errors.
158
+ WebscoutE: Base exception for webscout_search errors.
157
159
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
158
160
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
159
161
  """
@@ -177,7 +179,7 @@ class AsyncWEBS:
177
179
  timelimit: Optional[str] = None,
178
180
  max_results: Optional[int] = None,
179
181
  ) -> List[Dict[str, str]]:
180
- """Webscout text search generator. Query params: https://duckduckgo.com/params.
182
+ """webscout text search generator. Query params: https://duckduckgo.com/params.
181
183
 
182
184
  Args:
183
185
  keywords: keywords for query.
@@ -190,7 +192,7 @@ class AsyncWEBS:
190
192
  List of dictionaries with search results.
191
193
 
192
194
  Raises:
193
- WebscoutE: Base exception for Webscout errors.
195
+ WebscoutE: Base exception for webscout_search errors.
194
196
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
195
197
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
196
198
  """
@@ -241,11 +243,19 @@ class AsyncWEBS:
241
243
  }
242
244
  results[priority] = result
243
245
 
244
- tasks = [_text_api_page(0, 0)]
246
+ tasks = [asyncio.create_task(_text_api_page(0, 0))]
245
247
  if max_results:
246
248
  max_results = min(max_results, 500)
247
- tasks.extend(_text_api_page(s, i) for i, s in enumerate(range(23, max_results, 50), start=1))
248
- await asyncio.gather(*tasks)
249
+ tasks.extend(
250
+ asyncio.create_task(_text_api_page(s, i)) for i, s in enumerate(range(23, max_results, 50), start=1)
251
+ )
252
+ try:
253
+ await asyncio.gather(*tasks)
254
+ except Exception as e:
255
+ for task in tasks:
256
+ task.cancel()
257
+ await asyncio.gather(*tasks, return_exceptions=True)
258
+ raise e
249
259
 
250
260
  return list(islice(filter(None, results), max_results))
251
261
 
@@ -257,7 +267,7 @@ class AsyncWEBS:
257
267
  timelimit: Optional[str] = None,
258
268
  max_results: Optional[int] = None,
259
269
  ) -> List[Dict[str, str]]:
260
- """Webscout text search generator. Query params: https://duckduckgo.com/params.
270
+ """webscout text search generator. Query params: https://duckduckgo.com/params.
261
271
 
262
272
  Args:
263
273
  keywords: keywords for query.
@@ -270,7 +280,7 @@ class AsyncWEBS:
270
280
  List of dictionaries with search results.
271
281
 
272
282
  Raises:
273
- WebscoutE: Base exception for Webscout errors.
283
+ WebscoutE: Base exception for webscout_search errors.
274
284
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
275
285
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
276
286
  """
@@ -302,7 +312,7 @@ class AsyncWEBS:
302
312
  return
303
313
 
304
314
  tree = await self._asession.loop.run_in_executor(
305
- self._get_executor(), partial(document_fromstring, resp_content, self._get_parser())
315
+ self.executor, partial(document_fromstring, resp_content, self.parser)
306
316
  )
307
317
 
308
318
  for e in tree.xpath("//div[h2]"):
@@ -327,11 +337,19 @@ class AsyncWEBS:
327
337
  }
328
338
  results[priority] = result
329
339
 
330
- tasks = [_text_html_page(0, 0)]
340
+ tasks = [asyncio.create_task(_text_html_page(0, 0))]
331
341
  if max_results:
332
342
  max_results = min(max_results, 500)
333
- tasks.extend(_text_html_page(s, i) for i, s in enumerate(range(23, max_results, 50), start=1))
334
- await asyncio.gather(*tasks)
343
+ tasks.extend(
344
+ asyncio.create_task(_text_html_page(s, i)) for i, s in enumerate(range(23, max_results, 50), start=1)
345
+ )
346
+ try:
347
+ await asyncio.gather(*tasks)
348
+ except Exception as e:
349
+ for task in tasks:
350
+ task.cancel()
351
+ await asyncio.gather(*tasks, return_exceptions=True)
352
+ raise e
335
353
 
336
354
  return list(islice(filter(None, results), max_results))
337
355
 
@@ -342,7 +360,7 @@ class AsyncWEBS:
342
360
  timelimit: Optional[str] = None,
343
361
  max_results: Optional[int] = None,
344
362
  ) -> List[Dict[str, str]]:
345
- """Webscout text search generator. Query params: https://duckduckgo.com/params.
363
+ """webscout text search generator. Query params: https://duckduckgo.com/params.
346
364
 
347
365
  Args:
348
366
  keywords: keywords for query.
@@ -354,7 +372,7 @@ class AsyncWEBS:
354
372
  List of dictionaries with search results.
355
373
 
356
374
  Raises:
357
- WebscoutE: Base exception for Webscout errors.
375
+ WebscoutE: Base exception for webscout_search errors.
358
376
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
359
377
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
360
378
  """
@@ -381,7 +399,7 @@ class AsyncWEBS:
381
399
  return
382
400
 
383
401
  tree = await self._asession.loop.run_in_executor(
384
- self._get_executor(), partial(document_fromstring, resp_content, self._get_parser())
402
+ self.executor, partial(document_fromstring, resp_content, self.parser)
385
403
  )
386
404
 
387
405
  data = zip(cycle(range(1, 5)), tree.xpath("//table[last()]//tr"))
@@ -410,11 +428,19 @@ class AsyncWEBS:
410
428
  }
411
429
  results[priority] = result
412
430
 
413
- tasks = [_text_lite_page(0, 0)]
431
+ tasks = [asyncio.create_task(_text_lite_page(0, 0))]
414
432
  if max_results:
415
433
  max_results = min(max_results, 500)
416
- tasks.extend(_text_lite_page(s, i) for i, s in enumerate(range(23, max_results, 50), start=1))
417
- await asyncio.gather(*tasks)
434
+ tasks.extend(
435
+ asyncio.create_task(_text_lite_page(s, i)) for i, s in enumerate(range(23, max_results, 50), start=1)
436
+ )
437
+ try:
438
+ await asyncio.gather(*tasks)
439
+ except Exception as e:
440
+ for task in tasks:
441
+ task.cancel()
442
+ await asyncio.gather(*tasks, return_exceptions=True)
443
+ raise e
418
444
 
419
445
  return list(islice(filter(None, results), max_results))
420
446
 
@@ -431,7 +457,7 @@ class AsyncWEBS:
431
457
  license_image: Optional[str] = None,
432
458
  max_results: Optional[int] = None,
433
459
  ) -> List[Dict[str, str]]:
434
- """Webscout images search. Query params: https://duckduckgo.com/params.
460
+ """webscout images search. Query params: https://duckduckgo.com/params.
435
461
 
436
462
  Args:
437
463
  keywords: keywords for query.
@@ -454,7 +480,7 @@ class AsyncWEBS:
454
480
  List of dictionaries with images search results.
455
481
 
456
482
  Raises:
457
- WebscoutE: Base exception for Webscout errors.
483
+ WebscoutE: Base exception for webscout_search errors.
458
484
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
459
485
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
460
486
  """
@@ -505,11 +531,19 @@ class AsyncWEBS:
505
531
  }
506
532
  results[priority] = result
507
533
 
508
- tasks = [_images_page(0, page=0)]
534
+ tasks = [asyncio.create_task(_images_page(0, page=0))]
509
535
  if max_results:
510
536
  max_results = min(max_results, 500)
511
- tasks.extend(_images_page(s, i) for i, s in enumerate(range(100, max_results, 100), start=1))
512
- await asyncio.gather(*tasks)
537
+ tasks.extend(
538
+ asyncio.create_task(_images_page(s, i)) for i, s in enumerate(range(100, max_results, 100), start=1)
539
+ )
540
+ try:
541
+ await asyncio.gather(*tasks)
542
+ except Exception as e:
543
+ for task in tasks:
544
+ task.cancel()
545
+ await asyncio.gather(*tasks, return_exceptions=True)
546
+ raise e
513
547
 
514
548
  return list(islice(filter(None, results), max_results))
515
549
 
@@ -524,7 +558,7 @@ class AsyncWEBS:
524
558
  license_videos: Optional[str] = None,
525
559
  max_results: Optional[int] = None,
526
560
  ) -> List[Dict[str, str]]:
527
- """Webscout videos search. Query params: https://duckduckgo.com/params.
561
+ """webscout videos search. Query params: https://duckduckgo.com/params.
528
562
 
529
563
  Args:
530
564
  keywords: keywords for query.
@@ -540,7 +574,7 @@ class AsyncWEBS:
540
574
  List of dictionaries with videos search results.
541
575
 
542
576
  Raises:
543
- WebscoutE: Base exception for Webscout errors.
577
+ WebscoutE: Base exception for webscout_search errors.
544
578
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
545
579
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
546
580
  """
@@ -579,11 +613,19 @@ class AsyncWEBS:
579
613
  priority += 1
580
614
  results[priority] = row
581
615
 
582
- tasks = [_videos_page(0, 0)]
616
+ tasks = [asyncio.create_task(_videos_page(0, 0))]
583
617
  if max_results:
584
618
  max_results = min(max_results, 400)
585
- tasks.extend(_videos_page(s, i) for i, s in enumerate(range(59, max_results, 59), start=1))
586
- await asyncio.gather(*tasks)
619
+ tasks.extend(
620
+ asyncio.create_task(_videos_page(s, i)) for i, s in enumerate(range(59, max_results, 59), start=1)
621
+ )
622
+ try:
623
+ await asyncio.gather(*tasks)
624
+ except Exception as e:
625
+ for task in tasks:
626
+ task.cancel()
627
+ await asyncio.gather(*tasks, return_exceptions=True)
628
+ raise e
587
629
 
588
630
  return list(islice(filter(None, results), max_results))
589
631
 
@@ -595,7 +637,7 @@ class AsyncWEBS:
595
637
  timelimit: Optional[str] = None,
596
638
  max_results: Optional[int] = None,
597
639
  ) -> List[Dict[str, str]]:
598
- """Webscout news search. Query params: https://duckduckgo.com/params.
640
+ """webscout news search. Query params: https://duckduckgo.com/params.
599
641
 
600
642
  Args:
601
643
  keywords: keywords for query.
@@ -608,7 +650,7 @@ class AsyncWEBS:
608
650
  List of dictionaries with news search results.
609
651
 
610
652
  Raises:
611
- WebscoutE: Base exception for Webscout errors.
653
+ WebscoutE: Base exception for webscout_search errors.
612
654
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
613
655
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
614
656
  """
@@ -653,16 +695,24 @@ class AsyncWEBS:
653
695
  }
654
696
  results[priority] = result
655
697
 
656
- tasks = [_news_page(0, 0)]
698
+ tasks = [asyncio.create_task(_news_page(0, 0))]
657
699
  if max_results:
658
700
  max_results = min(max_results, 200)
659
- tasks.extend(_news_page(s, i) for i, s in enumerate(range(29, max_results, 29), start=1))
660
- await asyncio.gather(*tasks)
701
+ tasks.extend(
702
+ asyncio.create_task(_news_page(s, i)) for i, s in enumerate(range(29, max_results, 29), start=1)
703
+ )
704
+ try:
705
+ await asyncio.gather(*tasks)
706
+ except Exception as e:
707
+ for task in tasks:
708
+ task.cancel()
709
+ await asyncio.gather(*tasks, return_exceptions=True)
710
+ raise e
661
711
 
662
712
  return list(islice(filter(None, results), max_results))
663
713
 
664
714
  async def answers(self, keywords: str) -> List[Dict[str, str]]:
665
- """Webscout instant answers. Query params: https://duckduckgo.com/params.
715
+ """webscout instant answers. Query params: https://duckduckgo.com/params.
666
716
 
667
717
  Args:
668
718
  keywords: keywords for query,
@@ -671,7 +721,7 @@ class AsyncWEBS:
671
721
  List of dictionaries with instant answers results.
672
722
 
673
723
  Raises:
674
- WebscoutE: Base exception for Webscout errors.
724
+ WebscoutE: Base exception for webscout_search errors.
675
725
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
676
726
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
677
727
  """
@@ -733,7 +783,7 @@ class AsyncWEBS:
733
783
  return results
734
784
 
735
785
  async def suggestions(self, keywords: str, region: str = "wt-wt") -> List[Dict[str, str]]:
736
- """Webscout suggestions. Query params: https://duckduckgo.com/params.
786
+ """webscout suggestions. Query params: https://duckduckgo.com/params.
737
787
 
738
788
  Args:
739
789
  keywords: keywords for query.
@@ -743,7 +793,7 @@ class AsyncWEBS:
743
793
  List of dictionaries with suggestions results.
744
794
 
745
795
  Raises:
746
- WebscoutE: Base exception for Webscout errors.
796
+ WebscoutE: Base exception for webscout_search errors.
747
797
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
748
798
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
749
799
  """
@@ -772,7 +822,7 @@ class AsyncWEBS:
772
822
  radius: int = 0,
773
823
  max_results: Optional[int] = None,
774
824
  ) -> List[Dict[str, str]]:
775
- """Webscout maps search. Query params: https://duckduckgo.com/params.
825
+ """webscout maps search. Query params: https://duckduckgo.com/params.
776
826
 
777
827
  Args:
778
828
  keywords: keywords for query
@@ -793,7 +843,7 @@ class AsyncWEBS:
793
843
  List of dictionaries with maps search results, or None if there was an error.
794
844
 
795
845
  Raises:
796
- WebscoutE: Base exception for Webscout errors.
846
+ WebscoutE: Base exception for webscout_search errors.
797
847
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
798
848
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
799
849
  """
@@ -945,7 +995,7 @@ class AsyncWEBS:
945
995
  async def translate(
946
996
  self, keywords: Union[List[str], str], from_: Optional[str] = None, to: str = "en"
947
997
  ) -> List[Dict[str, str]]:
948
- """Webscout translate.
998
+ """webscout translate.
949
999
 
950
1000
  Args:
951
1001
  keywords: string or list of strings to translate.
@@ -956,7 +1006,7 @@ class AsyncWEBS:
956
1006
  List of dictionaries with translated keywords.
957
1007
 
958
1008
  Raises:
959
- WebscoutE: Base exception for Webscout errors.
1009
+ WebscoutE: Base exception for webscout_search errors.
960
1010
  RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
961
1011
  TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
962
1012
  """
@@ -987,7 +1037,13 @@ class AsyncWEBS:
987
1037
 
988
1038
  if isinstance(keywords, str):
989
1039
  keywords = [keywords]
990
- tasks = [_translate_keyword(keyword) for keyword in keywords]
991
- await asyncio.gather(*tasks)
1040
+ tasks = [asyncio.create_task(_translate_keyword(keyword)) for keyword in keywords]
1041
+ try:
1042
+ await asyncio.gather(*tasks)
1043
+ except Exception as e:
1044
+ for task in tasks:
1045
+ task.cancel()
1046
+ await asyncio.gather(*tasks, return_exceptions=True)
1047
+ raise e
992
1048
 
993
1049
  return results
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 1.4.4
3
+ Version: 1.4.5
4
4
  Summary: Search for anything using the Google, DuckDuckGo, phind.com. Also contains AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpreter)
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
@@ -1,2 +0,0 @@
1
- __version__ = "1.4.4"
2
-
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes