webscout 1.2.8__tar.gz → 1.2.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic. Click here for more details.
- {webscout-1.2.8 → webscout-1.2.9}/PKG-INFO +9 -11
- {webscout-1.2.8 → webscout-1.2.9}/README.md +8 -10
- {webscout-1.2.8 → webscout-1.2.9}/setup.py +1 -1
- {webscout-1.2.8 → webscout-1.2.9}/webscout/__init__.py +0 -1
- {webscout-1.2.8 → webscout-1.2.9}/webscout/transcriber.py +496 -496
- webscout-1.2.9/webscout/version.py +2 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout.egg-info/PKG-INFO +9 -11
- webscout-1.2.8/webscout/version.py +0 -2
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/__init__.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/documents/__init__.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/documents/query_results_extractor.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/documents/webpage_content_extractor.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/networks/__init__.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/networks/filepath_converter.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/networks/google_searcher.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/networks/network_configs.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/networks/webpage_fetcher.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/utilsdw/__init__.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/utilsdw/enver.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/DeepWEBS/utilsdw/logger.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/LICENSE.md +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/setup.cfg +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/AI.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/AIbase.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/AIutel.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/DWEBS.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/HelpingAI.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/LLM.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/__main__.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/cli.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/exceptions.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/models.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/utils.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/webscout_search.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout/webscout_search_async.py +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout.egg-info/SOURCES.txt +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout.egg-info/dependency_links.txt +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout.egg-info/entry_points.txt +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout.egg-info/requires.txt +0 -0
- {webscout-1.2.8 → webscout-1.2.9}/webscout.egg-info/top_level.txt +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: webscout
|
|
3
|
-
Version: 1.2.8
|
|
3
|
+
Version: 1.2.9
|
|
4
4
|
Summary: Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models and now can transcribe yt videos
|
|
5
5
|
Author: OEvortex
|
|
6
6
|
Author-email: helpingai5@gmail.com
|
|
@@ -357,11 +357,12 @@ import logging
|
|
|
357
357
|
import sys
|
|
358
358
|
from itertools import chain
|
|
359
359
|
from random import shuffle
|
|
360
|
-
|
|
361
360
|
import requests
|
|
362
361
|
from webscout import AsyncWEBS
|
|
363
362
|
|
|
364
|
-
#
|
|
363
|
+
# If you have proxies, define them here
|
|
364
|
+
proxies = None
|
|
365
|
+
|
|
365
366
|
if sys.platform.lower().startswith("win"):
|
|
366
367
|
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
367
368
|
|
|
@@ -373,24 +374,21 @@ def get_words():
|
|
|
373
374
|
|
|
374
375
|
async def aget_results(word):
|
|
375
376
|
async with AsyncWEBS(proxies=proxies) as WEBS:
|
|
376
|
-
results =
|
|
377
|
+
results = await WEBS.text(word, max_results=None)
|
|
377
378
|
return results
|
|
378
379
|
|
|
379
380
|
async def main():
|
|
380
381
|
words = get_words()
|
|
381
382
|
shuffle(words)
|
|
382
|
-
tasks = []
|
|
383
|
-
for word in words[:10]:
|
|
384
|
-
tasks.append(aget_results(word))
|
|
383
|
+
tasks = [aget_results(word) for word in words[:10]]
|
|
385
384
|
results = await asyncio.gather(*tasks)
|
|
386
385
|
print(f"Done")
|
|
387
386
|
for r in chain.from_iterable(results):
|
|
388
387
|
print(r)
|
|
389
|
-
|
|
390
388
|
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
389
|
+
logging.basicConfig(level=logging.DEBUG)
|
|
390
|
+
|
|
391
|
+
await main()
|
|
394
392
|
```
|
|
395
393
|
It is important to note that the WEBS and AsyncWEBS classes should always be used as a context manager (with statement).
|
|
396
394
|
This ensures proper resource management and cleanup, as the context manager will automatically handle opening and closing the HTTP client connection.
|
|
@@ -305,11 +305,12 @@ import logging
|
|
|
305
305
|
import sys
|
|
306
306
|
from itertools import chain
|
|
307
307
|
from random import shuffle
|
|
308
|
-
|
|
309
308
|
import requests
|
|
310
309
|
from webscout import AsyncWEBS
|
|
311
310
|
|
|
312
|
-
#
|
|
311
|
+
# If you have proxies, define them here
|
|
312
|
+
proxies = None
|
|
313
|
+
|
|
313
314
|
if sys.platform.lower().startswith("win"):
|
|
314
315
|
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
315
316
|
|
|
@@ -321,24 +322,21 @@ def get_words():
|
|
|
321
322
|
|
|
322
323
|
async def aget_results(word):
|
|
323
324
|
async with AsyncWEBS(proxies=proxies) as WEBS:
|
|
324
|
-
results =
|
|
325
|
+
results = await WEBS.text(word, max_results=None)
|
|
325
326
|
return results
|
|
326
327
|
|
|
327
328
|
async def main():
|
|
328
329
|
words = get_words()
|
|
329
330
|
shuffle(words)
|
|
330
|
-
tasks = []
|
|
331
|
-
for word in words[:10]:
|
|
332
|
-
tasks.append(aget_results(word))
|
|
331
|
+
tasks = [aget_results(word) for word in words[:10]]
|
|
333
332
|
results = await asyncio.gather(*tasks)
|
|
334
333
|
print(f"Done")
|
|
335
334
|
for r in chain.from_iterable(results):
|
|
336
335
|
print(r)
|
|
337
|
-
|
|
338
336
|
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
337
|
+
logging.basicConfig(level=logging.DEBUG)
|
|
338
|
+
|
|
339
|
+
await main()
|
|
342
340
|
```
|
|
343
341
|
It is important to note that the WEBS and AsyncWEBS classes should always be used as a context manager (with statement).
|
|
344
342
|
This ensures proper resource management and cleanup, as the context manager will automatically handle opening and closing the HTTP client connection.
|
|
@@ -9,7 +9,7 @@ with open("README.md", encoding="utf-8") as f:
|
|
|
9
9
|
|
|
10
10
|
setup(
|
|
11
11
|
name="webscout",
|
|
12
|
-
version="1.2.8",
|
|
12
|
+
version="1.2.9",
|
|
13
13
|
description="Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models and now can transcribe yt videos",
|
|
14
14
|
long_description=README,
|
|
15
15
|
long_description_content_type="text/markdown",
|