webscout 6.7.tar.gz → 6.9.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic.
- {webscout-6.7/webscout.egg-info → webscout-6.9}/PKG-INFO +4 -32
- {webscout-6.7 → webscout-6.9}/README.md +1 -31
- {webscout-6.7 → webscout-6.9}/setup.py +2 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/YTdownloader.py +7 -2
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/channel.py +1 -1
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/query.py +3 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/stream.py +3 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/video.py +3 -1
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Cloudflare.py +2 -1
- {webscout-6.7 → webscout-6.9}/webscout/Provider/DARKAI.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Free2GPT.py +5 -5
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Marcus.py +3 -3
- {webscout-6.7 → webscout-6.9}/webscout/Provider/PI.py +113 -47
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Phind.py +6 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/PizzaGPT.py +62 -53
- {webscout-6.7 → webscout-6.9}/webscout/Provider/RUBIKSAI.py +93 -38
- {webscout-6.7 → webscout-6.9}/webscout/Provider/__init__.py +0 -8
- {webscout-6.7 → webscout-6.9}/webscout/Provider/cerebras.py +3 -3
- {webscout-6.7 → webscout-6.9}/webscout/Provider/cleeai.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/elmo.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/gaurish.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/geminiprorealtime.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/lepton.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/llama3mitril.py +3 -3
- {webscout-6.7 → webscout-6.9}/webscout/Provider/llamatutor.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/llmchat.py +3 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/meta.py +2 -2
- {webscout-6.7 → webscout-6.9}/webscout/Provider/tutorai.py +1 -1
- {webscout-6.7 → webscout-6.9}/webscout/__init__.py +0 -1
- {webscout-6.7 → webscout-6.9}/webscout/swiftcli/__init__.py +1 -0
- {webscout-6.7 → webscout-6.9}/webscout/version.py +1 -1
- {webscout-6.7 → webscout-6.9}/webscout/webscout_search.py +1140 -1104
- webscout-6.9/webscout/webscout_search_async.py +636 -0
- {webscout-6.7 → webscout-6.9/webscout.egg-info}/PKG-INFO +4 -32
- {webscout-6.7 → webscout-6.9}/webscout.egg-info/SOURCES.txt +0 -7
- {webscout-6.7 → webscout-6.9}/webscout.egg-info/requires.txt +2 -0
- webscout-6.7/webscout/Extra/markdownlite/__init__.py +0 -862
- webscout-6.7/webscout/Provider/Deepseek.py +0 -227
- webscout-6.7/webscout/Provider/Farfalle.py +0 -227
- webscout-6.7/webscout/Provider/NinjaChat.py +0 -200
- webscout-6.7/webscout/Provider/mhystical.py +0 -176
- webscout-6.7/webscout/webscout_search_async.py +0 -362
- webscout-6.7/webscout/zerodir/__init__.py +0 -225
- webscout-6.7/webstoken/t.py +0 -75
- {webscout-6.7 → webscout-6.9}/LICENSE.md +0 -0
- {webscout-6.7 → webscout-6.9}/setup.cfg +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/AIauto.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/AIbase.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/AIutel.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Bard.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/DWEBS.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/transcriber.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/errors.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/extras.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/https.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/patterns.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/playlist.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/pool.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/YTToolkit/ytapi/utils.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/autocoder/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/autocoder/autocoder_utiles.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/autocoder/rawdog.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/autollama.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/gguf.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/weather.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Extra/weather_ascii.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/LLM.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Litlogger/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/_version.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/formats.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/model.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/samplers.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/thread.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/ui.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Local/utils.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/AI21.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/AISEARCH/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/AISEARCH/felo_search.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/AISEARCH/ooai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Amigo.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Andi.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Bing.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Blackboxai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/ChatGPTES.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Chatify.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Cohere.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Deepinfra.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/DiscordRocks.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/EDITEE.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/GPTWeb.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Gemini.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Groq.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Jadve.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Koboldai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Llama.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Llama3.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Netwrck.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/OLLAMA.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Openai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Reka.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/AiForce/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/AiForce/async_aiforce.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/AiForce/sync_aiforce.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/Nexra/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/Nexra/async_nexra.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/Nexra/sync_nexra.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/PollinationsAI/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/PollinationsAI/async_pollinations.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/PollinationsAI/sync_pollinations.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/artbit/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/artbit/async_artbit.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/artbit/sync_artbit.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/blackbox/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/blackbox/async_blackbox.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/blackbox/sync_blackbox.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/deepinfra/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/deepinfra/async_deepinfra.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/deepinfra/sync_deepinfra.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/huggingface/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/huggingface/async_huggingface.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/huggingface/sync_huggingface.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/imgninza/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/imgninza/async_ninza.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/imgninza/sync_ninza.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/talkai/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/talkai/async_talkai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTI/talkai/sync_talkai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/deepgram.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/elevenlabs.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/gesserit.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/murfai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/parler.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/streamElements.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/utils.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TTS/voicepod.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/TeachAnything.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/Youchat.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/ai4chat.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/aimathgpt.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/askmyai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/bagoodex.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/geminiapi.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/julius.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/koala.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/learnfastai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/multichat.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/promptrefine.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/talkai.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/turboseek.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/typegpt.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/x0gpt.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/Provider/yep.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/__main__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/cli.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/conversation.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/exceptions.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/litagent/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/litprinter/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/litprinter/colors.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/optimizers.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/prompt_manager.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/crawler.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/scout.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/search_result.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/text_analyzer.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/text_utils.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core/web_analyzer.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/core.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/element.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/parsers/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/parsers/html5lib_parser.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/parsers/html_parser.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/parsers/lxml_parser.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/scout/utils.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/tempid.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/update_checker.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/utils.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/zeroart/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/zeroart/base.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/zeroart/effects.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout/zeroart/fonts.py +0 -0
- {webscout-6.7 → webscout-6.9}/webscout.egg-info/dependency_links.txt +0 -0
- {webscout-6.7 → webscout-6.9}/webscout.egg-info/entry_points.txt +0 -0
- {webscout-6.7 → webscout-6.9}/webscout.egg-info/top_level.txt +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/__init__.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/classifier.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/keywords.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/language.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/ner.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/normalizer.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/processor.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/sentiment.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/stemmer.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/tagger.py +0 -0
- {webscout-6.7 → webscout-6.9}/webstoken/tokenizer.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 6.7
+Version: 6.9
 Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
 Author: OEvortex
 Author-email: helpingai5@gmail.com
@@ -46,6 +46,8 @@ Requires-Dist: ollama
 Requires-Dist: pillow
 Requires-Dist: bson
 Requires-Dist: cloudscraper
+Requires-Dist: html5lib
+Requires-Dist: aiofiles
 Requires-Dist: emoji
 Requires-Dist: openai
 Requires-Dist: prompt-toolkit
@@ -107,9 +109,8 @@ Requires-Dist: unicorn; extra == "local"
 * **Tempmail & Temp Number:** Generate temporary email addresses and phone numbers for enhanced privacy.
 * **[Text-to-Speech (TTS)](webscout/Provider/TTS/README.md):** Convert text into natural-sounding speech using multiple AI-powered providers like ElevenLabs, StreamElements, and Voicepods.
 * **Offline LLMs:** Utilize powerful language models offline with GGUF support.
-* **Extensive Provider Ecosystem:** Explore a vast collection of providers
+* **Extensive Provider Ecosystem:** Explore a vast collection of AI providers
 * **Local LLM Execution:** Run GGUF models locally with minimal configuration.
-* **Rawdog Scripting:** Execute Python scripts directly within your terminal using the `rawdog` feature.
 * **GGUF Conversion & Quantization:** Convert and quantize Hugging Face models to GGUF format.
 * **Autollama:** Download Hugging Face models and automatically convert them for Ollama compatibility.
 * **[SwiftCLI](webscout/swiftcli/Readme.md):** A powerful and elegant CLI framework that makes it easy to create beautiful command-line interfaces.
@@ -117,7 +118,6 @@ Requires-Dist: unicorn; extra == "local"
 * **[LitLogger](webscout/litlogger/Readme.md):** Simplifies logging with customizable formats and color schemes
 * **[LitAgent](webscout/litagent/Readme.md):** Powerful and modern user agent generator that keeps your requests fresh and undetectable
 * **[Text-to-Image](webscout/Provider/TTI/README.md):** Generate high-quality images using a wide range of AI art providers
-* **[MarkdownLite](webscout/Extra/markdownlite/README.md):** Powerful HTML to Markdown conversion library with advanced parsing and structured output
 * **[Scout](webscout/scout/README.md):** Advanced web parsing and crawling library with intelligent HTML/XML parsing, web crawling, and Markdown conversion
 
 ## ⚙️ Installation
@@ -967,34 +967,6 @@ response_str = a.chat(prompt)
 print(response_str)
 ```
 
-### `DeepSeek` - Chat with DeepSeek
-
-```python
-from webscout import DeepSeek
-from rich import print
-
-ai = DeepSeek(
-    is_conversation=True,
-    api_key='cookie',
-    max_tokens=800,
-    timeout=30,
-    intro=None,
-    filepath=None,
-    update_file=True,
-    proxies={},
-    history_offset=10250,
-    act=None,
-    model="deepseek_chat"
-)
-
-# Define a prompt to send to the AI
-prompt = "Tell me about india"
-# Use the 'chat' method to send the prompt and receive a response
-r = ai.chat(prompt)
-print(r)
-```
-
 ### `Deepinfra`
 
 ```python
README.md
@@ -41,9 +41,8 @@
 * **Tempmail & Temp Number:** Generate temporary email addresses and phone numbers for enhanced privacy.
 * **[Text-to-Speech (TTS)](webscout/Provider/TTS/README.md):** Convert text into natural-sounding speech using multiple AI-powered providers like ElevenLabs, StreamElements, and Voicepods.
 * **Offline LLMs:** Utilize powerful language models offline with GGUF support.
-* **Extensive Provider Ecosystem:** Explore a vast collection of providers
+* **Extensive Provider Ecosystem:** Explore a vast collection of AI providers
 * **Local LLM Execution:** Run GGUF models locally with minimal configuration.
-* **Rawdog Scripting:** Execute Python scripts directly within your terminal using the `rawdog` feature.
 * **GGUF Conversion & Quantization:** Convert and quantize Hugging Face models to GGUF format.
 * **Autollama:** Download Hugging Face models and automatically convert them for Ollama compatibility.
 * **[SwiftCLI](webscout/swiftcli/Readme.md):** A powerful and elegant CLI framework that makes it easy to create beautiful command-line interfaces.
@@ -51,7 +50,6 @@
 * **[LitLogger](webscout/litlogger/Readme.md):** Simplifies logging with customizable formats and color schemes
 * **[LitAgent](webscout/litagent/Readme.md):** Powerful and modern user agent generator that keeps your requests fresh and undetectable
 * **[Text-to-Image](webscout/Provider/TTI/README.md):** Generate high-quality images using a wide range of AI art providers
-* **[MarkdownLite](webscout/Extra/markdownlite/README.md):** Powerful HTML to Markdown conversion library with advanced parsing and structured output
 * **[Scout](webscout/scout/README.md):** Advanced web parsing and crawling library with intelligent HTML/XML parsing, web crawling, and Markdown conversion
 
 ## ⚙️ Installation
@@ -901,34 +899,6 @@ response_str = a.chat(prompt)
 print(response_str)
 ```
 
-### `DeepSeek` - Chat with DeepSeek
-
-```python
-from webscout import DeepSeek
-from rich import print
-
-ai = DeepSeek(
-    is_conversation=True,
-    api_key='cookie',
-    max_tokens=800,
-    timeout=30,
-    intro=None,
-    filepath=None,
-    update_file=True,
-    proxies={},
-    history_offset=10250,
-    act=None,
-    model="deepseek_chat"
-)
-
-# Define a prompt to send to the AI
-prompt = "Tell me about india"
-# Use the 'chat' method to send the prompt and receive a response
-r = ai.chat(prompt)
-print(r)
-```
-
 ### `Deepinfra`
 
 ```python
webscout/Extra/YTToolkit/YTdownloader.py
@@ -12,10 +12,15 @@ from sys import stdout
 import os
 import subprocess
 import sys
+import tempfile
 from webscout.version import __prog__, __version__
-from webscout.zerodir import user_cache_dir
 from webscout.swiftcli import CLI, option, argument, group
 
+# Define cache directory using tempfile
+user_cache_dir = os.path.join(tempfile.gettempdir(), 'webscout')
+if not os.path.exists(user_cache_dir):
+    os.makedirs(user_cache_dir)
+
 logging = LitLogger(name="YTDownloader")
 
 session = requests.session()
@@ -32,7 +37,7 @@ session.headers.update(headers)
 
 get_excep = lambda e: e.args[1] if len(e.args) > 1 else e
 
-appdir = user_cache_dir
+appdir = user_cache_dir
 
 if not path.isdir(appdir):
     try:
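The YTdownloader change above swaps the removed `webscout.zerodir` helper for a cache directory built from the standard library. A minimal standalone sketch of the same pattern (names taken from the diff; using `exist_ok=True` here is an assumption in place of the explicit existence check):

```python
import os
import tempfile

# Cache directory under the system temp dir, mirroring the new YTdownloader code.
user_cache_dir = os.path.join(tempfile.gettempdir(), 'webscout')
os.makedirs(user_cache_dir, exist_ok=True)  # assumption: exist_ok instead of the os.path.exists() guard
print(user_cache_dir)  # e.g. /tmp/webscout on Linux
```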
webscout/Extra/YTToolkit/ytapi/video.py
@@ -99,4 +99,6 @@ class Video:
                 data['genre'] = genre_pattern.search(self._video_data).group(1)
             except AttributeError:
                 data['genre'] = None
-        return data
+        return data
+if __name__ == '__main__':
+    print(Video('https://www.youtube.com/watch?v=9bZkp7q19f0').metadata)
webscout/Provider/Cloudflare.py
@@ -1,5 +1,6 @@
 import json
 from uuid import uuid4
+import webscout
 from webscout.AIutel import Optimizers
 from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts, sanitize_stream
@@ -107,7 +108,7 @@ class Cloudflare(Provider):
             'Sec-Fetch-Dest': 'empty',
             'Sec-Fetch-Mode': 'cors',
             'Sec-Fetch-Site': 'same-origin',
-            'User-Agent': 
+            'User-Agent': webscout.LitAgent().random()
         }
 
         self.cookies = {
webscout/Provider/DARKAI.py
@@ -4,7 +4,7 @@ from webscout.AIutel import Optimizers
 from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts, sanitize_stream
 from webscout.AIbase import Provider
-from webscout import exceptions
+from webscout import exceptions, LitAgent
 import requests
 
 class DARKAI(Provider):
@@ -75,7 +75,7 @@ class DARKAI(Provider):
             "sec-fetch-dest": "empty",
             "sec-fetch-mode": "cors",
             "sec-fetch-site": "cross-site",
-            "user-agent": 
+            "user-agent": LitAgent().random(),
         }
 
         self.__available_optimizers = (
webscout/Provider/Free2GPT.py
@@ -9,7 +9,7 @@ from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
 from webscout.AIbase import Provider
 from webscout import exceptions
-
+from webscout import LitAgent
 
 class Free2GPT(Provider):
     """
@@ -48,7 +48,7 @@ class Free2GPT(Provider):
         self.session = requests.Session()
         self.is_conversation = is_conversation
         self.max_tokens_to_sample = max_tokens
-        self.api_endpoint = "https://
+        self.api_endpoint = "https://chat1.free2gpt.com/api/generate"
         self.stream_chunk_size = 64
         self.timeout = timeout
         self.last_response = {}
@@ -59,15 +59,15 @@ class Free2GPT(Provider):
             "accept-language": "en-US,en;q=0.9,en-IN;q=0.8",
             "content-type": "text/plain;charset=UTF-8",
             "dnt": "1",
-            "origin": "https://
-            "referer": "https://
+            "origin": "https://chat1.free2gpt.co",
+            "referer": "https://chat1.free2gpt.co",
             "sec-ch-ua": '"Chromium";v="128", "Not;A=Brand";v="24", "Microsoft Edge";v="128"',
             "sec-ch-ua-mobile": "?0",
             "sec-ch-ua-platform": '"Windows"',
             "sec-fetch-dest": "empty",
             "sec-fetch-mode": "cors",
             "sec-fetch-site": "same-origin",
-            "user-agent": 
+            "user-agent": LitAgent().random(),
         }
 
         self.__available_optimizers = (
webscout/Provider/Marcus.py
@@ -7,7 +7,7 @@ from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
 from webscout.AIbase import Provider
 from webscout import exceptions
-
+from webscout import LitAgent as Lit
 
 class Marcus(Provider):
     """
@@ -39,7 +39,7 @@ class Marcus(Provider):
             'accept': '*/*',
             'origin': 'https://www.askmarcus.app',
             'referer': 'https://www.askmarcus.app/chat',
-            'user-agent': 
+            'user-agent': Lit().random(),
         }
         self.__available_optimizers = (
             method
@@ -134,4 +134,4 @@ if __name__ == '__main__':
     ai = Marcus(timeout=30)
     response = ai.chat("Tell me about India", stream=True)
     for chunk in response:
-        print(chunk)
+        print(chunk, end="", flush=True)
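Cloudflare, DARKAI, Free2GPT, and Marcus all make the same substitution: a hard-coded `User-Agent` value is replaced by one generated with `LitAgent`. A hedged sketch of the pattern as it appears in these hunks (assumes webscout 6.9 is installed; the headers dict is illustrative rather than any provider's exact headers):

```python
from webscout import LitAgent  # import added across the updated providers

# Illustrative request headers; only the user-agent entry mirrors the actual change.
headers = {
    "accept": "*/*",
    "user-agent": LitAgent().random(),  # fresh randomized UA string per call
}
print(headers["user-agent"])
```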
webscout/Provider/PI.py
@@ -7,21 +7,19 @@ from webscout.AIutel import Optimizers
 from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
 from webscout.AIbase import Provider
-from typing import Dict
+from typing import Dict, Union, Any
 from webscout import LitAgent
-
-"""PiAI class for interacting with the Pi.ai chat API, extending the Provider class.
+from webscout.Litlogger import LitLogger, LogFormat, ColorScheme
 
-
-
-
+class PiAI(Provider):
+    """
+    PiAI is a provider class for interacting with the Pi.ai chat API.
 
     Attributes:
-
-
-        AVAILABLE_VOICES (Dict[str, int]): A dictionary mapping voice names to their corresponding IDs.
-        headers (Dict[str, str]): The headers to be used in HTTP requests to the API.
+        knowledge_cutoff (str): The knowledge cutoff date for the model
+        AVAILABLE_VOICES (Dict[str, int]): Available voice options for audio responses
     """
+
     def __init__(
         self,
         is_conversation: bool = True,
@@ -33,19 +31,30 @@ class PiAI(Provider):
         proxies: dict = {},
         history_offset: int = 10250,
         act: str = None,
+        logging: bool = False,
     ):
-        """
+        """
+        Initializes the PiAI provider with specified parameters.
 
         Args:
-            is_conversation (bool
-            max_tokens (int
-            timeout (int
-            intro (str
-            filepath (str
-            update_file (bool
-            proxies (dict
-            history_offset (int
-            act (str
+            is_conversation (bool): Whether to maintain conversation history
+            max_tokens (int): Maximum number of tokens in response
+            timeout (int): Request timeout in seconds
+            intro (str): Custom introduction message
+            filepath (str): Path to save conversation history
+            update_file (bool): Whether to update conversation history file
+            proxies (dict): Proxy configuration
+            history_offset (int): Conversation history limit
+            act (str): Custom personality/act for the AI
+            logging (bool): Enable debug logging
+
+        Examples:
+            >>> ai = PiAI(logging=True)
+            >>> ai.ask("What's the weather today?", "Alice")
+            Sends a prompt to Pi.ai and returns the response.
+
+            >>> ai.chat("Tell me a joke", voice_name="William")
+            Initiates a chat with Pi.ai using the provided prompt.
         """
         self.scraper = cloudscraper.create_scraper()
         self.url = 'https://pi.ai/api/chat'
@@ -84,7 +93,7 @@ class PiAI(Provider):
         self.max_tokens_to_sample = max_tokens
         self.stream_chunk_size = 64
         self.timeout = timeout
-        self.last_response = {}
+        self.last_response = {} if self.is_conversation else {'text': ""}
         self.conversation_id = None
 
         self.__available_optimizers = (
@@ -105,11 +114,35 @@ class PiAI(Provider):
         )
         self.conversation.history_offset = history_offset
         self.session.proxies = proxies
+
+        # Initialize logger
+        self.logger = LitLogger(name="PiAI", format=LogFormat.MODERN_EMOJI, color_scheme=ColorScheme.CYBERPUNK) if logging else None
+
+        self.knowledge_cutoff = "December 2023"
+
         # Initialize conversation ID
         if self.is_conversation:
             self.start_conversation()
 
+        if self.logger:
+            self.logger.debug("PiAI instance initialized")
+
     def start_conversation(self) -> str:
+        """
+        Initializes a new conversation and returns the conversation ID.
+
+        Returns:
+            str: The conversation ID from Pi.ai
+
+        Examples:
+            >>> ai = PiAI()
+            >>> conversation_id = ai.start_conversation()
+            >>> print(conversation_id)
+            'abc123xyz'
+        """
+        if self.logger:
+            self.logger.debug("Starting new conversation")
+
         response = self.scraper.post(
             "https://pi.ai/api/chat/start",
             headers=self.headers,
@@ -117,8 +150,16 @@ class PiAI(Provider):
             json={},
             timeout=self.timeout
         )
+
+        if not response.ok and self.logger:
+            self.logger.error(f"Failed to start conversation: {response.status_code}")
+
         data = response.json()
         self.conversation_id = data['conversations'][0]['sid']
+
+        if self.logger:
+            self.logger.debug(f"Conversation started with ID: {self.conversation_id}")
+
         return self.conversation_id
 
     def ask(
@@ -132,26 +173,31 @@ class PiAI(Provider):
         verbose:bool = None,
         output_file:str = None
     ) -> dict:
-        """
+        """
+        Interact with Pi.ai by sending a prompt and receiving a response.
 
         Args:
-            prompt (str): The prompt to be sent to
-            voice_name (str): The name of the voice to use for audio responses
-            stream (bool
-            raw (bool
-            optimizer (str
-            conversationally (bool
-            verbose (bool
-            output_file (str
+            prompt (str): The prompt to be sent to Pi.ai
+            voice_name (str): The name of the voice to use for audio responses
+            stream (bool): Flag for streaming response
+            raw (bool): If True, returns the raw response as received
+            optimizer (str): Name of the prompt optimizer to use
+            conversationally (bool): If True, chat conversationally when using optimizer
+            verbose (bool): If True, provides detailed output
+            output_file (str): File path to save the output
 
         Returns:
-            dict: A dictionary containing the AI's response
-
-
-
-
-
+            dict: A dictionary containing the AI's response
+
+        Examples:
+            >>> ai = PiAI(logging=True)
+            >>> response = ai.ask("Hello!", "Alice", verbose=True)
+            >>> print(response['text'])
+            'Hi! How can I help you today?'
         """
+        if self.logger:
+            self.logger.debug(f"ask() called with prompt: {prompt}, voice: {voice_name}")
+
         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
             if optimizer in self.__available_optimizers:
@@ -159,6 +205,8 @@ class PiAI(Provider):
                     conversation_prompt if conversationally else prompt
                 )
             else:
+                if self.logger:
+                    self.logger.error(f"Invalid optimizer: {optimizer}")
                 raise Exception(
                     f"Optimizer is not one of {self.__available_optimizers}"
                 )
@@ -187,8 +235,8 @@ class PiAI(Provider):
                         resp = dict(text=streaming_text)
                         self.last_response.update(resp)
                         yield parsed_data if raw else resp
-                    except
-
+                    except:continue
+
             self.conversation.update_chat_history(
                 prompt, self.get_message(self.last_response)
             )
@@ -210,20 +258,30 @@ class PiAI(Provider):
         verbose:bool = True,
         output_file:str = "PiAi.mp3"
     ) -> str:
-        """
+        """
+        Generates a response based on the provided prompt.
 
         Args:
-            prompt (str):
-            voice_name (str
-            stream (bool
-            optimizer (str
-            conversationally (bool
-            verbose (bool
-            output_file (str
+            prompt (str): Input prompt for generating response
+            voice_name (str): Voice to use for audio response
+            stream (bool): Enable response streaming
+            optimizer (str): Prompt optimizer to use
+            conversationally (bool): Enable conversational mode with optimizer
+            verbose (bool): Enable verbose output
+            output_file (str): Audio output file path
 
         Returns:
-            str: The generated response
+            str: The generated response
+
+        Examples:
+            >>> ai = PiAI(logging=True)
+            >>> response = ai.chat("Tell me a joke", voice_name="William")
+            >>> print(response)
+            'Why did the scarecrow win an award? Because he was outstanding in his field!'
         """
+        if self.logger:
+            self.logger.debug(f"chat() called with prompt: {prompt}, voice: {voice_name}")
+
         assert (
             voice_name in self.AVAILABLE_VOICES
         ), f"Voice '{voice_name}' not one of [{', '.join(self.AVAILABLE_VOICES.keys())}]"
@@ -271,6 +329,9 @@ class PiAI(Provider):
             verbose (bool): Flag to indicate if verbose output is desired.
             output_file (str): The file path where the audio will be saved.
         """
+        if self.logger:
+            self.logger.debug(f"Downloading audio with voice: {voice_name}")
+
         params = {
             'mode': 'eager',
             'voice': f'voice{self.AVAILABLE_VOICES[voice_name]}',
@@ -278,11 +339,16 @@ class PiAI(Provider):
         }
         try:
             audio_response = self.scraper.get('https://pi.ai/api/chat/voice', params=params, cookies=self.cookies, headers=self.headers, timeout=self.timeout)
+            if not audio_response.ok and self.logger:
+                self.logger.error(f"Audio download failed: {audio_response.status_code}")
+
             audio_response.raise_for_status() # Raise an exception for bad status codes
             with open(output_file, "wb") as file:
                 file.write(audio_response.content)
             if verbose:print("\nAudio file downloaded successfully.")
         except requests.exceptions.RequestException as e:
+            if self.logger:
+                self.logger.error(f"Audio download failed: {e}")
             if verbose:print(f"\nFailed to download audio file. Error: {e}")
 
 if __name__ == '__main__':
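Pulling the PiAI docstring examples added above together, a short usage sketch (the module path is inferred from the file list and the class name from the diff; live network access to pi.ai is required and the output will vary):

```python
from webscout.Provider.PI import PiAI  # path assumed from webscout/Provider/PI.py

ai = PiAI(logging=True)  # logging=True enables the LitLogger instance added in 6.9
response = ai.chat("Tell me a joke", voice_name="William")  # voice must be in AVAILABLE_VOICES
print(response)
```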