webscout 8.3.6__py3-none-any.whl → 2025.10.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic. Click here for more details.
- webscout/AIauto.py +250 -250
- webscout/AIbase.py +379 -379
- webscout/AIutel.py +60 -58
- webscout/Bard.py +1012 -1012
- webscout/Bing_search.py +417 -417
- webscout/DWEBS.py +529 -529
- webscout/Extra/Act.md +309 -309
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/README.md +110 -110
- webscout/Extra/GitToolkit/gitapi/__init__.py +11 -11
- webscout/Extra/GitToolkit/gitapi/repository.py +195 -195
- webscout/Extra/GitToolkit/gitapi/user.py +96 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +61 -61
- webscout/Extra/YTToolkit/README.md +375 -375
- webscout/Extra/YTToolkit/YTdownloader.py +956 -956
- webscout/Extra/YTToolkit/__init__.py +2 -2
- webscout/Extra/YTToolkit/transcriber.py +475 -475
- webscout/Extra/YTToolkit/ytapi/README.md +44 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +6 -6
- webscout/Extra/YTToolkit/ytapi/channel.py +307 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +118 -118
- webscout/Extra/YTToolkit/ytapi/https.py +88 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +58 -58
- webscout/Extra/YTToolkit/ytapi/pool.py +7 -7
- webscout/Extra/YTToolkit/ytapi/query.py +39 -39
- webscout/Extra/YTToolkit/ytapi/stream.py +62 -62
- webscout/Extra/YTToolkit/ytapi/utils.py +62 -62
- webscout/Extra/YTToolkit/ytapi/video.py +232 -232
- webscout/Extra/autocoder/__init__.py +9 -9
- webscout/Extra/autocoder/autocoder.py +1105 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +332 -332
- webscout/Extra/gguf.md +429 -429
- webscout/Extra/gguf.py +1213 -1213
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +27 -27
- webscout/Extra/tempmail/async_utils.py +140 -140
- webscout/Extra/tempmail/base.py +160 -160
- webscout/Extra/tempmail/cli.py +186 -186
- webscout/Extra/tempmail/emailnator.py +84 -84
- webscout/Extra/tempmail/mail_tm.py +360 -360
- webscout/Extra/tempmail/temp_mail_io.py +291 -291
- webscout/Extra/weather.md +281 -281
- webscout/Extra/weather.py +193 -193
- webscout/Litlogger/README.md +10 -10
- webscout/Litlogger/__init__.py +15 -15
- webscout/Litlogger/formats.py +13 -13
- webscout/Litlogger/handlers.py +121 -121
- webscout/Litlogger/levels.py +13 -13
- webscout/Litlogger/logger.py +134 -134
- webscout/Provider/AISEARCH/Perplexity.py +332 -332
- webscout/Provider/AISEARCH/README.md +279 -279
- webscout/Provider/AISEARCH/__init__.py +33 -11
- webscout/Provider/AISEARCH/felo_search.py +206 -206
- webscout/Provider/AISEARCH/genspark_search.py +323 -323
- webscout/Provider/AISEARCH/hika_search.py +185 -185
- webscout/Provider/AISEARCH/iask_search.py +410 -410
- webscout/Provider/AISEARCH/monica_search.py +219 -219
- webscout/Provider/AISEARCH/scira_search.py +316 -314
- webscout/Provider/AISEARCH/stellar_search.py +177 -177
- webscout/Provider/AISEARCH/webpilotai_search.py +255 -255
- webscout/Provider/Aitopia.py +314 -315
- webscout/Provider/Andi.py +3 -3
- webscout/Provider/Apriel.py +306 -0
- webscout/Provider/ChatGPTClone.py +236 -236
- webscout/Provider/ChatSandbox.py +343 -342
- webscout/Provider/Cloudflare.py +324 -324
- webscout/Provider/Cohere.py +208 -207
- webscout/Provider/Deepinfra.py +370 -369
- webscout/Provider/ExaAI.py +260 -260
- webscout/Provider/ExaChat.py +308 -387
- webscout/Provider/Flowith.py +221 -221
- webscout/Provider/GMI.py +293 -0
- webscout/Provider/Gemini.py +164 -162
- webscout/Provider/GeminiProxy.py +167 -166
- webscout/Provider/GithubChat.py +371 -370
- webscout/Provider/Groq.py +800 -800
- webscout/Provider/HeckAI.py +383 -379
- webscout/Provider/Jadve.py +282 -297
- webscout/Provider/K2Think.py +308 -0
- webscout/Provider/Koboldai.py +206 -384
- webscout/Provider/LambdaChat.py +423 -425
- webscout/Provider/Nemotron.py +244 -245
- webscout/Provider/Netwrck.py +248 -247
- webscout/Provider/OLLAMA.py +395 -394
- webscout/Provider/OPENAI/Cloudflare.py +394 -395
- webscout/Provider/OPENAI/FalconH1.py +452 -457
- webscout/Provider/OPENAI/FreeGemini.py +297 -299
- webscout/Provider/OPENAI/{monochat.py → K2Think.py} +432 -329
- webscout/Provider/OPENAI/NEMOTRON.py +241 -244
- webscout/Provider/OPENAI/PI.py +428 -427
- webscout/Provider/OPENAI/README.md +959 -959
- webscout/Provider/OPENAI/TogetherAI.py +345 -345
- webscout/Provider/OPENAI/TwoAI.py +466 -467
- webscout/Provider/OPENAI/__init__.py +33 -59
- webscout/Provider/OPENAI/ai4chat.py +313 -303
- webscout/Provider/OPENAI/base.py +249 -269
- webscout/Provider/OPENAI/chatglm.py +528 -0
- webscout/Provider/OPENAI/chatgpt.py +593 -588
- webscout/Provider/OPENAI/chatgptclone.py +521 -524
- webscout/Provider/OPENAI/chatsandbox.py +202 -177
- webscout/Provider/OPENAI/deepinfra.py +319 -315
- webscout/Provider/OPENAI/e2b.py +1665 -1665
- webscout/Provider/OPENAI/exaai.py +420 -420
- webscout/Provider/OPENAI/exachat.py +452 -452
- webscout/Provider/OPENAI/friendli.py +232 -232
- webscout/Provider/OPENAI/{refact.py → gmi.py} +324 -274
- webscout/Provider/OPENAI/groq.py +364 -364
- webscout/Provider/OPENAI/heckai.py +314 -311
- webscout/Provider/OPENAI/llmchatco.py +337 -337
- webscout/Provider/OPENAI/netwrck.py +355 -354
- webscout/Provider/OPENAI/oivscode.py +290 -290
- webscout/Provider/OPENAI/opkfc.py +518 -518
- webscout/Provider/OPENAI/pydantic_imports.py +1 -1
- webscout/Provider/OPENAI/scirachat.py +535 -529
- webscout/Provider/OPENAI/sonus.py +308 -308
- webscout/Provider/OPENAI/standardinput.py +442 -442
- webscout/Provider/OPENAI/textpollinations.py +340 -348
- webscout/Provider/OPENAI/toolbaz.py +419 -413
- webscout/Provider/OPENAI/typefully.py +362 -362
- webscout/Provider/OPENAI/utils.py +295 -295
- webscout/Provider/OPENAI/venice.py +436 -436
- webscout/Provider/OPENAI/wisecat.py +387 -387
- webscout/Provider/OPENAI/writecream.py +166 -166
- webscout/Provider/OPENAI/x0gpt.py +378 -378
- webscout/Provider/OPENAI/yep.py +389 -389
- webscout/Provider/OpenGPT.py +230 -230
- webscout/Provider/Openai.py +244 -496
- webscout/Provider/PI.py +405 -404
- webscout/Provider/Perplexitylabs.py +430 -431
- webscout/Provider/QwenLM.py +272 -254
- webscout/Provider/STT/__init__.py +32 -2
- webscout/Provider/{Llama3.py → Sambanova.py} +257 -258
- webscout/Provider/StandardInput.py +309 -309
- webscout/Provider/TTI/README.md +82 -82
- webscout/Provider/TTI/__init__.py +33 -12
- webscout/Provider/TTI/aiarta.py +413 -413
- webscout/Provider/TTI/base.py +136 -136
- webscout/Provider/TTI/bing.py +243 -243
- webscout/Provider/TTI/gpt1image.py +149 -149
- webscout/Provider/TTI/imagen.py +196 -196
- webscout/Provider/TTI/infip.py +211 -211
- webscout/Provider/TTI/magicstudio.py +232 -232
- webscout/Provider/TTI/monochat.py +219 -219
- webscout/Provider/TTI/piclumen.py +214 -214
- webscout/Provider/TTI/pixelmuse.py +232 -232
- webscout/Provider/TTI/pollinations.py +232 -232
- webscout/Provider/TTI/together.py +288 -288
- webscout/Provider/TTI/utils.py +12 -12
- webscout/Provider/TTI/venice.py +367 -367
- webscout/Provider/TTS/README.md +192 -192
- webscout/Provider/TTS/__init__.py +33 -10
- webscout/Provider/TTS/parler.py +110 -110
- webscout/Provider/TTS/streamElements.py +333 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TeachAnything.py +237 -236
- webscout/Provider/TextPollinationsAI.py +311 -318
- webscout/Provider/TogetherAI.py +356 -357
- webscout/Provider/TwoAI.py +313 -569
- webscout/Provider/TypliAI.py +312 -311
- webscout/Provider/UNFINISHED/ChatHub.py +208 -208
- webscout/Provider/UNFINISHED/ChutesAI.py +313 -313
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +294 -294
- webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +198 -198
- webscout/Provider/{Qodo.py → UNFINISHED/Qodo.py} +477 -477
- webscout/Provider/UNFINISHED/VercelAIGateway.py +338 -338
- webscout/Provider/{XenAI.py → UNFINISHED/XenAI.py} +324 -324
- webscout/Provider/UNFINISHED/Youchat.py +330 -330
- webscout/Provider/UNFINISHED/liner.py +334 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +262 -262
- webscout/Provider/UNFINISHED/puterjs.py +634 -634
- webscout/Provider/UNFINISHED/samurai.py +223 -223
- webscout/Provider/UNFINISHED/test_lmarena.py +119 -119
- webscout/Provider/Venice.py +251 -250
- webscout/Provider/VercelAI.py +256 -255
- webscout/Provider/WiseCat.py +232 -231
- webscout/Provider/WrDoChat.py +367 -366
- webscout/Provider/__init__.py +33 -86
- webscout/Provider/ai4chat.py +174 -174
- webscout/Provider/akashgpt.py +331 -334
- webscout/Provider/cerebras.py +446 -340
- webscout/Provider/chatglm.py +394 -214
- webscout/Provider/cleeai.py +211 -212
- webscout/Provider/deepseek_assistant.py +1 -1
- webscout/Provider/elmo.py +282 -282
- webscout/Provider/geminiapi.py +208 -208
- webscout/Provider/granite.py +261 -261
- webscout/Provider/hermes.py +263 -265
- webscout/Provider/julius.py +223 -222
- webscout/Provider/learnfastai.py +309 -309
- webscout/Provider/llama3mitril.py +214 -214
- webscout/Provider/llmchat.py +243 -243
- webscout/Provider/llmchatco.py +290 -290
- webscout/Provider/meta.py +801 -801
- webscout/Provider/oivscode.py +309 -309
- webscout/Provider/scira_chat.py +384 -457
- webscout/Provider/searchchat.py +292 -291
- webscout/Provider/sonus.py +258 -258
- webscout/Provider/toolbaz.py +370 -364
- webscout/Provider/turboseek.py +274 -265
- webscout/Provider/typefully.py +208 -207
- webscout/Provider/x0gpt.py +1 -0
- webscout/Provider/yep.py +372 -371
- webscout/__init__.py +30 -31
- webscout/__main__.py +5 -5
- webscout/auth/api_key_manager.py +189 -189
- webscout/auth/config.py +175 -175
- webscout/auth/models.py +185 -185
- webscout/auth/routes.py +664 -664
- webscout/auth/simple_logger.py +236 -236
- webscout/cli.py +523 -523
- webscout/conversation.py +438 -438
- webscout/exceptions.py +361 -361
- webscout/litagent/Readme.md +298 -298
- webscout/litagent/__init__.py +28 -28
- webscout/litagent/agent.py +581 -581
- webscout/litagent/constants.py +59 -59
- webscout/litprinter/__init__.py +58 -58
- webscout/models.py +181 -181
- webscout/optimizers.py +419 -419
- webscout/prompt_manager.py +288 -288
- webscout/sanitize.py +1078 -1078
- webscout/scout/README.md +401 -401
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +6 -6
- webscout/scout/core/crawler.py +297 -297
- webscout/scout/core/scout.py +706 -706
- webscout/scout/core/search_result.py +95 -95
- webscout/scout/core/text_analyzer.py +62 -62
- webscout/scout/core/text_utils.py +277 -277
- webscout/scout/core/web_analyzer.py +51 -51
- webscout/scout/element.py +599 -599
- webscout/scout/parsers/__init__.py +69 -69
- webscout/scout/parsers/html5lib_parser.py +172 -172
- webscout/scout/parsers/html_parser.py +236 -236
- webscout/scout/parsers/lxml_parser.py +178 -178
- webscout/scout/utils.py +37 -37
- webscout/swiftcli/Readme.md +323 -323
- webscout/swiftcli/__init__.py +95 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +308 -308
- webscout/swiftcli/core/context.py +104 -104
- webscout/swiftcli/core/group.py +241 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +221 -221
- webscout/swiftcli/decorators/options.py +220 -220
- webscout/swiftcli/decorators/output.py +302 -302
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +135 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +59 -59
- webscout/swiftcli/utils/formatting.py +252 -252
- webscout/swiftcli/utils/parsing.py +267 -267
- webscout/update_checker.py +117 -117
- webscout/version.py +1 -1
- webscout/webscout_search.py +1183 -1183
- webscout/webscout_search_async.py +649 -649
- webscout/yep_search.py +346 -346
- webscout/zeroart/README.md +89 -89
- webscout/zeroart/__init__.py +134 -134
- webscout/zeroart/base.py +66 -66
- webscout/zeroart/effects.py +100 -100
- webscout/zeroart/fonts.py +1238 -1238
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/METADATA +937 -936
- webscout-2025.10.11.dist-info/RECORD +300 -0
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -793
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/GptOss.py +0 -207
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/Kimi.py +0 -445
- webscout/Provider/MCPCore.py +0 -322
- webscout/Provider/MiniMax.py +0 -207
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -1045
- webscout/Provider/OPENAI/MiniMax.py +0 -298
- webscout/Provider/OPENAI/Qwen3.py +0 -304
- webscout/Provider/OPENAI/autoproxy.py +0 -1067
- webscout/Provider/OPENAI/copilot.py +0 -321
- webscout/Provider/OPENAI/gptoss.py +0 -288
- webscout/Provider/OPENAI/kimi.py +0 -469
- webscout/Provider/OPENAI/mcpcore.py +0 -431
- webscout/Provider/OPENAI/multichat.py +0 -378
- webscout/Provider/OPENAI/qodo.py +0 -630
- webscout/Provider/OPENAI/xenai.py +0 -514
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/UNFINISHED/fetch_together_models.py +0 -90
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/copilot.py +0 -441
- webscout/Provider/freeaichat.py +0 -294
- webscout/Provider/koala.py +0 -182
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/monochat.py +0 -275
- webscout/Provider/multichat.py +0 -375
- webscout/Provider/scnet.py +0 -244
- webscout/Provider/talkai.py +0 -194
- webscout/tempid.py +0 -128
- webscout-8.3.6.dist-info/RECORD +0 -327
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/WHEEL +0 -0
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/entry_points.txt +0 -0
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/top_level.txt +0 -0
|
@@ -1,957 +1,957 @@
|
|
|
1
|
-
from datetime import datetime
|
|
2
|
-
import json
|
|
3
|
-
from webscout.litagent import LitAgent
|
|
4
|
-
from time import sleep
|
|
5
|
-
import requests
|
|
6
|
-
from tqdm import tqdm
|
|
7
|
-
from colorama import Fore
|
|
8
|
-
from os import makedirs, path, getcwd
|
|
9
|
-
from threading import Thread
|
|
10
|
-
import os
|
|
11
|
-
import subprocess
|
|
12
|
-
import sys
|
|
13
|
-
import tempfile
|
|
14
|
-
from webscout.version import __prog__, __version__
|
|
15
|
-
from webscout.swiftcli import CLI, option, argument
|
|
16
|
-
|
|
17
|
-
# Define cache directory using tempfile
|
|
18
|
-
user_cache_dir = os.path.join(tempfile.gettempdir(), 'webscout')
|
|
19
|
-
if not os.path.exists(user_cache_dir):
|
|
20
|
-
os.makedirs(user_cache_dir)
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
session = requests.session()
|
|
24
|
-
|
|
25
|
-
headers = {
|
|
26
|
-
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
|
|
27
|
-
"User-Agent": LitAgent().random(),
|
|
28
|
-
"Accept-Encoding": "gzip, deflate, br",
|
|
29
|
-
"Accept-Language": "en-US,en;q=0.9",
|
|
30
|
-
"referer": "https://y2mate.com",
|
|
31
|
-
}
|
|
32
|
-
|
|
33
|
-
session.headers.update(headers)
|
|
34
|
-
|
|
35
|
-
get_excep = lambda e: e.args[1] if len(e.args) > 1 else e
|
|
36
|
-
|
|
37
|
-
appdir = user_cache_dir
|
|
38
|
-
|
|
39
|
-
if not path.isdir(appdir):
|
|
40
|
-
try:
|
|
41
|
-
makedirs(appdir)
|
|
42
|
-
except Exception as e:
|
|
43
|
-
print(
|
|
44
|
-
f"Error : {get_excep(e)} while creating site directory - "
|
|
45
|
-
+ appdir
|
|
46
|
-
)
|
|
47
|
-
|
|
48
|
-
history_path = path.join(appdir, "history.json")
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
class utils:
|
|
52
|
-
@staticmethod
|
|
53
|
-
def error_handler(resp=None, exit_on_error=False, log=True):
|
|
54
|
-
r"""Execption handler decorator"""
|
|
55
|
-
|
|
56
|
-
def decorator(func):
|
|
57
|
-
def main(*args, **kwargs):
|
|
58
|
-
try:
|
|
59
|
-
try:
|
|
60
|
-
return func(*args, **kwargs)
|
|
61
|
-
except KeyboardInterrupt as e:
|
|
62
|
-
print()
|
|
63
|
-
exit(1)
|
|
64
|
-
except Exception as e:
|
|
65
|
-
if log:
|
|
66
|
-
raise(f"Error - {get_excep(e)}")
|
|
67
|
-
if exit_on_error:
|
|
68
|
-
exit(1)
|
|
69
|
-
|
|
70
|
-
return resp
|
|
71
|
-
|
|
72
|
-
return main
|
|
73
|
-
|
|
74
|
-
return decorator
|
|
75
|
-
|
|
76
|
-
@staticmethod
|
|
77
|
-
def get(*args, **kwargs):
|
|
78
|
-
r"""Sends http get request"""
|
|
79
|
-
resp = session.get(*args, **kwargs)
|
|
80
|
-
return all([resp.ok, "application/json" in resp.headers["content-type"]]), resp
|
|
81
|
-
|
|
82
|
-
@staticmethod
|
|
83
|
-
def post(*args, **kwargs):
|
|
84
|
-
r"""Sends http post request"""
|
|
85
|
-
resp = session.post(*args, **kwargs)
|
|
86
|
-
return all([resp.ok, "application/json" in resp.headers["content-type"]]), resp
|
|
87
|
-
|
|
88
|
-
@staticmethod
|
|
89
|
-
def add_history(data: dict) -> None:
|
|
90
|
-
f"""Adds entry to history
|
|
91
|
-
:param data: Response of `third query`
|
|
92
|
-
:type data: dict
|
|
93
|
-
:rtype: None
|
|
94
|
-
"""
|
|
95
|
-
try:
|
|
96
|
-
if not path.isfile(history_path):
|
|
97
|
-
data1 = {__prog__: []}
|
|
98
|
-
with open(history_path, "w") as fh:
|
|
99
|
-
json.dump(data1, fh)
|
|
100
|
-
with open(history_path) as fh:
|
|
101
|
-
saved_data = json.load(fh).get(__prog__)
|
|
102
|
-
data["datetime"] = datetime.now().strftime("%c")
|
|
103
|
-
saved_data.append(data)
|
|
104
|
-
with open(history_path, "w") as fh:
|
|
105
|
-
json.dump({__prog__: saved_data}, fh, indent=4)
|
|
106
|
-
except Exception as e:
|
|
107
|
-
pass
|
|
108
|
-
|
|
109
|
-
@staticmethod
|
|
110
|
-
def get_history(dump: bool = False) -> list:
|
|
111
|
-
r"""Loads download history
|
|
112
|
-
:param dump: (Optional) Return whole history as str
|
|
113
|
-
:type dump: bool
|
|
114
|
-
:rtype: list|str
|
|
115
|
-
"""
|
|
116
|
-
try:
|
|
117
|
-
resp = []
|
|
118
|
-
if not path.isfile(history_path):
|
|
119
|
-
data1 = {__prog__: []}
|
|
120
|
-
with open(history_path, "w") as fh:
|
|
121
|
-
json.dump(data1, fh)
|
|
122
|
-
with open(history_path) as fh:
|
|
123
|
-
if dump:
|
|
124
|
-
return json.dumps(json.load(fh), indent=4)
|
|
125
|
-
entries = json.load(fh).get(__prog__)
|
|
126
|
-
for entry in entries:
|
|
127
|
-
resp.append(entry.get("vid"))
|
|
128
|
-
return resp
|
|
129
|
-
except Exception as e:
|
|
130
|
-
return []
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
class first_query:
|
|
134
|
-
def __init__(self, query: str):
|
|
135
|
-
r"""Initializes first query class
|
|
136
|
-
:param query: Video name or youtube link
|
|
137
|
-
:type query: str
|
|
138
|
-
"""
|
|
139
|
-
self.query_string = query
|
|
140
|
-
self.url = "https://www.y2mate.com/mates/analyzeV2/ajax"
|
|
141
|
-
self.payload = self.__get_payload()
|
|
142
|
-
self.processed = False
|
|
143
|
-
self.is_link = False
|
|
144
|
-
|
|
145
|
-
def __get_payload(self):
|
|
146
|
-
return {
|
|
147
|
-
"hl": "en",
|
|
148
|
-
"k_page": "home",
|
|
149
|
-
"k_query": self.query_string,
|
|
150
|
-
"q_auto": "0",
|
|
151
|
-
}
|
|
152
|
-
|
|
153
|
-
def __str__(self):
|
|
154
|
-
return """
|
|
155
|
-
{
|
|
156
|
-
"page": "search",
|
|
157
|
-
"status": "ok",
|
|
158
|
-
"keyword": "happy birthday",
|
|
159
|
-
"vitems": [
|
|
160
|
-
{
|
|
161
|
-
"v": "_z-1fTlSDF0",
|
|
162
|
-
"t": "Happy Birthday song"
|
|
163
|
-
},
|
|
164
|
-
]
|
|
165
|
-
}"""
|
|
166
|
-
|
|
167
|
-
def __enter__(self, *args, **kwargs):
|
|
168
|
-
return self.__call__(*args, **kwargs)
|
|
169
|
-
|
|
170
|
-
def __exit__(self, *args, **kwargs):
|
|
171
|
-
self.processed = False
|
|
172
|
-
|
|
173
|
-
def __call__(self, timeout: int = 30):
|
|
174
|
-
return self.main(timeout)
|
|
175
|
-
|
|
176
|
-
def main(self, timeout=30):
|
|
177
|
-
r"""Sets class attributes
|
|
178
|
-
:param timeout: (Optional) Http requests timeout
|
|
179
|
-
:type timeout: int
|
|
180
|
-
"""
|
|
181
|
-
okay_status, resp = utils.post(self.url, data=self.payload, timeout=timeout)
|
|
182
|
-
# print(resp.headers["content-type"])
|
|
183
|
-
# print(resp.content)
|
|
184
|
-
if okay_status:
|
|
185
|
-
dict_data = resp.json()
|
|
186
|
-
self.__setattr__("raw", dict_data)
|
|
187
|
-
for key in dict_data.keys():
|
|
188
|
-
self.__setattr__(key, dict_data.get(key))
|
|
189
|
-
self.is_link = not hasattr(self, "vitems")
|
|
190
|
-
self.processed = True
|
|
191
|
-
else:
|
|
192
|
-
raise Exception(
|
|
193
|
-
f"First query failed - [{resp.status_code} : {resp.reason}]"
|
|
194
|
-
)
|
|
195
|
-
return self
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
class second_query:
|
|
199
|
-
def __init__(self, query_one: object, item_no: int = 0):
|
|
200
|
-
r"""Initializes second_query class
|
|
201
|
-
:param query_one: Query_one class
|
|
202
|
-
:type query_one: object
|
|
203
|
-
:param item_no: (Optional) Query_one.vitems index
|
|
204
|
-
:type item_no: int
|
|
205
|
-
"""
|
|
206
|
-
assert query_one.processed, "First query failed"
|
|
207
|
-
|
|
208
|
-
self.query_one = query_one
|
|
209
|
-
self.item_no = item_no
|
|
210
|
-
self.processed = False
|
|
211
|
-
self.video_dict = None
|
|
212
|
-
self.url = "https://www.y2mate.com/mates/analyzeV2/ajax"
|
|
213
|
-
# self.payload = self.__get_payload()
|
|
214
|
-
|
|
215
|
-
def __str__(self):
|
|
216
|
-
return """
|
|
217
|
-
{
|
|
218
|
-
"status": "ok",
|
|
219
|
-
"mess": "",
|
|
220
|
-
"page": "detail",
|
|
221
|
-
"vid": "_z-1fTlSDF0",
|
|
222
|
-
"extractor": "youtube",
|
|
223
|
-
"title": "Happy Birthday song",
|
|
224
|
-
"t": 62,
|
|
225
|
-
"a": "infobells",
|
|
226
|
-
"links": {
|
|
227
|
-
"mp4": {
|
|
228
|
-
"136": {
|
|
229
|
-
"size": "5.5 MB",
|
|
230
|
-
"f": "mp4",
|
|
231
|
-
"q": "720p",
|
|
232
|
-
"q_text": "720p (.mp4) <span class=\"label label-primary\"><small>m-HD</small></span>",
|
|
233
|
-
"k": "joVBVdm2xZWhaZWhu6vZ8cXxAl7j4qpyhNgqkwx0U/tcutx/harxdZ8BfPNcg9n1"
|
|
234
|
-
},
|
|
235
|
-
},
|
|
236
|
-
"mp3": {
|
|
237
|
-
"140": {
|
|
238
|
-
"size": "975.1 KB",
|
|
239
|
-
"f": "m4a",
|
|
240
|
-
"q": ".m4a",
|
|
241
|
-
"q_text": ".m4a (128kbps)",
|
|
242
|
-
"k": "joVBVdm2xZWhaZWhu6vZ8cXxAl7j4qpyhNhuxgxyU/NQ9919mbX2dYcdevRBnt0="
|
|
243
|
-
},
|
|
244
|
-
},
|
|
245
|
-
"related": [
|
|
246
|
-
{
|
|
247
|
-
"title": "Related Videos",
|
|
248
|
-
"contents": [
|
|
249
|
-
{
|
|
250
|
-
"v": "KK24ZvxLXGU",
|
|
251
|
-
"t": "Birthday Songs - Happy Birthday To You | 15 minutes plus"
|
|
252
|
-
},
|
|
253
|
-
]
|
|
254
|
-
}
|
|
255
|
-
]
|
|
256
|
-
}
|
|
257
|
-
"""
|
|
258
|
-
|
|
259
|
-
def __call__(self, *args, **kwargs):
|
|
260
|
-
return self.main(*args, **kwargs)
|
|
261
|
-
|
|
262
|
-
def get_item(self, item_no=0):
|
|
263
|
-
r"""Return specific items on `self.query_one.vitems`"""
|
|
264
|
-
if self.video_dict:
|
|
265
|
-
return self.video_dict
|
|
266
|
-
if self.query_one.is_link:
|
|
267
|
-
return {"v": self.query_one.vid, "t": self.query_one.title}
|
|
268
|
-
all_items = self.query_one.vitems
|
|
269
|
-
assert (
|
|
270
|
-
self.item_no < len(all_items) - 1
|
|
271
|
-
), "The item_no is greater than largest item's index - try lower value"
|
|
272
|
-
|
|
273
|
-
return self.query_one.vitems[item_no or self.item_no]
|
|
274
|
-
|
|
275
|
-
def get_payload(self):
|
|
276
|
-
return {
|
|
277
|
-
"hl": "en",
|
|
278
|
-
"k_page": "home",
|
|
279
|
-
"k_query": f"https://www.youtube.com/watch?v={self.get_item().get('v')}",
|
|
280
|
-
"q_auto": "1",
|
|
281
|
-
}
|
|
282
|
-
|
|
283
|
-
def __main__(self, *args, **kwargs):
|
|
284
|
-
return self.main(*args, **kwargs)
|
|
285
|
-
|
|
286
|
-
def __enter__(self, *args, **kwargs):
|
|
287
|
-
return self.__main__(*args, **kwargs)
|
|
288
|
-
|
|
289
|
-
def __exit__(self, *args, **kwargs):
|
|
290
|
-
self.processed = False
|
|
291
|
-
|
|
292
|
-
def main(self, item_no: int = 0, timeout: int = 30):
|
|
293
|
-
r"""Requests for video formats and related videos
|
|
294
|
-
:param item_no: (Optional) Index of query_one.vitems
|
|
295
|
-
:type item_no: int
|
|
296
|
-
:param timeout: (Optional)Http request timeout
|
|
297
|
-
:type timeout: int
|
|
298
|
-
"""
|
|
299
|
-
self.processed = False
|
|
300
|
-
if item_no:
|
|
301
|
-
self.item_no = item_no
|
|
302
|
-
okay_status, resp = utils.post(
|
|
303
|
-
self.url, data=self.get_payload(), timeout=timeout
|
|
304
|
-
)
|
|
305
|
-
|
|
306
|
-
if okay_status:
|
|
307
|
-
dict_data = resp.json()
|
|
308
|
-
for key in dict_data.keys():
|
|
309
|
-
self.__setattr__(key, dict_data.get(key))
|
|
310
|
-
links = dict_data.get("links")
|
|
311
|
-
self.__setattr__("video", links.get("mp4"))
|
|
312
|
-
self.__setattr__("audio", links.get("mp3"))
|
|
313
|
-
self.__setattr__("related", dict_data.get("related")[0].get("contents"))
|
|
314
|
-
self.__setattr__("raw", dict_data)
|
|
315
|
-
self.processed = True
|
|
316
|
-
|
|
317
|
-
return self
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
class third_query:
|
|
321
|
-
def __init__(self, query_two: object):
|
|
322
|
-
assert query_two.processed, "Unprocessed second_query object parsed"
|
|
323
|
-
self.query_two = query_two
|
|
324
|
-
self.url = "https://www.y2mate.com/mates/convertV2/index"
|
|
325
|
-
self.formats = ["mp4", "mp3"]
|
|
326
|
-
self.qualities_plus = ["best", "worst"]
|
|
327
|
-
self.qualities = {
|
|
328
|
-
self.formats[0]: [
|
|
329
|
-
"4k",
|
|
330
|
-
"1080p",
|
|
331
|
-
"720p",
|
|
332
|
-
"480p",
|
|
333
|
-
"360p",
|
|
334
|
-
"240p",
|
|
335
|
-
"144p",
|
|
336
|
-
"auto",
|
|
337
|
-
]
|
|
338
|
-
+ self.qualities_plus,
|
|
339
|
-
self.formats[1]: ["mp3", "m4a", ".m4a", "128kbps", "192kbps", "328kbps"],
|
|
340
|
-
}
|
|
341
|
-
|
|
342
|
-
def __call__(self, *args, **kwargs):
|
|
343
|
-
return self.main(*args, **kwargs)
|
|
344
|
-
|
|
345
|
-
def __enter__(self, *args, **kwargs):
|
|
346
|
-
return self
|
|
347
|
-
|
|
348
|
-
def __exit__(self, *args, **kwargs):
|
|
349
|
-
pass
|
|
350
|
-
|
|
351
|
-
def __str__(self):
|
|
352
|
-
return """
|
|
353
|
-
{
|
|
354
|
-
"status": "ok",
|
|
355
|
-
"mess": "",
|
|
356
|
-
"c_status": "CONVERTED",
|
|
357
|
-
"vid": "_z-1fTlSDF0",
|
|
358
|
-
"title": "Happy Birthday song",
|
|
359
|
-
"ftype": "mp4",
|
|
360
|
-
"fquality": "144p",
|
|
361
|
-
"dlink": "https://dl165.dlmate13.online/?file=M3R4SUNiN3JsOHJ6WWQ2a3NQS1Y5ZGlxVlZIOCtyZ01tY1VxM2xzQkNMbFlyb2t1enErekxNZElFYkZlbWQ2U1g5TkVvWGplZU55T0R4K0lvcEI3QnlHbjd0a29yU3JOOXN0eWY4UmhBbE9xdmI3bXhCZEprMHFrZU96QkpweHdQVWh0OGhRMzQyaWUzS1dTdmhEMzdsYUk0VWliZkMwWXR5OENNUENOb01rUWd6NmJQS2UxaGRZWHFDQ2c0WkpNMmZ2QTVVZmx5cWc3NVlva0Nod3NJdFpPejhmeDNhTT0%3D"
|
|
362
|
-
}
|
|
363
|
-
"""
|
|
364
|
-
|
|
365
|
-
def get_payload(self, keys):
|
|
366
|
-
return {"k": keys.get("k"), "vid": self.query_two.vid}
|
|
367
|
-
|
|
368
|
-
def main(
|
|
369
|
-
self,
|
|
370
|
-
format: str = "mp4",
|
|
371
|
-
quality="auto",
|
|
372
|
-
resolver: str = None,
|
|
373
|
-
timeout: int = 30,
|
|
374
|
-
):
|
|
375
|
-
r"""
|
|
376
|
-
:param format: (Optional) Media format mp4/mp3
|
|
377
|
-
:param quality: (Optional) Media qualiy such as 720p
|
|
378
|
-
:param resolver: (Optional) Additional format info : [m4a,3gp,mp4,mp3]
|
|
379
|
-
:param timeout: (Optional) Http requests timeout
|
|
380
|
-
:type type: str
|
|
381
|
-
:type quality: str
|
|
382
|
-
:type timeout: int
|
|
383
|
-
"""
|
|
384
|
-
if not resolver:
|
|
385
|
-
resolver = "mp4" if format == "mp4" else "mp3"
|
|
386
|
-
if format == "mp3" and quality == "auto":
|
|
387
|
-
quality = "128kbps"
|
|
388
|
-
assert (
|
|
389
|
-
format in self.formats
|
|
390
|
-
), f"'{format}' is not in supported formats - {self.formats}"
|
|
391
|
-
|
|
392
|
-
assert (
|
|
393
|
-
quality in self.qualities[format]
|
|
394
|
-
), f"'{quality}' is not in supported qualities - {self.qualities[format]}"
|
|
395
|
-
|
|
396
|
-
items = self.query_two.video if format == "mp4" else self.query_two.audio
|
|
397
|
-
hunted = []
|
|
398
|
-
if quality in self.qualities_plus:
|
|
399
|
-
keys = list(items.keys())
|
|
400
|
-
if quality == self.qualities_plus[0]:
|
|
401
|
-
hunted.append(items[keys[0]])
|
|
402
|
-
else:
|
|
403
|
-
hunted.append(items[keys[len(keys) - 2]])
|
|
404
|
-
else:
|
|
405
|
-
for key in items.keys():
|
|
406
|
-
if items[key].get("q") == quality:
|
|
407
|
-
hunted.append(items[key])
|
|
408
|
-
if len(hunted) > 1:
|
|
409
|
-
for entry in hunted:
|
|
410
|
-
if entry.get("f") == resolver:
|
|
411
|
-
hunted.insert(0, entry)
|
|
412
|
-
if hunted:
|
|
413
|
-
|
|
414
|
-
def hunter_manager(souped_entry: dict = hunted[0], repeat_count=0):
|
|
415
|
-
payload = self.get_payload(souped_entry)
|
|
416
|
-
okay_status, resp = utils.post(self.url, data=payload)
|
|
417
|
-
if okay_status:
|
|
418
|
-
sanitized_feedback = resp.json()
|
|
419
|
-
if sanitized_feedback.get("c_status") == "CONVERTING":
|
|
420
|
-
if repeat_count >= 4:
|
|
421
|
-
return (False, {})
|
|
422
|
-
else:
|
|
423
|
-
sleep(5)
|
|
424
|
-
repeat_count += 1
|
|
425
|
-
return hunter_manager(souped_entry)
|
|
426
|
-
return okay_status, resp
|
|
427
|
-
return okay_status, resp
|
|
428
|
-
|
|
429
|
-
okay_status, resp = hunter_manager()
|
|
430
|
-
|
|
431
|
-
if okay_status:
|
|
432
|
-
resp_data = hunted[0]
|
|
433
|
-
resp_data.update(resp.json())
|
|
434
|
-
return resp_data
|
|
435
|
-
|
|
436
|
-
else:
|
|
437
|
-
return {}
|
|
438
|
-
else:
|
|
439
|
-
return {}
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
class Handler:
|
|
443
|
-
def __init__(
|
|
444
|
-
self,
|
|
445
|
-
query: str,
|
|
446
|
-
author: str = None,
|
|
447
|
-
timeout: int = 30,
|
|
448
|
-
confirm: bool = False,
|
|
449
|
-
unique: bool = False,
|
|
450
|
-
thread: int = 0,
|
|
451
|
-
):
|
|
452
|
-
r"""Initializes this `class`
|
|
453
|
-
:param query: Video name or youtube link
|
|
454
|
-
:type query: str
|
|
455
|
-
:param author: (Optional) Author (Channel) of the videos
|
|
456
|
-
:type author: str
|
|
457
|
-
:param timeout: (Optional) Http request timeout
|
|
458
|
-
:type timeout: int
|
|
459
|
-
:param confirm: (Optional) Confirm before downloading media
|
|
460
|
-
:type confirm: bool
|
|
461
|
-
:param unique: (Optional) Ignore previously downloaded media
|
|
462
|
-
:type confirm: bool
|
|
463
|
-
:param thread: (Optional) Thread the download process through `auto-save` method
|
|
464
|
-
:type thread int
|
|
465
|
-
"""
|
|
466
|
-
self.query = query
|
|
467
|
-
self.author = author
|
|
468
|
-
self.timeout = timeout
|
|
469
|
-
self.keyword = None
|
|
470
|
-
self.confirm = confirm
|
|
471
|
-
self.unique = unique
|
|
472
|
-
self.thread = thread
|
|
473
|
-
self.vitems = []
|
|
474
|
-
self.related = []
|
|
475
|
-
self.dropped = []
|
|
476
|
-
self.total = 1
|
|
477
|
-
self.saved_videos = utils.get_history()
|
|
478
|
-
|
|
479
|
-
def __str__(self):
|
|
480
|
-
return self.query
|
|
481
|
-
|
|
482
|
-
def __enter__(self, *args, **kwargs):
|
|
483
|
-
return self
|
|
484
|
-
|
|
485
|
-
def __exit__(self, *args, **kwargs):
|
|
486
|
-
self.vitems.clear()
|
|
487
|
-
self.total = 1
|
|
488
|
-
|
|
489
|
-
def __call__(self, *args, **kwargs):
|
|
490
|
-
return self.run(*args, **kwargs)
|
|
491
|
-
|
|
492
|
-
def __filter_videos(self, entries: list) -> list:
|
|
493
|
-
f"""Filter videos based on keyword
|
|
494
|
-
:param entries: List containing dict of video id and their titles
|
|
495
|
-
:type entries: list
|
|
496
|
-
:rtype: list
|
|
497
|
-
"""
|
|
498
|
-
if self.keyword:
|
|
499
|
-
keyword = self.keyword.lower()
|
|
500
|
-
resp = []
|
|
501
|
-
for entry in entries:
|
|
502
|
-
if keyword in entry.get("t").lower():
|
|
503
|
-
resp.append(entry)
|
|
504
|
-
return resp
|
|
505
|
-
|
|
506
|
-
else:
|
|
507
|
-
return entries
|
|
508
|
-
|
|
509
|
-
def __make_first_query(self):
    r"""Run the initial y2mate query and store it as ``self.query_one``."""
    self.query_one = first_query(self.query).main(self.timeout)
    if not self.query_one.is_link:
        # Plain-text search: collect the (keyword-filtered) result list.
        self.vitems.extend(self.__filter_videos(self.query_one.vitems))
|
|
515
|
-
|
|
516
|
-
@utils.error_handler(exit_on_error=True)
def __verify_item(self, second_query_obj) -> tuple:
    r"""Decide whether a resolved video should actually be downloaded.

    :param second_query_obj: Processed `second_query` object
    :rtype: tuple - ``(download: bool, reason: str)``
    """
    video_id = second_query_obj.vid
    video_author = second_query_obj.a
    video_title = second_query_obj.title
    if video_id in self.saved_videos:
        # Video is already present in the download history.
        if self.unique:
            # Uniqueness requested: reject duplicates outright.
            return False, "Duplicate"
        if self.confirm:
            # Ask the user whether to fetch it again.
            choice = confirm_from_user(
                f">> Re-download : {Fore.GREEN+video_title+Fore.RESET} by {Fore.YELLOW+video_author+Fore.RESET}"
            )
            print("\n[*] Ok processing...", end="\r")
            return choice, "User's choice"
    if self.confirm:
        # Fresh video but confirmation was requested.
        choice = confirm_from_user(
            f">> Download : {Fore.GREEN+video_title+Fore.RESET} by {Fore.YELLOW+video_author+Fore.RESET}"
        )
        print("\n[*] Ok processing...", end="\r")
        return choice, "User's choice"
    # Default: download without interaction.
    return True, "Auto"
|
|
537
|
-
|
|
538
|
-
def __make_second_query(self):
    r"""Links first query with 3rd query.

    Generator yielding processed `second_query` objects (or ``None`` for
    entries that failed to resolve). Stops after ``self.total`` yields.
    """
    init_query_two = second_query(self.query_one)
    # x counts successfully yielded videos against self.total.
    x = 0
    if not self.query_one.is_link:
        # Search-result path: resolve each collected video dict in turn.
        for video_dict in self.vitems:
            init_query_two.video_dict = video_dict
            query_2 = init_query_two.main(timeout=self.timeout)
            if query_2.processed:
                if query_2.vid in self.dropped:
                    # Previously rejected - skip silently.
                    continue
                if self.author and not self.author.lower() in query_2.a.lower():
                    # Author filter active and this channel doesn't match.
                    continue
                else:
                    yes_download, reason = self.__verify_item(query_2)
                    if not yes_download:
                        self.dropped.append(query_2.vid)
                        continue
                self.related.append(query_2.related)
                yield query_2
                x += 1
                if x >= self.total:
                    break
            else:
                print(
                    f"Dropping unprocessed query_two object of index {x}"
                )
                # Yield None so the consumer can account for the failure.
                yield
    else:
        # Direct-link path: resolve the linked video first.
        query_2 = init_query_two.main(timeout=self.timeout)
        if query_2.processed:
            # self.related.extend(query_2.related)
            self.vitems.extend(query_2.related)
            # Subsequent iterations treat related videos as search results.
            self.query_one.is_link = False
            if self.total == 1:
                yield query_2
            else:
                # More than one video requested: walk the related videos.
                for video_dict in self.vitems:
                    init_query_two.video_dict = video_dict
                    query_2 = init_query_two.main(timeout=self.timeout)
                    if query_2.processed:
                        if (
                            self.author
                            and not self.author.lower() in query_2.a.lower()
                        ):
                            continue
                        else:
                            yes_download, reason = self.__verify_item(query_2)
                            if not yes_download:

                                self.dropped.append(query_2.vid)
                                continue

                        self.related.append(query_2.related)
                        yield query_2
                        x += 1
                        if x >= self.total:
                            break
                    else:
                        yield

        else:
            yield
|
|
601
|
-
|
|
602
|
-
def run(
    self,
    format: str = "mp4",
    quality: str = "auto",
    resolver: str = None,
    limit: int = 1,
    keyword: str = None,
    author: str = None,
):
    r"""Generate and yield video dictionary
    :param format: (Optional) Media format mp4/mp3
    :param quality: (Optional) Media quality such as 720p/128kbps
    :param resolver: (Optional) Additional format info : [m4a,3gp,mp4,mp3]
    :param limit: (Optional) Total videos to be generated
    :param keyword: (Optional) Video keyword
    :param author: (Optional) Author of the videos
    :type format: str
    :type quality: str
    :type limit: int
    :type keyword: str
    :type author: str
    :rtype: object
    """
    # Stash run-scoped filters on the instance so the private query
    # generators can read them.
    self.author = author
    self.keyword = keyword
    self.total = limit
    self.__make_first_query()
    for query_two_obj in self.__make_second_query():
        if query_two_obj:
            # Feed related videos back so subsequent items can be resolved.
            self.vitems.extend(query_two_obj.related)
            # Convert to a final download-link dict via the third query.
            yield third_query(query_two_obj).main(
                **dict(
                    format=format,
                    quality=quality,
                    resolver=resolver,
                    timeout=self.timeout,
                )
            )
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
def generate_filename(self, third_dict: dict, naming_format: str = None) -> str:
    r"""Build a sanitized filename from a `third_query` response.

    :param third_dict: response of `third_query.main()` object
    :param naming_format: (Optional) %-style format using `third_dict` keys
    :type third_dict: dict
    :type naming_format: str
    :rtype: str
    """
    if naming_format:
        raw_name = f"{naming_format}" % third_dict
    else:
        raw_name = (
            f"{third_dict['title']} {third_dict['vid']}"
            f"_{third_dict['fquality']}.{third_dict['ftype']}"
        )

    # Characters illegal in filenames, plus site watermarks to strip out.
    blacklist = (
        "\\",
        "/",
        ":",
        "*",
        "?",
        '"',
        "<",
        "|",
        ">",
        "y2mate.com",
        "y2mate com",
    )

    def sanitize(name):
        for token in blacklist:
            name = name.replace(token, "")
        return name.strip()

    return sanitize(raw_name)
|
|
674
|
-
|
|
675
|
-
def auto_save(
    self,
    dir: str = "",
    iterator: object = None,
    progress_bar=True,
    quiet: bool = False,
    naming_format: str = None,
    chunk_size: int = 512,
    play: bool = False,
    resume: bool = False,
    *args,
    **kwargs,
):
    r"""Query and save all the media
    :param dir: (Optional) Path to Directory for saving the media files
    :param iterator: (Optional) Function that yields third_query object - `Handler.run`
    :param progress_bar: (Optional) Display progress bar
    :param quiet: (Optional) Not to stdout anything
    :param naming_format: (Optional) Format for generating filename
    :param chunk_size: (Optional) Chunk_size for downloading files in KB
    :param play: (Optional) Auto-play the media after download
    :param resume: (Optional) Resume the incomplete download
    :type dir: str
    :type iterator: object
    :type progress_bar: bool
    :type quiet: bool
    :type naming_format: str
    :type chunk_size: int
    :type play: bool
    :type resume: bool
    args & kwargs for the iterator
    :rtype: None
    """
    # Extra args/kwargs are forwarded to `self.run` when no iterator given.
    iterator_object = iterator or self.run(*args, **kwargs)

    for x, entry in enumerate(iterator_object):
        if self.thread:
            # Threaded mode: the third positional arg (progress_bar) is
            # forced False since multiple downloads may run concurrently.
            t1 = Thread(
                target=self.save,
                args=(
                    entry,
                    dir,
                    False,
                    quiet,
                    naming_format,
                    chunk_size,
                    play,
                    resume,
                ),
            )
            t1.start()
            thread_count = x + 1
            # NOTE(review): only the most recently started thread is joined
            # at each batch boundary; earlier threads in the batch may still
            # be running when the loop continues - confirm this is intended.
            if thread_count % self.thread == 0 or thread_count == self.total:
                t1.join()
        else:
            # Sequential mode: download inline with the caller's settings.
            self.save(
                entry,
                dir,
                progress_bar,
                quiet,
                naming_format,
                chunk_size,
                play,
                resume,
            )
|
|
740
|
-
|
|
741
|
-
def save(
    self,
    third_dict: dict,
    dir: str = "",
    progress_bar=True,
    quiet: bool = False,
    naming_format: str = None,
    chunk_size: int = 512,
    play: bool = False,
    resume: bool = False,
    disable_history=False,
):
    r"""Download media based on response of `third_query` dict-data-type
    :param third_dict: Response of `third_query.run()`
    :param dir: (Optional) Directory for saving the contents
    :param progress_bar: (Optional) Display download progress bar
    :param quiet: (Optional) Not to stdout anything
    :param naming_format: (Optional) Format for generating filename
    :param chunk_size: (Optional) Chunk_size for downloading files in KB
    :param play: (Optional) Auto-play the media after download
    :param resume: (Optional) Resume the incomplete download
    :param disable_history: (Optional) Don't save the download to history.
    :type third_dict: dict
    :type dir: str
    :type progress_bar: bool
    :type quiet: bool
    :type naming_format: str
    :type chunk_size: int
    :type play: bool
    :type resume: bool
    :type disable_history: bool
    :rtype: None
    """
    if third_dict:
        # A missing "dlink" means conversion failed for this quality.
        assert third_dict.get(
            "dlink"
        ), "The video selected does not support that quality, try lower qualities."
        if third_dict.get("mess"):
            pass

        current_downloaded_size = 0
        current_downloaded_size_in_mb = 0
        filename = self.generate_filename(third_dict, naming_format)
        save_to = path.join(dir, filename)
        mod_headers = headers

        if resume:
            assert path.exists(save_to), f"File not found in path - '{save_to}'"
            current_downloaded_size = path.getsize(save_to)
            # Set the headers to resume download from the last byte
            mod_headers = {"Range": f"bytes={current_downloaded_size}-"}
            current_downloaded_size_in_mb = round(
                current_downloaded_size / 1000000, 2
            )  # convert to mb

        resp = requests.get(third_dict["dlink"], stream=True, headers=mod_headers)

        default_content_length = 0
        size_in_bytes = int(
            resp.headers.get("content-length", default_content_length)
        )
        if not size_in_bytes:
            if resume:
                # Nothing left to fetch - the file is already complete.
                raise FileExistsError(
                    f"Download completed for the file in path - '{save_to}'"
                )
            else:
                raise Exception(
                    f"Cannot download file of content-length {size_in_bytes} bytes"
                )

        if resume:
            assert (
                size_in_bytes != current_downloaded_size
            ), f"Download completed for the file in path - '{save_to}'"

        size_in_mb = (
            round(size_in_bytes / 1000000, 2) + current_downloaded_size_in_mb
        )
        chunk_size_in_bytes = chunk_size * 1024

        # Record an absolute destination path on the response dict.
        third_dict["saved_to"] = (
            save_to
            if any([save_to.startswith("/"), ":" in save_to])
            else path.join(getcwd(), dir, filename)
        )
        try_play_media = (
            lambda: launch_media(third_dict["saved_to"]) if play else None
        )
        # Append when resuming, otherwise truncate/create.
        saving_mode = "ab" if resume else "wb"
        if progress_bar:
            if not quiet:
                # NOTE(review): appears to be a redacted/placeholder status
                # message - confirm what it was meant to print.
                print(f"(unknown)")
            with tqdm(
                total=size_in_bytes + current_downloaded_size,
                bar_format="%s%d MB %s{bar} %s{l_bar}%s"
                % (Fore.GREEN, size_in_mb, Fore.CYAN, Fore.YELLOW, Fore.RESET),
                initial=current_downloaded_size,
            ) as p_bar:
                # p_bar.update(current_downloaded_size)
                with open(save_to, saving_mode) as fh:
                    for chunks in resp.iter_content(chunk_size=chunk_size_in_bytes):
                        fh.write(chunks)
                        # NOTE(review): advances by the nominal chunk size,
                        # so the bar overshoots on the final partial chunk.
                        p_bar.update(chunk_size_in_bytes)
            if not disable_history:
                utils.add_history(third_dict)
            try_play_media()
            return save_to
        else:
            with open(save_to, saving_mode) as fh:
                for chunks in resp.iter_content(chunk_size=chunk_size_in_bytes):
                    fh.write(chunks)
            if not disable_history:
                utils.add_history(third_dict)

            try_play_media()

            return save_to
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
# Video quality labels accepted for the "mp4" format (plus the
# auto/best/worst selectors).
mp4_qualities = [
    "4k",
    "1080p",
    "720p",
    "480p",
    "360p",
    "240p",
    "144p",
    "auto",
    "best",
    "worst",
]
# Audio quality/container labels accepted for the "mp3" format.
mp3_qualities = ["mp3", "m4a", ".m4a", "128kbps", "192kbps", "328kbps"]
# Extra container hints used to disambiguate duplicate quality matches.
resolvers = ["m4a", "3gp", "mp4", "mp3"]
# Every quality label accepted anywhere.
media_qualities = mp4_qualities + mp3_qualities
|
|
877
|
-
|
|
878
|
-
def launch_media(filepath):
    """Open *filepath* with the system's default media application.

    :param filepath: Path of the media file to open

    Failures are printed rather than raised so a playback problem never
    aborts an otherwise finished download.
    """
    try:
        if sys.platform.startswith('darwin'):  # macOS
            subprocess.call(('open', filepath))
        elif sys.platform.startswith('win'):  # Windows
            os.startfile(filepath)
        elif sys.platform.startswith('linux'):  # Linux
            subprocess.call(('xdg-open', filepath))
        else:
            # Previously unsupported platforms failed silently; tell the
            # user why nothing happened.
            print(f"Auto-play not supported on platform: {sys.platform}")
    except Exception as e:
        print(f"Error launching media: {e}")
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
def confirm_from_user(message, default=False):
    """
    Prompt user for confirmation
    """
    answers = {"yes": True, "y": True, "ye": True,
               "no": False, "n": False}

    # Render the prompt so the default answer (if any) is capitalized.
    if default is None:
        prompt = " [y/n] "
    else:
        prompt = " [Y/n] " if default else " [y/N] "

    while True:
        reply = input(message + prompt).lower()
        if reply == '' and default is not None:
            return default
        if reply in answers:
            return answers[reply]
        print("Please respond with 'yes' or 'no' (or 'y' or 'n').")
|
|
915
|
-
|
|
916
|
-
|
|
917
|
-
# Create CLI app
app = CLI(name="ytdownloader", help="YouTube Video Downloader CLI")

@app.command()
@option("--author", help="Specify video author/channel")
@option("--timeout", type=int, default=30, help="HTTP request timeout")
@option("--confirm", is_flag=True, help="Confirm before downloading")
@option("--unique", is_flag=True, help="Ignore previously downloaded media")
@option("--thread", type=int, default=0, help="Thread download process")
@option("--format", default="mp4", help="Download format (mp4/mp3)")
@option("--quality", default="auto", help="Video quality")
@option("--limit", type=int, default=1, help="Total videos to download")
@option("--keyword", help="Filter videos by keyword")
@argument("query", help="Video name or YouTube link")
def download(query, author, timeout, confirm, unique, thread, format, quality, limit, keyword):
    """Download YouTube videos with advanced options"""

    # Create handler with parsed arguments
    handler = Handler(
        query=query,
        author=author,
        timeout=timeout,
        confirm=confirm,
        unique=unique,
        thread=thread
    )

    # Run download process. format/quality/limit/keyword are forwarded
    # through auto_save's **kwargs to Handler.run.
    handler.auto_save(
        format=format,
        quality=quality,
        limit=limit,
        keyword=keyword
    )
|
|
951
|
-
|
|
952
|
-
# Replace get_args function with swiftcli's argument parsing
def main():
    # Console-script entry point: delegate argument parsing and command
    # dispatch to the swiftcli application defined above.
    app.run()
|
|
955
|
-
|
|
956
|
-
if __name__ == "__main__":
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
import json
|
|
3
|
+
from webscout.litagent import LitAgent
|
|
4
|
+
from time import sleep
|
|
5
|
+
import requests
|
|
6
|
+
from tqdm import tqdm
|
|
7
|
+
from colorama import Fore
|
|
8
|
+
from os import makedirs, path, getcwd
|
|
9
|
+
from threading import Thread
|
|
10
|
+
import os
|
|
11
|
+
import subprocess
|
|
12
|
+
import sys
|
|
13
|
+
import tempfile
|
|
14
|
+
from webscout.version import __prog__, __version__
|
|
15
|
+
from webscout.swiftcli import CLI, option, argument
|
|
16
|
+
|
|
17
|
+
# Define cache directory using tempfile
user_cache_dir = os.path.join(tempfile.gettempdir(), 'webscout')
if not os.path.exists(user_cache_dir):
    os.makedirs(user_cache_dir)


# Shared HTTP session reused by every y2mate request in this module.
session = requests.session()

# Browser-like headers; a random User-Agent avoids trivial blocking.
headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "User-Agent": LitAgent().random(),
    "Accept-Encoding": "gzip, deflate, br",
    "Accept-Language": "en-US,en;q=0.9",
    "referer": "https://y2mate.com",
}

session.headers.update(headers)

# Prefer the second exception arg (typically the message) when present.
get_excep = lambda e: e.args[1] if len(e.args) > 1 else e

# Application data directory (download history lives here).
appdir = user_cache_dir

if not path.isdir(appdir):
    try:
        makedirs(appdir)
    except Exception as e:
        print(
            f"Error : {get_excep(e)} while creating site directory - "
            + appdir
        )

# JSON file recording previously downloaded videos.
history_path = path.join(appdir, "history.json")
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class utils:
    """Stateless helpers: guarded HTTP calls and download-history persistence."""

    @staticmethod
    def error_handler(resp=None, exit_on_error=False, log=True):
        r"""Exception handler decorator.

        :param resp: Fallback value returned when the wrapped call fails
        :param exit_on_error: Terminate the process on failure (log=False only)
        :param log: Re-raise failures as RuntimeError instead of swallowing
        """

        def decorator(func):
            def main(*args, **kwargs):
                try:
                    try:
                        return func(*args, **kwargs)
                    except KeyboardInterrupt:
                        print()
                        exit(1)
                except Exception as e:
                    if log:
                        # Bug fix: the original raised a plain f-string,
                        # which is a TypeError in Python 3 (exceptions must
                        # derive from BaseException). Raise a real exception
                        # and keep the original cause attached.
                        raise RuntimeError(f"Error - {get_excep(e)}") from e
                    if exit_on_error:
                        exit(1)

                return resp

            return main

        return decorator

    @staticmethod
    def get(*args, **kwargs):
        r"""Sends http get request.

        :rtype: tuple - (ok_and_json: bool, response)
        """
        resp = session.get(*args, **kwargs)
        return all([resp.ok, "application/json" in resp.headers["content-type"]]), resp

    @staticmethod
    def post(*args, **kwargs):
        r"""Sends http post request.

        :rtype: tuple - (ok_and_json: bool, response)
        """
        resp = session.post(*args, **kwargs)
        return all([resp.ok, "application/json" in resp.headers["content-type"]]), resp

    @staticmethod
    def add_history(data: dict) -> None:
        r"""Adds entry to history.

        :param data: Response of `third query`
        :type data: dict
        :rtype: None
        """
        try:
            if not path.isfile(history_path):
                # First run: create the history file with an empty list.
                data1 = {__prog__: []}
                with open(history_path, "w") as fh:
                    json.dump(data1, fh)
            with open(history_path) as fh:
                saved_data = json.load(fh).get(__prog__)
            data["datetime"] = datetime.now().strftime("%c")
            saved_data.append(data)
            with open(history_path, "w") as fh:
                json.dump({__prog__: saved_data}, fh, indent=4)
        except Exception:
            # Best-effort: a history failure must never break a download.
            pass

    @staticmethod
    def get_history(dump: bool = False) -> list:
        r"""Loads download history.

        :param dump: (Optional) Return whole history as str
        :type dump: bool
        :rtype: list|str
        """
        try:
            resp = []
            if not path.isfile(history_path):
                data1 = {__prog__: []}
                with open(history_path, "w") as fh:
                    json.dump(data1, fh)
            with open(history_path) as fh:
                if dump:
                    return json.dumps(json.load(fh), indent=4)
                entries = json.load(fh).get(__prog__)
            for entry in entries:
                resp.append(entry.get("vid"))
            return resp
        except Exception:
            # Unreadable/corrupt history is treated as empty.
            return []
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
class first_query:
    """Performs the initial y2mate "analyze" request for a search or link."""

    def __init__(self, query: str):
        r"""Initializes first query class
        :param query: Video name or youtube link
        :type query: str
        """
        self.query_string = query
        self.url = "https://www.y2mate.com/mates/analyzeV2/ajax"
        self.payload = self.__build_payload()
        self.processed = False
        self.is_link = False

    def __build_payload(self):
        # Form body expected by the analyze endpoint.
        return {
            "hl": "en",
            "k_page": "home",
            "k_query": self.query_string,
            "q_auto": "0",
        }

    def __str__(self):
        return """
{
    "page": "search",
    "status": "ok",
    "keyword": "happy birthday",
    "vitems": [
        {
            "v": "_z-1fTlSDF0",
            "t": "Happy Birthday song"
        },
    ]
}"""

    def __enter__(self, *args, **kwargs):
        return self.__call__(*args, **kwargs)

    def __exit__(self, *args, **kwargs):
        self.processed = False

    def __call__(self, timeout: int = 30):
        return self.main(timeout)

    def main(self, timeout=30):
        r"""Sets class attributes
        :param timeout: (Optional) Http requests timeout
        :type timeout: int
        """
        okay_status, resp = utils.post(self.url, data=self.payload, timeout=timeout)
        if not okay_status:
            raise Exception(
                f"First query failed - [{resp.status_code} : {resp.reason}]"
            )
        response_dict = resp.json()
        setattr(self, "raw", response_dict)
        # Expose every response field as an instance attribute.
        for field, value in response_dict.items():
            setattr(self, field, value)
        # No "vitems" key means the query was a direct video link.
        self.is_link = not hasattr(self, "vitems")
        self.processed = True
        return self
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
class second_query:
    # Resolves a single video (by id) into its available formats/links and
    # related videos, via the same y2mate "analyze" endpoint.

    def __init__(self, query_one: object, item_no: int = 0):
        r"""Initializes second_query class
        :param query_one: Query_one class
        :type query_one: object
        :param item_no: (Optional) Query_one.vitems index
        :type item_no: int
        """
        assert query_one.processed, "First query failed"

        self.query_one = query_one
        self.item_no = item_no
        self.processed = False
        # When set (e.g. by Handler), overrides item selection in get_item.
        self.video_dict = None
        self.url = "https://www.y2mate.com/mates/analyzeV2/ajax"
        # self.payload = self.__get_payload()

    def __str__(self):
        return """
{
    "status": "ok",
    "mess": "",
    "page": "detail",
    "vid": "_z-1fTlSDF0",
    "extractor": "youtube",
    "title": "Happy Birthday song",
    "t": 62,
    "a": "infobells",
    "links": {
        "mp4": {
            "136": {
                "size": "5.5 MB",
                "f": "mp4",
                "q": "720p",
                "q_text": "720p (.mp4) <span class=\"label label-primary\"><small>m-HD</small></span>",
                "k": "joVBVdm2xZWhaZWhu6vZ8cXxAl7j4qpyhNgqkwx0U/tcutx/harxdZ8BfPNcg9n1"
            },
        },
        "mp3": {
            "140": {
                "size": "975.1 KB",
                "f": "m4a",
                "q": ".m4a",
                "q_text": ".m4a (128kbps)",
                "k": "joVBVdm2xZWhaZWhu6vZ8cXxAl7j4qpyhNhuxgxyU/NQ9919mbX2dYcdevRBnt0="
            },
        },
        "related": [
            {
                "title": "Related Videos",
                "contents": [
                    {
                        "v": "KK24ZvxLXGU",
                        "t": "Birthday Songs - Happy Birthday To You | 15 minutes plus"
                    },
                ]
            }
        ]
}
"""

    def __call__(self, *args, **kwargs):
        return self.main(*args, **kwargs)

    def get_item(self, item_no=0):
        r"""Return specific items on `self.query_one.vitems`"""
        if self.video_dict:
            # An explicit video dict takes precedence over index lookup.
            return self.video_dict
        if self.query_one.is_link:
            # Direct link: synthesize an item from the first query itself.
            return {"v": self.query_one.vid, "t": self.query_one.title}
        all_items = self.query_one.vitems
        assert (
            self.item_no < len(all_items) - 1
        ), "The item_no is greater than largest item's index - try lower value"

        return self.query_one.vitems[item_no or self.item_no]

    def get_payload(self):
        # The selected video id is re-submitted as a full watch URL.
        return {
            "hl": "en",
            "k_page": "home",
            "k_query": f"https://www.youtube.com/watch?v={self.get_item().get('v')}",
            "q_auto": "1",
        }

    def __main__(self, *args, **kwargs):
        return self.main(*args, **kwargs)

    def __enter__(self, *args, **kwargs):
        return self.__main__(*args, **kwargs)

    def __exit__(self, *args, **kwargs):
        self.processed = False

    def main(self, item_no: int = 0, timeout: int = 30):
        r"""Requests for video formats and related videos
        :param item_no: (Optional) Index of query_one.vitems
        :type item_no: int
        :param timeout: (Optional) Http request timeout
        :type timeout: int
        """
        self.processed = False
        if item_no:
            self.item_no = item_no
        okay_status, resp = utils.post(
            self.url, data=self.get_payload(), timeout=timeout
        )

        if okay_status:
            dict_data = resp.json()
            # Mirror every response field onto the instance, then expose
            # the format maps and related-videos list under stable names.
            for key in dict_data.keys():
                self.__setattr__(key, dict_data.get(key))
            links = dict_data.get("links")
            self.__setattr__("video", links.get("mp4"))
            self.__setattr__("audio", links.get("mp3"))
            self.__setattr__("related", dict_data.get("related")[0].get("contents"))
            self.__setattr__("raw", dict_data)
            self.processed = True

        return self
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
class third_query:
    """Converts a resolved video (a processed `second_query`) into a final
    download link via y2mate's convert endpoint."""

    def __init__(self, query_two: object):
        # Conversion only makes sense after a successful second query.
        assert query_two.processed, "Unprocessed second_query object parsed"
        self.query_two = query_two
        self.url = "https://www.y2mate.com/mates/convertV2/index"
        self.formats = ["mp4", "mp3"]
        self.qualities_plus = ["best", "worst"]
        # Accepted quality labels per media format.
        self.qualities = {
            self.formats[0]: [
                "4k",
                "1080p",
                "720p",
                "480p",
                "360p",
                "240p",
                "144p",
                "auto",
            ]
            + self.qualities_plus,
            self.formats[1]: ["mp3", "m4a", ".m4a", "128kbps", "192kbps", "328kbps"],
        }

    def __call__(self, *args, **kwargs):
        return self.main(*args, **kwargs)

    def __enter__(self, *args, **kwargs):
        return self

    def __exit__(self, *args, **kwargs):
        pass

    def __str__(self):
        return """
{
    "status": "ok",
    "mess": "",
    "c_status": "CONVERTED",
    "vid": "_z-1fTlSDF0",
    "title": "Happy Birthday song",
    "ftype": "mp4",
    "fquality": "144p",
    "dlink": "https://dl165.dlmate13.online/?file=M3R4SUNiN3JsOHJ6WWQ2a3NQS1Y5ZGlxVlZIOCtyZ01tY1VxM2xzQkNMbFlyb2t1enErekxNZElFYkZlbWQ2U1g5TkVvWGplZU55T0R4K0lvcEI3QnlHbjd0a29yU3JOOXN0eWY4UmhBbE9xdmI3bXhCZEprMHFrZU96QkpweHdQVWh0OGhRMzQyaWUzS1dTdmhEMzdsYUk0VWliZkMwWXR5OENNUENOb01rUWd6NmJQS2UxaGRZWHFDQ2c0WkpNMmZ2QTVVZmx5cWc3NVlva0Nod3NJdFpPejhmeDNhTT0%3D"
}
"""

    def get_payload(self, keys):
        # Conversion request: the per-format token "k" plus the video id.
        return {"k": keys.get("k"), "vid": self.query_two.vid}

    def main(
        self,
        format: str = "mp4",
        quality="auto",
        resolver: str = None,
        timeout: int = 30,
    ):
        r"""Request conversion and return the final download-link dict.

        :param format: (Optional) Media format mp4/mp3
        :param quality: (Optional) Media quality such as 720p
        :param resolver: (Optional) Additional format info : [m4a,3gp,mp4,mp3]
        :param timeout: (Optional) Http requests timeout
        :type format: str
        :type quality: str
        :type timeout: int
        :rtype: dict - empty on failure
        """
        if not resolver:
            resolver = "mp4" if format == "mp4" else "mp3"
        if format == "mp3" and quality == "auto":
            quality = "128kbps"
        assert (
            format in self.formats
        ), f"'{format}' is not in supported formats - {self.formats}"

        assert (
            quality in self.qualities[format]
        ), f"'{quality}' is not in supported qualities - {self.qualities[format]}"

        items = self.query_two.video if format == "mp4" else self.query_two.audio
        hunted = []
        if quality in self.qualities_plus:
            keys = list(items.keys())
            if quality == self.qualities_plus[0]:
                hunted.append(items[keys[0]])
            else:
                # NOTE(review): "worst" selects the second-to-last entry,
                # not the last - preserved from the original implementation.
                hunted.append(items[keys[len(keys) - 2]])
        else:
            for key in items.keys():
                if items[key].get("q") == quality:
                    hunted.append(items[key])
            if len(hunted) > 1:
                # Prefer the entry whose container ("f") matches the
                # resolver. Bug fix: iterate over a snapshot and stop at the
                # first match - the original inserted into the list while
                # iterating it, re-visiting the inserted element forever.
                for entry in list(hunted):
                    if entry.get("f") == resolver:
                        hunted.insert(0, entry)
                        break
        if hunted:

            def hunter_manager(souped_entry: dict = hunted[0], repeat_count=0):
                # Poll the convert endpoint; retry while conversion is in
                # progress, at most 4 extra attempts 5 seconds apart.
                payload = self.get_payload(souped_entry)
                okay_status, resp = utils.post(self.url, data=payload)
                if okay_status:
                    sanitized_feedback = resp.json()
                    if sanitized_feedback.get("c_status") == "CONVERTING":
                        if repeat_count >= 4:
                            return (False, {})
                        sleep(5)
                        # Bug fix: propagate the incremented retry counter -
                        # the original recursed with the default 0, so the
                        # repeat_count >= 4 guard could never terminate.
                        return hunter_manager(souped_entry, repeat_count + 1)
                    return okay_status, resp
                return okay_status, resp

            okay_status, resp = hunter_manager()

            if okay_status:
                # Merge the conversion response into the chosen format entry.
                resp_data = hunted[0]
                resp_data.update(resp.json())
                return resp_data

            else:
                return {}
        else:
            return {}
|
|
440
|
+
|
|
441
|
+
|
|
442
|
+
class Handler:
    # Orchestrates the search -> resolve -> download pipeline for YouTube media.
    def __init__(
        self,
        query: str,
        author: str = None,
        timeout: int = 30,
        confirm: bool = False,
        unique: bool = False,
        thread: int = 0,
    ):
        r"""Initializes this `class`
        :param query: Video name or youtube link
        :type query: str
        :param author: (Optional) Author (Channel) of the videos
        :type author: str
        :param timeout: (Optional) Http request timeout
        :type timeout: int
        :param confirm: (Optional) Confirm before downloading media
        :type confirm: bool
        :param unique: (Optional) Ignore previously downloaded media
        :type unique: bool
        :param thread: (Optional) Thread the download process through `auto-save` method
        :type thread: int
        """
        self.query = query
        self.author = author
        self.timeout = timeout
        self.keyword = None  # set later by `run()`; filters search results by title
        self.confirm = confirm
        self.unique = unique
        self.thread = thread
        self.vitems = []  # candidate video dicts accumulated across queries
        self.related = []  # related-video lists gathered from second queries
        self.dropped = []  # video ids the user declined or that were duplicates
        self.total = 1  # download limit; overwritten by `run(limit=...)`
        self.saved_videos = utils.get_history()  # previously downloaded ids
|
|
478
|
+
|
|
479
|
+
    def __str__(self):
        """Return the query string this handler was constructed with."""
        return self.query
|
|
481
|
+
|
|
482
|
+
    def __enter__(self, *args, **kwargs):
        """Context-manager entry: yield the handler itself."""
        return self
|
|
484
|
+
|
|
485
|
+
    def __exit__(self, *args, **kwargs):
        """Context-manager exit: drop cached video items and reset the limit."""
        self.vitems.clear()
        self.total = 1
|
|
488
|
+
|
|
489
|
+
    def __call__(self, *args, **kwargs):
        """Calling the instance is a shortcut for :meth:`run`."""
        return self.run(*args, **kwargs)
|
|
491
|
+
|
|
492
|
+
def __filter_videos(self, entries: list) -> list:
|
|
493
|
+
f"""Filter videos based on keyword
|
|
494
|
+
:param entries: List containing dict of video id and their titles
|
|
495
|
+
:type entries: list
|
|
496
|
+
:rtype: list
|
|
497
|
+
"""
|
|
498
|
+
if self.keyword:
|
|
499
|
+
keyword = self.keyword.lower()
|
|
500
|
+
resp = []
|
|
501
|
+
for entry in entries:
|
|
502
|
+
if keyword in entry.get("t").lower():
|
|
503
|
+
resp.append(entry)
|
|
504
|
+
return resp
|
|
505
|
+
|
|
506
|
+
else:
|
|
507
|
+
return entries
|
|
508
|
+
|
|
509
|
+
def __make_first_query(self):
|
|
510
|
+
r"""Sets query_one attribute to `self`"""
|
|
511
|
+
query_one = first_query(self.query)
|
|
512
|
+
self.__setattr__("query_one", query_one.main(self.timeout))
|
|
513
|
+
if self.query_one.is_link == False:
|
|
514
|
+
self.vitems.extend(self.__filter_videos(self.query_one.vitems))
|
|
515
|
+
|
|
516
|
+
@utils.error_handler(exit_on_error=True)
|
|
517
|
+
def __verify_item(self, second_query_obj) -> bool:
|
|
518
|
+
video_id = second_query_obj.vid
|
|
519
|
+
video_author = second_query_obj.a
|
|
520
|
+
video_title = second_query_obj.title
|
|
521
|
+
if video_id in self.saved_videos:
|
|
522
|
+
if self.unique:
|
|
523
|
+
return False, "Duplicate"
|
|
524
|
+
if self.confirm:
|
|
525
|
+
choice = confirm_from_user(
|
|
526
|
+
f">> Re-download : {Fore.GREEN+video_title+Fore.RESET} by {Fore.YELLOW+video_author+Fore.RESET}"
|
|
527
|
+
)
|
|
528
|
+
print("\n[*] Ok processing...", end="\r")
|
|
529
|
+
return choice, "User's choice"
|
|
530
|
+
if self.confirm:
|
|
531
|
+
choice = confirm_from_user(
|
|
532
|
+
f">> Download : {Fore.GREEN+video_title+Fore.RESET} by {Fore.YELLOW+video_author+Fore.RESET}"
|
|
533
|
+
)
|
|
534
|
+
print("\n[*] Ok processing...", end="\r")
|
|
535
|
+
return choice, "User's choice"
|
|
536
|
+
return True, "Auto"
|
|
537
|
+
|
|
538
|
+
    def __make_second_query(self):
        r"""Links first query with 3rd query.

        Generator yielding processed second-query objects (or bare ``None``
        for entries that failed processing, which the consumer skips).
        Branches on whether the first query was a direct link or a search.
        """
        init_query_two = second_query(self.query_one)
        x = 0  # count of successfully yielded items, capped at self.total
        if not self.query_one.is_link:
            # Search-term path: resolve each candidate video dict in turn.
            for video_dict in self.vitems:
                init_query_two.video_dict = video_dict
                query_2 = init_query_two.main(timeout=self.timeout)
                if query_2.processed:
                    if query_2.vid in self.dropped:
                        # Already rejected earlier in this session.
                        continue
                    if self.author and not self.author.lower() in query_2.a.lower():
                        # Author filter active and this channel doesn't match.
                        continue
                    else:
                        yes_download, reason = self.__verify_item(query_2)
                        if not yes_download:
                            self.dropped.append(query_2.vid)
                            continue
                        self.related.append(query_2.related)
                        yield query_2
                        x += 1
                        if x >= self.total:
                            break
                else:
                    print(
                        f"Dropping unprocessed query_two object of index {x}"
                    )
                    # Yield None so the consumer can skip this slot.
                    yield
        else:
            # Direct-link path: resolve the link itself first.
            query_2 = init_query_two.main(timeout=self.timeout)
            if query_2.processed:
                # self.related.extend(query_2.related)
                self.vitems.extend(query_2.related)
                # Flip the flag so related videos are treated like search hits.
                self.query_one.is_link = False
                if self.total == 1:
                    yield query_2
                else:
                    # More than one item requested: walk the related videos.
                    for video_dict in self.vitems:
                        init_query_two.video_dict = video_dict
                        query_2 = init_query_two.main(timeout=self.timeout)
                        if query_2.processed:
                            if (
                                self.author
                                and not self.author.lower() in query_2.a.lower()
                            ):
                                continue
                            else:
                                yes_download, reason = self.__verify_item(query_2)
                                if not yes_download:
                                    self.dropped.append(query_2.vid)
                                    continue
                                self.related.append(query_2.related)
                                yield query_2
                                x += 1
                                if x >= self.total:
                                    break
                        else:
                            yield
            else:
                yield
|
|
601
|
+
|
|
602
|
+
def run(
|
|
603
|
+
self,
|
|
604
|
+
format: str = "mp4",
|
|
605
|
+
quality: str = "auto",
|
|
606
|
+
resolver: str = None,
|
|
607
|
+
limit: int = 1,
|
|
608
|
+
keyword: str = None,
|
|
609
|
+
author: str = None,
|
|
610
|
+
):
|
|
611
|
+
r"""Generate and yield video dictionary
|
|
612
|
+
:param format: (Optional) Media format mp4/mp3
|
|
613
|
+
:param quality: (Optional) Media qualiy such as 720p/128kbps
|
|
614
|
+
:param resolver: (Optional) Additional format info : [m4a,3gp,mp4,mp3]
|
|
615
|
+
:param limit: (Optional) Total videos to be generated
|
|
616
|
+
:param keyword: (Optional) Video keyword
|
|
617
|
+
:param author: (Optional) Author of the videos
|
|
618
|
+
:type quality: str
|
|
619
|
+
:type total: int
|
|
620
|
+
:type keyword: str
|
|
621
|
+
:type author: str
|
|
622
|
+
:rtype: object
|
|
623
|
+
"""
|
|
624
|
+
self.author = author
|
|
625
|
+
self.keyword = keyword
|
|
626
|
+
self.total = limit
|
|
627
|
+
self.__make_first_query()
|
|
628
|
+
for query_two_obj in self.__make_second_query():
|
|
629
|
+
if query_two_obj:
|
|
630
|
+
self.vitems.extend(query_two_obj.related)
|
|
631
|
+
yield third_query(query_two_obj).main(
|
|
632
|
+
**dict(
|
|
633
|
+
format=format,
|
|
634
|
+
quality=quality,
|
|
635
|
+
resolver=resolver,
|
|
636
|
+
timeout=self.timeout,
|
|
637
|
+
)
|
|
638
|
+
)
|
|
639
|
+
|
|
640
|
+
|
|
641
|
+
def generate_filename(self, third_dict: dict, naming_format: str = None) -> str:
|
|
642
|
+
r"""Generate filename based on the response of `third_query`
|
|
643
|
+
:param third_dict: response of `third_query.main()` object
|
|
644
|
+
:param naming_format: (Optional) Format for generating filename based on `third_dict` keys
|
|
645
|
+
:type third_dict: dict
|
|
646
|
+
:type naming_format: str
|
|
647
|
+
:rtype: str
|
|
648
|
+
"""
|
|
649
|
+
fnm = (
|
|
650
|
+
f"{naming_format}" % third_dict
|
|
651
|
+
if naming_format
|
|
652
|
+
else f"{third_dict['title']} {third_dict['vid']}_{third_dict['fquality']}.{third_dict['ftype']}"
|
|
653
|
+
)
|
|
654
|
+
|
|
655
|
+
def sanitize(nm):
|
|
656
|
+
trash = [
|
|
657
|
+
"\\",
|
|
658
|
+
"/",
|
|
659
|
+
":",
|
|
660
|
+
"*",
|
|
661
|
+
"?",
|
|
662
|
+
'"',
|
|
663
|
+
"<",
|
|
664
|
+
"|",
|
|
665
|
+
">",
|
|
666
|
+
"y2mate.com",
|
|
667
|
+
"y2mate com",
|
|
668
|
+
]
|
|
669
|
+
for val in trash:
|
|
670
|
+
nm = nm.replace(val, "")
|
|
671
|
+
return nm.strip()
|
|
672
|
+
|
|
673
|
+
return sanitize(fnm)
|
|
674
|
+
|
|
675
|
+
def auto_save(
|
|
676
|
+
self,
|
|
677
|
+
dir: str = "",
|
|
678
|
+
iterator: object = None,
|
|
679
|
+
progress_bar=True,
|
|
680
|
+
quiet: bool = False,
|
|
681
|
+
naming_format: str = None,
|
|
682
|
+
chunk_size: int = 512,
|
|
683
|
+
play: bool = False,
|
|
684
|
+
resume: bool = False,
|
|
685
|
+
*args,
|
|
686
|
+
**kwargs,
|
|
687
|
+
):
|
|
688
|
+
r"""Query and save all the media
|
|
689
|
+
:param dir: (Optional) Path to Directory for saving the media files
|
|
690
|
+
:param iterator: (Optional) Function that yields third_query object - `Handler.run`
|
|
691
|
+
:param progress_bar: (Optional) Display progress bar
|
|
692
|
+
:param quiet: (Optional) Not to stdout anything
|
|
693
|
+
:param naming_format: (Optional) Format for generating filename
|
|
694
|
+
:param chunk_size: (Optional) Chunk_size for downloading files in KB
|
|
695
|
+
:param play: (Optional) Auto-play the media after download
|
|
696
|
+
:param resume: (Optional) Resume the incomplete download
|
|
697
|
+
:type dir: str
|
|
698
|
+
:type iterator: object
|
|
699
|
+
:type progress_bar: bool
|
|
700
|
+
:type quiet: bool
|
|
701
|
+
:type naming_format: str
|
|
702
|
+
:type chunk_size: int
|
|
703
|
+
:type play: bool
|
|
704
|
+
:type resume: bool
|
|
705
|
+
args & kwargs for the iterator
|
|
706
|
+
:rtype: None
|
|
707
|
+
"""
|
|
708
|
+
iterator_object = iterator or self.run(*args, **kwargs)
|
|
709
|
+
|
|
710
|
+
for x, entry in enumerate(iterator_object):
|
|
711
|
+
if self.thread:
|
|
712
|
+
t1 = Thread(
|
|
713
|
+
target=self.save,
|
|
714
|
+
args=(
|
|
715
|
+
entry,
|
|
716
|
+
dir,
|
|
717
|
+
False,
|
|
718
|
+
quiet,
|
|
719
|
+
naming_format,
|
|
720
|
+
chunk_size,
|
|
721
|
+
play,
|
|
722
|
+
resume,
|
|
723
|
+
),
|
|
724
|
+
)
|
|
725
|
+
t1.start()
|
|
726
|
+
thread_count = x + 1
|
|
727
|
+
if thread_count % self.thread == 0 or thread_count == self.total:
|
|
728
|
+
t1.join()
|
|
729
|
+
else:
|
|
730
|
+
self.save(
|
|
731
|
+
entry,
|
|
732
|
+
dir,
|
|
733
|
+
progress_bar,
|
|
734
|
+
quiet,
|
|
735
|
+
naming_format,
|
|
736
|
+
chunk_size,
|
|
737
|
+
play,
|
|
738
|
+
resume,
|
|
739
|
+
)
|
|
740
|
+
|
|
741
|
+
    def save(
        self,
        third_dict: dict,
        dir: str = "",
        progress_bar=True,
        quiet: bool = False,
        naming_format: str = None,
        chunk_size: int = 512,
        play: bool = False,
        resume: bool = False,
        disable_history=False,
    ):
        r"""Download media based on response of `third_query` dict-data-type
        :param third_dict: Response of `third_query.run()`
        :param dir: (Optional) Directory for saving the contents
        :param progress_bar: (Optional) Display download progress bar
        :param quiet: (Optional) Not to stdout anything
        :param naming_format: (Optional) Format for generating filename
        :param chunk_size: (Optional) Chunk_size for downloading files in KB
        :param play: (Optional) Auto-play the media after download
        :param resume: (Optional) Resume the incomplete download
        :param disable_history: (Optional) Don't save the download to history.
        :type third_dict: dict
        :type dir: str
        :type progress_bar: bool
        :type quiet: bool
        :type naming_format: str
        :type chunk_size: int
        :type play: bool
        :type resume: bool
        :type disable_history: bool
        :rtype: None
        """
        if third_dict:
            # No download link means the requested quality isn't available.
            assert third_dict.get(
                "dlink"
            ), "The video selected does not support that quality, try lower qualities."
            if third_dict.get("mess"):
                # NOTE(review): server "mess" field is currently ignored.
                pass

            current_downloaded_size = 0
            current_downloaded_size_in_mb = 0
            filename = self.generate_filename(third_dict, naming_format)
            save_to = path.join(dir, filename)
            mod_headers = headers  # module-level default headers; overridden on resume

            if resume:
                assert path.exists(save_to), f"File not found in path - '{save_to}'"
                current_downloaded_size = path.getsize(save_to)
                # Set the headers to resume download from the last byte
                mod_headers = {"Range": f"bytes={current_downloaded_size}-"}
                current_downloaded_size_in_mb = round(
                    current_downloaded_size / 1000000, 2
                )  # convert to mb

            resp = requests.get(third_dict["dlink"], stream=True, headers=mod_headers)

            default_content_length = 0
            size_in_bytes = int(
                resp.headers.get("content-length", default_content_length)
            )
            if not size_in_bytes:
                if resume:
                    # Zero remaining bytes on a ranged request: already complete.
                    raise FileExistsError(
                        f"Download completed for the file in path - '{save_to}'"
                    )
                else:
                    raise Exception(
                        f"Cannot download file of content-length {size_in_bytes} bytes"
                    )

            if resume:
                assert (
                    size_in_bytes != current_downloaded_size
                ), f"Download completed for the file in path - '{save_to}'"

            size_in_mb = (
                round(size_in_bytes / 1000000, 2) + current_downloaded_size_in_mb
            )
            chunk_size_in_bytes = chunk_size * 1024  # caller passes KB

            # Record an absolute destination path ("/"-rooted or drive-lettered
            # paths pass through; relative paths are anchored at the cwd).
            third_dict["saved_to"] = (
                save_to
                if any([save_to.startswith("/"), ":" in save_to])
                else path.join(getcwd(), dir, filename)
            )
            try_play_media = (
                lambda: launch_media(third_dict["saved_to"]) if play else None
            )
            saving_mode = "ab" if resume else "wb"  # append when resuming
            if progress_bar:
                if not quiet:
                    print(f"(unknown)")
                with tqdm(
                    total=size_in_bytes + current_downloaded_size,
                    bar_format="%s%d MB %s{bar} %s{l_bar}%s"
                    % (Fore.GREEN, size_in_mb, Fore.CYAN, Fore.YELLOW, Fore.RESET),
                    initial=current_downloaded_size,
                ) as p_bar:
                    # p_bar.update(current_downloaded_size)
                    with open(save_to, saving_mode) as fh:
                        for chunks in resp.iter_content(chunk_size=chunk_size_in_bytes):
                            fh.write(chunks)
                            p_bar.update(chunk_size_in_bytes)
                    if not disable_history:
                        utils.add_history(third_dict)
                    try_play_media()
                    return save_to
            else:
                with open(save_to, saving_mode) as fh:
                    for chunks in resp.iter_content(chunk_size=chunk_size_in_bytes):
                        fh.write(chunks)
                if not disable_history:
                    utils.add_history(third_dict)

                try_play_media()

                return save_to
|
|
859
|
+
|
|
860
|
+
|
|
861
|
+
|
|
862
|
+
# Quality labels accepted for each media format, plus resolver hints.
mp4_qualities = [
    "4k", "1080p", "720p", "480p", "360p", "240p", "144p",
    "auto", "best", "worst",
]
mp3_qualities = ["mp3", "m4a", ".m4a", "128kbps", "192kbps", "328kbps"]
resolvers = ["m4a", "3gp", "mp4", "mp3"]
media_qualities = mp4_qualities + mp3_qualities
|
|
877
|
+
|
|
878
|
+
def launch_media(filepath):
    """
    Launch media file using default system application
    """
    try:
        plat = sys.platform
        if plat.startswith('darwin'):
            # macOS: `open` hands the file to the registered handler.
            subprocess.call(('open', filepath))
        elif plat.startswith('win'):
            # Windows provides a dedicated API for this.
            os.startfile(filepath)
        elif plat.startswith('linux'):
            # Linux: defer to freedesktop's xdg-open.
            subprocess.call(('xdg-open', filepath))
    except Exception as e:
        print(f"Error launching media: {e}")
|
|
891
|
+
|
|
892
|
+
|
|
893
|
+
def confirm_from_user(message, default=False):
    """
    Prompt user for confirmation
    """
    # Accepted spellings of yes/no answers.
    answers = {"yes": True, "y": True, "ye": True,
               "no": False, "n": False}

    if default is None:
        suffix = " [y/n] "
    elif default:
        suffix = " [Y/n] "
    else:
        suffix = " [y/N] "

    while True:
        reply = input(message + suffix).lower()
        if default is not None and reply == '':
            # Empty answer falls back to the caller-supplied default.
            return default
        if reply in answers:
            return answers[reply]
        print("Please respond with 'yes' or 'no' (or 'y' or 'n').")
|
|
915
|
+
|
|
916
|
+
|
|
917
|
+
# Create CLI app (swiftcli-style `CLI`; commands register via decorators below)
app = CLI(name="ytdownloader", help="YouTube Video Downloader CLI")
|
|
919
|
+
|
|
920
|
+
@app.command()
@option("--author", help="Specify video author/channel")
@option("--timeout", type=int, default=30, help="HTTP request timeout")
@option("--confirm", is_flag=True, help="Confirm before downloading")
@option("--unique", is_flag=True, help="Ignore previously downloaded media")
@option("--thread", type=int, default=0, help="Thread download process")
@option("--format", default="mp4", help="Download format (mp4/mp3)")
@option("--quality", default="auto", help="Video quality")
@option("--limit", type=int, default=1, help="Total videos to download")
@option("--keyword", help="Filter videos by keyword")
@argument("query", help="Video name or YouTube link")
def download(query, author, timeout, confirm, unique, thread, format, quality, limit, keyword):
    """Download YouTube videos with advanced options"""

    # Create handler with parsed arguments
    handler = Handler(
        query=query,
        author=author,
        timeout=timeout,
        confirm=confirm,
        unique=unique,
        thread=thread
    )

    # Run download process. format/quality/limit/keyword are forwarded
    # through auto_save's *args/**kwargs to Handler.run.
    handler.auto_save(
        format=format,
        quality=quality,
        limit=limit,
        keyword=keyword
    )
|
|
951
|
+
|
|
952
|
+
# Replace get_args function with swiftcli's argument parsing
def main():
    """Entry point: hand argument parsing and dispatch to the CLI app."""
    app.run()
|
|
955
|
+
|
|
956
|
+
# Allow the module to be executed directly as a script.
if __name__ == "__main__":
    main()
|