KekikStream 1.9.9.tar.gz → 2.1.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Extractor/ExtractorBase.py +12 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Extractor/ExtractorLoader.py +25 -17
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Extractor/ExtractorManager.py +1 -1
- kekikstream-2.1.0/KekikStream/Core/Extractor/YTDLPCache.py +35 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Plugin/PluginBase.py +54 -2
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Plugin/PluginLoader.py +11 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/__init__.py +1 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/CloseLoad.py +1 -1
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/ContentX.py +19 -2
- kekikstream-2.1.0/KekikStream/Extractors/DonilasPlay.py +86 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/Odnoklassniki.py +6 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/PeaceMakerst.py +6 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/PlayerFilmIzle.py +8 -5
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/RapidVid.py +6 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/SetPlay.py +13 -4
- kekikstream-2.1.0/KekikStream/Extractors/VCTPlay.py +41 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/VidMoly.py +52 -30
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/YTDLP.py +97 -58
- kekikstream-2.1.0/KekikStream/Plugins/BelgeselX.py +196 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/DiziBox.py +6 -10
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/DiziPal.py +4 -15
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/DiziYou.py +2 -10
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/Dizilla.py +18 -13
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/FilmBip.py +3 -6
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/FilmMakinesi.py +7 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/FilmModu.py +7 -11
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/FullHDFilm.py +4 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/FullHDFilmizlesene.py +4 -6
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/HDFilmCehennemi.py +84 -19
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/JetFilmizle.py +49 -38
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/KultFilmler.py +5 -8
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/RecTV.py +2 -10
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/RoketDizi.py +20 -30
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/SelcukFlix.py +47 -52
- kekikstream-2.1.0/KekikStream/Plugins/SetFilmIzle.py +252 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/SezonlukDizi.py +24 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/SineWix.py +1 -9
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/Sinefy.py +15 -9
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/SinemaCX.py +5 -17
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/Sinezy.py +19 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/SuperFilmGeldi.py +7 -9
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Plugins/UgurFilm.py +5 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/__init__.py +2 -21
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream.egg-info/PKG-INFO +27 -1
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream.egg-info/SOURCES.txt +5 -11
- {kekikstream-1.9.9 → kekikstream-2.1.0}/PKG-INFO +27 -1
- {kekikstream-1.9.9 → kekikstream-2.1.0}/README.md +26 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/setup.py +1 -1
- kekikstream-1.9.9/KekikStream/Extractors/FirePlayer.py +0 -60
- kekikstream-1.9.9/KekikStream/Extractors/FourCX.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/FourPichive.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/FourPlayRu.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/HDStreamAble.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/Hotlinger.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/OkRuHTTP.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/OkRuSSL.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/Pichive.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/PlayRu.py +0 -7
- kekikstream-1.9.9/KekikStream/Extractors/VidMolyMe.py +0 -7
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/CLI/__init__.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/CLI/pypi_kontrol.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Extractor/ExtractorModels.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Media/MediaHandler.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Media/MediaManager.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Plugin/PluginManager.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Plugin/PluginModels.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/UI/UIManager.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/DzenRu.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/ExPlay.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/HDPlayerSystem.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/JetTv.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/MailRu.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/MixPlayHD.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/MixTiger.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/MolyStream.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/PixelDrain.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/SetPrime.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/SibNet.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/Sobreatsesuyp.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/TRsTX.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/TauVideo.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/TurboImgz.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/TurkeyPlayer.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/VidHide.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/VidMoxy.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/VidPapi.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/VideoSeyred.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/YildizKisaFilm.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/__main__.py +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/requirements.txt +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream.egg-info/dependency_links.txt +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream.egg-info/entry_points.txt +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream.egg-info/requires.txt +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream.egg-info/top_level.txt +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/LICENSE +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/MANIFEST.in +0 -0
- {kekikstream-1.9.9 → kekikstream-2.1.0}/setup.cfg +0 -0
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Extractor/ExtractorBase.py

```diff
@@ -5,7 +5,7 @@ from cloudscraper import CloudScraper
 from httpx import AsyncClient
 from typing import Optional
 from .ExtractorModels import ExtractResult
-from urllib.parse import urljoin
+from urllib.parse import urljoin, urlparse
 
 class ExtractorBase(ABC):
     # Çıkarıcının temel özellikleri
@@ -17,18 +17,23 @@ class ExtractorBase(ABC):
         self.cloudscraper = CloudScraper()
 
         # httpx - lightweight and safe for most HTTP requests
-        self.httpx = AsyncClient(
-            timeout = 3,
-            follow_redirects = True
-        )
+        self.httpx = AsyncClient(timeout = 10)
         self.httpx.headers.update(self.cloudscraper.headers)
         self.httpx.cookies.update(self.cloudscraper.cookies)
-        self.httpx.headers.update({
+        self.httpx.headers.update({
+            "User-Agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 15.7; rv:135.0) Gecko/20100101 Firefox/135.0",
+            "Accept" : "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
+        })
 
     def can_handle_url(self, url: str) -> bool:
         # URL'nin bu çıkarıcı tarafından işlenip işlenemeyeceğini kontrol et
        return self.main_url in url
 
+    def get_base_url(self, url: str) -> str:
+        """URL'den base URL'i çıkar (scheme + netloc)"""
+        parsed = urlparse(url)
+        return f"{parsed.scheme}://{parsed.netloc}"
+
     @abstractmethod
     async def extract(self, url: str, referer: Optional[str] = None) -> ExtractResult:
         # Alt sınıflar tarafından uygulanacak medya çıkarma fonksiyonu
@@ -46,4 +51,4 @@ class ExtractorBase(ABC):
         if url.startswith("http") or url.startswith("{\""):
             return url
 
-        return f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
+        return f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
```
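The new `get_base_url` helper keeps only the scheme and host of whatever URL it is given, so an extractor can talk to the mirror domain that actually served the embed instead of its hard-coded `main_url`. A minimal standalone sketch of the same logic (the example URLs are illustrative):

```python
from urllib.parse import urlparse

def get_base_url(url: str) -> str:
    # Keep scheme + netloc, drop path, query and fragment
    parsed = urlparse(url)
    return f"{parsed.scheme}://{parsed.netloc}"

print(get_base_url("https://four.dplayer82.site/source2.php?v=abc123"))  # https://four.dplayer82.site
print(get_base_url("https://contentx.me/embed?partKey=turkcedublaj"))    # https://contentx.me
```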
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Extractor/ExtractorLoader.py

```diff
@@ -19,19 +19,22 @@ class ExtractorLoader:
     def load_all(self) -> list[ExtractorBase]:
         extractors = []
 
-        #
-        if self.global_extractors_dir.exists():
-            # konsol.log(f"[green][*] Global Extractor dizininden yükleniyor: {self.global_extractors_dir}[/green]")
-            global_extractors = self._load_from_directory(self.global_extractors_dir)
-            # konsol.log(f"[green]Global Extractor'lar: {[e.__name__ for e in global_extractors]}[/green]")
-            extractors.extend(global_extractors)
-
-        # Yerel çıkarıcıları yükle
+        # Eğer yerel dizinde Extractor varsa, sadece onları yükle (eklenti geliştirme modu)
         if self.local_extractors_dir.exists():
             # konsol.log(f"[green][*] Yerel Extractor dizininden yükleniyor: {self.local_extractors_dir}[/green]")
             local_extractors = self._load_from_directory(self.local_extractors_dir)
             # konsol.log(f"[green]Yerel Extractor'lar: {[e.__name__ for e in local_extractors]}[/green]")
-
+
+            if local_extractors:
+                # konsol.log("[cyan][*] Yerel Extractor bulundu, global Extractor'lar atlanıyor (eklenti geliştirme modu)[/cyan]")
+                extractors.extend(local_extractors)
+
+        # Yerel dizinde Extractor yoksa, global'leri yükle
+        if not extractors and self.global_extractors_dir.exists():
+            # konsol.log(f"[green][*] Global Extractor dizininden yükleniyor: {self.global_extractors_dir}[/green]")
+            global_extractors = self._load_from_directory(self.global_extractors_dir)
+            # konsol.log(f"[green]Global Extractor'lar: {[e.__name__ for e in global_extractors]}[/green]")
+            extractors.extend(global_extractors)
 
         # Benzersizliği sağlama (modül adı + sınıf adı bazında)
         unique_extractors = []
@@ -57,9 +60,10 @@ class ExtractorLoader:
             if file.endswith(".py") and not file.startswith("__"):
                 module_name = file[:-3] # .py uzantısını kaldır
                 # konsol.log(f"[cyan]Okunan Dosya\t\t: {module_name}[/cyan]")
-
-
-
+                module_extractors = self._load_extractor(directory, module_name)
+                if module_extractors:
+                    # konsol.log(f"[magenta]Extractor Yüklendi\t: {[e.__name__ for e in module_extractors]}[/magenta]")
+                    extractors.extend(module_extractors)
 
         # konsol.log(f"[yellow]{directory} dizininden yüklenen Extractor'lar: {[e.__name__ for e in extractors]}[/yellow]")
         return extractors
@@ -70,21 +74,25 @@ class ExtractorLoader:
             path = directory / f"{module_name}.py"
             spec = importlib.util.spec_from_file_location(module_name, path)
             if not spec or not spec.loader:
-                return
+                return []
 
             # Modülü içe aktar
             module = importlib.util.module_from_spec(spec)
             spec.loader.exec_module(module)
 
-            # Yalnızca doğru modülden gelen ExtractorBase sınıflarını yükle
+            # Yalnızca doğru modülden gelen ExtractorBase sınıflarını yükle (TÜM CLASS'LAR)
+            extractors = []
             for attr in dir(module):
                 obj = getattr(module, attr)
-
+                # isinstance kontrolünü __module__ kontrolünden ÖNCE yap
+                if isinstance(obj, type) and issubclass(obj, ExtractorBase) and obj is not ExtractorBase and obj.__module__ == module_name:
                     # konsol.log(f"[green]Yüklenen sınıf\t\t: {module_name}.{obj.__name__} ({obj.__module__}.{obj.__name__})[/green]")
-
+                    extractors.append(obj)
+
+            return extractors
 
         except Exception as hata:
             konsol.log(f"[red][!] Extractor yüklenirken hata oluştu: {module_name}\nHata: {hata}")
             konsol.print(f"[dim]{traceback.format_exc()}[/dim]")
 
-            return
+            return []
```
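Both the extractor loader above and the plugin loader further down now give a non-empty local directory absolute priority ("plugin development mode"): the packaged global modules are consulted only when nothing local was loaded. A rough standalone sketch of that precedence, with placeholder directory names and a caller-supplied loader function:

```python
from pathlib import Path
from typing import Callable

def load_with_local_priority(local_dir: Path, global_dir: Path,
                             load_from: Callable[[Path], list]) -> list:
    items: list = []

    # Development mode: anything found locally wins outright
    if local_dir.exists():
        items.extend(load_from(local_dir))

    # Fall back to the packaged modules only if nothing local was loaded
    if not items and global_dir.exists():
        items.extend(load_from(global_dir))

    return items

# Example with a dummy loader that just lists .py files
print(load_with_local_priority(Path("./Extractors"), Path("/usr/lib/app/Extractors"),
                               lambda d: sorted(p.name for p in d.glob("*.py"))))
```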
kekikstream-2.1.0/KekikStream/Core/Extractor/YTDLPCache.py (new file)

```diff
@@ -0,0 +1,35 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from Kekik.cli import konsol
+from yt_dlp.extractor import gen_extractors
+
+# Global cache (module-level singleton)
+_YTDLP_EXTRACTORS_CACHE = None
+_CACHE_INITIALIZED = False
+
+def get_ytdlp_extractors() -> list:
+    """
+    yt-dlp extractorlarını cache'le ve döndür
+
+    Returns:
+        list: yt-dlp extractor sınıfları
+    """
+    global _YTDLP_EXTRACTORS_CACHE, _CACHE_INITIALIZED
+
+    if _CACHE_INITIALIZED:
+        return _YTDLP_EXTRACTORS_CACHE
+
+    try:
+        extractors = list(gen_extractors())
+        extractors = [ie for ie in extractors if ie.ie_key() != 'Generic']
+
+        _YTDLP_EXTRACTORS_CACHE = extractors
+        _CACHE_INITIALIZED = True
+
+        return extractors
+
+    except Exception as e:
+        konsol.log(f"[red][⚠] yt-dlp extractor cache hatası: {e}[/red]")
+        _YTDLP_EXTRACTORS_CACHE = []
+        _CACHE_INITIALIZED = True
+        return []
```
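`gen_extractors()` instantiates every yt-dlp extractor, so the new module caches the result once per process and filters out the catch-all `Generic` extractor. A hedged usage sketch (assumes KekikStream 2.1.0 and yt-dlp are installed; the URL is only an example):

```python
from KekikStream.Core import get_ytdlp_extractors  # re-exported in Core/__init__.py below

def ytdlp_recognises(url: str) -> bool:
    # Because "Generic" is filtered out of the cache, a match here means
    # a site-specific yt-dlp extractor claims this URL.
    return any(ie.suitable(url) for ie in get_ytdlp_extractors())

print(ytdlp_recognises("https://ok.ru/video/123456"))
```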
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Plugin/PluginBase.py

```diff
@@ -1,11 +1,13 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
 
+from ...CLI import konsol
 from abc import ABC, abstractmethod
 from cloudscraper import CloudScraper
 from httpx import AsyncClient
 from .PluginModels import MainPageResult, SearchResult, MovieInfo
 from ..Media.MediaHandler import MediaHandler
 from ..Extractor.ExtractorManager import ExtractorManager
+from ..Extractor.ExtractorModels import ExtractResult
 from urllib.parse import urljoin
 import re
 
@@ -34,7 +36,10 @@ class PluginBase(ABC):
         )
         self.httpx.headers.update(self.cloudscraper.headers)
         self.httpx.cookies.update(self.cloudscraper.cookies)
-        self.httpx.headers.update({
+        self.httpx.headers.update({
+            "User-Agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 15.7; rv:135.0) Gecko/20100101 Firefox/135.0",
+            "Accept" : "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
+        })
 
         self.media_handler = MediaHandler()
         self.ex_manager = ExtractorManager()
@@ -92,6 +97,41 @@ class PluginBase(ABC):
 
         return f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
 
+    async def extract(self, url: str, referer: str = None, prefix: str | None = None) -> dict | None:
+        """
+        Extractor ile video URL'sini çıkarır.
+
+        Args:
+            url: Iframe veya video URL'si
+            referer: Referer header (varsayılan: plugin main_url)
+            prefix: İsmin başına eklenecek opsiyonel etiket (örn: "Türkçe Dublaj")
+
+        Returns:
+            dict: Extractor sonucu (name prefix ile birleştirilmiş) veya None
+
+        Extractor bulunamadığında veya hata oluştuğunda uyarı verir.
+        """
+        if referer is None:
+            referer = f"{self.main_url}/"
+
+        extractor = self.ex_manager.find_extractor(url)
+        if not extractor:
+            konsol.log(f"[magenta][?] {self.name} » Extractor bulunamadı: {url}")
+            return None
+
+        try:
+            data = await extractor.extract(url, referer=referer)
+            result = data.dict()
+
+            # prefix varsa name'e ekle
+            if prefix and result.get("name"):
+                result["name"] = f"{prefix} | {result['name']}"
+
+            return result
+        except Exception as hata:
+            konsol.log(f"[red][!] {self.name} » Extractor hatası ({extractor.name}): {hata}")
+            return None
+
     @staticmethod
     def clean_title(title: str) -> str:
         suffixes = [
@@ -116,4 +156,16 @@ class PluginBase(ABC):
         for suffix in suffixes:
             cleaned_title = re.sub(f"{re.escape(suffix)}.*$", "", cleaned_title, flags=re.IGNORECASE).strip()
 
-        return cleaned_title
+        return cleaned_title
+
+    async def play(self, **kwargs):
+        """
+        Varsayılan oynatma metodu.
+        Tüm pluginlerde ortak kullanılır.
+        """
+        extract_result = ExtractResult(**kwargs)
+        self.media_handler.title = kwargs.get("name")
+        if self.name not in self.media_handler.title:
+            self.media_handler.title = f"{self.name} | {self.media_handler.title}"
+
+        self.media_handler.play_media(extract_result)
```
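With the shared `extract()` helper above, plugin code no longer has to locate an extractor, await it and merge the label itself. A hypothetical snippet, sketched against the signature shown in this diff (the plugin instance, iframe URL and prefix are placeholders, not KekikStream's actual call sites):

```python
import asyncio

async def resolve_iframe(plugin, iframe_url: str) -> dict | None:
    # PluginBase.extract() (KekikStream 2.1.0+) finds a matching extractor, runs it,
    # and returns its result as a dict with the prefix folded into "name",
    # or None when no extractor matches / extraction fails.
    return await plugin.extract(iframe_url, prefix="Türkçe Dublaj")

# Hypothetical call, given some PluginBase subclass instance `some_plugin`:
# asyncio.run(resolve_iframe(some_plugin, "https://vctplay.site/video/2hjDGco5exdv"))
```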
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/Plugin/PluginLoader.py

```diff
@@ -19,15 +19,19 @@ class PluginLoader:
     def load_all(self) -> dict[str, PluginBase]:
         plugins = {}
 
-        #
-        if self.global_plugins_dir.exists():
-            # konsol.log(f"[green][*] Global Eklenti dizininden yükleniyor: {self.global_plugins_dir}[/green]")
-            plugins |= self._load_from_directory(self.global_plugins_dir)
-
-        # Yerel eklentileri yükle
+        # Eğer yerel dizinde Plugin varsa, sadece onları yükle (eklenti geliştirme modu)
         if self.local_plugins_dir.exists():
             # konsol.log(f"[green][*] Yerel Eklenti dizininden yükleniyor: {self.local_plugins_dir}[/green]")
-
+            local_plugins = self._load_from_directory(self.local_plugins_dir)
+
+            if local_plugins:
+                # konsol.log("[cyan][*] Yerel Plugin bulundu, global Plugin'ler atlanıyor (eklenti geliştirme modu)[/cyan]")
+                plugins |= local_plugins
+
+        # Yerel dizinde Plugin yoksa, global'leri yükle
+        if not plugins and self.global_plugins_dir.exists():
+            # konsol.log(f"[green][*] Global Eklenti dizininden yükleniyor: {self.global_plugins_dir}[/green]")
+            plugins |= self._load_from_directory(self.global_plugins_dir)
 
         if not plugins:
             konsol.print("[yellow][!] Yüklenecek bir Eklenti bulunamadı![/yellow]")
```
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Core/__init__.py

```diff
@@ -13,6 +13,7 @@ from .Extractor.ExtractorManager import ExtractorManager
 from .Extractor.ExtractorBase import ExtractorBase
 from .Extractor.ExtractorLoader import ExtractorLoader
 from .Extractor.ExtractorModels import ExtractResult, Subtitle
+from .Extractor.YTDLPCache import get_ytdlp_extractors
 
 from .Media.MediaManager import MediaManager
 from .Media.MediaHandler import MediaHandler
```
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/ContentX.py

```diff
@@ -7,10 +7,26 @@ class ContentX(ExtractorBase):
     name = "ContentX"
     main_url = "https://contentx.me"
 
+    # Birden fazla domain destekle
+    supported_domains = [
+        "contentx.me", "four.contentx.me",
+        "dplayer82.site", "sn.dplayer82.site", "four.dplayer82.site", "org.dplayer82.site",
+        "dplayer74.site", "sn.dplayer74.site",
+        "hotlinger.com", "sn.hotlinger.com",
+        "playru.net", "four.playru.net",
+        "pichive.online", "four.pichive.online", "pichive.me", "four.pichive.me"
+    ]
+
+    def can_handle_url(self, url: str) -> bool:
+        return any(domain in url for domain in self.supported_domains)
+
     async def extract(self, url, referer=None) -> list[ExtractResult]:
         if referer:
             self.httpx.headers.update({"Referer": referer})
 
+        # Dinamik base URL kullan
+        base_url = self.get_base_url(url)
+
         istek = await self.httpx.get(url)
         istek.raise_for_status()
         i_source = istek.text
@@ -39,7 +55,8 @@ class ContentX(ExtractorBase):
             )
         )
 
-
+        # base_url kullan (contentx.me yerine)
+        vid_source_request = await self.httpx.get(f"{base_url}/source2.php?v={i_extract_value}", headers={"Referer": referer or base_url})
         vid_source_request.raise_for_status()
 
         vid_source = vid_source_request.text
@@ -59,7 +76,7 @@ class ContentX(ExtractorBase):
 
         if i_dublaj := re.search(r',\"([^"]+)\",\"Türkçe"', i_source):
             dublaj_value = i_dublaj[1]
-            dublaj_source_request = await self.httpx.get(f"{
+            dublaj_source_request = await self.httpx.get(f"{base_url}/source2.php?v={dublaj_value}", headers={"Referer": referer or base_url})
             dublaj_source_request.raise_for_status()
 
             dublaj_source = dublaj_source_request.text
```
kekikstream-2.1.0/KekikStream/Extractors/DonilasPlay.py (new file)

```diff
@@ -0,0 +1,86 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from KekikStream.Core import ExtractorBase, ExtractResult, Subtitle
+from Kekik.Sifreleme import AESManager
+import re, json
+
+class DonilasPlay(ExtractorBase):
+    name = "DonilasPlay"
+    main_url = "https://donilasplay.com"
+
+    async def extract(self, url, referer=None) -> ExtractResult:
+        if referer:
+            self.httpx.headers.update({"Referer": referer})
+
+        istek = await self.httpx.get(url)
+        istek.raise_for_status()
+        i_source = istek.text
+
+        m3u_link = None
+        subtitles = []
+
+        # bePlayer pattern
+        be_player_match = re.search(r"bePlayer\('([^']+)',\s*'(\{[^}]+\})'\);", i_source)
+        if be_player_match:
+            be_player_pass = be_player_match.group(1)
+            be_player_data = be_player_match.group(2)
+
+            try:
+                # AES decrypt
+                decrypted = AESManager.decrypt(be_player_data, be_player_pass)
+                data = json.loads(decrypted)
+
+                m3u_link = data.get("video_location")
+
+                # Altyazıları işle
+                str_subtitles = data.get("strSubtitles", [])
+                if str_subtitles:
+                    for sub in str_subtitles:
+                        label = sub.get("label", "")
+                        file = sub.get("file", "")
+                        # Forced altyazıları hariç tut
+                        if "Forced" in label:
+                            continue
+                        if file:
+                            # Türkçe kontrolü
+                            keywords = ["tur", "tr", "türkçe", "turkce"]
+                            language = "Turkish" if any(k in label.lower() for k in keywords) else label
+                            subtitles.append(Subtitle(
+                                name = language,
+                                url = self.fix_url(file)
+                            ))
+            except Exception:
+                pass
+
+        # Fallback: file pattern
+        if not m3u_link:
+            file_match = re.search(r'file:"([^"]+)"', i_source)
+            if file_match:
+                m3u_link = file_match.group(1)
+
+            # tracks pattern for subtitles
+            tracks_match = re.search(r'tracks:\[([^\]]+)', i_source)
+            if tracks_match:
+                try:
+                    tracks_str = f"[{tracks_match.group(1)}]"
+                    tracks = json.loads(tracks_str)
+                    for track in tracks:
+                        file_url = track.get("file")
+                        label = track.get("label", "")
+                        if file_url and "Forced" not in label:
+                            subtitles.append(Subtitle(
+                                name = label,
+                                url = self.fix_url(file_url)
+                            ))
+                except Exception:
+                    pass
+
+        if not m3u_link:
+            raise ValueError("m3u link not found")
+
+        return ExtractResult(
+            name = self.name,
+            url = m3u_link,
+            referer = url,
+            subtitles = subtitles
+        )
```
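If the encrypted `bePlayer(...)` payload is missing, DonilasPlay falls back to scraping `file:"…"` and a `tracks:[…]` array straight out of the page source. A self-contained sketch of that fallback path on a made-up HTML snippet (the regex patterns are the ones from the diff; the URLs are invented):

```python
import json, re

html = '''<script>jwplayer().setup({file:"https://example.com/stream/master.m3u8",
tracks:[{"file": "https://example.com/sub/tr.vtt", "label": "Turkish"},
        {"file": "https://example.com/sub/tr-forced.vtt", "label": "Turkish Forced"}]});</script>'''

# Fallback 1: the stream URL
m3u_link = None
if file_match := re.search(r'file:"([^"]+)"', html):
    m3u_link = file_match.group(1)

# Fallback 2: subtitles from the tracks array, skipping "Forced" entries
subtitles = []
if tracks_match := re.search(r'tracks:\[([^\]]+)', html):
    for track in json.loads(f"[{tracks_match.group(1)}]"):
        if track.get("file") and "Forced" not in track.get("label", ""):
            subtitles.append({"name": track["label"], "url": track["file"]})

print(m3u_link, subtitles)
```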
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/Odnoklassniki.py

```diff
@@ -7,6 +7,12 @@ class Odnoklassniki(ExtractorBase):
     name = "Odnoklassniki"
     main_url = "https://odnoklassniki.ru"
 
+    # Birden fazla domain destekle
+    supported_domains = ["odnoklassniki.ru", "ok.ru"]
+
+    def can_handle_url(self, url: str) -> bool:
+        return any(domain in url for domain in self.supported_domains)
+
     async def extract(self, url, referer=None) -> ExtractResult:
         if "/video/" in url:
             url = url.replace("/video/", "/videoembed/")
```
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/PeaceMakerst.py

```diff
@@ -7,6 +7,12 @@ class PeaceMakerst(ExtractorBase):
     name = "PeaceMakerst"
     main_url = "https://peacemakerst.com"
 
+    # Birden fazla domain destekle
+    supported_domains = ["peacemakerst.com", "hdstreamable.com"]
+
+    def can_handle_url(self, url: str) -> bool:
+        return any(domain in url for domain in self.supported_domains)
+
     async def extract(self, url, referer=None) -> ExtractResult:
         if referer:
             self.httpx.headers.update({"Referer": referer})
```
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/PlayerFilmIzle.py

```diff
@@ -1,12 +1,15 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
 
 from KekikStream.Core import ExtractorBase, ExtractResult, Subtitle
-import re
+import re
 
 class PlayerFilmIzle(ExtractorBase):
     name = "PlayerFilmIzle"
     main_url = "https://player.filmizle.in"
 
+    def can_handle_url(self, url: str) -> bool:
+        return "filmizle.in" in url or "fireplayer" in url.lower()
+
     async def extract(self, url: str, referer: str = None) -> ExtractResult:
         # Kotlin tarafında referer mainUrl olarak zorlanmış
         ext_ref = self.main_url
@@ -29,20 +32,20 @@ class PlayerFilmIzle(ExtractorBase):
         # Data yakalama: FirePlayer|DATA|...
         data_match = re.search(r'FirePlayer\|([^|]+)\|', video_req, re.IGNORECASE)
         data_val = data_match.group(1) if data_match else None
-
+
         if not data_val:
             raise ValueError("PlayerFilmIzle: Data bulunamadı")
 
         url_post = f"{self.main_url}/player/index.php?data={data_val}&do=getVideo"
-
+
         post_headers = {
             "Referer": ext_ref,
             "X-Requested-With": "XMLHttpRequest"
         }
-
+
         # Kotlin'de post data: "hash" -> data, "r" -> ""
         post_data = {"hash": data_val, "r": ""}
-
+
         response = await self.httpx.post(url_post, data=post_data, headers=post_headers)
         get_url = response.text.replace("\\", "")
 
```
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/RapidVid.py

```diff
@@ -8,6 +8,12 @@ class RapidVid(ExtractorBase):
     name = "RapidVid"
     main_url = "https://rapidvid.net"
 
+    # Birden fazla domain destekle
+    supported_domains = ["rapidvid.net", "rapid.filmmakinesi.to"]
+
+    def can_handle_url(self, url: str) -> bool:
+        return any(domain in url for domain in self.supported_domains)
+
     async def extract(self, url, referer=None) -> ExtractResult:
         if referer:
             self.httpx.headers.update({"Referer": referer})
```
{kekikstream-1.9.9 → kekikstream-2.1.0}/KekikStream/Extractors/SetPlay.py

```diff
@@ -1,11 +1,18 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
 
 from KekikStream.Core import ExtractorBase, ExtractResult
+from urllib.parse import urlparse, parse_qs
 import re
 
 class SetPlay(ExtractorBase):
     name = "SetPlay"
-    main_url = "https://setplay.
+    main_url = "https://setplay.shop"
+
+    # Birden fazla domain destekle
+    supported_domains = ["setplay.cfd", "setplay.shop", "setplay.site"]
+
+    def can_handle_url(self, url: str) -> bool:
+        return any(domain in url for domain in self.supported_domains)
 
     async def extract(self, url, referer=None) -> ExtractResult:
         ext_ref = referer or ""
@@ -13,6 +20,9 @@ class SetPlay(ExtractorBase):
         if referer:
             self.httpx.headers.update({"Referer": referer})
 
+        # Dinamik base URL kullan
+        base_url = self.get_base_url(url)
+
         istek = await self.httpx.get(url)
         istek.raise_for_status()
 
@@ -33,7 +43,6 @@ class SetPlay(ExtractorBase):
         title_base = title_match[1].split(".")[-1] if title_match else "Unknown"
 
         # partKey logic
-        from urllib.parse import urlparse, parse_qs
         parsed = urlparse(url)
         params = parse_qs(parsed.query)
         part_key = params.get("partKey", [""])[0]
@@ -46,8 +55,8 @@ class SetPlay(ExtractorBase):
         else:
             name_suffix = title_base
 
-        # M3U8 link oluştur
-        m3u_link = f"{
+        # M3U8 link oluştur - base_url kullan (main_url yerine)
+        m3u_link = f"{base_url}{video_url}?s={video_server}"
 
         return ExtractResult(
             name = f"{self.name} - {name_suffix}",
```
kekikstream-2.1.0/KekikStream/Extractors/VCTPlay.py (new file)

```diff
@@ -0,0 +1,41 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from KekikStream.Core import ExtractorBase, ExtractResult
+from urllib.parse import urlparse, parse_qs
+
+class VCTPlay(ExtractorBase):
+    name = "VCTPlay"
+    main_url = "https://vctplay.site"
+
+    async def extract(self, url, referer=None) -> ExtractResult:
+        if referer:
+            self.httpx.headers.update({"Referer": referer})
+
+        # URL'den video ID'sini çıkar
+        # https://vctplay.site/video/2hjDGco5exdv -> 2hjDGco5exdv
+        video_id = url.split("/")[-1]
+        if "?" in video_id:
+            video_id = video_id.split("?")[0]
+
+        # Manifests URL oluştur
+        master_url = f"{self.main_url}/manifests/{video_id}/master.txt"
+
+        # partKey'den isim belirle
+        parsed = urlparse(url)
+        params = parse_qs(parsed.query)
+        part_key = params.get("partKey", [""])[0]
+
+        name_suffix = ""
+        if "turkcedublaj" in part_key.lower():
+            name_suffix = "Dublaj"
+        elif "turkcealtyazi" in part_key.lower():
+            name_suffix = "Altyazı"
+
+        display_name = f"{self.name} - {name_suffix}" if name_suffix else self.name
+
+        return ExtractResult(
+            name = display_name,
+            url = master_url,
+            referer = f"{self.main_url}/",
+            subtitles = []
+        )
```