KekikStream 2.3.9__py3-none-any.whl → 2.5.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- KekikStream/Core/Extractor/ExtractorBase.py +3 -2
- KekikStream/Core/Extractor/ExtractorLoader.py +8 -14
- KekikStream/Core/HTMLHelper.py +120 -49
- KekikStream/Core/Plugin/PluginBase.py +30 -12
- KekikStream/Core/Plugin/PluginLoader.py +12 -14
- KekikStream/Core/Plugin/PluginManager.py +2 -2
- KekikStream/Core/Plugin/PluginModels.py +0 -3
- KekikStream/Extractors/Abstream.py +27 -0
- KekikStream/Extractors/CloseLoad.py +30 -54
- KekikStream/Extractors/ContentX.py +27 -72
- KekikStream/Extractors/DonilasPlay.py +33 -77
- KekikStream/Extractors/DzenRu.py +10 -24
- KekikStream/Extractors/ExPlay.py +20 -38
- KekikStream/Extractors/Filemoon.py +21 -46
- KekikStream/Extractors/HDMomPlayer.py +30 -0
- KekikStream/Extractors/HDPlayerSystem.py +13 -31
- KekikStream/Extractors/HotStream.py +27 -0
- KekikStream/Extractors/JFVid.py +3 -24
- KekikStream/Extractors/JetTv.py +21 -34
- KekikStream/Extractors/JetV.py +55 -0
- KekikStream/Extractors/MailRu.py +11 -29
- KekikStream/Extractors/MixPlayHD.py +15 -28
- KekikStream/Extractors/MixTiger.py +17 -40
- KekikStream/Extractors/MolyStream.py +17 -21
- KekikStream/Extractors/Odnoklassniki.py +40 -104
- KekikStream/Extractors/PeaceMakerst.py +18 -45
- KekikStream/Extractors/PixelDrain.py +8 -16
- KekikStream/Extractors/PlayerFilmIzle.py +22 -41
- KekikStream/Extractors/RapidVid.py +21 -35
- KekikStream/Extractors/SetPlay.py +18 -43
- KekikStream/Extractors/SibNet.py +7 -17
- KekikStream/Extractors/Sobreatsesuyp.py +23 -45
- KekikStream/Extractors/TRsTX.py +23 -53
- KekikStream/Extractors/TurboImgz.py +7 -14
- KekikStream/Extractors/VCTPlay.py +10 -28
- KekikStream/Extractors/Veev.py +145 -0
- KekikStream/Extractors/VidBiz.py +62 -0
- KekikStream/Extractors/VidHide.py +58 -30
- KekikStream/Extractors/VidMoly.py +65 -99
- KekikStream/Extractors/VidMoxy.py +16 -27
- KekikStream/Extractors/VidPapi.py +24 -54
- KekikStream/Extractors/VideoSeyred.py +19 -40
- KekikStream/Extractors/Videostr.py +58 -0
- KekikStream/Extractors/Vidoza.py +18 -0
- KekikStream/Extractors/Vtbe.py +38 -0
- KekikStream/Extractors/YTDLP.py +2 -2
- KekikStream/Extractors/YildizKisaFilm.py +13 -31
- KekikStream/Extractors/Zeus.py +61 -0
- KekikStream/Plugins/BelgeselX.py +97 -77
- KekikStream/Plugins/DiziBox.py +28 -45
- KekikStream/Plugins/DiziMom.py +179 -0
- KekikStream/Plugins/DiziPal.py +95 -161
- KekikStream/Plugins/DiziYou.py +51 -147
- KekikStream/Plugins/Dizilla.py +40 -61
- KekikStream/Plugins/FilmBip.py +90 -39
- KekikStream/Plugins/FilmEkseni.py +199 -0
- KekikStream/Plugins/FilmMakinesi.py +72 -73
- KekikStream/Plugins/FilmModu.py +25 -35
- KekikStream/Plugins/Filmatek.py +184 -0
- KekikStream/Plugins/FilmciBaba.py +155 -0
- KekikStream/Plugins/FullHDFilmizlesene.py +16 -37
- KekikStream/Plugins/HDFilm.py +243 -0
- KekikStream/Plugins/HDFilmCehennemi.py +242 -189
- KekikStream/Plugins/JetFilmizle.py +101 -69
- KekikStream/Plugins/KultFilmler.py +138 -104
- KekikStream/Plugins/RecTV.py +52 -73
- KekikStream/Plugins/RoketDizi.py +18 -27
- KekikStream/Plugins/SelcukFlix.py +30 -48
- KekikStream/Plugins/SetFilmIzle.py +76 -104
- KekikStream/Plugins/SezonlukDizi.py +90 -94
- KekikStream/Plugins/Sinefy.py +195 -167
- KekikStream/Plugins/SinemaCX.py +148 -78
- KekikStream/Plugins/Sinezy.py +29 -31
- KekikStream/Plugins/SuperFilmGeldi.py +12 -17
- KekikStream/Plugins/UgurFilm.py +85 -38
- KekikStream/Plugins/Watch32.py +160 -0
- KekikStream/Plugins/YabanciDizi.py +176 -211
- {kekikstream-2.3.9.dist-info → kekikstream-2.5.3.dist-info}/METADATA +1 -1
- kekikstream-2.5.3.dist-info/RECORD +99 -0
- {kekikstream-2.3.9.dist-info → kekikstream-2.5.3.dist-info}/WHEEL +1 -1
- KekikStream/Plugins/FullHDFilm.py +0 -249
- kekikstream-2.3.9.dist-info/RECORD +0 -84
- {kekikstream-2.3.9.dist-info → kekikstream-2.5.3.dist-info}/entry_points.txt +0 -0
- {kekikstream-2.3.9.dist-info → kekikstream-2.5.3.dist-info}/licenses/LICENSE +0 -0
- {kekikstream-2.3.9.dist-info → kekikstream-2.5.3.dist-info}/top_level.txt +0 -0
KekikStream/Core/Extractor/ExtractorBase.py
CHANGED

@@ -49,6 +49,7 @@ class ExtractorBase(ABC):
             return ""

         if url.startswith("http") or url.startswith("{\""):
-            return url
+            return url.replace("\\", "")

-
+        url = f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
+        return url.replace("\\", "")
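The reworked fix_url above (the same change lands in PluginBase further down) now strips escaped JSON slashes and resolves protocol-relative and relative links against main_url. A minimal sketch of the resulting behaviour, with a hypothetical _Demo class and made-up URLs:

from urllib.parse import urljoin

class _Demo:
    main_url = "https://example.com"

    # Mirrors the new fix_url body from the hunk above; the class itself is illustrative.
    def fix_url(self, url: str) -> str:
        if not url:
            return ""
        if url.startswith("http") or url.startswith("{\""):
            return url.replace("\\", "")
        url = f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
        return url.replace("\\", "")

demo = _Demo()
print(demo.fix_url("https:\\/\\/cdn.example.com\\/video.m3u8"))  # escaped JSON slashes are stripped
print(demo.fix_url("//cdn.example.com/master.m3u8"))             # protocol-relative URLs get https:
print(demo.fix_url("/izle/film-1"))                              # relative paths resolve against main_url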
KekikStream/Core/Extractor/ExtractorLoader.py
CHANGED

@@ -9,7 +9,7 @@ class ExtractorLoader:
     def __init__(self, extractors_dir: str):
         # Yerel ve global çıkarıcı dizinlerini ayarla
         self.local_extractors_dir = Path(extractors_dir)
-        self.global_extractors_dir = Path(__file__).parent.parent.parent /
+        self.global_extractors_dir = Path(__file__).parent.parent.parent / "Extractors"

         # Dizin kontrolü
         if not self.local_extractors_dir.exists() and not self.global_extractors_dir.exists():
@@ -19,22 +19,16 @@ class ExtractorLoader:
     def load_all(self) -> list[ExtractorBase]:
         extractors = []

-        #
-
+        # Yerel Extractor'lar varsa önce onları yükle (ek/öncelikli yetenekler)
+        # Eğer yerel dizin global dizinle aynıysa (örn: doğrudan core'da çalışırken) tekrar yükleme yapma
+        if self.local_extractors_dir.exists() and self.local_extractors_dir.resolve() != self.global_extractors_dir.resolve():
             # konsol.log(f"[green][*] Yerel Extractor dizininden yükleniyor: {self.local_extractors_dir}[/green]")
-
-            # konsol.log(f"[green]Yerel Extractor'lar: {[e.__name__ for e in local_extractors]}[/green]")
+            extractors.extend(self._load_from_directory(self.local_extractors_dir))

-
-
-            extractors.extend(local_extractors)
-
-        # Yerel dizinde Extractor yoksa, global'leri yükle
-        if not extractors and self.global_extractors_dir.exists():
+        # Global Extractor'ları her zaman yükle (temel yetenekler)
+        if self.global_extractors_dir.exists():
             # konsol.log(f"[green][*] Global Extractor dizininden yükleniyor: {self.global_extractors_dir}[/green]")
-
-            # konsol.log(f"[green]Global Extractor'lar: {[e.__name__ for e in global_extractors]}[/green]")
-            extractors.extend(global_extractors)
+            extractors.extend(self._load_from_directory(self.global_extractors_dir))

         # Benzersizliği sağlama (modül adı + sınıf adı bazında)
         unique_extractors = []
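The rewritten load_all now loads local Extractors first, always adds the global ones, and then deduplicates by module plus class name. A standalone sketch of that merge step; merge_unique is an illustrative helper, not part of the package:

def merge_unique(local_classes: list[type], global_classes: list[type]) -> list[type]:
    # Benzersizlik: modül adı + sınıf adı bazında (yerel olanlar öncelikli kalır)
    seen: set[tuple[str, str]] = set()
    unique: list[type] = []
    for cls in [*local_classes, *global_classes]:
        key = (cls.__module__, cls.__name__)
        if key not in seen:
            seen.add(key)
            unique.append(cls)
    return unique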
KekikStream/Core/HTMLHelper.py
CHANGED

@@ -1,7 +1,9 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from
+from __future__ import annotations
+
 import re
+from selectolax.parser import HTMLParser, Node


 class HTMLHelper:
@@ -10,96 +12,169 @@ class HTMLHelper:
     """

     def __init__(self, html: str):
-        self.parser = HTMLParser(html)
         self.html = html
+        self.parser = HTMLParser(html)

     # ========================
-    #
+    # SELECTOR (CSS) İŞLEMLERİ
     # ========================

-    def
+    def _root(self, element: Node | None) -> Node | HTMLParser:
         """İşlem yapılacak temel elementi döndürür."""
         return element if element is not None else self.parser

     def select(self, selector: str, element: Node | None = None) -> list[Node]:
         """CSS selector ile tüm eşleşen elementleri döndür."""
-        return self.
+        return self._root(element).css(selector)

     def select_first(self, selector: str | None, element: Node | None = None) -> Node | None:
         """CSS selector ile ilk eşleşen elementi döndür."""
         if not selector:
             return element
+        return self._root(element).css_first(selector)

-
-
-    def select_text(self, selector: str | None = None, element: Node | None = None, strip: bool = True) -> str | None:
+    def select_text(self, selector: str | None = None, element: Node | None = None) -> str | None:
         """CSS selector ile element bul ve text içeriğini döndür."""
         el = self.select_first(selector, element)
         if not el:
             return None
+        val = el.text(strip=True)
+        return val or None

-
-
+    def select_texts(self, selector: str, element: Node | None = None) -> list[str] | None:
+        """CSS selector ile tüm eşleşen elementlerin text içeriklerini döndür."""
+        out: list[str] = []
+        for el in self.select(selector, element):
+            txt = el.text(strip=True)
+            if txt:
+                out.append(txt)
+        return out or None

     def select_attr(self, selector: str | None, attr: str, element: Node | None = None) -> str | None:
         """CSS selector ile element bul ve attribute değerini döndür."""
         el = self.select_first(selector, element)
         return el.attrs.get(attr) if el else None

-    def
-        """CSS selector ile tüm eşleşen elementlerin text içeriklerini döndür."""
-        return [
-            txt for el in self.select(selector, element)
-            if (txt := el.text(strip=strip))
-        ]
-
-    def select_all_attr(self, selector: str, attr: str, element: Node | None = None) -> list[str]:
+    def select_attrs(self, selector: str, attr: str, element: Node | None = None) -> list[str]:
         """CSS selector ile tüm eşleşen elementlerin attribute değerlerini döndür."""
-
-
-
-
-
-
+        out: list[str] = []
+        for el in self.select(selector, element):
+            val = el.attrs.get(attr)
+            if val:
+                out.append(val)
+        return out

     def select_poster(self, selector: str = "img", element: Node | None = None) -> str | None:
         """Poster URL'sini çıkar. Önce data-src, sonra src dener."""
         el = self.select_first(selector, element)
         if not el:
             return None
-
         return el.attrs.get("data-src") or el.attrs.get("src")

+    def select_direct_text(self, selector: str, element: Node | None = None) -> str | None:
+        """
+        Elementin yalnızca "kendi" düz metnini döndürür (child elementlerin text'ini katmadan).
+        """
+        el = self.select_first(selector, element)
+        if not el:
+            return None
+
+        # type: ignore[call-arg]
+        val = el.text(strip=True, deep=False)
+        return val or None
+
+    # ========================
+    # META (LABEL -> VALUE) İŞLEMLERİ
+    # ========================
+
+    def meta_value(self, label: str, container_selector: str | None = None) -> str | None:
+        """
+        Herhangi bir container içinde: LABEL metnini içeren bir elementten SONRA gelen metni döndürür.
+        label örn: "Oyuncular", "Yapım Yılı", "IMDB"
+        """
+        needle = label.casefold()
+
+        # Belirli bir container varsa içinde ara, yoksa tüm dökümanda
+        targets = self.select(container_selector) if container_selector else [self.parser.body]
+
+        for root in targets:
+            if not root: continue
+
+            # Kalın/vurgulu elementlerde (span, strong, b, label, dt) label'ı ara
+            for label_el in self.select("span, strong, b, label, dt", root):
+                txt = (label_el.text(strip=True) or "").casefold()
+                if needle not in txt:
+                    continue
+
+                # 1) Elementin kendi içindeki text'te LABEL: VALUE formatı olabilir
+                #    "Oyuncular: Brad Pitt" gibi. LABEL: sonrasını al.
+                full_txt = label_el.text(strip=True)
+                if ":" in full_txt and needle in full_txt.split(":")[0].casefold():
+                    val = full_txt.split(":", 1)[1].strip()
+                    if val: return val
+
+                # 2) Label sonrası gelen ilk text node'u veya element'i al
+                curr = label_el.next
+                while curr:
+                    if curr.tag == "-text":
+                        val = curr.text(strip=True).strip(" :")
+                        if val: return val
+                    elif curr.tag != "br":
+                        val = curr.text(strip=True).strip(" :")
+                        if val: return val
+                    else:  # <br> gördüysek satır bitmiştir
+                        break
+                    curr = curr.next
+
+        return None
+
+    def meta_list(self, label: str, container_selector: str | None = None, sep: str = ",") -> list[str]:
+        """meta_value(...) çıktısını veya label'ın ebeveynindeki linkleri listeye döndürür."""
+        needle = label.casefold()
+        targets = self.select(container_selector) if container_selector else [self.parser.body]
+
+        for root in targets:
+            if not root: continue
+            for label_el in self.select("span, strong, b, label, dt", root):
+                if needle in (label_el.text(strip=True) or "").casefold():
+                    # Eğer elementin ebeveyninde linkler varsa (Kutucuklu yapı), onları al
+                    links = self.select_texts("a", label_el.parent)
+                    if links: return links
+
+                    # Yoksa düz metin olarak meta_value mantığıyla al
+                    raw = self.meta_value(label, container_selector=container_selector)
+                    if not raw: return []
+                    return [x.strip() for x in raw.split(sep) if x.strip()]
+
+        return []
+
     # ========================
     # REGEX İŞLEMLERİ
     # ========================

-    def
+    def _regex_source(self, target: str | int | None) -> str:
         """Regex için kaynak metni döndürür."""
         return target if isinstance(target, str) else self.html

-    def
-        """Regex
-
-
-    def regex_first(self, pattern: str, target: str | int | None = None, flags: int = 0) -> str | None:
-        """Regex ile arama yap, ilk grubu döndür (grup yoksa tamamını)."""
-        match = re.search(pattern, self._source(target), self._flags(target, flags))
+    def regex_first(self, pattern: str, target: str | int | None = None, group: int | None = 1) -> str | tuple | None:
+        """Regex ile arama yap, istenen grubu döndür (group=None ise tüm grupları tuple olarak döndür)."""
+        match = re.search(pattern, self._regex_source(target))
         if not match:
             return None
-
-
-        return match.
-
-
-
-
+
+        if group is None:
+            return match.groups()
+
+        last_idx = match.lastindex or 0
+        return match.group(group) if last_idx >= group else match.group(0)
+
+    def regex_all(self, pattern: str, target: str | int | None = None) -> list[str] | list[tuple]:
         """Regex ile tüm eşleşmeleri döndür."""
-        return re.findall(pattern, self.
+        return re.findall(pattern, self._regex_source(target))

-    def regex_replace(self, pattern: str, repl: str, target: str | int | None = None
+    def regex_replace(self, pattern: str, repl: str, target: str | int | None = None) -> str:
         """Regex ile replace yap."""
-        return re.sub(pattern, repl, self.
+        return re.sub(pattern, repl, self._regex_source(target))

     # ========================
     # ÖZEL AYIKLAYICILAR
@@ -108,15 +183,12 @@ class HTMLHelper:
     @staticmethod
     def extract_season_episode(text: str) -> tuple[int | None, int | None]:
         """Metin içinden sezon ve bölüm numarasını çıkar."""
-        # S01E05 formatı
         if m := re.search(r"[Ss](\d+)[Ee](\d+)", text):
             return int(m.group(1)), int(m.group(2))

-
-
-        e = re.search(r"(\d+)\.\s*[Bb]ölüm|[Bb]olum[- ]?(\d+)|-(\d+)-bolum|[Ee](\d+)", text, re.I)
+        s = re.search(r"(\d+)\.\s*[Ss]ezon|[Ss]ezon[- ]?(\d+)|-(\d+)-sezon|S(\d+)|(\d+)\.[Ss]", text, re.I)
+        e = re.search(r"(\d+)\.\s*[Bb][öo]l[üu]m|[Bb][öo]l[üu]m[- ]?(\d+)|-(\d+)-bolum|[Ee](\d+)", text, re.I)

-        # İlk bulunan grubu al (None değilse)
         s_val = next((int(g) for g in s.groups() if g), None) if s else None
         e_val = next((int(g) for g in e.groups() if g), None) if e else None

@@ -131,4 +203,3 @@ class HTMLHelper:

         val = self.regex_first(pattern)
         return int(val) if val and val.isdigit() else None
-
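The new metadata helpers and the widened season/episode patterns can be exercised directly. The HTML snippet and the expected values below are made up; the import path is the one used by the extractors in this release:

from KekikStream.Core import HTMLHelper

html = """
<div class="info">
  <span>Yapım Yılı:</span> 2021<br>
  <span>Oyuncular</span> <a>Brad Pitt</a> <a>Margot Robbie</a>
</div>
"""

helper = HTMLHelper(html)
print(helper.meta_value("Yapım Yılı", container_selector="div.info"))  # -> "2021"
print(helper.meta_list("Oyuncular", container_selector="div.info"))    # -> ["Brad Pitt", "Margot Robbie"]

print(HTMLHelper.extract_season_episode("Dizi S02E08"))           # -> (2, 8)
print(HTMLHelper.extract_season_episode("dizi-2-sezon-5-bolum"))  # -> (2, 5)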
KekikStream/Core/Plugin/PluginBase.py
CHANGED

@@ -25,7 +25,7 @@ class PluginBase(ABC):
         self.main_page = {url.replace(self.main_url, new_url): category for url, category in self.main_page.items()}
         self.main_url = new_url

-    def __init__(self, proxy: str | dict | None = None):
+    def __init__(self, proxy: str | dict | None = None, extractor_dir: str = "Extractors"):
         # cloudscraper - for bypassing Cloudflare
         self.cloudscraper = CloudScraper()
         if proxy:
@@ -50,7 +50,7 @@ class PluginBase(ABC):
         })

         self.media_handler = MediaHandler()
-        self.ex_manager = ExtractorManager()
+        self.ex_manager = ExtractorManager(extractor_dir=extractor_dir)

     @abstractmethod
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
@@ -101,11 +101,18 @@ class PluginBase(ABC):
             return ""

         if url.startswith("http") or url.startswith("{\""):
-            return url
-
-
-
-
+            return url.replace("\\", "")
+
+        url = f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
+        return url.replace("\\", "")
+
+    async def extract(
+        self,
+        url: str,
+        referer: str = None,
+        prefix: str | None = None,
+        name_override: str | None = None
+    ) -> ExtractResult | list[ExtractResult] | None:
         """
         Extractor ile video URL'sini çıkarır.

@@ -113,6 +120,7 @@ class PluginBase(ABC):
             url: Iframe veya video URL'si
             referer: Referer header (varsayılan: plugin main_url)
             prefix: İsmin başına eklenecek opsiyonel etiket (örn: "Türkçe Dublaj")
+            name_override: İsmi tamamen değiştirecek opsiyonel etiket (Extractor adını ezer)

         Returns:
             ExtractResult: Extractor sonucu (name prefix ile birleştirilmiş) veya None
@@ -130,16 +138,26 @@ class PluginBase(ABC):
         try:
             data = await extractor.extract(url, referer=referer)

-            # Liste ise her bir öğe için prefix ekle
+            # Liste ise her bir öğe için prefix/override ekle
             if isinstance(data, list):
                 for item in data:
-                    if
-                        item.name =
+                    if name_override:
+                        item.name = name_override
+                    elif prefix and item.name:
+                        if item.name.lower() in prefix.lower():
+                            item.name = prefix
+                        else:
+                            item.name = f"{prefix} | {item.name}"
                 return data

             # Tekil öğe ise
-            if
-                data.name =
+            if name_override:
+                data.name = name_override
+            elif prefix and data.name:
+                if data.name.lower() in prefix.lower():
+                    data.name = prefix
+                else:
+                    data.name = f"{prefix} | {data.name}"

             return data
         except Exception as hata:
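The prefix / name_override precedence that extract now applies can be reduced to one small function; resolve_name below is a hypothetical wrapper that only mirrors the naming branch from the hunk above:

def resolve_name(extractor_name: str, prefix: str | None = None, name_override: str | None = None) -> str:
    # name_override her şeyi ezer; prefix ise extractor adıyla birleştirilir
    if name_override:
        return name_override
    if prefix and extractor_name:
        if extractor_name.lower() in prefix.lower():
            return prefix  # ad zaten prefix içinde geçiyorsa tekrarlama
        return f"{prefix} | {extractor_name}"
    return extractor_name

print(resolve_name("RapidVid", prefix="Türkçe Dublaj"))            # -> "Türkçe Dublaj | RapidVid"
print(resolve_name("RapidVid", prefix="RapidVid 1080p"))           # -> "RapidVid 1080p"
print(resolve_name("RapidVid", name_override="Bölüm 5 - Dublaj"))  # -> "Bölüm 5 - Dublaj"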
KekikStream/Core/Plugin/PluginLoader.py
CHANGED

@@ -6,11 +6,12 @@ from pathlib import Path
 import os, importlib.util, traceback

 class PluginLoader:
-    def __init__(self, plugins_dir: str, proxy: str | dict | None = None):
+    def __init__(self, plugins_dir: str, proxy: str | dict | None = None, extractor_dir: str = "Extractors"):
         # Yerel ve global eklenti dizinlerini ayarla
         self.proxy = proxy
+        self.extractor_dir = extractor_dir
         self.local_plugins_dir = Path(plugins_dir).resolve()
-        self.global_plugins_dir = Path(__file__).parent.parent.parent /
+        self.global_plugins_dir = Path(__file__).parent.parent.parent / "Plugins"

         # Dizin kontrolü
         if not self.local_plugins_dir.exists() and not self.global_plugins_dir.exists():
@@ -18,19 +19,16 @@ class PluginLoader:
             cikis_yap(False)

     def load_all(self) -> dict[str, PluginBase]:
-        plugins
+        plugins = {}
+        local_dir_exists = self.local_plugins_dir.exists() and self.local_plugins_dir.resolve() != self.global_plugins_dir.resolve()

-        # Eğer yerel
-        if
+        # Eğer yerel dizin varsa, sadece oradan yükle (eklenti geliştirme/yayınlama modu)
+        if local_dir_exists:
             # konsol.log(f"[green][*] Yerel Eklenti dizininden yükleniyor: {self.local_plugins_dir}[/green]")
-
-
-
-
-            plugins |= local_plugins
-
-        # Yerel dizinde Plugin yoksa, global'leri yükle
-        if not plugins and self.global_plugins_dir.exists():
+            plugins |= self._load_from_directory(self.local_plugins_dir)
+
+        # Yerel dizin yoksa (veya core ile aynı yerse), global'leri yükle
+        else:
             # konsol.log(f"[green][*] Global Eklenti dizininden yükleniyor: {self.global_plugins_dir}[/green]")
             plugins |= self._load_from_directory(self.global_plugins_dir)

@@ -71,7 +69,7 @@ class PluginLoader:
                 obj = getattr(module, attr)
                 if isinstance(obj, type) and issubclass(obj, PluginBase) and obj is not PluginBase:
                     # konsol.log(f"[yellow]Yüklenen sınıf\t\t: {module_name}.{obj.__name__} ({obj.__module__}.{obj.__name__})[/yellow]")
-                    return obj(proxy=self.proxy)
+                    return obj(proxy=self.proxy, extractor_dir=self.extractor_dir)

             except Exception as hata:
                 konsol.print(f"[red][!] Eklenti yüklenirken hata oluştu: {module_name}\nHata: {hata}")
KekikStream/Core/Plugin/PluginManager.py
CHANGED

@@ -4,9 +4,9 @@ from .PluginLoader import PluginLoader
 from .PluginBase import PluginBase

 class PluginManager:
-    def __init__(self, plugin_dir="Plugins", proxy: str | dict | None = None):
+    def __init__(self, plugin_dir="Plugins", proxy: str | dict | None = None, extractor_dir: str = "Extractors"):
         # Eklenti yükleyiciyi başlat ve tüm eklentileri yükle
-        self.plugin_loader = PluginLoader(plugin_dir, proxy=proxy)
+        self.plugin_loader = PluginLoader(plugin_dir, proxy=proxy, extractor_dir=extractor_dir)
         self.plugins = self.plugin_loader.load_all()

     def get_plugin_names(self):
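With extractor_dir now threaded from PluginManager through PluginLoader into every PluginBase (and its ExtractorManager), callers can point plugins at a custom extractor directory. The constructor arguments below match the hunks above; the import path is an assumption:

from KekikStream.Core import PluginManager  # assumed import path

manager = PluginManager(
    plugin_dir    = "Plugins",     # yerel eklenti dizini
    proxy         = None,
    extractor_dir = "Extractors",  # her eklentiye ve onun ExtractorManager'ına iletilir
)
print(manager.get_plugin_names())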
KekikStream/Extractors/Abstream.py
ADDED

@@ -0,0 +1,27 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from KekikStream.Core import ExtractorBase, ExtractResult, HTMLHelper
+
+class Abstream(ExtractorBase):
+    name = "Abstream"
+    main_url = "https://abstream.to"
+
+    async def extract(self, url: str, referer: str = None) -> ExtractResult:
+        istek = await self.httpx.get(
+            url = url,
+            headers = {
+                "Accept-Language" : "en-US,en;q=0.5",
+                "Referer" : referer or self.main_url,
+            }
+        )
+        secici = HTMLHelper(istek.text)
+        video_url = secici.regex_first(r'file:"([^"]*)"')
+
+        if not video_url:
+            raise ValueError(f"Abstream: Video URL bulunamadı. {url}")
+
+        return ExtractResult(
+            name = self.name,
+            url = video_url,
+            referer = referer or self.main_url
+        )
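A quick sanity check of the file:"..." pattern the new Abstream extractor relies on, run against a made-up player snippet:

import re

html = 'jwplayer("vplayer").setup({sources: [{file:"https://abstream.to/hls/abc123/master.m3u8"}]});'

if m := re.search(r'file:"([^"]*)"', html):
    print(m.group(1))  # -> https://abstream.to/hls/abc123/master.m3u8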
KekikStream/Extractors/CloseLoad.py
CHANGED

@@ -2,75 +2,51 @@

 from KekikStream.Core import ExtractorBase, ExtractResult, Subtitle, HTMLHelper
 from Kekik.Sifreleme import Packer, StreamDecoder
-import json
+import json, contextlib

-class
+class CloseLoad(ExtractorBase):
     name = "CloseLoad"
     main_url = "https://closeload.filmmakinesi.to"

-    def
-        """JSON-LD script tag'inden contentUrl'i çıkar (Kotlin versiyonundaki gibi)"""
-        secici = HTMLHelper(html)
-        for script in secici.select("script[type='application/ld+json']"):
-            try:
-                data = json.loads(script.text(strip=True))
-                if content_url := data.get("contentUrl"):
-                    if content_url.startswith("http"):
-                        return content_url
-            except (json.JSONDecodeError, TypeError):
-                # Regex ile contentUrl'i çıkarmayı dene
-                if content_url := secici.regex_first(r'"contentUrl"\s*:\s*"([^\"]+)"', script.text()):
-                    if content_url.startswith("http"):
-                        return content_url
-        return None
-
-    def _extract_from_packed(self, html: str) -> str | None:
-        """Packed JavaScript'ten video URL'sini çıkar (fallback)"""
-        try:
-            packed = HTMLHelper(html).regex_all(r'\s*(eval\(function[\s\S].*)')
-            if packed:
-                return StreamDecoder.extract_stream_url(Packer.unpack(packed[0]))
-        except Exception:
-            pass
-        return None
-
-    async def extract(self, url, referer=None) -> ExtractResult:
-        if referer:
-            self.httpx.headers.update({"Referer": referer})
-
+    async def extract(self, url: str, referer: str = None) -> ExtractResult:
         self.httpx.headers.update({
-            "
-            "Origin": self.main_url
+            "Referer" : referer or self.main_url,
+            "Origin" : self.main_url
         })

-
-
+        resp = await self.httpx.get(url)
+        sel = HTMLHelper(resp.text)

-        #
-
+        # 1. JSON-LD'den Dene
+        m3u8_url = None
+        for script in sel.select("script[type='application/ld+json']"):
+            with contextlib.suppress(Exception):
+                data = json.loads(script.text(strip=True))
+                if content_url := data.get("contentUrl"):
+                    if content_url.startswith("http"):
+                        m3u8_url = content_url
+                        break

-        #
-        if not
-
+        # 2. Packed Script Fallback
+        if not m3u8_url:
+            if packed := sel.regex_first(r"(eval\(function\(p,a,c,k,e,d\).+?)\s*</script>"):
+                m3u8_url = StreamDecoder.extract_stream_url(Packer.unpack(packed))

-        if not
-        raise
+        if not m3u8_url:
+            raise ValueError(f"CloseLoad: Video URL bulunamadı. {url}")

-        # Subtitle'ları parse et (Kotlin referansı: track elementleri)
         subtitles = []
-
-
-
-
-
-
-
-            full_url = raw_src if raw_src.startswith("http") else f"{self.main_url}{raw_src}"
-            subtitles.append(Subtitle(name=label, url=full_url))
+        for track in sel.select("track"):
+            src = track.attrs.get("src")
+            if src:
+                subtitles.append(Subtitle(
+                    name = track.attrs.get("label") or track.attrs.get("srclang") or "Altyazı",
+                    url = self.fix_url(src)
+                ))

         return ExtractResult(
             name = self.name,
-            url =
+            url = m3u8_url,
             referer = self.main_url,
             subtitles = subtitles
         )
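The rewritten CloseLoad extractor tries the JSON-LD contentUrl first and only then falls back to the packed eval() script. A reduced sketch of that first step, with invented script payloads:

import contextlib, json

scripts = [
    "not json at all",
    '{"@type": "VideoObject", "contentUrl": "https://closeload.filmmakinesi.to/hls/film/master.m3u8"}',
]

m3u8_url = None
for raw in scripts:
    with contextlib.suppress(Exception):
        data = json.loads(raw)
        if (content_url := data.get("contentUrl")) and content_url.startswith("http"):
            m3u8_url = content_url
            break

print(m3u8_url)  # if this stays None, the extractor unpacks the eval(function(p,a,c,k,e,d)...) script instead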