KekikStream 2.2.9__py3-none-any.whl → 2.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. KekikStream/Core/Extractor/ExtractorBase.py +3 -2
  2. KekikStream/Core/Extractor/ExtractorLoader.py +8 -14
  3. KekikStream/Core/HTMLHelper.py +205 -0
  4. KekikStream/Core/Plugin/PluginBase.py +48 -12
  5. KekikStream/Core/Plugin/PluginLoader.py +13 -14
  6. KekikStream/Core/Plugin/PluginManager.py +2 -2
  7. KekikStream/Core/Plugin/PluginModels.py +0 -3
  8. KekikStream/Core/__init__.py +2 -0
  9. KekikStream/Extractors/Abstream.py +27 -0
  10. KekikStream/Extractors/CloseLoad.py +31 -56
  11. KekikStream/Extractors/ContentX.py +28 -71
  12. KekikStream/Extractors/DonilasPlay.py +34 -78
  13. KekikStream/Extractors/DzenRu.py +11 -25
  14. KekikStream/Extractors/ExPlay.py +20 -38
  15. KekikStream/Extractors/Filemoon.py +23 -53
  16. KekikStream/Extractors/HDMomPlayer.py +30 -0
  17. KekikStream/Extractors/HDPlayerSystem.py +13 -31
  18. KekikStream/Extractors/HotStream.py +27 -0
  19. KekikStream/Extractors/JFVid.py +3 -24
  20. KekikStream/Extractors/JetTv.py +21 -34
  21. KekikStream/Extractors/JetV.py +55 -0
  22. KekikStream/Extractors/MailRu.py +11 -29
  23. KekikStream/Extractors/MixPlayHD.py +17 -31
  24. KekikStream/Extractors/MixTiger.py +17 -40
  25. KekikStream/Extractors/MolyStream.py +25 -22
  26. KekikStream/Extractors/Odnoklassniki.py +41 -105
  27. KekikStream/Extractors/PeaceMakerst.py +20 -47
  28. KekikStream/Extractors/PixelDrain.py +9 -16
  29. KekikStream/Extractors/PlayerFilmIzle.py +23 -46
  30. KekikStream/Extractors/RapidVid.py +23 -36
  31. KekikStream/Extractors/SetPlay.py +19 -44
  32. KekikStream/Extractors/SetPrime.py +3 -6
  33. KekikStream/Extractors/SibNet.py +8 -19
  34. KekikStream/Extractors/Sobreatsesuyp.py +25 -47
  35. KekikStream/Extractors/TRsTX.py +25 -55
  36. KekikStream/Extractors/TurboImgz.py +8 -16
  37. KekikStream/Extractors/TurkeyPlayer.py +5 -5
  38. KekikStream/Extractors/VCTPlay.py +10 -28
  39. KekikStream/Extractors/Veev.py +145 -0
  40. KekikStream/Extractors/VidBiz.py +62 -0
  41. KekikStream/Extractors/VidHide.py +59 -34
  42. KekikStream/Extractors/VidMoly.py +67 -89
  43. KekikStream/Extractors/VidMoxy.py +17 -29
  44. KekikStream/Extractors/VidPapi.py +26 -58
  45. KekikStream/Extractors/VideoSeyred.py +21 -42
  46. KekikStream/Extractors/Videostr.py +58 -0
  47. KekikStream/Extractors/Vidoza.py +18 -0
  48. KekikStream/Extractors/Vtbe.py +38 -0
  49. KekikStream/Extractors/YTDLP.py +2 -2
  50. KekikStream/Extractors/YildizKisaFilm.py +13 -31
  51. KekikStream/Extractors/Zeus.py +61 -0
  52. KekikStream/Plugins/BelgeselX.py +108 -99
  53. KekikStream/Plugins/DiziBox.py +61 -106
  54. KekikStream/Plugins/DiziMom.py +179 -0
  55. KekikStream/Plugins/DiziPal.py +104 -192
  56. KekikStream/Plugins/DiziYou.py +66 -149
  57. KekikStream/Plugins/Dizilla.py +93 -126
  58. KekikStream/Plugins/FilmBip.py +102 -72
  59. KekikStream/Plugins/FilmEkseni.py +199 -0
  60. KekikStream/Plugins/FilmMakinesi.py +101 -64
  61. KekikStream/Plugins/FilmModu.py +35 -59
  62. KekikStream/Plugins/Filmatek.py +184 -0
  63. KekikStream/Plugins/FilmciBaba.py +155 -0
  64. KekikStream/Plugins/FullHDFilmizlesene.py +32 -78
  65. KekikStream/Plugins/HDFilm.py +243 -0
  66. KekikStream/Plugins/HDFilmCehennemi.py +261 -222
  67. KekikStream/Plugins/JetFilmizle.py +117 -98
  68. KekikStream/Plugins/KultFilmler.py +153 -143
  69. KekikStream/Plugins/RecTV.py +53 -49
  70. KekikStream/Plugins/RoketDizi.py +92 -123
  71. KekikStream/Plugins/SelcukFlix.py +86 -95
  72. KekikStream/Plugins/SetFilmIzle.py +105 -143
  73. KekikStream/Plugins/SezonlukDizi.py +106 -128
  74. KekikStream/Plugins/Sinefy.py +194 -166
  75. KekikStream/Plugins/SinemaCX.py +159 -113
  76. KekikStream/Plugins/Sinezy.py +44 -73
  77. KekikStream/Plugins/SuperFilmGeldi.py +28 -52
  78. KekikStream/Plugins/UgurFilm.py +94 -72
  79. KekikStream/Plugins/Watch32.py +160 -0
  80. KekikStream/Plugins/YabanciDizi.py +250 -0
  81. {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/METADATA +1 -1
  82. kekikstream-2.5.3.dist-info/RECORD +99 -0
  83. {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/WHEEL +1 -1
  84. KekikStream/Plugins/FullHDFilm.py +0 -254
  85. kekikstream-2.2.9.dist-info/RECORD +0 -82
  86. {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/entry_points.txt +0 -0
  87. {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/licenses/LICENSE +0 -0
  88. {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/top_level.txt +0 -0
KekikStream/Core/Extractor/ExtractorBase.py
@@ -49,6 +49,7 @@ class ExtractorBase(ABC):
             return ""

         if url.startswith("http") or url.startswith("{\""):
-            return url
+            return url.replace("\\", "")

-        return f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
+        url = f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
+        return url.replace("\\", "")
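The practical effect of the change above is that fix_url now also strips backslash escaping, which shows up when URLs are lifted out of embedded JSON. A minimal standalone sketch of the new behaviour (the function is re-implemented outside the class purely for illustration, and the main_url value is made up):

from urllib.parse import urljoin

def fix_url(url: str, main_url: str = "https://example.com") -> str:
    if not url:
        return ""
    if url.startswith("http") or url.startswith("{\""):
        return url.replace("\\", "")  # unescape JSON-style https:\/\/host\/path
    url = f"https:{url}" if url.startswith("//") else urljoin(main_url, url)
    return url.replace("\\", "")

print(fix_url("https:\\/\\/cdn.example.com\\/video.m3u8"))  # https://cdn.example.com/video.m3u8
print(fix_url("//cdn.example.com/video.m3u8"))              # https://cdn.example.com/video.m3u8

The same two-line change is mirrored in PluginBase.fix_url further down.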
KekikStream/Core/Extractor/ExtractorLoader.py
@@ -9,7 +9,7 @@ class ExtractorLoader:
     def __init__(self, extractors_dir: str):
         # Yerel ve global çıkarıcı dizinlerini ayarla
         self.local_extractors_dir = Path(extractors_dir)
-        self.global_extractors_dir = Path(__file__).parent.parent.parent / extractors_dir
+        self.global_extractors_dir = Path(__file__).parent.parent.parent / "Extractors"

         # Dizin kontrolü
         if not self.local_extractors_dir.exists() and not self.global_extractors_dir.exists():
@@ -19,22 +19,16 @@ class ExtractorLoader:
     def load_all(self) -> list[ExtractorBase]:
         extractors = []

-        # Eğer yerel dizinde Extractor varsa, sadece onları yükle (eklenti geliştirme modu)
-        if self.local_extractors_dir.exists():
+        # Yerel Extractor'lar varsa önce onları yükle (ek/öncelikli yetenekler)
+        # Eğer yerel dizin global dizinle aynıysa (örn: doğrudan core'da çalışırken) tekrar yükleme yapma
+        if self.local_extractors_dir.exists() and self.local_extractors_dir.resolve() != self.global_extractors_dir.resolve():
             # konsol.log(f"[green][*] Yerel Extractor dizininden yükleniyor: {self.local_extractors_dir}[/green]")
-            local_extractors = self._load_from_directory(self.local_extractors_dir)
-            # konsol.log(f"[green]Yerel Extractor'lar: {[e.__name__ for e in local_extractors]}[/green]")
+            extractors.extend(self._load_from_directory(self.local_extractors_dir))

-            if local_extractors:
-                # konsol.log("[cyan][*] Yerel Extractor bulundu, global Extractor'lar atlanıyor (eklenti geliştirme modu)[/cyan]")
-                extractors.extend(local_extractors)
-
-        # Yerel dizinde Extractor yoksa, global'leri yükle
-        if not extractors and self.global_extractors_dir.exists():
+        # Global Extractor'ları her zaman yükle (temel yetenekler)
+        if self.global_extractors_dir.exists():
             # konsol.log(f"[green][*] Global Extractor dizininden yükleniyor: {self.global_extractors_dir}[/green]")
-            global_extractors = self._load_from_directory(self.global_extractors_dir)
-            # konsol.log(f"[green]Global Extractor'lar: {[e.__name__ for e in global_extractors]}[/green]")
-            extractors.extend(global_extractors)
+            extractors.extend(self._load_from_directory(self.global_extractors_dir))

         # Benzersizliği sağlama (modül adı + sınıf adı bazında)
         unique_extractors = []
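The loader change above means locally developed extractors are now loaded in addition to the bundled ones instead of replacing them, and the trailing context line shows that duplicates are then filtered by module plus class name. A rough sketch of that dedup idea (a hypothetical helper, not the package's exact code):

def dedupe(extractors: list[type]) -> list[type]:
    seen: set[tuple[str, str]] = set()
    unique = []
    for ext in extractors:
        key = (ext.__module__, ext.__name__)  # module name + class name
        if key not in seen:
            seen.add(key)
            unique.append(ext)
    return unique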
KekikStream/Core/HTMLHelper.py (new file)
@@ -0,0 +1,205 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from __future__ import annotations
+
+import re
+from selectolax.parser import HTMLParser, Node
+
+
+class HTMLHelper:
+    """
+    Selectolax ile HTML parsing işlemlerini temiz, kısa ve okunabilir hale getiren yardımcı sınıf.
+    """
+
+    def __init__(self, html: str):
+        self.html = html
+        self.parser = HTMLParser(html)
+
+    # ========================
+    # SELECTOR (CSS) İŞLEMLERİ
+    # ========================
+
+    def _root(self, element: Node | None) -> Node | HTMLParser:
+        """İşlem yapılacak temel elementi döndürür."""
+        return element if element is not None else self.parser
+
+    def select(self, selector: str, element: Node | None = None) -> list[Node]:
+        """CSS selector ile tüm eşleşen elementleri döndür."""
+        return self._root(element).css(selector)
+
+    def select_first(self, selector: str | None, element: Node | None = None) -> Node | None:
+        """CSS selector ile ilk eşleşen elementi döndür."""
+        if not selector:
+            return element
+        return self._root(element).css_first(selector)
+
+    def select_text(self, selector: str | None = None, element: Node | None = None) -> str | None:
+        """CSS selector ile element bul ve text içeriğini döndür."""
+        el = self.select_first(selector, element)
+        if not el:
+            return None
+        val = el.text(strip=True)
+        return val or None
+
+    def select_texts(self, selector: str, element: Node | None = None) -> list[str] | None:
+        """CSS selector ile tüm eşleşen elementlerin text içeriklerini döndür."""
+        out: list[str] = []
+        for el in self.select(selector, element):
+            txt = el.text(strip=True)
+            if txt:
+                out.append(txt)
+        return out or None
+
+    def select_attr(self, selector: str | None, attr: str, element: Node | None = None) -> str | None:
+        """CSS selector ile element bul ve attribute değerini döndür."""
+        el = self.select_first(selector, element)
+        return el.attrs.get(attr) if el else None
+
+    def select_attrs(self, selector: str, attr: str, element: Node | None = None) -> list[str]:
+        """CSS selector ile tüm eşleşen elementlerin attribute değerlerini döndür."""
+        out: list[str] = []
+        for el in self.select(selector, element):
+            val = el.attrs.get(attr)
+            if val:
+                out.append(val)
+        return out
+
+    def select_poster(self, selector: str = "img", element: Node | None = None) -> str | None:
+        """Poster URL'sini çıkar. Önce data-src, sonra src dener."""
+        el = self.select_first(selector, element)
+        if not el:
+            return None
+        return el.attrs.get("data-src") or el.attrs.get("src")
+
+    def select_direct_text(self, selector: str, element: Node | None = None) -> str | None:
+        """
+        Elementin yalnızca "kendi" düz metnini döndürür (child elementlerin text'ini katmadan).
+        """
+        el = self.select_first(selector, element)
+        if not el:
+            return None
+
+        # type: ignore[call-arg]
+        val = el.text(strip=True, deep=False)
+        return val or None
+
+    # ========================
+    # META (LABEL -> VALUE) İŞLEMLERİ
+    # ========================
+
+    def meta_value(self, label: str, container_selector: str | None = None) -> str | None:
+        """
+        Herhangi bir container içinde: LABEL metnini içeren bir elementten SONRA gelen metni döndürür.
+        label örn: "Oyuncular", "Yapım Yılı", "IMDB"
+        """
+        needle = label.casefold()
+
+        # Belirli bir container varsa içinde ara, yoksa tüm dökümanda
+        targets = self.select(container_selector) if container_selector else [self.parser.body]
+
+        for root in targets:
+            if not root: continue
+
+            # Kalın/vurgulu elementlerde (span, strong, b, label, dt) label'ı ara
+            for label_el in self.select("span, strong, b, label, dt", root):
+                txt = (label_el.text(strip=True) or "").casefold()
+                if needle not in txt:
+                    continue
+
+                # 1) Elementin kendi içindeki text'te LABEL: VALUE formatı olabilir
+                # "Oyuncular: Brad Pitt" gibi. LABEL: sonrasını al.
+                full_txt = label_el.text(strip=True)
+                if ":" in full_txt and needle in full_txt.split(":")[0].casefold():
+                    val = full_txt.split(":", 1)[1].strip()
+                    if val: return val
+
+                # 2) Label sonrası gelen ilk text node'u veya element'i al
+                curr = label_el.next
+                while curr:
+                    if curr.tag == "-text":
+                        val = curr.text(strip=True).strip(" :")
+                        if val: return val
+                    elif curr.tag != "br":
+                        val = curr.text(strip=True).strip(" :")
+                        if val: return val
+                    else: # <br> gördüysek satır bitmiştir
+                        break
+                    curr = curr.next
+
+        return None
+
+    def meta_list(self, label: str, container_selector: str | None = None, sep: str = ",") -> list[str]:
+        """meta_value(...) çıktısını veya label'ın ebeveynindeki linkleri listeye döndürür."""
+        needle = label.casefold()
+        targets = self.select(container_selector) if container_selector else [self.parser.body]
+
+        for root in targets:
+            if not root: continue
+            for label_el in self.select("span, strong, b, label, dt", root):
+                if needle in (label_el.text(strip=True) or "").casefold():
+                    # Eğer elementin ebeveyninde linkler varsa (Kutucuklu yapı), onları al
+                    links = self.select_texts("a", label_el.parent)
+                    if links: return links
+
+                    # Yoksa düz metin olarak meta_value mantığıyla al
+                    raw = self.meta_value(label, container_selector=container_selector)
+                    if not raw: return []
+                    return [x.strip() for x in raw.split(sep) if x.strip()]
+
+        return []
+
+    # ========================
+    # REGEX İŞLEMLERİ
+    # ========================
+
+    def _regex_source(self, target: str | int | None) -> str:
+        """Regex için kaynak metni döndürür."""
+        return target if isinstance(target, str) else self.html
+
+    def regex_first(self, pattern: str, target: str | int | None = None, group: int | None = 1) -> str | tuple | None:
+        """Regex ile arama yap, istenen grubu döndür (group=None ise tüm grupları tuple olarak döndür)."""
+        match = re.search(pattern, self._regex_source(target))
+        if not match:
+            return None
+
+        if group is None:
+            return match.groups()
+
+        last_idx = match.lastindex or 0
+        return match.group(group) if last_idx >= group else match.group(0)
+
+    def regex_all(self, pattern: str, target: str | int | None = None) -> list[str] | list[tuple]:
+        """Regex ile tüm eşleşmeleri döndür."""
+        return re.findall(pattern, self._regex_source(target))
+
+    def regex_replace(self, pattern: str, repl: str, target: str | int | None = None) -> str:
+        """Regex ile replace yap."""
+        return re.sub(pattern, repl, self._regex_source(target))
+
+    # ========================
+    # ÖZEL AYIKLAYICILAR
+    # ========================
+
+    @staticmethod
+    def extract_season_episode(text: str) -> tuple[int | None, int | None]:
+        """Metin içinden sezon ve bölüm numarasını çıkar."""
+        if m := re.search(r"[Ss](\d+)[Ee](\d+)", text):
+            return int(m.group(1)), int(m.group(2))
+
+        s = re.search(r"(\d+)\.\s*[Ss]ezon|[Ss]ezon[- ]?(\d+)|-(\d+)-sezon|S(\d+)|(\d+)\.[Ss]", text, re.I)
+        e = re.search(r"(\d+)\.\s*[Bb][öo]l[üu]m|[Bb][öo]l[üu]m[- ]?(\d+)|-(\d+)-bolum|[Ee](\d+)", text, re.I)
+
+        s_val = next((int(g) for g in s.groups() if g), None) if s else None
+        e_val = next((int(g) for g in e.groups() if g), None) if e else None
+
+        return s_val, e_val
+
+    def extract_year(self, *selectors: str, pattern: str = r"(\d{4})") -> int | None:
+        """Birden fazla selector veya regex ile yıl bilgisini çıkar."""
+        for selector in selectors:
+            if text := self.select_text(selector):
+                if m := re.search(r"(\d{4})", text):
+                    return int(m.group(1))
+
+        val = self.regex_first(pattern)
+        return int(val) if val and val.isdigit() else None
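A short usage sketch of the new HTMLHelper against a made-up HTML snippet (markup, selectors, and values here are illustrative only; the import path follows the Core/__init__.py change further down):

from KekikStream.Core import HTMLHelper

html = """
<div class="movie">
  <h1>Fight Club izle</h1>
  <img data-src="/poster.jpg" src="/placeholder.jpg">
  <span>Yapım Yılı:</span> 1999
</div>
"""

sel = HTMLHelper(html)
print(sel.select_text("h1"))          # "Fight Club izle"
print(sel.select_poster("img"))       # "/poster.jpg" (data-src is preferred over src)
print(sel.meta_value("Yapım Yılı"))   # "1999" (text following the label element)
print(sel.extract_year("h1"))         # 1999 (falls back to a regex over the raw HTML)
print(HTMLHelper.extract_season_episode("Dizi 3. Sezon 7. Bölüm"))  # (3, 7)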
KekikStream/Core/Plugin/PluginBase.py
@@ -25,14 +25,22 @@ class PluginBase(ABC):
         self.main_page = {url.replace(self.main_url, new_url): category for url, category in self.main_page.items()}
         self.main_url = new_url

-    def __init__(self):
+    def __init__(self, proxy: str | dict | None = None, extractor_dir: str = "Extractors"):
         # cloudscraper - for bypassing Cloudflare
         self.cloudscraper = CloudScraper()
+        if proxy:
+            self.cloudscraper.proxies = proxy if isinstance(proxy, dict) else {"http": proxy, "https": proxy}
+
+        # Convert dict proxy to string for httpx if necessary
+        httpx_proxy = proxy
+        if isinstance(proxy, dict):
+            httpx_proxy = proxy.get("https") or proxy.get("http")

         # httpx - lightweight and safe for most HTTP requests
         self.httpx = AsyncClient(
             timeout = 3,
-            follow_redirects = True
+            follow_redirects = True,
+            proxy = httpx_proxy
         )
         self.httpx.headers.update(self.cloudscraper.headers)
         self.httpx.cookies.update(self.cloudscraper.cookies)
@@ -42,7 +50,7 @@ class PluginBase(ABC):
         })

         self.media_handler = MediaHandler()
-        self.ex_manager = ExtractorManager()
+        self.ex_manager = ExtractorManager(extractor_dir=extractor_dir)

     @abstractmethod
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
@@ -93,11 +101,18 @@ class PluginBase(ABC):
             return ""

         if url.startswith("http") or url.startswith("{\""):
-            return url
-
-        return f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
-
-    async def extract(self, url: str, referer: str = None, prefix: str | None = None) -> ExtractResult | None:
+            return url.replace("\\", "")
+
+        url = f"https:{url}" if url.startswith("//") else urljoin(self.main_url, url)
+        return url.replace("\\", "")
+
+    async def extract(
+        self,
+        url: str,
+        referer: str = None,
+        prefix: str | None = None,
+        name_override: str | None = None
+    ) -> ExtractResult | list[ExtractResult] | None:
        """
        Extractor ile video URL'sini çıkarır.

@@ -105,6 +120,7 @@ class PluginBase(ABC):
            url: Iframe veya video URL'si
            referer: Referer header (varsayılan: plugin main_url)
            prefix: İsmin başına eklenecek opsiyonel etiket (örn: "Türkçe Dublaj")
+            name_override: İsmi tamamen değiştirecek opsiyonel etiket (Extractor adını ezer)

        Returns:
            ExtractResult: Extractor sonucu (name prefix ile birleştirilmiş) veya None
@@ -122,9 +138,26 @@ class PluginBase(ABC):
         try:
             data = await extractor.extract(url, referer=referer)

-            # prefix varsa name'e ekle
-            if prefix and data.name:
-                data.name = f"{prefix} | {data.name}"
+            # Liste ise her bir öğe için prefix/override ekle
+            if isinstance(data, list):
+                for item in data:
+                    if name_override:
+                        item.name = name_override
+                    elif prefix and item.name:
+                        if item.name.lower() in prefix.lower():
+                            item.name = prefix
+                        else:
+                            item.name = f"{prefix} | {item.name}"
+                return data
+
+            # Tekil öğe ise
+            if name_override:
+                data.name = name_override
+            elif prefix and data.name:
+                if data.name.lower() in prefix.lower():
+                    data.name = prefix
+                else:
+                    data.name = f"{prefix} | {data.name}"

             return data
         except Exception as hata:
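The new naming rules in extract apply the same way to a single result and to each item of a list result; an isolated illustration of just that logic (standalone sketch with hypothetical values, not the package's code):

def apply_name(name: str, prefix: str | None = None, name_override: str | None = None) -> str:
    if name_override:
        return name_override                  # override replaces the extractor name outright
    if prefix and name:
        if name.lower() in prefix.lower():    # avoid "RapidVid | RapidVid"-style duplication
            return prefix
        return f"{prefix} | {name}"
    return name

print(apply_name("RapidVid", prefix="Türkçe Dublaj"))    # Türkçe Dublaj | RapidVid
print(apply_name("RapidVid", prefix="RapidVid 1080p"))   # RapidVid 1080p
print(apply_name("RapidVid", name_override="Kaynak 1"))  # Kaynak 1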
KekikStream/Core/Plugin/PluginBase.py
@@ -132,7 +165,10 @@
             return None

     @staticmethod
-    def clean_title(title: str) -> str:
+    def clean_title(title: str | None) -> str | None:
+        if not title:
+            return None
+
         suffixes = [
             " izle",
             " full film",
KekikStream/Core/Plugin/PluginLoader.py
@@ -6,10 +6,12 @@ from pathlib import Path
import os, importlib.util, traceback

class PluginLoader:
-    def __init__(self, plugins_dir: str):
+    def __init__(self, plugins_dir: str, proxy: str | dict | None = None, extractor_dir: str = "Extractors"):
        # Yerel ve global eklenti dizinlerini ayarla
+        self.proxy = proxy
+        self.extractor_dir = extractor_dir
        self.local_plugins_dir = Path(plugins_dir).resolve()
-        self.global_plugins_dir = Path(__file__).parent.parent.parent / plugins_dir
+        self.global_plugins_dir = Path(__file__).parent.parent.parent / "Plugins"

        # Dizin kontrolü
        if not self.local_plugins_dir.exists() and not self.global_plugins_dir.exists():
@@ -17,19 +19,16 @@ class PluginLoader:
            cikis_yap(False)

    def load_all(self) -> dict[str, PluginBase]:
-        plugins = {}
+        plugins = {}
+        local_dir_exists = self.local_plugins_dir.exists() and self.local_plugins_dir.resolve() != self.global_plugins_dir.resolve()

-        # Eğer yerel dizinde Plugin varsa, sadece onları yükle (eklenti geliştirme modu)
-        if self.local_plugins_dir.exists():
+        # Eğer yerel dizin varsa, sadece oradan yükle (eklenti geliştirme/yayınlama modu)
+        if local_dir_exists:
            # konsol.log(f"[green][*] Yerel Eklenti dizininden yükleniyor: {self.local_plugins_dir}[/green]")
-            local_plugins = self._load_from_directory(self.local_plugins_dir)
-
-            if local_plugins:
-                # konsol.log("[cyan][*] Yerel Plugin bulundu, global Plugin'ler atlanıyor (eklenti geliştirme modu)[/cyan]")
-                plugins |= local_plugins
-
-        # Yerel dizinde Plugin yoksa, global'leri yükle
-        if not plugins and self.global_plugins_dir.exists():
+            plugins |= self._load_from_directory(self.local_plugins_dir)
+
+        # Yerel dizin yoksa (veya core ile aynı yerse), global'leri yükle
+        else:
            # konsol.log(f"[green][*] Global Eklenti dizininden yükleniyor: {self.global_plugins_dir}[/green]")
            plugins |= self._load_from_directory(self.global_plugins_dir)

@@ -70,7 +69,7 @@
                obj = getattr(module, attr)
                if isinstance(obj, type) and issubclass(obj, PluginBase) and obj is not PluginBase:
                    # konsol.log(f"[yellow]Yüklenen sınıf\t\t: {module_name}.{obj.__name__} ({obj.__module__}.{obj.__name__})[/yellow]")
-                    return obj()
+                    return obj(proxy=self.proxy, extractor_dir=self.extractor_dir)

        except Exception as hata:
            konsol.print(f"[red][!] Eklenti yüklenirken hata oluştu: {module_name}\nHata: {hata}")
KekikStream/Core/Plugin/PluginManager.py
@@ -4,9 +4,9 @@ from .PluginLoader import PluginLoader
from .PluginBase import PluginBase

class PluginManager:
-    def __init__(self, plugin_dir="Plugins"):
+    def __init__(self, plugin_dir="Plugins", proxy: str | dict | None = None, extractor_dir: str = "Extractors"):
        # Eklenti yükleyiciyi başlat ve tüm eklentileri yükle
-        self.plugin_loader = PluginLoader(plugin_dir)
+        self.plugin_loader = PluginLoader(plugin_dir, proxy=proxy, extractor_dir=extractor_dir)
        self.plugins = self.plugin_loader.load_all()

    def get_plugin_names(self):
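Taken together with the PluginLoader and PluginBase changes above, a proxy and a custom extractor directory now flow from the manager down to every plugin instance; a usage sketch (the proxy address is illustrative, and this assumes PluginManager is importable from KekikStream.Core as in earlier releases):

from KekikStream.Core import PluginManager

manager = PluginManager(
    plugin_dir    = "Plugins",
    proxy         = "http://127.0.0.1:8080",   # or {"http": "...", "https": "..."}
    extractor_dir = "Extractors"
)
print(manager.get_plugin_names())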
KekikStream/Core/Plugin/PluginModels.py
@@ -51,9 +51,6 @@ class Episode(BaseModel):
        if not self.title:
            self.title = ""

-        if any(keyword in self.title.lower() for keyword in ["bölüm", "sezon", "episode"]):
-            self.title = ""
-
        return self

class SeriesInfo(BaseModel):
KekikStream/Core/__init__.py
@@ -17,3 +17,5 @@ from .Extractor.YTDLPCache import get_ytdlp_extractors

from .Media.MediaManager import MediaManager
from .Media.MediaHandler import MediaHandler
+
+from .HTMLHelper import HTMLHelper
KekikStream/Extractors/Abstream.py (new file)
@@ -0,0 +1,27 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from KekikStream.Core import ExtractorBase, ExtractResult, HTMLHelper
+
+class Abstream(ExtractorBase):
+    name = "Abstream"
+    main_url = "https://abstream.to"
+
+    async def extract(self, url: str, referer: str = None) -> ExtractResult:
+        istek = await self.httpx.get(
+            url = url,
+            headers = {
+                "Accept-Language" : "en-US,en;q=0.5",
+                "Referer" : referer or self.main_url,
+            }
+        )
+        secici = HTMLHelper(istek.text)
+        video_url = secici.regex_first(r'file:"([^"]*)"')
+
+        if not video_url:
+            raise ValueError(f"Abstream: Video URL bulunamadı. {url}")
+
+        return ExtractResult(
+            name = self.name,
+            url = video_url,
+            referer = referer or self.main_url
+        )
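A minimal way to exercise the new extractor on its own (the embed URL is a placeholder, and this assumes the extractor can be constructed without arguments like the other extractors in this package):

import asyncio
from KekikStream.Extractors.Abstream import Abstream

async def main():
    extractor = Abstream()
    sonuc     = await extractor.extract("https://abstream.to/embed-XXXXXXXX.html")
    print(sonuc.name, sonuc.url)

asyncio.run(main())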
KekikStream/Extractors/CloseLoad.py
@@ -1,77 +1,52 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import ExtractorBase, ExtractResult, Subtitle
+from KekikStream.Core import ExtractorBase, ExtractResult, Subtitle, HTMLHelper
 from Kekik.Sifreleme import Packer, StreamDecoder
-from selectolax.parser import HTMLParser
-import re, json
+import json, contextlib

-class CloseLoadExtractor(ExtractorBase):
+class CloseLoad(ExtractorBase):
     name = "CloseLoad"
     main_url = "https://closeload.filmmakinesi.to"

-    def _extract_from_json_ld(self, html: str) -> str | None:
-        """JSON-LD script tag'inden contentUrl'i çıkar (Kotlin versiyonundaki gibi)"""
-        secici = HTMLParser(html)
-        for script in secici.css("script[type='application/ld+json']"):
-            try:
-                data = json.loads(script.text(strip=True))
-                if content_url := data.get("contentUrl"):
-                    if content_url.startswith("http"):
-                        return content_url
-            except (json.JSONDecodeError, TypeError):
-                # Regex ile contentUrl'i çıkarmayı dene
-                match = re.search(r'"contentUrl"\s*:\s*"([^"]+)"', script.text())
-                if match and match.group(1).startswith("http"):
-                    return match.group(1)
-        return None
-
-    def _extract_from_packed(self, html: str) -> str | None:
-        """Packed JavaScript'ten video URL'sini çıkar (fallback)"""
-        try:
-            eval_func = re.compile(r'\s*(eval\(function[\s\S].*)').findall(html)
-            if eval_func:
-                return StreamDecoder.extract_stream_url(Packer.unpack(eval_func[0]))
-        except Exception:
-            pass
-        return None
-
-    async def extract(self, url, referer=None) -> ExtractResult:
-        if referer:
-            self.httpx.headers.update({"Referer": referer})
-
+    async def extract(self, url: str, referer: str = None) -> ExtractResult:
         self.httpx.headers.update({
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36 Edg/140.0.0.0",
-            "Origin": self.main_url
+            "Referer" : referer or self.main_url,
+            "Origin" : self.main_url
         })

-        istek = await self.httpx.get(url)
-        istek.raise_for_status()
+        resp = await self.httpx.get(url)
+        sel = HTMLHelper(resp.text)

-        # Önce JSON-LD'den dene (daha güvenilir - Kotlin versiyonu gibi)
-        m3u_link = self._extract_from_json_ld(istek.text)
+        # 1. JSON-LD'den Dene
+        m3u8_url = None
+        for script in sel.select("script[type='application/ld+json']"):
+            with contextlib.suppress(Exception):
+                data = json.loads(script.text(strip=True))
+                if content_url := data.get("contentUrl"):
+                    if content_url.startswith("http"):
+                        m3u8_url = content_url
+                        break

-        # Fallback: Packed JavaScript'ten çıkar
-        if not m3u_link:
-            m3u_link = self._extract_from_packed(istek.text)
+        # 2. Packed Script Fallback
+        if not m3u8_url:
+            if packed := sel.regex_first(r"(eval\(function\(p,a,c,k,e,d\).+?)\s*</script>"):
+                m3u8_url = StreamDecoder.extract_stream_url(Packer.unpack(packed))

-        if not m3u_link:
-            raise Exception("Video URL bulunamadı (ne JSON-LD ne de packed script'ten)")
+        if not m3u8_url:
+            raise ValueError(f"CloseLoad: Video URL bulunamadı. {url}")

-        # Subtitle'ları parse et (Kotlin referansı: track elementleri)
         subtitles = []
-        secici = HTMLParser(istek.text)
-        for track in secici.css("track"):
-            raw_src = track.attrs.get("src") or ""
-            raw_src = raw_src.strip()
-            label = track.attrs.get("label") or track.attrs.get("srclang") or "Altyazı"
-
-            if raw_src:
-                full_url = raw_src if raw_src.startswith("http") else f"{self.main_url}{raw_src}"
-                subtitles.append(Subtitle(name=label, url=full_url))
+        for track in sel.select("track"):
+            src = track.attrs.get("src")
+            if src:
+                subtitles.append(Subtitle(
+                    name = track.attrs.get("label") or track.attrs.get("srclang") or "Altyazı",
+                    url = self.fix_url(src)
+                ))

         return ExtractResult(
             name = self.name,
-            url = m3u_link,
+            url = m3u8_url,
             referer = self.main_url,
             subtitles = subtitles
         )