KekikStream 2.3.4__py3-none-any.whl → 2.3.6__py3-none-any.whl

This diff shows the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -25,14 +25,17 @@ class PluginBase(ABC):
         self.main_page = {url.replace(self.main_url, new_url): category for url, category in self.main_page.items()}
         self.main_url = new_url
 
-    def __init__(self):
+    def __init__(self, proxy: str | dict | None = None):
         # cloudscraper - for bypassing Cloudflare
         self.cloudscraper = CloudScraper()
+        if proxy:
+            self.cloudscraper.proxies = proxy if isinstance(proxy, dict) else {"http": proxy, "https": proxy}
 
         # httpx - lightweight and safe for most HTTP requests
         self.httpx = AsyncClient(
             timeout = 3,
-            follow_redirects = True
+            follow_redirects = True,
+            proxy = proxy
         )
         self.httpx.headers.update(self.cloudscraper.headers)
         self.httpx.cookies.update(self.cloudscraper.cookies)
@@ -122,7 +125,14 @@ class PluginBase(ABC):
         try:
             data = await extractor.extract(url, referer=referer)
 
-            # if a prefix is given, prepend it to the name
+            # If it is a list, prepend the prefix to every item
+            if isinstance(data, list):
+                for item in data:
+                    if prefix and item.name:
+                        item.name = f"{prefix} | {item.name}"
+                return data
+
+            # Single item
             if prefix and data.name:
                 data.name = f"{prefix} | {data.name}"
 
@@ -6,8 +6,9 @@ from pathlib import Path
 import os, importlib.util, traceback
 
 class PluginLoader:
-    def __init__(self, plugins_dir: str):
+    def __init__(self, plugins_dir: str, proxy: str | dict | None = None):
         # Set up the local and global plugin directories
+        self.proxy = proxy
         self.local_plugins_dir = Path(plugins_dir).resolve()
         self.global_plugins_dir = Path(__file__).parent.parent.parent / plugins_dir
 
@@ -70,7 +71,7 @@ class PluginLoader:
                 obj = getattr(module, attr)
                 if isinstance(obj, type) and issubclass(obj, PluginBase) and obj is not PluginBase:
                     # konsol.log(f"[yellow]Yüklenen sınıf\t\t: {module_name}.{obj.__name__} ({obj.__module__}.{obj.__name__})[/yellow]")
-                    return obj()
+                    return obj(proxy=self.proxy)
 
         except Exception as hata:
             konsol.print(f"[red][!] Eklenti yüklenirken hata oluştu: {module_name}\nHata: {hata}")
@@ -4,9 +4,9 @@ from .PluginLoader import PluginLoader
 from .PluginBase import PluginBase
 
 class PluginManager:
-    def __init__(self, plugin_dir="Plugins"):
+    def __init__(self, plugin_dir="Plugins", proxy: str | dict | None = None):
         # Initialize the plugin loader and load all plugins
-        self.plugin_loader = PluginLoader(plugin_dir)
+        self.plugin_loader = PluginLoader(plugin_dir, proxy=proxy)
         self.plugins = self.plugin_loader.load_all()
 
     def get_plugin_names(self):
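Taken together, the PluginManager, PluginLoader and PluginBase hunks thread an optional proxy argument from the manager down to every plugin instance, where it is applied to the cloudscraper session and passed to httpx.AsyncClient. A minimal usage sketch, assuming PluginManager is importable from KekikStream.Core the same way PluginBase is:

    from KekikStream.Core import PluginManager

    # A single proxy URL is expanded to {"http": ..., "https": ...} for cloudscraper
    # and handed to httpx.AsyncClient(proxy=...) unchanged.
    manager = PluginManager("Plugins", proxy="http://127.0.0.1:8080")

    # Every plugin was constructed as obj(proxy=self.proxy), so its HTTP clients
    # already route through the proxy.
    print(manager.get_plugin_names())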
@@ -30,7 +30,7 @@ class ContentX(ExtractorBase):
         istek.raise_for_status()
         i_source = istek.text
 
-        i_extract_value = HTMLHelper(i_source).regex_first(r"window\.openPlayer\('([^']+)'\)")
+        i_extract_value = HTMLHelper(i_source).regex_first(r"window\.openPlayer\('([^']+)'")
         if not i_extract_value:
             raise ValueError("i_extract is null")
 
@@ -47,8 +47,12 @@ class ContentX(ExtractorBase):
                     name = sub_lang.replace("\\u0131", "ı")
                         .replace("\\u0130", "İ")
                         .replace("\\u00fc", "ü")
-                        .replace("\\u00e7", "ç"),
-                    url = self.fix_url(sub_url.replace("\\", ""))
+                        .replace("\\u00e7", "ç")
+                        .replace("\\u011f", "ğ")
+                        .replace("\\u015f", "ş")
+                        .replace("\\u011e", "Ğ")
+                        .replace("\\u015e", "Ş"),
+                    url = self.fix_url(sub_url.replace("\\/", "/").replace("\\", ""))
                 )
             )
 
@@ -61,7 +65,7 @@ class ContentX(ExtractorBase):
         if not m3u_link:
             raise ValueError("vidExtract is null")
 
-        m3u_link = m3u_link.replace("\\", "")
+        m3u_link = m3u_link.replace("\\", "").replace("/m.php", "/master.m3u8")
         results = [
             ExtractResult(
                 name = self.name,
@@ -71,24 +75,25 @@ class ContentX(ExtractorBase):
             )
         ]
 
-        dublaj_value = HTMLHelper(i_source).regex_first(r',\"([^\"]+)\",\"Türkçe\"')
+        dublaj_value = HTMLHelper(i_source).regex_first(r'["\']([^"\']+)["\'],["\']Türkçe["\']')
         if dublaj_value:
-            dublaj_source_request = await self.httpx.get(f"{base_url}/source2.php?v={dublaj_value}", headers={"Referer": referer or base_url})
-            dublaj_source_request.raise_for_status()
-
-            dublaj_source = dublaj_source_request.text
-            dublaj_link = HTMLHelper(dublaj_source).regex_first(r'file":"([^\"]+)"')
-            if not dublaj_link:
-                raise ValueError("dublajExtract is null")
-
-            dublaj_link = dublaj_link.replace("\\", "")
-            results.append(
-                ExtractResult(
-                    name = f"{self.name} Türkçe Dublaj",
-                    url = dublaj_link,
-                    referer = url,
-                    subtitles = []
-                )
-            )
+            try:
+                dublaj_source_request = await self.httpx.get(f"{base_url}/source2.php?v={dublaj_value}", headers={"Referer": referer or base_url})
+                dublaj_source_request.raise_for_status()
+
+                dublaj_source = dublaj_source_request.text
+                dublaj_link = HTMLHelper(dublaj_source).regex_first(r'file":"([^\"]+)"')
+                if dublaj_link:
+                    dublaj_link = dublaj_link.replace("\\", "")
+                    results.append(
+                        ExtractResult(
+                            name = f"{self.name} Türkçe Dublaj",
+                            url = dublaj_link,
+                            referer = url,
+                            subtitles = []
+                        )
+                    )
+            except Exception:
+                pass
 
         return results[0] if len(results) == 1 else results
@@ -7,6 +7,16 @@ class MolyStream(ExtractorBase):
     name = "MolyStream"
     main_url = "https://dbx.molystream.org"
 
+    # Support multiple domains
+    supported_domains = [
+        "ydx.molystream.org",
+        "yd.sheila.stream",
+        "ydf.popcornvakti.net",
+    ]
+
+    def can_handle_url(self, url: str) -> bool:
+        return any(domain in url for domain in self.supported_domains)
+
    async def extract(self, url, referer=None) -> ExtractResult:
         if "doctype html" in url:
             secici = HTMLHelper(url)
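The new supported_domains list and can_handle_url let MolyStream claim URLs served from mirror hosts other than its main_url. A hypothetical dispatch loop (illustrative only; the extractor selection actually used by KekikStream may differ, and not every extractor defines can_handle_url):

    # pick the first extractor that claims the URL
    extractor = next(
        (ext for ext in extractors if getattr(ext, "can_handle_url", lambda _: False)(url)),
        None
    )
    if extractor:
        data = await extractor.extract(url, referer=referer)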
@@ -0,0 +1,212 @@
+# This tool was written by @keyiflerolsun | for @KekikAkademi.
+
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, ExtractResult, HTMLHelper
+import urllib.parse
+
+class DiziWatch(PluginBase):
+    name = "DiziWatch"
+    language = "tr"
+    main_url = "https://diziwatch.to"
+    favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+    description = "Diziwatch; en güncel yabancı dizileri ve animeleri, Türkçe altyazılı ve dublaj seçenekleriyle izleyebileceğiniz platform."
+
+    main_page = {
+        f"{main_url}/episodes" : "Yeni Bölümler",
+        "9" : "Aksiyon",
+        "17" : "Animasyon",
+        "5" : "Bilim Kurgu",
+        "2" : "Dram",
+        "12" : "Fantastik",
+        "3" : "Gizem",
+        "4" : "Komedi",
+        "8" : "Korku",
+        "24" : "Macera",
+        "14" : "Müzik",
+        "7" : "Romantik",
+        "23" : "Spor",
+        "1" : "Suç",
+    }
+
+    async def _init_session(self):
+        if getattr(self, "c_key", None) and getattr(self, "c_value", None):
+            return
+
+        # Fetch anime-arsivi to get CSRF tokens
+        resp = await self.httpx.get(f"{self.main_url}/anime-arsivi")
+        sel = HTMLHelper(resp.text)
+
+        # form.bg-[rgba(255,255,255,.15)] > input
+        # We can just look for the first two inputs in that specific form
+        inputs = sel.select("form.bg-\\[rgba\\(255\\,255\\,255\\,\\.15\\)\\] input")
+        if len(inputs) >= 2:
+            self.c_key = inputs[0].attrs.get("value")
+            self.c_value = inputs[1].attrs.get("value")
+
+    async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
+        await self._init_session()
+
+        if url.startswith("https://"):
+            full_url = f"{url}?page={page}"
+            resp = await self.httpx.get(full_url, headers={"Referer": f"{self.main_url}/"})
+            sel = HTMLHelper(resp.text)
+            items = sel.select("div.swiper-slide a")
+        else:
+            # Category ID based
+            full_url = f"{self.main_url}/anime-arsivi?category={url}&minImdb=&name=&release_year=&sort=date_desc&page={page}"
+            resp = await self.httpx.get(full_url, headers={"Referer": f"{self.main_url}/"})
+            sel = HTMLHelper(resp.text)
+            items = sel.select("div.content-inner a")
+
+        results = []
+        for item in items:
+            title = sel.select_text("h2", item)
+            href = item.attrs.get("href") if item.tag == "a" else sel.select_attr("a", "href", item)
+            poster = sel.select_attr("img", "src", item) or sel.select_attr("img", "data-src", item)
+
+            if title and href:
+                # If it's an episode link, clean it to get show link
+                # Regex in Kotlin: /sezon-\d+/bolum-\d+/?$
+                clean_href = HTMLHelper(href).regex_replace(r"/sezon-\d+/bolum-\d+/?$", "")
+
+                # If cleaning changed something, it was an episode link, maybe add it to title
+                if clean_href != href:
+                    se_info = sel.select_text("div.flex.gap-1.items-center", item)
+                    if se_info:
+                        title = f"{title} - {se_info}"
+
+                results.append(MainPageResult(
+                    category = category,
+                    title = title,
+                    url = self.fix_url(clean_href),
+                    poster = self.fix_url(poster) if poster else None
+                ))
+
+        return results
+
+    async def search(self, query: str) -> list[SearchResult]:
+        await self._init_session()
+
+        post_url = f"{self.main_url}/bg/searchcontent"
+        data = {
+            "cKey" : self.c_key,
+            "cValue" : self.c_value,
+            "searchterm" : query
+        }
+
+        headers = {
+            "X-Requested-With" : "XMLHttpRequest",
+            "Accept" : "application/json, text/javascript, */*; q=0.01",
+            "Referer" : f"{self.main_url}/"
+        }
+
+        resp = await self.httpx.post(post_url, data=data, headers=headers)
+
+        try:
+            raw = resp.json()
+            # Kotlin maps this to ApiResponse -> DataWrapper -> Icerikler
+            res_array = raw.get("data", {}).get("result", [])
+
+            results = []
+            for item in res_array:
+                title = item.get("object_name", "").replace("\\", "")
+                slug = item.get("used_slug", "").replace("\\", "")
+                poster = item.get("object_poster_url", "")
+
+                # Cleanup poster URL as in Kotlin
+                if poster:
+                    poster = poster.replace("images-macellan-online.cdn.ampproject.org/i/s/", "") \
+                                   .replace("file.dizilla.club", "file.macellan.online") \
+                                   .replace("images.dizilla.club", "images.macellan.online") \
+                                   .replace("images.dizimia4.com", "images.macellan.online") \
+                                   .replace("file.dizimia4.com", "file.macellan.online")
+                    poster = HTMLHelper(poster).regex_replace(r"(file\.)[\w\.]+\/?", r"\1macellan.online/")
+                    poster = HTMLHelper(poster).regex_replace(r"(images\.)[\w\.]+\/?", r"\1macellan.online/")
+                    poster = poster.replace("/f/f/", "/630/910/")
+
+                if title and slug:
+                    results.append(SearchResult(
+                        title = title,
+                        url = self.fix_url(slug),
+                        poster = self.fix_url(poster) if poster else None
+                    ))
+            return results
+        except Exception:
+            return []
+
+    async def load_item(self, url: str) -> SeriesInfo:
+        resp = await self.httpx.get(url)
+        sel = HTMLHelper(resp.text)
+
+        title = sel.select_text("h2")
+        poster = sel.select_attr("img.rounded-md", "src")
+        description = sel.select_text("div.text-sm")
+
+        year = sel.regex_first(r"Yap\u0131m Y\u0131l\u0131\s*:\s*(\d+)", resp.text)
+
+        tags = []
+        tags_raw = sel.regex_first(r"T\u00fcr\s*:\s*([^<]+)", resp.text)
+        if tags_raw:
+            tags = [t.strip() for t in tags_raw.split(",")]
+
+        rating = sel.select_text(".font-semibold.text-white")
+        if rating:
+            rating = rating.replace(",", ".").strip()
+
+        actors = [a.text(strip=True) for a in sel.select("span.valor a")]
+
+        trailer_match = sel.regex_first(r"embed\/(.*)\?rel", resp.text)
+        trailer = f"https://www.youtube.com/embed/{trailer_match}" if trailer_match else None
+
+        duration_text = sel.select_text("span.runtime")
+        duration = duration_text.split(" ")[0] if duration_text else None
+
+        episodes = []
+        # ul a handles episodes
+        for ep_link in sel.select("ul a"):
+            href = ep_link.attrs.get("href")
+            if not href or "/sezon-" not in href:
+                continue
+
+            ep_name = sel.select_text("span.hidden.sm\\:block", ep_link)
+
+            season_match = sel.regex_first(r"sezon-(\d+)", href)
+            episode_match = sel.regex_first(r"bolum-(\d+)", href)
+
+            season = season_match if season_match else None
+            episode_num = episode_match if episode_match else None
+
+            episodes.append(Episode(
+                season = int(season) if season and season.isdigit() else None,
+                episode = int(episode_num) if episode_num and episode_num.isdigit() else None,
+                title = ep_name if ep_name else f"{season}x{episode_num}",
+                url = self.fix_url(href)
+            ))
+
+        return SeriesInfo(
+            title = title,
+            url = url,
+            poster = self.fix_url(poster) if poster else None,
+            description = description,
+            rating = rating,
+            tags = tags,
+            actors = actors,
+            year = year,
+            episodes = episodes,
+            duration = int(duration) if duration and str(duration).isdigit() else None
+        )
+
+    async def load_links(self, url: str) -> list[ExtractResult]:
+        resp = await self.httpx.get(url)
+        sel = HTMLHelper(resp.text)
+
+        iframe = sel.select_attr("iframe", "src")
+        if not iframe:
+            return []
+
+        iframe_url = self.fix_url(iframe)
+        data = await self.extract(iframe_url, referer=f"{self.main_url}/")
+
+        if not data:
+            return []
+
+        return data if isinstance(data, list) else [data]
@@ -248,4 +248,6 @@ class Dizilla(PluginBase):
             return []
 
         data = await self.extract(iframe_url, referer=f"{self.main_url}/", prefix=first_result.get('language_name', 'Unknown'))
-        return [data] if data else []
+        if not data:
+            return []
+        return data if isinstance(data, list) else [data]
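Because extract() may now return a list of ExtractResult objects instead of a single one (see the PluginBase hunk above), callers normalize its return value as Dizilla does here. The same pattern written generically, with placeholder names:

    data = await self.extract(iframe_url, referer=f"{self.main_url}/")
    links = [] if not data else (data if isinstance(data, list) else [data])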
@@ -75,9 +75,9 @@ class SezonlukDizi(PluginBase):
 
         results = []
         for afis in secici.select("div.afis a"):
-            title = secici.select_text("div.description", veri)
-            href = secici.select_attr("a", "href", veri)
-            poster = secici.select_attr("img", "data-src", veri)
+            title = secici.select_text("div.description", afis)
+            href = secici.select_attr("a", "href", afis)
+            poster = secici.select_attr("img", "data-src", afis)
 
             if title and href:
                 results.append(SearchResult(
@@ -159,6 +159,11 @@ class Sinefy(PluginBase):
         actors = [h5.text(strip=True) for h5 in sel.select("div.content h5") if h5.text(strip=True)]
 
         year = sel.select_text("span.item.year")
+        if not year and title:
+            # Try to extract year from title like "Movie Name(2024)"
+            year_match = sel.regex_first(r"\((\d{4})\)", title)
+            if year_match:
+                year = year_match
 
         episodes = []
         episodes_box_list = sel.select("section.episodes-box")
@@ -0,0 +1,274 @@
+# This tool was written by @keyiflerolsun | for @KekikAkademi.
+
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, MovieInfo, Episode, ExtractResult, HTMLHelper
+import json, asyncio, time
+
+class YabanciDizi(PluginBase):
+    name = "YabanciDizi"
+    language = "tr"
+    main_url = "https://yabancidizi.so"
+    favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+    description = "Yabancidizi.so platformu üzerinden en güncel yabancı dizileri ve filmleri izleyebilir, favori içeriklerinizi takip edebilirsiniz."
+
+    main_page = {
+        f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwia2F0ZWdvcnkiOlsiMTciXX0=" : "Diziler",
+        f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwia2F0ZWdvcnkiOlsiMTgiXX0=" : "Filmler",
+        f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwiY291bnRyeSI6eyJLUiI6IktSIn19" : "Kdrama",
+        f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwiY291bnRyeSI6eyJKUCI6IkpQIn0sImNhdGVnb3J5IjpbXX0=" : "Jdrama",
+        f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwiY2F0ZWdvcnkiOnsiMyI6IjMifX0=" : "Animasyon",
+    }
+
+    async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
+        full_url = url if page == 1 else f"{url}/{page}"
+
+        resp = await self.httpx.get(full_url, headers={"Referer": f"{self.main_url}/"})
+        sel = HTMLHelper(resp.text)
+
+        results = []
+        for item in sel.select("li.mb-lg, li.segment-poster"):
+            title = sel.select_text("h2", item)
+            href = sel.select_attr("a", "href", item)
+            poster = sel.select_attr("img", "src", item)
+            score = sel.select_text("span.rating", item)
+
+            if title and href:
+                results.append(MainPageResult(
+                    category = category,
+                    title = title,
+                    url = self.fix_url(href),
+                    poster = self.fix_url(poster) if poster else None,
+                ))
+
+        return results
+
+    async def search(self, query: str) -> list[SearchResult]:
+        search_url = f"{self.main_url}/search?qr={query}"
+
+        headers = {
+            "X-Requested-With" : "XMLHttpRequest",
+            "Referer" : f"{self.main_url}/"
+        }
+
+        resp = await self.httpx.post(search_url, headers=headers)
+
+        try:
+            raw = resp.json()
+            # Kotlin mapping: JsonResponse -> Data -> ResultItem
+            res_array = raw.get("data", {}).get("result", [])
+
+            results = []
+            for item in res_array:
+                title = item.get("s_name")
+                image = item.get("s_image")
+                slug = item.get("s_link")
+                s_type = item.get("s_type") # 0: dizi (series), 1: film (movie)
+
+                poster = f"{self.main_url}/uploads/series/{image}" if image else None
+
+                if s_type == "1":
+                    href = f"{self.main_url}/film/{slug}"
+                else:
+                    href = f"{self.main_url}/dizi/{slug}"
+
+                if title and slug:
+                    results.append(SearchResult(
+                        title = title,
+                        url = self.fix_url(href),
+                        poster = self.fix_url(poster) if poster else None
+                    ))
+            return results
+        except Exception:
+            return []
+
+    async def load_item(self, url: str) -> SeriesInfo | MovieInfo:
+        resp = await self.httpx.get(url, follow_redirects=True)
+        sel = HTMLHelper(resp.text)
+
+        og_title = sel.select_attr("meta[property='og:title']", "content")
+        title = og_title.split("|")[0].strip() if og_title else sel.select_text("h1")
+
+        poster = sel.select_attr("meta[property='og:image']", "content")
+        description = sel.select_text("p#tv-series-desc")
+
+        year = sel.select_text("td div.truncate")
+        if year:
+            year = year.strip()
+
+        tags = []
+        rating = None
+        duration = None
+        year = None
+        actors = []
+        for item in sel.select("div.item"):
+            text = item.text(strip=True)
+            if "T\u00fcr\u00fc:" in text:
+                tags = [t.strip() for t in text.replace("T\u00fcr\u00fc:", "").split(",")]
+            elif "IMDb Puan\u0131" in text:
+                rating = text.replace("IMDb Puan\u0131", "").strip()
+            elif "Yap\u0131m Y\u0131l\u0131" in text:
+                year_match = sel.regex_first(r"(\d{4})", text)
+                if year_match:
+                    year = year_match
+            elif "Takip\u00e7iler" in text:
+                continue
+            elif "S\u00fcre" in text:
+                dur_match = sel.regex_first(r"(\d+)", text)
+                if dur_match:
+                    duration = dur_match
+            elif "Oyuncular:" in text:
+                actors = [a.text(strip=True) for a in sel.select("a", item)]
+
+        if not actors:
+            actors = [a.text(strip=True) for a in sel.select("div#common-cast-list div.item h5")]
+
+        trailer_match = sel.regex_first(r"embed\/(.*)\?rel", resp.text)
+        trailer = f"https://www.youtube.com/embed/{trailer_match}" if trailer_match else None
+
+        if "/film/" in url:
+            return MovieInfo(
+                title = title,
+                url = url,
+                poster = self.fix_url(poster) if poster else None,
+                description = description,
+                rating = rating,
+                tags = tags,
+                actors = actors,
+                year = year,
+                duration = int(duration) if duration and duration.isdigit() else None
+            )
+        else:
+            episodes = []
+            for bolum_item in sel.select("div.episodes-list div.ui td:has(h6)"):
+                link_el = sel.select_first("a", bolum_item)
+                if not link_el: continue
+
+                bolum_href = link_el.attrs.get("href")
+                bolum_name = sel.select_text("h6", bolum_item) or link_el.text(strip=True)
+
+                season = sel.regex_first(r"sezon-(\d+)", bolum_href)
+                episode = sel.regex_first(r"bolum-(\d+)", bolum_href)
+
+                ep_season = int(season) if season and season.isdigit() else None
+                ep_episode = int(episode) if episode and episode.isdigit() else None
+
+                episodes.append(Episode(
+                    season = ep_season,
+                    episode = ep_episode,
+                    title = bolum_name,
+                    url = self.fix_url(bolum_href)
+                ))
+
+            if episodes and (episodes[0].episode or 0) > (episodes[-1].episode or 0):
+                episodes.reverse()
+
+            return SeriesInfo(
+                title = title,
+                url = url,
+                poster = self.fix_url(poster) if poster else None,
+                description = description,
+                rating = rating,
+                tags = tags,
+                actors = actors,
+                year = year,
+                episodes = episodes
+            )
+
+    async def load_links(self, url: str) -> list[ExtractResult]:
+        resp = await self.httpx.get(url, headers={"Referer": f"{self.main_url}/"})
+        sel = HTMLHelper(resp.text)
+
+        results = []
+
+        # Method 1: alternatives-for-this
+        for alt in sel.select("div.alternatives-for-this div.item:not(.active)"):
+            data_hash = alt.attrs.get("data-hash")
+            data_link = alt.attrs.get("data-link")
+            q_type = alt.attrs.get("data-querytype")
+
+            if not data_hash or not data_link: continue
+
+            try:
+                post_resp = await self.httpx.post(
+                    f"{self.main_url}/ajax/service",
+                    data = {
+                        "link" : data_link,
+                        "hash" : data_hash,
+                        "querytype" : q_type,
+                        "type" : "videoGet"
+                    },
+                    headers = {
+                        "X-Requested-With" : "XMLHttpRequest",
+                        "Referer" : f"{self.main_url}/"
+                    },
+                    cookies = {"udys": "1760709729873", "level": "1"}
+                )
+
+                service_data = post_resp.json()
+                api_iframe = service_data.get("api_iframe")
+                if api_iframe:
+                    extract_res = await self._fetch_and_extract(api_iframe, prefix="Alt")
+                    if extract_res:
+                        results.extend(extract_res if isinstance(extract_res, list) else [extract_res])
+            except Exception:
+                continue
+
+        # Method 2: pointing[data-eid]
+        for id_el in sel.select("a.ui.pointing[data-eid]"):
+            dil = id_el.text(strip=True)
+            v_lang = "tr" if "Dublaj" in dil else "en"
+            data_eid = id_el.attrs.get("data-eid")
+
+            try:
+                post_resp = await self.httpx.post(
+                    f"{self.main_url}/ajax/service",
+                    data = {
+                        "e_id" : data_eid,
+                        "v_lang" : v_lang,
+                        "type" : "get_whatwehave"
+                    },
+                    headers = {
+                        "X-Requested-With" : "XMLHttpRequest",
+                        "Referer" : f"{self.main_url}/"
+                    },
+                    cookies = {"udys": "1760709729873", "level": "1"}
+                )
+
+                service_data = post_resp.json()
+                api_iframe = service_data.get("api_iframe")
+                if api_iframe:
+                    extract_res = await self._fetch_and_extract(api_iframe, prefix=dil)
+                    if extract_res:
+                        results.extend(extract_res if isinstance(extract_res, list) else [extract_res])
+            except Exception:
+                continue
+
+        return results
+
+    async def _fetch_and_extract(self, iframe_url, prefix=""):
+        # Initial fetch
+        resp = await self.httpx.get(
+            iframe_url,
+            headers = {"Referer": f"{self.main_url}/"},
+            cookies = {"udys": "1760709729873", "level": "1"}
+        )
+
+        # Handle "Lütfen bekleyiniz" check from Kotlin
+        if "Lütfen bekleyiniz" in resp.text:
+            await asyncio.sleep(1)
+            timestamp = int(time.time())
+            # Retry with t=timestamp as in Kotlin
+            sep = "&" if "?" in iframe_url else "?"
+            resp = await self.httpx.get(
+                f"{iframe_url}{sep}t={timestamp}",
+                headers = {"Referer": f"{self.main_url}/"},
+                cookies = resp.cookies # Use cookies from first response
+            )
+
+        sel = HTMLHelper(resp.text)
+        final_iframe = sel.select_attr("iframe", "src")
+
+        if final_iframe:
+            final_url = self.fix_url(final_iframe)
+            return await self.extract(final_url, referer=f"{self.main_url}/", prefix=prefix)
+
+        return None
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: KekikStream
-Version: 2.3.4
+Version: 2.3.6
 Summary: terminal üzerinden medya içeriği aramanızı ve VLC/MPV gibi popüler medya oynatıcılar aracılığıyla doğrudan izlemenizi sağlayan modüler ve genişletilebilir bir bıdı bıdı
 Home-page: https://github.com/keyiflerolsun/KekikStream
 Author: keyiflerolsun
@@ -12,13 +12,13 @@ KekikStream/Core/Extractor/ExtractorModels.py,sha256=Qj_gbIeGRewaZXNfYkTi4FFRRq6
 KekikStream/Core/Extractor/YTDLPCache.py,sha256=sRg5kwFxkRXA_8iRwsV29E51g9qQJvg8dWUnzfr7EwA,984
 KekikStream/Core/Media/MediaHandler.py,sha256=MEn3spPAThVloN3WcoCwWhpoyMA7tAZvcwYjmjJsX3U,7678
 KekikStream/Core/Media/MediaManager.py,sha256=AaUq2D7JSJIphjoAj2fjLOJjswm7Qf5hjYCbBdrbnDU,438
-KekikStream/Core/Plugin/PluginBase.py,sha256=53nBWXnzq6KnmmRmqyi5RRBCeUH8Rcwd8CNrM76VzqE,5984
-KekikStream/Core/Plugin/PluginLoader.py,sha256=GcDqN1u3nJeoGKH_oDFHCpwteJlLCxHNbmPrC5L-hZE,3692
-KekikStream/Core/Plugin/PluginManager.py,sha256=CZVg1eegi8vfMfccx0DRV0Box8kXz-aoULTQLgbPbvM,893
+KekikStream/Core/Plugin/PluginBase.py,sha256=4y8ZyUhqvwn07GrARjAGEjIW4AoCkAzHkyZSkd_4USc,6439
+KekikStream/Core/Plugin/PluginLoader.py,sha256=6LE5id0571bB-gJZxaLfd973XcG6oaGeMhLVcYYY7kw,3768
+KekikStream/Core/Plugin/PluginManager.py,sha256=6a0Q2mHtzIpx1ttdSTsVHg2HfLJIO0r_iHjK3Kui1Rw,939
 KekikStream/Core/Plugin/PluginModels.py,sha256=Yvx-6Fkn8QCIcuqAkFbCP5EJcq3XBkK_P8S0tRNhS6E,2476
 KekikStream/Core/UI/UIManager.py,sha256=T4V_kdTTWa-UDamgLSKa__dWJuzcvRK9NuwBlzU9Bzc,1693
 KekikStream/Extractors/CloseLoad.py,sha256=qRsiW5SloxWgm6MNUd4DF4vC7aSeyJqD3_0vZoFp7Jc,3176
-KekikStream/Extractors/ContentX.py,sha256=-T2l4qt5T0md2-x87bk8jR9_GB5Fn8v_rGhkmsoOLNc,3578
+KekikStream/Extractors/ContentX.py,sha256=6-pzHBGrwJeGzeMaPZ5s82RCQZL9MEhHDyI3c4L-xMM,3975
 KekikStream/Extractors/DonilasPlay.py,sha256=-Bhfpp0AN_wNYAnsaWdL--wo8DD2VPblTAlUQIX6HYU,3190
 KekikStream/Extractors/DzenRu.py,sha256=WIUZUIixP4X6TweJHpY86fenRY150ucH2VNImvdxcRc,1213
 KekikStream/Extractors/ExPlay.py,sha256=G2ZmXGcsjpZ5ihtL0ZYkyVO8nPuzSC_8AR0zvED6ScQ,1746
@@ -29,7 +29,7 @@ KekikStream/Extractors/JetTv.py,sha256=2X1vYDQ0hxBTcpnE_XTcbw9tMS1aXFURcobnPdN8Z
 KekikStream/Extractors/MailRu.py,sha256=xQVCWwYqNoG5T43VAW1_m0v4e80FbO-1pNPKkwhTccU,1218
 KekikStream/Extractors/MixPlayHD.py,sha256=u5fUePHfjOI3n7KlNsWhXIv7HA_NMj5bPw1ug-eiXLU,1557
 KekikStream/Extractors/MixTiger.py,sha256=4VbOYgE4s5H-BGVvJI0AI57M-WBWqnek_LGfCFHAucw,2116
-KekikStream/Extractors/MolyStream.py,sha256=R3R_6AwsR4mEUj023m23qMuXFAMd9vXXRZjoXFmd7ic,1142
+KekikStream/Extractors/MolyStream.py,sha256=SGKr4HdfxDmRk6nPgQUjSbdqFCKWzl7xWxRJtjjFMng,1420
 KekikStream/Extractors/Odnoklassniki.py,sha256=hajKPhWKiIuu_i441TXrWVORpLo2CdTcoJiyU3WQAuI,4038
 KekikStream/Extractors/PeaceMakerst.py,sha256=BJ5Cv5X2GEaMTwn_XFpAVVmts1h5xGno3l5rL7Ugob4,2335
 KekikStream/Extractors/PixelDrain.py,sha256=xPud8W_hqLUXJSU5O-MiCOblcmzrlDJpnEtuxr4ZdI4,1011
@@ -54,8 +54,9 @@ KekikStream/Extractors/YildizKisaFilm.py,sha256=R_JlrOVeMiDlXYcuTdItnKvidyx8_u3B
 KekikStream/Plugins/BelgeselX.py,sha256=smoLjEJTdptjb7h4m6LhG7ZUmJQtIhYyi0CUFBsk970,8696
 KekikStream/Plugins/DiziBox.py,sha256=KZGWhs6p2-hUTsd-fjz2fsmGEkanL4At2PI8qHAoDm4,10541
 KekikStream/Plugins/DiziPal.py,sha256=CTCGlknBUQIzubhvjexQoqiT3sHni34lpxiTLTemCGo,10299
+KekikStream/Plugins/DiziWatch.py,sha256=Y5-tBK316WdJhc-OZsYiwjSdfjcdz-A78o_bDP4qu08,8847
 KekikStream/Plugins/DiziYou.py,sha256=ZV80_XHv1nN0wRGgJEtnoJcgFX7S_iVSKFGiFlAqcGQ,11277
-KekikStream/Plugins/Dizilla.py,sha256=5Jwhmo_m8vKcR49RlrWHOrzozk24m8FcEDze3mUCp_E,13740
+KekikStream/Plugins/Dizilla.py,sha256=apDLGe3Fd-13nNyhcV_TFQxqX4bOZZZxEEGLonKQzS4,13803
 KekikStream/Plugins/FilmBip.py,sha256=pzvleSRZCDHh2tx8Q0JwTFiH9TexNCRnFpr3MCiMb0E,6087
 KekikStream/Plugins/FilmMakinesi.py,sha256=WaCQD7tsZdPbeU35SEnBVRZt2SzUiAQOBRBZR6drvQ4,7797
 KekikStream/Plugins/FilmModu.py,sha256=ou1BrFNR4RQaJdxVqPB5FI8vnQ0UmD-siVdwLnpp7x0,7147
@@ -68,16 +69,17 @@ KekikStream/Plugins/RecTV.py,sha256=E5ZyWU_lqibwcRm9amb_fqdXpc8qdMkekbHVxY3UmuU,
 KekikStream/Plugins/RoketDizi.py,sha256=92c3_UFIhM1SkB0Ybnp53A06VtGw2GmXtr-xmiKeJi0,8444
 KekikStream/Plugins/SelcukFlix.py,sha256=iHLO52_726gzmKAsqgW5ki2_V16fdGAZVjvaqqnQozY,13601
 KekikStream/Plugins/SetFilmIzle.py,sha256=pM4DgR2La9jUacQPRr4dilbfnljcC9l_o1OUipJh3Eg,10418
-KekikStream/Plugins/SezonlukDizi.py,sha256=s0dOd4Nqbj652ewuCQCEATz0BbYRRYyGZ6fliP4ni6M,9751
+KekikStream/Plugins/SezonlukDizi.py,sha256=h8mIglL2ORUklnAvEwH_5z6tT3WYxiNnbkeIvxtGUTE,9751
 KekikStream/Plugins/SineWix.py,sha256=z0r90lggAugEWE1g9vg8gZsInBObUZPnVFQwq7GYmJs,7052
-KekikStream/Plugins/Sinefy.py,sha256=z3WUb3GveUTqYQbPg8OFkMh6Bme_ErC7qxutk_pVjWw,10874
+KekikStream/Plugins/Sinefy.py,sha256=ShX13Q-_5KFBobxZufI5V_4zwWvEWfNYuP-g5CkBuww,11099
 KekikStream/Plugins/SinemaCX.py,sha256=11kzAZWgjkitIonDHHiFHMgnViBj-GjyvTXg7k28MLE,7717
 KekikStream/Plugins/Sinezy.py,sha256=fUj-3WaJMEsKZRnDpHFPxl5Eq2RPLroY80DcftLqvjM,5743
 KekikStream/Plugins/SuperFilmGeldi.py,sha256=StW0ue4qDj8p7CiWy19Lfr2aWtfYvslPExZJuR-3xiY,6348
 KekikStream/Plugins/UgurFilm.py,sha256=H6AA2iTaM0fn6uN8_Dfvr-OqUtM9gDdkg0BKIcZEj7U,4930
-kekikstream-2.3.4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-kekikstream-2.3.4.dist-info/METADATA,sha256=LvQkwrruf6NRnDzBmaMOHyxmN94mcbwMsjD4bpkWw2Y,10761
-kekikstream-2.3.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-kekikstream-2.3.4.dist-info/entry_points.txt,sha256=dFwdiTx8djyehI0Gsz-rZwjAfZzUzoBSrmzRu9ubjJc,50
-kekikstream-2.3.4.dist-info/top_level.txt,sha256=DNmGJDXl27Drdfobrak8KYLmocW_uznVYFJOzcjUgmY,12
-kekikstream-2.3.4.dist-info/RECORD,,
+KekikStream/Plugins/YabanciDizi.py,sha256=r3jusGf1Ufr0O1O04qQLxcxk3raCI3EFs4Z2Jwva2-s,11444
+kekikstream-2.3.6.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+kekikstream-2.3.6.dist-info/METADATA,sha256=X9pxvMO-FVM4PLZ1IyEKnRZCl4dmBYU0SZvQENp19z0,10761
+kekikstream-2.3.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+kekikstream-2.3.6.dist-info/entry_points.txt,sha256=dFwdiTx8djyehI0Gsz-rZwjAfZzUzoBSrmzRu9ubjJc,50
+kekikstream-2.3.6.dist-info/top_level.txt,sha256=DNmGJDXl27Drdfobrak8KYLmocW_uznVYFJOzcjUgmY,12
+kekikstream-2.3.6.dist-info/RECORD,,