KekikStream 2.3.4__py3-none-any.whl → 2.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -122,7 +122,14 @@ class PluginBase(ABC):
  try:
      data = await extractor.extract(url, referer=referer)

-     # if a prefix was given, prepend it to the name
+     # if the result is a list, add the prefix to every item
+     if isinstance(data, list):
+         for item in data:
+             if prefix and item.name:
+                 item.name = f"{prefix} | {item.name}"
+         return data
+
+     # single item
      if prefix and data.name:
          data.name = f"{prefix} | {data.name}"

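With this change, PluginBase.extract() may hand back either a single ExtractResult or a list of them (for example when ContentX returns both subtitled and dubbed streams, as in the hunks below), so plugin code has to normalize the return value before building its link list. A minimal caller-side sketch of that normalization, assuming only the interfaces visible in this diff; the helper name normalize_results is hypothetical and not part of the package:

    def normalize_results(data):
        # extract() may return None, a single ExtractResult, or a list of ExtractResult
        if not data:
            return []
        return data if isinstance(data, list) else [data]

    # e.g. inside a plugin's load_links(), mirroring the Dizilla change further down:
    # return normalize_results(await self.extract(iframe_url, referer=f"{self.main_url}/"))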
@@ -30,7 +30,7 @@ class ContentX(ExtractorBase):
  istek.raise_for_status()
  i_source = istek.text

- i_extract_value = HTMLHelper(i_source).regex_first(r"window\.openPlayer\('([^']+)'\)")
+ i_extract_value = HTMLHelper(i_source).regex_first(r"window\.openPlayer\('([^']+)'")
  if not i_extract_value:
      raise ValueError("i_extract is null")

@@ -47,8 +47,12 @@ class ContentX(ExtractorBase):
  name = sub_lang.replace("\\u0131", "ı")
                 .replace("\\u0130", "İ")
                 .replace("\\u00fc", "ü")
-                .replace("\\u00e7", "ç"),
- url = self.fix_url(sub_url.replace("\\", ""))
+                .replace("\\u00e7", "ç")
+                .replace("\\u011f", "ğ")
+                .replace("\\u015f", "ş")
+                .replace("\\u011e", "Ğ")
+                .replace("\\u015e", "Ş"),
+ url = self.fix_url(sub_url.replace("\\/", "/").replace("\\", ""))
  )
  )

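The chained .replace() calls above unescape a fixed set of literal \uXXXX sequences (ı, İ, ü, ç, ğ, ş, Ğ, Ş) plus the escaped slashes in the subtitle URL. Purely as an illustration, and not what ContentX itself does, such fragments could also be decoded generically with the standard library, since \u011f and \/ are ordinary JSON string escapes:

    import json

    def unescape_json_fragment(raw: str) -> str:
        # Wrap the fragment in quotes and let the JSON parser resolve \uXXXX and \/ escapes.
        # Assumes the fragment contains no unescaped double quotes or control characters.
        return json.loads(f'"{raw}"')

    # unescape_json_fragment("T\\u00fcrk\\u00e7e")      -> "Türkçe"
    # unescape_json_fragment("https:\\/\\/example.com") -> "https://example.com"  (example.com is a placeholder)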
@@ -61,7 +65,7 @@ class ContentX(ExtractorBase):
  if not m3u_link:
      raise ValueError("vidExtract is null")

- m3u_link = m3u_link.replace("\\", "")
+ m3u_link = m3u_link.replace("\\", "").replace("/m.php", "/master.m3u8")
  results = [
      ExtractResult(
          name = self.name,
@@ -71,24 +75,25 @@ class ContentX(ExtractorBase):
      )
  ]

- dublaj_value = HTMLHelper(i_source).regex_first(r',\"([^\"]+)\",\"Türkçe\"')
+ dublaj_value = HTMLHelper(i_source).regex_first(r'["\']([^"\']+)["\'],["\']Türkçe["\']')
  if dublaj_value:
-     dublaj_source_request = await self.httpx.get(f"{base_url}/source2.php?v={dublaj_value}", headers={"Referer": referer or base_url})
-     dublaj_source_request.raise_for_status()
-
-     dublaj_source = dublaj_source_request.text
-     dublaj_link = HTMLHelper(dublaj_source).regex_first(r'file":"([^\"]+)"')
-     if not dublaj_link:
-         raise ValueError("dublajExtract is null")
-
-     dublaj_link = dublaj_link.replace("\\", "")
-     results.append(
-         ExtractResult(
-             name = f"{self.name} Türkçe Dublaj",
-             url = dublaj_link,
-             referer = url,
-             subtitles = []
-         )
-     )
+     try:
+         dublaj_source_request = await self.httpx.get(f"{base_url}/source2.php?v={dublaj_value}", headers={"Referer": referer or base_url})
+         dublaj_source_request.raise_for_status()
+
+         dublaj_source = dublaj_source_request.text
+         dublaj_link = HTMLHelper(dublaj_source).regex_first(r'file":"([^\"]+)"')
+         if dublaj_link:
+             dublaj_link = dublaj_link.replace("\\", "")
+             results.append(
+                 ExtractResult(
+                     name = f"{self.name} Türkçe Dublaj",
+                     url = dublaj_link,
+                     referer = url,
+                     subtitles = []
+                 )
+             )
+     except Exception:
+         pass

  return results[0] if len(results) == 1 else results
@@ -7,6 +7,16 @@ class MolyStream(ExtractorBase):
  name = "MolyStream"
  main_url = "https://dbx.molystream.org"

+ # support multiple domains
+ supported_domains = [
+     "ydx.molystream.org",
+     "yd.sheila.stream",
+     "ydf.popcornvakti.net",
+ ]
+
+ def can_handle_url(self, url: str) -> bool:
+     return any(domain in url for domain in self.supported_domains)
+
  async def extract(self, url, referer=None) -> ExtractResult:
      if "doctype html" in url:
          secici = HTMLHelper(url)
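The new can_handle_url() hook lets MolyStream claim URLs on hosts other than its main_url. How a caller might use such a hook to route a URL to the right extractor is sketched below; this dispatch loop is hypothetical and is not taken from KekikStream's own loader code:

    def pick_extractor(extractors, url: str):
        # Prefer an extractor that explicitly claims the URL via can_handle_url(),
        # then fall back to matching on main_url.
        for ext in extractors:
            if hasattr(ext, "can_handle_url") and ext.can_handle_url(url):
                return ext
        return next((ext for ext in extractors if ext.main_url in url), None)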
@@ -0,0 +1,217 @@
+ # This tool was written by @keyiflerolsun | for @KekikAkademi.
+
+ from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, ExtractResult, HTMLHelper
+ import urllib.parse
+
+ class DiziWatch(PluginBase):
+     name = "DiziWatch"
+     language = "tr"
+     main_url = "https://diziwatch.to"
+     favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+     description = "Diziwatch; en güncel yabancı dizileri ve animeleri, Türkçe altyazılı ve dublaj seçenekleriyle izleyebileceğiniz platform."
+
+     main_page = {
+         f"{main_url}/episodes" : "Yeni Bölümler",
+         "9" : "Aksiyon",
+         "17" : "Animasyon",
+         "5" : "Bilim Kurgu",
+         "2" : "Dram",
+         "12" : "Fantastik",
+         "3" : "Gizem",
+         "4" : "Komedi",
+         "8" : "Korku",
+         "24" : "Macera",
+         "14" : "Müzik",
+         "7" : "Romantik",
+         "23" : "Spor",
+         "1" : "Suç",
+     }
+
+     def __init__(self):
+         super().__init__()
+         self.c_key = None
+         self.c_value = None
+
+     async def _init_session(self):
+         if self.c_key and self.c_value:
+             return
+
+         # Fetch anime-arsivi to get CSRF tokens
+         resp = await self.httpx.get(f"{self.main_url}/anime-arsivi")
+         sel = HTMLHelper(resp.text)
+
+         # form.bg-[rgba(255,255,255,.15)] > input
+         # We can just look for the first two inputs in that specific form
+         inputs = sel.select("form.bg-\\[rgba\\(255\\,255\\,255\\,\\.15\\)\\] input")
+         if len(inputs) >= 2:
+             self.c_key = inputs[0].attrs.get("value")
+             self.c_value = inputs[1].attrs.get("value")
+
+     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
+         await self._init_session()
+
+         if url.startswith("https://"):
+             full_url = f"{url}?page={page}"
+             resp = await self.httpx.get(full_url, headers={"Referer": f"{self.main_url}/"})
+             sel = HTMLHelper(resp.text)
+             items = sel.select("div.swiper-slide a")
+         else:
+             # Category ID based
+             full_url = f"{self.main_url}/anime-arsivi?category={url}&minImdb=&name=&release_year=&sort=date_desc&page={page}"
+             resp = await self.httpx.get(full_url, headers={"Referer": f"{self.main_url}/"})
+             sel = HTMLHelper(resp.text)
+             items = sel.select("div.content-inner a")
+
+         results = []
+         for item in items:
+             title = sel.select_text("h2", item)
+             href = item.attrs.get("href") if item.tag == "a" else sel.select_attr("a", "href", item)
+             poster = sel.select_attr("img", "src", item) or sel.select_attr("img", "data-src", item)
+
+             if title and href:
+                 # If it's an episode link, clean it to get show link
+                 # Regex in Kotlin: /sezon-\d+/bolum-\d+/?$
+                 clean_href = HTMLHelper(href).regex_replace(r"/sezon-\d+/bolum-\d+/?$", "")
+
+                 # If cleaning changed something, it was an episode link, maybe add it to title
+                 if clean_href != href:
+                     se_info = sel.select_text("div.flex.gap-1.items-center", item)
+                     if se_info:
+                         title = f"{title} - {se_info}"
+
+                 results.append(MainPageResult(
+                     category = category,
+                     title = title,
+                     url = self.fix_url(clean_href),
+                     poster = self.fix_url(poster) if poster else None
+                 ))
+
+         return results
+
+     async def search(self, query: str) -> list[SearchResult]:
+         await self._init_session()
+
+         post_url = f"{self.main_url}/bg/searchcontent"
+         data = {
+             "cKey" : self.c_key,
+             "cValue" : self.c_value,
+             "searchterm" : query
+         }
+
+         headers = {
+             "X-Requested-With" : "XMLHttpRequest",
+             "Accept" : "application/json, text/javascript, */*; q=0.01",
+             "Referer" : f"{self.main_url}/"
+         }
+
+         resp = await self.httpx.post(post_url, data=data, headers=headers)
+
+         try:
+             raw = resp.json()
+             # Kotlin maps this to ApiResponse -> DataWrapper -> Icerikler
+             res_array = raw.get("data", {}).get("result", [])
+
+             results = []
+             for item in res_array:
+                 title = item.get("object_name", "").replace("\\", "")
+                 slug = item.get("used_slug", "").replace("\\", "")
+                 poster = item.get("object_poster_url", "")
+
+                 # Cleanup poster URL as in Kotlin
+                 if poster:
+                     poster = poster.replace("images-macellan-online.cdn.ampproject.org/i/s/", "") \
+                         .replace("file.dizilla.club", "file.macellan.online") \
+                         .replace("images.dizilla.club", "images.macellan.online") \
+                         .replace("images.dizimia4.com", "images.macellan.online") \
+                         .replace("file.dizimia4.com", "file.macellan.online")
+                     poster = HTMLHelper(poster).regex_replace(r"(file\.)[\w\.]+\/?", r"\1macellan.online/")
+                     poster = HTMLHelper(poster).regex_replace(r"(images\.)[\w\.]+\/?", r"\1macellan.online/")
+                     poster = poster.replace("/f/f/", "/630/910/")
+
+                 if title and slug:
+                     results.append(SearchResult(
+                         title = title,
+                         url = self.fix_url(slug),
+                         poster = self.fix_url(poster) if poster else None
+                     ))
+             return results
+         except Exception:
+             return []
+
+     async def load_item(self, url: str) -> SeriesInfo:
+         resp = await self.httpx.get(url)
+         sel = HTMLHelper(resp.text)
+
+         title = sel.select_text("h2")
+         poster = sel.select_attr("img.rounded-md", "src")
+         description = sel.select_text("div.text-sm")
+
+         year = sel.regex_first(r"Yap\u0131m Y\u0131l\u0131\s*:\s*(\d+)", resp.text)
+
+         tags = []
+         tags_raw = sel.regex_first(r"T\u00fcr\s*:\s*([^<]+)", resp.text)
+         if tags_raw:
+             tags = [t.strip() for t in tags_raw.split(",")]
+
+         rating = sel.select_text(".font-semibold.text-white")
+         if rating:
+             rating = rating.replace(",", ".").strip()
+
+         actors = [a.text(strip=True) for a in sel.select("span.valor a")]
+
+         trailer_match = sel.regex_first(r"embed\/(.*)\?rel", resp.text)
+         trailer = f"https://www.youtube.com/embed/{trailer_match}" if trailer_match else None
+
+         duration_text = sel.select_text("span.runtime")
+         duration = duration_text.split(" ")[0] if duration_text else None
+
+         episodes = []
+         # ul a handles episodes
+         for ep_link in sel.select("ul a"):
+             href = ep_link.attrs.get("href")
+             if not href or "/sezon-" not in href:
+                 continue
+
+             ep_name = sel.select_text("span.hidden.sm\\:block", ep_link)
+
+             season_match = sel.regex_first(r"sezon-(\d+)", href)
+             episode_match = sel.regex_first(r"bolum-(\d+)", href)
+
+             season = season_match if season_match else None
+             episode_num = episode_match if episode_match else None
+
+             episodes.append(Episode(
+                 season = int(season) if season and season.isdigit() else None,
+                 episode = int(episode_num) if episode_num and episode_num.isdigit() else None,
+                 title = ep_name if ep_name else f"{season}x{episode_num}",
+                 url = self.fix_url(href)
+             ))
+
+         return SeriesInfo(
+             title = title,
+             url = url,
+             poster = self.fix_url(poster) if poster else None,
+             description = description,
+             rating = rating,
+             tags = tags,
+             actors = actors,
+             year = year,
+             episodes = episodes,
+             duration = int(duration) if duration and str(duration).isdigit() else None
+         )
+
+     async def load_links(self, url: str) -> list[ExtractResult]:
+         resp = await self.httpx.get(url)
+         sel = HTMLHelper(resp.text)
+
+         iframe = sel.select_attr("iframe", "src")
+         if not iframe:
+             return []
+
+         iframe_url = self.fix_url(iframe)
+         data = await self.extract(iframe_url, referer=f"{self.main_url}/")
+
+         if not data:
+             return []
+
+         return data if isinstance(data, list) else [data]
@@ -248,4 +248,6 @@ class Dizilla(PluginBase):
      return []

  data = await self.extract(iframe_url, referer=f"{self.main_url}/", prefix=first_result.get('language_name', 'Unknown'))
- return [data] if data else []
+ if not data:
+     return []
+ return data if isinstance(data, list) else [data]
@@ -75,9 +75,9 @@ class SezonlukDizi(PluginBase):

  results = []
  for afis in secici.select("div.afis a"):
-     title = secici.select_text("div.description", veri)
-     href = secici.select_attr("a", "href", veri)
-     poster = secici.select_attr("img", "data-src", veri)
+     title = secici.select_text("div.description", afis)
+     href = secici.select_attr("a", "href", afis)
+     poster = secici.select_attr("img", "data-src", afis)

      if title and href:
          results.append(SearchResult(
@@ -159,6 +159,11 @@ class Sinefy(PluginBase):
  actors = [h5.text(strip=True) for h5 in sel.select("div.content h5") if h5.text(strip=True)]

  year = sel.select_text("span.item.year")
+ if not year and title:
+     # Try to extract year from title like "Movie Name(2024)"
+     year_match = sel.regex_first(r"\((\d{4})\)", title)
+     if year_match:
+         year = year_match

  episodes = []
  episodes_box_list = sel.select("section.episodes-box")
@@ -0,0 +1,274 @@
+ # This tool was written by @keyiflerolsun | for @KekikAkademi.
+
+ from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, MovieInfo, Episode, ExtractResult, HTMLHelper
+ import json, asyncio, time
+
+ class YabanciDizi(PluginBase):
+     name = "YabanciDizi"
+     language = "tr"
+     main_url = "https://yabancidizi.so"
+     favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+     description = "Yabancidizi.so platformu üzerinden en güncel yabancı dizileri ve filmleri izleyebilir, favori içeriklerinizi takip edebilirsiniz."
+
+     main_page = {
+         f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwia2F0ZWdvcnkiOlsiMTciXX0=" : "Diziler",
+         f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwia2F0ZWdvcnkiOlsiMTgiXX0=" : "Filmler",
+         f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwiY291bnRyeSI6eyJLUiI6IktSIn19" : "Kdrama",
+         f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwiY291bnRyeSI6eyJKUCI6IkpQIn0sImNhdGVnb3J5IjpbXX0=" : "Jdrama",
+         f"{main_url}/kesfet/eyJvcmRlciI6ImRhdGVfYm90dG9tIiwiY2F0ZWdvcnkiOnsiMyI6IjMifX0=" : "Animasyon",
+     }
+
+     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
+         full_url = url if page == 1 else f"{url}/{page}"
+
+         resp = await self.httpx.get(full_url, headers={"Referer": f"{self.main_url}/"})
+         sel = HTMLHelper(resp.text)
+
+         results = []
+         for item in sel.select("li.mb-lg, li.segment-poster"):
+             title = sel.select_text("h2", item)
+             href = sel.select_attr("a", "href", item)
+             poster = sel.select_attr("img", "src", item)
+             score = sel.select_text("span.rating", item)
+
+             if title and href:
+                 results.append(MainPageResult(
+                     category = category,
+                     title = title,
+                     url = self.fix_url(href),
+                     poster = self.fix_url(poster) if poster else None,
+                 ))
+
+         return results
+
+     async def search(self, query: str) -> list[SearchResult]:
+         search_url = f"{self.main_url}/search?qr={query}"
+
+         headers = {
+             "X-Requested-With" : "XMLHttpRequest",
+             "Referer" : f"{self.main_url}/"
+         }
+
+         resp = await self.httpx.post(search_url, headers=headers)
+
+         try:
+             raw = resp.json()
+             # Kotlin mapping: JsonResponse -> Data -> ResultItem
+             res_array = raw.get("data", {}).get("result", [])
+
+             results = []
+             for item in res_array:
+                 title = item.get("s_name")
+                 image = item.get("s_image")
+                 slug = item.get("s_link")
+                 s_type = item.get("s_type") # 0: series, 1: movie
+
+                 poster = f"{self.main_url}/uploads/series/{image}" if image else None
+
+                 if s_type == "1":
+                     href = f"{self.main_url}/film/{slug}"
+                 else:
+                     href = f"{self.main_url}/dizi/{slug}"
+
+                 if title and slug:
+                     results.append(SearchResult(
+                         title = title,
+                         url = self.fix_url(href),
+                         poster = self.fix_url(poster) if poster else None
+                     ))
+             return results
+         except Exception:
+             return []
+
+     async def load_item(self, url: str) -> SeriesInfo | MovieInfo:
+         resp = await self.httpx.get(url, follow_redirects=True)
+         sel = HTMLHelper(resp.text)
+
+         og_title = sel.select_attr("meta[property='og:title']", "content")
+         title = og_title.split("|")[0].strip() if og_title else sel.select_text("h1")
+
+         poster = sel.select_attr("meta[property='og:image']", "content")
+         description = sel.select_text("p#tv-series-desc")
+
+         year = sel.select_text("td div.truncate")
+         if year:
+             year = year.strip()
+
+         tags = []
+         rating = None
+         duration = None
+         year = None
+         actors = []
+         for item in sel.select("div.item"):
+             text = item.text(strip=True)
+             if "T\u00fcr\u00fc:" in text:
+                 tags = [t.strip() for t in text.replace("T\u00fcr\u00fc:", "").split(",")]
+             elif "IMDb Puan\u0131" in text:
+                 rating = text.replace("IMDb Puan\u0131", "").strip()
+             elif "Yap\u0131m Y\u0131l\u0131" in text:
+                 year_match = sel.regex_first(r"(\d{4})", text)
+                 if year_match:
+                     year = year_match
+             elif "Takip\u00e7iler" in text:
+                 continue
+             elif "S\u00fcre" in text:
+                 dur_match = sel.regex_first(r"(\d+)", text)
+                 if dur_match:
+                     duration = dur_match
+             elif "Oyuncular:" in text:
+                 actors = [a.text(strip=True) for a in sel.select("a", item)]
+
+         if not actors:
+             actors = [a.text(strip=True) for a in sel.select("div#common-cast-list div.item h5")]
+
+         trailer_match = sel.regex_first(r"embed\/(.*)\?rel", resp.text)
+         trailer = f"https://www.youtube.com/embed/{trailer_match}" if trailer_match else None
+
+         if "/film/" in url:
+             return MovieInfo(
+                 title = title,
+                 url = url,
+                 poster = self.fix_url(poster) if poster else None,
+                 description = description,
+                 rating = rating,
+                 tags = tags,
+                 actors = actors,
+                 year = year,
+                 duration = int(duration) if duration and duration.isdigit() else None
+             )
+         else:
+             episodes = []
+             for bolum_item in sel.select("div.episodes-list div.ui td:has(h6)"):
+                 link_el = sel.select_first("a", bolum_item)
+                 if not link_el: continue
+
+                 bolum_href = link_el.attrs.get("href")
+                 bolum_name = sel.select_text("h6", bolum_item) or link_el.text(strip=True)
+
+                 season = sel.regex_first(r"sezon-(\d+)", bolum_href)
+                 episode = sel.regex_first(r"bolum-(\d+)", bolum_href)
+
+                 ep_season = int(season) if season and season.isdigit() else None
+                 ep_episode = int(episode) if episode and episode.isdigit() else None
+
+                 episodes.append(Episode(
+                     season = ep_season,
+                     episode = ep_episode,
+                     title = bolum_name,
+                     url = self.fix_url(bolum_href)
+                 ))
+
+             if episodes and (episodes[0].episode or 0) > (episodes[-1].episode or 0):
+                 episodes.reverse()
+
+             return SeriesInfo(
+                 title = title,
+                 url = url,
+                 poster = self.fix_url(poster) if poster else None,
+                 description = description,
+                 rating = rating,
+                 tags = tags,
+                 actors = actors,
+                 year = year,
+                 episodes = episodes
+             )
+
+     async def load_links(self, url: str) -> list[ExtractResult]:
+         resp = await self.httpx.get(url, headers={"Referer": f"{self.main_url}/"})
+         sel = HTMLHelper(resp.text)
+
+         results = []
+
+         # Method 1: alternatives-for-this
+         for alt in sel.select("div.alternatives-for-this div.item:not(.active)"):
+             data_hash = alt.attrs.get("data-hash")
+             data_link = alt.attrs.get("data-link")
+             q_type = alt.attrs.get("data-querytype")
+
+             if not data_hash or not data_link: continue
+
+             try:
+                 post_resp = await self.httpx.post(
+                     f"{self.main_url}/ajax/service",
+                     data = {
+                         "link" : data_link,
+                         "hash" : data_hash,
+                         "querytype" : q_type,
+                         "type" : "videoGet"
+                     },
+                     headers = {
+                         "X-Requested-With" : "XMLHttpRequest",
+                         "Referer" : f"{self.main_url}/"
+                     },
+                     cookies = {"udys": "1760709729873", "level": "1"}
+                 )
+
+                 service_data = post_resp.json()
+                 api_iframe = service_data.get("api_iframe")
+                 if api_iframe:
+                     extract_res = await self._fetch_and_extract(api_iframe, prefix="Alt")
+                     if extract_res:
+                         results.extend(extract_res if isinstance(extract_res, list) else [extract_res])
+             except Exception:
+                 continue
+
+         # Method 2: pointing[data-eid]
+         for id_el in sel.select("a.ui.pointing[data-eid]"):
+             dil = id_el.text(strip=True)
+             v_lang = "tr" if "Dublaj" in dil else "en"
+             data_eid = id_el.attrs.get("data-eid")
+
+             try:
+                 post_resp = await self.httpx.post(
+                     f"{self.main_url}/ajax/service",
+                     data = {
+                         "e_id" : data_eid,
+                         "v_lang" : v_lang,
+                         "type" : "get_whatwehave"
+                     },
+                     headers = {
+                         "X-Requested-With" : "XMLHttpRequest",
+                         "Referer" : f"{self.main_url}/"
+                     },
+                     cookies = {"udys": "1760709729873", "level": "1"}
+                 )
+
+                 service_data = post_resp.json()
+                 api_iframe = service_data.get("api_iframe")
+                 if api_iframe:
+                     extract_res = await self._fetch_and_extract(api_iframe, prefix=dil)
+                     if extract_res:
+                         results.extend(extract_res if isinstance(extract_res, list) else [extract_res])
+             except Exception:
+                 continue
+
+         return results
+
+     async def _fetch_and_extract(self, iframe_url, prefix=""):
+         # Initial fetch
+         resp = await self.httpx.get(
+             iframe_url,
+             headers = {"Referer": f"{self.main_url}/"},
+             cookies = {"udys": "1760709729873", "level": "1"}
+         )
+
+         # Handle "Lütfen bekleyiniz" check from Kotlin
+         if "Lütfen bekleyiniz" in resp.text:
+             await asyncio.sleep(1)
+             timestamp = int(time.time())
+             # Retry with t=timestamp as in Kotlin
+             sep = "&" if "?" in iframe_url else "?"
+             resp = await self.httpx.get(
+                 f"{iframe_url}{sep}t={timestamp}",
+                 headers = {"Referer": f"{self.main_url}/"},
+                 cookies = resp.cookies # Use cookies from first response
+             )
+
+         sel = HTMLHelper(resp.text)
+         final_iframe = sel.select_attr("iframe", "src")
+
+         if final_iframe:
+             final_url = self.fix_url(final_iframe)
+             return await self.extract(final_url, referer=f"{self.main_url}/", prefix=prefix)
+
+         return None
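_fetch_and_extract() above retries once with a cache-busting t= query parameter when the site answers with its "Lütfen bekleyiniz" ("please wait") interstitial. A standalone sketch of that wait-and-retry idea using httpx directly, for illustration only (KekikStream plugins go through their own self.httpx client instead):

    import asyncio, time
    import httpx

    async def get_past_interstitial(client: httpx.AsyncClient, url: str, marker: str = "Lütfen bekleyiniz") -> httpx.Response:
        resp = await client.get(url)
        if marker in resp.text:
            await asyncio.sleep(1)  # let the interstitial expire
            sep = "&" if "?" in url else "?"
            # retry with a timestamp parameter, reusing cookies from the first response
            resp = await client.get(f"{url}{sep}t={int(time.time())}", cookies=resp.cookies)
        return resp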
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: KekikStream
- Version: 2.3.4
+ Version: 2.3.5
  Summary: terminal üzerinden medya içeriği aramanızı ve VLC/MPV gibi popüler medya oynatıcılar aracılığıyla doğrudan izlemenizi sağlayan modüler ve genişletilebilir bir bıdı bıdı
  Home-page: https://github.com/keyiflerolsun/KekikStream
  Author: keyiflerolsun
@@ -12,13 +12,13 @@ KekikStream/Core/Extractor/ExtractorModels.py,sha256=Qj_gbIeGRewaZXNfYkTi4FFRRq6
  KekikStream/Core/Extractor/YTDLPCache.py,sha256=sRg5kwFxkRXA_8iRwsV29E51g9qQJvg8dWUnzfr7EwA,984
  KekikStream/Core/Media/MediaHandler.py,sha256=MEn3spPAThVloN3WcoCwWhpoyMA7tAZvcwYjmjJsX3U,7678
  KekikStream/Core/Media/MediaManager.py,sha256=AaUq2D7JSJIphjoAj2fjLOJjswm7Qf5hjYCbBdrbnDU,438
- KekikStream/Core/Plugin/PluginBase.py,sha256=53nBWXnzq6KnmmRmqyi5RRBCeUH8Rcwd8CNrM76VzqE,5984
+ KekikStream/Core/Plugin/PluginBase.py,sha256=rTs5XBK10PLo5agaEHQr16ivpo2ZylFwjIUfrwK5V2c,6240
  KekikStream/Core/Plugin/PluginLoader.py,sha256=GcDqN1u3nJeoGKH_oDFHCpwteJlLCxHNbmPrC5L-hZE,3692
  KekikStream/Core/Plugin/PluginManager.py,sha256=CZVg1eegi8vfMfccx0DRV0Box8kXz-aoULTQLgbPbvM,893
  KekikStream/Core/Plugin/PluginModels.py,sha256=Yvx-6Fkn8QCIcuqAkFbCP5EJcq3XBkK_P8S0tRNhS6E,2476
  KekikStream/Core/UI/UIManager.py,sha256=T4V_kdTTWa-UDamgLSKa__dWJuzcvRK9NuwBlzU9Bzc,1693
  KekikStream/Extractors/CloseLoad.py,sha256=qRsiW5SloxWgm6MNUd4DF4vC7aSeyJqD3_0vZoFp7Jc,3176
- KekikStream/Extractors/ContentX.py,sha256=-T2l4qt5T0md2-x87bk8jR9_GB5Fn8v_rGhkmsoOLNc,3578
+ KekikStream/Extractors/ContentX.py,sha256=6-pzHBGrwJeGzeMaPZ5s82RCQZL9MEhHDyI3c4L-xMM,3975
  KekikStream/Extractors/DonilasPlay.py,sha256=-Bhfpp0AN_wNYAnsaWdL--wo8DD2VPblTAlUQIX6HYU,3190
  KekikStream/Extractors/DzenRu.py,sha256=WIUZUIixP4X6TweJHpY86fenRY150ucH2VNImvdxcRc,1213
  KekikStream/Extractors/ExPlay.py,sha256=G2ZmXGcsjpZ5ihtL0ZYkyVO8nPuzSC_8AR0zvED6ScQ,1746
@@ -29,7 +29,7 @@ KekikStream/Extractors/JetTv.py,sha256=2X1vYDQ0hxBTcpnE_XTcbw9tMS1aXFURcobnPdN8Z
  KekikStream/Extractors/MailRu.py,sha256=xQVCWwYqNoG5T43VAW1_m0v4e80FbO-1pNPKkwhTccU,1218
  KekikStream/Extractors/MixPlayHD.py,sha256=u5fUePHfjOI3n7KlNsWhXIv7HA_NMj5bPw1ug-eiXLU,1557
  KekikStream/Extractors/MixTiger.py,sha256=4VbOYgE4s5H-BGVvJI0AI57M-WBWqnek_LGfCFHAucw,2116
- KekikStream/Extractors/MolyStream.py,sha256=R3R_6AwsR4mEUj023m23qMuXFAMd9vXXRZjoXFmd7ic,1142
+ KekikStream/Extractors/MolyStream.py,sha256=SGKr4HdfxDmRk6nPgQUjSbdqFCKWzl7xWxRJtjjFMng,1420
  KekikStream/Extractors/Odnoklassniki.py,sha256=hajKPhWKiIuu_i441TXrWVORpLo2CdTcoJiyU3WQAuI,4038
  KekikStream/Extractors/PeaceMakerst.py,sha256=BJ5Cv5X2GEaMTwn_XFpAVVmts1h5xGno3l5rL7Ugob4,2335
  KekikStream/Extractors/PixelDrain.py,sha256=xPud8W_hqLUXJSU5O-MiCOblcmzrlDJpnEtuxr4ZdI4,1011
@@ -54,8 +54,9 @@ KekikStream/Extractors/YildizKisaFilm.py,sha256=R_JlrOVeMiDlXYcuTdItnKvidyx8_u3B
  KekikStream/Plugins/BelgeselX.py,sha256=smoLjEJTdptjb7h4m6LhG7ZUmJQtIhYyi0CUFBsk970,8696
  KekikStream/Plugins/DiziBox.py,sha256=KZGWhs6p2-hUTsd-fjz2fsmGEkanL4At2PI8qHAoDm4,10541
  KekikStream/Plugins/DiziPal.py,sha256=CTCGlknBUQIzubhvjexQoqiT3sHni34lpxiTLTemCGo,10299
+ KekikStream/Plugins/DiziWatch.py,sha256=b1p9RnGS7hXp7GtpVtFv0AUJOpFJ9tv3faAh6GCGlqQ,8919
  KekikStream/Plugins/DiziYou.py,sha256=ZV80_XHv1nN0wRGgJEtnoJcgFX7S_iVSKFGiFlAqcGQ,11277
- KekikStream/Plugins/Dizilla.py,sha256=5Jwhmo_m8vKcR49RlrWHOrzozk24m8FcEDze3mUCp_E,13740
+ KekikStream/Plugins/Dizilla.py,sha256=apDLGe3Fd-13nNyhcV_TFQxqX4bOZZZxEEGLonKQzS4,13803
  KekikStream/Plugins/FilmBip.py,sha256=pzvleSRZCDHh2tx8Q0JwTFiH9TexNCRnFpr3MCiMb0E,6087
  KekikStream/Plugins/FilmMakinesi.py,sha256=WaCQD7tsZdPbeU35SEnBVRZt2SzUiAQOBRBZR6drvQ4,7797
  KekikStream/Plugins/FilmModu.py,sha256=ou1BrFNR4RQaJdxVqPB5FI8vnQ0UmD-siVdwLnpp7x0,7147
@@ -68,16 +69,17 @@ KekikStream/Plugins/RecTV.py,sha256=E5ZyWU_lqibwcRm9amb_fqdXpc8qdMkekbHVxY3UmuU,
  KekikStream/Plugins/RoketDizi.py,sha256=92c3_UFIhM1SkB0Ybnp53A06VtGw2GmXtr-xmiKeJi0,8444
  KekikStream/Plugins/SelcukFlix.py,sha256=iHLO52_726gzmKAsqgW5ki2_V16fdGAZVjvaqqnQozY,13601
  KekikStream/Plugins/SetFilmIzle.py,sha256=pM4DgR2La9jUacQPRr4dilbfnljcC9l_o1OUipJh3Eg,10418
- KekikStream/Plugins/SezonlukDizi.py,sha256=s0dOd4Nqbj652ewuCQCEATz0BbYRRYyGZ6fliP4ni6M,9751
+ KekikStream/Plugins/SezonlukDizi.py,sha256=h8mIglL2ORUklnAvEwH_5z6tT3WYxiNnbkeIvxtGUTE,9751
  KekikStream/Plugins/SineWix.py,sha256=z0r90lggAugEWE1g9vg8gZsInBObUZPnVFQwq7GYmJs,7052
- KekikStream/Plugins/Sinefy.py,sha256=z3WUb3GveUTqYQbPg8OFkMh6Bme_ErC7qxutk_pVjWw,10874
+ KekikStream/Plugins/Sinefy.py,sha256=ShX13Q-_5KFBobxZufI5V_4zwWvEWfNYuP-g5CkBuww,11099
  KekikStream/Plugins/SinemaCX.py,sha256=11kzAZWgjkitIonDHHiFHMgnViBj-GjyvTXg7k28MLE,7717
  KekikStream/Plugins/Sinezy.py,sha256=fUj-3WaJMEsKZRnDpHFPxl5Eq2RPLroY80DcftLqvjM,5743
  KekikStream/Plugins/SuperFilmGeldi.py,sha256=StW0ue4qDj8p7CiWy19Lfr2aWtfYvslPExZJuR-3xiY,6348
  KekikStream/Plugins/UgurFilm.py,sha256=H6AA2iTaM0fn6uN8_Dfvr-OqUtM9gDdkg0BKIcZEj7U,4930
- kekikstream-2.3.4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- kekikstream-2.3.4.dist-info/METADATA,sha256=LvQkwrruf6NRnDzBmaMOHyxmN94mcbwMsjD4bpkWw2Y,10761
- kekikstream-2.3.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- kekikstream-2.3.4.dist-info/entry_points.txt,sha256=dFwdiTx8djyehI0Gsz-rZwjAfZzUzoBSrmzRu9ubjJc,50
- kekikstream-2.3.4.dist-info/top_level.txt,sha256=DNmGJDXl27Drdfobrak8KYLmocW_uznVYFJOzcjUgmY,12
- kekikstream-2.3.4.dist-info/RECORD,,
+ KekikStream/Plugins/YabanciDizi.py,sha256=r3jusGf1Ufr0O1O04qQLxcxk3raCI3EFs4Z2Jwva2-s,11444
+ kekikstream-2.3.5.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ kekikstream-2.3.5.dist-info/METADATA,sha256=YQHFBYbJDi6E_JjE4BvtW7I9OoWuS4XhsPSu5ezxlVs,10761
+ kekikstream-2.3.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ kekikstream-2.3.5.dist-info/entry_points.txt,sha256=dFwdiTx8djyehI0Gsz-rZwjAfZzUzoBSrmzRu9ubjJc,50
+ kekikstream-2.3.5.dist-info/top_level.txt,sha256=DNmGJDXl27Drdfobrak8KYLmocW_uznVYFJOzcjUgmY,12
+ kekikstream-2.3.5.dist-info/RECORD,,