KekikStream 2.2.9__py3-none-any.whl → 2.5.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of KekikStream might be problematic.
- KekikStream/Core/Extractor/ExtractorBase.py +3 -2
- KekikStream/Core/Extractor/ExtractorLoader.py +8 -14
- KekikStream/Core/HTMLHelper.py +205 -0
- KekikStream/Core/Plugin/PluginBase.py +48 -12
- KekikStream/Core/Plugin/PluginLoader.py +13 -14
- KekikStream/Core/Plugin/PluginManager.py +2 -2
- KekikStream/Core/Plugin/PluginModels.py +0 -3
- KekikStream/Core/__init__.py +2 -0
- KekikStream/Extractors/Abstream.py +27 -0
- KekikStream/Extractors/CloseLoad.py +31 -56
- KekikStream/Extractors/ContentX.py +28 -71
- KekikStream/Extractors/DonilasPlay.py +34 -78
- KekikStream/Extractors/DzenRu.py +11 -25
- KekikStream/Extractors/ExPlay.py +20 -38
- KekikStream/Extractors/Filemoon.py +23 -53
- KekikStream/Extractors/HDMomPlayer.py +30 -0
- KekikStream/Extractors/HDPlayerSystem.py +13 -31
- KekikStream/Extractors/HotStream.py +27 -0
- KekikStream/Extractors/JFVid.py +3 -24
- KekikStream/Extractors/JetTv.py +21 -34
- KekikStream/Extractors/JetV.py +55 -0
- KekikStream/Extractors/MailRu.py +11 -29
- KekikStream/Extractors/MixPlayHD.py +17 -31
- KekikStream/Extractors/MixTiger.py +17 -40
- KekikStream/Extractors/MolyStream.py +25 -22
- KekikStream/Extractors/Odnoklassniki.py +41 -105
- KekikStream/Extractors/PeaceMakerst.py +20 -47
- KekikStream/Extractors/PixelDrain.py +9 -16
- KekikStream/Extractors/PlayerFilmIzle.py +23 -46
- KekikStream/Extractors/RapidVid.py +23 -36
- KekikStream/Extractors/SetPlay.py +19 -44
- KekikStream/Extractors/SetPrime.py +3 -6
- KekikStream/Extractors/SibNet.py +8 -19
- KekikStream/Extractors/Sobreatsesuyp.py +25 -47
- KekikStream/Extractors/TRsTX.py +25 -55
- KekikStream/Extractors/TurboImgz.py +8 -16
- KekikStream/Extractors/TurkeyPlayer.py +5 -5
- KekikStream/Extractors/VCTPlay.py +10 -28
- KekikStream/Extractors/Veev.py +145 -0
- KekikStream/Extractors/VidBiz.py +62 -0
- KekikStream/Extractors/VidHide.py +59 -34
- KekikStream/Extractors/VidMoly.py +67 -89
- KekikStream/Extractors/VidMoxy.py +17 -29
- KekikStream/Extractors/VidPapi.py +26 -58
- KekikStream/Extractors/VideoSeyred.py +21 -42
- KekikStream/Extractors/Videostr.py +58 -0
- KekikStream/Extractors/Vidoza.py +18 -0
- KekikStream/Extractors/Vtbe.py +38 -0
- KekikStream/Extractors/YTDLP.py +2 -2
- KekikStream/Extractors/YildizKisaFilm.py +13 -31
- KekikStream/Extractors/Zeus.py +61 -0
- KekikStream/Plugins/BelgeselX.py +108 -99
- KekikStream/Plugins/DiziBox.py +61 -106
- KekikStream/Plugins/DiziMom.py +179 -0
- KekikStream/Plugins/DiziPal.py +104 -192
- KekikStream/Plugins/DiziYou.py +66 -149
- KekikStream/Plugins/Dizilla.py +93 -126
- KekikStream/Plugins/FilmBip.py +102 -72
- KekikStream/Plugins/FilmEkseni.py +199 -0
- KekikStream/Plugins/FilmMakinesi.py +101 -64
- KekikStream/Plugins/FilmModu.py +35 -59
- KekikStream/Plugins/Filmatek.py +184 -0
- KekikStream/Plugins/FilmciBaba.py +155 -0
- KekikStream/Plugins/FullHDFilmizlesene.py +32 -78
- KekikStream/Plugins/HDFilm.py +243 -0
- KekikStream/Plugins/HDFilmCehennemi.py +261 -222
- KekikStream/Plugins/JetFilmizle.py +117 -98
- KekikStream/Plugins/KultFilmler.py +153 -143
- KekikStream/Plugins/RecTV.py +53 -49
- KekikStream/Plugins/RoketDizi.py +92 -123
- KekikStream/Plugins/SelcukFlix.py +86 -95
- KekikStream/Plugins/SetFilmIzle.py +105 -143
- KekikStream/Plugins/SezonlukDizi.py +106 -128
- KekikStream/Plugins/Sinefy.py +194 -166
- KekikStream/Plugins/SinemaCX.py +159 -113
- KekikStream/Plugins/Sinezy.py +44 -73
- KekikStream/Plugins/SuperFilmGeldi.py +28 -52
- KekikStream/Plugins/UgurFilm.py +94 -72
- KekikStream/Plugins/Watch32.py +160 -0
- KekikStream/Plugins/YabanciDizi.py +250 -0
- {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/METADATA +1 -1
- kekikstream-2.5.3.dist-info/RECORD +99 -0
- {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/WHEEL +1 -1
- KekikStream/Plugins/FullHDFilm.py +0 -254
- kekikstream-2.2.9.dist-info/RECORD +0 -82
- {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/entry_points.txt +0 -0
- {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/licenses/LICENSE +0 -0
- {kekikstream-2.2.9.dist-info → kekikstream-2.5.3.dist-info}/top_level.txt +0 -0
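The most consequential addition in this range is the new KekikStream/Core/HTMLHelper.py (+205 lines): the plugin rewrites shown below replace raw selectolax/HTMLParser and re calls with helper methods such as select, select_text, select_attr, select_texts, select_poster, regex_first, extract_year and extract_season_episode. For orientation only, here is a minimal sketch of what such a wrapper could look like, with signatures inferred from the call sites in the diffs below; this is an assumption, not the published API, and the shipped HTMLHelper is larger and may differ:

    # Hypothetical sketch only; signatures inferred from call sites in the diffs, not the shipped API.
    import re
    from selectolax.parser import HTMLParser

    class HTMLHelperSketch:
        def __init__(self, html: str):
            self.raw  = html
            self.root = HTMLParser(html)

        def select(self, css: str, scope=None) -> list:
            # All nodes matching the CSS selector, optionally scoped to a sub-node.
            return (scope or self.root).css(css)

        def select_text(self, css: str, scope=None) -> str | None:
            node = (scope or self.root).css_first(css)
            return node.text(strip=True) if node else None

        def select_attr(self, css: str, attr: str, scope=None) -> str | None:
            node = (scope or self.root).css_first(css)
            return node.attrs.get(attr) if node else None

        def regex_first(self, pattern: str, text: str | None = None) -> str | None:
            # Search the given text, or the raw document when no text is passed.
            haystack = text if text is not None else self.raw
            match    = re.search(pattern, haystack)
            return (match.group(1) if match.groups() else match.group(0)) if match else None

With a wrapper like this, plugin code reads like the "+" lines below: HTMLHelper(istek.text).select_text(...) instead of manual css_first/attrs/text chains.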
KekikStream/Plugins/SetFilmIzle.py

@@ -1,8 +1,7 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
 
-from KekikStream.Core
-
-import re, json, asyncio
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo, SeriesInfo, Episode, ExtractResult, HTMLHelper
+import asyncio, contextlib
 
 class SetFilmIzle(PluginBase):
     name = "SetFilmIzle"

@@ -34,9 +33,9 @@ class SetFilmIzle(PluginBase):
         f"{main_url}/tur/western/" : "Western"
     }
 
-    def _get_nonce(self, nonce_type: str = "
-        """Site cache'lenmiş nonce'ları expire olabiliyor, fresh nonce al"""
-
+    def _get_nonce(self, nonce_type: str = "video", referer: str = None) -> str:
+        """Site cache'lenmiş nonce'ları expire olabiliyor, fresh nonce al veya sayfadan çek"""
+        with contextlib.suppress(Exception):
             resp = self.cloudscraper.post(
                 f"{self.main_url}/wp-admin/admin-ajax.php",
                 headers = {

@@ -46,31 +45,36 @@ class SetFilmIzle(PluginBase):
                 },
                 data = "action=st_cache_refresh_nonces"
             )
-
-
-
-
+            data = resp.json()
+            if data and data.get("success"):
+                nonces = data.get("data", {}).get("nonces", {})
+                return nonces.get(nonce_type if nonce_type != "search" else "dt_ajax_search", "")
+
+        # AJAX başarısızsa sayfadan çekmeyi dene
+        with contextlib.suppress(Exception):
+            main_resp = self.cloudscraper.get(referer or self.main_url)
+            # STMOVIE_AJAX = { ... nonces: { search: "...", ... } }
+            nonce = HTMLHelper(main_resp.text).regex_first(rf'"{nonce_type}":\s*"([^"]+)"')
+            return nonce or ""
+
+        return ""
 
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         istek = self.cloudscraper.get(url)
-        secici =
+        secici = HTMLHelper(istek.text)
 
         results = []
-        for item in secici.
-
-
-
-
-            title = title_el.text(strip=True) if title_el else None
-            href = link_el.attrs.get("href") if link_el else None
-            poster = img_el.attrs.get("data-src") if img_el else None
+        for item in secici.select("div.items article"):
+            title = secici.select_text("h2", item)
+            href = secici.select_attr("a", "href", item)
+            poster = secici.select_attr("img", "data-src", item)
 
             if title and href:
                 results.append(MainPageResult(
                     category = category,
                     title = title,
                     url = self.fix_url(href),
-                    poster = self.fix_url(poster)
+                    poster = self.fix_url(poster)
                 ))
 
         return results

@@ -99,137 +103,65 @@ class SetFilmIzle(PluginBase):
         except:
             return []
 
-        secici =
-        results = []
+        secici = HTMLHelper(html)
 
-
-
-
-
-
-            title = title_el.text(strip=True) if title_el else None
-            href = link_el.attrs.get("href") if link_el else None
-            poster = img_el.attrs.get("data-src") if img_el else None
+        results = []
+        for item in secici.select("div.items article"):
+            title = secici.select_text("h2", item)
+            href = secici.select_attr("a", "href", item)
+            poster = secici.select_attr("img", "data-src", item)
 
             if title and href:
                 results.append(SearchResult(
                     title = title,
                     url = self.fix_url(href),
-                    poster = self.fix_url(poster)
+                    poster = self.fix_url(poster)
                 ))
 
         return results
 
     async def load_item(self, url: str) -> MovieInfo | SeriesInfo:
-        istek =
-        secici =
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            year_match = re.search(r"\d{4}", year_text)
-            year = year_match.group() if year_match else None
-
-        tags = [a.text(strip=True) for a in secici.css("div.sgeneros a") if a.text(strip=True)]
-
-        duration_el = secici.css_first("span.runtime")
-        duration = None
-        if duration_el:
-            duration_text = duration_el.text(strip=True)
-            dur_match = re.search(r"\d+", duration_text)
-            duration = int(dur_match.group()) if dur_match else None
-
-        actors = [span.text(strip=True) for span in secici.css("span.valor a > span") if span.text(strip=True)]
-
-        trailer_match = re.search(r'embed/([^?]*)\?rel', html_text)
-        trailer = f"https://www.youtube.com/embed/{trailer_match.group(1)}" if trailer_match else None
-
-        # Dizi mi film mi kontrol et
-        is_series = "/dizi/" in url
-
-        if is_series:
-            year_link_el = secici.css_first("a[href*='/yil/']")
-            if year_link_el:
-                year_elem = year_link_el.text(strip=True)
-                year_match = re.search(r"\d{4}", year_elem)
-                year = year_match.group() if year_match else year
-
-            # Duration from info section
-            for span in secici.css("div#info span"):
-                span_text = span.text(strip=True) if span.text() else ""
-                if "Dakika" in span_text:
-                    dur_match = re.search(r"\d+", span_text)
-                    duration = int(dur_match.group()) if dur_match else duration
-                    break
+        istek = self.cloudscraper.get(url)
+        secici = HTMLHelper(istek.text)
+
+        title = self.clean_title(secici.select_text("h1") or secici.select_text(".titles h1") or secici.select_attr("meta[property='og:title']", "content"))
+        poster = secici.select_poster("div.poster img")
+        description = secici.select_text("div.wp-content p")
+        rating = secici.select_text("b#repimdb strong") or secici.regex_first(r"([\d.]+)", secici.select_text("div.imdb"))
+        year = secici.extract_year("div.extra span.valor")
+        tags = secici.select_texts("div.sgeneros a")
+        duration = int(secici.regex_first(r"(\d+)", secici.select_text("span.runtime")) or 0)
+        actors = secici.select_texts("span.valor a[href*='/oyuncu/']")
+
+        common_info = {
+            "url" : url,
+            "poster" : self.fix_url(poster),
+            "title" : title,
+            "description" : description,
+            "tags" : tags,
+            "rating" : rating,
+            "year" : year,
+            "duration" : duration,
+            "actors" : actors
+        }
 
+        if "/dizi/" in url:
             episodes = []
-            for ep_item in secici.
-
-
-
-
-
-
-
-                ep_detail = ep_name
-                season_match = re.search(r"(\d+)\.\s*Sezon", ep_detail)
-                episode_match = re.search(r"Sezon\s+(\d+)\.\s*Bölüm", ep_detail)
-
-                ep_season = int(season_match.group(1)) if season_match else 1
-                ep_episode = int(episode_match.group(1)) if episode_match else None
-
-                episodes.append(Episode(
-                    season = ep_season,
-                    episode = ep_episode,
-                    title = ep_name,
-                    url = self.fix_url(ep_href)
-                ))
+            for ep_item in secici.select("div#episodes ul.episodios li"):
+                href = secici.select_attr("h4.episodiotitle a", "href", ep_item)
+                name = secici.select_direct_text("h4.episodiotitle a", ep_item)
+                if href and name:
+                    s, e = secici.extract_season_episode(name)
+                    episodes.append(Episode(season=s or 1, episode=e or 1, title=name, url=self.fix_url(href)))
+            return SeriesInfo(**common_info, episodes=episodes)
 
-
-                url = url,
-                poster = self.fix_url(poster) if poster else None,
-                title = title,
-                description = description,
-                tags = tags,
-                year = year,
-                duration = duration,
-                actors = actors,
-                episodes = episodes
-            )
-
-        return MovieInfo(
-            url = url,
-            poster = self.fix_url(poster) if poster else None,
-            title = title,
-            description = description,
-            tags = tags,
-            year = year,
-            duration = duration,
-            actors = actors
-        )
+        return MovieInfo(**common_info)
 
     async def load_links(self, url: str) -> list[ExtractResult]:
         istek = await self.httpx.get(url)
-        secici =
+        secici = HTMLHelper(istek.text)
 
-        nonce = secici.
+        nonce = secici.select_attr("div#playex", "data-nonce") or ""
 
         # partKey to dil label mapping
         part_key_labels = {

@@ -241,14 +173,14 @@ class SetFilmIzle(PluginBase):
         semaphore = asyncio.Semaphore(5)
         tasks = []
 
-        async def fetch_and_extract(player):
+        async def fetch_and_extract(player) -> list[ExtractResult]:
             async with semaphore:
                 source_id = player.attrs.get("data-post-id")
-                player_name = player.attrs.get("data-player-name")
+                player_name = player.attrs.get("data-player-name") or secici.select_text("b", player)
                 part_key = player.attrs.get("data-part-key")
 
                 if not source_id or "event" in source_id or source_id == "":
-                    return
+                    return []
 
                 try:
                     resp = self.cloudscraper.post(

@@ -258,17 +190,17 @@ class SetFilmIzle(PluginBase):
                             "action" : "get_video_url",
                             "nonce" : nonce,
                             "post_id" : source_id,
-                            "player_name" :
+                            "player_name" : player.attrs.get("data-player-name") or "",
                             "part_key" : part_key or ""
                         }
                     )
                     data = resp.json()
                 except:
-                    return
+                    return []
 
                 iframe_url = data.get("data", {}).get("url")
                 if not iframe_url:
-                    return
+                    return []
 
                 if "setplay" not in iframe_url and part_key:
                     iframe_url = f"{iframe_url}?partKey={part_key}"

@@ -277,10 +209,40 @@ class SetFilmIzle(PluginBase):
                 if not label and part_key:
                     label = part_key.replace("_", " ").title()
 
-
+                # İsimlendirme Formatı: "FastPlay | Türkçe Dublaj"
+                final_name = player_name
+                if label:
+                    final_name = f"{final_name} | {label}" if final_name else label
+
+                # Extract et
+                extracted = await self.extract(iframe_url)
+                if not extracted:
+                    return []
 
-
+                results = []
+                items = extracted if isinstance(extracted, list) else [extracted]
+                for item in items:
+                    if final_name:
+                        item.name = final_name
+                    results.append(item)
+
+                return results
+
+        # Selector Güncellemesi: data-player-name içeren tüm a tagleri
+        players = secici.select("a[data-player-name]")
+        if not players:
+            # Fallback legacy selector
+            players = secici.select("nav.player a")
+
+        for player in players:
             tasks.append(fetch_and_extract(player))
 
-
-
+        results_groups = await asyncio.gather(*tasks)
+
+        # Flatten
+        final_results = []
+        for group in results_groups:
+            if group:
+                final_results.extend(group)
+
+        return final_results
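Beyond the selector rewrite, load_links above changes its concurrency shape: each fetch_and_extract now returns a list of ExtractResult objects instead of returning None on failure, the workers run under asyncio.Semaphore(5), and the gathered groups are flattened at the end. A standalone sketch of that pattern, with illustrative names that are not part of the package:

    # Bounded-concurrency gather-and-flatten pattern, as used by load_links above.
    # gather_limited, bounded and worker are illustrative names, not KekikStream API.
    import asyncio

    async def gather_limited(items, worker, limit: int = 5) -> list:
        semaphore = asyncio.Semaphore(limit)

        async def bounded(item) -> list:
            async with semaphore:               # at most `limit` workers run concurrently
                try:
                    return await worker(item)   # each worker returns a (possibly empty) list
                except Exception:
                    return []                   # a failed worker contributes nothing

        groups = await asyncio.gather(*(bounded(item) for item in items))

        flat = []
        for group in groups:                    # flatten the per-worker lists
            if group:
                flat.extend(group)
        return flat

    async def demo(n: int) -> list[int]:
        await asyncio.sleep(0)
        return [n, n * 10]

    # asyncio.run(gather_limited(range(3), demo))  ->  [0, 0, 1, 10, 2, 20]

The same pattern appears in SezonlukDizi.load_links below.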
KekikStream/Plugins/SezonlukDizi.py

@@ -1,8 +1,7 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
 
-from KekikStream.Core
-
-import re, asyncio
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, ExtractResult, HTMLHelper
+import asyncio, contextlib
 
 class SezonlukDizi(PluginBase):
     name = "SezonlukDizi"

@@ -41,135 +40,92 @@ class SezonlukDizi(PluginBase):
 
     async def _get_asp_data(self) -> dict:
         js_req = await self.httpx.get(f"{self.main_url}/js/site.min.js")
-
-
-
+        js = HTMLHelper(js_req.text)
+        alt = js.regex_first(r"dataAlternatif(.*?)\.asp")
+        emb = js.regex_first(r"dataEmbed(.*?)\.asp")
+
         return {
-            "alternatif":
-            "embed":
+            "alternatif": alt or "",
+            "embed": emb or ""
         }
 
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         istek = await self.httpx.get(f"{url}{page}")
-        secici =
+        secici = HTMLHelper(istek.text)
 
         results = []
-        for veri in secici.
-
-
-
-            title = desc_el.text(strip=True) if desc_el else None
-            href = veri.attrs.get("href")
-            poster = img_el.attrs.get("data-src") if img_el else None
+        for veri in secici.select("div.afis a"):
+            title = secici.select_text("div.description", veri)
+            href = secici.select_attr("a", "href", veri)
+            poster = secici.select_attr("img", "data-src", veri)
 
             if title and href:
                 results.append(MainPageResult(
                     category = category,
                     title = title,
                     url = self.fix_url(href),
-                    poster = self.fix_url(poster)
+                    poster = self.fix_url(poster),
                 ))
 
         return results
 
     async def search(self, query: str) -> list[SearchResult]:
-        istek = await self.httpx.get(f"{self.main_url}/diziler.asp?
-        secici =
+        istek = await self.httpx.get(f"{self.main_url}/diziler.asp?q={query}")
+        secici = HTMLHelper(istek.text)
 
         results = []
-        for afis in secici.
-
-
-
-            title = desc_el.text(strip=True) if desc_el else None
-            href = afis.attrs.get("href")
-            poster = img_el.attrs.get("data-src") if img_el else None
+        for afis in secici.select("div.afis a"):
+            title = secici.select_text("div.description", afis)
+            href = secici.select_attr("a", "href", afis)
+            poster = secici.select_attr("img", "data-src", afis)
 
             if title and href:
                 results.append(SearchResult(
                     title = title,
                     url = self.fix_url(href),
-                    poster = self.fix_url(poster)
+                    poster = self.fix_url(poster),
                 ))
 
         return results
 
     async def load_item(self, url: str) -> SeriesInfo:
         istek = await self.httpx.get(url)
-        secici =
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            if header_el and header_el.text(strip=True):
-                actors.append(header_el.text(strip=True))
-
-        episodes_istek = await self.httpx.get(f"{self.main_url}/bolumler/{url.split('/')[-1]}")
-        episodes_secici = HTMLParser(episodes_istek.text)
-        episodes = []
-
-        for sezon in episodes_secici.css("table.unstackable"):
-            for bolum in sezon.css("tbody tr"):
-                # td:nth-of-type selectolax'ta desteklenmiyor, alternatif yol: tüm td'leri alıp indexle
-                tds = bolum.css("td")
-                if len(tds) < 4:
-                    continue
-
-                # 4. td'den isim ve href
-                ep_name_el = tds[3].css_first("a")
-                ep_name = ep_name_el.text(strip=True) if ep_name_el else None
-                ep_href = ep_name_el.attrs.get("href") if ep_name_el else None
-
-                # 3. td'den episode (re_first yerine re.search)
-                ep_episode_el = tds[2].css_first("a")
-                ep_episode_text = ep_episode_el.text(strip=True) if ep_episode_el else ""
-                ep_episode_match = re.search(r"(\d+)", ep_episode_text)
-                ep_episode = ep_episode_match.group(1) if ep_episode_match else None
-
-                # 2. td'den season (re_first yerine re.search)
-                ep_season_text = tds[1].text(strip=True) if tds[1] else ""
-                ep_season_match = re.search(r"(\d+)", ep_season_text)
-                ep_season = ep_season_match.group(1) if ep_season_match else None
-
-                if ep_name and ep_href:
-                    episode = Episode(
-                        season = ep_season,
-                        episode = ep_episode,
-                        title = ep_name,
-                        url = self.fix_url(ep_href),
-                    )
-                    episodes.append(episode)
+        secici = HTMLHelper(istek.text)
+
+        title = secici.select_text("div.header") or ""
+        poster = secici.select_poster("div.image img")
+        year = secici.extract_year("div.extra span")
+        description = secici.select_text("span#tartismayorum-konu")
+        tags = secici.select_texts("div.labels a[href*='tur']")
+        rating = secici.regex_first(r"[\d.,]+", secici.select_text("div.dizipuani a div"))
+
+        # Actors extraction
+        id_slug = url.split('/')[-1]
+        a_resp = await self.httpx.get(f"{self.main_url}/oyuncular/{id_slug}")
+        a_sel = HTMLHelper(a_resp.text)
+        actors = a_sel.select_texts("div.doubling div.ui div.header")
+
+        # Episodes extraction
+        e_resp = await self.httpx.get(f"{self.main_url}/bolumler/{id_slug}")
+        e_sel = HTMLHelper(e_resp.text)
+        episodes = []
+        for row in e_sel.select("table.unstackable tbody tr"):
+            tds = e_sel.select("td", row)
+            if len(tds) >= 4:
+                name = e_sel.select_text("a", tds[3])
+                href = e_sel.select_attr("a", "href", tds[3])
+                if name and href:
+                    s, e = e_sel.extract_season_episode(f"{tds[1].text(strip=True)} {tds[2].text(strip=True)}")
+                    episodes.append(Episode(
+                        season = s or 1,
+                        episode = e or 1,
+                        title = name,
+                        url = self.fix_url(href)
+                    ))
 
         return SeriesInfo(
             url = url,
-            poster = self.fix_url(poster)
+            poster = self.fix_url(poster),
             title = title,
             description = description,
             tags = tags,

@@ -180,53 +136,75 @@ class SezonlukDizi(PluginBase):
         )
 
     async def load_links(self, url: str) -> list[ExtractResult]:
-        istek
-        secici
+        istek = await self.httpx.get(url)
+        secici = HTMLHelper(istek.text)
         asp_data = await self._get_asp_data()
-
-        bid = secici.
+
+        bid = secici.select_attr("div#dilsec", "data-id")
         if not bid:
             return []
 
         semaphore = asyncio.Semaphore(5)
         tasks = []
 
-        async def fetch_and_extract(veri, dil_etiketi):
+        async def fetch_and_extract(veri, dil_etiketi) -> list[ExtractResult]:
             async with semaphore:
                 try:
                     embed_resp = await self.httpx.post(
-                        f"{self.main_url}/ajax/dataEmbed{asp_data['embed']}.asp",
+                        url = f"{self.main_url}/ajax/dataEmbed{asp_data['embed']}.asp",
                         headers = {"X-Requested-With": "XMLHttpRequest"},
                         data = {"id": str(veri.get("id"))}
                     )
-                    embed_secici =
-
-
-
-
-
-
-
-
-
-
-
-
+                    embed_secici = HTMLHelper(embed_resp.text)
+                    iframe_src = embed_secici.select_attr("iframe", "src") or embed_secici.regex_first(r'src="(.*?)"')
+
+                    if not iframe_src:
+                        return []
+
+                    iframe_url = self.fix_url(iframe_src)
+
+                    real_url = iframe_url
+                    if "url=" in iframe_url:
+                        real_url = HTMLHelper(iframe_url).regex_first(r"url=([^&]+)")
+                        if real_url:
+                            real_url = self.fix_url(real_url)
+
+                    source_name = veri.get('baslik') or "SezonlukDizi"
+                    full_name = f"{dil_etiketi} - {source_name}"
+
+                    extracted = await self.extract(real_url, referer=f"{self.main_url}/")
+
+                    if not extracted:
+                        return []
+
+                    results = []
+                    items = extracted if isinstance(extracted, list) else [extracted]
+                    for item in items:
+                        item.name = full_name
+                        results.append(item)
+                    return results
+
+                except Exception:
+                    return []
 
         for dil_kodu, dil_etiketi in [("1", "Altyazı"), ("0", "Dublaj")]:
-
-
-
-
-
-
-
+            with contextlib.suppress(Exception):
+                altyazi_resp = await self.httpx.post(
+                    url = f"{self.main_url}/ajax/dataAlternatif{asp_data['alternatif']}.asp",
+                    headers = {"X-Requested-With": "XMLHttpRequest"},
+                    data = {"bid": bid, "dil": dil_kodu}
+                )
+
                 data_json = altyazi_resp.json()
                 if data_json.get("status") == "success" and data_json.get("data"):
                     for veri in data_json["data"]:
                         tasks.append(fetch_and_extract(veri, dil_etiketi))
-            except:
-                continue
 
-
-
+        results_groups = await asyncio.gather(*tasks)
+
+        final_results = []
+        for group in results_groups:
+            if group:
+                final_results.extend(group)
+
+        return final_results