KekikStream 2.4.8__py3-none-any.whl → 2.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of KekikStream might be problematic.
- KekikStream/Core/Plugin/PluginBase.py +8 -2
- KekikStream/Extractors/Abstream.py +27 -0
- KekikStream/Extractors/Filemoon.py +2 -1
- KekikStream/Extractors/JetV.py +55 -0
- KekikStream/Extractors/Veev.py +145 -0
- KekikStream/Extractors/VidBiz.py +62 -0
- KekikStream/Extractors/VidHide.py +56 -7
- KekikStream/Extractors/Vtbe.py +38 -0
- KekikStream/Extractors/Zeus.py +61 -0
- KekikStream/Plugins/HDFilmCehennemi.py +37 -14
- KekikStream/Plugins/JetFilmizle.py +82 -49
- KekikStream/Plugins/KultFilmler.py +136 -68
- KekikStream/Plugins/RecTV.py +9 -5
- KekikStream/Plugins/RoketDizi.py +13 -22
- KekikStream/Plugins/SetFilmIzle.py +47 -19
- KekikStream/Plugins/SezonlukDizi.py +62 -34
- KekikStream/Plugins/Sinefy.py +172 -81
- KekikStream/Plugins/SinemaCX.py +138 -62
- KekikStream/Plugins/Sinezy.py +15 -16
- KekikStream/Plugins/SuperFilmGeldi.py +3 -3
- KekikStream/Plugins/UgurFilm.py +71 -24
- KekikStream/Plugins/Watch32.py +38 -53
- KekikStream/Plugins/YabanciDizi.py +158 -139
- {kekikstream-2.4.8.dist-info → kekikstream-2.5.0.dist-info}/METADATA +1 -1
- {kekikstream-2.4.8.dist-info → kekikstream-2.5.0.dist-info}/RECORD +29 -23
- {kekikstream-2.4.8.dist-info → kekikstream-2.5.0.dist-info}/WHEEL +0 -0
- {kekikstream-2.4.8.dist-info → kekikstream-2.5.0.dist-info}/entry_points.txt +0 -0
- {kekikstream-2.4.8.dist-info → kekikstream-2.5.0.dist-info}/licenses/LICENSE +0 -0
- {kekikstream-2.4.8.dist-info → kekikstream-2.5.0.dist-info}/top_level.txt +0 -0
KekikStream/Plugins/SetFilmIzle.py
CHANGED

@@ -1,7 +1,7 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core
-import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo, SeriesInfo, Episode, ExtractResult, HTMLHelper
+import asyncio, contextlib

 class SetFilmIzle(PluginBase):
     name = "SetFilmIzle"
@@ -35,7 +35,7 @@ class SetFilmIzle(PluginBase):

     def _get_nonce(self, nonce_type: str = "video", referer: str = None) -> str:
         """Site cache'lenmiş nonce'ları expire olabiliyor, fresh nonce al veya sayfadan çek"""
-
+        with contextlib.suppress(Exception):
             resp = self.cloudscraper.post(
                 f"{self.main_url}/wp-admin/admin-ajax.php",
                 headers = {
@@ -49,17 +49,15 @@ class SetFilmIzle(PluginBase):
             if data and data.get("success"):
                 nonces = data.get("data", {}).get("nonces", {})
                 return nonces.get(nonce_type if nonce_type != "search" else "dt_ajax_search", "")
-        except:
-            pass

         # AJAX başarısızsa sayfadan çekmeyi dene
-
+        with contextlib.suppress(Exception):
             main_resp = self.cloudscraper.get(referer or self.main_url)
             # STMOVIE_AJAX = { ... nonces: { search: "...", ... } }
             nonce = HTMLHelper(main_resp.text).regex_first(rf'"{nonce_type}":\s*"([^"]+)"')
             return nonce or ""
-
-
+
+        return ""

     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         istek = self.cloudscraper.get(url)
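Note: the reworked _get_nonce first asks admin-ajax.php for a fresh nonce and, if that fails, falls back to scraping the nonce out of the page's inline STMOVIE_AJAX config with a regex. Below is a minimal standalone sketch of that regex fallback; the sample HTML string and the nonce_from_page helper are illustrative, only the key names follow the comment in the diff.

import re, contextlib

# Illustrative page source; the real page embeds something like
# STMOVIE_AJAX = { ..., "nonces": { "video": "...", "dt_ajax_search": "..." } }
html = '<script>var STMOVIE_AJAX = {"nonces": {"video": "a1b2c3", "dt_ajax_search": "d4e5f6"}};</script>'

def nonce_from_page(page_html: str, nonce_type: str = "video") -> str:
    with contextlib.suppress(Exception):
        if match := re.search(rf'"{nonce_type}":\s*"([^"]+)"', page_html):
            return match.group(1)
    return ""

print(nonce_from_page(html))             # a1b2c3
print(nonce_from_page(html, "missing"))  # "" -> caller ends up with an empty nonce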
@@ -106,8 +104,8 @@
             return []

         secici = HTMLHelper(html)
-        results = []

+        results = []
         for item in secici.select("div.items article"):
             title = secici.select_text("h2", item)
             href = secici.select_attr("a", "href", item)
@@ -175,14 +173,14 @@
         semaphore = asyncio.Semaphore(5)
         tasks = []

-        async def fetch_and_extract(player):
+        async def fetch_and_extract(player) -> list[ExtractResult]:
             async with semaphore:
                 source_id = player.attrs.get("data-post-id")
-                player_name = player.attrs.get("data-player-name")
+                player_name = player.attrs.get("data-player-name") or secici.select_text("b", player)
                 part_key = player.attrs.get("data-part-key")

                 if not source_id or "event" in source_id or source_id == "":
-                    return
+                    return []

                 try:
                     resp = self.cloudscraper.post(
@@ -192,17 +190,17 @@
                             "action" : "get_video_url",
                             "nonce" : nonce,
                             "post_id" : source_id,
-                            "player_name" :
+                            "player_name" : player.attrs.get("data-player-name") or "",
                             "part_key" : part_key or ""
                         }
                     )
                     data = resp.json()
                 except:
-                    return
+                    return []

                 iframe_url = data.get("data", {}).get("url")
                 if not iframe_url:
-                    return
+                    return []

                 if "setplay" not in iframe_url and part_key:
                     iframe_url = f"{iframe_url}?partKey={part_key}"
@@ -211,10 +209,40 @@
                 if not label and part_key:
                     label = part_key.replace("_", " ").title()

-
+                # İsimlendirme Formatı: "FastPlay | Türkçe Dublaj"
+                final_name = player_name
+                if label:
+                    final_name = f"{final_name} | {label}" if final_name else label

-
+                # Extract et
+                extracted = await self.extract(iframe_url)
+                if not extracted:
+                    return []
+
+                results = []
+                items = extracted if isinstance(extracted, list) else [extracted]
+                for item in items:
+                    if final_name:
+                        item.name = final_name
+                    results.append(item)
+
+                return results
+
+        # Selector Güncellemesi: data-player-name içeren tüm a tagleri
+        players = secici.select("a[data-player-name]")
+        if not players:
+            # Fallback legacy selector
+            players = secici.select("nav.player a")
+
+        for player in players:
             tasks.append(fetch_and_extract(player))

-
-
+        results_groups = await asyncio.gather(*tasks)
+
+        # Flatten
+        final_results = []
+        for group in results_groups:
+            if group:
+                final_results.extend(group)
+
+        return final_results
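Note: load_links in SetFilmIzle now has every per-player coroutine return a list[ExtractResult] instead of returning None, runs the coroutines under an asyncio.Semaphore(5), and flattens the gathered groups at the end. The sketch below shows that bounded-gather-then-flatten shape in isolation; all names are illustrative and a sleep stands in for the HTTP request and extractor call.

import asyncio

async def fetch_one(item: int, semaphore: asyncio.Semaphore) -> list[str]:
    async with semaphore:          # at most 5 workers run concurrently
        await asyncio.sleep(0.01)  # stands in for the HTTP + extract step
        return [] if item % 2 else [f"result-{item}"]

async def fetch_all(items: list[int]) -> list[str]:
    semaphore = asyncio.Semaphore(5)
    groups = await asyncio.gather(*(fetch_one(i, semaphore) for i in items))
    return [result for group in groups if group for result in group]  # flatten

if __name__ == "__main__":
    print(asyncio.run(fetch_all(list(range(10)))))  # ['result-0', 'result-2', ...]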
KekikStream/Plugins/SezonlukDizi.py
CHANGED

@@ -1,7 +1,7 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core
-import asyncio
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, ExtractResult, HTMLHelper
+import asyncio, contextlib

 class SezonlukDizi(PluginBase):
     name = "SezonlukDizi"
@@ -40,10 +40,10 @@ class SezonlukDizi(PluginBase):

     async def _get_asp_data(self) -> dict:
         js_req = await self.httpx.get(f"{self.main_url}/js/site.min.js")
-        js
-        alt
-        emb
-
+        js = HTMLHelper(js_req.text)
+        alt = js.regex_first(r"dataAlternatif(.*?)\.asp")
+        emb = js.regex_first(r"dataEmbed(.*?)\.asp")
+
         return {
             "alternatif": alt or "",
             "embed": emb or ""
@@ -116,7 +116,12 @@
                 href = e_sel.select_attr("a", "href", tds[3])
                 if name and href:
                     s, e = e_sel.extract_season_episode(f"{tds[1].text(strip=True)} {tds[2].text(strip=True)}")
-                    episodes.append(Episode(
+                    episodes.append(Episode(
+                        season = s or 1,
+                        episode = e or 1,
+                        title = name,
+                        url = self.fix_url(href)
+                    ))

         return SeriesInfo(
             url = url,
@@ -131,10 +136,10 @@
         )

     async def load_links(self, url: str) -> list[ExtractResult]:
-        istek
-        secici
+        istek = await self.httpx.get(url)
+        secici = HTMLHelper(istek.text)
         asp_data = await self._get_asp_data()
-
+
         bid = secici.select_attr("div#dilsec", "data-id")
         if not bid:
             return []
@@ -142,41 +147,64 @@
         semaphore = asyncio.Semaphore(5)
         tasks = []

-        async def fetch_and_extract(veri, dil_etiketi):
+        async def fetch_and_extract(veri, dil_etiketi) -> list[ExtractResult]:
             async with semaphore:
                 try:
                     embed_resp = await self.httpx.post(
-                        f"{self.main_url}/ajax/dataEmbed{asp_data['embed']}.asp",
+                        url = f"{self.main_url}/ajax/dataEmbed{asp_data['embed']}.asp",
                         headers = {"X-Requested-With": "XMLHttpRequest"},
                         data = {"id": str(veri.get("id"))}
                     )
                     embed_secici = HTMLHelper(embed_resp.text)
-                    iframe_src
-
-                    if iframe_src:
-
-
-
-
-
-
-
-
+                    iframe_src = embed_secici.select_attr("iframe", "src") or embed_secici.regex_first(r'src="(.*?)"')
+
+                    if not iframe_src:
+                        return []
+
+                    iframe_url = self.fix_url(iframe_src)
+
+                    real_url = iframe_url
+                    if "url=" in iframe_url:
+                        real_url = HTMLHelper(iframe_url).regex_first(r"url=([^&]+)")
+                        if real_url:
+                            real_url = self.fix_url(real_url)
+
+                    source_name = veri.get('baslik') or "SezonlukDizi"
+                    full_name = f"{dil_etiketi} - {source_name}"
+
+                    extracted = await self.extract(real_url, referer=f"{self.main_url}/")
+
+                    if not extracted:
+                        return []
+
+                    results = []
+                    items = extracted if isinstance(extracted, list) else [extracted]
+                    for item in items:
+                        item.name = full_name
+                        results.append(item)
+                    return results
+
+                except Exception:
+                    return []

         for dil_kodu, dil_etiketi in [("1", "Altyazı"), ("0", "Dublaj")]:
-
-
-
-
-
-
-
+            with contextlib.suppress(Exception):
+                altyazi_resp = await self.httpx.post(
+                    url = f"{self.main_url}/ajax/dataAlternatif{asp_data['alternatif']}.asp",
+                    headers = {"X-Requested-With": "XMLHttpRequest"},
+                    data = {"bid": bid, "dil": dil_kodu}
+                )
+
                 data_json = altyazi_resp.json()
                 if data_json.get("status") == "success" and data_json.get("data"):
                     for veri in data_json["data"]:
                         tasks.append(fetch_and_extract(veri, dil_etiketi))
-            except:
-                continue

-
-
+        results_groups = await asyncio.gather(*tasks)
+
+        final_results = []
+        for group in results_groups:
+            if group:
+                final_results.extend(group)
+
+        return final_results
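Note: a recurring change in this release (SetFilmIzle, SezonlukDizi, Sinefy) is replacing bare try/except: pass blocks with contextlib.suppress(Exception). The two functions below behave identically; the dictionary key and the helper names are made up purely for the comparison.

import contextlib

def old_style(data: dict) -> str:
    value = ""
    try:
        value = data["nonce"].strip()
    except Exception:
        pass
    return value

def new_style(data: dict) -> str:
    value = ""
    with contextlib.suppress(Exception):
        value = data["nonce"].strip()
    return value

assert old_style({}) == new_style({}) == ""
assert old_style({"nonce": " abc "}) == new_style({"nonce": " abc "}) == "abc"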
KekikStream/Plugins/Sinefy.py
CHANGED
@@ -1,7 +1,7 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core
-import json,
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, MovieInfo, ExtractResult, HTMLHelper
+import json, contextlib, asyncio

 class Sinefy(PluginBase):
     name = "Sinefy"
@@ -42,14 +42,14 @@ class Sinefy(PluginBase):
         else:
             full_url = f"{url}&page={page}"

-
-
+        istek = await self.httpx.get(full_url)
+        secici = HTMLHelper(istek.text)

         results = []
-        for item in
-            title =
-            href =
-            poster =
+        for item in secici.select("div.poster-with-subject, div.dark-segment div.poster-md.poster"):
+            title = secici.select_text("h2", item)
+            href = secici.select_attr("a", "href", item)
+            poster = secici.select_attr("img", "data-srcset", item)

             if poster:
                 poster = poster.split(",")[0].split(" ")[0]
@@ -69,89 +69,84 @@ class Sinefy(PluginBase):
         c_key = "ca1d4a53d0f4761a949b85e51e18f096"
         c_value = "MTc0NzI2OTAwMDU3ZTEwYmZjMDViNWFmOWIwZDViODg0MjU4MjA1ZmYxOThmZTYwMDdjMWQzMzliNzY5NzFlZmViMzRhMGVmNjgwODU3MGIyZA=="

-
-
-
+        with contextlib.suppress(Exception):
+            istek = await self.httpx.get(self.main_url)
+            secici = HTMLHelper(istek.text)

-            cke
-            cval =
+            cke = secici.select_attr("input[name='cKey']", "value")
+            cval = secici.select_attr("input[name='cValue']", "value")

             if cke and cval:
                 c_key = cke
                 c_value = cval

-
-
+        response = await self.httpx.post(
+            url = f"{self.main_url}/bg/searchcontent",
+            headers = {
+                "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
+                "Accept" : "application/json, text/javascript, */*; q=0.01",
+                "X-Requested-With" : "XMLHttpRequest",
+                "Content-Type" : "application/x-www-form-urlencoded; charset=UTF-8"
+            },
+            data = {
+                "cKey" : c_key,
+                "cValue" : c_value,
+                "searchTerm" : query
+            }
+        )

-
-        data = {
-            "cKey" : c_key,
-            "cValue" : c_value,
-            "searchTerm" : query
-        }
-
-        headers = {
-            "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
-            "Accept" : "application/json, text/javascript, */*; q=0.01",
-            "X-Requested-With" : "XMLHttpRequest",
-            "Content-Type" : "application/x-www-form-urlencoded; charset=UTF-8"
-        }
-
-        response = await self.httpx.post(post_url, data=data, headers=headers)
-
-        try:
+        with contextlib.suppress(Exception):
             # Extract JSON data from response (might contain garbage chars at start)
             raw = response.text
             json_start = raw.find('{')
             if json_start != -1:
                 clean_json = raw[json_start:]
-                data
-
+                data = json.loads(clean_json)
+
             results = []
             # Result array is in data['data']['result']
             res_array = data.get("data", {}).get("result", [])
-
+
             if not res_array:
                 # Fallback manual parsing ?
                 pass

             for item in res_array:
-                name
-                slug
+                name = item.get("object_name")
+                slug = item.get("used_slug")
                 poster = item.get("object_poster_url")
-
+
                 if name and slug:
                     if "cdn.ampproject.org" in poster:
                         poster = "https://images.macellan.online/images/movie/poster/180/275/80/" + poster.split("/")[-1]
-
+
                     results.append(SearchResult(
-                        title=name,
-                        url=self.fix_url(slug),
-                        poster=self.fix_url(poster)
+                        title = name,
+                        url = self.fix_url(slug),
+                        poster = self.fix_url(poster)
                     ))
             return results

-        except Exception:
-            pass
         return []

     async def load_item(self, url: str) -> SeriesInfo | MovieInfo:
-
-
+        istek = await self.httpx.get(url)
+        secici = HTMLHelper(istek.text)

-        title =
-        poster_attr =
+        title = secici.select_direct_text("h1")
+        poster_attr = secici.select_attr("img.series-profile-thumb", "data-srcset") or secici.select_attr("img.series-profile-thumb", "srcset")
         if poster_attr:
             # "url 1x, url 2x" -> en sondakini (en yüksek kalite) al
             poster = poster_attr.split(",")[-1].strip().split(" ")[0]
         else:
-            poster =
-
-
-
-
-
-
+            poster = secici.select_poster("img.series-profile-thumb")
+
+        description = secici.select_text("p#tv-series-desc")
+        tags = secici.select_texts("div.item.categories a")
+        rating = secici.select_text("span.color-imdb")
+        actors = secici.select_texts("div.content h5")
+        year = secici.extract_year("div.truncate")
+        duration = secici.regex_first(r"(\d+)", secici.select_text(".media-meta td:last-child"))
         if duration == year or int(duration) < 40:
             duration = None

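Note: the search handler keeps its existing trick of slicing the response at the first "{" before calling json.loads, because the endpoint can prepend garbage characters to the JSON body. A standalone sketch with a made-up payload:

import json

# The prefix below imitates junk bytes before the JSON body; slicing at the
# first "{" recovers a parseable document. The payload itself is made up.
raw = "\ufeffgarbage123" + '{"data": {"result": [{"object_name": "Film", "used_slug": "/film"}]}}'

json_start = raw.find("{")
data = json.loads(raw[json_start:]) if json_start != -1 else {}

print(data.get("data", {}).get("result", []))  # [{'object_name': 'Film', 'used_slug': '/film'}]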
@@ -168,40 +163,136 @@ class Sinefy(PluginBase):
         }

         episodes = []
-        for tab in
-            for link in
+        for tab in secici.select("div.ui.tab"):
+            for link in secici.select("a[href*='bolum']", tab):
                 href = link.attrs.get("href")
                 if href:
-                    s, e =
-                    name =
+                    s, e = secici.extract_season_episode(href)
+                    name = secici.select_text("div.content div.header", link) or link.text(strip=True)
                     episodes.append(Episode(season=s or 1, episode=e or 1, title=name, url=self.fix_url(href)))
-
+
         if episodes:
             return SeriesInfo(**common_info, episodes=episodes)

         return MovieInfo(**common_info)

-
-
-
-
-
+    def _find_iframe(self, secici: HTMLHelper) -> str | None:
+        """Sayfa kaynağındaki video iframe adresini bulur."""
+        src = secici.select_attr("iframe", "src") or \
+              secici.select_attr("iframe", "data-src") or \
+              secici.regex_first(r'<iframe[^>]+src="([^"]+)"')
+        return self.fix_url(src) if src else None

-
-
-
-
-
-        #
+    async def _process_source(self, source: dict, subtitles: list) -> list[ExtractResult]:
+        """Tekil bir kaynağı işleyip sonucu döndürür."""
+        target_url = source["url"]
+        name = source["name"]
+
+        # Eğer direkt iframe değilse (Sayfa linki ise), önce iframe'i bul
+        if not source.get("is_main"):
+            try:
+                resp = await self.httpx.get(target_url)
+                temp_sel = HTMLHelper(resp.text)
+
+                if not (iframe_url := self._find_iframe(temp_sel)):
+                    return []
+
+                target_url = iframe_url
+
+                # Tab (Dil Seçeneği) ise, gittiğimiz sayfadaki aktif player ismini ekle
+                if source.get("is_tab"):
+                    p_name = temp_sel.select_text("div.alternatives-for-this div.playeritems.active") or "PUB"
+                    name = f"{name} | {p_name}"
+            except Exception:
+                return []
+
+        # Linki Extract Et
         try:
-
-            if
-            return [
+            extracted = await self.extract(target_url, referer=self.main_url)
+            if not extracted:
+                return []
+
+            items = extracted if isinstance(extracted, list) else [extracted]
+
+            # Sonuçları işle (İsim ver, altyazı ekle)
+            copy_subtitles = list(subtitles) # Her item için kopyasını kullan
+            for item in items:
+                item.name = name
+                if copy_subtitles:
+                    if not item.subtitles:
+                        item.subtitles = copy_subtitles
+                    else:
+                        item.subtitles.extend(copy_subtitles)
+
+            return items
         except Exception:
-
-
-
-
-
-
-
+            return []
+
+    async def load_links(self, url: str) -> list[ExtractResult]:
+        istek = await self.httpx.get(url)
+        secici = HTMLHelper(istek.text)
+
+        # 1. Altyazıları Topla
+        subtitles = []
+        for track in secici.select("track"):
+            if track.attrs.get("kind") in ("subtitles", "captions"):
+                if src := track.attrs.get("src"):
+                    lang = track.attrs.get("label") or track.attrs.get("srclang") or "Altyazı"
+                    subtitles.append(self.new_subtitle(src, lang))
+
+        sources = []
+
+        # Aktif Sayfa Bilgileri
+        active_tab_name = secici.select_text("div#series-tabs a.active") or "Sinefy"
+        active_player = secici.select_text("div.alternatives-for-this div.playeritems.active") or "PUB"
+
+        # A) Ana Video (Main Iframe)
+        if main_iframe := self._find_iframe(secici):
+            sources.append({
+                "url" : main_iframe,
+                "name" : f"{active_tab_name} | {active_player}",
+                "is_main" : True,
+                "is_tab" : False
+            })
+
+        # B) Alternatif Playerlar (Mevcut Sayfa Player Butonları)
+        for btn in secici.select("div.alternatives-for-this div.playeritems:not(.active) a"):
+            if href := btn.attrs.get("href"):
+                if "javascript" not in href:
+                    sources.append({
+                        "url" : self.fix_url(href),
+                        "name" : f"{active_tab_name} | {btn.text(strip=True)}",
+                        "is_main" : False,
+                        "is_tab" : False
+                    })
+
+        # C) Diğer Dil Seçenekleri (Tabs - Sekmeler)
+        for tab in secici.select("div#series-tabs a:not(.active)"):
+            if href := tab.attrs.get("href"):
+                sources.append({
+                    "url" : self.fix_url(href),
+                    "name" : tab.text(strip=True),
+                    "is_main" : False,
+                    "is_tab" : True
+                })
+
+        # 2. Kaynakları Paralel İşle
+        tasks = [self._process_source(src, subtitles) for src in sources]
+        results_groups = await asyncio.gather(*tasks)
+
+        # 3. Sonuçları Birleştir
+        final_results = []
+        for group in results_groups:
+            if group:
+                final_results.extend(group)
+
+        # 4. Duplicate Temizle (URL + İsim Kombinasyonu)
+        unique_results = []
+        seen = set()
+        for res in final_results:
+            key = (res.url, res.name)
+            if res.url and key not in seen:
+                unique_results.append(res)
+                seen.add(key)
+
+        return unique_results
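Note: Sinefy.load_links now collects the main iframe, the alternative player buttons and the other language tabs as sources, extracts them in parallel, and finally drops duplicates keyed on the (url, name) pair while skipping entries with an empty URL. A standalone sketch of that filter; SimpleNamespace stands in for ExtractResult and the sample entries are made up.

from types import SimpleNamespace

results = [
    SimpleNamespace(url="https://example.com/a.m3u8", name="TR | FastPlay"),
    SimpleNamespace(url="https://example.com/a.m3u8", name="TR | FastPlay"),  # duplicate, dropped
    SimpleNamespace(url="https://example.com/a.m3u8", name="EN | FastPlay"),  # kept: different name
    SimpleNamespace(url="", name="TR | Broken"),                              # dropped: empty URL
]

unique_results, seen = [], set()
for res in results:
    key = (res.url, res.name)
    if res.url and key not in seen:
        unique_results.append(res)
        seen.add(key)

print([res.name for res in unique_results])  # ['TR | FastPlay', 'EN | FastPlay']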