KekikStream: 1.4.4 → 2.0.2 (py3-none-any.whl)
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- KekikStream/CLI/pypi_kontrol.py +6 -6
- KekikStream/Core/Extractor/ExtractorBase.py +13 -12
- KekikStream/Core/Extractor/ExtractorLoader.py +25 -17
- KekikStream/Core/Extractor/ExtractorManager.py +53 -9
- KekikStream/Core/Extractor/ExtractorModels.py +5 -7
- KekikStream/Core/Extractor/YTDLPCache.py +35 -0
- KekikStream/Core/Media/MediaHandler.py +52 -31
- KekikStream/Core/Media/MediaManager.py +0 -3
- KekikStream/Core/Plugin/PluginBase.py +47 -21
- KekikStream/Core/Plugin/PluginLoader.py +11 -7
- KekikStream/Core/Plugin/PluginModels.py +25 -25
- KekikStream/Core/__init__.py +1 -0
- KekikStream/Extractors/CloseLoad.py +6 -26
- KekikStream/Extractors/ContentX_.py +40 -0
- KekikStream/Extractors/DzenRu.py +38 -0
- KekikStream/Extractors/ExPlay.py +53 -0
- KekikStream/Extractors/FirePlayer.py +60 -0
- KekikStream/Extractors/HDPlayerSystem.py +41 -0
- KekikStream/Extractors/JetTv.py +45 -0
- KekikStream/Extractors/MailRu.py +2 -4
- KekikStream/Extractors/MixTiger.py +57 -0
- KekikStream/Extractors/MolyStream.py +25 -7
- KekikStream/Extractors/Odnoklassniki.py +16 -11
- KekikStream/Extractors/{OkRuHTTP.py → Odnoklassniki_.py} +5 -1
- KekikStream/Extractors/{HDStreamAble.py → PeaceMakerst_.py} +1 -1
- KekikStream/Extractors/PixelDrain.py +0 -1
- KekikStream/Extractors/PlayerFilmIzle.py +62 -0
- KekikStream/Extractors/RapidVid.py +30 -13
- KekikStream/Extractors/RapidVid_.py +7 -0
- KekikStream/Extractors/SetPlay.py +57 -0
- KekikStream/Extractors/SetPrime.py +45 -0
- KekikStream/Extractors/SibNet.py +0 -1
- KekikStream/Extractors/TurkeyPlayer.py +34 -0
- KekikStream/Extractors/VidHide.py +72 -0
- KekikStream/Extractors/VidMoly.py +20 -19
- KekikStream/Extractors/{VidMolyMe.py → VidMoly_.py} +1 -1
- KekikStream/Extractors/VidMoxy.py +0 -1
- KekikStream/Extractors/VidPapi.py +89 -0
- KekikStream/Extractors/YTDLP.py +177 -0
- KekikStream/Extractors/YildizKisaFilm.py +41 -0
- KekikStream/Plugins/DiziBox.py +28 -16
- KekikStream/Plugins/DiziPal.py +246 -0
- KekikStream/Plugins/DiziYou.py +58 -31
- KekikStream/Plugins/Dizilla.py +97 -68
- KekikStream/Plugins/FilmBip.py +145 -0
- KekikStream/Plugins/FilmMakinesi.py +61 -52
- KekikStream/Plugins/FilmModu.py +138 -0
- KekikStream/Plugins/FullHDFilm.py +164 -0
- KekikStream/Plugins/FullHDFilmizlesene.py +38 -37
- KekikStream/Plugins/HDFilmCehennemi.py +44 -54
- KekikStream/Plugins/JetFilmizle.py +68 -42
- KekikStream/Plugins/KultFilmler.py +219 -0
- KekikStream/Plugins/RecTV.py +41 -37
- KekikStream/Plugins/RoketDizi.py +232 -0
- KekikStream/Plugins/SelcukFlix.py +309 -0
- KekikStream/Plugins/SezonlukDizi.py +16 -14
- KekikStream/Plugins/SineWix.py +39 -30
- KekikStream/Plugins/Sinefy.py +238 -0
- KekikStream/Plugins/SinemaCX.py +157 -0
- KekikStream/Plugins/Sinezy.py +146 -0
- KekikStream/Plugins/SuperFilmGeldi.py +121 -0
- KekikStream/Plugins/UgurFilm.py +10 -10
- KekikStream/__init__.py +296 -319
- KekikStream/requirements.txt +3 -4
- kekikstream-2.0.2.dist-info/METADATA +309 -0
- kekikstream-2.0.2.dist-info/RECORD +82 -0
- {kekikstream-1.4.4.dist-info → kekikstream-2.0.2.dist-info}/WHEEL +1 -1
- KekikStream/Extractors/FourCX.py +0 -7
- KekikStream/Extractors/FourPichive.py +0 -7
- KekikStream/Extractors/FourPlayRu.py +0 -7
- KekikStream/Extractors/Hotlinger.py +0 -7
- KekikStream/Extractors/OkRuSSL.py +0 -7
- KekikStream/Extractors/Pichive.py +0 -7
- KekikStream/Extractors/PlayRu.py +0 -7
- KekikStream/Helpers/Unpack.py +0 -75
- KekikStream/Plugins/Shorten.py +0 -225
- kekikstream-1.4.4.dist-info/METADATA +0 -108
- kekikstream-1.4.4.dist-info/RECORD +0 -63
- {kekikstream-1.4.4.dist-info → kekikstream-2.0.2.dist-info}/entry_points.txt +0 -0
- {kekikstream-1.4.4.dist-info → kekikstream-2.0.2.dist-info/licenses}/LICENSE +0 -0
- {kekikstream-1.4.4.dist-info → kekikstream-2.0.2.dist-info}/top_level.txt +0 -0
KekikStream/Plugins/RecTV.py
CHANGED
@@ -1,20 +1,17 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
 
-from KekikStream.Core import
-from httpx import AsyncClient
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo, Episode, SeriesInfo, ExtractResult, Subtitle
 from json import dumps, loads
 import re
 
 class RecTV(PluginBase):
     name = "RecTV"
     language = "tr"
-    main_url = "https://m.
+    main_url = "https://m.prectv60.lol"
     favicon = "https://rectvapk.cc/wp-content/uploads/2023/02/Rec-TV.webp"
     description = "RecTv APK, Türkiye’deki en popüler Çevrimiçi Medya Akış platformlarından biridir. Filmlerin, Canlı Sporların, Web Dizilerinin ve çok daha fazlasının keyfini ücretsiz çıkarın."
 
-    sw_key
-    http2 = AsyncClient(http2=True)
-    http2.headers.update({"user-agent": "okhttp/4.12.0"})
+    sw_key = "4F5A9C3D9A86FA54EACEDDD635185/c3c5bd17-e37b-4b94-a944-8a3688a30452"
 
     main_page = {
         f"{main_url}/api/channel/by/filtres/0/0/SAYFA/{sw_key}/" : "Canlı",
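The dropped `http2 = AsyncClient(http2=True)` attribute means the plugin no longer keeps a second httpx client just to carry the okhttp user agent; 2.0.2 reuses the shared client (exposed by `PluginBase` as `self.httpx`, per the hunks below) and sets the header at the call sites instead. A minimal sketch of that pattern, using only httpx's public API and an example endpoint:

```python
import asyncio
import httpx

async def main() -> None:
    # One shared client (the plugin's self.httpx stands in here); the okhttp
    # user agent is applied right before the request instead of living on a
    # dedicated AsyncClient(http2=True) as in 1.4.4.
    client = httpx.AsyncClient()
    client.headers.update({"user-agent": "okhttp/4.12.0"})

    resp = await client.get("https://httpbin.org/headers")
    print(resp.json()["headers"]["User-Agent"])  # okhttp/4.12.0

    await client.aclose()

asyncio.run(main())
```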
@@ -33,8 +30,8 @@ class RecTV(PluginBase):
         f"{main_url}/api/movie/by/filtres/5/created/SAYFA/{sw_key}/" : "Romantik"
     }
 
-    @kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
+        self.httpx.headers.update({"user-agent": "okhttp/4.12.0"})
         istek = await self.httpx.get(f"{url.replace('SAYFA', str(int(page) - 1))}")
         veriler = istek.json()
 
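Each `main_page` entry is a URL template: `SAYFA` is swapped for the page index and the `sw_key` is already baked into the string. A small illustration of how one of those templates expands, using the host and key values visible in the hunk above (the API is not actually called here):

```python
main_url = "https://m.prectv60.lol"
sw_key   = "4F5A9C3D9A86FA54EACEDDD635185/c3c5bd17-e37b-4b94-a944-8a3688a30452"

template = f"{main_url}/api/channel/by/filtres/0/0/SAYFA/{sw_key}/"

def expand(url: str, page: int) -> str:
    # get_main_page() subtracts one from the incoming page number before
    # substituting, i.e. the API appears to count pages from zero.
    return url.replace("SAYFA", str(int(page) - 1))

print(expand(template, 1))
# https://m.prectv60.lol/api/channel/by/filtres/0/0/0/4F5A9C3D.../
```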
@@ -48,11 +45,9 @@
             for veri in veriler
         ]
 
-    @kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
-        self.
-
-        istek = await self.http2.get(f"{self.main_url}/api/search/{query}/{self.sw_key}/")
+        self.httpx.headers.update({"user-agent": "okhttp/4.12.0"})
+        istek = await self.httpx.get(f"{self.main_url}/api/search/{query}/{self.sw_key}/")
 
         kanallar = istek.json().get("channels")
         icerikler = istek.json().get("posters")
@@ -70,34 +65,34 @@
             for veri in tum_veri
         ]
 
-    @kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> MovieInfo:
+        self.httpx.headers.update({"user-agent": "okhttp/4.12.0"})
         veri = loads(url)
 
         match veri.get("type"):
             case "serie":
-                dizi_istek = await self.
+                dizi_istek = await self.httpx.get(f"{self.main_url}/api/season/by/serie/{veri.get('id')}/{self.sw_key}/")
                 dizi_veri = dizi_istek.json()
 
                 episodes = []
                 for season in dizi_veri:
                     for episode in season.get("episodes"):
+                        # Bölüm için gerekli bilgileri JSON olarak sakla
+                        ep_data = {
+                            "url" : self.fix_url(episode.get("sources")[0].get("url")),
+                            "title" : f"{veri.get('title')} | {season.get('title', '1. Sezon')} {episode.get('title', '1. Bölüm')}",
+                            "is_episode" : True
+                        }
+
                         ep_model = Episode(
                             season = int(re.search(r"(\d+)\.S", season.get("title")).group(1)) if re.search(r"(\d+)\.S", season.get("title")) else 1,
                             episode = int(re.search(r"Bölüm (\d+)", episode.get("title")).group(1)) if re.search(r"Bölüm (\d+)", episode.get("title")) else 1,
                             title = episode.get("title"),
-                            url =
+                            url = dumps(ep_data),
                         )
 
                         episodes.append(ep_model)
 
-                        self._data[ep_model.url] = {
-                            "ext_name" : self.name,
-                            "name" : f"{veri.get('title')} | {ep_model.season}. Sezon {ep_model.episode}. Bölüm",
-                            "referer" : "https://twitter.com/",
-                            "subtitles" : []
-                        }
-
                 return SeriesInfo(
                     url = url,
                     poster = self.fix_url(veri.get("image")),
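The notable change in this hunk is that an episode's `url` field now carries a JSON blob (`dumps(ep_data)`) rather than a plain link; `load_links` later calls `loads(url)` and branches on the `is_episode` flag instead of reading from the old `self._data` side table. A self-contained sketch of that round trip with placeholder values (no KekikStream types involved):

```python
from json import dumps, loads

# What load_item() now stores in Episode.url (values are placeholders).
ep_data = {
    "url"        : "https://example.com/stream/episode-1.m3u8",
    "title"      : "Example Show | 1. Sezon 1. Bölüm",
    "is_episode" : True,
}
stored_url = dumps(ep_data)

# What load_links() does when it gets that string back.
veri = loads(stored_url)
if veri.get("is_episode"):
    links = [{
        "url"     : veri.get("url"),
        "name"    : veri.get("title", "Bölüm"),
        "referer" : "https://twitter.com/",
    }]
    print(links[0]["name"])  # Example Show | 1. Sezon 1. Bölüm
```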
@@ -121,33 +116,42 @@
             actors = []
         )
 
-
-    async def load_links(self, url: str) -> list[str]:
+    async def load_links(self, url: str) -> list[dict]:
         try:
             veri = loads(url)
         except Exception:
-
-
-
+            # JSON değilse düz URL'dir (eski yapı veya hata)
+            return [{"url": url, "name": "Video"}]
+
+        # Eğer dizi bölümü ise (bizim oluşturduğumuz yapı)
+        if veri.get("is_episode"):
+            return [{
+                "url" : veri.get("url"),
+                "name" : veri.get("title", "Bölüm"),
+                "user_agent" : "googleusercontent",
+                "referer" : "https://twitter.com/"
+            }]
+
+        # Film ise (RecTV API yapısı)
+        results = []
         if veri.get("sources"):
             for kaynak in veri.get("sources"):
                 video_link = kaynak.get("url")
                 if "otolinkaff" in video_link:
                     continue
 
-
-                    "
-                    "name"
-                    "
-                    "
-                }
-                videolar.append(video_link)
+                results.append({
+                    "url" : video_link,
+                    "name" : f"{veri.get('title')} - {kaynak.get('title')}",
+                    "user_agent" : "googleusercontent",
+                    "referer" : "https://twitter.com/"
+                })
 
-        return
+        return results
 
-    async def play(self,
-        extract_result = ExtractResult(
-        self.media_handler.title = name
+    async def play(self, **kwargs):
+        extract_result = ExtractResult(**kwargs)
+        self.media_handler.title = kwargs.get("name")
         if self.name not in self.media_handler.title:
             self.media_handler.title = f"{self.name} | {self.media_handler.title}"
 
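With `load_links` now returning `list[dict]` instead of `list[str]`, a caller can forward each dict straight into `play(**kwargs)`, which builds an `ExtractResult(**kwargs)`. The real `ExtractResult` fields live in `ExtractorModels.py` and are not shown in this diff, so the sketch below uses a stand-in dataclass purely to illustrate the keyword-argument flow:

```python
from dataclasses import dataclass

@dataclass
class FakeExtractResult:                  # stand-in; not the real ExtractResult
    url: str
    name: str = ""
    referer: str = ""
    user_agent: str = ""

def play(**kwargs) -> FakeExtractResult:
    # Mirrors the shape of the new RecTV.play(): unpack whatever load_links() produced,
    # keeping only the keys the model actually declares.
    known = {k: v for k, v in kwargs.items() if k in FakeExtractResult.__dataclass_fields__}
    return FakeExtractResult(**known)

links = [{
    "url"        : "https://example.com/movie.m3u8",
    "name"       : "Example Movie - 1080p",
    "user_agent" : "example-agent",
    "referer"    : "https://twitter.com/",
}]

for link in links:
    result = play(**link)
    print(result.name, result.url)
```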
KekikStream/Plugins/RoketDizi.py
ADDED
@@ -0,0 +1,232 @@
+# Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.
+
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, MovieInfo
+from parsel import Selector
+import re, base64, json
+
+class RoketDizi(PluginBase):
+    name = "RoketDizi"
+    lang = "tr"
+    main_url = "https://roketdizi.to"
+    favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+    description = "Türkiye'nin en tatlış yabancı dizi izleme sitesi. Türkçe dublaj, altyazılı, eski ve yeni yabancı dizilerin yanı sıra kore (asya) dizileri izleyebilirsiniz."
+
+    main_page = {
+        f"{main_url}/dizi/tur/aksiyon" : "Aksiyon",
+        f"{main_url}/dizi/tur/bilim-kurgu" : "Bilim Kurgu",
+        f"{main_url}/dizi/tur/gerilim" : "Gerilim",
+        f"{main_url}/dizi/tur/fantastik" : "Fantastik",
+        f"{main_url}/dizi/tur/komedi" : "Komedi",
+        f"{main_url}/dizi/tur/korku" : "Korku",
+        f"{main_url}/dizi/tur/macera" : "Macera",
+        f"{main_url}/dizi/tur/suc" : "Suç"
+    }
+
+    async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
+        istek = await self.httpx.get(f"{url}?&page={page}")
+        secici = Selector(istek.text)
+
+        results = []
+
+        for item in secici.css("div.w-full.p-4 span.bg-\\[\\#232323\\]"):
+            title = item.css("span.font-normal.line-clamp-1::text").get()
+            href = item.css("a::attr(href)").get()
+            poster = item.css("img::attr(src)").get()
+
+            if title and href:
+                results.append(MainPageResult(
+                    category = category,
+                    title = self.clean_title(title),
+                    url = self.fix_url(href),
+                    poster = self.fix_url(poster)
+                ))
+
+        return results
+
+    async def search(self, query: str) -> list[SearchResult]:
+        istek = await self.httpx.post(
+            url = f"{self.main_url}/api/bg/searchContent?searchterm={query}",
+            headers = {
+                "Accept" : "application/json, text/javascript, */*; q=0.01",
+                "X-Requested-With" : "XMLHttpRequest",
+                "Referer" : f"{self.main_url}/",
+            }
+        )
+
+        try:
+            veri = istek.json()
+            encoded = veri.get("response", "")
+            if not encoded:
+                return []
+
+            decoded = base64.b64decode(encoded).decode("utf-8")
+            veri = json.loads(decoded)
+
+            if not veri.get("state"):
+                return []
+
+            results = []
+
+            for item in veri.get("result", []):
+                title = item.get("object_name", "")
+                slug = item.get("used_slug", "")
+                poster = item.get("object_poster_url", "")
+
+                if title and slug:
+                    results.append(SearchResult(
+                        title = self.clean_title(title.strip()),
+                        url = self.fix_url(f"{self.main_url}/{slug}"),
+                        poster = self.fix_url(poster) if poster else None
+                    ))
+
+            return results
+
+        except Exception:
+            return []
+
+    async def load_item(self, url: str) -> SeriesInfo:
+        # Note: Handling both Movie and Series logic in one, returning SeriesInfo generally or MovieInfo
+        resp = await self.httpx.get(url)
+        sel = Selector(resp.text)
+
+        title = sel.css("h1.text-white::text").get()
+        poster = sel.css("div.w-full.page-top img::attr(src)").get()
+        description = sel.css("div.mt-2.text-sm::text").get()
+
+        # Tags - genre bilgileri (Detaylar bölümünde)
+        tags = []
+        genre_text = sel.css("h3.text-white.opacity-90::text").get()
+        if genre_text:
+            tags = [t.strip() for t in genre_text.split(",")]
+
+        # Rating
+        rating = sel.css("span.text-white.text-sm.font-bold::text").get()
+
+        # Year ve Actors - Detaylar (Details) bölümünden
+        year = None
+        actors = []
+
+        # Detaylar bölümündeki tüm flex-col div'leri al
+        detail_items = sel.css("div.flex.flex-col")
+        for item in detail_items:
+            # Label ve value yapısı: span.text-base ve span.text-sm.opacity-90
+            label = item.css("span.text-base::text").get()
+            value = item.css("span.text-sm.opacity-90::text").get()
+
+            if label and value:
+                label = label.strip()
+                value = value.strip()
+
+                # Yayın tarihi (yıl)
+                if label == "Yayın tarihi":
+                    # "16 Ekim 2018" formatından yılı çıkar
+                    year_match = re.search(r'\d{4}', value)
+                    if year_match:
+                        year = year_match.group()
+
+                # Yaratıcılar veya Oyuncular
+                elif label in ["Yaratıcılar", "Oyuncular"]:
+                    if value:
+                        actors.append(value)
+
+        # Check urls for episodes
+        all_urls = re.findall(r'"url":"([^"]*)"', resp.text)
+        is_series = any("bolum-" in u for u in all_urls)
+
+        episodes = []
+        if is_series:
+            # Dict kullanarak duplicate'leri önle ama sıralı tut
+            episodes_dict = {}
+            for u in all_urls:
+                if "bolum" in u and u not in episodes_dict:
+                    season_match = re.search(r'/sezon-(\d+)', u)
+                    ep_match = re.search(r'/bolum-(\d+)', u)
+
+                    season = int(season_match.group(1)) if season_match else 1
+                    episode_num = int(ep_match.group(1)) if ep_match else 1
+
+                    # Key olarak (season, episode) tuple kullan
+                    key = (season, episode_num)
+                    episodes_dict[key] = Episode(
+                        season = season,
+                        episode = episode_num,
+                        title = f"{season}. Sezon {episode_num}. Bölüm",
+                        url = self.fix_url(u)
+                    )
+
+            # Sıralı liste oluştur
+            episodes = [episodes_dict[key] for key in sorted(episodes_dict.keys())]
+
+        return SeriesInfo(
+            title = title,
+            url = url,
+            poster = self.fix_url(poster),
+            description = description,
+            tags = tags,
+            rating = rating,
+            actors = actors,
+            episodes = episodes,
+            year = year
+        )
+
+    async def load_links(self, url: str) -> list[dict]:
+        resp = await self.httpx.get(url)
+        sel = Selector(resp.text)
+
+        next_data = sel.css("script#__NEXT_DATA__::text").get()
+        if not next_data:
+            return []
+
+        try:
+            data = json.loads(next_data)
+            secure_data = data["props"]["pageProps"]["secureData"]
+            decoded_json = json.loads(base64.b64decode(secure_data).decode('utf-8'))
+
+            # secureData içindeki RelatedResults -> getEpisodeSources -> result dizisini al
+            sources = decoded_json.get("RelatedResults", {}).get("getEpisodeSources", {}).get("result", [])
+
+            results = []
+            for source in sources:
+                source_content = source.get("source_content", "")
+
+                # iframe URL'ini source_content'ten çıkar
+                iframe_match = re.search(r'<iframe[^>]*src=["\']([^"\']*)["\']', source_content)
+                if not iframe_match:
+                    continue
+
+                iframe_url = iframe_match.group(1)
+                if "http" not in iframe_url:
+                    if iframe_url.startswith("//"):
+                        iframe_url = "https:" + iframe_url
+                    else:
+                        iframe_url = "https://" + iframe_url
+
+                # Check extractor
+                extractor = self.ex_manager.find_extractor(iframe_url)
+                ext_name = extractor.name if extractor else ""
+
+                # Metadata'dan bilgileri al
+                source_name = source.get("source_name", "")
+                language_name = source.get("language_name", "")
+                quality_name = source.get("quality_name", "")
+
+                # İsmi oluştur
+                name_parts = []
+                if source_name:
+                    name_parts.append(source_name)
+                if ext_name:
+                    name_parts.append(ext_name)
+                if language_name:
+                    name_parts.append(language_name)
+                if quality_name:
+                    name_parts.append(quality_name)
+
+                results.append({
+                    "url" : iframe_url,
+                    "name" : " | ".join(name_parts)
+                })
+
+            return results
+
+        except Exception:
+            return []