KekikStream-1.8.1-py3-none-any.whl → KekikStream-1.9.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- KekikStream/Core/Extractor/ExtractorBase.py +2 -14
- KekikStream/Core/Extractor/ExtractorManager.py +29 -8
- KekikStream/Core/Media/MediaHandler.py +30 -8
- KekikStream/Core/Plugin/PluginBase.py +2 -15
- KekikStream/Extractors/YTDLP.py +172 -0
- KekikStream/Plugins/DiziBox.py +2 -7
- KekikStream/Plugins/DiziPal.py +1 -1
- KekikStream/Plugins/DiziYou.py +41 -16
- KekikStream/Plugins/Dizilla.py +38 -27
- KekikStream/Plugins/FilmBip.py +1 -1
- KekikStream/Plugins/FilmMakinesi.py +1 -5
- KekikStream/Plugins/FullHDFilmizlesene.py +1 -5
- KekikStream/Plugins/HDFilmCehennemi.py +11 -18
- KekikStream/Plugins/JetFilmizle.py +1 -5
- KekikStream/Plugins/RecTV.py +1 -5
- KekikStream/Plugins/RoketDizi.py +80 -42
- KekikStream/Plugins/SelcukFlix.py +160 -67
- KekikStream/Plugins/SezonlukDizi.py +1 -5
- KekikStream/Plugins/SineWix.py +1 -5
- KekikStream/Plugins/Sinefy.py +72 -51
- KekikStream/Plugins/Sinezy.py +74 -42
- KekikStream/Plugins/UgurFilm.py +1 -5
- KekikStream/requirements.txt +2 -3
- kekikstream-1.9.2.dist-info/METADATA +290 -0
- {kekikstream-1.8.1.dist-info → kekikstream-1.9.2.dist-info}/RECORD +29 -28
- kekikstream-1.8.1.dist-info/METADATA +0 -110
- {kekikstream-1.8.1.dist-info → kekikstream-1.9.2.dist-info}/WHEEL +0 -0
- {kekikstream-1.8.1.dist-info → kekikstream-1.9.2.dist-info}/entry_points.txt +0 -0
- {kekikstream-1.8.1.dist-info → kekikstream-1.9.2.dist-info}/licenses/LICENSE +0 -0
- {kekikstream-1.8.1.dist-info → kekikstream-1.9.2.dist-info}/top_level.txt +0 -0
KekikStream/Plugins/Dizilla.py
CHANGED
@@ -1,6 +1,6 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode
 from parsel import Selector
 from json import loads
 from urllib.parse import urlparse, urlunparse
@@ -12,36 +12,47 @@ class Dizilla(PluginBase):
     language = "tr"
     main_url = "https://dizilla40.com"
     favicon  = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
-    description = "
+    description = "1080p yabancı dizi izle. Türkçe altyazılı veya dublaj seçenekleriyle 1080p çözünürlükte yabancı dizilere anında ulaş. Popüler dizileri kesintisiz izle."

     main_page = {
-        f"{main_url}/tum-bolumler"
-        f"{main_url}/
-        f"{main_url}/
-        f"{main_url}/
-        f"{main_url}/
-        f"{main_url}/
-        f"{main_url}/
+        f"{main_url}/tum-bolumler" : "Altyazılı Bölümler",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=15&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Aile",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=9&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Aksiyon",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=17&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Animasyon",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=5&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Bilim Kurgu",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=2&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Dram",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=12&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Fantastik",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=18&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Gerilim",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=3&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Gizem",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=4&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Komedi",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=8&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Korku",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=24&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Macera",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=7&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Romantik",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=26&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Savaş",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=1&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Suç",
+        f"{main_url}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=11&countryIdsComma=&orderType=date_desc&languageId=-1&currentPage=SAYFA&currentPageCount=24&queryStr=&categorySlugsComma=&countryCodesComma=" : "Western",
     }

-    #@kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
-        istek  = await self.httpx.get(url)
-        secici = Selector(istek.text)
-
         ana_sayfa = []

-        if "
+        if "api/bg" in url:
+            istek     = await self.httpx.post(url.replace("SAYFA", str(page)))
+            decrypted = await self.decrypt_response(istek.json().get("response"))
+            veriler   = decrypted.get("result", [])
             ana_sayfa.extend([
                 MainPageResult(
                     category = category,
-                    title    = veri.
-                    url      = self.fix_url(veri.
-                    poster   = self.fix_url(veri.
+                    title    = veri.get("original_title"),
+                    url      = self.fix_url(f"{self.main_url}/{veri.get('used_slug')}"),
+                    poster   = self.fix_url(veri.get("object_poster_url")),
                 )
-                for veri in
+                for veri in veriler
             ])
         else:
+            istek  = await self.httpx.get(url.replace("SAYFA", str(page)))
+            secici = Selector(istek.text)
+
             for veri in secici.css("div.tab-content > div.grid a"):
                 name    = veri.css("h2::text").get()
                 ep_name = veri.xpath("normalize-space(//div[contains(@class, 'opacity-80')])").get()
@@ -88,7 +99,6 @@ class Dizilla(PluginBase):
         # JSON decode
         return loads(decrypted.decode("utf-8"))

-    #@kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
         arama_istek = await self.httpx.post(f"{self.main_url}/api/bg/searchcontent?searchterm={query}")
         decrypted   = await self.decrypt_response(arama_istek.json().get("response"))
@@ -103,7 +113,6 @@ class Dizilla(PluginBase):
             for veri in arama_veri
         ]

-    #@kekik_cache(ttl=60*60)
     async def url_base_degis(self, eski_url:str, yeni_base:str) -> str:
         parsed_url       = urlparse(eski_url)
         parsed_yeni_base = urlparse(yeni_base)
@@ -114,22 +123,25 @@ class Dizilla(PluginBase):

         return urlunparse(yeni_url)

-    #@kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> SeriesInfo:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
         veri   = loads(secici.xpath("//script[@type='application/ld+json']/text()").getall()[-1])

-        title
+        title = veri.get("name")
         if alt_title := veri.get("alternateName"):
             title += f" - ({alt_title})"

         poster      = self.fix_url(veri.get("image"))
         description = veri.get("description")
         year        = veri.get("datePublished").split("-")[0]
-
-
-
+
+        # Tags extraction from page content (h3 tag)
+        tags_raw = secici.css("h3.text-white.opacity-60::text").get()
+        tags     = [t.strip() for t in tags_raw.split(",")] if tags_raw else []
+
+        rating = veri.get("aggregateRating", {}).get("ratingValue")
+        actors = [actor.get("name") for actor in veri.get("actor", []) if actor.get("name")]

         bolumler = []
         sezonlar = veri.get("containsSeason") if isinstance(veri.get("containsSeason"), list) else [veri.get("containsSeason")]
@@ -158,7 +170,6 @@ class Dizilla(PluginBase):
             actors = actors
         )

-    #@kekik_cache(ttl=15*60)
     async def load_links(self, url: str) -> list[dict]:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
@@ -181,4 +192,4 @@ class Dizilla(PluginBase):
                 "name" : f"{extractor.name if extractor else 'Main Player'} | {result.get('language_name')}",
             })

-        return links
+        return links
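Reviewer note: the rewritten Dizilla main page now POSTs to the api/bg/findSeries feed (with the SAYFA placeholder swapped for the page number), unwraps the "response" field via decrypt_response, and maps the "result" items onto MainPageResult. A minimal sketch of that flow, assuming the field names shown in the diff and a base64 JSON wrapper like the one RoketDizi decodes below; decode_response() is a stand-in for PluginBase.decrypt_response(), whose body is not part of this diff.

# Hedged sketch, not the packaged code.
import asyncio, base64, json
import httpx

MAIN_URL = "https://dizilla40.com"
AKSIYON  = (
    f"{MAIN_URL}/api/bg/findSeries?releaseYearStart=1900&releaseYearEnd=2050"
    "&imdbPointMin=0&imdbPointMax=10&categoryIdsComma=9&orderType=date_desc"
    "&languageId=-1&currentPage=SAYFA&currentPageCount=24"
)

def decode_response(wrapped: str) -> dict:
    # Assumption: the wrapped payload is base64-encoded JSON.
    return json.loads(base64.b64decode(wrapped).decode("utf-8"))

async def get_main_page(page: int) -> list[dict]:
    async with httpx.AsyncClient() as client:
        istek   = await client.post(AKSIYON.replace("SAYFA", str(page)))
        veriler = decode_response(istek.json().get("response")).get("result", [])
        # Each item exposes original_title, used_slug and object_poster_url.
        return [
            {
                "title":  veri.get("original_title"),
                "url":    f"{MAIN_URL}/{veri.get('used_slug')}",
                "poster": veri.get("object_poster_url"),
            }
            for veri in veriler
        ]

if __name__ == "__main__":
    print(asyncio.run(get_main_page(1))[:3])

Running the sketch is only a shape check on the feed; the real plugin performs the same mapping inside get_main_page using self.httpx and self.fix_url.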
KekikStream/Plugins/FilmBip.py
CHANGED
@@ -8,7 +8,7 @@ class FilmBip(PluginBase):
     language = "tr"
     main_url = "https://filmbip.com"
     favicon  = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
-    description = "
+    description = "FilmBip adlı film sitemizde Full HD film izle. Yerli ve yabancı filmleri Türkçe dublaj veya altyazılı şekilde 1080p yüksek kalite film izle"

     main_page = {
         f"{main_url}/filmler/SAYFA" : "Yeni Filmler",
KekikStream/Plugins/FilmMakinesi.py
CHANGED
@@ -1,6 +1,6 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo
 from parsel import Selector

 class FilmMakinesi(PluginBase):
@@ -34,7 +34,6 @@ class FilmMakinesi(PluginBase):
         f"{main_url}/tur/western-fm1/film/" : "Western"
     }

-    #@kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         istek  = self.cloudscraper.get(f"{url}{'' if page == 1 else f'page/{page}/'}")
         secici = Selector(istek.text)
@@ -51,7 +50,6 @@ class FilmMakinesi(PluginBase):
             for veri in veriler
         ]

-    #@kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
         istek  = await self.httpx.get(f"{self.main_url}/arama/?s={query}")
         secici = Selector(istek.text)
@@ -73,7 +71,6 @@ class FilmMakinesi(PluginBase):

         return results

-    #@kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> MovieInfo:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
@@ -101,7 +98,6 @@ class FilmMakinesi(PluginBase):
             duration = duration
         )

-    #@kekik_cache(ttl=15*60)
     async def load_links(self, url: str) -> list[dict]:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
KekikStream/Plugins/FullHDFilmizlesene.py
CHANGED
@@ -1,6 +1,6 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo
 from parsel import Selector
 from Kekik.Sifreleme import StringCodec
 import json, re
@@ -40,7 +40,6 @@ class FullHDFilmizlesene(PluginBase):
         f"{main_url}/filmizle/yerli-filmler-hd-izle/" : "Yerli Filmler"
     }

-    #@kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         istek  = self.cloudscraper.get(f"{url}{page}")
         secici = Selector(istek.text)
@@ -55,7 +54,6 @@ class FullHDFilmizlesene(PluginBase):
             for veri in secici.css("li.film")
         ]

-    #@kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
         istek  = await self.httpx.get(f"{self.main_url}/arama/{query}")
         secici = Selector(istek.text)
@@ -77,7 +75,6 @@ class FullHDFilmizlesene(PluginBase):

         return results

-    #@kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> MovieInfo:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
@@ -103,7 +100,6 @@ class FullHDFilmizlesene(PluginBase):
             duration = duration
         )

-    #@kekik_cache(ttl=15*60)
     async def load_links(self, url: str) -> list[dict]:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
KekikStream/Plugins/HDFilmCehennemi.py
CHANGED
@@ -1,6 +1,6 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo, ExtractResult, Subtitle
 from parsel import Selector
 from Kekik.Sifreleme import Packer, StreamDecoder
 import random, string, re
@@ -12,8 +12,7 @@ class HDFilmCehennemi(PluginBase):
     favicon     = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
     description = "Türkiye'nin en hızlı hd film izleme sitesi"

-
-    requires_cffi = True
+

     main_page = {
         f"{main_url}" : "Yeni Eklenen Filmler",
@@ -32,9 +31,8 @@ class HDFilmCehennemi(PluginBase):
         f"{main_url}/tur/romantik-filmleri-izle-1" : "Romantik Filmleri"
     }

-    #@kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
-        istek  = await self.
+        istek  = await self.httpx.get(f"{url}", follow_redirects=True)
         secici = Selector(istek.text)

         return [
@@ -47,9 +45,8 @@ class HDFilmCehennemi(PluginBase):
             for veri in secici.css("div.section-content a.poster")
         ]

-    #@kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
-        istek = await self.
+        istek = await self.httpx.get(
             url = f"{self.main_url}/search/?q={query}",
             headers = {
                 "Referer" : f"{self.main_url}/",
@@ -76,9 +73,8 @@ class HDFilmCehennemi(PluginBase):

         return results

-    #@kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> MovieInfo:
-        istek  = await self.
+        istek  = await self.httpx.get(url, headers = {"Referer": f"{self.main_url}/"})
         secici = Selector(istek.text)

         title = secici.css("h1.section-title::text").get().strip()
@@ -111,11 +107,10 @@ class HDFilmCehennemi(PluginBase):
     def generate_random_cookie(self):
         return "".join(random.choices(string.ascii_letters + string.digits, k=16))

-    #@kekik_cache(ttl=15*60)
     async def cehennempass(self, video_id: str) -> list[dict]:
         results = []

-        istek = await self.
+        istek = await self.httpx.post(
             url = "https://cehennempass.pw/process_quality_selection.php",
             headers = {
                 "Referer" : f"https://cehennempass.pw/download/{video_id}",
@@ -132,7 +127,7 @@ class HDFilmCehennemi(PluginBase):
             "referer" : f"https://cehennempass.pw/download/{video_id}"
         })

-        istek = await self.
+        istek = await self.httpx.post(
             url = "https://cehennempass.pw/process_quality_selection.php",
             headers = {
                 "Referer" : f"https://cehennempass.pw/download/{video_id}",
@@ -151,10 +146,9 @@ class HDFilmCehennemi(PluginBase):

         return results

-    #@kekik_cache(ttl=15*60)
     async def invoke_local_source(self, iframe: str, source: str, url: str):
-        self.
-        istek = await self.
+        self.httpx.headers.update({"Referer": f"{self.main_url}/"})
+        istek = await self.httpx.get(iframe)

         try:
             eval_func = re.compile(r'\s*(eval\(function[\s\S].*)\s*').findall(istek.text)[0]
@@ -182,9 +176,8 @@ class HDFilmCehennemi(PluginBase):
             "subtitles" : subtitles
         }]

-    #@kekik_cache(ttl=15*60)
     async def load_links(self, url: str) -> list[dict]:
-        istek  = await self.
+        istek  = await self.httpx.get(url)
         secici = Selector(istek.text)

         results = []
@@ -195,7 +188,7 @@ class HDFilmCehennemi(PluginBase):
             source   = f"{link.css('::text').get().replace('(HDrip Xbet)', '').strip()} {lang_code}"
             video_id = link.css("::attr(data-video)").get()

-            api_get = await self.
+            api_get = await self.httpx.get(
                 url = f"{self.main_url}/video/{video_id}/",
                 headers = {
                     "Content-Type" : "application/json",
KekikStream/Plugins/JetFilmizle.py
CHANGED
@@ -1,6 +1,6 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo
 from parsel import Selector

 class JetFilmizle(PluginBase):
@@ -19,7 +19,6 @@ class JetFilmizle(PluginBase):
         f"{main_url}/kategoriler/yesilcam-filmleri-izlee/page/" : "Yeşilçam Filmleri"
     }

-    #@kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         istek  = await self.httpx.get(f"{url}{page}", follow_redirects=True)
         secici = Selector(istek.text)
@@ -34,7 +33,6 @@ class JetFilmizle(PluginBase):
             for veri in secici.css("article.movie") if veri.css("h2 a::text, h3 a::text, h4 a::text, h5 a::text, h6 a::text").get()
         ]

-    #@kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
         istek = await self.httpx.post(
             url = f"{self.main_url}/filmara.php",
@@ -60,7 +58,6 @@ class JetFilmizle(PluginBase):

         return results

-    #@kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> MovieInfo:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
@@ -92,7 +89,6 @@ class JetFilmizle(PluginBase):
             actors = actors
         )

-    #@kekik_cache(ttl=15*60)
     async def load_links(self, url: str) -> list[dict]:
         istek  = await self.httpx.get(url)
         secici = Selector(istek.text)
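Reviewer note: in the plugin diffs above, the `await self.` calls that the old 1.8.1 code routed through a curl_cffi session resolve to the shared `httpx` client in 1.9.2 (HDFilmCehennemi also drops its `requires_cffi` flag), with the site passed as an explicit Referer header. A minimal sketch of that request pattern, assuming a standalone `httpx.AsyncClient` in place of the plugin's `self.httpx` and a placeholder domain (the real main_url is defined on the class and not visible in the hunks above):

# Hedged sketch of the 1.9.2 request style: plain async httpx with an
# explicit Referer header instead of a curl_cffi session.
import asyncio
import httpx

MAIN_URL = "https://example-site.tld"  # placeholder, not the plugin's real main_url

async def search_html(query: str) -> str:
    async with httpx.AsyncClient(follow_redirects=True) as client:
        istek = await client.get(
            f"{MAIN_URL}/search/?q={query}",
            headers={"Referer": f"{MAIN_URL}/"},
        )
        return istek.text

if __name__ == "__main__":
    asyncio.run(search_html("matrix"))

This matches the dependency change in requirements.txt (+2 -3): the plugins now rely on the shared httpx session that PluginBase already provides.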
KekikStream/Plugins/RecTV.py
CHANGED
@@ -1,6 +1,6 @@
 # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

-from KekikStream.Core import
+from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo, Episode, SeriesInfo, ExtractResult, Subtitle
 from json import dumps, loads
 import re

@@ -30,7 +30,6 @@ class RecTV(PluginBase):
         f"{main_url}/api/movie/by/filtres/5/created/SAYFA/{sw_key}/" : "Romantik"
     }

-    #@kekik_cache(ttl=60*60)
     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         self.httpx.headers.update({"user-agent": "okhttp/4.12.0"})
         istek = await self.httpx.get(f"{url.replace('SAYFA', str(int(page) - 1))}")
@@ -46,7 +45,6 @@ class RecTV(PluginBase):
             for veri in veriler
         ]

-    #@kekik_cache(ttl=60*60)
     async def search(self, query: str) -> list[SearchResult]:
         self.httpx.headers.update({"user-agent": "okhttp/4.12.0"})
         istek = await self.httpx.get(f"{self.main_url}/api/search/{query}/{self.sw_key}/")
@@ -67,7 +65,6 @@ class RecTV(PluginBase):
             for veri in tum_veri
         ]

-    #@kekik_cache(ttl=60*60)
     async def load_item(self, url: str) -> MovieInfo:
         self.httpx.headers.update({"user-agent": "okhttp/4.12.0"})
         veri = loads(url)
@@ -119,7 +116,6 @@ class RecTV(PluginBase):
             actors = []
         )

-    #@kekik_cache(ttl=15*60)
     async def load_links(self, url: str) -> list[dict]:
         try:
             veri = loads(url)
KekikStream/Plugins/RoketDizi.py
CHANGED
@@ -2,15 +2,16 @@

 from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode, MovieInfo
 from parsel import Selector
-import re, base64, json
+import re, base64, json

 class RoketDizi(PluginBase):
     name     = "RoketDizi"
     lang     = "tr"
     main_url = "https://roketdizi.to"
+    favicon     = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+    description = "Türkiye'nin en tatlış yabancı dizi izleme sitesi. Türkçe dublaj, altyazılı, eski ve yeni yabancı dizilerin yanı sıra kore (asya) dizileri izleyebilirsiniz."
+

-    # Domain doğrulama ve anti-bot mekanizmaları var
-    requires_cffi = True

     main_page = {
         "dizi/tur/aksiyon" : "Aksiyon",
@@ -25,7 +26,7 @@ class RoketDizi(PluginBase):

     async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
         full_url = f"{self.main_url}/{url}?&page={page}"
-        resp = await self.
+        resp = await self.httpx.get(full_url)
         sel  = Selector(resp.text)

         results = []
@@ -45,45 +46,50 @@ class RoketDizi(PluginBase):
         return results

     async def search(self, query: str) -> list[SearchResult]:
-        # Get Cookies and Keys
-        main_req = await self.cffi.get(self.main_url)
-        sel      = Selector(main_req.text)
-
-        c_key   = sel.css("input[name='cKey']::attr(value)").get()
-        c_value = sel.css("input[name='cValue']::attr(value)").get()
-
         post_url = f"{self.main_url}/api/bg/searchContent?searchterm={query}"

         headers = {
             "Accept"           : "application/json, text/javascript, */*; q=0.01",
             "X-Requested-With" : "XMLHttpRequest",
             "Referer"          : f"{self.main_url}/",
-            "CNT"              : "vakTR"
         }
-
-        data = {}
-        if c_key and c_value:
-            data = {"cKey": c_key, "cValue": c_value}

-        search_req = await self.
+        search_req = await self.httpx.post(post_url, headers=headers)

         try:
             resp_json = search_req.json()
-
+
+            # Response is base64 encoded!
+            if not resp_json.get("success"):
+                return []
+
+            encoded_response = resp_json.get("response", "")
+            if not encoded_response:
+                return []
+
+            # Decode base64
+            decoded = base64.b64decode(encoded_response).decode('utf-8')
+            data    = json.loads(decoded)
+
+            if not data.get("state"):
                 return []

-            html_content = resp_json.get("html", "").strip()
-            sel_results  = Selector(html_content)
-
             results = []
-
+            result_items = data.get("result", [])

-            for
-
-
-
-
-
+            for item in result_items:
+                title  = item.get("object_name", "")
+                slug   = item.get("used_slug", "")
+                poster = item.get("object_poster_url", "")
+
+                if title and slug:
+                    # Construct full URL from slug
+                    full_url = f"{self.main_url}/{slug}"
+                    results.append(SearchResult(
+                        title  = title.strip(),
+                        url    = full_url,
+                        poster = self.fix_url(poster) if poster else None
+                    ))

             return results

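Reviewer note: the rewritten RoketDizi search drops the old cKey/cValue handshake and the "CNT" header, and instead unwraps a base64-encoded JSON payload from api/bg/searchContent. A minimal sketch of that decode step, taking the success/response/state/result field names from the hunk above; the function name is only illustrative, the plugin does this inline inside search().

# Hedged sketch, not the packaged code: unwrap the base64 JSON payload
# returned by RoketDizi's /api/bg/searchContent endpoint in 1.9.2.
import base64
import json

def unwrap_search_response(resp_json: dict) -> list[dict]:
    if not resp_json.get("success"):
        return []

    encoded = resp_json.get("response", "")
    if not encoded:
        return []

    data = json.loads(base64.b64decode(encoded).decode("utf-8"))
    if not data.get("state"):
        return []

    # Each result item carries object_name, used_slug and object_poster_url.
    return data.get("result", [])

The plugin then maps each item onto a SearchResult, building the URL as f"{main_url}/{used_slug}".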
@@ -92,21 +98,48 @@ class RoketDizi(PluginBase):

     async def load_item(self, url: str) -> SeriesInfo:
         # Note: Handling both Movie and Series logic in one, returning SeriesInfo generally or MovieInfo
-        resp = await self.
+        resp = await self.httpx.get(url)
         sel  = Selector(resp.text)

         title       = sel.css("h1.text-white::text").get()
         poster      = sel.css("div.w-full.page-top img::attr(src)").get()
         description = sel.css("div.mt-2.text-sm::text").get()

-
+        # Tags - genre bilgileri (Detaylar bölümünde)
+        tags = []
+        genre_text = sel.css("h3.text-white.opacity-90::text").get()
+        if genre_text:
+            tags = [t.strip() for t in genre_text.split(",")]

-
-        if tags:
-            tags = [t.strip() for t in tags.split(",")]
-
+        # Rating
         rating = sel.css("div.flex.items-center span.text-white.text-sm::text").get()
-
+
+        # Year ve Actors - Detaylar (Details) bölümünden
+        year   = None
+        actors = []
+
+        # Detaylar bölümündeki tüm flex-col div'leri al
+        detail_items = sel.css("div.flex.flex-col")
+        for item in detail_items:
+            # Label ve value yapısı: span.text-base ve span.text-sm.opacity-90
+            label = item.css("span.text-base::text").get()
+            value = item.css("span.text-sm.opacity-90::text").get()
+
+            if label and value:
+                label = label.strip()
+                value = value.strip()
+
+                # Yayın tarihi (yıl)
+                if label == "Yayın tarihi":
+                    # "16 Ekim 2018" formatından yılı çıkar
+                    year_match = re.search(r'\d{4}', value)
+                    if year_match:
+                        year = year_match.group()
+
+                # Yaratıcılar veya Oyuncular
+                elif label in ["Yaratıcılar", "Oyuncular"]:
+                    if value:
+                        actors.append(value)

         # Check urls for episodes
         all_urls = re.findall(r'"url":"([^"]*)"', resp.text)
@@ -114,22 +147,27 @@ class RoketDizi(PluginBase):

         episodes = []
         if is_series:
-
+            # Dict kullanarak duplicate'leri önle ama sıralı tut
+            episodes_dict = {}
             for u in all_urls:
-                if "bolum" in u and u not in
-                    seen_eps.add(u)
+                if "bolum" in u and u not in episodes_dict:
                     season_match = re.search(r'/sezon-(\d+)', u)
                     ep_match     = re.search(r'/bolum-(\d+)', u)

                     season      = int(season_match.group(1)) if season_match else 1
                     episode_num = int(ep_match.group(1)) if ep_match else 1

-
+                    # Key olarak (season, episode) tuple kullan
+                    key = (season, episode_num)
+                    episodes_dict[key] = Episode(
                         season  = season,
                         episode = episode_num,
-                        title   = f"{season}. Sezon {episode_num}. Bölüm",
+                        title   = f"{season}. Sezon {episode_num}. Bölüm",
                         url     = self.fix_url(u)
-                    )
+                    )
+
+            # Sıralı liste oluştur
+            episodes = [episodes_dict[key] for key in sorted(episodes_dict.keys())]

         return SeriesInfo(
             title = title,
@@ -144,7 +182,7 @@ class RoketDizi(PluginBase):
         )

     async def load_links(self, url: str) -> list[dict]:
-        resp = await self.
+        resp = await self.httpx.get(url)
         sel  = Selector(resp.text)

         next_data = sel.css("script#__NEXT_DATA__::text").get()