KekikStream 1.7.6-py3-none-any.whl → 1.9.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
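For readers who want to reproduce a report like this locally: a wheel is a plain zip archive, so the two releases can be unpacked and compared with the standard library alone. A minimal sketch, assuming both wheels have already been downloaded (for example with `pip download kekikstream==1.7.6 --no-deps`); the file names below are illustrative.

```python
import difflib, zipfile

OLD_WHL = "kekikstream-1.7.6-py3-none-any.whl"   # illustrative paths
NEW_WHL = "kekikstream-1.9.0-py3-none-any.whl"

def read_text_members(path: str) -> dict[str, list[str]]:
    """Map each UTF-8 decodable member of the wheel (a zip archive) to its lines."""
    members = {}
    with zipfile.ZipFile(path) as zf:
        for name in zf.namelist():
            try:
                members[name] = zf.read(name).decode("utf-8").splitlines(keepends=True)
            except UnicodeDecodeError:
                continue   # skip binary members
    return members

old, new = read_text_members(OLD_WHL), read_text_members(NEW_WHL)
for name in sorted(set(old) | set(new)):
    for line in difflib.unified_diff(old.get(name, []), new.get(name, []),
                                     fromfile=f"1.7.6/{name}", tofile=f"1.9.0/{name}"):
        print(line, end="")
```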
Files changed (66)
  1. KekikStream/Core/Extractor/ExtractorBase.py +2 -14
  2. KekikStream/Core/Extractor/ExtractorModels.py +5 -7
  3. KekikStream/Core/Media/MediaHandler.py +44 -26
  4. KekikStream/Core/Media/MediaManager.py +0 -3
  5. KekikStream/Core/Plugin/PluginBase.py +2 -15
  6. KekikStream/Core/Plugin/PluginModels.py +25 -26
  7. KekikStream/Extractors/CloseLoad.py +1 -2
  8. KekikStream/Extractors/ContentX.py +0 -2
  9. KekikStream/Extractors/DzenRu.py +0 -1
  10. KekikStream/Extractors/ExPlay.py +0 -1
  11. KekikStream/Extractors/FirePlayer.py +4 -5
  12. KekikStream/Extractors/HDPlayerSystem.py +0 -1
  13. KekikStream/Extractors/JetTv.py +0 -1
  14. KekikStream/Extractors/MailRu.py +1 -2
  15. KekikStream/Extractors/MixPlayHD.py +0 -1
  16. KekikStream/Extractors/MixTiger.py +1 -5
  17. KekikStream/Extractors/MolyStream.py +5 -5
  18. KekikStream/Extractors/Odnoklassniki.py +6 -6
  19. KekikStream/Extractors/PeaceMakerst.py +0 -1
  20. KekikStream/Extractors/PixelDrain.py +0 -1
  21. KekikStream/Extractors/PlayerFilmIzle.py +5 -5
  22. KekikStream/Extractors/RapidVid.py +0 -1
  23. KekikStream/Extractors/SetPlay.py +0 -1
  24. KekikStream/Extractors/SetPrime.py +0 -1
  25. KekikStream/Extractors/SibNet.py +0 -1
  26. KekikStream/Extractors/Sobreatsesuyp.py +0 -1
  27. KekikStream/Extractors/TRsTX.py +0 -1
  28. KekikStream/Extractors/TauVideo.py +0 -1
  29. KekikStream/Extractors/TurboImgz.py +0 -1
  30. KekikStream/Extractors/TurkeyPlayer.py +5 -5
  31. KekikStream/Extractors/VidHide.py +5 -5
  32. KekikStream/Extractors/VidMoly.py +0 -1
  33. KekikStream/Extractors/VidMoxy.py +0 -1
  34. KekikStream/Extractors/VidPapi.py +0 -1
  35. KekikStream/Extractors/VideoSeyred.py +0 -1
  36. KekikStream/Extractors/YTDLP.py +109 -0
  37. KekikStream/Extractors/YildizKisaFilm.py +0 -1
  38. KekikStream/Plugins/DiziBox.py +3 -8
  39. KekikStream/Plugins/DiziPal.py +5 -5
  40. KekikStream/Plugins/DiziYou.py +44 -19
  41. KekikStream/Plugins/Dizilla.py +38 -27
  42. KekikStream/Plugins/FilmBip.py +1 -1
  43. KekikStream/Plugins/FilmMakinesi.py +1 -5
  44. KekikStream/Plugins/FilmModu.py +3 -3
  45. KekikStream/Plugins/FullHDFilmizlesene.py +1 -5
  46. KekikStream/Plugins/HDFilmCehennemi.py +14 -21
  47. KekikStream/Plugins/JetFilmizle.py +2 -6
  48. KekikStream/Plugins/RecTV.py +12 -16
  49. KekikStream/Plugins/RoketDizi.py +105 -93
  50. KekikStream/Plugins/SelcukFlix.py +160 -67
  51. KekikStream/Plugins/SezonlukDizi.py +1 -5
  52. KekikStream/Plugins/SineWix.py +4 -8
  53. KekikStream/Plugins/Sinefy.py +72 -51
  54. KekikStream/Plugins/SinemaCX.py +4 -4
  55. KekikStream/Plugins/Sinezy.py +74 -42
  56. KekikStream/Plugins/UgurFilm.py +2 -6
  57. KekikStream/__init__.py +5 -8
  58. KekikStream/requirements.txt +2 -3
  59. kekikstream-1.9.0.dist-info/METADATA +290 -0
  60. kekikstream-1.9.0.dist-info/RECORD +86 -0
  61. kekikstream-1.7.6.dist-info/METADATA +0 -110
  62. kekikstream-1.7.6.dist-info/RECORD +0 -85
  63. {kekikstream-1.7.6.dist-info → kekikstream-1.9.0.dist-info}/WHEEL +0 -0
  64. {kekikstream-1.7.6.dist-info → kekikstream-1.9.0.dist-info}/entry_points.txt +0 -0
  65. {kekikstream-1.7.6.dist-info → kekikstream-1.9.0.dist-info}/licenses/LICENSE +0 -0
  66. {kekikstream-1.7.6.dist-info → kekikstream-1.9.0.dist-info}/top_level.txt +0 -0
KekikStream/Plugins/SelcukFlix.py
@@ -5,109 +5,202 @@ from parsel import Selector
  import re, base64, json, urllib.parse

  class SelcukFlix(PluginBase):
- name = "SelcukFlix"
- main_url = "https://selcukflix.net"
- lang = "tr"
-
+ name = "SelcukFlix"
+ lang = "tr"
+ main_url = "https://selcukflix.net"
+ favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+ description = "Selcukflix'te her türden en yeni ve en popüler dizi ve filmleri izlemenin keyfini çıkarın. Aksiyondan romantiğe, bilim kurgudan dramaya, geniş kütüphanemizde herkes için bir şey var."
+
  main_page = {
- "tum-bolumler" : "Yeni Eklenen Bölümler"
+ f"{main_url}/tum-bolumler" : "Yeni Eklenen Bölümler",
+ "" : "Yeni Diziler",
+ "" : "Kore Dizileri",
+ "" : "Yerli Diziler",
+ "15" : "Aile",
+ "17" : "Animasyon",
+ "9" : "Aksiyon",
+ "5" : "Bilim Kurgu",
+ "2" : "Dram",
+ "12" : "Fantastik",
+ "18" : "Gerilim",
+ "3" : "Gizem",
+ "8" : "Korku",
+ "4" : "Komedi",
+ "7" : "Romantik"
  }

  async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
- full_url = f"{self.main_url}/{url}"
- resp = await self.httpx.get(full_url)
- sel = Selector(resp.text)
-
  results = []
  if "tum-bolumler" in url:
- for item in sel.css("div.col-span-3 a"):
- name = item.css("h2::text").get()
- ep_info = item.css("div.opacity-80::text").get()
- href = item.css("::attr(href)").get()
- poster = item.css("div.image img::attr(src)").get()
-
- if name and href:
- title = f"{name} - {ep_info}" if ep_info else name
- final_url = self.fix_url(href)
- if "/dizi/" in final_url and "/sezon-" in final_url:
- final_url = final_url.split("/sezon-")[0]
-
+ try:
+ resp = await self.httpx.get(url)
+ sel = Selector(resp.text)
+
+ for item in sel.css("div.col-span-3 a"):
+ name = item.css("h2::text").get()
+ ep_info = item.css("div.opacity-80::text").get()
+ href = item.css("::attr(href)").get()
+ poster = item.css("div.image img::attr(src)").get()
+
+ if name and href:
+ title = f"{name} - {ep_info}" if ep_info else name
+ final_url = self.fix_url(href)
+
+ if "/dizi/" in final_url and "/sezon-" in final_url:
+ final_url = final_url.split("/sezon-")[0]
+
+ results.append(MainPageResult(
+ category = category,
+ title = title,
+ url = final_url,
+ poster = self.fix_url(poster)
+ ))
+ except Exception:
+ pass
+ return results
+
+ base_api = f"{self.main_url}/api/bg/findSeries"
+
+ params = {
+ "releaseYearStart" : "1900",
+ "releaseYearEnd" : "2026",
+ "imdbPointMin" : "1",
+ "imdbPointMax" : "10",
+ "categoryIdsComma" : "",
+ "countryIdsComma" : "",
+ "orderType" : "date_desc",
+ "languageId" : "-1",
+ "currentPage" : page,
+ "currentPageCount" : "24",
+ "queryStr" : "",
+ "categorySlugsComma" : "",
+ "countryCodesComma" : ""
+ }
+
+ if "Yerli Diziler" in category:
+ params["imdbPointMin"] = "5"
+ params["countryIdsComma"] = "29"
+ elif "Kore Dizileri" in category:
+ params["countryIdsComma"] = "21"
+ params["countryCodesComma"] = "KR"
+ else:
+ params["categoryIdsComma"] = url
+
+ full_url = f"{base_api}?{urllib.parse.urlencode(params)}"
+
+ headers = {
+ "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:137.0) Gecko/20100101 Firefox/137.0",
+ "Accept" : "application/json, text/plain, */*",
+ "Accept-Language" : "en-US,en;q=0.5",
+ "X-Requested-With" : "XMLHttpRequest",
+ "Sec-Fetch-Site" : "same-origin",
+ "Sec-Fetch-Mode" : "cors",
+ "Sec-Fetch-Dest" : "empty",
+ "Referer" : f"{self.main_url}/"
+ }
+
+ try:
+ post_resp = await self.httpx.post(full_url, headers=headers)
+ resp_json = post_resp.json()
+ response_data = resp_json.get("response")
+
+ raw_data = base64.b64decode(response_data)
+ try:
+ decoded_str = raw_data.decode('utf-8')
+ except UnicodeDecodeError:
+ decoded_str = raw_data.decode('iso-8859-1').encode('utf-8').decode('utf-8')
+
+ data = json.loads(decoded_str)
+
+ for item in data.get("result", []):
+ title = item.get("title")
+ slug = item.get("slug")
+ poster = item.get("poster")
+
+ if poster:
+ poster = self.clean_image_url(poster)
+
+ if slug:
  results.append(MainPageResult(
- category=category,
- title=title,
- url=final_url,
- poster=self.fix_url(poster)
+ category = category,
+ title = title,
+ url = self.fix_url(slug),
+ poster = poster
  ))
-
+
+ except Exception:
+ pass
+
  return results

  async def search(self, query: str) -> list[SearchResult]:
  search_url = f"{self.main_url}/api/bg/searchcontent?searchterm={query}"
-
+
  headers = {
- "Accept": "application/json, text/plain, */*",
- "X-Requested-With": "XMLHttpRequest",
- "Referer": f"{self.main_url}/"
+ "Accept" : "application/json, text/plain, */*",
+ "X-Requested-With" : "XMLHttpRequest",
+ "Referer" : f"{self.main_url}/"
  }
-
+
  post_resp = await self.httpx.post(search_url, headers=headers)
-
+
  try:
- resp_json = post_resp.json()
+ resp_json = post_resp.json()
  response_data = resp_json.get("response")
-
- raw_data = base64.b64decode(response_data)
+ raw_data = base64.b64decode(response_data)
  try:
  decoded_str = raw_data.decode('utf-8')
  except UnicodeDecodeError:
  decoded_str = raw_data.decode('iso-8859-1').encode('utf-8').decode('utf-8')
-
+
  search_data = json.loads(decoded_str)
-
+
  results = []
  for item in search_data.get("result", []):
- title = item.get("title")
- slug = item.get("slug")
+ title = item.get("title")
+ slug = item.get("slug")
  poster = item.get("poster")
+
  if poster:
  poster = self.clean_image_url(poster)
-
+
  if slug and "/seri-filmler/" not in slug:
  results.append(SearchResult(
- title=title,
- url=self.fix_url(slug),
- poster=poster
+ title = title,
+ url = self.fix_url(slug),
+ poster = poster
  ))
+
  return results
-
+
  except Exception:
  return []

  async def load_item(self, url: str) -> SeriesInfo:
  resp = await self.httpx.get(url)
  sel = Selector(resp.text)
-
+
  next_data = sel.css("script#__NEXT_DATA__::text").get()
  if not next_data:
  return None
-
- data = json.loads(next_data)
- secure_data = data["props"]["pageProps"]["secureData"]
- raw_data = base64.b64decode(secure_data.replace('"', ''))
+
+ data = json.loads(next_data)
+ secure_data = data["props"]["pageProps"]["secureData"]
+ raw_data = base64.b64decode(secure_data.replace('"', ''))
  try:
  decoded_str = raw_data.decode('utf-8')
  except UnicodeDecodeError:
  decoded_str = raw_data.decode('iso-8859-1') # .encode('utf-8').decode('utf-8') implied
-
+
  content_details = json.loads(decoded_str)
- item = content_details.get("contentItem", {})
-
- title = item.get("original_title") or item.get("originalTitle")
- poster = self.clean_image_url(item.get("poster_url") or item.get("posterUrl"))
- description = item.get("description") or item.get("used_description")
- rating = str(item.get("imdb_point") or item.get("imdbPoint", ""))
-
- series_data = content_details.get("relatedData", {}).get("seriesData")
+ item = content_details.get("contentItem", {})
+
+ title = item.get("original_title") or item.get("originalTitle")
+ poster = self.clean_image_url(item.get("poster_url") or item.get("posterUrl"))
+ description = item.get("description") or item.get("used_description")
+ rating = str(item.get("imdb_point") or item.get("imdbPoint", ""))
+
+ series_data = content_details.get("relatedData", {}).get("seriesData")
  if not series_data and "RelatedResults" in content_details:
  series_data = content_details["RelatedResults"].get("getSerieSeasonAndEpisodes", {}).get("result")
  if series_data and isinstance(series_data, list):
@@ -127,19 +220,19 @@ class SelcukFlix(PluginBase):
  ep_list = season.get("episodes", [])
  for ep in ep_list:
  episodes.append(Episode(
- season = s_no,
+ season = s_no,
  episode = ep.get("episode_no") or ep.get("episodeNo"),
- title = ep.get("ep_text") or ep.get("epText"),
- url = self.fix_url(ep.get("used_slug") or ep.get("usedSlug"))
+ title = ep.get("ep_text") or ep.get("epText"),
+ url = self.fix_url(ep.get("used_slug") or ep.get("usedSlug"))
  ))

  return SeriesInfo(
- title=title,
- url=url,
- poster=poster,
- description=description,
- rating=rating,
- episodes=episodes
+ title = title,
+ url = url,
+ poster = poster,
+ description = description,
+ rating = rating,
+ episodes = episodes
  )

  async def load_links(self, url: str) -> list[dict]:
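Both the new `findSeries` branch of `get_main_page` and the existing `search` decode the API payload the same way: the JSON body carries a base64-encoded `response` field that is decoded as UTF-8 with an ISO-8859-1 fallback and then parsed as JSON. A standalone sketch of that decode step follows; the helper name and the sample payload are illustrative, since the plugin inlines this logic rather than calling a helper.

```python
import base64, json

def decode_secure_response(payload_b64: str) -> dict:
    """Decode a base64 'response' blob as used by the /api/bg/* endpoints above:
    UTF-8 first, ISO-8859-1 as a fallback for mis-encoded Turkish characters."""
    raw = base64.b64decode(payload_b64)
    try:
        text = raw.decode("utf-8")
    except UnicodeDecodeError:
        text = raw.decode("iso-8859-1")
    return json.loads(text)

# Round-trip check with a hand-made payload:
sample = base64.b64encode(
    json.dumps({"result": [{"title": "Örnek", "slug": "/dizi/ornek"}]}).encode("utf-8")
).decode()
assert decode_secure_response(sample)["result"][0]["slug"] == "/dizi/ornek"
```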
KekikStream/Plugins/SezonlukDizi.py
@@ -1,6 +1,6 @@
  # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

- from KekikStream.Core import kekik_cache, PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode
+ from KekikStream.Core import PluginBase, MainPageResult, SearchResult, SeriesInfo, Episode
  from parsel import Selector

  class SezonlukDizi(PluginBase):
@@ -21,7 +21,6 @@ class SezonlukDizi(PluginBase):
  f"{main_url}/diziler.asp?siralama_tipi=id&kat=6&s=" : "Belgeseller",
  }

- #@kekik_cache(ttl=60*60)
  async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
  istek = await self.httpx.get(f"{url}{page}")
  secici = Selector(istek.text)
@@ -36,7 +35,6 @@ class SezonlukDizi(PluginBase):
  for veri in secici.css("div.afis a") if veri.css("div.description::text").get()
  ]

- #@kekik_cache(ttl=60*60)
  async def search(self, query: str) -> list[SearchResult]:
  istek = await self.httpx.get(f"{self.main_url}/diziler.asp?adi={query}")
  secici = Selector(istek.text)
@@ -50,7 +48,6 @@ class SezonlukDizi(PluginBase):
  for afis in secici.css("div.afis a.column")
  ]

- #@kekik_cache(ttl=60*60)
  async def load_item(self, url: str) -> SeriesInfo:
  istek = await self.httpx.get(url)
  secici = Selector(istek.text)
@@ -102,7 +99,6 @@ class SezonlukDizi(PluginBase):
  actors = actors
  )

- #@kekik_cache(ttl=15*60)
  async def load_links(self, url: str) -> list[dict]:
  istek = await self.httpx.get(url)
  secici = Selector(istek.text)
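The SezonlukDizi changes (like the SineWix ones below) only remove the unused `kekik_cache` import and the commented-out `#@kekik_cache(ttl=...)` decorators. For reference, a TTL cache decorator for async methods generally looks like the sketch below; this is a generic illustration, not the `kekik_cache` implementation from `KekikStream.Core`.

```python
import asyncio, functools, time

def ttl_cache(ttl: int):
    """Cache an async function's result for `ttl` seconds, keyed by its arguments."""
    def decorator(func):
        store = {}

        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            hit = store.get(key)
            if hit and time.monotonic() - hit[0] < ttl:
                return hit[1]                      # still fresh, reuse cached value
            value = await func(*args, **kwargs)
            store[key] = (time.monotonic(), value)
            return value
        return wrapper
    return decorator

@ttl_cache(ttl=60 * 60)
async def fetch(url: str) -> str:
    await asyncio.sleep(0.1)                       # stands in for an HTTP request
    return f"body of {url}"

asyncio.run(fetch("https://example.com"))
```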
KekikStream/Plugins/SineWix.py
@@ -1,6 +1,6 @@
  # Bu araç @keyiflerolsun tarafından | @KekikAkademi için yazılmıştır.

- from KekikStream.Core import kekik_cache, PluginBase, MainPageResult, SearchResult, MovieInfo, Episode, SeriesInfo, ExtractResult, Subtitle
+ from KekikStream.Core import PluginBase, MainPageResult, SearchResult, MovieInfo, Episode, SeriesInfo, ExtractResult, Subtitle
  import json

  class SineWix(PluginBase):
@@ -35,7 +35,6 @@ class SineWix(PluginBase):
  f"{main_url}/sinewix/movies/36" : "Tarih",
  }

- #@kekik_cache(ttl=60*60)
  async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
  istek = await self.httpx.get(f"{url}/{page}")
  veriler = istek.json()
@@ -50,7 +49,6 @@ class SineWix(PluginBase):
  for veri in veriler.get("data")
  ]

- #@kekik_cache(ttl=60*60)
  async def search(self, query: str) -> list[SearchResult]:
  istek = await self.httpx.get(f"{self.main_url}/sinewix/search/{query}")

@@ -63,7 +61,6 @@ class SineWix(PluginBase):
  for veri in istek.json().get("search")
  ]

- #@kekik_cache(ttl=60*60)
  async def load_item(self, url: str) -> MovieInfo | SeriesInfo:
  item_type = url.split("?type=")[-1].split("&id=")[0]
  item_id = url.split("&id=")[-1]
@@ -126,7 +123,6 @@ class SineWix(PluginBase):
  episodes = episodes,
  )

- #@kekik_cache(ttl=15*60)
  async def load_links(self, url: str) -> list[dict]:
  try:
  veri = json.loads(url)
@@ -161,9 +157,9 @@ class SineWix(PluginBase):

  return results

- async def play(self, name: str, url: str, referer: str, subtitles: list[Subtitle]):
- extract_result = ExtractResult(name=name, url=url, referer=referer, subtitles=subtitles)
- self.media_handler.title = name
+ async def play(self, **kwargs):
+ extract_result = ExtractResult(**kwargs)
+ self.media_handler.title = kwargs.get("name")
  if self.name not in self.media_handler.title:
  self.media_handler.title = f"{self.name} | {self.media_handler.title}"

KekikStream/Plugins/Sinefy.py
@@ -5,74 +5,95 @@ from parsel import Selector
  import re, json, urllib.parse

  class Sinefy(PluginBase):
- name = "Sinefy"
- main_url = "https://sinefy3.com"
- lang = "tr"
-
+ name = "Sinefy"
+ language = "tr"
+ main_url = "https://sinefy3.com"
+ favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
+ description = "Yabancı film izle olarak vizyondaki en yeni yabancı filmleri türkçe dublaj ve altyazılı olarak en hızlı şekilde full hd olarak sizlere sunuyoruz."
+
  main_page = {
- "page/" : "Son Eklenenler",
- "en-yenifilmler" : "Yeni Filmler",
- "netflix-filmleri-izle" : "Netflix Filmleri",
- "dizi-izle/netflix" : "Netflix Dizileri"
+ f"{main_url}/page/" : "Son Eklenenler",
+ f"{main_url}/en-yenifilmler" : "Yeni Filmler",
+ f"{main_url}/netflix-filmleri-izle" : "Netflix Filmleri",
+ f"{main_url}/dizi-izle/netflix" : "Netflix Dizileri",
+ f"{main_url}/gozat/filmler/animasyon" : "Animasyon",
+ f"{main_url}/gozat/filmler/komedi" : "Komedi",
+ f"{main_url}/gozat/filmler/suc" : "Suç",
+ f"{main_url}/gozat/filmler/aile" : "Aile",
+ f"{main_url}/gozat/filmler/aksiyon" : "Aksiyon",
+ f"{main_url}/gozat/filmler/macera" : "Macera",
+ f"{main_url}/gozat/filmler/fantastik" : "Fantastik",
+ f"{main_url}/gozat/filmler/korku" : "Korku",
+ f"{main_url}/gozat/filmler/romantik" : "Romantik",
+ f"{main_url}/gozat/filmler/savas" : "Savaş",
+ f"{main_url}/gozat/filmler/gerilim" : "Gerilim",
+ f"{main_url}/gozat/filmler/bilim-kurgu" : "Bilim Kurgu",
+ f"{main_url}/gozat/filmler/dram" : "Dram",
+ f"{main_url}/gozat/filmler/gizem" : "Gizem",
+ f"{main_url}/gozat/filmler/western" : "Western",
+ f"{main_url}/gozat/filmler/ulke/turkiye" : "Türk Filmleri",
+ f"{main_url}/gozat/filmler/ulke/kore" : "Kore Filmleri"
  }

  async def get_main_page(self, page: int, url: str, category: str) -> list[MainPageResult]:
  if "page/" in url:
- full_url = f"{self.main_url}/{url}{page}"
+ full_url = f"{url}{page}"
  elif "en-yenifilmler" in url or "netflix" in url:
- full_url = f"{self.main_url}/{url}/{page}"
+ full_url = f"{url}/{page}"
  else:
- full_url = f"{self.main_url}/{url}&page={page}"
-
+ full_url = f"{url}&page={page}"
+
  resp = await self.httpx.get(full_url)
- sel = Selector(resp.text)
-
+ sel = Selector(resp.text)
+
  results = []
  # Kotlin: div.poster-with-subject, div.dark-segment div.poster-md.poster
  for item in sel.css("div.poster-with-subject, div.dark-segment div.poster-md.poster"):
- title = item.css("h2::text").get()
- href = item.css("a::attr(href)").get()
- poster= item.css("img::attr(data-srcset)").get()
+ title = item.css("h2::text").get()
+ href = item.css("a::attr(href)").get()
+ poster = item.css("img::attr(data-srcset)").get()
  if poster:
  poster = poster.split(",")[0].split(" ")[0]
-
+
  if title and href:
  results.append(MainPageResult(
- category=category,
- title=title,
- url=self.fix_url(href),
- poster=self.fix_url(poster)
+ category = category,
+ title = title,
+ url = self.fix_url(href),
+ poster = self.fix_url(poster)
  ))
+
  return results

  async def search(self, query: str) -> list[SearchResult]:
  # Try to get dynamic keys from main page first
- c_key = "ca1d4a53d0f4761a949b85e51e18f096"
+ c_key = "ca1d4a53d0f4761a949b85e51e18f096"
  c_value = "MTc0NzI2OTAwMDU3ZTEwYmZjMDViNWFmOWIwZDViODg0MjU4MjA1ZmYxOThmZTYwMDdjMWQzMzliNzY5NzFlZmViMzRhMGVmNjgwODU3MGIyZA=="
-
+
  try:
  resp = await self.httpx.get(self.main_url)
- sel = Selector(resp.text)
- cke = sel.css("input[name='cKey']::attr(value)").get()
+ sel = Selector(resp.text)
+ cke = sel.css("input[name='cKey']::attr(value)").get()
  cval = sel.css("input[name='cValue']::attr(value)").get()
  if cke and cval:
- c_key = cke
+ c_key = cke
  c_value = cval
+
  except Exception:
  pass

  post_url = f"{self.main_url}/bg/searchcontent"
  data = {
- "cKey": c_key,
- "cValue": c_value,
- "searchTerm": query
+ "cKey" : c_key,
+ "cValue" : c_value,
+ "searchTerm" : query
  }

  headers = {
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
- "Accept": "application/json, text/javascript, */*; q=0.01",
- "X-Requested-With": "XMLHttpRequest",
- "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"
+ "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0",
+ "Accept" : "application/json, text/javascript, */*; q=0.01",
+ "X-Requested-With" : "XMLHttpRequest",
+ "Content-Type" : "application/x-www-form-urlencoded; charset=UTF-8"
  }

  response = await self.httpx.post(post_url, data=data, headers=headers)
@@ -179,24 +200,24 @@

  if episodes:
  return SeriesInfo(
- title=title,
- url=url,
- poster=self.fix_url(poster),
- description=description,
- rating=rating,
- tags=tags,
- actors=actors,
- episodes=episodes
+ title = title,
+ url = url,
+ poster = self.fix_url(poster),
+ description = description,
+ rating = rating,
+ tags = tags,
+ actors = actors,
+ episodes = episodes
  )
  else:
  return MovieInfo(
- title=title,
- url=url,
- poster=self.fix_url(poster),
- description=description,
- rating=rating,
- tags=tags,
- actors=actors
+ title = title,
+ url = url,
+ poster = self.fix_url(poster),
+ description = description,
+ rating = rating,
+ tags = tags,
+ actors = actors
  )

  async def load_links(self, url: str) -> list[dict]:
@@ -208,7 +229,7 @@
  iframe = self.fix_url(iframe)
  extractor = self.ex_manager.find_extractor(iframe)
  return [{
- "url": iframe,
- "name": extractor.name if extractor else "Iframe"
+ "url" : iframe,
+ "name" : extractor.name if extractor else "Iframe"
  }]
  return []
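Sinefy's `search` still posts a `cKey`/`cValue` token pair to `/bg/searchcontent`: the plugin keeps hard-coded fallback values but first tries to scrape fresh tokens from hidden inputs on the home page. A condensed sketch of that token refresh using `httpx` and `parsel` directly; the fallback constants are copied from the diff, while the function itself is illustrative.

```python
import asyncio, httpx
from parsel import Selector

MAIN_URL = "https://sinefy3.com"
FALLBACK = {
    "cKey"   : "ca1d4a53d0f4761a949b85e51e18f096",
    "cValue" : "MTc0NzI2OTAwMDU3ZTEwYmZjMDViNWFmOWIwZDViODg0MjU4MjA1ZmYxOThmZTYwMDdjMWQzMzliNzY5NzFlZmViMzRhMGVmNjgwODU3MGIyZA==",
}

async def fetch_search_tokens() -> dict:
    """Scrape the hidden cKey/cValue inputs from the home page; fall back to the
    constants baked into the plugin if the request or the selectors fail."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(MAIN_URL)
        sel  = Selector(resp.text)
        ckey = sel.css("input[name='cKey']::attr(value)").get()
        cval = sel.css("input[name='cValue']::attr(value)").get()
        if ckey and cval:
            return {"cKey": ckey, "cValue": cval}
    except Exception:
        pass
    return dict(FALLBACK)

if __name__ == "__main__":
    tokens = asyncio.run(fetch_search_tokens())
    # The search then POSTs {**tokens, "searchTerm": query} to f"{MAIN_URL}/bg/searchcontent".
    print(tokens["cKey"])
```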
KekikStream/Plugins/SinemaCX.py
@@ -7,7 +7,7 @@ import re
  class SinemaCX(PluginBase):
  name = "SinemaCX"
  language = "tr"
- main_url = "https://www.sinema.now"
+ main_url = "https://www.sinema.onl"
  favicon = f"https://www.google.com/s2/favicons?domain={main_url}&sz=64"
  description = "HD Film izle, Türkçe Dublaj ve Altyazılı filmler."

@@ -148,9 +148,9 @@ class SinemaCX(PluginBase):

  return results

- async def play(self, name: str, url: str, referer: str, subtitles: list[Subtitle]):
- extract_result = ExtractResult(name=name, url=url, referer=referer, subtitles=subtitles)
- self.media_handler.title = name
+ async def play(self, **kwargs):
+ extract_result = ExtractResult(**kwargs)
+ self.media_handler.title = kwargs.get("name")
  if self.name not in self.media_handler.title:
  self.media_handler.title = f"{self.name} | {self.media_handler.title}"
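Both SineWix and SinemaCX replace the explicit `play(name, url, referer, subtitles)` signature with `play(**kwargs)` and forward the keyword arguments straight into `ExtractResult`. A rough sketch of what that calling convention implies, using a stand-in dataclass and plugin skeleton instead of the real `ExtractResult` model and `PluginBase` from `KekikStream.Core`:

```python
import asyncio
from dataclasses import dataclass, field

@dataclass
class ExtractResult:                    # stand-in for KekikStream.Core.ExtractResult
    name: str
    url: str
    referer: str = ""
    subtitles: list = field(default_factory=list)

class DemoPlugin:                       # illustrative skeleton, not the real PluginBase
    name = "SineWix"

    async def play(self, **kwargs):
        # 1.9.0 style: whatever the caller passes (name, url, referer, subtitles, ...)
        # is forwarded unchanged into the result model.
        result = ExtractResult(**kwargs)
        title  = kwargs.get("name", "")
        if self.name not in title:
            title = f"{self.name} | {title}"
        return title, result

title, result = asyncio.run(DemoPlugin().play(name="Örnek Bölüm", url="https://example.com/video.m3u8"))
print(title, result.url)
```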