StreamingCommunity 1.8.0__py3-none-any.whl → 1.9.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic.
- StreamingCommunity/run.py +18 -13
- {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/METADATA +157 -63
- StreamingCommunity-1.9.2.dist-info/RECORD +7 -0
- {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/WHEEL +1 -1
- {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/entry_points.txt +1 -0
- StreamingCommunity/Src/Api/Player/Helper/Vixcloud/js_parser.py +0 -143
- StreamingCommunity/Src/Api/Player/Helper/Vixcloud/util.py +0 -166
- StreamingCommunity/Src/Api/Player/ddl.py +0 -89
- StreamingCommunity/Src/Api/Player/maxstream.py +0 -151
- StreamingCommunity/Src/Api/Player/supervideo.py +0 -194
- StreamingCommunity/Src/Api/Player/vixcloud.py +0 -224
- StreamingCommunity/Src/Api/Site/1337xx/__init__.py +0 -50
- StreamingCommunity/Src/Api/Site/1337xx/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/1337xx/site.py +0 -84
- StreamingCommunity/Src/Api/Site/1337xx/title.py +0 -66
- StreamingCommunity/Src/Api/Site/altadefinizione/__init__.py +0 -50
- StreamingCommunity/Src/Api/Site/altadefinizione/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/altadefinizione/film.py +0 -69
- StreamingCommunity/Src/Api/Site/altadefinizione/site.py +0 -86
- StreamingCommunity/Src/Api/Site/animeunity/__init__.py +0 -50
- StreamingCommunity/Src/Api/Site/animeunity/anime.py +0 -126
- StreamingCommunity/Src/Api/Site/animeunity/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/animeunity/film_serie.py +0 -131
- StreamingCommunity/Src/Api/Site/animeunity/site.py +0 -165
- StreamingCommunity/Src/Api/Site/animeunity/util/ScrapeSerie.py +0 -97
- StreamingCommunity/Src/Api/Site/bitsearch/__init__.py +0 -51
- StreamingCommunity/Src/Api/Site/bitsearch/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/bitsearch/site.py +0 -84
- StreamingCommunity/Src/Api/Site/bitsearch/title.py +0 -47
- StreamingCommunity/Src/Api/Site/cb01new/__init__.py +0 -51
- StreamingCommunity/Src/Api/Site/cb01new/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/cb01new/film.py +0 -69
- StreamingCommunity/Src/Api/Site/cb01new/site.py +0 -74
- StreamingCommunity/Src/Api/Site/ddlstreamitaly/Player/ScrapeSerie.py +0 -83
- StreamingCommunity/Src/Api/Site/ddlstreamitaly/__init__.py +0 -57
- StreamingCommunity/Src/Api/Site/ddlstreamitaly/costant.py +0 -16
- StreamingCommunity/Src/Api/Site/ddlstreamitaly/series.py +0 -142
- StreamingCommunity/Src/Api/Site/ddlstreamitaly/site.py +0 -93
- StreamingCommunity/Src/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +0 -83
- StreamingCommunity/Src/Api/Site/guardaserie/Player/ScrapeSerie.py +0 -110
- StreamingCommunity/Src/Api/Site/guardaserie/__init__.py +0 -52
- StreamingCommunity/Src/Api/Site/guardaserie/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/guardaserie/series.py +0 -195
- StreamingCommunity/Src/Api/Site/guardaserie/site.py +0 -84
- StreamingCommunity/Src/Api/Site/guardaserie/util/ScrapeSerie.py +0 -110
- StreamingCommunity/Src/Api/Site/mostraguarda/__init__.py +0 -48
- StreamingCommunity/Src/Api/Site/mostraguarda/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/mostraguarda/film.py +0 -94
- StreamingCommunity/Src/Api/Site/piratebays/__init__.py +0 -50
- StreamingCommunity/Src/Api/Site/piratebays/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/piratebays/site.py +0 -89
- StreamingCommunity/Src/Api/Site/piratebays/title.py +0 -45
- StreamingCommunity/Src/Api/Site/streamingcommunity/__init__.py +0 -55
- StreamingCommunity/Src/Api/Site/streamingcommunity/costant.py +0 -15
- StreamingCommunity/Src/Api/Site/streamingcommunity/film.py +0 -70
- StreamingCommunity/Src/Api/Site/streamingcommunity/series.py +0 -203
- StreamingCommunity/Src/Api/Site/streamingcommunity/site.py +0 -126
- StreamingCommunity/Src/Api/Site/streamingcommunity/util/ScrapeSerie.py +0 -113
- StreamingCommunity/Src/Api/Template/Class/SearchType.py +0 -101
- StreamingCommunity/Src/Api/Template/Util/__init__.py +0 -5
- StreamingCommunity/Src/Api/Template/Util/get_domain.py +0 -137
- StreamingCommunity/Src/Api/Template/Util/manage_ep.py +0 -153
- StreamingCommunity/Src/Api/Template/Util/recall_search.py +0 -37
- StreamingCommunity/Src/Api/Template/__init__.py +0 -3
- StreamingCommunity/Src/Api/Template/site.py +0 -87
- StreamingCommunity/Src/Lib/Downloader/HLS/downloader.py +0 -968
- StreamingCommunity/Src/Lib/Downloader/HLS/proxyes.py +0 -110
- StreamingCommunity/Src/Lib/Downloader/HLS/segments.py +0 -540
- StreamingCommunity/Src/Lib/Downloader/MP4/downloader.py +0 -156
- StreamingCommunity/Src/Lib/Downloader/TOR/downloader.py +0 -222
- StreamingCommunity/Src/Lib/Downloader/__init__.py +0 -5
- StreamingCommunity/Src/Lib/Driver/driver_1.py +0 -76
- StreamingCommunity/Src/Lib/FFmpeg/__init__.py +0 -4
- StreamingCommunity/Src/Lib/FFmpeg/capture.py +0 -170
- StreamingCommunity/Src/Lib/FFmpeg/command.py +0 -292
- StreamingCommunity/Src/Lib/FFmpeg/util.py +0 -242
- StreamingCommunity/Src/Lib/M3U8/__init__.py +0 -6
- StreamingCommunity/Src/Lib/M3U8/decryptor.py +0 -129
- StreamingCommunity/Src/Lib/M3U8/estimator.py +0 -173
- StreamingCommunity/Src/Lib/M3U8/parser.py +0 -666
- StreamingCommunity/Src/Lib/M3U8/url_fixer.py +0 -52
- StreamingCommunity/Src/Lib/TMBD/__init__.py +0 -2
- StreamingCommunity/Src/Lib/TMBD/obj_tmbd.py +0 -39
- StreamingCommunity/Src/Lib/TMBD/tmdb.py +0 -346
- StreamingCommunity/Src/Upload/update.py +0 -64
- StreamingCommunity/Src/Upload/version.py +0 -5
- StreamingCommunity/Src/Util/_jsonConfig.py +0 -204
- StreamingCommunity/Src/Util/call_stack.py +0 -42
- StreamingCommunity/Src/Util/color.py +0 -20
- StreamingCommunity/Src/Util/console.py +0 -12
- StreamingCommunity/Src/Util/headers.py +0 -147
- StreamingCommunity/Src/Util/logger.py +0 -53
- StreamingCommunity/Src/Util/message.py +0 -46
- StreamingCommunity/Src/Util/os.py +0 -417
- StreamingCommunity/Src/Util/table.py +0 -163
- StreamingCommunity-1.8.0.dist-info/RECORD +0 -97
- {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/LICENSE +0 -0
- {StreamingCommunity-1.8.0.dist-info → StreamingCommunity-1.9.2.dist-info}/top_level.txt +0 -0

StreamingCommunity/Src/Api/Player/Helper/Vixcloud/util.py
@@ -1,166 +0,0 @@
-# 23.11.24
-
-import re
-import logging
-from typing import Dict, Any, List, Union
-
-
-class Episode:
-    def __init__(self, data: Dict[str, Any]):
-        self.id: int = data.get('id', '')
-        self.number: int = data.get('number', '')
-        self.name: str = data.get('name', '')
-        self.plot: str = data.get('plot', '')
-        self.duration: int = data.get('duration', '')
-
-    def __str__(self):
-        return f"Episode(id={self.id}, number={self.number}, name='{self.name}', plot='{self.plot}', duration={self.duration} sec)"
-
-class EpisodeManager:
-    def __init__(self):
-        self.episodes: List[Episode] = []
-
-    def add_episode(self, episode_data: Dict[str, Any]):
-        """
-        Add a new episode to the manager.
-
-        Parameters:
-            - episode_data (Dict[str, Any]): A dictionary containing data for the new episode.
-        """
-        episode = Episode(episode_data)
-        self.episodes.append(episode)
-
-    def get_length(self) -> int:
-        """
-        Get the number of episodes in the manager.
-
-        Returns:
-            int: Number of episodes.
-        """
-        return len(self.episodes)
-
-    def clear(self) -> None:
-        """
-        This method clears the episodes list.
-
-        Parameters:
-            - self: The object instance.
-        """
-        self.episodes.clear()
-
-    def __str__(self):
-        return f"EpisodeManager(num_episodes={len(self.episodes)})"
-
-
-class Season:
-    def __init__(self, season_data: Dict[str, Union[int, str, None]]):
-        self.id: int = season_data.get('id')
-        self.number: int = season_data.get('number')
-        self.name: str = season_data.get('name')
-        self.plot: str = season_data.get('plot')
-        self.episodes_count: int = season_data.get('episodes_count')
-
-    def __str__(self):
-        return f"Season(id={self.id}, number={self.number}, name='{self.name}', plot='{self.plot}', episodes_count={self.episodes_count})"
-
-class SeasonManager:
-    def __init__(self):
-        self.seasons: List[Season] = []
-
-    def add_season(self, season_data: Dict[str, Union[int, str, None]]):
-        """
-        Add a new season to the manager.
-
-        Parameters:
-            season_data (Dict[str, Union[int, str, None]]): A dictionary containing data for the new season.
-        """
-        season = Season(season_data)
-        self.seasons.append(season)
-
-    def get(self, index: int) -> Season:
-        """
-        Get a season item from the list by index.
-
-        Parameters:
-            index (int): The index of the seasons item to retrieve.
-
-        Returns:
-            Season: The media item at the specified index.
-        """
-        return self.media_list[index]
-
-    def get_length(self) -> int:
-        """
-        Get the number of seasons in the manager.
-
-        Returns:
-            int: Number of seasons.
-        """
-        return len(self.seasons)
-
-    def clear(self) -> None:
-        """
-        This method clears the seasons list.
-
-        Parameters:
-            self: The object instance.
-        """
-        self.seasons.clear()
-
-    def __str__(self):
-        return f"SeasonManager(num_seasons={len(self.seasons)})"
-
-
-class Stream:
-    def __init__(self, name: str, url: str, active: bool):
-        self.name = name
-        self.url = url
-        self.active = active
-
-    def __repr__(self):
-        return f"Stream(name={self.name!r}, url={self.url!r}, active={self.active!r})"
-
-class StreamsCollection:
-    def __init__(self, streams: list):
-        self.streams = [Stream(**stream) for stream in streams]
-
-    def __repr__(self):
-        return f"StreamsCollection(streams={self.streams})"
-
-    def add_stream(self, name: str, url: str, active: bool):
-        self.streams.append(Stream(name, url, active))
-
-    def get_streams(self):
-        return self.streams
-
-
-class WindowVideo:
-    def __init__(self, data: Dict[str, Any]):
-        self.data = data
-        self.id: int = data.get('id', '')
-        self.name: str = data.get('name', '')
-        self.filename: str = data.get('filename', '')
-        self.size: str = data.get('size', '')
-        self.quality: str = data.get('quality', '')
-        self.duration: str = data.get('duration', '')
-        self.views: int = data.get('views', '')
-        self.is_viewable: bool = data.get('is_viewable', '')
-        self.status: str = data.get('status', '')
-        self.fps: float = data.get('fps', '')
-        self.legacy: bool = data.get('legacy', '')
-        self.folder_id: int = data.get('folder_id', '')
-        self.created_at_diff: str = data.get('created_at_diff', '')
-
-    def __str__(self):
-        return f"WindowVideo(id={self.id}, name='{self.name}', filename='{self.filename}', size='{self.size}', quality='{self.quality}', duration='{self.duration}', views={self.views}, is_viewable={self.is_viewable}, status='{self.status}', fps={self.fps}, legacy={self.legacy}, folder_id={self.folder_id}, created_at_diff='{self.created_at_diff}')"
-
-class WindowParameter:
-    def __init__(self, data: Dict[str, Any]):
-        self.data = data
-        params = data.get('params', {})
-        self.token: str = params.get('token', '')
-        self.expires: str = str(params.get('expires', ''))
-        self.url = data.get('url')
-
-    def __str__(self):
-        return (f"WindowParameter(token='{self.token}', expires='{self.expires}', url='{self.url}', data={self.data})")

StreamingCommunity/Src/Api/Player/ddl.py
@@ -1,89 +0,0 @@
-# 14.06.24
-
-import logging
-
-
-# External libraries
-import httpx
-from bs4 import BeautifulSoup
-
-
-# Internal utilities
-from StreamingCommunity.Src.Util._jsonConfig import config_manager
-from StreamingCommunity.Src.Util.headers import get_headers
-
-
-# Variable
-from StreamingCommunity.Src.Api.Site.ddlstreamitaly.costant import COOKIE
-max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-class VideoSource:
-    def __init__(self) -> None:
-        """
-        Initializes the VideoSource object with default values.
-        """
-        self.headers = {'user-agent': get_headers()}
-        self.cookie = COOKIE
-
-    def setup(self, url: str) -> None:
-        """
-        Sets up the video source with the provided URL.
-
-        Parameters:
-            - url (str): The URL of the video source.
-        """
-        self.url = url
-
-    def make_request(self, url: str) -> str:
-        """
-        Make an HTTP GET request to the provided URL.
-
-        Parameters:
-            - url (str): The URL to make the request to.
-
-        Returns:
-            - str: The response content if successful, None otherwise.
-        """
-        try:
-            response = httpx.get(
-                url=url,
-                headers=self.headers,
-                cookies=self.cookie,
-                timeout=max_timeout
-            )
-            response.raise_for_status()
-
-            return response.text
-
-        except Exception as err:
-            logging.error(f"An error occurred: {err}")
-
-        return None
-
-    def get_playlist(self):
-        """
-        Retrieves the playlist URL from the video source.
-
-        Returns:
-            - tuple: The mp4 link if found, None otherwise.
-        """
-        try:
-            text = self.make_request(self.url)
-
-            if text:
-                soup = BeautifulSoup(text, "html.parser")
-                source = soup.find("source")
-
-                if source:
-                    mp4_link = source.get("src")
-                    return mp4_link
-
-                else:
-                    logging.error("No <source> tag found in the HTML.")
-
-            else:
-                logging.error("Failed to retrieve content from the URL.")
-
-        except Exception as e:
-            logging.error(f"An error occurred while parsing the playlist: {e}")

StreamingCommunity/Src/Api/Player/maxstream.py
@@ -1,151 +0,0 @@
-# 05.07.24
-
-import re
-import logging
-
-
-# External libraries
-import httpx
-import jsbeautifier
-from bs4 import BeautifulSoup
-
-
-# Internal utilities
-from StreamingCommunity.Src.Util._jsonConfig import config_manager
-from StreamingCommunity.Src.Util.headers import get_headers
-
-
-# Variable
-max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-class VideoSource:
-    def __init__(self, url: str):
-        """
-        Sets up the video source with the provided URL.
-
-        Parameters:
-            - url (str): The URL of the video.
-        """
-        self.url = url
-        self.redirect_url = None
-        self.maxstream_url = None
-        self.m3u8_url = None
-        self.headers = {'user-agent': get_headers()}
-
-    def get_redirect_url(self):
-        """
-        Sends a request to the initial URL and extracts the redirect URL.
-        """
-        try:
-
-            # Send a GET request to the initial URL
-            response = httpx.get(self.url, headers=self.headers, follow_redirects=True, timeout=max_timeout)
-            response.raise_for_status()
-
-            # Extract the redirect URL from the HTML
-            soup = BeautifulSoup(response.text, "html.parser")
-            self.redirect_url = soup.find("div", id="iframen1").get("data-src")
-            logging.info(f"Redirect URL: {self.redirect_url}")
-
-            return self.redirect_url
-
-        except httpx.RequestError as e:
-            logging.error(f"Error during the initial request: {e}")
-            raise
-
-        except AttributeError as e:
-            logging.error(f"Error parsing HTML: {e}")
-            raise
-
-    def get_maxstream_url(self):
-        """
-        Sends a request to the redirect URL and extracts the Maxstream URL.
-        """
-        try:
-
-            # Send a GET request to the redirect URL
-            response = httpx.get(self.redirect_url, headers=self.headers, follow_redirects=True, timeout=max_timeout)
-            response.raise_for_status()
-
-            # Extract the Maxstream URL from the HTML
-            soup = BeautifulSoup(response.text, "html.parser")
-            maxstream_url = soup.find("a")
-
-            if maxstream_url is None:
-
-                # If no anchor tag is found, try the alternative method
-                logging.warning("Anchor tag not found. Trying the alternative method.")
-                headers = {
-                    'origin': 'https://stayonline.pro',
-                    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 OPR/111.0.0.0',
-                    'x-requested-with': 'XMLHttpRequest',
-                }
-
-                # Make request to stayonline api
-                data = {'id': self.redirect_url.split("/")[-2], 'ref': ''}
-                response = httpx.post('https://stayonline.pro/ajax/linkEmbedView.php', headers=headers, data=data)
-                response.raise_for_status()
-                uprot_url = response.json()['data']['value']
-
-                # Retry getting maxtstream url
-                response = httpx.get(uprot_url, headers=self.headers, follow_redirects=True, timeout=max_timeout)
-                response.raise_for_status()
-                soup = BeautifulSoup(response.text, "html.parser")
-                maxstream_url = soup.find("a").get("href")
-
-            else:
-                maxstream_url = maxstream_url.get("href")
-
-            self.maxstream_url = maxstream_url
-            logging.info(f"Maxstream URL: {self.maxstream_url}")
-
-            return self.maxstream_url
-
-        except httpx.RequestError as e:
-            logging.error(f"Error during the request to the redirect URL: {e}")
-            raise
-
-        except AttributeError as e:
-            logging.error(f"Error parsing HTML: {e}")
-            raise
-
-    def get_m3u8_url(self):
-        """
-        Sends a request to the Maxstream URL and extracts the .m3u8 file URL.
-        """
-        try:
-
-            # Send a GET request to the Maxstream URL
-            response = httpx.get(self.maxstream_url, headers=self.headers, follow_redirects=True, timeout=max_timeout)
-            response.raise_for_status()
-            soup = BeautifulSoup(response.text, "html.parser")
-
-            # Iterate over all script tags in the HTML
-            for script in soup.find_all("script"):
-                if "eval(function(p,a,c,k,e,d)" in script.text:
-
-                    # Execute the script using
-                    data_js = jsbeautifier.beautify(script.text)
-
-                    # Extract the .m3u8 URL from the script's output
-                    match = re.search(r'sources:\s*\[\{\s*src:\s*"([^"]+)"', data_js)
-
-                    if match:
-                        self.m3u8_url = match.group(1)
-                        logging.info(f"M3U8 URL: {self.m3u8_url}")
-                        break
-
-            return self.m3u8_url
-
-        except Exception as e:
-            logging.error(f"Error executing the Node.js script: {e}")
-            raise
-
-    def get_playlist(self):
-        """
-        Executes the entire flow to obtain the final .m3u8 file URL.
-        """
-        self.get_redirect_url()
-        self.get_maxstream_url()
-        return self.get_m3u8_url()

StreamingCommunity/Src/Api/Player/supervideo.py
@@ -1,194 +0,0 @@
-# 26.05.24
-
-import re
-import logging
-
-
-# External libraries
-import httpx
-import jsbeautifier
-from bs4 import BeautifulSoup
-
-
-# Internal utilities
-from StreamingCommunity.Src.Util._jsonConfig import config_manager
-from StreamingCommunity.Src.Util.headers import get_headers
-
-
-# Variable
-max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-class VideoSource:
-    def __init__(self, url: str) -> None:
-        """
-        Initializes the VideoSource object with default values.
-
-        Attributes:
-            - url (str): The URL of the video source.
-        """
-        self.headers = {
-            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
-            'accept-language': 'it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7',
-            'User-Agent': get_headers()
-        }
-        self.client = httpx.Client()
-        self.url = url
-
-    def make_request(self, url: str) -> str:
-        """
-        Make an HTTP GET request to the provided URL.
-
-        Parameters:
-            - url (str): The URL to make the request to.
-
-        Returns:
-            - str: The response content if successful, None otherwise.
-        """
-
-        try:
-            response = self.client.get(
-                url=url,
-                headers=self.headers,
-                follow_redirects=True,
-                timeout=max_timeout
-            )
-            response.raise_for_status()
-            return response.text
-
-        except Exception as e:
-            logging.error(f"Request failed: {e}")
-            return None
-
-    def parse_html(self, html_content: str) -> BeautifulSoup:
-        """
-        Parse the provided HTML content using BeautifulSoup.
-
-        Parameters:
-            - html_content (str): The HTML content to parse.
-
-        Returns:
-            - BeautifulSoup: Parsed HTML content if successful, None otherwise.
-        """
-
-        try:
-            soup = BeautifulSoup(html_content, "html.parser")
-            return soup
-
-        except Exception as e:
-            logging.error(f"Failed to parse HTML content: {e}")
-            return None
-
-    def get_iframe(self, soup):
-        """
-        Extracts the source URL of the second iframe in the provided BeautifulSoup object.
-
-        Parameters:
-            - soup (BeautifulSoup): A BeautifulSoup object representing the parsed HTML.
-
-        Returns:
-            - str: The source URL of the second iframe, or None if not found.
-        """
-        iframes = soup.find_all("iframe")
-        if iframes and len(iframes) > 1:
-            return iframes[1].get("src")
-
-        return None
-
-    def find_content(self, url):
-        """
-        Makes a request to the specified URL and parses the HTML content.
-
-        Parameters:
-            - url (str): The URL to fetch content from.
-
-        Returns:
-            - BeautifulSoup: A BeautifulSoup object representing the parsed HTML content, or None if the request fails.
-        """
-        content = self.make_request(url)
-        if content:
-            return self.parse_html(content)
-
-        return None
-
-    def get_result_node_js(self, soup):
-        """
-        Prepares and runs a Node.js script from the provided BeautifulSoup object to retrieve the video URL.
-
-        Parameters:
-            - soup (BeautifulSoup): A BeautifulSoup object representing the parsed HTML content.
-
-        Returns:
-            - str: The output from the Node.js script, or None if the script cannot be found or executed.
-        """
-        for script in soup.find_all("script"):
-            if "eval" in str(script):
-                return jsbeautifier.beautify(script.text)
-
-        return None
-
-    def get_playlist(self) -> str:
-        """
-        Download a video from the provided URL.
-
-        Returns:
-            str: The URL of the downloaded video if successful, None otherwise.
-        """
-        try:
-            html_content = self.make_request(self.url)
-            if not html_content:
-                logging.error("Failed to fetch HTML content.")
-                return None
-
-            soup = self.parse_html(html_content)
-            if not soup:
-                logging.error("Failed to parse HTML content.")
-                return None
-
-            # Find master playlist
-            data_js = self.get_result_node_js(soup)
-
-            if data_js is not None:
-                match = re.search(r'sources:\s*\[\{\s*file:\s*"([^"]+)"', data_js)
-
-                if match:
-                    return match.group(1)
-
-            else:
-
-                iframe_src = self.get_iframe(soup)
-                if not iframe_src:
-                    logging.error("No iframe found.")
-                    return None
-
-                down_page_soup = self.find_content(iframe_src)
-                if not down_page_soup:
-                    logging.error("Failed to fetch down page content.")
-                    return None
-
-                pattern = r'data-link="(//supervideo[^"]+)"'
-                match = re.search(pattern, str(down_page_soup))
-                if not match:
-                    logging.error("No player available for download.")
-                    return None
-
-                supervideo_url = "https:" + match.group(1)
-                supervideo_soup = self.find_content(supervideo_url)
-                if not supervideo_soup:
-                    logging.error("Failed to fetch supervideo content.")
-                    return None
-
-                # Find master playlist
-                data_js = self.get_result_node_js(supervideo_soup)
-
-                match = re.search(r'sources:\s*\[\{\s*file:\s*"([^"]+)"', data_js)
-
-                if match:
-                    return match.group(1)
-
-            return None
-
-        except Exception as e:
-            logging.error(f"An error occurred: {e}")
-            return None
-