StreamingCommunity 3.0.1__py3-none-any.whl → 3.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of StreamingCommunity might be problematic. Click here for more details.
- StreamingCommunity/Api/Player/hdplayer.py +65 -0
- StreamingCommunity/Api/Player/mixdrop.py +145 -0
- StreamingCommunity/Api/Site/1337xx/site.py +1 -1
- StreamingCommunity/Api/Site/altadefinizione/site.py +1 -1
- StreamingCommunity/Api/Site/animeunity/site.py +2 -1
- StreamingCommunity/Api/Site/animeworld/site.py +1 -1
- StreamingCommunity/Api/Site/ddlstreamitaly/site.py +1 -1
- StreamingCommunity/Api/Site/guardaserie/site.py +1 -1
- StreamingCommunity/Api/Site/raiplay/site.py +2 -2
- StreamingCommunity/Api/Site/streamingcommunity/series.py +2 -2
- StreamingCommunity/Api/Site/streamingcommunity/site.py +1 -1
- StreamingCommunity/Api/Site/streamingwatch/__init__.py +95 -0
- StreamingCommunity/Api/Site/streamingwatch/film.py +61 -0
- StreamingCommunity/Api/Site/streamingwatch/series.py +160 -0
- StreamingCommunity/Api/Site/streamingwatch/site.py +111 -0
- StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py +118 -0
- StreamingCommunity/Lib/Proxies/proxy.py +232 -0
- StreamingCommunity/Upload/version.py +1 -1
- {streamingcommunity-3.0.1.dist-info → streamingcommunity-3.0.2.dist-info}/METADATA +16 -2
- {streamingcommunity-3.0.1.dist-info → streamingcommunity-3.0.2.dist-info}/RECORD +24 -17
- {streamingcommunity-3.0.1.dist-info → streamingcommunity-3.0.2.dist-info}/WHEEL +1 -1
- StreamingCommunity/Api/Player/maxstream.py +0 -140
- {streamingcommunity-3.0.1.dist-info → streamingcommunity-3.0.2.dist-info}/entry_points.txt +0 -0
- {streamingcommunity-3.0.1.dist-info → streamingcommunity-3.0.2.dist-info}/licenses/LICENSE +0 -0
- {streamingcommunity-3.0.1.dist-info → streamingcommunity-3.0.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
# 29.04.25
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
# External libraries
|
|
7
|
+
import httpx
|
|
8
|
+
from bs4 import BeautifulSoup
|
|
9
|
+
from rich.console import Console
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Internal utilities
|
|
13
|
+
from StreamingCommunity.Util.config_json import config_manager
|
|
14
|
+
from StreamingCommunity.Util.headers import get_userAgent
|
|
15
|
+
from StreamingCommunity.Util.table import TVShowManager
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# Logic class
|
|
19
|
+
from StreamingCommunity.Api.Template.config_loader import site_constant
|
|
20
|
+
from StreamingCommunity.Api.Template.Class.SearchType import MediaManager
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
# Variable
|
|
24
|
+
console = Console()
|
|
25
|
+
media_search_manager = MediaManager()
|
|
26
|
+
table_show_manager = TVShowManager()
|
|
27
|
+
max_timeout = config_manager.get_int("REQUESTS", "timeout")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def extract_nonce(response_) -> str:
    """
    Pull the WordPress admin-ajax nonce out of the live-search script tag.

    Parameters:
        response_: HTTP response whose body is the site's HTML page.

    Returns:
        str: The nonce value, or an empty string when it cannot be found.
    """
    page = BeautifulSoup(response_.content, 'html.parser')
    nonce_script = page.find('script', id='live-search-js-extra')
    if nonce_script is None:
        return ""

    nonce_match = re.search(r'"admin_ajax_nonce":"([^"]+)"', nonce_script.text)
    return nonce_match.group(1) if nonce_match else ""
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def title_search(query: str, additionalData: list) -> int:
    """
    Search for titles based on a search query.

    Parameters:
        - query (str): The query to search for.
        - additionalData (list): [proxy, response_serie] pair — the proxy URL
          (or None) and the homepage response used to extract the WP nonce.

    Returns:
        int: The number of titles found.
    """
    media_search_manager.clear()
    table_show_manager.clear()

    proxy, response_serie = additionalData
    search_url = f"{site_constant.FULL_URL}/wp-admin/admin-ajax.php"
    console.print(f"[cyan]Search url: [yellow]{search_url}")

    try:
        _wpnonce = extract_nonce(response_serie)

        if not _wpnonce:
            console.print("[red]Error: Failed to extract nonce")
            return 0

        data = {
            'action': 'data_fetch',
            'keyword': query,
            '_wpnonce': _wpnonce
        }

        response = httpx.post(
            search_url,
            headers={
                'origin': site_constant.FULL_URL,
                'user-agent': get_userAgent()
            },
            data=data,
            timeout=max_timeout,
            proxy=proxy
        )
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

    except Exception as e:
        console.print(f"[red]Site: {site_constant.SITE_NAME}, request search error: {e}")
        return 0

    for item in soup.find_all('div', class_='searchelement'):
        try:
            # Query the anchors once instead of twice per item.
            anchors = item.find_all("a")
            title = anchors[-1].get_text(strip=True) if anchors else 'N/A'
            url = anchors[0].get('href', '') if anchors else ''

            year_tag = item.find('div', id='search-cat-year')
            year = year_tag.get_text(strip=True) if year_tag else 'N/A'

            # Skip per-episode/per-season entries; only keep whole titles.
            if any(keyword in year.lower() for keyword in ['stagione', 'episodio', 'ep.', 'season', 'episode']):
                continue

            # A missing poster image must not discard the whole entry.
            img_tag = item.find('img')

            media_search_manager.add_media({
                'name': title,
                'type': 'tv' if '/serie/' in url else 'Film',
                'date': year,
                'image': img_tag.get('src', '') if img_tag else '',
                'url': url
            })

        except Exception as e:
            # Report through the shared console like the rest of this module.
            console.print(f"[red]Error parsing a film entry: {e}")

    # Return the number of titles found
    return media_search_manager.get_length()
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
# 29.04.25
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
import logging
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# External libraries
|
|
8
|
+
import httpx
|
|
9
|
+
from bs4 import BeautifulSoup
|
|
10
|
+
|
|
11
|
+
# Internal utilities
|
|
12
|
+
from StreamingCommunity.Util.headers import get_userAgent
|
|
13
|
+
from StreamingCommunity.Util.config_json import config_manager
|
|
14
|
+
from StreamingCommunity.Api.Player.Helper.Vixcloud.util import SeasonManager, Episode
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Variable
|
|
18
|
+
max_timeout = config_manager.get_int("REQUESTS", "timeout")
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class GetSerieInfo:
    """Scrape season and episode information for a series from its page."""

    def __init__(self, url, proxy: str = None):
        self.headers = {'user-agent': get_userAgent()}
        self.url = url
        self.seasons_manager = SeasonManager()
        self.series_name = None

        self.client = httpx.Client(headers=self.headers, proxy=proxy, timeout=max_timeout)

    def collect_info_season(self) -> None:
        """
        Fetch the series page and populate the season/episode managers.
        """
        try:
            page_response = self.client.get(self.url)
            page_response.raise_for_status()
            page = BeautifulSoup(page_response.text, 'html.parser')

            # Resolve the series title once; keep it for later calls.
            if not self.series_name:
                heading = page.find('h1', class_='title-border')
                self.series_name = heading.get_text(strip=True) if heading else 'N/A'

            # Group episode entries by the season number embedded in their URL.
            grouped = {}
            for entry in page.find_all('div', class_='bolumust'):
                link = entry.find('a')
                if not link:
                    continue

                episode_url = link.get('href', '')
                raw_title = link.get_text(strip=True)

                # Drop the "Stagione X Episodio Y (...)" wrapper and trailing date.
                tidy_title = re.sub(r'Stagione \d+ Episodio \d+\s*\(?([^)]+)\)?\s*\d+\s*\w+\s*\d+', r'\1', raw_title)

                found = re.search(r'stagione-(\d+)', episode_url)
                if not found:
                    continue

                season_number = int(found.group(1))
                bucket = grouped.setdefault(season_number, [])
                position = len(bucket) + 1
                bucket.append({
                    'id': position,
                    'number': position,
                    'name': tidy_title.strip(),
                    'url': episode_url
                })

            # Register every discovered season and its episodes.
            for season_number, entries in grouped.items():
                season = self.seasons_manager.add_season({
                    'id': season_number,
                    'number': season_number,
                    'name': f'Stagione {season_number}'
                })

                for entry in entries:
                    season.episodes.add(entry)

        except Exception as e:
            logging.error(f"Error collecting series info: {str(e)}")
            raise

    # ------------- FOR GUI -------------
    def getNumberSeason(self) -> int:
        """
        Get the total number of seasons available for the series.
        """
        if not self.seasons_manager.seasons:
            self.collect_info_season()

        return len(self.seasons_manager.seasons)

    def getEpisodeSeasons(self, season_number: int) -> list:
        """
        Get all episodes for a specific season.
        """
        if not self.seasons_manager.seasons:
            self.collect_info_season()

        target = self.seasons_manager.get_season_by_number(season_number)
        if target:
            return target.episodes.episodes

        logging.error(f"Season {season_number} not found")
        return []

    def selectEpisode(self, season_number: int, episode_index: int) -> Episode:
        """
        Get information for a specific episode in a specific season.
        """
        episode_list = self.getEpisodeSeasons(season_number)
        if episode_list and 0 <= episode_index < len(episode_list):
            return episode_list[episode_index]

        logging.error(f"Episode index {episode_index} is out of range for season {season_number}")
        return None
|
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
# 29.04.25
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
import time
|
|
6
|
+
import json
|
|
7
|
+
import signal
|
|
8
|
+
import warnings
|
|
9
|
+
warnings.filterwarnings("ignore", category=UserWarning)
|
|
10
|
+
from datetime import datetime, timedelta
|
|
11
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
# External library
|
|
15
|
+
import httpx
|
|
16
|
+
from rich import print
|
|
17
|
+
from rich.progress import Progress, SpinnerColumn, BarColumn, TextColumn, TimeRemainingColumn
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# Internal utilities
|
|
21
|
+
from StreamingCommunity.Util.config_json import config_manager
|
|
22
|
+
from StreamingCommunity.Util.headers import get_headers
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# Variable
|
|
26
|
+
MAX_TIMEOUT = config_manager.get_int("REQUESTS", "timeout")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ProxyFinder:
    """
    Locate a working HTTP proxy for a given URL.

    Candidate proxies are fetched from public providers (ProxyScrape and
    Geonode), tested concurrently against ``url``, and the first proxy that
    responds twice within ``timeout_threshold`` seconds is cached on disk
    (``working_proxies.json``) and reused for up to two hours.
    """

    def __init__(self, url, timeout_threshold: float = 7.0, max_proxies: int = 150, max_workers: int = 12):
        """
        Parameters:
            - url (str): URL the candidate proxies are tested against.
            - timeout_threshold (float): Max acceptable response time in seconds.
            - max_proxies (int): Max number of candidate proxies to test.
            - max_workers (int): Thread pool size for concurrent testing.
        """
        self.url = url
        self.timeout_threshold = timeout_threshold
        self.max_proxies = max_proxies
        self.max_workers = max_workers
        self.found_proxy = None
        self.shutdown_flag = False
        self.json_file = os.path.join(os.path.dirname(__file__), 'working_proxies.json')
        signal.signal(signal.SIGINT, self._handle_interrupt)

    def load_saved_proxies(self) -> tuple:
        """Load saved proxies if they're not expired (2 hours old).

        Returns:
            tuple: (proxies, last_update) on a fresh cache hit; (None, None)
            when the cache is missing, malformed, or older than two hours.
        """
        try:
            if not os.path.exists(self.json_file):
                return None, None

            with open(self.json_file, 'r') as f:
                data = json.load(f)

            if not data.get('proxies') or not data.get('last_update'):
                return None, None

            last_update = datetime.fromisoformat(data['last_update'])
            if datetime.now() - last_update > timedelta(hours=2):
                return None, None

            return data['proxies'], last_update

        except Exception:
            # Any cache problem is treated as "no usable cache".
            return None, None

    def save_working_proxy(self, proxy: str, response_time: float):
        """Persist a working proxy (and its response time) to the JSON cache."""
        data = {
            'proxies': [{'proxy': proxy, 'response_time': response_time}],
            'last_update': datetime.now().isoformat()
        }
        try:
            with open(self.json_file, 'w') as f:
                json.dump(data, f, indent=4)
        except Exception as e:
            print(f"[bold red]Error saving proxy:[/bold red] {str(e)}")

    def fetch_geonode(self) -> list:
        """Fetch candidate proxies from the Geonode public list.

        Returns:
            list: (proxy_url, "Geonode") tuples; empty on any error.
        """
        proxies = []
        try:
            response = httpx.get(
                "https://proxylist.geonode.com/api/proxy-list?protocols=http%2Chttps&limit=100&page=1&sort_by=speed&sort_type=asc",
                headers=get_headers(),
                timeout=MAX_TIMEOUT
            )
            data = response.json()
            proxies = [(f"http://{p['ip']}:{p['port']}", "Geonode") for p in data.get('data', [])]

        except Exception as e:
            print(f"[bold red]Error in Geonode:[/bold red] {str(e)[:100]}")

        return proxies

    def fetch_proxyscrape(self) -> list:
        """Fetch candidate proxies from the ProxyScrape public list.

        Returns:
            list: (proxy_url, "ProxyScrape") tuples; empty on any error.
        """
        proxies = []
        try:
            response = httpx.get(
                "https://api.proxyscrape.com/v4/free-proxy-list/get?request=get_proxies&protocol=http&skip=0&proxy_format=protocolipport&format=json&limit=100&timeout=1000",
                headers=get_headers(),
                timeout=MAX_TIMEOUT
            )
            data = response.json()
            if 'proxies' in data and isinstance(data['proxies'], list):
                proxies = [(proxy_data['proxy'], "ProxyScrape") for proxy_data in data['proxies'] if 'proxy' in proxy_data]

        except Exception as e:
            print(f"[bold red]Error in ProxyScrape:[/bold red] {str(e)[:100]}")

        return proxies

    def fetch_proxies_from_sources(self) -> list:
        """Fetch from both providers in parallel and merge up to max_proxies."""
        print("[cyan]Fetching proxies from sources...[/cyan]")
        with ThreadPoolExecutor(max_workers=3) as executor:
            proxyscrape_future = executor.submit(self.fetch_proxyscrape)
            geonode_future = executor.submit(self.fetch_geonode)

            sources_proxies = {}

            try:
                proxyscrape_result = proxyscrape_future.result()
                # Each source contributes at most half of the candidate pool.
                sources_proxies["proxyscrape"] = proxyscrape_result[:int(self.max_proxies / 2)]
            except Exception as e:
                print(f"[bold red]Error fetching from proxyscrape:[/bold red] {str(e)[:100]}")
                sources_proxies["proxyscrape"] = []

            try:
                geonode_result = geonode_future.result()
                sources_proxies["geonode"] = geonode_result[:int(self.max_proxies / 2)]
            except Exception as e:
                print(f"[bold red]Error fetching from geonode:[/bold red] {str(e)[:100]}")
                sources_proxies["geonode"] = []

            # Both keys are guaranteed set above, so merge unconditionally
            # (the previous membership checks were always true).
            merged_proxies = []
            merged_proxies.extend(sources_proxies["proxyscrape"])
            merged_proxies.extend(sources_proxies["geonode"])

            return merged_proxies[:self.max_proxies]

    def _test_single_request(self, proxy_info: tuple) -> tuple:
        """Issue one GET through the proxy; return (ok, elapsed, response, source)."""
        proxy, source = proxy_info
        try:
            start = time.time()
            with httpx.Client(proxy=proxy, timeout=self.timeout_threshold) as client:
                response = client.get(self.url, headers=get_headers())
                if response.status_code == 200:
                    return (True, time.time() - start, response, source)
        except Exception:
            pass
        return (False, self.timeout_threshold + 1, None, source)

    def test_proxy(self, proxy_info: tuple) -> tuple:
        """Test a proxy twice; return (proxy, ok, avg_time, response, source)."""
        proxy, source = proxy_info
        if self.shutdown_flag:
            return (proxy, False, 0, None, source)

        # First request weeds out dead/slow proxies cheaply.
        success1, time1, response1, source = self._test_single_request(proxy_info)
        if not success1 or time1 > self.timeout_threshold:
            return (proxy, False, time1, None, source)

        # Second request guards against a one-off lucky response.
        success2, time2, _, source = self._test_single_request(proxy_info)
        avg_time = (time1 + time2) / 2
        return (proxy, success2 and time2 <= self.timeout_threshold, avg_time, response1, source)

    def _handle_interrupt(self, sig, frame):
        """SIGINT handler: flag shutdown for worker threads and exit."""
        print("\n[bold yellow]Received keyboard interrupt. Terminating...[/bold yellow]")
        self.shutdown_flag = True
        sys.exit(0)

    def find_fast_proxy(self) -> tuple:
        """
        Find the first working proxy, preferring the on-disk cache.

        Returns:
            tuple: (proxy, response, source) — the proxy URL, the successful
            test response, and the provider label; (None, None, None) when
            nothing works.
        """
        saved_proxies, last_update = self.load_saved_proxies()
        if saved_proxies:
            print("[cyan]Testing saved proxy...[/cyan]")
            for proxy_data in saved_proxies:
                result = self.test_proxy((proxy_data['proxy'], 'cached'))
                if result[1]:
                    # NOTE(review): this cached path returns the avg response
                    # time (result[2]) as the third element, while the
                    # discovery path below returns the source label — confirm
                    # which shape the caller expects before unifying.
                    return proxy_data['proxy'], result[3], result[2]
                else:
                    print(f"[red]Saved proxy {proxy_data['proxy']} failed - response time: {result[2]:.2f}s[/red]")

        proxies = self.fetch_proxies_from_sources()
        if not proxies:
            print("[bold red]No proxies fetched to test.[/bold red]")
            return (None, None, None)

        found_proxy = None
        response_text = None
        source = None
        failed_count = 0
        success_count = 0

        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            futures = {executor.submit(self.test_proxy, p): p for p in proxies}
            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                BarColumn(),
                TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
                TextColumn("[cyan]{task.fields[success]}[/cyan]/[red]{task.fields[failed]}[/red]"),
                TimeRemainingColumn(),
            ) as progress:
                task = progress.add_task(
                    "[cyan]Testing Proxies",
                    total=len(futures),
                    success=success_count,
                    failed=failed_count
                )

                for future in as_completed(futures):
                    if self.shutdown_flag:
                        break

                    try:
                        proxy, success, elapsed, response, proxy_source = future.result()
                        if success:
                            success_count += 1
                            print(f"[bold green]Found valid proxy:[/bold green] {proxy} ({elapsed:.2f}s)")
                            found_proxy = proxy
                            response_text = response
                            # FIX: 'source' was never assigned before, so the
                            # third element of the return tuple was always None.
                            source = proxy_source
                            self.save_working_proxy(proxy, elapsed)
                            self.shutdown_flag = True
                            break
                        else:
                            failed_count += 1
                    except Exception:
                        failed_count += 1

                    progress.update(task, advance=1, success=success_count, failed=failed_count)

        if not found_proxy:
            print("[bold red]No working proxies found[/bold red]")

        return (found_proxy, response_text, source)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: StreamingCommunity
|
|
3
|
-
Version: 3.0.1
|
|
3
|
+
Version: 3.0.2
|
|
4
4
|
Home-page: https://github.com/Lovi-0/StreamingCommunity
|
|
5
5
|
Author: Lovi-0
|
|
6
6
|
Project-URL: Bug Reports, https://github.com/Lovi-0/StreamingCommunity/issues
|
|
@@ -832,11 +832,25 @@ Contributions are welcome! Steps:
|
|
|
832
832
|
4. Push to branch (`git push origin feature/AmazingFeature`)
|
|
833
833
|
5. Open Pull Request
|
|
834
834
|
|
|
835
|
-
|
|
836
835
|
# Disclaimer
|
|
837
836
|
|
|
838
837
|
This software is provided "as is", without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose, and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages, or other liability, whether in an action of contract, tort, or otherwise, arising from, out of, or in connection with the software or the use or other dealings in the software.
|
|
839
838
|
|
|
839
|
+
## Useful Project
|
|
840
|
+
|
|
841
|
+
### 🎯 [Unit3Dup](https://github.com/31December99/Unit3Dup)
|
|
842
|
+
Bot in Python per la generazione e l'upload automatico di torrent su tracker basati su Unit3D.
|
|
843
|
+
|
|
844
|
+
|
|
845
|
+
### 🇮🇹 [MammaMia](https://github.com/UrloMythus/MammaMia)
|
|
846
|
+
Addon per Stremio che consente lo streaming HTTPS di film, serie, anime e TV in diretta in lingua italiana.
|
|
847
|
+
|
|
848
|
+
### 🧩 [streamingcommunity-unofficialapi](https://github.com/Blu-Tiger/streamingcommunity-unofficialapi)
|
|
849
|
+
API non ufficiale per accedere ai contenuti del sito italiano StreamingCommunity.
|
|
850
|
+
|
|
851
|
+
### 🎥 [stream-buddy](https://github.com/Bbalduzz/stream-buddy)
|
|
852
|
+
Tool per guardare o scaricare film dalla piattaforma StreamingCommunity.
|
|
853
|
+
|
|
840
854
|
## Contributors
|
|
841
855
|
|
|
842
856
|
<a href="https://github.com/Arrowar/StreamingCommunity/graphs/contributors" alt="View Contributors">
|
|
@@ -2,49 +2,55 @@ StreamingCommunity/__init__.py,sha256=Cw-N0VCg7sef1WqdtvVwrhs1zc4LoUhs5C8k7vpM1l
|
|
|
2
2
|
StreamingCommunity/global_search.py,sha256=LgRTjVBJYyLiKYa3EIb33vRnVQ-COoQT7gASfVW8-Dg,12022
|
|
3
3
|
StreamingCommunity/run.py,sha256=xBq2GS3JznLZBAF5DnJplNtJBzqOl_zHQXVmXxDMnBs,13108
|
|
4
4
|
StreamingCommunity/Api/Player/ddl.py,sha256=S3UZFonJl3d3xU1fQrosRFXFhwAm8hGVQ8Ff8g-6xSI,2071
|
|
5
|
-
StreamingCommunity/Api/Player/
|
|
5
|
+
StreamingCommunity/Api/Player/hdplayer.py,sha256=HpymvDycFw5W9pgqTKLPdIxc5clLQb4P8JMxUn9R7O8,1802
|
|
6
6
|
StreamingCommunity/Api/Player/mediapolisvod.py,sha256=OcdnE1BMSwPZM-nw74GXNJ44E9RYwGnc_kFEA-G8XyY,2294
|
|
7
|
+
StreamingCommunity/Api/Player/mixdrop.py,sha256=B5KEv-S0xg8b8X2doSxPVcjgwDIlB5TP3m35zfn3v5w,4968
|
|
7
8
|
StreamingCommunity/Api/Player/supervideo.py,sha256=hr9QViI-XD0Dqhcx90oaH8_j0d6cxpVaf-EuCjMs6hI,5199
|
|
8
9
|
StreamingCommunity/Api/Player/sweetpixel.py,sha256=gJSe1fop5J216CB3u8vstxLPP5YbcyoGUH4y3X3-JaQ,1643
|
|
9
10
|
StreamingCommunity/Api/Player/vixcloud.py,sha256=qI9ppYEMGaJ1B5y693BOMeRQri-F4-94SfRkS-9udfM,6287
|
|
10
11
|
StreamingCommunity/Api/Player/Helper/Vixcloud/js_parser.py,sha256=U-8QlD5kGzIk3-4t4D6QyYmiDe8UBrSuVi1YHRQb7AU,4295
|
|
11
12
|
StreamingCommunity/Api/Player/Helper/Vixcloud/util.py,sha256=QLUgbwQrpuPIVNzdBlAiEJXnd-eCj_JQFckZZEEL55w,5214
|
|
12
13
|
StreamingCommunity/Api/Site/1337xx/__init__.py,sha256=OdQxYoJ9UyGSAutZwqH1FgmOH-Z6vGVHb0CLKhwEZGM,1999
|
|
13
|
-
StreamingCommunity/Api/Site/1337xx/site.py,sha256=
|
|
14
|
+
StreamingCommunity/Api/Site/1337xx/site.py,sha256=5XVUMTQn1UqMYgo7tPAw7bGMA-tqhQnfeOGKkgGh9OA,2349
|
|
14
15
|
StreamingCommunity/Api/Site/1337xx/title.py,sha256=8T3cVRb-Mt9QdOtKWVVFHz8iOHqspf7iw28E7bfTV78,1865
|
|
15
16
|
StreamingCommunity/Api/Site/altadefinizione/__init__.py,sha256=Oxjfyg6VolwV6n2VGgICLvdRVPPMzJXMSdz8oI2Xs0M,4145
|
|
16
17
|
StreamingCommunity/Api/Site/altadefinizione/film.py,sha256=0XeqMrMHnk5nbFkVTFaNZWtlXI8pETl7dsORDtIMbjg,4395
|
|
17
18
|
StreamingCommunity/Api/Site/altadefinizione/series.py,sha256=-rCYx-Fa7aZiYepcIne7OdH1aaUFZZAPX-ToBv6mxFs,8192
|
|
18
|
-
StreamingCommunity/Api/Site/altadefinizione/site.py,sha256=
|
|
19
|
+
StreamingCommunity/Api/Site/altadefinizione/site.py,sha256=2kUNQ8ebYlX5dkSql-CvEhU01TOTNtuyEMIAD6SC3lg,2865
|
|
19
20
|
StreamingCommunity/Api/Site/altadefinizione/util/ScrapeSerie.py,sha256=bSApjfY9xd5dw0tZ1t7vB6ifAo5vAkeeEwX6IS7yH1o,3756
|
|
20
21
|
StreamingCommunity/Api/Site/animeunity/__init__.py,sha256=EayZqxyWltgjRRDNfM32JRzgeyElK85o6s0_YJ0dpBk,4031
|
|
21
22
|
StreamingCommunity/Api/Site/animeunity/film.py,sha256=Vqg6yag2siR-Y3ougBsV8mzdQXChxg6ghz_KVXFQ3pE,998
|
|
22
23
|
StreamingCommunity/Api/Site/animeunity/serie.py,sha256=ib86sLXYsYbrvrFNbzKdhlwMUO3DT7JS5yTTrrSr2jk,5711
|
|
23
|
-
StreamingCommunity/Api/Site/animeunity/site.py,sha256=
|
|
24
|
+
StreamingCommunity/Api/Site/animeunity/site.py,sha256=iRFMUdtHricrc09gmVS1kUOQ-EqH_8zafh8ag4HHiUA,5672
|
|
24
25
|
StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py,sha256=Ze6a8D2MRhiOvSw3mTpL3ue2iVc6pA5aqoyUMCTnT7A,3809
|
|
25
26
|
StreamingCommunity/Api/Site/animeworld/__init__.py,sha256=UzHQbfxx_i6qzM586LL940CoiI3Y98IGIVP4-hXUxn4,2790
|
|
26
27
|
StreamingCommunity/Api/Site/animeworld/film.py,sha256=W9KOS9Wvx3Mlqx5WojR-NgnF9WX8mI79JZPS7UwG-dc,1763
|
|
27
28
|
StreamingCommunity/Api/Site/animeworld/serie.py,sha256=MXyV1fK05jPW4iV9NWrRKW-R4ect-TSN78-2APayniU,3516
|
|
28
|
-
StreamingCommunity/Api/Site/animeworld/site.py,sha256
|
|
29
|
+
StreamingCommunity/Api/Site/animeworld/site.py,sha256=-I70fk2IKWeGuS_3WPM4G2ZrmheKIMChgv2I2kWW0Es,3655
|
|
29
30
|
StreamingCommunity/Api/Site/animeworld/util/ScrapeSerie.py,sha256=CBTCH_wnTXUK_MKwq9a1k_XdvOlUrMpbUmpkD5fXVQ0,3589
|
|
30
31
|
StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py,sha256=2FGeGNJ5SHPQzKEEmVMFWft4woGgJ-XLeNxjbSb6L9s,2141
|
|
31
32
|
StreamingCommunity/Api/Site/ddlstreamitaly/series.py,sha256=F_D_2lwHHWN5hgLs8oUDNCYe-4SEPtWzJoU4yT_Nzfg,3726
|
|
32
|
-
StreamingCommunity/Api/Site/ddlstreamitaly/site.py,sha256=
|
|
33
|
+
StreamingCommunity/Api/Site/ddlstreamitaly/site.py,sha256=Rh0icHsYc9RIEkaRQspeKyaFSunADntIcAwvRcnqHjw,2601
|
|
33
34
|
StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py,sha256=tETaN-2GyFBeRsoLnGw3Kz4D4j2TMcnPzGjzlC62w_Y,3655
|
|
34
35
|
StreamingCommunity/Api/Site/guardaserie/__init__.py,sha256=p5hzqshw5hwDl9nJ8FBWbzfPe2j7c5eNYiaftDThGcU,2768
|
|
35
36
|
StreamingCommunity/Api/Site/guardaserie/series.py,sha256=U9rMZCjRqHLFjo468vikxl-2RqO6DCJjebB-G8Y6LDg,6492
|
|
36
|
-
StreamingCommunity/Api/Site/guardaserie/site.py,sha256=
|
|
37
|
+
StreamingCommunity/Api/Site/guardaserie/site.py,sha256=6PPp6qykuKZ3Sa2uY7E1xTwh1-8vHINsEpokGnathmw,2326
|
|
37
38
|
StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py,sha256=_aXU-YcUtSwbC2b6QpNnWDZR8m6vp9xzBEx_zdu5tgI,4196
|
|
38
39
|
StreamingCommunity/Api/Site/raiplay/__init__.py,sha256=xkxVkFsSxA6DHRqPuzQYXnCVNUBhfUG5xxlz6iwf1mw,3132
|
|
39
40
|
StreamingCommunity/Api/Site/raiplay/film.py,sha256=wBv5kQXx7-aCKhAZ5LABZ8zUzu_jPGdXOl9OM2p8dpY,1982
|
|
40
41
|
StreamingCommunity/Api/Site/raiplay/series.py,sha256=uQVbeA_g3Z1Ciqeq99gsY2F8mC5DssH3ueGbCW8gd9Q,6161
|
|
41
|
-
StreamingCommunity/Api/Site/raiplay/site.py,sha256=
|
|
42
|
+
StreamingCommunity/Api/Site/raiplay/site.py,sha256=0s1yHhEIA-JJVb2uVe_SZKILx7TIisadZmov7ZhG28s,5160
|
|
42
43
|
StreamingCommunity/Api/Site/raiplay/util/ScrapeSerie.py,sha256=5F6abToCTtsvW8iIACbChZ0fPlymJiCSF_y8FRsDu7M,5002
|
|
43
44
|
StreamingCommunity/Api/Site/streamingcommunity/__init__.py,sha256=Ej-xZ6x99zeq3p5O7-e_Evi_529x3eq_VryBLejCBiA,3796
|
|
44
45
|
StreamingCommunity/Api/Site/streamingcommunity/film.py,sha256=TPt0yB1DKShDIz_1OEVG1IolMoAKBOaWIZ8lQF61dfM,2575
|
|
45
|
-
StreamingCommunity/Api/Site/streamingcommunity/series.py,sha256=
|
|
46
|
-
StreamingCommunity/Api/Site/streamingcommunity/site.py,sha256=
|
|
46
|
+
StreamingCommunity/Api/Site/streamingcommunity/series.py,sha256=zDvmxGHvljpyX82CSE88Zf7EIYtuXAxLDkk0qUTBRdA,8860
|
|
47
|
+
StreamingCommunity/Api/Site/streamingcommunity/site.py,sha256=GlkxDJH5QezpLoPKRZhLE3ovX4G4ZbsBf5yVG7zbQYw,3134
|
|
47
48
|
StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py,sha256=3sNz9uD-o9xz0dKpSnQsLPC_45v5fnW9Mzas2rQ8-Uc,5579
|
|
49
|
+
StreamingCommunity/Api/Site/streamingwatch/__init__.py,sha256=Y6YsO6m52mKCj0AUXRmBY__zpNmSklWl32U0c7o5yIQ,3544
|
|
50
|
+
StreamingCommunity/Api/Site/streamingwatch/film.py,sha256=QWE4e7Z9c0oTidP76cZPWdOKFU77_RbMXOHOlLRtPFk,1664
|
|
51
|
+
StreamingCommunity/Api/Site/streamingwatch/series.py,sha256=HF4SykhaOathLeCbYrRd1-BdCg30pDRQRCI43FnM2ck,6233
|
|
52
|
+
StreamingCommunity/Api/Site/streamingwatch/site.py,sha256=tFPQTjT9AwZuAh3Z-cn110Xb1snEh4CCNv8Zuc1Vch8,3250
|
|
53
|
+
StreamingCommunity/Api/Site/streamingwatch/util/ScrapeSerie.py,sha256=tEiwL7R5wI8F9ZLOiI-E7pEh72UJaNtswCjhx_wRlBY,4300
|
|
48
54
|
StreamingCommunity/Api/Template/__init__.py,sha256=oyfd_4_g5p5q6mxb_rKwSsudZnTM3W3kg1tLwxg-v-Q,46
|
|
49
55
|
StreamingCommunity/Api/Template/config_loader.py,sha256=2RT_0mqQmWzXM4rYaqss-yhXztYAcfNkTalFPjzv270,2056
|
|
50
56
|
StreamingCommunity/Api/Template/site.py,sha256=BJjQktdu2q2pkGflJv3UdrpSEmzJCJnaT-u-jD5zhgs,2861
|
|
@@ -65,13 +71,14 @@ StreamingCommunity/Lib/M3U8/decryptor.py,sha256=kuxxsd3eN0VGRrMJWXzHo8gCpT0u3fSZ
|
|
|
65
71
|
StreamingCommunity/Lib/M3U8/estimator.py,sha256=8gwTxJ3poRqZdHUTD9_oqXegiPWSXFuqLmqCZBnXS8A,5893
|
|
66
72
|
StreamingCommunity/Lib/M3U8/parser.py,sha256=cSjXPOSgTewrfLgREyQ47wzoOeoYo3L4lOfEWZKxad8,22485
|
|
67
73
|
StreamingCommunity/Lib/M3U8/url_fixer.py,sha256=zldE4yOuNBV6AAvL1KI6p7XdRI_R5YZRscbDgT1564M,1735
|
|
74
|
+
StreamingCommunity/Lib/Proxies/proxy.py,sha256=76O8VYg4TLH17ylqEySeXBjSKLpJ11ZjTwifTib5ft8,9349
|
|
68
75
|
StreamingCommunity/Lib/TMBD/__init__.py,sha256=XzE42tw3Ws59DD1PF8WmGtZ0D4D7Hk3Af8QthNE-22U,66
|
|
69
76
|
StreamingCommunity/Lib/TMBD/obj_tmbd.py,sha256=dRSvJFS5yqmsBZcw2wqbStcBtXNjU_3n5czMyremAtU,1187
|
|
70
77
|
StreamingCommunity/Lib/TMBD/tmdb.py,sha256=byg0EFnlmd9JeLvn1N9K3QkB1KEfeMuFa7OVfGqks1Y,10685
|
|
71
78
|
StreamingCommunity/TelegramHelp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
72
79
|
StreamingCommunity/TelegramHelp/telegram_bot.py,sha256=Qe1__aoK4PpDuing8JtWgdHzLee8LuYYyfeLNA7yADU,26307
|
|
73
80
|
StreamingCommunity/Upload/update.py,sha256=mJTKUIOhC2j03sWXUK6oAZxHyObNP2r1fl3y0BC2jes,3351
|
|
74
|
-
StreamingCommunity/Upload/version.py,sha256=
|
|
81
|
+
StreamingCommunity/Upload/version.py,sha256=JkfhUckV_-HFFWrZzhBGxqLfJSFCKABAXBxJNOYruyQ,171
|
|
75
82
|
StreamingCommunity/Util/color.py,sha256=NvD0Eni-25oOOkY-szCEoc0lGvzQxyL7xhM0RE4EvUM,458
|
|
76
83
|
StreamingCommunity/Util/config_json.py,sha256=r31BJP67EItcI6GioEX7FJzmzM0mubmZ7M73mvZ9YWo,24801
|
|
77
84
|
StreamingCommunity/Util/ffmpeg_installer.py,sha256=yRVIPwbh05tZ-duZmXkH0qasLNxaQCAT_E4cTP79Z3c,14890
|
|
@@ -80,9 +87,9 @@ StreamingCommunity/Util/logger.py,sha256=9kGD6GmWj2pM8ADpJc85o7jm8DD0c5Aguqnq-9k
|
|
|
80
87
|
StreamingCommunity/Util/message.py,sha256=SJaIPLvWeQqsIODVUKw3TgYRmBChovmlbcF6OUxqMI8,1425
|
|
81
88
|
StreamingCommunity/Util/os.py,sha256=0AD2DYoan9dl1ZC1pjDoUM7D8sRa9p81cGdI-lP1OX4,14993
|
|
82
89
|
StreamingCommunity/Util/table.py,sha256=Nw5PlsvfEIOQZWy5VhsU5OK3heuBXGwsqmLl0k8yQzc,9813
|
|
83
|
-
streamingcommunity-3.0.
|
|
84
|
-
streamingcommunity-3.0.
|
|
85
|
-
streamingcommunity-3.0.
|
|
86
|
-
streamingcommunity-3.0.
|
|
87
|
-
streamingcommunity-3.0.
|
|
88
|
-
streamingcommunity-3.0.
|
|
90
|
+
streamingcommunity-3.0.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
|
|
91
|
+
streamingcommunity-3.0.2.dist-info/METADATA,sha256=YNS9YdPhsYFpd7TG5b42rg7nb9Qa4V4z8ajb2F8p7Hs,25692
|
|
92
|
+
streamingcommunity-3.0.2.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
|
|
93
|
+
streamingcommunity-3.0.2.dist-info/entry_points.txt,sha256=Qph9XYfDC8n4LfDLOSl6gJGlkb9eFb5f-JOr_Wb_5rk,67
|
|
94
|
+
streamingcommunity-3.0.2.dist-info/top_level.txt,sha256=YsOcxKP-WOhWpIWgBlh0coll9XUx7aqmRPT7kmt3fH0,19
|
|
95
|
+
streamingcommunity-3.0.2.dist-info/RECORD,,
|