phub 4.7.7__py3-none-any.whl → 4.7.9__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- phub/__init__.py +1 -2
- phub/__main__.py +4 -12
- phub/consts.py +1 -18
- phub/core.py +53 -62
- phub/modules/__init__.py +1 -2
- phub/modules/parser.py +3 -2
- phub/objects/account.py +13 -10
- phub/objects/feed.py +9 -6
- phub/objects/image.py +12 -9
- phub/objects/playlist.py +10 -13
- phub/objects/query.py +11 -8
- phub/objects/user.py +32 -16
- phub/objects/video.py +58 -81
- phub/tests/test_auth.py +11 -6
- phub/tests/test_model.py +22 -6
- phub/tests/test_playlist.py +5 -2
- phub/tests/test_search.py +8 -5
- phub/tests/test_video.py +6 -17
- phub/utils.py +6 -13
- {phub-4.7.7.dist-info → phub-4.7.9.dist-info}/METADATA +3 -1
- phub-4.7.9.dist-info/RECORD +32 -0
- {phub-4.7.7.dist-info → phub-4.7.9.dist-info}/WHEEL +1 -1
- phub/modules/download.py +0 -198
- phub-4.7.7.dist-info/RECORD +0 -33
- {phub-4.7.7.dist-info → phub-4.7.9.dist-info}/entry_points.txt +0 -0
- {phub-4.7.7.dist-info → phub-4.7.9.dist-info}/licenses/LICENSE +0 -0
- {phub-4.7.7.dist-info → phub-4.7.9.dist-info}/top_level.txt +0 -0
phub/__init__.py
CHANGED
```diff
@@ -11,12 +11,11 @@ __copyright__ = 'Copyright 2024, PHUB'
 __license__ = 'GPLv3'
 __version__ = '4.7.2'
 
-__all__ = ['Client', '
+__all__ = ['Client', 'core', 'utils',
            'consts', 'errors', 'objects', 'modules']
 
 # Shortcuts
 from .core import Client
-from .utils import Quality
 
 # Sub modules
 from . import core
```
phub/__main__.py
CHANGED
```diff
@@ -4,9 +4,9 @@ PHUB built-in CLI.
 
 import os
 import argparse
+import re
 
 from phub import Client, Video
-from phub.modules.download import threaded, FFMPEG, default
 
 
 def text_progress_bar(downloaded, total, title=False):
@@ -33,21 +33,13 @@ def download_video(client: Client, url: [str, Video], output: str, quality: str,
     else:
         raise "Some error happened here, please report on GitHub, thank you :) "
 
-    title = video.title
+    title = re.sub(r'[<>:"/\\|?*]', '', video.title)
     final_output_path = os.path.join(output, title + ".mp4")
 
     print(f"Downloading: {title} to: {final_output_path}")
     video.download(path=final_output_path, quality=quality, downloader=downloader, display=text_progress_bar)
     print(f"Successfully downloaded: {title}")
 
-def resolve_threading_mode(mode, workers=10, timeout=10):
-    """Resolve the appropriate threading mode based on input."""
-    return {
-        "threaded": threaded(max_workers=workers, timeout=timeout),
-        "ffmpeg": FFMPEG,
-        "default": default
-    }.get(mode, default)
-
 
 def main():
     parser = argparse.ArgumentParser(description="PHUB built-in CLI")
@@ -67,7 +59,7 @@ def main():
     args = parser.parse_args()
     quality = args.quality
     output = args.output
-    downloader =
+    downloader = args.downloader
     url = args.url
     model = args.model
     video_limit = args.video_limit
@@ -109,4 +101,4 @@ def main():
 if __name__ == '__main__':
     main()
 
-# EOF
+# EOF
```
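The `re.sub` line added above strips characters that most filesystems reject before the video title is used as a file name. A standalone sketch of that substitution (the `sanitize_title` helper is illustrative, not part of the package):

```python
import os
import re

def sanitize_title(title: str) -> str:
    # Same character class as the new __main__.py line: drop anything that
    # is not allowed in Windows file names (and is awkward elsewhere).
    return re.sub(r'[<>:"/\\|?*]', '', title)

title = sanitize_title('What? A "great" video: part 1/2')
print(os.path.join("downloads", title + ".mp4"))
# On a POSIX system this prints: downloads/What A great video part 12.mp4
```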
phub/consts.py
CHANGED
```diff
@@ -36,22 +36,6 @@ LOGIN_PAYLOAD = {
 }
 
 RSS = 'https://www.pornhub.com/video/webmasterss'
-
-PROXY = None
-# Can be any http or socks proxy
-
-
-MAX_CALL_RETRIES = 4 # Maximum times a HTTPError can be reproduced
-MAX_CALL_TIMEOUT = .4 # Time to wait before retrying basic calls
-CALL_TIMEOUT = 30 # Time to wait before retrying calls (in case no error happens)
-CHALLENGE_TIMEOUT = 2 # Time to wait before injecting the new cookie for resolving the challenge (needs to be at least 1)
-DELAY = 0 # Minimum time between requests
-
-DOWNLOAD_SEGMENT_MAX_ATTEMPS = 5
-DOWNLOAD_SEGMENT_ERROR_DELAY = .5
-
-FFMPEG_EXECUTABLE = 'ffmpeg' # Use from PATH by default
-
 IFRAME = '<iframe src="https://www.pornhub.com/embed/{key}" frameborder="0" width="{width}" height="{height}" scrolling="no" allowfullscreen></iframe>'
 
 # Supported languages
@@ -243,8 +227,7 @@ class re:
     container = find( engine.DOTALL, r'class=\"container(.*)' ) # Get the page container
     document = find( engine.DOTALL, r'.*' ) # Match a whole document
     get_playlist_unavailable = find( engine.DOTALL, r': (\d+)</h5' ) # Get playlist unavailable videos amount
-
-    get_playlist_size = find( engine.DOTALL, r'- (\d+).*?\"avatarPosition' ) # Get playlist video amount
+    get_playlist_size = find( engine.DOTALL, r'var itemsCount = (.*?) ||' ) # Get playlist video amount
     get_playlist_likes = find( engine.DOTALL, r'<span class="votesUp">(.*?)</span>' ) # Get playlist likes
     get_playlist_dislikes = find( engine.DOTALL, r'<span class="votesDown">(.*?)</span>' ) # Get playlist dislikes
     get_playlist_ratings = find( engine.DOTALL, r'<span class="percent">(.*?)%</span>' ) # Get paylist like/dislike ratio
```
phub/core.py
CHANGED
```diff
@@ -1,27 +1,27 @@
 '''
 PHUB core module.
 '''
-
+import re
 import time
 import logging
 import random
-
 import httpx
+
 from typing import Iterable, Union
 from functools import cached_property
+from base_api.base import BaseCore, setup_logger
 
 from . import utils
 from . import consts
 from . import errors
 from . import literals
+from .errors import LoginFailed
 
 from .modules import parser
 
 from .objects import (Video, User,
                       Account, Query, queries, Playlist)
 
-logger = logging.getLogger(__name__)
-
 
 class Client:
     '''
@@ -38,7 +38,8 @@ class Client:
                  login: bool = True,
                  bypass_geo_blocking: bool = False,
                  change_title_language: bool = True,
-                 use_webmaster_api: bool = True
+                 use_webmaster_api: bool = True,
+                 core=None) -> None:
         '''
         Initialises a new client.
 
@@ -54,8 +55,13 @@ class Client:
            LoginFailed: If Pornhub refuses the authentication.
                The reason will be passed as the error body.
        '''
-
-        logger
+
+        self.logger = setup_logger(name="PHUB API - [Client]", log_file=None, level=logging.ERROR)
+        self.core = core or BaseCore()
+        self.core.config.cookies = consts.COOKIES
+        self.core.config.headers = consts.HEADERS
+        # Applying PornHub specific cookies and headers to base API
+        self.logger.debug('Initialised new Client %s', self)
 
         # Initialise session
         Client.use_webmaster_api = use_webmaster_api
@@ -65,41 +71,30 @@ class Client:
 
         self.reset()
 
-        self.
+        self.core.config.headers.update({"Accept-Language": language})
+        self.core.update_headers({"Accept-Language": language})
         self.credentials = {'email': email,
                             'password': password}
-
-
-        self.start_delay = False
-        self.last_request_time = None
-
+
+
         # Connect account
         self.logged = False
         self.account = Account(self)
-        logger.debug('Connected account to client %s', self.account)
+        self.logger.debug('Connected account to client %s', self.account)
 
         # Automatic login
         if login and self.account:
             self.login()
-
+
+    def enable_logging(self, log_file: str = None, level = None, log_ip=None, log_port=None):
+        self.logger = setup_logger(name="PHUB API - [Client]", log_file=log_file, level=level, http_ip=log_ip, http_port=log_port)
+
     def reset(self) -> None:
        '''
        Reset the client requests session.
        This is useful if you are keeping the client running
        for a long time and can help with Pornhub rate limit.
        '''
-        verify = True
-        if consts.PROXY is not None:
-            verify = False
-
-        # Initialise session
-        self.session = httpx.Client(
-            headers = consts.HEADERS,
-            cookies = consts.COOKIES,
-            follow_redirects = True,
-            proxy = consts.PROXY,
-            verify = verify)
-
         self._clear_granted_token()
 
         if self.bypass_geo_blocking:
@@ -107,21 +102,21 @@ class Client:
             language_code = "fr"
 
             # Faking the X-Forwarded-For header (Fake IP source)
-            self.
+            self.core.config.headers.update({"X-Forwarded-For": f"{ip}"})
             # Setting the Accept-Language tag to French, because the faked IP comes from france
-            self.
+            self.core.config.headers.update({"Accept-Language": f"{language_code}"})
             # Setting the country code also to french
-            self.
-            logging.debug(f"Using faked headers for geo-bypass: {self.session.headers}")
+            self.core.config.headers.update({"CF-IPCountry": f"{language_code}"})
+            logging.debug(f"Using faked headers for geo-bypass: {self.core.config.session.headers}")
 
     def call(self,
              func: str,
             method: str = 'GET',
             data: dict = None,
             headers: dict = None,
-             timeout: float = consts.CALL_TIMEOUT,
             throw: bool = True,
-             silent: bool = False
+             silent: bool = False,
+             get_response = True) -> httpx.Response:
        '''
        Used internally to send a request or an API call.
 
@@ -130,7 +125,6 @@
            method (str): Request method (GET, POST, PUT, ...).
            data (dict): Optional data to send to the server.
            headers (dict): Additional request headers.
-            timeout (float): Request maximum response time.
            throw (bool): Whether to raise an error when a request explicitly fails.
            silent (bool): Whether to supress this call from logs.
 
@@ -142,15 +136,10 @@
            HTTPError: If the request failed, for any reason.
        '''
        func = utils.fix_url(func)
-        logger.log(logging.DEBUG if silent else logging.INFO, 'Fetching %s', func or '/')
+        self.logger.log(logging.DEBUG if silent else logging.INFO, 'Fetching %s', func or '/')
 
-
-
-            elapsed_time = time.time() - self.last_request_time
-            if elapsed_time < self.delay:
-                time.sleep(self.delay - elapsed_time)
-
-        self.last_request_time = time.time() # Update the time of the last request
+        if headers:
+            self.core.config.headers = headers
 
        if not self.language == "en":
            host = consts.LANGUAGE_MAPPING.get(self.language)
@@ -160,15 +149,13 @@
            host = consts.HOST
 
        url = func if 'http' in func else utils.concat(host, func)
-        for i in range(
+        for i in range(self.core.config.max_retries):
            try:
-                response = self.
+                response = self.core.fetch(
                    method = method,
                    url = url,
-                    headers = headers,
                    data = data,
-
-                )
+                    get_response=get_response)
 
                # Silent 429 errors
                if b'429</title>' in response.content:
@@ -177,19 +164,18 @@
                # Attempt to resolve the challenge if needed
                challenge = consts.re.get_challenge(response.text, False)
                if challenge:
-                    logger.info('Challenge found, attempting to resolve')
+                    self.logger.info('Challenge found, attempting to resolve')
                    parser.challenge(self, *challenge)
 
-                    logger.info(f"Sleeping for
-                    time.sleep(
+                    self.logger.info(f"Sleeping for 1.5 seconds")
+                    time.sleep(1.5) # Yes, we need to sleep that amount, otherwise PornHub refuses the challenge.
                    continue # Reload page
 
                break
 
            except Exception as err:
-                logger.log(logging.DEBUG if silent else logging.WARNING,
-                           f'Call failed: {repr(err)}. Retrying (attempt {i + 1}/{
-                time.sleep(consts.MAX_CALL_TIMEOUT)
+                self.logger.log(logging.DEBUG if silent else logging.WARNING,
+                           f'Call failed: {repr(err)}. Retrying (attempt {i + 1}/{self.core.config.max_retries})')
                continue
 
            else:
@@ -216,16 +202,21 @@
            LoginFailed: If the login failed, for a reason passed in the error body.
        '''
 
-        logger.debug('Attempting login')
+        self.logger.debug('Attempting login')
 
        if not force and self.logged:
-            logger.error('Client is already logged in')
+            self.logger.error('Client is already logged in')
            raise errors.ClientAlreadyLogged()
 
        # Get token
-        page = self.call('').text
-
-
+        page = self.call('https://www.pornhub.com').text
+        try:
+            base_token = consts.re.get_token(page)
+
+        except errors.RegexError:
+            self.logger.warning("Couldn't get token. Trying alternative method...")
+            base_token = re.search(r'data-token="(.*?)"', string=page).group(1)
+
        # Send credentials
        payload = consts.LOGIN_PAYLOAD | self.credentials | {'token': base_token}
        response = self.call('front/authenticate', method = 'POST', data = payload)
@@ -236,7 +227,7 @@
        message = data.get('message')
 
        if throw and not success:
-            logger.error('Login failed: Received error: %s', message)
+            self.logger.error('Login failed: Received error: %s', message)
            raise errors.LoginFailed(message)
 
        # Reset token
@@ -258,7 +249,7 @@
            Video: The corresponding video object.
        '''
 
-        logger.debug(f'Fetching video at {video}')
+        self.logger.debug(f'Fetching video at {video}')
 
        if isinstance(video, Video):
            # User might want to re-init a video,
@@ -296,7 +287,7 @@
        if isinstance(user, User):
            user = user.url
 
-        logger.debug('Fetching user %s', user)
+        self.logger.debug('Fetching user %s', user)
        return User.get(self, user)
 
    def search_hubtraffic(self,
@@ -308,7 +299,7 @@
                          period: literals.ht_period = None) -> Query:
        '''
        Perform searching on Pornhub using the HubTraffic API.
-        It is
+        It is considered to be much faster but has less filters.
 
        Args:
            query (str): The query to search.
```
phub/modules/__init__.py
CHANGED
phub/modules/parser.py
CHANGED
```diff
@@ -79,7 +79,8 @@ def challenge(client: Client, challenge: str, token: str) -> None:
 
     # Build and inject cookie
     cookie = f'{n}*{p // n}:{s}:{token}:1'
-    client.
+    client.core.config.cookies = {'KEY', cookie}
+    client.core.update_cookies()
     logger.info('Injected cookie %s', cookie)
-
+    print("Injected cookie for authentication")
 # EOF
```
phub/objects/account.py
CHANGED
```diff
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import logging
 from functools import cached_property
+from base_api.base import setup_logger
 from typing import TYPE_CHECKING, Literal, Iterator, Union
 
 from .. import utils
@@ -12,8 +13,6 @@ if TYPE_CHECKING:
     from ..core import Client
     from . import Feed, queries, User
 
-logger = logging.getLogger(__name__)
-
 
 class Account:
     '''
@@ -45,6 +44,7 @@
        '''
 
        self.client = client
+        self.logger = setup_logger(name="PHUB API - [Account]", log_file=None, level=logging.ERROR)
 
        self.name: str = None
        self.avatar: Image = None
@@ -54,9 +54,12 @@
        # Save data keys so far, so we can make a difference with the
        # cached property ones.
        self.loaded_keys = list(self.__dict__.keys()) + ['loaded_keys']
-
+
+    def enable_logging(self, log_file: str = None, level=None, log_ip=None, log_port=None):
+        self.logger = setup_logger(name="PHUB API - [Account]", log_file=log_file, level=level, http_ip=log_ip,
+                                   http_port=log_port)
+
    def __repr__(self) -> str:
-
        status = 'logged-out' if self.name is None else f'name={self.name}'
        return f'phub.Account({status})'
 
@@ -77,7 +80,7 @@
 
        # We assert that the account is from a normal user (not model, etc.)
        if not 'users/' in self.user.url:
-            logger.error('Invalid user type: %s', url)
+            self.logger.error('Invalid user type: %s', url)
            raise NotImplementedError('Non-user account are not supported.')
 
    def refresh(self, refresh_login: bool = False) -> None:
@@ -88,25 +91,25 @@
            refresh_login (bool): Whether to also attempt to re-log in.
        '''
 
-        logger.info('Refreshing account %s', self)
+        self.logger.info('Refreshing account %s', self)
 
        if refresh_login:
-            logger.info('Forcing login refresh')
+            self.logger.info('Forcing login refresh')
            self.client.login(force = True)
 
        # Clear properties cache
        for key in list(self.__dict__.keys()):
            if not key in self.loaded_keys:
 
-                logger.debug('Deleting key %s', key)
+                self.logger.debug('Deleting key %s', key)
                delattr(self, key)
 
    def fix_recommendations(self) -> None:
        '''
-        Allow
+        Allow recommendations cookies.
        '''
 
-        logger.info('Fixing account recommendations')
+        self.logger.info('Fixing account recommendations')
 
        payload = utils.urlify({
            'token': self.client._granted_token,
```
phub/objects/feed.py
CHANGED
```diff
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import logging
 from functools import cached_property
+from base_api.base import setup_logger
 from typing import TYPE_CHECKING, Callable, Iterator, Union
 
 from .. import literals
@@ -11,8 +12,6 @@ if TYPE_CHECKING:
     from . import queries
     from ..core import Client
 
-logger = logging.getLogger(__name__)
-
 
 class Feed:
     '''
@@ -28,9 +27,13 @@
        '''
 
        self.client = client
-
-        logger.debug('Initialised account feed: %s', self)
-
+        self.logger = setup_logger(name="PHUB API - [Feed]", log_file=None, level=logging.ERROR)
+        self.logger.debug('Initialised account feed: %s', self)
+
+    def enable_logging(self, log_file: str = None, level=None, log_ip=None, log_port=None):
+        self.logger = setup_logger(name="PHUB API - [Feed]", log_file=log_file, level=level, http_ip=log_ip,
+                                   http_port=log_port)
+
    def __repr__(self) -> str:
 
        return f'phub.FeedCreator(for={self.client.account.name})'
@@ -51,7 +54,7 @@
        # Generate args
        username = user.name if isinstance(user, User) else user
 
-        logger.info('Generating new filter feed using args', )
+        self.logger.info('Generating new filter feed using args', )
 
        return queries.FeedQuery(
            client = self.client,
```
phub/objects/image.py
CHANGED
```diff
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import os
 import logging
+from base_api.base import setup_logger
 from typing import TYPE_CHECKING, Literal, Union
 
 from .. import utils
@@ -10,9 +11,6 @@ if TYPE_CHECKING:
     from ..core import Client
 
 
-logger = logging.getLogger(__name__)
-
-
 class Image:
     '''
     Represents an image hosted on Pornhub.
@@ -34,19 +32,24 @@
            name (str): Image name.
        '''
 
+        self.logger = setup_logger(name="PHUB API - [Image]", log_file=None, level=logging.ERROR)
        self.url = url
        self.name = name
        self.client = client
        self._servers = servers or []
 
-        logger.debug('Generated new image object: %s', self)
+        self.logger.debug('Generated new image object: %s', self)
 
        # Check server image sizes
        sizes = [s.get('size') for s in self._servers]
 
        if len(set(sizes)) > 1:
-            logger.warning('Detected different image sizes on alt servers: %s', sizes)
-
+            self.logger.warning('Detected different image sizes on alt servers: %s', sizes)
+
+    def enable_logging(self, log_file: str = None, level=None, log_ip=None, log_port=None):
+        self.logger = setup_logger(name="PHUB API - [Image]", log_file=log_file, level=level, http_ip=log_ip,
+                                   http_port=log_port)
+
    def __repr__(self) -> str:
 
        return f'phub.Image(name={self.name})'
@@ -70,7 +73,7 @@
        if os.path.isdir(path):
            path = utils.concat(path, self.name + ext)
 
-        logger.info('Saving %s at %s', self, path)
+        self.logger.info('Saving %s at %s', self, path)
 
        with open(path, 'wb') as file:
 
@@ -81,12 +84,12 @@
 
        except Exception as err:
 
-            logger.warning('Failed to get image `%s`', url)
+            self.logger.warning('Failed to get image `%s`', url)
            if not self._servers: raise err
 
            # Pop server and retry
            server = self._servers.pop(0)
-            logger.info('Retrying download with server %s', server)
+            self.logger.info('Retrying download with server %s', server)
            self.url = server['src']
            self.download(path)
 
```
phub/objects/playlist.py
CHANGED
```diff
@@ -1,5 +1,7 @@
 from __future__ import annotations
 
+import re
+import time
 from typing import TYPE_CHECKING
 from functools import cache, cached_property
 
@@ -34,7 +36,6 @@ class Playlist(queries.VideoQuery):
 
         # Initialise
         super().__init__(client, func = None)
-
         # Define both playlist url (first page) and chunked (next pages)
         self.url = 'playlist/' + str(pid)
         self.chunk_url = f'playlist/viewChunked?id={pid}' '&token={token}&page={page}'
@@ -70,13 +71,9 @@ class Playlist(queries.VideoQuery):
            raise errors.NoResult()
 
        return response.text
-
-    @cached_property
-    def _data(self) -> str:
-        return consts.re.playlist_data(self._page)
 
    def __len__(self) -> int:
-        return int(
+        return int(re.search(r'var\s+itemsCount\s*=\s*(\d+)\s*\|\|', string=self._page).group(1))
 
    @cached_property
    def hidden_videos_amount(self) -> int:
@@ -89,27 +86,27 @@
    @cached_property
    def like(self) -> Like:
        return Like(
-            int(consts.re.get_playlist_likes(self.
-            int(consts.re.get_playlist_dislikes(self.
-            float(consts.re.get_playlist_ratings(self.
+            int(consts.re.get_playlist_likes(self._page)),
+            int(consts.re.get_playlist_dislikes(self._page)),
+            float(consts.re.get_playlist_ratings(self._page))
        )
 
    @cached_property
    def views(self) -> int:
-        raw: str = consts.re.get_playlist_views(self.
+        raw: str = consts.re.get_playlist_views(self._page)
        return int(raw.replace(',', ''))
 
    @cached_property
    def tags(self) -> list[str]:
-        return consts.re.get_playlist_tags(self.
+        return consts.re.get_playlist_tags(self._page)
 
    @cached_property
    def author(self) -> User:
-        url = consts.re.get_playlist_author(self.
+        url = consts.re.get_playlist_author(self._page)
        return User.get(self.client, consts.HOST + url)
 
    @cached_property
    def title(self) -> str:
-        return consts.re.get_playlist_title(self.
+        return consts.re.get_playlist_title(self._page)
 
 # EOF
```
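The rewritten `__len__` reads the playlist size from the page's inline `var itemsCount = N || ...` JavaScript instead of the old avatar-based regex. A quick demonstration of that exact regex against a made-up snippet:

```python
import re

# Made-up fragment of a playlist page; only the itemsCount assignment matters here.
page = '<script>var itemsCount = 42 || 0;</script>'

match = re.search(r'var\s+itemsCount\s*=\s*(\d+)\s*\|\|', page)
print(int(match.group(1)))  # 42
```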