thordata-sdk 0.7.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- thordata/__init__.py +13 -1
- thordata/_example_utils.py +76 -0
- thordata/_utils.py +46 -3
- thordata/async_client.py +863 -23
- thordata/client.py +1023 -51
- thordata/enums.py +3 -3
- thordata/exceptions.py +16 -5
- thordata/models.py +351 -7
- thordata/retry.py +6 -4
- thordata_sdk-1.0.0.dist-info/METADATA +208 -0
- thordata_sdk-1.0.0.dist-info/RECORD +15 -0
- thordata/parameters.py +0 -53
- thordata_sdk-0.7.0.dist-info/METADATA +0 -1053
- thordata_sdk-0.7.0.dist-info/RECORD +0 -15
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-1.0.0.dist-info}/WHEEL +0 -0
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-1.0.0.dist-info}/licenses/LICENSE +0 -0
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-1.0.0.dist-info}/top_level.txt +0 -0
thordata/async_client.py
CHANGED
|
@@ -25,6 +25,7 @@ from __future__ import annotations
|
|
|
25
25
|
import asyncio
|
|
26
26
|
import logging
|
|
27
27
|
import os
|
|
28
|
+
from datetime import date
|
|
28
29
|
from typing import Any, Dict, List, Optional, Union
|
|
29
30
|
|
|
30
31
|
import aiohttp
|
|
@@ -32,6 +33,7 @@ import aiohttp
|
|
|
32
33
|
from . import __version__ as _sdk_version
|
|
33
34
|
from ._utils import (
|
|
34
35
|
build_auth_headers,
|
|
36
|
+
build_builder_headers,
|
|
35
37
|
build_public_api_headers,
|
|
36
38
|
build_user_agent,
|
|
37
39
|
decode_base64_image,
|
|
@@ -45,7 +47,19 @@ from .exceptions import (
|
|
|
45
47
|
ThordataTimeoutError,
|
|
46
48
|
raise_for_code,
|
|
47
49
|
)
|
|
48
|
-
from .models import
|
|
50
|
+
from .models import (
|
|
51
|
+
CommonSettings,
|
|
52
|
+
ProxyConfig,
|
|
53
|
+
ProxyProduct,
|
|
54
|
+
ProxyServer,
|
|
55
|
+
ProxyUserList,
|
|
56
|
+
ScraperTaskConfig,
|
|
57
|
+
SerpRequest,
|
|
58
|
+
UniversalScrapeRequest,
|
|
59
|
+
UsageStatistics,
|
|
60
|
+
VideoTaskConfig,
|
|
61
|
+
WhitelistProxyConfig,
|
|
62
|
+
)
|
|
49
63
|
from .retry import RetryConfig
|
|
50
64
|
|
|
51
65
|
logger = logging.getLogger(__name__)
|
|
@@ -78,8 +92,8 @@ class AsyncThordataClient:
|
|
|
78
92
|
# API Endpoints (same as sync client)
|
|
79
93
|
BASE_URL = "https://scraperapi.thordata.com"
|
|
80
94
|
UNIVERSAL_URL = "https://universalapi.thordata.com"
|
|
81
|
-
API_URL = "https://
|
|
82
|
-
LOCATIONS_URL = "https://
|
|
95
|
+
API_URL = "https://openapi.thordata.com/api/web-scraper-api"
|
|
96
|
+
LOCATIONS_URL = "https://openapi.thordata.com/api/locations"
|
|
83
97
|
|
|
84
98
|
def __init__(
|
|
85
99
|
self,
|
|
@@ -89,7 +103,9 @@ class AsyncThordataClient:
|
|
|
89
103
|
proxy_host: str = "pr.thordata.net",
|
|
90
104
|
proxy_port: int = 9999,
|
|
91
105
|
timeout: int = 30,
|
|
106
|
+
api_timeout: int = 60,
|
|
92
107
|
retry_config: Optional[RetryConfig] = None,
|
|
108
|
+
auth_mode: str = "bearer",
|
|
93
109
|
scraperapi_base_url: Optional[str] = None,
|
|
94
110
|
universalapi_base_url: Optional[str] = None,
|
|
95
111
|
web_scraper_api_base_url: Optional[str] = None,
|
|
@@ -106,16 +122,20 @@ class AsyncThordataClient:
|
|
|
106
122
|
# Proxy configuration
|
|
107
123
|
self._proxy_host = proxy_host
|
|
108
124
|
self._proxy_port = proxy_port
|
|
125
|
+
|
|
126
|
+
# Timeout configuration
|
|
109
127
|
self._default_timeout = aiohttp.ClientTimeout(total=timeout)
|
|
128
|
+
self._api_timeout = aiohttp.ClientTimeout(total=api_timeout)
|
|
110
129
|
|
|
111
130
|
# Retry configuration
|
|
112
131
|
self._retry_config = retry_config or RetryConfig()
|
|
113
132
|
|
|
114
|
-
#
|
|
115
|
-
self.
|
|
116
|
-
self.
|
|
117
|
-
|
|
118
|
-
|
|
133
|
+
# Authentication mode (for scraping APIs)
|
|
134
|
+
self._auth_mode = auth_mode.lower()
|
|
135
|
+
if self._auth_mode not in ("bearer", "header_token"):
|
|
136
|
+
raise ThordataConfigError(
|
|
137
|
+
f"Invalid auth_mode: {auth_mode}. Must be 'bearer' or 'header_token'."
|
|
138
|
+
)
|
|
119
139
|
|
|
120
140
|
# Base URLs (allow override via args or env vars for testing and custom routing)
|
|
121
141
|
scraperapi_base = (
|
|
@@ -142,12 +162,44 @@ class AsyncThordataClient:
|
|
|
142
162
|
or self.LOCATIONS_URL
|
|
143
163
|
).rstrip("/")
|
|
144
164
|
|
|
165
|
+
# Keep these env overrides for now
|
|
166
|
+
gateway_base = os.getenv(
|
|
167
|
+
"THORDATA_GATEWAY_BASE_URL", "https://api.thordata.com/api/gateway"
|
|
168
|
+
)
|
|
169
|
+
child_base = os.getenv(
|
|
170
|
+
"THORDATA_CHILD_BASE_URL", "https://api.thordata.com/api/child"
|
|
171
|
+
)
|
|
172
|
+
|
|
173
|
+
self._gateway_base_url = gateway_base
|
|
174
|
+
self._child_base_url = child_base
|
|
175
|
+
|
|
145
176
|
self._serp_url = f"{scraperapi_base}/request"
|
|
146
177
|
self._builder_url = f"{scraperapi_base}/builder"
|
|
178
|
+
self._video_builder_url = f"{scraperapi_base}/video_builder"
|
|
147
179
|
self._universal_url = f"{universalapi_base}/request"
|
|
180
|
+
|
|
148
181
|
self._status_url = f"{web_scraper_api_base}/tasks-status"
|
|
149
182
|
self._download_url = f"{web_scraper_api_base}/tasks-download"
|
|
183
|
+
self._list_url = f"{web_scraper_api_base}/tasks-list"
|
|
184
|
+
|
|
150
185
|
self._locations_base_url = locations_base
|
|
186
|
+
self._usage_stats_url = (
|
|
187
|
+
f"{locations_base.replace('/locations', '')}/account/usage-statistics"
|
|
188
|
+
)
|
|
189
|
+
self._proxy_users_url = (
|
|
190
|
+
f"{locations_base.replace('/locations', '')}/proxy-users"
|
|
191
|
+
)
|
|
192
|
+
|
|
193
|
+
whitelist_base = os.getenv(
|
|
194
|
+
"THORDATA_WHITELIST_BASE_URL", "https://api.thordata.com/api"
|
|
195
|
+
)
|
|
196
|
+
self._whitelist_url = f"{whitelist_base}/whitelisted-ips"
|
|
197
|
+
|
|
198
|
+
proxy_api_base = os.getenv(
|
|
199
|
+
"THORDATA_PROXY_API_BASE_URL", "https://api.thordata.com/api"
|
|
200
|
+
)
|
|
201
|
+
self._proxy_list_url = f"{proxy_api_base}/proxy/proxy-list"
|
|
202
|
+
self._proxy_expiration_url = f"{proxy_api_base}/proxy/expiration-time"
|
|
151
203
|
|
|
152
204
|
# Session initialized lazily
|
|
153
205
|
self._session: Optional[aiohttp.ClientSession] = None
|
|
@@ -156,7 +208,7 @@ class AsyncThordataClient:
|
|
|
156
208
|
"""Async context manager entry."""
|
|
157
209
|
if self._session is None or self._session.closed:
|
|
158
210
|
self._session = aiohttp.ClientSession(
|
|
159
|
-
timeout=self.
|
|
211
|
+
timeout=self._api_timeout,
|
|
160
212
|
trust_env=True,
|
|
161
213
|
headers={"User-Agent": build_user_agent(_sdk_version, "aiohttp")},
|
|
162
214
|
)
|
|
@@ -207,11 +259,26 @@ class AsyncThordataClient:
|
|
|
207
259
|
|
|
208
260
|
logger.debug(f"Async Proxy GET: {url}")
|
|
209
261
|
|
|
210
|
-
if proxy_config:
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
262
|
+
if proxy_config is None:
|
|
263
|
+
proxy_config = self._get_default_proxy_config_from_env()
|
|
264
|
+
|
|
265
|
+
if proxy_config is None:
|
|
266
|
+
raise ThordataConfigError(
|
|
267
|
+
"Proxy credentials are missing. "
|
|
268
|
+
"Pass proxy_config=ProxyConfig(username=..., password=..., product=...) "
|
|
269
|
+
"or set THORDATA_RESIDENTIAL_USERNAME/THORDATA_RESIDENTIAL_PASSWORD (or DATACENTER/MOBILE)."
|
|
270
|
+
)
|
|
271
|
+
|
|
272
|
+
# aiohttp has limited support for "https://" proxies (TLS to proxy / TLS-in-TLS).
|
|
273
|
+
# Your account's proxy endpoint requires HTTPS proxy, so we explicitly block here
|
|
274
|
+
# to avoid confusing "it always fails" behavior.
|
|
275
|
+
if getattr(proxy_config, "protocol", "http").lower() == "https":
|
|
276
|
+
raise ThordataConfigError(
|
|
277
|
+
"Proxy Network requires an HTTPS proxy endpoint (TLS to proxy) for your account. "
|
|
278
|
+
"aiohttp support for 'https://' proxies is limited and may fail. "
|
|
279
|
+
"Please use ThordataClient.get/post (sync client) for Proxy Network requests."
|
|
280
|
+
)
|
|
281
|
+
proxy_url, proxy_auth = proxy_config.to_aiohttp_config()
|
|
215
282
|
|
|
216
283
|
try:
|
|
217
284
|
return await session.get(
|
|
@@ -248,11 +315,26 @@ class AsyncThordataClient:
|
|
|
248
315
|
|
|
249
316
|
logger.debug(f"Async Proxy POST: {url}")
|
|
250
317
|
|
|
251
|
-
if proxy_config:
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
318
|
+
if proxy_config is None:
|
|
319
|
+
proxy_config = self._get_default_proxy_config_from_env()
|
|
320
|
+
|
|
321
|
+
if proxy_config is None:
|
|
322
|
+
raise ThordataConfigError(
|
|
323
|
+
"Proxy credentials are missing. "
|
|
324
|
+
"Pass proxy_config=ProxyConfig(username=..., password=..., product=...) "
|
|
325
|
+
"or set THORDATA_RESIDENTIAL_USERNAME/THORDATA_RESIDENTIAL_PASSWORD (or DATACENTER/MOBILE)."
|
|
326
|
+
)
|
|
327
|
+
|
|
328
|
+
# aiohttp has limited support for "https://" proxies (TLS to proxy / TLS-in-TLS).
|
|
329
|
+
# Your account's proxy endpoint requires HTTPS proxy, so we explicitly block here
|
|
330
|
+
# to avoid confusing "it always fails" behavior.
|
|
331
|
+
if getattr(proxy_config, "protocol", "http").lower() == "https":
|
|
332
|
+
raise ThordataConfigError(
|
|
333
|
+
"Proxy Network requires an HTTPS proxy endpoint (TLS to proxy) for your account. "
|
|
334
|
+
"aiohttp support for 'https://' proxies is limited and may fail. "
|
|
335
|
+
"Please use ThordataClient.get/post (sync client) for Proxy Network requests."
|
|
336
|
+
)
|
|
337
|
+
proxy_url, proxy_auth = proxy_config.to_aiohttp_config()
|
|
256
338
|
|
|
257
339
|
try:
|
|
258
340
|
return await session.post(
|
|
@@ -324,7 +406,7 @@ class AsyncThordataClient:
|
|
|
324
406
|
)
|
|
325
407
|
|
|
326
408
|
payload = request.to_payload()
|
|
327
|
-
headers = build_auth_headers(self.scraper_token)
|
|
409
|
+
headers = build_auth_headers(self.scraper_token, mode=self._auth_mode)
|
|
328
410
|
|
|
329
411
|
logger.info(f"Async SERP Search: {engine_str} - {query}")
|
|
330
412
|
|
|
@@ -372,7 +454,7 @@ class AsyncThordataClient:
|
|
|
372
454
|
session = self._get_session()
|
|
373
455
|
|
|
374
456
|
payload = request.to_payload()
|
|
375
|
-
headers = build_auth_headers(self.scraper_token)
|
|
457
|
+
headers = build_auth_headers(self.scraper_token, mode=self._auth_mode)
|
|
376
458
|
|
|
377
459
|
logger.info(f"Async SERP Advanced: {request.engine} - {request.query}")
|
|
378
460
|
|
|
@@ -466,7 +548,7 @@ class AsyncThordataClient:
|
|
|
466
548
|
session = self._get_session()
|
|
467
549
|
|
|
468
550
|
payload = request.to_payload()
|
|
469
|
-
headers = build_auth_headers(self.scraper_token)
|
|
551
|
+
headers = build_auth_headers(self.scraper_token, mode=self._auth_mode)
|
|
470
552
|
|
|
471
553
|
logger.info(f"Async Universal Scrape: {request.url}")
|
|
472
554
|
|
|
@@ -538,10 +620,16 @@ class AsyncThordataClient:
|
|
|
538
620
|
"""
|
|
539
621
|
Create a task using ScraperTaskConfig.
|
|
540
622
|
"""
|
|
623
|
+
self._require_public_credentials()
|
|
541
624
|
session = self._get_session()
|
|
542
625
|
|
|
543
626
|
payload = config.to_payload()
|
|
544
|
-
headers
|
|
627
|
+
# Builder needs 3 headers: token, key, Authorization Bearer
|
|
628
|
+
headers = build_builder_headers(
|
|
629
|
+
self.scraper_token,
|
|
630
|
+
self.public_token or "",
|
|
631
|
+
self.public_key or "",
|
|
632
|
+
)
|
|
545
633
|
|
|
546
634
|
logger.info(f"Async Task Creation: {config.spider_name}")
|
|
547
635
|
|
|
@@ -566,6 +654,75 @@ class AsyncThordataClient:
|
|
|
566
654
|
f"Task creation failed: {e}", original_error=e
|
|
567
655
|
) from e
|
|
568
656
|
|
|
657
|
+
async def create_video_task(
|
|
658
|
+
self,
|
|
659
|
+
file_name: str,
|
|
660
|
+
spider_id: str,
|
|
661
|
+
spider_name: str,
|
|
662
|
+
parameters: Dict[str, Any],
|
|
663
|
+
common_settings: CommonSettings,
|
|
664
|
+
) -> str:
|
|
665
|
+
"""
|
|
666
|
+
Create a YouTube video/audio download task.
|
|
667
|
+
"""
|
|
668
|
+
|
|
669
|
+
config = VideoTaskConfig(
|
|
670
|
+
file_name=file_name,
|
|
671
|
+
spider_id=spider_id,
|
|
672
|
+
spider_name=spider_name,
|
|
673
|
+
parameters=parameters,
|
|
674
|
+
common_settings=common_settings,
|
|
675
|
+
)
|
|
676
|
+
|
|
677
|
+
return await self.create_video_task_advanced(config)
|
|
678
|
+
|
|
679
|
+
async def create_video_task_advanced(self, config: VideoTaskConfig) -> str:
|
|
680
|
+
"""
|
|
681
|
+
Create a video task using VideoTaskConfig object.
|
|
682
|
+
"""
|
|
683
|
+
|
|
684
|
+
self._require_public_credentials()
|
|
685
|
+
session = self._get_session()
|
|
686
|
+
|
|
687
|
+
payload = config.to_payload()
|
|
688
|
+
headers = build_builder_headers(
|
|
689
|
+
self.scraper_token,
|
|
690
|
+
self.public_token or "",
|
|
691
|
+
self.public_key or "",
|
|
692
|
+
)
|
|
693
|
+
|
|
694
|
+
logger.info(
|
|
695
|
+
f"Async Video Task Creation: {config.spider_name} - {config.spider_id}"
|
|
696
|
+
)
|
|
697
|
+
|
|
698
|
+
try:
|
|
699
|
+
async with session.post(
|
|
700
|
+
self._video_builder_url,
|
|
701
|
+
data=payload,
|
|
702
|
+
headers=headers,
|
|
703
|
+
timeout=self._api_timeout,
|
|
704
|
+
) as response:
|
|
705
|
+
response.raise_for_status()
|
|
706
|
+
data = await response.json()
|
|
707
|
+
|
|
708
|
+
code = data.get("code")
|
|
709
|
+
if code != 200:
|
|
710
|
+
msg = extract_error_message(data)
|
|
711
|
+
raise_for_code(
|
|
712
|
+
f"Video task creation failed: {msg}", code=code, payload=data
|
|
713
|
+
)
|
|
714
|
+
|
|
715
|
+
return data["data"]["task_id"]
|
|
716
|
+
|
|
717
|
+
except asyncio.TimeoutError as e:
|
|
718
|
+
raise ThordataTimeoutError(
|
|
719
|
+
f"Video task creation timed out: {e}", original_error=e
|
|
720
|
+
) from e
|
|
721
|
+
except aiohttp.ClientError as e:
|
|
722
|
+
raise ThordataNetworkError(
|
|
723
|
+
f"Video task creation failed: {e}", original_error=e
|
|
724
|
+
) from e
|
|
725
|
+
|
|
569
726
|
async def get_task_status(self, task_id: str) -> str:
|
|
570
727
|
"""
|
|
571
728
|
Check async task status.
|
|
@@ -667,6 +824,61 @@ class AsyncThordataClient:
|
|
|
667
824
|
f"Get result failed: {e}", original_error=e
|
|
668
825
|
) from e
|
|
669
826
|
|
|
827
|
+
async def list_tasks(
|
|
828
|
+
self,
|
|
829
|
+
page: int = 1,
|
|
830
|
+
size: int = 20,
|
|
831
|
+
) -> Dict[str, Any]:
|
|
832
|
+
"""
|
|
833
|
+
List all Web Scraper tasks.
|
|
834
|
+
|
|
835
|
+
Args:
|
|
836
|
+
page: Page number (starts from 1).
|
|
837
|
+
size: Number of tasks per page.
|
|
838
|
+
|
|
839
|
+
Returns:
|
|
840
|
+
Dict containing 'count' and 'list' of tasks.
|
|
841
|
+
"""
|
|
842
|
+
self._require_public_credentials()
|
|
843
|
+
session = self._get_session()
|
|
844
|
+
|
|
845
|
+
headers = build_public_api_headers(
|
|
846
|
+
self.public_token or "", self.public_key or ""
|
|
847
|
+
)
|
|
848
|
+
payload: Dict[str, Any] = {}
|
|
849
|
+
if page:
|
|
850
|
+
payload["page"] = str(page)
|
|
851
|
+
if size:
|
|
852
|
+
payload["size"] = str(size)
|
|
853
|
+
|
|
854
|
+
logger.info(f"Async listing tasks: page={page}, size={size}")
|
|
855
|
+
|
|
856
|
+
try:
|
|
857
|
+
async with session.post(
|
|
858
|
+
self._list_url,
|
|
859
|
+
data=payload,
|
|
860
|
+
headers=headers,
|
|
861
|
+
timeout=self._api_timeout,
|
|
862
|
+
) as response:
|
|
863
|
+
response.raise_for_status()
|
|
864
|
+
data = await response.json()
|
|
865
|
+
|
|
866
|
+
code = data.get("code")
|
|
867
|
+
if code != 200:
|
|
868
|
+
msg = extract_error_message(data)
|
|
869
|
+
raise_for_code(f"List tasks failed: {msg}", code=code, payload=data)
|
|
870
|
+
|
|
871
|
+
return data.get("data", {"count": 0, "list": []})
|
|
872
|
+
|
|
873
|
+
except asyncio.TimeoutError as e:
|
|
874
|
+
raise ThordataTimeoutError(
|
|
875
|
+
f"List tasks timed out: {e}", original_error=e
|
|
876
|
+
) from e
|
|
877
|
+
except aiohttp.ClientError as e:
|
|
878
|
+
raise ThordataNetworkError(
|
|
879
|
+
f"List tasks failed: {e}", original_error=e
|
|
880
|
+
) from e
|
|
881
|
+
|
|
670
882
|
async def wait_for_task(
|
|
671
883
|
self,
|
|
672
884
|
task_id: str,
|
|
@@ -703,6 +915,551 @@ class AsyncThordataClient:
|
|
|
703
915
|
|
|
704
916
|
raise TimeoutError(f"Task {task_id} did not complete within {max_wait} seconds")
|
|
705
917
|
|
|
918
|
+
# =========================================================================
|
|
919
|
+
# Proxy Account Management Methods
|
|
920
|
+
# =========================================================================
|
|
921
|
+
|
|
922
|
+
async def get_usage_statistics(
|
|
923
|
+
self,
|
|
924
|
+
from_date: Union[str, date],
|
|
925
|
+
to_date: Union[str, date],
|
|
926
|
+
) -> UsageStatistics:
|
|
927
|
+
"""
|
|
928
|
+
Get account usage statistics for a date range.
|
|
929
|
+
|
|
930
|
+
Args:
|
|
931
|
+
from_date: Start date (YYYY-MM-DD string or date object).
|
|
932
|
+
to_date: End date (YYYY-MM-DD string or date object).
|
|
933
|
+
|
|
934
|
+
Returns:
|
|
935
|
+
UsageStatistics object with traffic data.
|
|
936
|
+
"""
|
|
937
|
+
|
|
938
|
+
self._require_public_credentials()
|
|
939
|
+
session = self._get_session()
|
|
940
|
+
|
|
941
|
+
# Convert dates to strings
|
|
942
|
+
if isinstance(from_date, date):
|
|
943
|
+
from_date = from_date.strftime("%Y-%m-%d")
|
|
944
|
+
if isinstance(to_date, date):
|
|
945
|
+
to_date = to_date.strftime("%Y-%m-%d")
|
|
946
|
+
|
|
947
|
+
params = {
|
|
948
|
+
"token": self.public_token,
|
|
949
|
+
"key": self.public_key,
|
|
950
|
+
"from_date": from_date,
|
|
951
|
+
"to_date": to_date,
|
|
952
|
+
}
|
|
953
|
+
|
|
954
|
+
logger.info(f"Async getting usage statistics: {from_date} to {to_date}")
|
|
955
|
+
|
|
956
|
+
try:
|
|
957
|
+
async with session.get(
|
|
958
|
+
self._usage_stats_url,
|
|
959
|
+
params=params,
|
|
960
|
+
timeout=self._api_timeout,
|
|
961
|
+
) as response:
|
|
962
|
+
response.raise_for_status()
|
|
963
|
+
data = await response.json()
|
|
964
|
+
|
|
965
|
+
if isinstance(data, dict):
|
|
966
|
+
code = data.get("code")
|
|
967
|
+
if code is not None and code != 200:
|
|
968
|
+
msg = extract_error_message(data)
|
|
969
|
+
raise_for_code(
|
|
970
|
+
f"Usage statistics error: {msg}",
|
|
971
|
+
code=code,
|
|
972
|
+
payload=data,
|
|
973
|
+
)
|
|
974
|
+
|
|
975
|
+
usage_data = data.get("data", data)
|
|
976
|
+
return UsageStatistics.from_dict(usage_data)
|
|
977
|
+
|
|
978
|
+
raise ThordataNetworkError(
|
|
979
|
+
f"Unexpected usage statistics response: {type(data).__name__}",
|
|
980
|
+
original_error=None,
|
|
981
|
+
)
|
|
982
|
+
|
|
983
|
+
except asyncio.TimeoutError as e:
|
|
984
|
+
raise ThordataTimeoutError(
|
|
985
|
+
f"Usage statistics timed out: {e}", original_error=e
|
|
986
|
+
) from e
|
|
987
|
+
except aiohttp.ClientError as e:
|
|
988
|
+
raise ThordataNetworkError(
|
|
989
|
+
f"Usage statistics failed: {e}", original_error=e
|
|
990
|
+
) from e
|
|
991
|
+
|
|
992
|
+
async def get_residential_balance(self) -> Dict[str, Any]:
|
|
993
|
+
"""
|
|
994
|
+
Get residential proxy balance.
|
|
995
|
+
|
|
996
|
+
Uses public_token/public_key.
|
|
997
|
+
"""
|
|
998
|
+
session = self._get_session()
|
|
999
|
+
headers = self._build_gateway_headers()
|
|
1000
|
+
|
|
1001
|
+
logger.info("Async getting residential proxy balance")
|
|
1002
|
+
|
|
1003
|
+
try:
|
|
1004
|
+
async with session.post(
|
|
1005
|
+
f"{self._gateway_base_url}/getFlowBalance",
|
|
1006
|
+
headers=headers,
|
|
1007
|
+
data={},
|
|
1008
|
+
timeout=self._api_timeout,
|
|
1009
|
+
) as response:
|
|
1010
|
+
response.raise_for_status()
|
|
1011
|
+
data = await response.json()
|
|
1012
|
+
|
|
1013
|
+
code = data.get("code")
|
|
1014
|
+
if code != 200:
|
|
1015
|
+
msg = extract_error_message(data)
|
|
1016
|
+
raise_for_code(
|
|
1017
|
+
f"Get balance failed: {msg}", code=code, payload=data
|
|
1018
|
+
)
|
|
1019
|
+
|
|
1020
|
+
return data.get("data", {})
|
|
1021
|
+
|
|
1022
|
+
except asyncio.TimeoutError as e:
|
|
1023
|
+
raise ThordataTimeoutError(
|
|
1024
|
+
f"Get balance timed out: {e}", original_error=e
|
|
1025
|
+
) from e
|
|
1026
|
+
except aiohttp.ClientError as e:
|
|
1027
|
+
raise ThordataNetworkError(
|
|
1028
|
+
f"Get balance failed: {e}", original_error=e
|
|
1029
|
+
) from e
|
|
1030
|
+
|
|
1031
|
+
async def get_residential_usage(
|
|
1032
|
+
self,
|
|
1033
|
+
start_time: Union[str, int],
|
|
1034
|
+
end_time: Union[str, int],
|
|
1035
|
+
) -> Dict[str, Any]:
|
|
1036
|
+
"""
|
|
1037
|
+
Get residential proxy usage records.
|
|
1038
|
+
|
|
1039
|
+
Uses public_token/public_key.
|
|
1040
|
+
"""
|
|
1041
|
+
session = self._get_session()
|
|
1042
|
+
headers = self._build_gateway_headers()
|
|
1043
|
+
payload = {"start_time": str(start_time), "end_time": str(end_time)}
|
|
1044
|
+
|
|
1045
|
+
logger.info(f"Async getting residential usage: {start_time} to {end_time}")
|
|
1046
|
+
|
|
1047
|
+
try:
|
|
1048
|
+
async with session.post(
|
|
1049
|
+
f"{self._gateway_base_url}/usageRecord",
|
|
1050
|
+
headers=headers,
|
|
1051
|
+
data=payload,
|
|
1052
|
+
timeout=self._api_timeout,
|
|
1053
|
+
) as response:
|
|
1054
|
+
response.raise_for_status()
|
|
1055
|
+
data = await response.json()
|
|
1056
|
+
|
|
1057
|
+
code = data.get("code")
|
|
1058
|
+
if code != 200:
|
|
1059
|
+
msg = extract_error_message(data)
|
|
1060
|
+
raise_for_code(f"Get usage failed: {msg}", code=code, payload=data)
|
|
1061
|
+
|
|
1062
|
+
return data.get("data", {})
|
|
1063
|
+
|
|
1064
|
+
except asyncio.TimeoutError as e:
|
|
1065
|
+
raise ThordataTimeoutError(
|
|
1066
|
+
f"Get usage timed out: {e}", original_error=e
|
|
1067
|
+
) from e
|
|
1068
|
+
except aiohttp.ClientError as e:
|
|
1069
|
+
raise ThordataNetworkError(
|
|
1070
|
+
f"Get usage failed: {e}", original_error=e
|
|
1071
|
+
) from e
|
|
1072
|
+
|
|
1073
|
+
async def list_proxy_users(
|
|
1074
|
+
self, proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL
|
|
1075
|
+
) -> ProxyUserList:
|
|
1076
|
+
"""List all proxy users (sub-accounts)."""
|
|
1077
|
+
|
|
1078
|
+
self._require_public_credentials()
|
|
1079
|
+
session = self._get_session()
|
|
1080
|
+
|
|
1081
|
+
params = {
|
|
1082
|
+
"token": self.public_token,
|
|
1083
|
+
"key": self.public_key,
|
|
1084
|
+
"proxy_type": str(
|
|
1085
|
+
int(proxy_type) if isinstance(proxy_type, ProxyType) else proxy_type
|
|
1086
|
+
),
|
|
1087
|
+
}
|
|
1088
|
+
|
|
1089
|
+
logger.info(f"Async listing proxy users: type={params['proxy_type']}")
|
|
1090
|
+
|
|
1091
|
+
try:
|
|
1092
|
+
async with session.get(
|
|
1093
|
+
f"{self._proxy_users_url}/user-list",
|
|
1094
|
+
params=params,
|
|
1095
|
+
timeout=self._api_timeout,
|
|
1096
|
+
) as response:
|
|
1097
|
+
response.raise_for_status()
|
|
1098
|
+
data = await response.json()
|
|
1099
|
+
|
|
1100
|
+
if isinstance(data, dict):
|
|
1101
|
+
code = data.get("code")
|
|
1102
|
+
if code is not None and code != 200:
|
|
1103
|
+
msg = extract_error_message(data)
|
|
1104
|
+
raise_for_code(
|
|
1105
|
+
f"List proxy users error: {msg}", code=code, payload=data
|
|
1106
|
+
)
|
|
1107
|
+
|
|
1108
|
+
user_data = data.get("data", data)
|
|
1109
|
+
return ProxyUserList.from_dict(user_data)
|
|
1110
|
+
|
|
1111
|
+
raise ThordataNetworkError(
|
|
1112
|
+
f"Unexpected proxy users response: {type(data).__name__}",
|
|
1113
|
+
original_error=None,
|
|
1114
|
+
)
|
|
1115
|
+
|
|
1116
|
+
except asyncio.TimeoutError as e:
|
|
1117
|
+
raise ThordataTimeoutError(
|
|
1118
|
+
f"List users timed out: {e}", original_error=e
|
|
1119
|
+
) from e
|
|
1120
|
+
except aiohttp.ClientError as e:
|
|
1121
|
+
raise ThordataNetworkError(
|
|
1122
|
+
f"List users failed: {e}", original_error=e
|
|
1123
|
+
) from e
|
|
1124
|
+
|
|
1125
|
+
async def create_proxy_user(
|
|
1126
|
+
self,
|
|
1127
|
+
username: str,
|
|
1128
|
+
password: str,
|
|
1129
|
+
proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL,
|
|
1130
|
+
traffic_limit: int = 0,
|
|
1131
|
+
status: bool = True,
|
|
1132
|
+
) -> Dict[str, Any]:
|
|
1133
|
+
"""Create a new proxy user (sub-account)."""
|
|
1134
|
+
self._require_public_credentials()
|
|
1135
|
+
session = self._get_session()
|
|
1136
|
+
|
|
1137
|
+
headers = build_public_api_headers(
|
|
1138
|
+
self.public_token or "", self.public_key or ""
|
|
1139
|
+
)
|
|
1140
|
+
|
|
1141
|
+
payload = {
|
|
1142
|
+
"proxy_type": str(
|
|
1143
|
+
int(proxy_type) if isinstance(proxy_type, ProxyType) else proxy_type
|
|
1144
|
+
),
|
|
1145
|
+
"username": username,
|
|
1146
|
+
"password": password,
|
|
1147
|
+
"traffic_limit": str(traffic_limit),
|
|
1148
|
+
"status": "true" if status else "false",
|
|
1149
|
+
}
|
|
1150
|
+
|
|
1151
|
+
logger.info(f"Async creating proxy user: {username}")
|
|
1152
|
+
|
|
1153
|
+
try:
|
|
1154
|
+
async with session.post(
|
|
1155
|
+
f"{self._proxy_users_url}/create-user",
|
|
1156
|
+
data=payload,
|
|
1157
|
+
headers=headers,
|
|
1158
|
+
timeout=self._api_timeout,
|
|
1159
|
+
) as response:
|
|
1160
|
+
response.raise_for_status()
|
|
1161
|
+
data = await response.json()
|
|
1162
|
+
|
|
1163
|
+
code = data.get("code")
|
|
1164
|
+
if code != 200:
|
|
1165
|
+
msg = extract_error_message(data)
|
|
1166
|
+
raise_for_code(
|
|
1167
|
+
f"Create proxy user failed: {msg}", code=code, payload=data
|
|
1168
|
+
)
|
|
1169
|
+
|
|
1170
|
+
return data.get("data", {})
|
|
1171
|
+
|
|
1172
|
+
except asyncio.TimeoutError as e:
|
|
1173
|
+
raise ThordataTimeoutError(
|
|
1174
|
+
f"Create user timed out: {e}", original_error=e
|
|
1175
|
+
) from e
|
|
1176
|
+
except aiohttp.ClientError as e:
|
|
1177
|
+
raise ThordataNetworkError(
|
|
1178
|
+
f"Create user failed: {e}", original_error=e
|
|
1179
|
+
) from e
|
|
1180
|
+
|
|
1181
|
+
async def add_whitelist_ip(
|
|
1182
|
+
self,
|
|
1183
|
+
ip: str,
|
|
1184
|
+
proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL,
|
|
1185
|
+
status: bool = True,
|
|
1186
|
+
) -> Dict[str, Any]:
|
|
1187
|
+
"""
|
|
1188
|
+
Add an IP to the whitelist for IP authentication.
|
|
1189
|
+
"""
|
|
1190
|
+
self._require_public_credentials()
|
|
1191
|
+
session = self._get_session()
|
|
1192
|
+
|
|
1193
|
+
headers = build_public_api_headers(
|
|
1194
|
+
self.public_token or "", self.public_key or ""
|
|
1195
|
+
)
|
|
1196
|
+
|
|
1197
|
+
proxy_type_int = (
|
|
1198
|
+
int(proxy_type) if isinstance(proxy_type, ProxyType) else proxy_type
|
|
1199
|
+
)
|
|
1200
|
+
|
|
1201
|
+
payload = {
|
|
1202
|
+
"proxy_type": str(proxy_type_int),
|
|
1203
|
+
"ip": ip,
|
|
1204
|
+
"status": "true" if status else "false",
|
|
1205
|
+
}
|
|
1206
|
+
|
|
1207
|
+
logger.info(f"Async adding whitelist IP: {ip}")
|
|
1208
|
+
|
|
1209
|
+
try:
|
|
1210
|
+
async with session.post(
|
|
1211
|
+
f"{self._whitelist_url}/add-ip",
|
|
1212
|
+
data=payload,
|
|
1213
|
+
headers=headers,
|
|
1214
|
+
timeout=self._api_timeout,
|
|
1215
|
+
) as response:
|
|
1216
|
+
response.raise_for_status()
|
|
1217
|
+
data = await response.json()
|
|
1218
|
+
|
|
1219
|
+
code = data.get("code")
|
|
1220
|
+
if code != 200:
|
|
1221
|
+
msg = extract_error_message(data)
|
|
1222
|
+
raise_for_code(
|
|
1223
|
+
f"Add whitelist IP failed: {msg}", code=code, payload=data
|
|
1224
|
+
)
|
|
1225
|
+
|
|
1226
|
+
return data.get("data", {})
|
|
1227
|
+
|
|
1228
|
+
except asyncio.TimeoutError as e:
|
|
1229
|
+
raise ThordataTimeoutError(
|
|
1230
|
+
f"Add whitelist timed out: {e}", original_error=e
|
|
1231
|
+
) from e
|
|
1232
|
+
except aiohttp.ClientError as e:
|
|
1233
|
+
raise ThordataNetworkError(
|
|
1234
|
+
f"Add whitelist failed: {e}", original_error=e
|
|
1235
|
+
) from e
|
|
1236
|
+
|
|
1237
|
+
async def list_proxy_servers(
|
|
1238
|
+
self,
|
|
1239
|
+
proxy_type: int,
|
|
1240
|
+
) -> List[ProxyServer]:
|
|
1241
|
+
"""
|
|
1242
|
+
List ISP or Datacenter proxy servers.
|
|
1243
|
+
"""
|
|
1244
|
+
|
|
1245
|
+
self._require_public_credentials()
|
|
1246
|
+
session = self._get_session()
|
|
1247
|
+
|
|
1248
|
+
params = {
|
|
1249
|
+
"token": self.public_token,
|
|
1250
|
+
"key": self.public_key,
|
|
1251
|
+
"proxy_type": str(proxy_type),
|
|
1252
|
+
}
|
|
1253
|
+
|
|
1254
|
+
logger.info(f"Async listing proxy servers: type={proxy_type}")
|
|
1255
|
+
|
|
1256
|
+
try:
|
|
1257
|
+
async with session.get(
|
|
1258
|
+
self._proxy_list_url,
|
|
1259
|
+
params=params,
|
|
1260
|
+
timeout=self._api_timeout,
|
|
1261
|
+
) as response:
|
|
1262
|
+
response.raise_for_status()
|
|
1263
|
+
data = await response.json()
|
|
1264
|
+
|
|
1265
|
+
if isinstance(data, dict):
|
|
1266
|
+
code = data.get("code")
|
|
1267
|
+
if code is not None and code != 200:
|
|
1268
|
+
msg = extract_error_message(data)
|
|
1269
|
+
raise_for_code(
|
|
1270
|
+
f"List proxy servers error: {msg}", code=code, payload=data
|
|
1271
|
+
)
|
|
1272
|
+
|
|
1273
|
+
server_list = data.get("data", data.get("list", []))
|
|
1274
|
+
elif isinstance(data, list):
|
|
1275
|
+
server_list = data
|
|
1276
|
+
else:
|
|
1277
|
+
raise ThordataNetworkError(
|
|
1278
|
+
f"Unexpected proxy list response: {type(data).__name__}",
|
|
1279
|
+
original_error=None,
|
|
1280
|
+
)
|
|
1281
|
+
|
|
1282
|
+
return [ProxyServer.from_dict(s) for s in server_list]
|
|
1283
|
+
|
|
1284
|
+
except asyncio.TimeoutError as e:
|
|
1285
|
+
raise ThordataTimeoutError(
|
|
1286
|
+
f"List servers timed out: {e}", original_error=e
|
|
1287
|
+
) from e
|
|
1288
|
+
except aiohttp.ClientError as e:
|
|
1289
|
+
raise ThordataNetworkError(
|
|
1290
|
+
f"List servers failed: {e}", original_error=e
|
|
1291
|
+
) from e
|
|
1292
|
+
|
|
1293
|
+
async def get_isp_regions(self) -> List[Dict[str, Any]]:
|
|
1294
|
+
"""
|
|
1295
|
+
Get available ISP proxy regions.
|
|
1296
|
+
|
|
1297
|
+
Uses public_token/public_key.
|
|
1298
|
+
"""
|
|
1299
|
+
session = self._get_session()
|
|
1300
|
+
headers = self._build_gateway_headers()
|
|
1301
|
+
|
|
1302
|
+
logger.info("Async getting ISP regions")
|
|
1303
|
+
|
|
1304
|
+
try:
|
|
1305
|
+
async with session.post(
|
|
1306
|
+
f"{self._gateway_base_url}/getRegionIsp",
|
|
1307
|
+
headers=headers,
|
|
1308
|
+
data={},
|
|
1309
|
+
timeout=self._api_timeout,
|
|
1310
|
+
) as response:
|
|
1311
|
+
response.raise_for_status()
|
|
1312
|
+
data = await response.json()
|
|
1313
|
+
|
|
1314
|
+
code = data.get("code")
|
|
1315
|
+
if code != 200:
|
|
1316
|
+
msg = extract_error_message(data)
|
|
1317
|
+
raise_for_code(
|
|
1318
|
+
f"Get ISP regions failed: {msg}", code=code, payload=data
|
|
1319
|
+
)
|
|
1320
|
+
|
|
1321
|
+
return data.get("data", [])
|
|
1322
|
+
|
|
1323
|
+
except asyncio.TimeoutError as e:
|
|
1324
|
+
raise ThordataTimeoutError(
|
|
1325
|
+
f"Get ISP regions timed out: {e}", original_error=e
|
|
1326
|
+
) from e
|
|
1327
|
+
except aiohttp.ClientError as e:
|
|
1328
|
+
raise ThordataNetworkError(
|
|
1329
|
+
f"Get ISP regions failed: {e}", original_error=e
|
|
1330
|
+
) from e
|
|
1331
|
+
|
|
1332
|
+
async def list_isp_proxies(self) -> List[Dict[str, Any]]:
|
|
1333
|
+
"""
|
|
1334
|
+
List ISP proxies.
|
|
1335
|
+
|
|
1336
|
+
Uses public_token/public_key.
|
|
1337
|
+
"""
|
|
1338
|
+
session = self._get_session()
|
|
1339
|
+
headers = self._build_gateway_headers()
|
|
1340
|
+
|
|
1341
|
+
logger.info("Async listing ISP proxies")
|
|
1342
|
+
|
|
1343
|
+
try:
|
|
1344
|
+
async with session.post(
|
|
1345
|
+
f"{self._gateway_base_url}/queryListIsp",
|
|
1346
|
+
headers=headers,
|
|
1347
|
+
data={},
|
|
1348
|
+
timeout=self._api_timeout,
|
|
1349
|
+
) as response:
|
|
1350
|
+
response.raise_for_status()
|
|
1351
|
+
data = await response.json()
|
|
1352
|
+
|
|
1353
|
+
code = data.get("code")
|
|
1354
|
+
if code != 200:
|
|
1355
|
+
msg = extract_error_message(data)
|
|
1356
|
+
raise_for_code(
|
|
1357
|
+
f"List ISP proxies failed: {msg}", code=code, payload=data
|
|
1358
|
+
)
|
|
1359
|
+
|
|
1360
|
+
return data.get("data", [])
|
|
1361
|
+
|
|
1362
|
+
except asyncio.TimeoutError as e:
|
|
1363
|
+
raise ThordataTimeoutError(
|
|
1364
|
+
f"List ISP proxies timed out: {e}", original_error=e
|
|
1365
|
+
) from e
|
|
1366
|
+
except aiohttp.ClientError as e:
|
|
1367
|
+
raise ThordataNetworkError(
|
|
1368
|
+
f"List ISP proxies failed: {e}", original_error=e
|
|
1369
|
+
) from e
|
|
1370
|
+
|
|
1371
|
+
async def get_wallet_balance(self) -> Dict[str, Any]:
    """
    Get wallet balance for ISP proxies.

    Uses public_token/public_key (enforced by ``_build_gateway_headers``).

    Returns:
        The ``data`` dict from the gateway response (empty dict when absent).

    Raises:
        ThordataTimeoutError: The request exceeded the API timeout.
        ThordataNetworkError: The underlying HTTP request failed.
        Whatever ``raise_for_code`` maps a non-200 API code to.
    """
    sess = self._get_session()
    gw_headers = self._build_gateway_headers()

    logger.info("Async getting wallet balance")

    try:
        async with sess.post(
            f"{self._gateway_base_url}/getBalance",
            headers=gw_headers,
            data={},
            timeout=self._api_timeout,
        ) as resp:
            resp.raise_for_status()
            body = await resp.json()

            # API-level error codes ride inside a 200 HTTP response.
            status = body.get("code")
            if status != 200:
                reason = extract_error_message(body)
                raise_for_code(
                    f"Get wallet balance failed: {reason}",
                    code=status,
                    payload=body,
                )

            return body.get("data", {})

    except asyncio.TimeoutError as exc:
        raise ThordataTimeoutError(
            f"Get wallet balance timed out: {exc}", original_error=exc
        ) from exc
    except aiohttp.ClientError as exc:
        raise ThordataNetworkError(
            f"Get wallet balance failed: {exc}", original_error=exc
        ) from exc
|
|
1409
|
+
|
|
1410
|
+
async def get_proxy_expiration(
    self,
    ips: Union[str, List[str]],
    proxy_type: int,
) -> Dict[str, Any]:
    """
    Get expiration time for specific proxy IPs.

    Args:
        ips: A single IP as a string, or several IPs as a list
            (joined with commas before sending).
        proxy_type: Numeric product identifier expected by the API.

    Returns:
        The ``data`` portion of a dict response (falling back to the whole
        dict), or the raw payload when the API returns a non-dict.

    Raises:
        ThordataTimeoutError: The request exceeded the API timeout.
        ThordataNetworkError: The underlying HTTP request failed.
        Whatever ``raise_for_code`` maps a non-200 API code to.
    """
    self._require_public_credentials()
    sess = self._get_session()

    # The endpoint expects one comma-separated string of IPs.
    ip_csv = ",".join(ips) if isinstance(ips, list) else ips

    query = {
        "token": self.public_token,
        "key": self.public_key,
        "proxy_type": str(proxy_type),
        "ips": ip_csv,
    }

    logger.info(f"Async getting proxy expiration: {ip_csv}")

    try:
        async with sess.get(
            self._proxy_expiration_url,
            params=query,
            timeout=self._api_timeout,
        ) as resp:
            resp.raise_for_status()
            body = await resp.json()

            # Non-dict payloads are passed through untouched
            # (original behavior — no envelope to unwrap).
            if not isinstance(body, dict):
                return body

            status = body.get("code")
            if status is not None and status != 200:
                reason = extract_error_message(body)
                raise_for_code(
                    f"Get expiration error: {reason}", code=status, payload=body
                )

            return body.get("data", body)

    except asyncio.TimeoutError as exc:
        raise ThordataTimeoutError(
            f"Get expiration timed out: {exc}", original_error=exc
        ) from exc
    except aiohttp.ClientError as exc:
        raise ThordataNetworkError(
            f"Get expiration failed: {exc}", original_error=exc
        ) from exc
|
|
1462
|
+
|
|
706
1463
|
# =========================================================================
|
|
707
1464
|
# Location API Methods
|
|
708
1465
|
# =========================================================================
|
|
@@ -813,3 +1570,86 @@ class AsyncThordataClient:
|
|
|
813
1570
|
"public_token and public_key are required for this operation. "
|
|
814
1571
|
"Please provide them when initializing AsyncThordataClient."
|
|
815
1572
|
)
|
|
1573
|
+
|
|
1574
|
+
def _get_proxy_endpoint_overrides(
    self, product: ProxyProduct
) -> tuple[Optional[str], Optional[int], str]:
    """
    Resolve host/port/protocol overrides for *product* from the environment.

    Product-specific variables (``THORDATA_<PRODUCT>_PROXY_*``) take
    precedence over the generic ``THORDATA_PROXY_*`` ones; protocol
    defaults to ``"http"``.

    Returns:
        ``(host, port, protocol)`` where host/port are ``None`` when unset
        (or, for port, unparseable as an int).
    """
    tag = product.value.upper()

    host_override = os.getenv(f"THORDATA_{tag}_PROXY_HOST") or os.getenv(
        "THORDATA_PROXY_HOST"
    )
    raw_port = os.getenv(f"THORDATA_{tag}_PROXY_PORT") or os.getenv(
        "THORDATA_PROXY_PORT"
    )
    proto = (
        os.getenv(f"THORDATA_{tag}_PROXY_PROTOCOL")
        or os.getenv("THORDATA_PROXY_PROTOCOL")
        or "http"
    )

    # A malformed port is treated the same as an absent one.
    parsed_port: Optional[int]
    try:
        parsed_port = int(raw_port) if raw_port else None
    except ValueError:
        parsed_port = None

    return host_override or None, parsed_port, proto
|
|
1599
|
+
|
|
1600
|
+
def _get_default_proxy_config_from_env(self) -> Optional[ProxyConfig]:
    """
    Build a ProxyConfig from environment credentials, if any are set.

    Products are checked in priority order — residential, then datacenter,
    then mobile. The first product with both ``THORDATA_<PRODUCT>_USERNAME``
    and ``THORDATA_<PRODUCT>_PASSWORD`` set wins; its host/port/protocol
    overrides are resolved via ``_get_proxy_endpoint_overrides``.

    Returns:
        A ProxyConfig for the first fully-configured product, or None when
        no product has credentials in the environment.
    """
    # (env-prefix, product) pairs in priority order. Prefixes are spelled
    # out explicitly so the env var names stay exactly as documented,
    # independent of ProxyProduct enum values.
    candidates = (
        ("RESIDENTIAL", ProxyProduct.RESIDENTIAL),
        ("DATACENTER", ProxyProduct.DATACENTER),
        ("MOBILE", ProxyProduct.MOBILE),
    )

    for prefix, product in candidates:
        username = os.getenv(f"THORDATA_{prefix}_USERNAME")
        password = os.getenv(f"THORDATA_{prefix}_PASSWORD")
        if not (username and password):
            continue

        host, port, protocol = self._get_proxy_endpoint_overrides(product)
        return ProxyConfig(
            username=username,
            password=password,
            product=product,
            host=host,
            port=port,
            protocol=protocol,
        )

    return None
|
|
1647
|
+
|
|
1648
|
+
def _build_gateway_headers(self) -> Dict[str, str]:
    """
    Build auth headers for gateway-style endpoints.

    Per our SDK rule: ONLY public_token/public_key exist. Raises (via
    ``_require_public_credentials``) when either credential is missing.
    """
    self._require_public_credentials()
    token = self.public_token or ""
    key = self.public_key or ""
    return build_public_api_headers(token, key)
|