thordata-sdk 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- thordata/__init__.py +13 -1
- thordata/_utils.py +66 -3
- thordata/async_client.py +787 -8
- thordata/client.py +851 -33
- thordata/enums.py +3 -3
- thordata/exceptions.py +16 -5
- thordata/models.py +294 -0
- thordata/retry.py +4 -1
- thordata_sdk-0.8.0.dist-info/METADATA +212 -0
- thordata_sdk-0.8.0.dist-info/RECORD +14 -0
- thordata/parameters.py +0 -53
- thordata_sdk-0.7.0.dist-info/METADATA +0 -1053
- thordata_sdk-0.7.0.dist-info/RECORD +0 -15
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-0.8.0.dist-info}/WHEEL +0 -0
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-0.8.0.dist-info}/licenses/LICENSE +0 -0
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-0.8.0.dist-info}/top_level.txt +0 -0
thordata/client.py
CHANGED
```diff
@@ -25,6 +25,7 @@ from __future__ import annotations
 
 import logging
 import os
+from datetime import date, datetime
 from typing import Any, Dict, List, Optional, Union
 
 import requests
@@ -32,7 +33,9 @@ import requests
 from . import __version__ as _sdk_version
 from ._utils import (
     build_auth_headers,
+    build_builder_headers,
     build_public_api_headers,
+    build_sign_headers,
     build_user_agent,
     decode_base64_image,
     extract_error_message,
@@ -46,11 +49,17 @@ from .exceptions import (
     raise_for_code,
 )
 from .models import (
+    CommonSettings,
     ProxyConfig,
     ProxyProduct,
+    ProxyServer,
+    ProxyUser,
+    ProxyUserList,
     ScraperTaskConfig,
     SerpRequest,
     UniversalScrapeRequest,
+    UsageStatistics,
+    VideoTaskConfig,
 )
 from .retry import RetryConfig, with_retry
 
@@ -87,18 +96,22 @@ class ThordataClient:
     # API Endpoints
     BASE_URL = "https://scraperapi.thordata.com"
     UNIVERSAL_URL = "https://universalapi.thordata.com"
-    API_URL = "https://
-    LOCATIONS_URL = "https://
+    API_URL = "https://openapi.thordata.com/api/web-scraper-api"
+    LOCATIONS_URL = "https://openapi.thordata.com/api/locations"
 
     def __init__(
         self,
         scraper_token: str,
         public_token: Optional[str] = None,
         public_key: Optional[str] = None,
+        sign: Optional[str] = None,
+        api_key: Optional[str] = None,
         proxy_host: str = "pr.thordata.net",
         proxy_port: int = 9999,
         timeout: int = 30,
+        api_timeout: int = 60,
         retry_config: Optional[RetryConfig] = None,
+        auth_mode: str = "bearer",
         scraperapi_base_url: Optional[str] = None,
        universalapi_base_url: Optional[str] = None,
         web_scraper_api_base_url: Optional[str] = None,
@@ -112,14 +125,33 @@ class ThordataClient:
         self.public_token = public_token
         self.public_key = public_key
 
+        # Automatic Fallback Logic: If sign/api_key is not provided, try using public_token/key
+        self.sign = sign or os.getenv("THORDATA_SIGN") or self.public_token
+        self.api_key = api_key or os.getenv("THORDATA_API_KEY") or self.public_key
+
+        # Public API authentication
+        self.sign = sign or os.getenv("THORDATA_SIGN")
+        self.api_key = api_key or os.getenv("THORDATA_API_KEY")
+
         # Proxy configuration
         self._proxy_host = proxy_host
         self._proxy_port = proxy_port
         self._default_timeout = timeout
 
+        # Timeout configuration
+        self._default_timeout = timeout
+        self._api_timeout = api_timeout
+
         # Retry configuration
         self._retry_config = retry_config or RetryConfig()
 
+        # Authentication mode
+        self._auth_mode = auth_mode.lower()
+        if self._auth_mode not in ("bearer", "header_token"):
+            raise ThordataConfigError(
+                f"Invalid auth_mode: {auth_mode}. Must be 'bearer' or 'header_token'."
+            )
+
         # Build default proxy URL (for basic usage)
         self._default_proxy_url = (
             f"http://td-customer-{self.scraper_token}:@{proxy_host}:{proxy_port}"
```
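In 0.8.0 the constructor gains `sign`, `api_key`, `api_timeout`, and `auth_mode` on top of the existing token arguments, with `THORDATA_SIGN`/`THORDATA_API_KEY` environment-variable fallbacks. A minimal construction sketch based only on the signature shown in the hunk above; the credential values are placeholders, and it assumes `ThordataClient` is re-exported from the package root (otherwise import it from `thordata.client`):

```python
from thordata import ThordataClient  # assumption: re-exported at the package root

client = ThordataClient(
    scraper_token="SCRAPER_TOKEN",   # placeholder credentials
    public_token="PUBLIC_TOKEN",
    public_key="PUBLIC_KEY",
    sign="SIGN",                     # new in 0.8.0; falls back to THORDATA_SIGN
    api_key="API_KEY",               # new in 0.8.0; falls back to THORDATA_API_KEY
    api_timeout=60,                  # new: timeout used for management API calls
    auth_mode="bearer",              # new: "bearer" or "header_token"
)
```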
```diff
@@ -170,17 +202,43 @@ class ThordataClient:
             or self.LOCATIONS_URL
         ).rstrip("/")
 
+        gateway_base = os.getenv(
+            "THORDATA_GATEWAY_BASE_URL", "https://api.thordata.com/api/gateway"
+        )
+        child_base = os.getenv(
+            "THORDATA_CHILD_BASE_URL", "https://api.thordata.com/api/child"
+        )
+
+        self._gateway_base_url = gateway_base
+        self._child_base_url = child_base
+
         self._serp_url = f"{scraperapi_base}/request"
         self._builder_url = f"{scraperapi_base}/builder"
+        self._video_builder_url = f"{scraperapi_base}/video_builder"
         self._universal_url = f"{universalapi_base}/request"
         self._status_url = f"{web_scraper_api_base}/tasks-status"
         self._download_url = f"{web_scraper_api_base}/tasks-download"
         self._locations_base_url = locations_base
+        self._usage_stats_url = (
+            f"{locations_base.replace('/locations', '')}/account/usage-statistics"
+        )
+        self._proxy_users_url = (
+            f"{locations_base.replace('/locations', '')}/proxy-users"
+        )
+        whitelist_base = os.getenv(
+            "THORDATA_WHITELIST_BASE_URL", "https://api.thordata.com/api"
+        )
+        self._whitelist_url = f"{whitelist_base}/whitelisted-ips"
+        proxy_api_base = os.getenv(
+            "THORDATA_PROXY_API_BASE_URL", "https://api.thordata.com/api"
+        )
+        self._proxy_list_url = f"{proxy_api_base}/proxy/proxy-list"
+        self._proxy_expiration_url = f"{proxy_api_base}/proxy/expiration-time"
+        self._list_url = f"{web_scraper_api_base}/tasks-list"
 
     # =========================================================================
-    # Proxy Network Methods
+    # Proxy Network Methods (Pure proxy network request functions)
     # =========================================================================
-
     def get(
         self,
         url: str,
@@ -257,6 +315,8 @@ class ThordataClient:
 
     def build_proxy_url(
         self,
+        username: str,  # Required
+        password: str,  # Required
         *,
         country: Optional[str] = None,
         state: Optional[str] = None,
@@ -288,8 +348,8 @@ class ThordataClient:
             >>> requests.get("https://example.com", proxies=proxies)
         """
         config = ProxyConfig(
-            username=
-            password=
+            username=username,
+            password=password,
             host=self._proxy_host,
             port=self._proxy_port,
             product=product,
```
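`build_proxy_url()` now takes the proxy username and password as required positional arguments instead of deriving them from the client. A hedged sketch, reusing the `client` from the construction sketch above and assuming the remaining keyword-only parameters keep their earlier defaults (only part of the signature is visible in this diff); credentials and the target URL are placeholders:

```python
import requests

proxy_url = client.build_proxy_url(
    "subuser1",      # username, now required
    "securepass",    # password, now required
    country="us",    # optional targeting keyword from the signature above
)
proxies = {"http": proxy_url, "https": proxy_url}
resp = requests.get("https://example.com", proxies=proxies, timeout=30)
print(resp.status_code)
```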
```diff
@@ -302,9 +362,44 @@ class ThordataClient:
         return config.build_proxy_url()
 
     # =========================================================================
-    #
+    # Internal API Request Retry Helper (For all API calls)
     # =========================================================================
+    def _api_request_with_retry(
+        self,
+        method: str,
+        url: str,
+        *,
+        data: Optional[Dict[str, Any]] = None,
+        headers: Optional[Dict[str, str]] = None,
+        params: Optional[Dict[str, Any]] = None,
+    ) -> requests.Response:
+        """Make an API request with automatic retry on transient failures."""
 
+        @with_retry(self._retry_config)
+        def _do_request() -> requests.Response:
+            return self._api_session.request(
+                method,
+                url,
+                data=data,
+                headers=headers,
+                params=params,
+                timeout=self._api_timeout,
+            )
+
+        try:
+            return _do_request()
+        except requests.Timeout as e:
+            raise ThordataTimeoutError(
+                f"API request timed out: {e}", original_error=e
+            ) from e
+        except requests.RequestException as e:
+            raise ThordataNetworkError(
+                f"API request failed: {e}", original_error=e
+            ) from e
+
+    # =========================================================================
+    # SERP API Methods (Search Engine Results Page functions)
+    # =========================================================================
     def serp_search(
         self,
         query: str,
@@ -375,16 +470,18 @@ class ThordataClient:
         )
 
         payload = request.to_payload()
-        headers = build_auth_headers(self.scraper_token)
+        headers = build_auth_headers(self.scraper_token, mode=self._auth_mode)
 
-        logger.info(
+        logger.info(
+            f"SERP Search: {engine_str} - {query[:50]}{'...' if len(query) > 50 else ''}"
+        )
 
         try:
-            response = self.
+            response = self._api_request_with_retry(
+                "POST",
                 self._serp_url,
                 data=payload,
                 headers=headers,
-                timeout=60,
             )
             response.raise_for_status()
 
@@ -445,16 +542,18 @@ class ThordataClient:
             >>> results = client.serp_search_advanced(request)
         """
         payload = request.to_payload()
-        headers = build_auth_headers(self.scraper_token)
+        headers = build_auth_headers(self.scraper_token, mode=self._auth_mode)
 
-        logger.info(
+        logger.info(
+            f"SERP Advanced Search: {request.engine} - {request.query[:50]}{'...' if len(request.query) > 50 else ''}"
+        )
 
         try:
-            response = self.
+            response = self._api_request_with_retry(
+                "POST",
                 self._serp_url,
                 data=payload,
                 headers=headers,
-                timeout=60,
             )
             response.raise_for_status()
 
@@ -487,9 +586,8 @@ class ThordataClient:
             ) from e
 
     # =========================================================================
-    # Universal Scraping API (Web Unlocker)
+    # Universal Scraping API Methods (Web Unlocker functions)
     # =========================================================================
-
     def universal_scrape(
         self,
         url: str,
@@ -559,18 +657,18 @@ class ThordataClient:
             HTML string or PNG bytes.
         """
         payload = request.to_payload()
-        headers = build_auth_headers(self.scraper_token)
+        headers = build_auth_headers(self.scraper_token, mode=self._auth_mode)
 
         logger.info(
             f"Universal Scrape: {request.url} (format: {request.output_format})"
         )
 
         try:
-            response = self.
+            response = self._api_request_with_retry(
+                "POST",
                 self._universal_url,
                 data=payload,
                 headers=headers,
-                timeout=60,
             )
             response.raise_for_status()
 
@@ -619,9 +717,8 @@ class ThordataClient:
             return str(resp_json)
 
     # =========================================================================
-    # Web Scraper API (
+    # Web Scraper API Methods (Only async task management functions)
     # =========================================================================
-
     def create_scraper_task(
         self,
         file_name: str,
@@ -673,17 +770,25 @@ class ThordataClient:
         Returns:
             The created task_id.
         """
+        self._require_public_credentials()
+
         payload = config.to_payload()
-
+
+        # Builder needs 3 headers: token, key, Authorization Bearer
+        headers = build_builder_headers(
+            self.scraper_token,
+            self.public_token or "",
+            self.public_key or "",
+        )
 
         logger.info(f"Creating Scraper Task: {config.spider_name}")
 
         try:
-            response = self.
+            response = self._api_request_with_retry(
+                "POST",
                 self._builder_url,
                 data=payload,
                 headers=headers,
-                timeout=30,
             )
             response.raise_for_status()
 
@@ -701,6 +806,94 @@ class ThordataClient:
                 f"Task creation failed: {e}", original_error=e
             ) from e
 
+    def create_video_task(
+        self,
+        file_name: str,
+        spider_id: str,
+        spider_name: str,
+        parameters: Dict[str, Any],
+        common_settings: "CommonSettings",
+    ) -> str:
+        """
+        Create a YouTube video/audio download task.
+
+        Uses the /video_builder endpoint.
+
+        Args:
+            file_name: Output file name. Supports {{TasksID}}, {{VideoID}}.
+            spider_id: Spider identifier (e.g., "youtube_video_by-url").
+            spider_name: Spider name (typically "youtube.com").
+            parameters: Spider parameters (e.g., {"url": "..."}).
+            common_settings: Video/audio settings.
+
+        Returns:
+            The created task_id.
+
+        Example:
+            >>> from thordata import CommonSettings
+            >>> task_id = client.create_video_task(
+            ...     file_name="{{VideoID}}",
+            ...     spider_id="youtube_video_by-url",
+            ...     spider_name="youtube.com",
+            ...     parameters={"url": "https://youtube.com/watch?v=xxx"},
+            ...     common_settings=CommonSettings(
+            ...         resolution="1080p",
+            ...         is_subtitles="true"
+            ...     )
+            ... )
+        """
+        config = VideoTaskConfig(
+            file_name=file_name,
+            spider_id=spider_id,
+            spider_name=spider_name,
+            parameters=parameters,
+            common_settings=common_settings,
+        )
+
+        return self.create_video_task_advanced(config)
+
+    def create_video_task_advanced(self, config: VideoTaskConfig) -> str:
+        """
+        Create a video task using VideoTaskConfig object.
+
+        Args:
+            config: Video task configuration.
+
+        Returns:
+            The created task_id.
+        """
+        self._require_public_credentials()
+
+        payload = config.to_payload()
+        headers = build_builder_headers(
+            self.scraper_token,
+            self.public_token or "",
+            self.public_key or "",
+        )
+
+        logger.info(f"Creating Video Task: {config.spider_name} - {config.spider_id}")
+
+        response = self._api_request_with_retry(
+            "POST",
+            self._video_builder_url,
+            data=payload,
+            headers=headers,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(
+                f"Video task creation failed: {msg}", code=code, payload=data
+            )
+
+        return data["data"]["task_id"]
+
     def get_task_status(self, task_id: str) -> str:
         """
         Check the status of an asynchronous scraping task.
```
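The new `/video_builder` methods plug into the existing task-polling helpers. A hedged sketch, reusing `client` from the construction sketch above and assuming `wait_for_task` blocks with its default polling settings (only its first parameter is visible in this diff); the video URL is a placeholder:

```python
from thordata import CommonSettings  # assumption: re-exported at the package root

task_id = client.create_video_task(
    file_name="{{VideoID}}",
    spider_id="youtube_video_by-url",
    spider_name="youtube.com",
    parameters={"url": "https://youtube.com/watch?v=xxx"},  # placeholder video URL
    common_settings=CommonSettings(resolution="1080p", is_subtitles="true"),
)

client.wait_for_task(task_id)                      # poll until the task finishes
print(task_id, client.get_task_status(task_id))    # get_task_status returns a status string
```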
```diff
@@ -721,11 +914,11 @@ class ThordataClient:
         payload = {"tasks_ids": task_id}
 
         try:
-            response = self.
+            response = self._api_request_with_retry(
+                "POST",
                 self._status_url,
                 data=payload,
                 headers=headers,
-                timeout=30,
             )
             response.raise_for_status()
             data = response.json()
@@ -788,11 +981,11 @@ class ThordataClient:
         logger.info(f"Getting result URL for Task: {task_id}")
 
         try:
-            response = self.
+            response = self._api_request_with_retry(
+                "POST",
                 self._download_url,
                 data=payload,
                 headers=headers,
-                timeout=30,
             )
             response.raise_for_status()
 
@@ -812,6 +1005,57 @@ class ThordataClient:
                 f"Get result failed: {e}", original_error=e
             ) from e
 
+    def list_tasks(
+        self,
+        page: int = 1,
+        size: int = 20,
+    ) -> Dict[str, Any]:
+        """
+        List all Web Scraper tasks.
+
+        Args:
+            page: Page number (starts from 1).
+            size: Number of tasks per page.
+
+        Returns:
+            Dict containing 'count' and 'list' of tasks.
+
+        Example:
+            >>> result = client.list_tasks(page=1, size=10)
+            >>> print(f"Total tasks: {result['count']}")
+            >>> for task in result['list']:
+            ...     print(f"Task {task['task_id']}: {task['status']}")
+        """
+        self._require_public_credentials()
+
+        headers = build_public_api_headers(
+            self.public_token or "", self.public_key or ""
+        )
+        payload: Dict[str, Any] = {}
+        if page:
+            payload["page"] = str(page)
+        if size:
+            payload["size"] = str(size)
+
+        logger.info(f"Listing tasks: page={page}, size={size}")
+
+        response = self._api_request_with_retry(
+            "POST",
+            self._list_url,
+            data=payload,
+            headers=headers,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"List tasks failed: {msg}", code=code, payload=data)
+
+        return data.get("data", {"count": 0, "list": []})
+
     def wait_for_task(
         self,
         task_id: str,
```
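`list_tasks()` returns a dict with `count` and `list`, so paging through every task is a short loop. A sketch reusing `client` from the construction sketch above; the page size is arbitrary:

```python
page, size, seen = 1, 50, 0
while True:
    result = client.list_tasks(page=page, size=size)
    tasks = result.get("list", [])
    for task in tasks:
        print(task.get("task_id"), task.get("status"))
    seen += len(tasks)
    if not tasks or seen >= result.get("count", 0):
        break  # stop when a page comes back empty or all tasks were seen
    page += 1
```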
```diff
@@ -865,9 +1109,580 @@ class ThordataClient:
         raise TimeoutError(f"Task {task_id} did not complete within {max_wait} seconds")
 
     # =========================================================================
-    #
+    # Proxy Account Management Methods (Proxy balance, user, whitelist functions)
     # =========================================================================
+    def get_usage_statistics(
+        self,
+        from_date: Union[str, date],
+        to_date: Union[str, date],
+    ) -> UsageStatistics:
+        """
+        Get account usage statistics for a date range.
 
+        Args:
+            from_date: Start date (YYYY-MM-DD string or date object).
+            to_date: End date (YYYY-MM-DD string or date object).
+
+        Returns:
+            UsageStatistics object with traffic data.
+
+        Raises:
+            ValueError: If date range exceeds 180 days.
+
+        Example:
+            >>> from datetime import date, timedelta
+            >>> today = date.today()
+            >>> week_ago = today - timedelta(days=7)
+            >>> stats = client.get_usage_statistics(week_ago, today)
+            >>> print(f"Used: {stats.range_usage_gb():.2f} GB")
+            >>> print(f"Balance: {stats.balance_gb():.2f} GB")
+        """
+        self._require_public_credentials()
+
+        # Convert dates to strings
+        if isinstance(from_date, date):
+            from_date = from_date.strftime("%Y-%m-%d")
+        if isinstance(to_date, date):
+            to_date = to_date.strftime("%Y-%m-%d")
+
+        params = {
+            "token": self.public_token,
+            "key": self.public_key,
+            "from_date": from_date,
+            "to_date": to_date,
+        }
+
+        logger.info(f"Getting usage statistics: {from_date} to {to_date}")
+
+        response = self._api_request_with_retry(
+            "GET",
+            self._usage_stats_url,
+            params=params,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+
+        if isinstance(data, dict):
+            code = data.get("code")
+            if code is not None and code != 200:
+                msg = extract_error_message(data)
+                raise_for_code(
+                    f"Usage statistics error: {msg}",
+                    code=code,
+                    payload=data,
+                )
+
+            # Extract data field
+            usage_data = data.get("data", data)
+            return UsageStatistics.from_dict(usage_data)
+
+        raise ThordataNetworkError(
+            f"Unexpected usage statistics response: {type(data).__name__}",
+            original_error=None,
+        )
+
+    def get_residential_balance(self) -> Dict[str, Any]:
+        """
+        Get residential proxy balance (Public API NEW).
+
+        Requires sign and apiKey credentials.
+
+        Returns:
+            Dict with 'balance' (bytes) and 'expire_time' (timestamp).
+
+        Example:
+            >>> result = client.get_residential_balance()
+            >>> balance_gb = result['balance'] / (1024**3)
+            >>> print(f"Balance: {balance_gb:.2f} GB")
+        """
+        if not self.sign or not self.api_key:
+            raise ThordataConfigError(
+                "sign and api_key are required for Public API NEW. "
+                "Set THORDATA_SIGN and THORDATA_API_KEY environment variables."
+            )
+
+        headers = build_sign_headers(self.sign, self.api_key)
+
+        logger.info("Getting residential proxy balance (API NEW)")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._gateway_base_url}/getFlowBalance",
+            headers=headers,
+            data={},
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"Get balance failed: {msg}", code=code, payload=data)
+
+        return data.get("data", {})
+
+    def get_residential_usage(
+        self,
+        start_time: Union[str, int],
+        end_time: Union[str, int],
+    ) -> Dict[str, Any]:
+        """
+        Get residential proxy usage records (Public API NEW).
+
+        Args:
+            start_time: Start timestamp (Unix timestamp or YYYY-MM-DD HH:MM:SS).
+            end_time: End timestamp (Unix timestamp or YYYY-MM-DD HH:MM:SS).
+
+        Returns:
+            Dict with usage data including 'all_flow', 'all_used_flow', 'data' list.
+
+        Example:
+            >>> import time
+            >>> end = int(time.time())
+            >>> start = end - 7*24*3600  # Last 7 days
+            >>> usage = client.get_residential_usage(start, end)
+            >>> print(f"Total used: {usage['all_used_flow'] / (1024**3):.2f} GB")
+        """
+        if not self.sign or not self.api_key:
+            raise ThordataConfigError(
+                "sign and api_key are required for Public API NEW."
+            )
+
+        headers = build_sign_headers(self.sign, self.api_key)
+        payload = {
+            "start_time": str(start_time),
+            "end_time": str(end_time),
+        }
+
+        logger.info(f"Getting residential usage: {start_time} to {end_time}")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._gateway_base_url}/usageRecord",
+            headers=headers,
+            data=payload,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"Get usage failed: {msg}", code=code, payload=data)
+
+        return data.get("data", {})
+
+    def list_proxy_users(
+        self, proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL
+    ) -> ProxyUserList:
+        """
+        List all proxy users (sub-accounts).
+
+        Args:
+            proxy_type: Proxy type (1=Residential, 2=Unlimited).
+
+        Returns:
+            ProxyUserList with user details.
+
+        Example:
+            >>> users = client.list_proxy_users(proxy_type=ProxyType.RESIDENTIAL)
+            >>> print(f"Total users: {users.user_count}")
+            >>> for user in users.users:
+            ...     print(f"{user.username}: {user.usage_gb():.2f} GB used")
+        """
+        self._require_public_credentials()
+
+        params = {
+            "token": self.public_token,
+            "key": self.public_key,
+            "proxy_type": str(
+                int(proxy_type) if isinstance(proxy_type, ProxyType) else proxy_type
+            ),
+        }
+
+        logger.info(f"Listing proxy users: type={params['proxy_type']}")
+
+        response = self._api_request_with_retry(
+            "GET",
+            f"{self._proxy_users_url}/user-list",
+            params=params,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+
+        if isinstance(data, dict):
+            code = data.get("code")
+            if code is not None and code != 200:
+                msg = extract_error_message(data)
+                raise_for_code(
+                    f"List proxy users error: {msg}", code=code, payload=data
+                )
+
+            user_data = data.get("data", data)
+            return ProxyUserList.from_dict(user_data)
+
+        raise ThordataNetworkError(
+            f"Unexpected proxy users response: {type(data).__name__}",
+            original_error=None,
+        )
+
+    def create_proxy_user(
+        self,
+        username: str,
+        password: str,
+        proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL,
+        traffic_limit: int = 0,
+        status: bool = True,
+    ) -> Dict[str, Any]:
+        """
+        Create a new proxy user (sub-account).
+
+        Args:
+            username: Username for the new user.
+            password: Password for the new user.
+            proxy_type: Proxy type (1=Residential, 2=Unlimited).
+            traffic_limit: Traffic limit in MB (0 = unlimited, min 100).
+            status: Enable/disable user (True/False).
+
+        Returns:
+            API response data.
+
+        Example:
+            >>> result = client.create_proxy_user(
+            ...     username="subuser1",
+            ...     password="securepass",
+            ...     traffic_limit=5120,  # 5GB
+            ...     status=True
+            ... )
+        """
+        self._require_public_credentials()
+
+        headers = build_public_api_headers(
+            self.public_token or "", self.public_key or ""
+        )
+
+        payload = {
+            "proxy_type": str(
+                int(proxy_type) if isinstance(proxy_type, ProxyType) else proxy_type
+            ),
+            "username": username,
+            "password": password,
+            "traffic_limit": str(traffic_limit),
+            "status": "true" if status else "false",
+        }
+
+        logger.info(f"Creating proxy user: {username}")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._proxy_users_url}/create-user",
+            data=payload,
+            headers=headers,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"Create proxy user failed: {msg}", code=code, payload=data)
+
+        return data.get("data", {})
+
+    def add_whitelist_ip(
+        self,
+        ip: str,
+        proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL,
+        status: bool = True,
+    ) -> Dict[str, Any]:
+        """
+        Add an IP to the whitelist for IP authentication.
+
+        Args:
+            ip: IP address to whitelist.
+            proxy_type: Proxy type (1=Residential, 2=Unlimited, 9=Mobile).
+            status: Enable/disable the IP (True/False).
+
+        Returns:
+            API response data.
+
+        Example:
+            >>> result = client.add_whitelist_ip(
+            ...     ip="123.45.67.89",
+            ...     proxy_type=ProxyType.RESIDENTIAL,
+            ...     status=True
+            ... )
+        """
+        self._require_public_credentials()
+
+        headers = build_public_api_headers(
+            self.public_token or "", self.public_key or ""
+        )
+
+        # Convert ProxyType to int
+        proxy_type_int = (
+            int(proxy_type) if isinstance(proxy_type, ProxyType) else proxy_type
+        )
+
+        payload = {
+            "proxy_type": str(proxy_type_int),
+            "ip": ip,
+            "status": "true" if status else "false",
+        }
+
+        logger.info(f"Adding whitelist IP: {ip}")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._whitelist_url}/add-ip",
+            data=payload,
+            headers=headers,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"Add whitelist IP failed: {msg}", code=code, payload=data)
+
+        return data.get("data", {})
+
+    def list_proxy_servers(
+        self,
+        proxy_type: int,
+    ) -> List[ProxyServer]:
+        """
+        List ISP or Datacenter proxy servers.
+
+        Args:
+            proxy_type: Proxy type (1=ISP, 2=Datacenter).
+
+        Returns:
+            List of ProxyServer objects.
+
+        Example:
+            >>> servers = client.list_proxy_servers(proxy_type=1)  # ISP proxies
+            >>> for server in servers:
+            ...     print(f"{server.ip}:{server.port} - expires: {server.expiration_time}")
+        """
+        self._require_public_credentials()
+
+        params = {
+            "token": self.public_token,
+            "key": self.public_key,
+            "proxy_type": str(proxy_type),
+        }
+
+        logger.info(f"Listing proxy servers: type={proxy_type}")
+
+        response = self._api_request_with_retry(
+            "GET",
+            self._proxy_list_url,
+            params=params,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+
+        if isinstance(data, dict):
+            code = data.get("code")
+            if code is not None and code != 200:
+                msg = extract_error_message(data)
+                raise_for_code(
+                    f"List proxy servers error: {msg}", code=code, payload=data
+                )
+
+            # Extract list from data field
+            server_list = data.get("data", data.get("list", []))
+        elif isinstance(data, list):
+            server_list = data
+        else:
+            raise ThordataNetworkError(
+                f"Unexpected proxy list response: {type(data).__name__}",
+                original_error=None,
+            )
+
+        return [ProxyServer.from_dict(s) for s in server_list]
+
+    def get_isp_regions(self) -> List[Dict[str, Any]]:
+        """
+        Get available ISP proxy regions (Public API NEW).
+
+        Returns:
+            List of regions with id, continent, country, city, num, pricing.
+
+        Example:
+            >>> regions = client.get_isp_regions()
+            >>> for region in regions:
+            ...     print(f"{region['country']}/{region['city']}: {region['num']} IPs")
+        """
+        if not self.sign or not self.api_key:
+            raise ThordataConfigError(
+                "sign and api_key are required for Public API NEW."
+            )
+
+        headers = build_sign_headers(self.sign, self.api_key)
+
+        logger.info("Getting ISP regions (API NEW)")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._gateway_base_url}/getRegionIsp",
+            headers=headers,
+            data={},
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"Get ISP regions failed: {msg}", code=code, payload=data)
+
+        return data.get("data", [])
+
+    def list_isp_proxies(self) -> List[Dict[str, Any]]:
+        """
+        List ISP proxies (Public API NEW).
+
+        Returns:
+            List of ISP proxies with ip, port, user, pwd, startTime, expireTime.
+
+        Example:
+            >>> proxies = client.list_isp_proxies()
+            >>> for proxy in proxies:
+            ...     print(f"{proxy['ip']}:{proxy['port']} - expires: {proxy['expireTime']}")
+        """
+        if not self.sign or not self.api_key:
+            raise ThordataConfigError(
+                "sign and api_key are required for Public API NEW."
+            )
+
+        headers = build_sign_headers(self.sign, self.api_key)
+
+        logger.info("Listing ISP proxies (API NEW)")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._gateway_base_url}/queryListIsp",
+            headers=headers,
+            data={},
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"List ISP proxies failed: {msg}", code=code, payload=data)
+
+        return data.get("data", [])
+
+    def get_wallet_balance(self) -> Dict[str, Any]:
+        """
+        Get wallet balance for ISP proxies (Public API NEW).
+
+        Returns:
+            Dict with 'walletBalance'.
+
+        Example:
+            >>> result = client.get_wallet_balance()
+            >>> print(f"Wallet: ${result['walletBalance']}")
+        """
+        if not self.sign or not self.api_key:
+            raise ThordataConfigError(
+                "sign and api_key are required for Public API NEW."
+            )
+
+        headers = build_sign_headers(self.sign, self.api_key)
+
+        logger.info("Getting wallet balance (API NEW)")
+
+        response = self._api_request_with_retry(
+            "POST",
+            f"{self._gateway_base_url}/getBalance",
+            headers=headers,
+            data={},
+        )
+        response.raise_for_status()
+
+        data = response.json()
+        code = data.get("code")
+
+        if code != 200:
+            msg = extract_error_message(data)
+            raise_for_code(f"Get wallet balance failed: {msg}", code=code, payload=data)
+
+        return data.get("data", {})
+
+    def get_proxy_expiration(
+        self,
+        ips: Union[str, List[str]],
+        proxy_type: int,
+    ) -> Dict[str, Any]:
+        """
+        Get expiration time for specific proxy IPs.
+
+        Args:
+            ips: Single IP or list of IPs to check.
+            proxy_type: Proxy type (1=ISP, 2=Datacenter).
+
+        Returns:
+            Dict with expiration information.
+
+        Example:
+            >>> result = client.get_proxy_expiration("123.45.67.89", proxy_type=1)
+            >>> print(result)
+        """
+        self._require_public_credentials()
+
+        # Convert list to comma-separated string
+        if isinstance(ips, list):
+            ips = ",".join(ips)
+
+        params = {
+            "token": self.public_token,
+            "key": self.public_key,
+            "proxy_type": str(proxy_type),
+            "ips": ips,
+        }
+
+        logger.info(f"Getting proxy expiration: {ips}")
+
+        response = self._api_request_with_retry(
+            "GET",
+            self._proxy_expiration_url,
+            params=params,
+        )
+        response.raise_for_status()
+
+        data = response.json()
+
+        if isinstance(data, dict):
+            code = data.get("code")
+            if code is not None and code != 200:
+                msg = extract_error_message(data)
+                raise_for_code(f"Get expiration error: {msg}", code=code, payload=data)
+
+            return data.get("data", data)
+
+        return data
+
+    # =========================================================================
+    # Location API Methods (Country/State/City/ASN functions)
+    # =========================================================================
     def list_countries(
         self, proxy_type: Union[ProxyType, int] = ProxyType.RESIDENTIAL
     ) -> List[Dict[str, Any]]:
```
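The account-management methods above split into two credential families: `token`/`key` (Public API, checked by `_require_public_credentials`) and `sign`/`api_key` ("Public API NEW", sent via `build_sign_headers`). A hedged overview sketch reusing `client` from the construction sketch above; it assumes `ThordataConfigError` is importable from `thordata.exceptions`, the module the client imports its other exceptions from:

```python
from thordata.exceptions import ThordataConfigError  # assumption: exception defined here

try:
    flow = client.get_residential_balance()     # needs sign/api_key
    wallet = client.get_wallet_balance()
    print(f"Residential balance: {flow['balance'] / (1024 ** 3):.2f} GB")
    print(f"Wallet balance: ${wallet['walletBalance']}")
except ThordataConfigError:
    print("sign/api_key not configured; set THORDATA_SIGN and THORDATA_API_KEY")

users = client.list_proxy_users()               # needs public_token/public_key
print(f"{users.user_count} proxy users")
```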
```diff
@@ -978,7 +1793,11 @@ class ThordataClient:
         logger.debug(f"Locations API request: {url}")
 
         # Use requests.get directly (no proxy needed for this API)
-        response = self.
+        response = self._api_request_with_retry(
+            "GET",
+            url,
+            params=params,
+        )
         response.raise_for_status()
 
         data = response.json()
@@ -998,9 +1817,8 @@ class ThordataClient:
             return []
 
     # =========================================================================
-    # Helper Methods
+    # Helper Methods (Internal utility functions)
     # =========================================================================
-
     def _require_public_credentials(self) -> None:
         """Ensure public API credentials are available."""
         if not self.public_token or not self.public_key:
```