thordata-sdk 0.7.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- thordata/__init__.py +13 -1
- thordata/_example_utils.py +76 -0
- thordata/_utils.py +46 -3
- thordata/async_client.py +863 -23
- thordata/client.py +1023 -51
- thordata/enums.py +3 -3
- thordata/exceptions.py +16 -5
- thordata/models.py +351 -7
- thordata/retry.py +6 -4
- thordata_sdk-1.0.0.dist-info/METADATA +208 -0
- thordata_sdk-1.0.0.dist-info/RECORD +15 -0
- thordata/parameters.py +0 -53
- thordata_sdk-0.7.0.dist-info/METADATA +0 -1053
- thordata_sdk-0.7.0.dist-info/RECORD +0 -15
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-1.0.0.dist-info}/WHEEL +0 -0
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-1.0.0.dist-info}/licenses/LICENSE +0 -0
- {thordata_sdk-0.7.0.dist-info → thordata_sdk-1.0.0.dist-info}/top_level.txt +0 -0
thordata_sdk-1.0.0.dist-info/METADATA
ADDED

````diff
@@ -0,0 +1,208 @@
+Metadata-Version: 2.4
+Name: thordata-sdk
+Version: 1.0.0
+Summary: The Official Python SDK for Thordata - AI Data Infrastructure & Proxy Network.
+Author-email: Thordata Developer Team <support@thordata.com>
+License: MIT
+Project-URL: Homepage, https://www.thordata.com
+Project-URL: Documentation, https://github.com/Thordata/thordata-python-sdk#readme
+Project-URL: Source, https://github.com/Thordata/thordata-python-sdk
+Project-URL: Tracker, https://github.com/Thordata/thordata-python-sdk/issues
+Project-URL: Changelog, https://github.com/Thordata/thordata-python-sdk/blob/main/CHANGELOG.md
+Keywords: web scraping,proxy,residential proxy,datacenter proxy,ai,llm,data-mining,serp,thordata,web scraper,anti-bot bypass
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Internet :: Proxy Servers
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Typing :: Typed
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: requests>=2.25.0
+Requires-Dist: aiohttp>=3.9.0
+Provides-Extra: dev
+Requires-Dist: pytest>=7.0.0; extra == "dev"
+Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
+Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+Requires-Dist: pytest-httpserver>=1.0.0; extra == "dev"
+Requires-Dist: python-dotenv>=1.0.0; extra == "dev"
+Requires-Dist: black>=23.0.0; extra == "dev"
+Requires-Dist: ruff>=0.1.0; extra == "dev"
+Requires-Dist: mypy>=1.0.0; extra == "dev"
+Requires-Dist: types-requests>=2.28.0; extra == "dev"
+Requires-Dist: aioresponses>=0.7.6; extra == "dev"
+Dynamic: license-file
+
+# Thordata Python SDK
+
+<div align="center">
+
+**Official Python client for Thordata's Proxy Network, SERP API, Web Unlocker, and Web Scraper API.**
+
+*Async-ready, type-safe, built for AI agents and large-scale data collection.*
+
+[](https://pypi.org/project/thordata-sdk/)
+[](https://python.org)
+[](LICENSE)
+
+[Documentation](https://doc.thordata.com) • [Dashboard](https://www.thordata.com) • [Examples](examples/)
+
+</div>
+
+---
+
+## ✨ Features
+
+- 🌐 **Proxy Network**: Residential, Mobile, Datacenter, ISP proxies with geo-targeting
+- 🔍 **SERP API**: Google, Bing, Yandex, DuckDuckGo search results
+- 🔓 **Web Unlocker**: Bypass Cloudflare, CAPTCHAs, anti-bot systems
+- 🕷️ **Web Scraper API**: Async task-based scraping (Text & Video/Audio)
+- 📊 **Account Management**: Usage stats, sub-users, IP whitelist
+- ⚡ **Async Support**: Full async/await support with aiohttp
+- 🔄 **Auto Retry**: Configurable retry with exponential backoff
+
+---
+
+## 📦 Installation
+
+```bash
+pip install thordata-sdk
+```
+
+---
+
+## 🔐 Configuration
+
+Set environment variables:
+
+```bash
+# Required for Scraper APIs (SERP, Universal, Tasks)
+export THORDATA_SCRAPER_TOKEN=your_token
+
+# Public/Location APIs (Dashboard -> My account -> API)
+export THORDATA_PUBLIC_TOKEN=your_public_token
+export THORDATA_PUBLIC_KEY=your_public_key
+
+```
+
+---
+
+## 🚀 Quick Start
+
+```python
+from thordata import ThordataClient, Engine
+
+# Initialize (reads from env vars)
+client = ThordataClient(
+    scraper_token="your_token",
+    public_token="pub_token",
+    public_key="pub_key"
+)
+
+# SERP Search
+results = client.serp_search("python tutorial", engine=Engine.GOOGLE)
+print(f"Found {len(results.get('organic', []))} results")
+
+# Universal Scrape
+html = client.universal_scrape("https://httpbin.org/html")
+print(html[:100])
+```
+
+---
+
+## 📖 Feature Guide
+
+### SERP API
+
+```python
+from thordata import SerpRequest
+
+# Advanced search
+results = client.serp_search_advanced(SerpRequest(
+    query="pizza",
+    engine="google_local",
+    country="us",
+    location="New York",
+    num=10
+))
+```
+
+### Web Scraper API (Async Tasks)
+
+**Create Task:**
+```python
+task_id = client.create_scraper_task(
+    file_name="my_task",
+    spider_id="universal",
+    spider_name="universal",
+    parameters={"url": "https://example.com"}
+)
+```
+
+**Video Download (New):**
+```python
+from thordata import CommonSettings
+
+task_id = client.create_video_task(
+    file_name="{{VideoID}}",
+    spider_id="youtube_video_by-url",
+    spider_name="youtube.com",
+    parameters={"url": "https://youtube.com/watch?v=..."},
+    common_settings=CommonSettings(resolution="1080p")
+)
+```
+
+**Wait & Download:**
+```python
+status = client.wait_for_task(task_id)
+if status == "ready":
+    url = client.get_task_result(task_id)
+    print(url)
+```
+
+### Account Management
+
+```python
+# Usage Statistics
+stats = client.get_usage_statistics("2024-01-01", "2024-01-31")
+print(f"Balance: {stats.balance_gb():.2f} GB")
+
+# Proxy Users
+users = client.list_proxy_users()
+print(f"Sub-users: {users.user_count}")
+
+# Whitelist IP
+client.add_whitelist_ip("1.2.3.4")
+```
+
+### Proxy Network
+
+```python
+from thordata import ProxyConfig
+
+# Generate Proxy URL
+proxy_url = client.build_proxy_url(
+    username="proxy_user",
+    password="proxy_pass",
+    country="us",
+    city="ny"
+)
+
+# Use with requests
+import requests
+requests.get("https://httpbin.org/ip", proxies={"http": proxy_url, "https": proxy_url})
+```
+
+---
+
+## 📄 License
+
+MIT License
````
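The README above advertises full async/await support, and the file list shows `thordata/async_client.py` growing by roughly 860 lines, but only the synchronous `ThordataClient` is demonstrated. A minimal sketch of what asynchronous usage might look like follows; the `AsyncThordataClient` name and the awaitable method signatures are assumptions that mirror the sync examples, not something shown in this diff:

```python
import asyncio

# Assumed export name; the diff only shows that thordata/async_client.py exists.
from thordata import AsyncThordataClient, Engine


async def main() -> None:
    # Assumed to mirror ThordataClient's constructor (tokens may also come from env vars).
    client = AsyncThordataClient(scraper_token="your_token")

    # Assumed async counterparts of the sync serp_search / universal_scrape calls above.
    results = await client.serp_search("python tutorial", engine=Engine.GOOGLE)
    html = await client.universal_scrape("https://httpbin.org/html")

    print(f"Found {len(results.get('organic', []))} results")
    print(html[:100])


asyncio.run(main())
```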
thordata_sdk-1.0.0.dist-info/RECORD
ADDED

````diff
@@ -0,0 +1,15 @@
+thordata/__init__.py,sha256=MILcOkXK1A3U7pCNZxGqVGFMEvdfMJC-Eki2QaNlCdc,3195
+thordata/_example_utils.py,sha256=a7hSQwxS9OqOatvRYm2NkZRakSJ186tNWbHpJ7WskFc,2185
+thordata/_utils.py,sha256=oMPjR6wSmNonU5dJP6NHgYyV0BDO8b_0zxtKuck1htg,4701
+thordata/async_client.py,sha256=XSsT3ariDWSvP32ny_aqf5dTN56T7yMTraaJOgeDi-g,55416
+thordata/client.py,sha256=mUp6pwZKLca9tNljV_Gv4leOrMJ4LQsveiqdKroYp3I,64494
+thordata/demo.py,sha256=zmG4I4cHXnbmQfbr063SeRK7_9IXrfof9QFoGqGTVm8,3806
+thordata/enums.py,sha256=MpZnS9_8sg2vtcFqM6UicB94cKZm5R1t83L3ejNSbLs,8502
+thordata/exceptions.py,sha256=IgMsFuh49cPxU5YofsKP1UhP5A_snhtuN6xD1yZWLiI,10018
+thordata/models.py,sha256=X7wxS6kk18OGX-OLvCdxl0rhpQrAAM3WYLRnjzrWcOM,37690
+thordata/retry.py,sha256=vb05YexCHjiiZTSm8_eK-_3BsCecplGpFjQ3XBwQ8FY,11505
+thordata_sdk-1.0.0.dist-info/licenses/LICENSE,sha256=bAxpWgQIzb-5jl3nhLdOwOJ_vlbHLtSG7yev2B7vioY,1088
+thordata_sdk-1.0.0.dist-info/METADATA,sha256=qI0Weaz5DOzafF6CWv8gKBkozlhX7HVc4-qFQC66sgY,5667
+thordata_sdk-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+thordata_sdk-1.0.0.dist-info/top_level.txt,sha256=Z8R_07m0lXCCSb1hapL9_nxMtyO3rf_9wOvq4n9u2Hg,9
+thordata_sdk-1.0.0.dist-info/RECORD,,
````
thordata/parameters.py
DELETED
````diff
@@ -1,53 +0,0 @@
-# src/thordata/parameters.py
-
-from typing import Any, Dict
-
-
-def normalize_serp_params(engine: str, query: str, **kwargs) -> Dict[str, Any]:
-    """
-    Normalizes parameters across different search engines to ensure a unified API surface.
-
-    Args:
-        engine (str): The search engine to use (e.g., 'google', 'yandex').
-        query (str): The search query string.
-        **kwargs: Additional parameters to pass to the API.
-
-    Returns:
-        Dict[str, Any]: The constructed payload for the API request.
-    """
-    # 1. Base parameters
-    payload = {
-        "num": str(kwargs.get("num", 10)),  # Default to 10 results
-        "json": "1",  # Force JSON response
-        "engine": engine,
-    }
-
-    # 2. Handle Query Parameter Differences (Yandex uses 'text', others use 'q')
-    if engine == "yandex":
-        payload["text"] = query
-        # Set default URL for Yandex if not provided
-        if "url" not in kwargs:
-            payload["url"] = "yandex.com"
-    else:
-        payload["q"] = query
-
-    # 3. Handle Default URLs for other engines
-    if "url" not in kwargs:
-        defaults = {
-            "google": "google.com",
-            "bing": "bing.com",
-            "duckduckgo": "duckduckgo.com",
-            "baidu": "baidu.com",
-        }
-        if engine in defaults:
-            payload["url"] = defaults[engine]
-
-    # 4. Passthrough for all other user-provided arguments
-    # This allows support for engine-specific parameters (e.g., tbm, uule, gl)
-    # without explicitly defining them all.
-    protected_keys = {"num", "engine", "q", "text"}
-    for key, value in kwargs.items():
-        if key not in protected_keys:
-            payload[key] = value
-
-    return payload
````
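Because the removed `normalize_serp_params` helper is self-contained, its behavior can be pinned down directly from the deleted hunk above. The expected payloads below follow from that code; this sketch assumes the deleted function body has been pasted into the same module, since it is no longer importable from thordata 1.0.0:

```python
# Assumes the normalize_serp_params definition from the deleted hunk above
# has been copied into this module; it does not ship with thordata 1.0.0.

# Non-Yandex engines: the query goes into "q" and a default URL is filled in.
assert normalize_serp_params("google", "pizza") == {
    "num": "10",          # num is stringified, defaulting to 10
    "json": "1",          # JSON output is always forced
    "engine": "google",
    "q": "pizza",
    "url": "google.com",
}

# Yandex: the query goes into "text"; unknown kwargs (e.g. gl) pass straight through.
assert normalize_serp_params("yandex", "weather in berlin", num=5, gl="de") == {
    "num": "5",
    "json": "1",
    "engine": "yandex",
    "text": "weather in berlin",
    "url": "yandex.com",
    "gl": "de",
}
```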