thordata-js-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +342 -0
- package/dist/examples/basic_scraper_task.d.ts +1 -0
- package/dist/examples/basic_scraper_task.js +46 -0
- package/dist/examples/basic_scraper_task.js.map +1 -0
- package/dist/examples/basic_serp.d.ts +1 -0
- package/dist/examples/basic_serp.js +29 -0
- package/dist/examples/basic_serp.js.map +1 -0
- package/dist/examples/basic_universal.d.ts +1 -0
- package/dist/examples/basic_universal.js +23 -0
- package/dist/examples/basic_universal.js.map +1 -0
- package/dist/examples/proxy_ip_check.d.ts +1 -0
- package/dist/examples/proxy_ip_check.js +37 -0
- package/dist/examples/proxy_ip_check.js.map +1 -0
- package/dist/examples/serp_google_news.d.ts +7 -0
- package/dist/examples/serp_google_news.js +64 -0
- package/dist/examples/serp_google_news.js.map +1 -0
- package/dist/src/client.d.ts +39 -0
- package/dist/src/client.js +279 -0
- package/dist/src/client.js.map +1 -0
- package/dist/src/endpoints.d.ts +7 -0
- package/dist/src/endpoints.js +21 -0
- package/dist/src/endpoints.js.map +1 -0
- package/dist/src/enums.d.ts +22 -0
- package/dist/src/enums.js +26 -0
- package/dist/src/enums.js.map +1 -0
- package/dist/src/errors.d.ts +33 -0
- package/dist/src/errors.js +58 -0
- package/dist/src/errors.js.map +1 -0
- package/dist/src/index.d.ts +6 -0
- package/dist/src/index.js +8 -0
- package/dist/src/index.js.map +1 -0
- package/dist/src/models.d.ts +130 -0
- package/dist/src/models.js +92 -0
- package/dist/src/models.js.map +1 -0
- package/dist/src/retry.d.ts +9 -0
- package/dist/src/retry.js +19 -0
- package/dist/src/retry.js.map +1 -0
- package/dist/src/utils.d.ts +31 -0
- package/dist/src/utils.js +142 -0
- package/dist/src/utils.js.map +1 -0
- package/package.json +72 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Thordata

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,342 @@
# Thordata JS SDK (Node.js / TypeScript)

Official JavaScript/TypeScript SDK for Thordata.

This SDK supports:

- **SERP API** (Google / Bing / Yandex / DuckDuckGo / Baidu)
- **Web Unlocker / Universal API**
- **Web Scraper API** (task-based scraping)

It is designed to be:

- **TypeScript-first**
- **ESM-ready**
- **Offline-test friendly** (base URLs can be overridden to run examples/tests against a mock server)

---

## 📦 Installation

```bash
npm install thordata-js-sdk
```

If you are developing locally:

```bash
git clone https://github.com/Thordata/thordata-js-sdk.git
cd thordata-js-sdk
npm install
npm run build
```

---

## 🔑 Configuration

Set environment variables:

```bash
export THORDATA_SCRAPER_TOKEN=your_scraper_token
export THORDATA_PUBLIC_TOKEN=your_public_token
export THORDATA_PUBLIC_KEY=your_public_key
```

Or create a `.env` file (the examples load it via dotenv):

```env
THORDATA_SCRAPER_TOKEN=your_scraper_token
THORDATA_PUBLIC_TOKEN=your_public_token
THORDATA_PUBLIC_KEY=your_public_key
```
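
The compiled examples in `dist/examples/` follow this pattern. As a minimal sketch (assuming only the variable names above), loading `.env` and failing fast on a missing token looks like this:

```typescript
import "dotenv/config"; // populate process.env from .env
import { ThordataClient } from "thordata-js-sdk";

const scraperToken = process.env.THORDATA_SCRAPER_TOKEN;
if (!scraperToken) {
  // Every API call needs at least the scraper token, so fail fast.
  throw new Error("THORDATA_SCRAPER_TOKEN is not set");
}

const client = new ThordataClient({ scraperToken });
```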

---

## 🚀 Quick Start

### Create a client

```typescript
import { ThordataClient } from "thordata-js-sdk";

const client = new ThordataClient({
  scraperToken: process.env.THORDATA_SCRAPER_TOKEN!,
  publicToken: process.env.THORDATA_PUBLIC_TOKEN,
  publicKey: process.env.THORDATA_PUBLIC_KEY,
});
```

---

## 🔍 SERP API

### Basic Google Search

```typescript
import { ThordataClient, Engine } from "thordata-js-sdk";

const client = new ThordataClient({ scraperToken: process.env.THORDATA_SCRAPER_TOKEN! });

const data = await client.serpSearch({
  query: "Thordata proxy network",
  engine: Engine.GOOGLE,
  num: 5,
});

const organic = data?.organic ?? [];
console.log(`Found ${organic.length} organic results`);

for (const item of organic.slice(0, 3)) {
  console.log("-", item.title, "->", item.link);
}
```

### Recommended engines for Google verticals (News / Shopping)

Thordata supports both:

- Dedicated engines (recommended): `google_news`, `google_shopping`
- Generic Google + tbm via `searchType` (alternative)

#### Google News (recommended):

```typescript
const data = await client.serpSearch({
  query: "AI regulation",
  engine: "google_news",
  country: "us",
  language: "en",
  num: 10,
  so: 1, // 0=relevance, 1=date (Google News)
});
```

#### Google Shopping (recommended):

```typescript
const data = await client.serpSearch({
  query: "iPhone 15",
  engine: "google_shopping",
  country: "us",
  language: "en",
  num: 10,
  min_price: 500,
  max_price: 1500,
});
```

#### Alternative: Google generic engine + tbm (via searchType):

```typescript
const data = await client.serpSearch({
  query: "iPhone 15",
  engine: "google",
  searchType: "shopping", // maps to tbm=shop
  country: "us",
  language: "en",
  num: 10,
});
```

Official and up-to-date parameters are documented at: https://doc.thordata.com

---

## 🌐 Web Unlocker / Universal API

### Basic HTML scraping

```typescript
const html = await client.universalScrape({
  url: "https://httpbin.org/html",
  jsRender: false,
  outputFormat: "html",
});

console.log(String(html).slice(0, 300));
```

### JS rendering + wait for selector

```typescript
const html = await client.universalScrape({
  url: "https://example.com/spa",
  jsRender: true,
  outputFormat: "html",
  waitFor: ".main-content",
});
```

### Screenshot (PNG)

```typescript
import { writeFileSync } from "node:fs";

const pngBytes = await client.universalScrape({
  url: "https://example.com",
  jsRender: true,
  outputFormat: "png",
});

writeFileSync("screenshot.png", pngBytes as Buffer);
```

---

## 🕷️ Web Scraper API (Task-based)

Requires `THORDATA_PUBLIC_TOKEN` and `THORDATA_PUBLIC_KEY`.

```typescript
const client = new ThordataClient({
  scraperToken: process.env.THORDATA_SCRAPER_TOKEN!,
  publicToken: process.env.THORDATA_PUBLIC_TOKEN,
  publicKey: process.env.THORDATA_PUBLIC_KEY,
});

const taskId = await client.createScraperTask({
  fileName: "demo_task",
  spiderId: "example-spider-id",
  spiderName: "example.com",
  parameters: { url: "https://example.com" },
});

console.log("Task created:", taskId);

const status = await client.waitForTask(taskId, {
  pollIntervalMs: 5000,
  maxWaitMs: 60000,
});

console.log("Final status:", status);

if (["ready", "success", "finished"].includes(status.toLowerCase())) {
  const downloadUrl = await client.getTaskResult(taskId, "json");
  console.log("Download URL:", downloadUrl);
}
```
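
`waitForTask` wraps the polling loop for you. If you need custom backoff or progress reporting, the same loop can be hand-rolled on top of `getTaskStatus`. A sketch under the same status-string assumptions as above (not SDK-provided code):

```typescript
// Hypothetical manual alternative to client.waitForTask().
async function pollTask(taskId: string): Promise<string> {
  const deadline = Date.now() + 60_000;
  while (Date.now() < deadline) {
    const status = await client.getTaskStatus(taskId);
    if (["ready", "success", "finished"].includes(status.toLowerCase())) {
      return status;
    }
    await new Promise((r) => setTimeout(r, 5000)); // pause between polls
  }
  throw new Error(`Task ${taskId} did not finish within 60s`);
}
```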

---

## 🔧 Errors & Response Codes

The SDK throws typed errors when the API returns a non-success code (or non-2xx HTTP status).

| Code    | Typical Meaning       | Error class                                    |
| ------- | --------------------- | ---------------------------------------------- |
| 200     | Success               | -                                              |
| 300     | Not collected         | `ThordataNotCollectedError`                    |
| 400     | Bad request           | `ThordataValidationError`                      |
| 401/403 | Auth/Forbidden        | `ThordataAuthError`                            |
| 402/429 | Quota/Rate limit      | `ThordataRateLimitError`                       |
| 5xx     | Server/timeout issues | `ThordataServerError` / `ThordataTimeoutError` |

### Example error handling:

```typescript
import {
  ThordataAuthError,
  ThordataRateLimitError,
  ThordataTimeoutError,
  ThordataNotCollectedError,
} from "thordata-js-sdk";

try {
  const data = await client.serpSearch({ query: "test", engine: "google" });
  console.log(data);
} catch (e) {
  if (e instanceof ThordataAuthError) {
    console.error("Auth error: check your token.");
  } else if (e instanceof ThordataRateLimitError) {
    console.error(`Rate limited. Retry after: ${e.retryAfter ?? "N/A"} seconds.`);
  } else if (e instanceof ThordataNotCollectedError) {
    console.error("Not collected (code=300). Consider retrying.");
  } else if (e instanceof ThordataTimeoutError) {
    console.error("Request timed out.");
  } else {
    console.error("Unexpected error:", e);
  }
}
```
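
Because code 300 only means the page was not collected on that attempt, wrapping a call in a small retry helper is a common pattern. This is a sketch, not part of the SDK; the client's built-in `maxRetries` option may already cover simple cases:

```typescript
import { ThordataNotCollectedError } from "thordata-js-sdk";

// Retry only on "not collected" (code 300); rethrow everything else.
async function withRetry<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  for (let i = 1; ; i++) {
    try {
      return await fn();
    } catch (e) {
      if (!(e instanceof ThordataNotCollectedError) || i >= attempts) throw e;
      await new Promise((r) => setTimeout(r, 1000 * i)); // linear backoff
    }
  }
}

const data = await withRetry(() => client.serpSearch({ query: "test", engine: "google" }));
```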

---

## 🔗 Base URL Overrides (for offline tests / custom routing)

You can override API base URLs via environment variables:

```bash
export THORDATA_SCRAPERAPI_BASE_URL=http://127.0.0.1:12345
export THORDATA_UNIVERSALAPI_BASE_URL=http://127.0.0.1:12345
export THORDATA_WEB_SCRAPER_API_BASE_URL=http://127.0.0.1:12345
export THORDATA_LOCATIONS_BASE_URL=http://127.0.0.1:12345
```

Or via client config:

```typescript
const client = new ThordataClient({
  scraperToken: "dummy",
  baseUrls: { scraperapiBaseUrl: "http://127.0.0.1:12345" },
});
```
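
This is how the offline tests in `tests/` can run without credentials. As an illustration only, here is a throwaway mock server; note that the JSON envelope a real endpoint returns is defined by the API, so the `{ organic: [] }` body below is an assumption:

```typescript
import { createServer } from "node:http";
import { ThordataClient } from "thordata-js-sdk";

// Answer every route with a canned JSON body (shape assumed for illustration).
const server = createServer((_req, res) => {
  res.setHeader("content-type", "application/json");
  res.end(JSON.stringify({ organic: [] }));
});
server.listen(12345);

const client = new ThordataClient({
  scraperToken: "dummy",
  baseUrls: { scraperapiBaseUrl: "http://127.0.0.1:12345" },
});
// Requests now hit the mock instead of the real API.
```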

---

## 🧪 Development

```bash
npm install
npm run build
npm test
```

### Run examples (compiled):

```bash
node dist/examples/basic_serp.js
node dist/examples/basic_universal.js
```

---

## 📁 Project Structure

```
thordata-js-sdk/
├── src/
│   ├── index.ts
│   ├── client.ts
│   ├── models.ts
│   ├── enums.ts
│   ├── errors.ts
│   ├── retry.ts
│   ├── endpoints.ts
│   └── utils.ts
├── examples/
│   ├── basic_serp.ts
│   ├── basic_universal.ts
│   ├── basic_scraper_task.ts
│   └── serp_google_news.ts
├── tests/
│   ├── serp.offline.test.ts
│   ├── mockServer.ts
│   └── examples.e2e.test.ts
├── .github/workflows/ci.yml
├── package.json
├── tsconfig.json
├── tsconfig.build.json
└── README.md
```

---

## 🔮 Roadmap

- Publish stable releases to npm
- Add async streaming / higher-level helpers for AI agents
- Expand coverage for more engines/verticals (Flights/Maps/Scholar/Jobs, etc.)
- Add integration tests (optional scheduled job with real tokens)

---

package/dist/examples/basic_scraper_task.d.ts
ADDED
@@ -0,0 +1 @@
import "dotenv/config";

package/dist/examples/basic_scraper_task.js
ADDED
@@ -0,0 +1,46 @@
// examples/basic_scraper_task.ts
import "dotenv/config";
import { ThordataClient } from "../src/index.js";
async function main() {
    const scraperToken = process.env.THORDATA_SCRAPER_TOKEN;
    const publicToken = process.env.THORDATA_PUBLIC_TOKEN;
    const publicKey = process.env.THORDATA_PUBLIC_KEY;
    if (!scraperToken || !publicToken || !publicKey) {
        console.error("Please set THORDATA_SCRAPER_TOKEN, THORDATA_PUBLIC_TOKEN, THORDATA_PUBLIC_KEY in .env");
        process.exit(1);
    }
    const client = new ThordataClient({
        scraperToken,
        publicToken,
        publicKey,
    });
    console.log("🕷️ Creating Web Scraper task (example only)...");
    try {
        const taskId = await client.createScraperTask({
            fileName: "demo_task",
            spiderId: "example-spider-id",
            spiderName: "example.com",
            parameters: {
                url: "https://example.com",
            },
        });
        console.log("Task created:", taskId);
        console.log("⏱️ Waiting for task completion...");
        const status = await client.waitForTask(taskId, {
            pollIntervalMs: 5000,
            maxWaitMs: 60_000,
        });
        console.log("Final status:", status);
        if (status.toLowerCase() === "ready" || status.toLowerCase() === "success") {
            const downloadUrl = await client.getTaskResult(taskId, "json");
            console.log("Download URL:", downloadUrl);
        }
    }
    catch (err) {
        console.error("Error:", err);
    }
}
main().catch((err) => {
    console.error("Fatal error:", err);
});
//# sourceMappingURL=basic_scraper_task.js.map

package/dist/examples/basic_scraper_task.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"basic_scraper_task.js","sourceRoot":"","sources":["../../examples/basic_scraper_task.ts"],"names":[],"mappings":"AAAA,iCAAiC;AAEjC,OAAO,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAEjD,KAAK,UAAU,IAAI;IACjB,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;IACxD,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC;IACtD,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;IAElD,IAAI,CAAC,YAAY,IAAI,CAAC,WAAW,IAAI,CAAC,SAAS,EAAE,CAAC;QAChD,OAAO,CAAC,KAAK,CACX,uFAAuF,CACxF,CAAC;QACF,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC;QAChC,YAAY;QACZ,WAAW;QACX,SAAS;KACV,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,kDAAkD,CAAC,CAAC;IAChE,IAAI,CAAC;QACH,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,iBAAiB,CAAC;YAC5C,QAAQ,EAAE,WAAW;YACrB,QAAQ,EAAE,mBAAmB;YAC7B,UAAU,EAAE,aAAa;YACzB,UAAU,EAAE;gBACV,GAAG,EAAE,qBAAqB;aAC3B;SACF,CAAC,CAAC;QAEH,OAAO,CAAC,GAAG,CAAC,eAAe,EAAE,MAAM,CAAC,CAAC;QAErC,OAAO,CAAC,GAAG,CAAC,oCAAoC,CAAC,CAAC;QAClD,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,MAAM,EAAE;YAC9C,cAAc,EAAE,IAAI;YACpB,SAAS,EAAE,MAAM;SAClB,CAAC,CAAC;QAEH,OAAO,CAAC,GAAG,CAAC,eAAe,EAAE,MAAM,CAAC,CAAC;QAErC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,OAAO,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,SAAS,EAAE,CAAC;YAC3E,MAAM,WAAW,GAAG,MAAM,MAAM,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAC/D,OAAO,CAAC,GAAG,CAAC,eAAe,EAAE,WAAW,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;IAC/B,CAAC;AACH,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;IACnB,OAAO,CAAC,KAAK,CAAC,cAAc,EAAE,GAAG,CAAC,CAAC;AACrC,CAAC,CAAC,CAAC"}

package/dist/examples/basic_serp.d.ts
ADDED
@@ -0,0 +1 @@
import "dotenv/config";

package/dist/examples/basic_serp.js
ADDED
@@ -0,0 +1,29 @@
// examples/basic_serp.ts
import "dotenv/config";
import { ThordataClient, Engine } from "../src/index.js";
async function main() {
    const token = process.env.THORDATA_SCRAPER_TOKEN;
    if (!token) {
        console.error("Please set THORDATA_SCRAPER_TOKEN in .env");
        process.exit(1);
    }
    const client = new ThordataClient({ scraperToken: token });
    console.log("🔍 Google Search: 'Thordata proxy network'");
    const results = await client.serpSearch({
        query: "Thordata proxy network",
        engine: Engine.GOOGLE,
        num: 5,
    });
    // For debugging: inspect the raw response
    console.dir(results, { depth: 4 });
    // Prefer 'organic'; fall back to 'organic_results'
    const organic = results?.organic ?? results?.organic_results ?? [];
    console.log(`Found ${organic.length} organic results`);
    for (const item of organic.slice(0, 3)) {
        console.log("-", item.title, "->", item.link);
    }
}
main().catch((err) => {
    console.error("Error:", err);
});
//# sourceMappingURL=basic_serp.js.map

package/dist/examples/basic_serp.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"basic_serp.js","sourceRoot":"","sources":["../../examples/basic_serp.ts"],"names":[],"mappings":"AAAA,yBAAyB;AAEzB,OAAO,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzD,KAAK,UAAU,IAAI;IACjB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;IACjD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IAE3D,OAAO,CAAC,GAAG,CAAC,4CAA4C,CAAC,CAAC;IAC1D,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,CAAC;QACtC,KAAK,EAAE,wBAAwB;QAC/B,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,GAAG,EAAE,CAAC;KACP,CAAC,CAAC;IAEH,cAAc;IACd,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;IAEnC,uCAAuC;IACvC,MAAM,OAAO,GAAG,OAAO,EAAE,OAAO,IAAI,OAAO,EAAE,eAAe,IAAI,EAAE,CAAC;IACnE,OAAO,CAAC,GAAG,CAAC,SAAS,OAAO,CAAC,MAAM,kBAAkB,CAAC,CAAC;IACvD,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC;QACvC,OAAO,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;IAChD,CAAC;AACH,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;IACnB,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;AAC/B,CAAC,CAAC,CAAC"}

package/dist/examples/basic_universal.d.ts
ADDED
@@ -0,0 +1 @@
import "dotenv/config";

package/dist/examples/basic_universal.js
ADDED
@@ -0,0 +1,23 @@
// examples/basic_universal.ts
import "dotenv/config";
import { ThordataClient } from "../src/index.js";
async function main() {
    const token = process.env.THORDATA_SCRAPER_TOKEN;
    if (!token) {
        console.error("Please set THORDATA_SCRAPER_TOKEN in .env");
        process.exit(1);
    }
    const client = new ThordataClient({ scraperToken: token });
    console.log("🌐 Universal Scrape: https://httpbin.org/html");
    const html = await client.universalScrape({
        url: "https://httpbin.org/html",
        jsRender: false,
        outputFormat: "html",
    });
    console.log("Preview:");
    console.log(String(html).slice(0, 300));
}
main().catch((err) => {
    console.error("Error:", err);
});
//# sourceMappingURL=basic_universal.js.map

package/dist/examples/basic_universal.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"basic_universal.js","sourceRoot":"","sources":["../../examples/basic_universal.ts"],"names":[],"mappings":"AAAA,8BAA8B;AAE9B,OAAO,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAEjD,KAAK,UAAU,IAAI;IACjB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;IACjD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IAE3D,OAAO,CAAC,GAAG,CAAC,+CAA+C,CAAC,CAAC;IAC7D,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,eAAe,CAAC;QACxC,GAAG,EAAE,0BAA0B;QAC/B,QAAQ,EAAE,KAAK;QACf,YAAY,EAAE,MAAM;KACrB,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxB,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC;AAC1C,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;IACnB,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;AAC/B,CAAC,CAAC,CAAC"}

package/dist/examples/proxy_ip_check.d.ts
ADDED
@@ -0,0 +1 @@
import "dotenv/config";

package/dist/examples/proxy_ip_check.js
ADDED
@@ -0,0 +1,37 @@
// examples/proxy_ip_check.ts
import "dotenv/config";
import { ThordataClient, ProxyConfig } from "../src/index.js";
async function main() {
    const scraperToken = process.env.THORDATA_SCRAPER_TOKEN;
    const proxyUser = process.env.THORDATA_PROXY_USERNAME;
    const proxyPass = process.env.THORDATA_PROXY_PASSWORD;
    const proxyHost = process.env.THORDATA_PROXY_HOST || "t.pr.thordata.net";
    const proxyPort = Number(process.env.THORDATA_PROXY_PORT || "9999");
    if (!scraperToken) {
        console.error("Please set THORDATA_SCRAPER_TOKEN in .env");
        process.exit(1);
    }
    if (!proxyUser || !proxyPass) {
        console.error("Please set THORDATA_PROXY_USERNAME and THORDATA_PROXY_PASSWORD in .env");
        process.exit(1);
    }
    const client = new ThordataClient({ scraperToken });
    const proxy = new ProxyConfig({
        baseUsername: proxyUser,
        password: proxyPass,
        host: proxyHost,
        port: proxyPort,
        country: "us",
    });
    // Use HTTP to avoid HTTPS certificate/hostname mismatch issues
    const targetUrl = "http://ipinfo.thordata.com";
    // Or use http://httpbin.org/ip for the demo
    // const targetUrl = "http://httpbin.org/ip";
    console.log(`🌐 Requesting ${targetUrl} via Thordata proxy...`);
    const data = await client.requestViaProxy(targetUrl, proxy);
    console.log("Response JSON:", data);
}
main().catch((err) => {
    console.error("Error:", err);
});
//# sourceMappingURL=proxy_ip_check.js.map

package/dist/examples/proxy_ip_check.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"proxy_ip_check.js","sourceRoot":"","sources":["../../examples/proxy_ip_check.ts"],"names":[],"mappings":"AAAA,6BAA6B;AAE7B,OAAO,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAE9D,KAAK,UAAU,IAAI;IACjB,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;IACxD,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC;IACtD,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC;IACtD,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,IAAI,mBAAmB,CAAC;IACzE,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,IAAI,MAAM,CAAC,CAAC;IAEpE,IAAI,CAAC,YAAY,EAAE,CAAC;QAClB,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,IAAI,CAAC,SAAS,IAAI,CAAC,SAAS,EAAE,CAAC;QAC7B,OAAO,CAAC,KAAK,CAAC,wEAAwE,CAAC,CAAC;QACxF,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;IAEpD,MAAM,KAAK,GAAG,IAAI,WAAW,CAAC;QAC5B,YAAY,EAAE,SAAS;QACvB,QAAQ,EAAE,SAAS;QACnB,IAAI,EAAE,SAAS;QACf,IAAI,EAAE,SAAS;QACf,OAAO,EAAE,IAAI;KACd,CAAC,CAAC;IAEH,+BAA+B;IAC/B,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,gCAAgC;IAChC,6CAA6C;IAE7C,OAAO,CAAC,GAAG,CAAC,iBAAiB,SAAS,wBAAwB,CAAC,CAAC;IAEhE,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,eAAe,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;IAC5D,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAC;AACtC,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;IACnB,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;AAC/B,CAAC,CAAC,CAAC"}

package/dist/examples/serp_google_news.js
ADDED
@@ -0,0 +1,64 @@
/**
 * SERP Google News Demo
 *
 * Usage:
 *   npx ts-node examples/serp_google_news.ts
 */
import "dotenv/config";
import { ThordataClient, Engine } from "../src/index.js";
async function main() {
    const token = process.env.THORDATA_SCRAPER_TOKEN;
    if (!token) {
        console.error("❌ Error: THORDATA_SCRAPER_TOKEN not found in .env");
        process.exit(1);
    }
    const client = new ThordataClient({
        scraperToken: token,
        maxRetries: 3,
    });
    // 1. Basic News Search
    console.log("\n📰 1. Basic Google News Search: 'AI regulation'");
    try {
        const results = await client.serpSearch({
            query: "AI regulation",
            engine: Engine.GOOGLE_NEWS,
            country: "us",
            language: "en",
            num: 5,
        });
        printNewsResults(results);
    }
    catch (e) {
        console.error("❌ Search failed:", e.message);
    }
    // 2. Advanced News Filters
    console.log("\n📰 2. Advanced Filters (Sort by Date)");
    try {
        const results = await client.serpSearch({
            query: "Artificial Intelligence",
            engine: Engine.GOOGLE_NEWS,
            country: "us",
            language: "en",
            num: 5,
            so: 1, // Sort by date
        });
        printNewsResults(results);
    }
    catch (e) {
        console.error("❌ Advanced search failed:", e.message);
    }
}
function printNewsResults(results) {
    const news = results?.news ?? [];
    console.log(`✅ Found ${news.length} news items:`);
    news.slice(0, 5).forEach((item) => {
        console.log(`  ${item.rank}. [${item.source}] ${item.title}`);
        console.log(`     📅 ${item.date}`);
        console.log(`     🔗 ${item.link}`);
    });
    console.log("");
}
main().catch((err) => {
    console.error("Fatal error:", err);
});
//# sourceMappingURL=serp_google_news.js.map

package/dist/examples/serp_google_news.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"serp_google_news.js","sourceRoot":"","sources":["../../examples/serp_google_news.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzD,KAAK,UAAU,IAAI;IACjB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;IACjD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,OAAO,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QACnE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC;QAChC,YAAY,EAAE,KAAK;QACnB,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;IAEH,uBAAuB;IACvB,OAAO,CAAC,GAAG,CAAC,mDAAmD,CAAC,CAAC;IACjE,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,CAAC;YACtC,KAAK,EAAE,eAAe;YACtB,MAAM,EAAE,MAAM,CAAC,WAAW;YAC1B,OAAO,EAAE,IAAI;YACb,QAAQ,EAAE,IAAI;YACd,GAAG,EAAE,CAAC;SACP,CAAC,CAAC;QACH,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC5B,CAAC;IAAC,OAAO,CAAM,EAAE,CAAC;QAChB,OAAO,CAAC,KAAK,CAAC,kBAAkB,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED,2BAA2B;IAC3B,OAAO,CAAC,GAAG,CAAC,yCAAyC,CAAC,CAAC;IACvD,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,CAAC;YACtC,KAAK,EAAE,yBAAyB;YAChC,MAAM,EAAE,MAAM,CAAC,WAAW;YAC1B,OAAO,EAAE,IAAI;YACb,QAAQ,EAAE,IAAI;YACd,GAAG,EAAE,CAAC;YACN,EAAE,EAAE,CAAC,EAAE,eAAe;SACvB,CAAC,CAAC;QACH,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC5B,CAAC;IAAC,OAAO,CAAM,EAAE,CAAC;QAChB,OAAO,CAAC,KAAK,CAAC,2BAA2B,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IACxD,CAAC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,OAAY;IACpC,MAAM,IAAI,GAAG,OAAO,EAAE,IAAI,IAAI,EAAE,CAAC;IAEjC,OAAO,CAAC,GAAG,CAAC,WAAW,IAAI,CAAC,MAAM,cAAc,CAAC,CAAC;IAElD,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,IAAS,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,IAAI,MAAM,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;QAC/D,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAClB,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;IACnB,OAAO,CAAC,KAAK,CAAC,cAAc,EAAE,GAAG,CAAC,CAAC;AACrC,CAAC,CAAC,CAAC"}

package/dist/src/client.d.ts
ADDED
@@ -0,0 +1,39 @@
import { SerpOptions, UniversalOptions, ScraperTaskOptions, WaitForTaskOptions, ProxyConfig } from "./models.js";
import { type ThordataBaseUrls } from "./endpoints.js";
export interface ThordataClientConfig {
    scraperToken: string;
    publicToken?: string;
    publicKey?: string;
    timeoutMs?: number;
    maxRetries?: number;
    baseUrls?: Partial<ThordataBaseUrls>;
    userAgent?: string;
}
export declare class ThordataClient {
    private scraperToken;
    private publicToken?;
    private publicKey?;
    private timeoutMs;
    private maxRetries;
    private http;
    private baseUrls;
    private userAgent;
    private serpUrl;
    private universalUrl;
    private scraperBuilderUrl;
    private scraperStatusUrl;
    private scraperDownloadUrl;
    constructor(config: ThordataClientConfig);
    /**
     * Internal helper to execute request with retry logic
     */
    private execute;
    serpSearch(options: SerpOptions): Promise<any>;
    universalScrape(options: UniversalOptions): Promise<string | Buffer>;
    createScraperTask(options: ScraperTaskOptions): Promise<string>;
    private requirePublicCreds;
    getTaskStatus(taskId: string): Promise<string>;
    getTaskResult(taskId: string, fileType?: "json" | "csv" | "xlsx"): Promise<string>;
    waitForTask(taskId: string, options?: WaitForTaskOptions): Promise<string>;
    requestViaProxy(url: string, proxyConfig: ProxyConfig, axiosConfig?: Record<string, any>): Promise<any>;
}
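
Note: `requestViaProxy` above is the one client method the package README does not demonstrate. The compiled `proxy_ip_check` example reduces to this sketch (the `ProxyConfig` options and the host/port defaults are taken from that example):

```typescript
import { ThordataClient, ProxyConfig } from "thordata-js-sdk";

const client = new ThordataClient({ scraperToken: process.env.THORDATA_SCRAPER_TOKEN! });

// Same ProxyConfig options the proxy_ip_check example uses.
const proxy = new ProxyConfig({
  baseUsername: process.env.THORDATA_PROXY_USERNAME!,
  password: process.env.THORDATA_PROXY_PASSWORD!,
  host: "t.pr.thordata.net", // the example's default host
  port: 9999,                // the example's default port
  country: "us",
});

// A plain-HTTP target sidesteps HTTPS certificate/hostname issues through the proxy.
const data = await client.requestViaProxy("http://httpbin.org/ip", proxy);
console.log(data);
```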