firecrawl 1.6.0 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -0
- package/dist/index.cjs +118 -0
- package/dist/index.d.cts +25 -0
- package/dist/index.d.ts +25 -0
- package/dist/index.js +118 -0
- package/dump.rdb +0 -0
- package/package.json +1 -1
- package/src/index.ts +138 -0
package/README.md
CHANGED
@@ -145,6 +145,46 @@ watch.addEventListener("done", state => {
 });
 ```
 
+### Batch scraping multiple URLs
+
+To batch scrape multiple URLs with error handling, use the `batchScrapeUrls` method. It takes the URLs to scrape and optional parameters as arguments. The `params` argument allows you to specify additional options for the batch scrape job, such as the output formats.
+
+```js
+const batchScrapeResponse = await app.batchScrapeUrls(['https://firecrawl.dev', 'https://mendable.ai'], {
+  formats: ['markdown', 'html'],
+})
+```
+
+
+#### Asynchronous batch scrape
+
+To initiate an asynchronous batch scrape, use the `asyncBatchScrapeUrls` method. This method requires the URLs to scrape and optional parameters as inputs. The `params` argument enables you to define various settings for the scrape, such as the output formats. Upon successful initiation, this method returns an ID, which is essential for subsequently checking the status of the batch scrape.
+
+```js
+const asyncBatchScrapeResult = await app.asyncBatchScrapeUrls(['https://firecrawl.dev', 'https://mendable.ai'], { formats: ['markdown', 'html'] });
+```
+
+#### Batch scrape with WebSockets
+
+To use batch scrape with WebSockets, use the `batchScrapeUrlsAndWatch` method. It takes the URLs to scrape and optional parameters as arguments. The `params` argument allows you to specify additional options for the batch scrape job, such as the output formats.
+
+```js
+// Batch scrape multiple URLs with WebSockets:
+const watch = await app.batchScrapeUrlsAndWatch(['https://firecrawl.dev', 'https://mendable.ai'], { formats: ['markdown', 'html'] });
+
+watch.addEventListener("document", doc => {
+  console.log("DOC", doc.detail);
+});
+
+watch.addEventListener("error", err => {
+  console.error("ERR", err.detail.error);
+});
+
+watch.addEventListener("done", state => {
+  console.log("DONE", state.detail.status);
+});
+```
+
 ## Error Handling
 
 The SDK handles errors returned by the Firecrawl API and raises appropriate exceptions. If an error occurs during a request, an exception will be raised with a descriptive error message. The examples above demonstrate how to handle these errors using `try/catch` blocks.
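The README addition documents starting and watching batch scrapes but not checking on one afterwards, even though this release also ships `checkBatchScrapeStatus` (see the dist and src diffs below). A minimal sketch of pairing the async starter with a status check, assuming an `app` instance as in the examples above and that the response unions narrow on `success` the way the SDK's own code does:

```ts
// Sketch only: method names and response fields come from the diffs below,
// not from the README text itself.
const job = await app.asyncBatchScrapeUrls(['https://firecrawl.dev'], { formats: ['markdown'] });
if (job.success && job.id) {
  // Second argument `true` paginates through every page of documents.
  const status = await app.checkBatchScrapeStatus(job.id, true);
  if (status.success) {
    console.log(`${status.status}: ${status.completed}/${status.total} pages scraped`);
  }
}
```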
package/dist/index.cjs
CHANGED
@@ -286,6 +286,124 @@ var FirecrawlApp = class {
     }
     return { success: false, error: "Internal server error." };
   }
+  /**
+   * Initiates a batch scrape job for multiple URLs using the Firecrawl API.
+   * @param urls - The URLs to scrape.
+   * @param params - Additional parameters for the scrape request.
+   * @param pollInterval - Time in seconds between job status checks.
+   * @param idempotencyKey - Optional idempotency key for the request.
+   * @returns The response from the batch scrape operation.
+   */
+  async batchScrapeUrls(urls, params, pollInterval = 2, idempotencyKey) {
+    const headers = this.prepareHeaders(idempotencyKey);
+    let jsonData = { urls, ...params ?? {} };
+    try {
+      const response = await this.postRequest(
+        this.apiUrl + `/v1/batch/scrape`,
+        jsonData,
+        headers
+      );
+      if (response.status === 200) {
+        const id = response.data.id;
+        return this.monitorJobStatus(id, headers, pollInterval);
+      } else {
+        this.handleError(response, "start batch scrape job");
+      }
+    } catch (error) {
+      if (error.response?.data?.error) {
+        throw new FirecrawlError(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ""}`, error.response.status);
+      } else {
+        throw new FirecrawlError(error.message, 500);
+      }
+    }
+    return { success: false, error: "Internal server error." };
+  }
+  async asyncBatchScrapeUrls(urls, params, idempotencyKey) {
+    const headers = this.prepareHeaders(idempotencyKey);
+    let jsonData = { urls, ...params ?? {} };
+    try {
+      const response = await this.postRequest(
+        this.apiUrl + `/v1/batch/scrape`,
+        jsonData,
+        headers
+      );
+      if (response.status === 200) {
+        return response.data;
+      } else {
+        this.handleError(response, "start batch scrape job");
+      }
+    } catch (error) {
+      if (error.response?.data?.error) {
+        throw new FirecrawlError(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ""}`, error.response.status);
+      } else {
+        throw new FirecrawlError(error.message, 500);
+      }
+    }
+    return { success: false, error: "Internal server error." };
+  }
+  /**
+   * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
+   * @param urls - The URLs to scrape.
+   * @param params - Additional parameters for the scrape request.
+   * @param idempotencyKey - Optional idempotency key for the request.
+   * @returns A CrawlWatcher instance to monitor the crawl job.
+   */
+  async batchScrapeUrlsAndWatch(urls, params, idempotencyKey) {
+    const crawl = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey);
+    if (crawl.success && crawl.id) {
+      const id = crawl.id;
+      return new CrawlWatcher(id, this);
+    }
+    throw new FirecrawlError("Batch scrape job failed to start", 400);
+  }
+  /**
+   * Checks the status of a batch scrape job using the Firecrawl API.
+   * @param id - The ID of the batch scrape operation.
+   * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
+   * @returns The response containing the job status.
+   */
+  async checkBatchScrapeStatus(id, getAllData = false) {
+    if (!id) {
+      throw new FirecrawlError("No batch scrape ID provided", 400);
+    }
+    const headers = this.prepareHeaders();
+    try {
+      const response = await this.getRequest(
+        `${this.apiUrl}/v1/batch/scrape/${id}`,
+        headers
+      );
+      if (response.status === 200) {
+        let allData = response.data.data;
+        if (getAllData && response.data.status === "completed") {
+          let statusData = response.data;
+          if ("data" in statusData) {
+            let data = statusData.data;
+            while ("next" in statusData) {
+              statusData = (await this.getRequest(statusData.next, headers)).data;
+              data = data.concat(statusData.data);
+            }
+            allData = data;
+          }
+        }
+        return {
+          success: response.data.success,
+          status: response.data.status,
+          total: response.data.total,
+          completed: response.data.completed,
+          creditsUsed: response.data.creditsUsed,
+          expiresAt: new Date(response.data.expiresAt),
+          next: response.data.next,
+          data: allData,
+          error: response.data.error
+        };
+      } else {
+        this.handleError(response, "check batch scrape status");
+      }
+    } catch (error) {
+      throw new FirecrawlError(error.message, 500);
+    }
+    return { success: false, error: "Internal server error." };
+  }
   /**
    * Prepares the headers for an API request.
    * @param idempotencyKey - Optional key to ensure idempotency.
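Worth noting from the compiled output above: `batchScrapeUrls` hands the job id to `monitorJobStatus` and resolves only once the job finishes, while `asyncBatchScrapeUrls` returns the POST response (carrying the job id) immediately. A short sketch of the choice, with `app` and the URL list as assumed placeholders:

```ts
const urls = ['https://firecrawl.dev', 'https://mendable.ai'];

// Blocking: checks job status every 5 seconds (pollInterval) until completion.
const finished = await app.batchScrapeUrls(urls, { formats: ['markdown'] }, 5);

// Non-blocking: resolves as soon as the job is created; poll it later yourself.
const started = await app.asyncBatchScrapeUrls(urls, { formats: ['markdown'] });
```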
package/dist/index.d.cts
CHANGED
@@ -255,6 +255,31 @@ declare class FirecrawlApp {
      * @returns The response from the map operation.
      */
     mapUrl(url: string, params?: MapParams): Promise<MapResponse | ErrorResponse>;
+    /**
+     * Initiates a batch scrape job for multiple URLs using the Firecrawl API.
+     * @param urls - The URLs to scrape.
+     * @param params - Additional parameters for the scrape request.
+     * @param pollInterval - Time in seconds between job status checks.
+     * @param idempotencyKey - Optional idempotency key for the request.
+     * @returns The response from the batch scrape operation.
+     */
+    batchScrapeUrls(urls: string[], params?: ScrapeParams, pollInterval?: number, idempotencyKey?: string): Promise<CrawlStatusResponse | ErrorResponse>;
+    asyncBatchScrapeUrls(urls: string[], params?: ScrapeParams, idempotencyKey?: string): Promise<CrawlResponse | ErrorResponse>;
+    /**
+     * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
+     * @param urls - The URLs to scrape.
+     * @param params - Additional parameters for the scrape request.
+     * @param idempotencyKey - Optional idempotency key for the request.
+     * @returns A CrawlWatcher instance to monitor the crawl job.
+     */
+    batchScrapeUrlsAndWatch(urls: string[], params?: ScrapeParams, idempotencyKey?: string): Promise<CrawlWatcher>;
+    /**
+     * Checks the status of a batch scrape job using the Firecrawl API.
+     * @param id - The ID of the batch scrape operation.
+     * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
+     * @returns The response containing the job status.
+     */
+    checkBatchScrapeStatus(id?: string, getAllData?: boolean): Promise<CrawlStatusResponse | ErrorResponse>;
     /**
      * Prepares the headers for an API request.
      * @param idempotencyKey - Optional key to ensure idempotency.
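A typed usage sketch against these declarations. The import path and constructor options are assumptions based on the package name and the SDK's existing README, and the union return types force callers to rule out `ErrorResponse` before reading job fields:

```ts
import FirecrawlApp from 'firecrawl'; // assumed default export of the package under diff

const app = new FirecrawlApp({ apiKey: 'fc-YOUR-API-KEY' });

const result = await app.batchScrapeUrls(
  ['https://firecrawl.dev', 'https://mendable.ai'],
  { formats: ['markdown', 'html'] },
);

if (result.success) {
  // CrawlStatusResponse branch
  console.log(`${result.completed}/${result.total} pages, ${result.creditsUsed} credits`);
} else {
  // ErrorResponse branch
  console.error(result.error);
}
```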
package/dist/index.d.ts
CHANGED
@@ -255,6 +255,31 @@ declare class FirecrawlApp {
      * @returns The response from the map operation.
      */
     mapUrl(url: string, params?: MapParams): Promise<MapResponse | ErrorResponse>;
+    /**
+     * Initiates a batch scrape job for multiple URLs using the Firecrawl API.
+     * @param urls - The URLs to scrape.
+     * @param params - Additional parameters for the scrape request.
+     * @param pollInterval - Time in seconds between job status checks.
+     * @param idempotencyKey - Optional idempotency key for the request.
+     * @returns The response from the batch scrape operation.
+     */
+    batchScrapeUrls(urls: string[], params?: ScrapeParams, pollInterval?: number, idempotencyKey?: string): Promise<CrawlStatusResponse | ErrorResponse>;
+    asyncBatchScrapeUrls(urls: string[], params?: ScrapeParams, idempotencyKey?: string): Promise<CrawlResponse | ErrorResponse>;
+    /**
+     * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
+     * @param urls - The URLs to scrape.
+     * @param params - Additional parameters for the scrape request.
+     * @param idempotencyKey - Optional idempotency key for the request.
+     * @returns A CrawlWatcher instance to monitor the crawl job.
+     */
+    batchScrapeUrlsAndWatch(urls: string[], params?: ScrapeParams, idempotencyKey?: string): Promise<CrawlWatcher>;
+    /**
+     * Checks the status of a batch scrape job using the Firecrawl API.
+     * @param id - The ID of the batch scrape operation.
+     * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
+     * @returns The response containing the job status.
+     */
+    checkBatchScrapeStatus(id?: string, getAllData?: boolean): Promise<CrawlStatusResponse | ErrorResponse>;
     /**
      * Prepares the headers for an API request.
      * @param idempotencyKey - Optional key to ensure idempotency.
package/dist/index.js
CHANGED
@@ -250,6 +250,124 @@ var FirecrawlApp = class {
     }
     return { success: false, error: "Internal server error." };
   }
+  /**
+   * Initiates a batch scrape job for multiple URLs using the Firecrawl API.
+   * @param urls - The URLs to scrape.
+   * @param params - Additional parameters for the scrape request.
+   * @param pollInterval - Time in seconds between job status checks.
+   * @param idempotencyKey - Optional idempotency key for the request.
+   * @returns The response from the batch scrape operation.
+   */
+  async batchScrapeUrls(urls, params, pollInterval = 2, idempotencyKey) {
+    const headers = this.prepareHeaders(idempotencyKey);
+    let jsonData = { urls, ...params ?? {} };
+    try {
+      const response = await this.postRequest(
+        this.apiUrl + `/v1/batch/scrape`,
+        jsonData,
+        headers
+      );
+      if (response.status === 200) {
+        const id = response.data.id;
+        return this.monitorJobStatus(id, headers, pollInterval);
+      } else {
+        this.handleError(response, "start batch scrape job");
+      }
+    } catch (error) {
+      if (error.response?.data?.error) {
+        throw new FirecrawlError(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ""}`, error.response.status);
+      } else {
+        throw new FirecrawlError(error.message, 500);
+      }
+    }
+    return { success: false, error: "Internal server error." };
+  }
+  async asyncBatchScrapeUrls(urls, params, idempotencyKey) {
+    const headers = this.prepareHeaders(idempotencyKey);
+    let jsonData = { urls, ...params ?? {} };
+    try {
+      const response = await this.postRequest(
+        this.apiUrl + `/v1/batch/scrape`,
+        jsonData,
+        headers
+      );
+      if (response.status === 200) {
+        return response.data;
+      } else {
+        this.handleError(response, "start batch scrape job");
+      }
+    } catch (error) {
+      if (error.response?.data?.error) {
+        throw new FirecrawlError(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ""}`, error.response.status);
+      } else {
+        throw new FirecrawlError(error.message, 500);
+      }
+    }
+    return { success: false, error: "Internal server error." };
+  }
+  /**
+   * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
+   * @param urls - The URLs to scrape.
+   * @param params - Additional parameters for the scrape request.
+   * @param idempotencyKey - Optional idempotency key for the request.
+   * @returns A CrawlWatcher instance to monitor the crawl job.
+   */
+  async batchScrapeUrlsAndWatch(urls, params, idempotencyKey) {
+    const crawl = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey);
+    if (crawl.success && crawl.id) {
+      const id = crawl.id;
+      return new CrawlWatcher(id, this);
+    }
+    throw new FirecrawlError("Batch scrape job failed to start", 400);
+  }
+  /**
+   * Checks the status of a batch scrape job using the Firecrawl API.
+   * @param id - The ID of the batch scrape operation.
+   * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
+   * @returns The response containing the job status.
+   */
+  async checkBatchScrapeStatus(id, getAllData = false) {
+    if (!id) {
+      throw new FirecrawlError("No batch scrape ID provided", 400);
+    }
+    const headers = this.prepareHeaders();
+    try {
+      const response = await this.getRequest(
+        `${this.apiUrl}/v1/batch/scrape/${id}`,
+        headers
+      );
+      if (response.status === 200) {
+        let allData = response.data.data;
+        if (getAllData && response.data.status === "completed") {
+          let statusData = response.data;
+          if ("data" in statusData) {
+            let data = statusData.data;
+            while ("next" in statusData) {
+              statusData = (await this.getRequest(statusData.next, headers)).data;
+              data = data.concat(statusData.data);
+            }
+            allData = data;
+          }
+        }
+        return {
+          success: response.data.success,
+          status: response.data.status,
+          total: response.data.total,
+          completed: response.data.completed,
+          creditsUsed: response.data.creditsUsed,
+          expiresAt: new Date(response.data.expiresAt),
+          next: response.data.next,
+          data: allData,
+          error: response.data.error
+        };
+      } else {
+        this.handleError(response, "check batch scrape status");
+      }
+    } catch (error) {
+      throw new FirecrawlError(error.message, 500);
+    }
+    return { success: false, error: "Internal server error." };
+  }
   /**
    * Prepares the headers for an API request.
    * @param idempotencyKey - Optional key to ensure idempotency.
package/dump.rdb
CHANGED
Binary file
package/package.json
CHANGED
package/src/index.ts
CHANGED
@@ -493,6 +493,144 @@ export default class FirecrawlApp {
     return { success: false, error: "Internal server error." };
   }
 
+  /**
+   * Initiates a batch scrape job for multiple URLs using the Firecrawl API.
+   * @param urls - The URLs to scrape.
+   * @param params - Additional parameters for the scrape request.
+   * @param pollInterval - Time in seconds between job status checks.
+   * @param idempotencyKey - Optional idempotency key for the request.
+   * @returns The response from the batch scrape operation.
+   */
+  async batchScrapeUrls(
+    urls: string[],
+    params?: ScrapeParams,
+    pollInterval: number = 2,
+    idempotencyKey?: string
+  ): Promise<CrawlStatusResponse | ErrorResponse> {
+    const headers = this.prepareHeaders(idempotencyKey);
+    let jsonData: any = { urls, ...(params ?? {}) };
+    try {
+      const response: AxiosResponse = await this.postRequest(
+        this.apiUrl + `/v1/batch/scrape`,
+        jsonData,
+        headers
+      );
+      if (response.status === 200) {
+        const id: string = response.data.id;
+        return this.monitorJobStatus(id, headers, pollInterval);
+      } else {
+        this.handleError(response, "start batch scrape job");
+      }
+    } catch (error: any) {
+      if (error.response?.data?.error) {
+        throw new FirecrawlError(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`, error.response.status);
+      } else {
+        throw new FirecrawlError(error.message, 500);
+      }
+    }
+    return { success: false, error: "Internal server error." };
+  }
+
+  async asyncBatchScrapeUrls(
+    urls: string[],
+    params?: ScrapeParams,
+    idempotencyKey?: string
+  ): Promise<CrawlResponse | ErrorResponse> {
+    const headers = this.prepareHeaders(idempotencyKey);
+    let jsonData: any = { urls, ...(params ?? {}) };
+    try {
+      const response: AxiosResponse = await this.postRequest(
+        this.apiUrl + `/v1/batch/scrape`,
+        jsonData,
+        headers
+      );
+      if (response.status === 200) {
+        return response.data;
+      } else {
+        this.handleError(response, "start batch scrape job");
+      }
+    } catch (error: any) {
+      if (error.response?.data?.error) {
+        throw new FirecrawlError(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`, error.response.status);
+      } else {
+        throw new FirecrawlError(error.message, 500);
+      }
+    }
+    return { success: false, error: "Internal server error." };
+  }
+
+  /**
+   * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
+   * @param urls - The URLs to scrape.
+   * @param params - Additional parameters for the scrape request.
+   * @param idempotencyKey - Optional idempotency key for the request.
+   * @returns A CrawlWatcher instance to monitor the crawl job.
+   */
+  async batchScrapeUrlsAndWatch(
+    urls: string[],
+    params?: ScrapeParams,
+    idempotencyKey?: string,
+  ) {
+    const crawl = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey);
+
+    if (crawl.success && crawl.id) {
+      const id = crawl.id;
+      return new CrawlWatcher(id, this);
+    }
+
+    throw new FirecrawlError("Batch scrape job failed to start", 400);
+  }
+
+  /**
+   * Checks the status of a batch scrape job using the Firecrawl API.
+   * @param id - The ID of the batch scrape operation.
+   * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
+   * @returns The response containing the job status.
+   */
+  async checkBatchScrapeStatus(id?: string, getAllData = false): Promise<CrawlStatusResponse | ErrorResponse> {
+    if (!id) {
+      throw new FirecrawlError("No batch scrape ID provided", 400);
+    }
+
+    const headers: AxiosRequestHeaders = this.prepareHeaders();
+    try {
+      const response: AxiosResponse = await this.getRequest(
+        `${this.apiUrl}/v1/batch/scrape/${id}`,
+        headers
+      );
+      if (response.status === 200) {
+        let allData = response.data.data;
+        if (getAllData && response.data.status === "completed") {
+          let statusData = response.data;
+          if ("data" in statusData) {
+            let data = statusData.data;
+            while ('next' in statusData) {
+              statusData = (await this.getRequest(statusData.next, headers)).data;
+              data = data.concat(statusData.data);
+            }
+            allData = data;
+          }
+        }
+        return ({
+          success: response.data.success,
+          status: response.data.status,
+          total: response.data.total,
+          completed: response.data.completed,
+          creditsUsed: response.data.creditsUsed,
+          expiresAt: new Date(response.data.expiresAt),
+          next: response.data.next,
+          data: allData,
+          error: response.data.error,
+        })
+      } else {
+        this.handleError(response, "check batch scrape status");
+      }
+    } catch (error: any) {
+      throw new FirecrawlError(error.message, 500);
+    }
+    return { success: false, error: "Internal server error." };
+  }
+
   /**
    * Prepares the headers for an API request.
    * @param idempotencyKey - Optional key to ensure idempotency.
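All four methods in the source above surface failures as `FirecrawlError`, either rethrown from the axios error with its status code or raised directly (for example the 400 when no job id comes back). A sketch of catching them around the WebSocket variant, reusing the assumed `app` instance from earlier:

```ts
try {
  const watch = await app.batchScrapeUrlsAndWatch(['https://firecrawl.dev'], { formats: ['markdown'] });
  watch.addEventListener('done', (state) => console.log('DONE', state.detail.status));
} catch (err) {
  // e.g. "Batch scrape job failed to start" (400), or the formatted
  // "Request failed with status code ..." message from the API error path.
  console.error((err as Error).message);
}
```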