firecrawl 1.8.1 → 1.8.3
This diff reflects the publicly released contents of the two package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/index.cjs +3 -2
- package/dist/index.d.cts +6 -2
- package/dist/index.d.ts +6 -2
- package/dist/index.js +3 -2
- package/package.json +1 -1
- package/src/index.ts +8 -3
package/dist/index.cjs
CHANGED
@@ -292,11 +292,12 @@ var FirecrawlApp = class {
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
-  async batchScrapeUrls(urls, params, pollInterval = 2, idempotencyKey) {
+  async batchScrapeUrls(urls, params, pollInterval = 2, idempotencyKey, webhook) {
     const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData = { urls, ...params ?? {} };
+    let jsonData = { urls, ...params ?? {}, webhook };
     try {
       const response = await this.postRequest(
         this.apiUrl + `/v1/batch/scrape`,
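
In the compiled output above (and in dist/index.js below), the new webhook argument is simply merged into the JSON body that batchScrapeUrls POSTs to /v1/batch/scrape. A minimal sketch of that merged body, with illustrative URLs, an assumed `formats` scrape option, and a placeholder token:

```ts
// Sketch of the merged `jsonData` built by the updated batchScrapeUrls.
// URLs, the `formats` option, and the webhook target are illustrative only.
const jsonData = {
  urls: ["https://example.com/a", "https://example.com/b"],
  formats: ["markdown"],                 // spread in from params (ScrapeParams)
  webhook: {                             // the new optional webhook argument
    url: "https://example.com/firecrawl-hook",
    headers: { Authorization: "Bearer <token>" },
  },
};
```

Because `webhook` is merged after the spread of `params`, the explicit argument takes precedence if both carry a `webhook` key.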
package/dist/index.d.cts
CHANGED
@@ -143,7 +143,10 @@ interface CrawlParams {
     allowExternalLinks?: boolean;
     ignoreSitemap?: boolean;
     scrapeOptions?: CrawlScrapeOptions;
-    webhook?: string
+    webhook?: string | {
+        url: string;
+        headers?: Record<string, string>;
+    };
     deduplicateSimilarURLs?: boolean;
     ignoreQueryParameters?: boolean;
 }
@@ -300,9 +303,10 @@ declare class FirecrawlApp {
      * @param params - Additional parameters for the scrape request.
      * @param pollInterval - Time in seconds for job status checks.
      * @param idempotencyKey - Optional idempotency key for the request.
+     * @param webhook - Optional webhook for the batch scrape.
      * @returns The response from the crawl operation.
      */
-    batchScrapeUrls(urls: string[], params?: ScrapeParams, pollInterval?: number, idempotencyKey?: string): Promise<BatchScrapeStatusResponse | ErrorResponse>;
+    batchScrapeUrls(urls: string[], params?: ScrapeParams, pollInterval?: number, idempotencyKey?: string, webhook?: CrawlParams["webhook"]): Promise<BatchScrapeStatusResponse | ErrorResponse>;
     asyncBatchScrapeUrls(urls: string[], params?: ScrapeParams, idempotencyKey?: string): Promise<BatchScrapeResponse | ErrorResponse>;
     /**
      * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
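
The type change above means `CrawlParams["webhook"]` (now also reused by batchScrapeUrls) accepts either the old bare URL string or an object that can carry custom headers. A minimal sketch of both forms, assuming the `CrawlParams` type is importable from the published firecrawl package; URLs and header values are placeholders:

```ts
import type { CrawlParams } from "firecrawl"; // assumption: type import from the published package

// Old form: a bare callback URL string (still accepted).
const simpleWebhook: CrawlParams["webhook"] = "https://example.com/firecrawl-hook";

// New form: the callback URL plus optional custom headers,
// e.g. for authenticating webhook deliveries. Values are placeholders.
const webhookWithHeaders: CrawlParams["webhook"] = {
  url: "https://example.com/firecrawl-hook",
  headers: { Authorization: "Bearer <token>" },
};
```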
package/dist/index.d.ts
CHANGED
@@ -143,7 +143,10 @@ interface CrawlParams {
     allowExternalLinks?: boolean;
     ignoreSitemap?: boolean;
     scrapeOptions?: CrawlScrapeOptions;
-    webhook?: string
+    webhook?: string | {
+        url: string;
+        headers?: Record<string, string>;
+    };
     deduplicateSimilarURLs?: boolean;
     ignoreQueryParameters?: boolean;
 }
@@ -300,9 +303,10 @@ declare class FirecrawlApp {
      * @param params - Additional parameters for the scrape request.
      * @param pollInterval - Time in seconds for job status checks.
      * @param idempotencyKey - Optional idempotency key for the request.
+     * @param webhook - Optional webhook for the batch scrape.
      * @returns The response from the crawl operation.
      */
-    batchScrapeUrls(urls: string[], params?: ScrapeParams, pollInterval?: number, idempotencyKey?: string): Promise<BatchScrapeStatusResponse | ErrorResponse>;
+    batchScrapeUrls(urls: string[], params?: ScrapeParams, pollInterval?: number, idempotencyKey?: string, webhook?: CrawlParams["webhook"]): Promise<BatchScrapeStatusResponse | ErrorResponse>;
     asyncBatchScrapeUrls(urls: string[], params?: ScrapeParams, idempotencyKey?: string): Promise<BatchScrapeResponse | ErrorResponse>;
     /**
      * Initiates a batch scrape job and returns a CrawlWatcher to monitor the job via WebSocket.
package/dist/index.js
CHANGED
@@ -256,11 +256,12 @@ var FirecrawlApp = class {
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
-  async batchScrapeUrls(urls, params, pollInterval = 2, idempotencyKey) {
+  async batchScrapeUrls(urls, params, pollInterval = 2, idempotencyKey, webhook) {
     const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData = { urls, ...params ?? {} };
+    let jsonData = { urls, ...params ?? {}, webhook };
     try {
       const response = await this.postRequest(
         this.apiUrl + `/v1/batch/scrape`,
package/package.json
CHANGED
package/src/index.ts
CHANGED
@@ -153,7 +153,10 @@ export interface CrawlParams {
   allowExternalLinks?: boolean;
   ignoreSitemap?: boolean;
   scrapeOptions?: CrawlScrapeOptions;
-  webhook?: string
+  webhook?: string | {
+    url: string;
+    headers?: Record<string, string>;
+  };
   deduplicateSimilarURLs?: boolean;
   ignoreQueryParameters?: boolean;
 }
@@ -540,16 +543,18 @@ export default class FirecrawlApp {
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
   async batchScrapeUrls(
     urls: string[],
     params?: ScrapeParams,
     pollInterval: number = 2,
-    idempotencyKey?: string
+    idempotencyKey?: string,
+    webhook?: CrawlParams["webhook"],
   ): Promise<BatchScrapeStatusResponse | ErrorResponse> {
     const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData: any = { urls, ...(params ?? {}) };
+    let jsonData: any = { urls, ...(params ?? {}), webhook };
     try {
       const response: AxiosResponse = await this.postRequest(
         this.apiUrl + `/v1/batch/scrape`,
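
Taken together, callers can now pass the webhook as a fifth argument to batchScrapeUrls. A minimal usage sketch, assuming the class is the default export of the published firecrawl package and that the constructor takes an apiKey option; the URLs, the `formats` scrape option, and the token are placeholders:

```ts
import FirecrawlApp from "firecrawl"; // assumption: default export of the published package

async function main() {
  const app = new FirecrawlApp({ apiKey: "fc-..." }); // assumed constructor option

  // Batch scrape two pages and have results delivered to a webhook that
  // requires an Authorization header. All concrete values are placeholders.
  const result = await app.batchScrapeUrls(
    ["https://example.com/a", "https://example.com/b"],
    { formats: ["markdown"] },   // ScrapeParams; `formats` assumed for illustration
    2,                           // pollInterval in seconds (the default)
    undefined,                   // no idempotency key
    {
      url: "https://example.com/firecrawl-hook",
      headers: { Authorization: "Bearer <token>" },
    },
  );

  console.log(result); // BatchScrapeStatusResponse on success, ErrorResponse on failure
}

main();
```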