firecrawl 4.3.3 → 4.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@ var require_package = __commonJS({
 "package.json"(exports, module) {
 module.exports = {
 name: "@mendable/firecrawl-js",
-version: "4.3.3",
+version: "4.3.4",
 description: "JavaScript SDK for Firecrawl API",
 main: "dist/index.js",
 types: "dist/index.d.ts",
package/dist/index.cjs CHANGED
@@ -35,7 +35,7 @@ var require_package = __commonJS({
 "package.json"(exports2, module2) {
 module2.exports = {
 name: "@mendable/firecrawl-js",
-version: "4.3.3",
+version: "4.3.4",
 description: "JavaScript SDK for Firecrawl API",
 main: "dist/index.js",
 types: "dist/index.d.ts",
@@ -697,6 +697,7 @@ function prepareExtractPayload(args) {
 if (args.showSources != null) body.showSources = args.showSources;
 if (args.ignoreInvalidURLs != null) body.ignoreInvalidURLs = args.ignoreInvalidURLs;
 if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
+if (args.agent) body.agent = args.agent;
 if (args.scrapeOptions) {
 ensureValidScrapeOptions(args.scrapeOptions);
 body.scrapeOptions = args.scrapeOptions;
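For context, a minimal sketch of what the changed payload builder produces when an agent is supplied. prepareExtractPayload is internal to the SDK, and the URL below is a placeholder; only the agent pass-through itself is taken from this diff.

    // Sketch, not part of the package: illustrates the new `agent` handling added in 4.3.4.
    const body = prepareExtractPayload({
      urls: ["https://example.com"],   // placeholder input
      showSources: true,
      agent: { model: "FIRE-1" },      // new: copied onto the request body unchanged
    });
    // body → { urls: ["https://example.com"], showSources: true, agent: { model: "FIRE-1" } }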
package/dist/index.d.cts CHANGED
@@ -287,6 +287,9 @@ interface ExtractResponse$1 {
 sources?: Record<string, unknown>;
 expiresAt?: string;
 }
+interface AgentOptions$1 {
+model: "FIRE-1";
+}
 interface ConcurrencyCheck {
 concurrency: number;
 maxConcurrency: number;
@@ -399,6 +402,7 @@ declare function prepareExtractPayload(args: {
 scrapeOptions?: ScrapeOptions;
 ignoreInvalidURLs?: boolean;
 integration?: string;
+agent?: AgentOptions$1;
 }): Record<string, unknown>;
 declare function startExtract(http: HttpClient, args: Parameters<typeof prepareExtractPayload>[0]): Promise<ExtractResponse$1>;
@@ -1498,4 +1502,4 @@ declare class Firecrawl extends FirecrawlClient {
 get v1(): FirecrawlApp;
 }

-export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
+export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
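In consumer code the new type surfaces under its public alias: the bundled declarations name it AgentOptions$1 internally but re-export it as AgentOptions. A minimal sketch, assuming the package's published entry point:

    import { type AgentOptions } from "firecrawl";

    // "FIRE-1" is the only model the type currently permits.
    const agent: AgentOptions = { model: "FIRE-1" };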
package/dist/index.d.ts CHANGED
@@ -287,6 +287,9 @@ interface ExtractResponse$1 {
 sources?: Record<string, unknown>;
 expiresAt?: string;
 }
+interface AgentOptions$1 {
+model: "FIRE-1";
+}
 interface ConcurrencyCheck {
 concurrency: number;
 maxConcurrency: number;
@@ -399,6 +402,7 @@ declare function prepareExtractPayload(args: {
 scrapeOptions?: ScrapeOptions;
 ignoreInvalidURLs?: boolean;
 integration?: string;
+agent?: AgentOptions$1;
 }): Record<string, unknown>;
 declare function startExtract(http: HttpClient, args: Parameters<typeof prepareExtractPayload>[0]): Promise<ExtractResponse$1>;
@@ -1498,4 +1502,4 @@ declare class Firecrawl extends FirecrawlClient {
 get v1(): FirecrawlApp;
 }

-export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
+export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
 import {
 require_package
-} from "./chunk-7RR2L6KO.js";
+} from "./chunk-TIJLLR5H.js";

 // src/v2/utils/httpClient.ts
 import axios from "axios";
@@ -581,6 +581,7 @@ function prepareExtractPayload(args) {
 if (args.showSources != null) body.showSources = args.showSources;
 if (args.ignoreInvalidURLs != null) body.ignoreInvalidURLs = args.ignoreInvalidURLs;
 if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
+if (args.agent) body.agent = args.agent;
 if (args.scrapeOptions) {
 ensureValidScrapeOptions(args.scrapeOptions);
 body.scrapeOptions = args.scrapeOptions;
@@ -1064,7 +1065,7 @@ var FirecrawlApp = class {
 if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
 return process.env.npm_package_version;
 }
-const packageJson = await import("./package-FLU7SABK.js");
+const packageJson = await import("./package-3EYW3PGP.js");
 return packageJson.default.version;
 } catch (error) {
 const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
@@ -1,4 +1,4 @@
 import {
 require_package
-} from "./chunk-7RR2L6KO.js";
+} from "./chunk-TIJLLR5H.js";
 export default require_package();
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "firecrawl",
-"version": "4.3.3",
+"version": "4.3.4",
 "description": "JavaScript SDK for Firecrawl API",
 "main": "dist/index.js",
 "types": "dist/index.d.ts",
@@ -1,4 +1,4 @@
-import { type ExtractResponse, type ScrapeOptions } from "../types";
+import { type ExtractResponse, type ScrapeOptions, type AgentOptions } from "../types";
 import { HttpClient } from "../utils/httpClient";
 import { ensureValidScrapeOptions } from "../utils/validation";
 import { normalizeAxiosError, throwForBadResponse } from "../utils/errorHandler";
@@ -16,6 +16,7 @@ function prepareExtractPayload(args: {
 scrapeOptions?: ScrapeOptions;
 ignoreInvalidURLs?: boolean;
 integration?: string;
+agent?: AgentOptions;
 }): Record<string, unknown> {
 const body: Record<string, unknown> = {};
 if (args.urls) body.urls = args.urls;
@@ -31,6 +32,7 @@ function prepareExtractPayload(args: {
 if (args.showSources != null) body.showSources = args.showSources;
 if (args.ignoreInvalidURLs != null) body.ignoreInvalidURLs = args.ignoreInvalidURLs;
 if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
+if (args.agent) body.agent = args.agent;
 if (args.scrapeOptions) {
 ensureValidScrapeOptions(args.scrapeOptions);
 body.scrapeOptions = args.scrapeOptions;
package/src/v2/types.ts CHANGED
@@ -337,6 +337,10 @@ export interface ExtractResponse {
 expiresAt?: string;
 }

+export interface AgentOptions {
+model: "FIRE-1";
+}
+
 export interface ConcurrencyCheck {
 concurrency: number;
 maxConcurrency: number;
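Taken together, 4.3.4 lets extract callers request the FIRE-1 agent. A hedged usage sketch follows; the extract method name, its argument shape, and the apiKey client option are assumptions inferred from the exported types, since only the payload builder and type declarations appear in this diff.

    import Firecrawl from "firecrawl";

    async function main() {
      // apiKey option assumed; FirecrawlClientOptions is exported but its fields are not shown here.
      const firecrawl = new Firecrawl({ apiKey: "your-api-key" });

      // Assumed method: an extract call that forwards the same fields as
      // prepareExtractPayload (urls, scrapeOptions, ignoreInvalidURLs, agent, ...).
      const result = await firecrawl.extract({
        urls: ["https://example.com/pricing"], // placeholder URL
        agent: { model: "FIRE-1" },            // new in 4.3.4
      });
      console.log(result);
    }

    main().catch(console.error);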