firecrawl 4.8.2 → 4.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@ var require_package = __commonJS({
  "package.json"(exports, module) {
  module.exports = {
  name: "@mendable/firecrawl-js",
- version: "4.8.2",
+ version: "4.9.0",
  description: "JavaScript SDK for Firecrawl API",
  main: "dist/index.js",
  types: "dist/index.d.ts",
package/dist/index.cjs CHANGED
@@ -35,7 +35,7 @@ var require_package = __commonJS({
  "package.json"(exports2, module2) {
  module2.exports = {
  name: "@mendable/firecrawl-js",
- version: "4.8.2",
+ version: "4.9.0",
  description: "JavaScript SDK for Firecrawl API",
  main: "dist/index.js",
  types: "dist/index.d.ts",
@@ -150,7 +150,7 @@ var HttpClient = class {
  this.backoffFactor = options.backoffFactor ?? 0.5;
  this.instance = import_axios.default.create({
  baseURL: this.apiUrl,
- timeout: options.timeoutMs ?? 6e4,
+ timeout: options.timeoutMs ?? 3e5,
  headers: {
  "Content-Type": "application/json",
  Authorization: `Bearer ${this.apiKey}`
@@ -751,6 +751,57 @@ async function extract(http, args) {
  return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
  }

+ // src/v2/methods/agent.ts
+ var import_zod_to_json_schema3 = require("zod-to-json-schema");
+ function prepareAgentPayload(args) {
+ const body = {};
+ if (args.urls) body.urls = args.urls;
+ body.prompt = args.prompt;
+ if (args.schema != null) {
+ const s = args.schema;
+ const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
+ body.schema = isZod ? (0, import_zod_to_json_schema3.zodToJsonSchema)(s) : args.schema;
+ }
+ if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
+ return body;
+ }
+ async function startAgent(http, args) {
+ const payload = prepareAgentPayload(args);
+ try {
+ const res = await http.post("/v2/agent", payload);
+ if (res.status !== 200) throwForBadResponse(res, "agent");
+ return res.data;
+ } catch (err) {
+ if (err?.isAxiosError) return normalizeAxiosError(err, "agent");
+ throw err;
+ }
+ }
+ async function getAgentStatus(http, jobId) {
+ try {
+ const res = await http.get(`/v2/agent/${jobId}`);
+ if (res.status !== 200) throwForBadResponse(res, "agent status");
+ return res.data;
+ } catch (err) {
+ if (err?.isAxiosError) return normalizeAxiosError(err, "agent status");
+ throw err;
+ }
+ }
+ async function waitAgent(http, jobId, pollInterval = 2, timeout) {
+ const start = Date.now();
+ while (true) {
+ const status = await getAgentStatus(http, jobId);
+ if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
+ if (timeout != null && Date.now() - start > timeout * 1e3) return status;
+ await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+ }
+ }
+ async function agent(http, args) {
+ const started = await startAgent(http, args);
+ const jobId = started.id;
+ if (!jobId) return started;
+ return waitAgent(http, jobId, args.pollInterval ?? 2, args.timeout);
+ }
+
  // src/v2/methods/usage.ts
  async function getConcurrency(http) {
  try {
@@ -1212,6 +1263,30 @@ var FirecrawlClient = class {
  async extract(args) {
  return extract(this.http, args);
  }
+ // Agent
+ /**
+ * Start an agent job (async).
+ * @param args Agent request (urls, prompt, schema).
+ * @returns Job id or processing state.
+ */
+ async startAgent(args) {
+ return startAgent(this.http, args);
+ }
+ /**
+ * Get agent job status/data.
+ * @param jobId Agent job id.
+ */
+ async getAgentStatus(jobId) {
+ return getAgentStatus(this.http, jobId);
+ }
+ /**
+ * Convenience waiter: start an agent and poll until it finishes.
+ * @param args Agent request plus waiter controls (pollInterval, timeout seconds).
+ * @returns Final agent response.
+ */
+ async agent(args) {
+ return agent(this.http, args);
+ }
  // Usage
  /** Current concurrency usage. */
  async getConcurrency() {
@@ -1251,7 +1326,7 @@ var FirecrawlClient = class {
  // src/v1/index.ts
  var import_axios3 = __toESM(require("axios"), 1);
  var zt2 = require("zod");
- var import_zod_to_json_schema3 = require("zod-to-json-schema");
+ var import_zod_to_json_schema4 = require("zod-to-json-schema");

  // node_modules/typescript-event-target/dist/index.mjs
  var e = class extends EventTarget {
@@ -1323,7 +1398,7 @@ var FirecrawlApp = class {
  if (jsonData?.extract?.schema) {
  let schema = jsonData.extract.schema;
  try {
- schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
+ schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1337,7 +1412,7 @@ var FirecrawlApp = class {
  if (jsonData?.jsonOptions?.schema) {
  let schema = jsonData.jsonOptions.schema;
  try {
- schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
+ schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1400,7 +1475,7 @@ var FirecrawlApp = class {
  if (jsonData?.scrapeOptions?.extract?.schema) {
  let schema = jsonData.scrapeOptions.extract.schema;
  try {
- schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
+ schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1669,7 +1744,7 @@ var FirecrawlApp = class {
  if (jsonData?.extract?.schema) {
  let schema = jsonData.extract.schema;
  try {
- schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
+ schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1683,7 +1758,7 @@ var FirecrawlApp = class {
  if (jsonData?.jsonOptions?.schema) {
  let schema = jsonData.jsonOptions.schema;
  try {
- schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
+ schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1862,7 +1937,7 @@ var FirecrawlApp = class {
  jsonSchema = void 0;
  } else {
  try {
- jsonSchema = (0, import_zod_to_json_schema3.zodToJsonSchema)(params.schema);
+ jsonSchema = (0, import_zod_to_json_schema4.zodToJsonSchema)(params.schema);
  } catch (_) {
  jsonSchema = params.schema;
  }
@@ -1926,7 +2001,7 @@ var FirecrawlApp = class {
  jsonSchema = void 0;
  } else {
  try {
- jsonSchema = (0, import_zod_to_json_schema3.zodToJsonSchema)(params.schema);
+ jsonSchema = (0, import_zod_to_json_schema4.zodToJsonSchema)(params.schema);
  } catch (_) {
  jsonSchema = params.schema;
  }
@@ -2217,7 +2292,7 @@ var FirecrawlApp = class {
  if (jsonData?.jsonOptions?.schema) {
  let schema = jsonData.jsonOptions.schema;
  try {
- schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
+ schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
  } catch (error) {
  }
  jsonData = {
package/dist/index.d.cts CHANGED
@@ -283,10 +283,13 @@ interface DocumentMetadata {
  scrapeId?: string;
  numPages?: number;
  contentType?: string;
+ timezone?: string;
  proxyUsed?: 'basic' | 'stealth';
  cacheState?: 'hit' | 'miss';
  cachedAt?: string;
  creditsUsed?: number;
+ concurrencyLimited?: boolean;
+ concurrencyQueueDurationMs?: number;
  error?: string;
  [key: string]: unknown;
  }
@@ -446,8 +449,21 @@ interface ExtractResponse$1 {
  expiresAt?: string;
  creditsUsed?: number;
  }
+ interface AgentResponse {
+ success: boolean;
+ id: string;
+ error?: string;
+ }
+ interface AgentStatusResponse {
+ success: boolean;
+ status: 'processing' | 'completed' | 'failed';
+ error?: string;
+ data?: unknown;
+ expiresAt: string;
+ creditsUsed?: number;
+ }
  interface AgentOptions$1 {
- model: 'FIRE-1';
+ model: 'FIRE-1' | 'v3-beta';
  }
  interface ConcurrencyCheck {
  concurrency: number;
@@ -565,6 +581,14 @@ declare function prepareExtractPayload(args: {
  }): Record<string, unknown>;
  declare function startExtract(http: HttpClient, args: Parameters<typeof prepareExtractPayload>[0]): Promise<ExtractResponse$1>;

+ declare function prepareAgentPayload(args: {
+ urls?: string[];
+ prompt: string;
+ schema?: Record<string, unknown> | ZodTypeAny;
+ integration?: string;
+ }): Record<string, unknown>;
+ declare function startAgent(http: HttpClient, args: Parameters<typeof prepareAgentPayload>[0]): Promise<AgentResponse>;
+
  type JobKind = "crawl" | "batch";
  interface WatcherOptions {
  kind?: JobKind;
@@ -744,6 +768,26 @@ declare class FirecrawlClient {
  pollInterval?: number;
  timeout?: number;
  }): Promise<ExtractResponse$1>;
+ /**
+ * Start an agent job (async).
+ * @param args Agent request (urls, prompt, schema).
+ * @returns Job id or processing state.
+ */
+ startAgent(args: Parameters<typeof startAgent>[1]): Promise<AgentResponse>;
+ /**
+ * Get agent job status/data.
+ * @param jobId Agent job id.
+ */
+ getAgentStatus(jobId: string): Promise<AgentStatusResponse>;
+ /**
+ * Convenience waiter: start an agent and poll until it finishes.
+ * @param args Agent request plus waiter controls (pollInterval, timeout seconds).
+ * @returns Final agent response.
+ */
+ agent(args: Parameters<typeof startAgent>[1] & {
+ pollInterval?: number;
+ timeout?: number;
+ }): Promise<AgentStatusResponse>;
  /** Current concurrency usage. */
  getConcurrency(): Promise<ConcurrencyCheck>;
  /** Current credit usage. */
@@ -809,10 +853,14 @@ interface FirecrawlDocumentMetadata {
  articleSection?: string;
  sourceURL?: string;
  statusCode?: number;
+ timezone?: string;
  error?: string;
  proxyUsed?: "basic" | "stealth";
  cacheState?: "miss" | "hit";
  cachedAt?: string;
+ creditsUsed?: number;
+ concurrencyLimited?: boolean;
+ concurrencyQueueDurationMs?: number;
  [key: string]: any;
  }
  /**
@@ -1665,4 +1713,4 @@ declare class Firecrawl extends FirecrawlClient {
  get v1(): FirecrawlApp;
  }

- export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
+ export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AgentResponse, type AgentStatusResponse, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
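Two new response types (AgentResponse, AgentStatusResponse) join the export list, and AgentOptions now admits a 'v3-beta' model alongside 'FIRE-1'. A minimal type-level sketch using the exported aliases, with illustrative values only:

    import type { AgentOptions, AgentStatusResponse } from "firecrawl";

    // 'v3-beta' is newly accepted in 4.9.0; 'FIRE-1' remains valid.
    const options: AgentOptions = { model: "v3-beta" };

    // AgentStatusResponse is what getAgentStatus()/agent() resolve to.
    const done: AgentStatusResponse = {
      success: true,
      status: "completed",
      data: { anything: "goes here" }, // data is typed as unknown
      expiresAt: new Date().toISOString(),
    };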
package/dist/index.d.ts CHANGED
@@ -283,10 +283,13 @@ interface DocumentMetadata {
  scrapeId?: string;
  numPages?: number;
  contentType?: string;
+ timezone?: string;
  proxyUsed?: 'basic' | 'stealth';
  cacheState?: 'hit' | 'miss';
  cachedAt?: string;
  creditsUsed?: number;
+ concurrencyLimited?: boolean;
+ concurrencyQueueDurationMs?: number;
  error?: string;
  [key: string]: unknown;
  }
@@ -446,8 +449,21 @@ interface ExtractResponse$1 {
  expiresAt?: string;
  creditsUsed?: number;
  }
+ interface AgentResponse {
+ success: boolean;
+ id: string;
+ error?: string;
+ }
+ interface AgentStatusResponse {
+ success: boolean;
+ status: 'processing' | 'completed' | 'failed';
+ error?: string;
+ data?: unknown;
+ expiresAt: string;
+ creditsUsed?: number;
+ }
  interface AgentOptions$1 {
- model: 'FIRE-1';
+ model: 'FIRE-1' | 'v3-beta';
  }
  interface ConcurrencyCheck {
  concurrency: number;
@@ -565,6 +581,14 @@ declare function prepareExtractPayload(args: {
  }): Record<string, unknown>;
  declare function startExtract(http: HttpClient, args: Parameters<typeof prepareExtractPayload>[0]): Promise<ExtractResponse$1>;

+ declare function prepareAgentPayload(args: {
+ urls?: string[];
+ prompt: string;
+ schema?: Record<string, unknown> | ZodTypeAny;
+ integration?: string;
+ }): Record<string, unknown>;
+ declare function startAgent(http: HttpClient, args: Parameters<typeof prepareAgentPayload>[0]): Promise<AgentResponse>;
+
  type JobKind = "crawl" | "batch";
  interface WatcherOptions {
  kind?: JobKind;
@@ -744,6 +768,26 @@ declare class FirecrawlClient {
  pollInterval?: number;
  timeout?: number;
  }): Promise<ExtractResponse$1>;
+ /**
+ * Start an agent job (async).
+ * @param args Agent request (urls, prompt, schema).
+ * @returns Job id or processing state.
+ */
+ startAgent(args: Parameters<typeof startAgent>[1]): Promise<AgentResponse>;
+ /**
+ * Get agent job status/data.
+ * @param jobId Agent job id.
+ */
+ getAgentStatus(jobId: string): Promise<AgentStatusResponse>;
+ /**
+ * Convenience waiter: start an agent and poll until it finishes.
+ * @param args Agent request plus waiter controls (pollInterval, timeout seconds).
+ * @returns Final agent response.
+ */
+ agent(args: Parameters<typeof startAgent>[1] & {
+ pollInterval?: number;
+ timeout?: number;
+ }): Promise<AgentStatusResponse>;
  /** Current concurrency usage. */
  getConcurrency(): Promise<ConcurrencyCheck>;
  /** Current credit usage. */
@@ -809,10 +853,14 @@ interface FirecrawlDocumentMetadata {
  articleSection?: string;
  sourceURL?: string;
  statusCode?: number;
+ timezone?: string;
  error?: string;
  proxyUsed?: "basic" | "stealth";
  cacheState?: "miss" | "hit";
  cachedAt?: string;
+ creditsUsed?: number;
+ concurrencyLimited?: boolean;
+ concurrencyQueueDurationMs?: number;
  [key: string]: any;
  }
  /**
@@ -1665,4 +1713,4 @@ declare class Firecrawl extends FirecrawlClient {
  get v1(): FirecrawlApp;
  }

- export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
+ export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AgentResponse, type AgentStatusResponse, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
  import {
  require_package
- } from "./chunk-3IN25WJ7.js";
+ } from "./chunk-KS6E546Q.js";

  // src/v2/utils/httpClient.ts
  import axios from "axios";
@@ -32,7 +32,7 @@ var HttpClient = class {
  this.backoffFactor = options.backoffFactor ?? 0.5;
  this.instance = axios.create({
  baseURL: this.apiUrl,
- timeout: options.timeoutMs ?? 6e4,
+ timeout: options.timeoutMs ?? 3e5,
  headers: {
  "Content-Type": "application/json",
  Authorization: `Bearer ${this.apiKey}`
@@ -633,6 +633,57 @@ async function extract(http, args) {
  return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
  }

+ // src/v2/methods/agent.ts
+ import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
+ function prepareAgentPayload(args) {
+ const body = {};
+ if (args.urls) body.urls = args.urls;
+ body.prompt = args.prompt;
+ if (args.schema != null) {
+ const s = args.schema;
+ const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
+ body.schema = isZod ? zodToJsonSchema3(s) : args.schema;
+ }
+ if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
+ return body;
+ }
+ async function startAgent(http, args) {
+ const payload = prepareAgentPayload(args);
+ try {
+ const res = await http.post("/v2/agent", payload);
+ if (res.status !== 200) throwForBadResponse(res, "agent");
+ return res.data;
+ } catch (err) {
+ if (err?.isAxiosError) return normalizeAxiosError(err, "agent");
+ throw err;
+ }
+ }
+ async function getAgentStatus(http, jobId) {
+ try {
+ const res = await http.get(`/v2/agent/${jobId}`);
+ if (res.status !== 200) throwForBadResponse(res, "agent status");
+ return res.data;
+ } catch (err) {
+ if (err?.isAxiosError) return normalizeAxiosError(err, "agent status");
+ throw err;
+ }
+ }
+ async function waitAgent(http, jobId, pollInterval = 2, timeout) {
+ const start = Date.now();
+ while (true) {
+ const status = await getAgentStatus(http, jobId);
+ if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
+ if (timeout != null && Date.now() - start > timeout * 1e3) return status;
+ await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+ }
+ }
+ async function agent(http, args) {
+ const started = await startAgent(http, args);
+ const jobId = started.id;
+ if (!jobId) return started;
+ return waitAgent(http, jobId, args.pollInterval ?? 2, args.timeout);
+ }
+
  // src/v2/methods/usage.ts
  async function getConcurrency(http) {
  try {
@@ -1094,6 +1145,30 @@ var FirecrawlClient = class {
  async extract(args) {
  return extract(this.http, args);
  }
+ // Agent
+ /**
+ * Start an agent job (async).
+ * @param args Agent request (urls, prompt, schema).
+ * @returns Job id or processing state.
+ */
+ async startAgent(args) {
+ return startAgent(this.http, args);
+ }
+ /**
+ * Get agent job status/data.
+ * @param jobId Agent job id.
+ */
+ async getAgentStatus(jobId) {
+ return getAgentStatus(this.http, jobId);
+ }
+ /**
+ * Convenience waiter: start an agent and poll until it finishes.
+ * @param args Agent request plus waiter controls (pollInterval, timeout seconds).
+ * @returns Final agent response.
+ */
+ async agent(args) {
+ return agent(this.http, args);
+ }
  // Usage
  /** Current concurrency usage. */
  async getConcurrency() {
@@ -1133,7 +1208,7 @@ var FirecrawlClient = class {
  // src/v1/index.ts
  import axios2, { AxiosError } from "axios";
  import "zod";
- import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
+ import { zodToJsonSchema as zodToJsonSchema4 } from "zod-to-json-schema";

  // node_modules/typescript-event-target/dist/index.mjs
  var e = class extends EventTarget {
@@ -1164,7 +1239,7 @@ var FirecrawlApp = class {
  if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
  return process.env.npm_package_version;
  }
- const packageJson = await import("./package-UXOZB356.js");
+ const packageJson = await import("./package-F4K5ZEVR.js");
  return packageJson.default.version;
  } catch (error) {
  const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
@@ -1205,7 +1280,7 @@ var FirecrawlApp = class {
  if (jsonData?.extract?.schema) {
  let schema = jsonData.extract.schema;
  try {
- schema = zodToJsonSchema3(schema);
+ schema = zodToJsonSchema4(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1219,7 +1294,7 @@ var FirecrawlApp = class {
  if (jsonData?.jsonOptions?.schema) {
  let schema = jsonData.jsonOptions.schema;
  try {
- schema = zodToJsonSchema3(schema);
+ schema = zodToJsonSchema4(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1282,7 +1357,7 @@ var FirecrawlApp = class {
  if (jsonData?.scrapeOptions?.extract?.schema) {
  let schema = jsonData.scrapeOptions.extract.schema;
  try {
- schema = zodToJsonSchema3(schema);
+ schema = zodToJsonSchema4(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1551,7 +1626,7 @@ var FirecrawlApp = class {
  if (jsonData?.extract?.schema) {
  let schema = jsonData.extract.schema;
  try {
- schema = zodToJsonSchema3(schema);
+ schema = zodToJsonSchema4(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1565,7 +1640,7 @@ var FirecrawlApp = class {
  if (jsonData?.jsonOptions?.schema) {
  let schema = jsonData.jsonOptions.schema;
  try {
- schema = zodToJsonSchema3(schema);
+ schema = zodToJsonSchema4(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1744,7 +1819,7 @@ var FirecrawlApp = class {
  jsonSchema = void 0;
  } else {
  try {
- jsonSchema = zodToJsonSchema3(params.schema);
+ jsonSchema = zodToJsonSchema4(params.schema);
  } catch (_) {
  jsonSchema = params.schema;
  }
@@ -1808,7 +1883,7 @@ var FirecrawlApp = class {
  jsonSchema = void 0;
  } else {
  try {
- jsonSchema = zodToJsonSchema3(params.schema);
+ jsonSchema = zodToJsonSchema4(params.schema);
  } catch (_) {
  jsonSchema = params.schema;
  }
@@ -2099,7 +2174,7 @@ var FirecrawlApp = class {
  if (jsonData?.jsonOptions?.schema) {
  let schema = jsonData.jsonOptions.schema;
  try {
- schema = zodToJsonSchema3(schema);
+ schema = zodToJsonSchema4(schema);
  } catch (error) {
  }
  jsonData = {
@@ -1,4 +1,4 @@
  import {
  require_package
- } from "./chunk-3IN25WJ7.js";
+ } from "./chunk-KS6E546Q.js";
  export default require_package();
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "firecrawl",
- "version": "4.8.2",
+ "version": "4.9.0",
  "description": "JavaScript SDK for Firecrawl API",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
package/src/v1/index.ts CHANGED
@@ -49,10 +49,14 @@ export interface FirecrawlDocumentMetadata {
  articleSection?: string;
  sourceURL?: string;
  statusCode?: number;
+ timezone?: string;
  error?: string;
  proxyUsed?: "basic" | "stealth";
  cacheState?: "miss" | "hit";
  cachedAt?: string;
+ creditsUsed?: number;
+ concurrencyLimited?: boolean;
+ concurrencyQueueDurationMs?: number;
  [key: string]: any; // Allows for additional metadata properties not explicitly defined.
  }

package/src/v2/client.ts CHANGED
@@ -19,6 +19,7 @@ import {
  batchScrape as batchWaiter,
  } from "./methods/batch";
  import { startExtract, getExtractStatus, extract as extractWaiter } from "./methods/extract";
+ import { startAgent, getAgentStatus, agent as agentWaiter } from "./methods/agent";
  import { getConcurrency, getCreditUsage, getQueueStatus, getTokenUsage, getCreditUsageHistorical, getTokenUsageHistorical } from "./methods/usage";
  import type {
  Document,
@@ -34,6 +35,8 @@ import type {
  BatchScrapeResponse,
  BatchScrapeJob,
  ExtractResponse,
+ AgentResponse,
+ AgentStatusResponse,
  CrawlOptions,
  BatchScrapeOptions,
  PaginationConfig,
@@ -262,6 +265,31 @@ export class FirecrawlClient {
  return extractWaiter(this.http, args);
  }

+ // Agent
+ /**
+ * Start an agent job (async).
+ * @param args Agent request (urls, prompt, schema).
+ * @returns Job id or processing state.
+ */
+ async startAgent(args: Parameters<typeof startAgent>[1]): Promise<AgentResponse> {
+ return startAgent(this.http, args);
+ }
+ /**
+ * Get agent job status/data.
+ * @param jobId Agent job id.
+ */
+ async getAgentStatus(jobId: string): Promise<AgentStatusResponse> {
+ return getAgentStatus(this.http, jobId);
+ }
+ /**
+ * Convenience waiter: start an agent and poll until it finishes.
+ * @param args Agent request plus waiter controls (pollInterval, timeout seconds).
+ * @returns Final agent response.
+ */
+ async agent(args: Parameters<typeof startAgent>[1] & { pollInterval?: number; timeout?: number }): Promise<AgentStatusResponse> {
+ return agentWaiter(this.http, args);
+ }
+
  // Usage
  /** Current concurrency usage. */
  async getConcurrency() {
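Taken together, the new client surface works as sketched below: agent() starts a job and polls it to completion, while startAgent()/getAgentStatus() expose the two halves for manual control. The method names and argument shapes come from the hunks above; the constructor call and the z.object schema are illustrative assumptions.

    import Firecrawl from "firecrawl";
    import { z } from "zod";

    async function run() {
      const firecrawl = new Firecrawl({ apiKey: "fc-YOUR_API_KEY" });

      // One-shot: start the job and poll every 2s (the default) until it finishes or 120s pass.
      const result = await firecrawl.agent({
        urls: ["https://example.com/pricing"], // optional
        prompt: "Extract the plan names and prices",
        schema: z.object({ plans: z.array(z.object({ name: z.string(), price: z.string() })) }),
        pollInterval: 2,
        timeout: 120,
      });
      if (result.status === "completed") console.log(result.data);

      // Manual control: start, then poll on your own schedule.
      const started = await firecrawl.startAgent({ prompt: "Summarize https://example.com" });
      const status = await firecrawl.getAgentStatus(started.id);
      console.log(status.status); // 'processing' | 'completed' | 'failed'
    }

    run().catch(console.error);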
package/src/v2/methods/agent.ts ADDED
@@ -0,0 +1,71 @@
+ import { type AgentResponse, type AgentStatusResponse } from "../types";
+ import { HttpClient } from "../utils/httpClient";
+ import { normalizeAxiosError, throwForBadResponse } from "../utils/errorHandler";
+ import { zodToJsonSchema } from "zod-to-json-schema";
+ import type { ZodTypeAny } from "zod";
+
+ function prepareAgentPayload(args: {
+ urls?: string[];
+ prompt: string;
+ schema?: Record<string, unknown> | ZodTypeAny;
+ integration?: string;
+ }): Record<string, unknown> {
+ const body: Record<string, unknown> = {};
+ if (args.urls) body.urls = args.urls;
+ body.prompt = args.prompt;
+ if (args.schema != null) {
+ const s: any = args.schema;
+ const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
+ body.schema = isZod ? zodToJsonSchema(s) : args.schema;
+ }
+ if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
+ return body;
+ }
+
+ export async function startAgent(http: HttpClient, args: Parameters<typeof prepareAgentPayload>[0]): Promise<AgentResponse> {
+ const payload = prepareAgentPayload(args);
+ try {
+ const res = await http.post<AgentResponse>("/v2/agent", payload);
+ if (res.status !== 200) throwForBadResponse(res, "agent");
+ return res.data;
+ } catch (err: any) {
+ if (err?.isAxiosError) return normalizeAxiosError(err, "agent");
+ throw err;
+ }
+ }
+
+ export async function getAgentStatus(http: HttpClient, jobId: string): Promise<AgentStatusResponse> {
+ try {
+ const res = await http.get<AgentStatusResponse>(`/v2/agent/${jobId}`);
+ if (res.status !== 200) throwForBadResponse(res, "agent status");
+ return res.data;
+ } catch (err: any) {
+ if (err?.isAxiosError) return normalizeAxiosError(err, "agent status");
+ throw err;
+ }
+ }
+
+ export async function waitAgent(
+ http: HttpClient,
+ jobId: string,
+ pollInterval = 2,
+ timeout?: number
+ ): Promise<AgentStatusResponse> {
+ const start = Date.now();
+ while (true) {
+ const status = await getAgentStatus(http, jobId);
+ if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
+ if (timeout != null && Date.now() - start > timeout * 1000) return status;
+ await new Promise((r) => setTimeout(r, Math.max(1000, pollInterval * 1000)));
+ }
+ }
+
+ export async function agent(
+ http: HttpClient,
+ args: Parameters<typeof prepareAgentPayload>[0] & { pollInterval?: number; timeout?: number }
+ ): Promise<AgentStatusResponse> {
+ const started = await startAgent(http, args);
+ const jobId = started.id;
+ if (!jobId) return started as unknown as AgentStatusResponse;
+ return waitAgent(http, jobId, args.pollInterval ?? 2, args.timeout);
+ }
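The payload builder above accepts either a plain JSON Schema object or a Zod schema: Zod inputs are detected by the presence of parse/safeParse and _def and converted with zod-to-json-schema before the POST to /v2/agent. A rough sketch of the body that ends up on the wire, assuming zod is installed next to the SDK; the PriceSchema name is purely illustrative:

    import { z } from "zod";
    import { zodToJsonSchema } from "zod-to-json-schema";

    // Hypothetical schema; any Zod object goes through the same path.
    const PriceSchema = z.object({ product: z.string(), price: z.number() });

    // Mirrors the isZod check in prepareAgentPayload above.
    const s: any = PriceSchema;
    const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;

    const body = {
      prompt: "Find the current price of the product", // required
      urls: ["https://example.com/product"],           // optional
      schema: isZod ? zodToJsonSchema(s) : s,           // JSON Schema sent to /v2/agent
    };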
package/src/v2/types.ts CHANGED
@@ -347,10 +347,13 @@ export interface DocumentMetadata {
  scrapeId?: string;
  numPages?: number;
  contentType?: string;
+ timezone?: string;
  proxyUsed?: 'basic' | 'stealth';
  cacheState?: 'hit' | 'miss';
  cachedAt?: string;
  creditsUsed?: number;
+ concurrencyLimited?: boolean;
+ concurrencyQueueDurationMs?: number;

  // Error information
  error?: string;
@@ -532,8 +535,23 @@ export interface ExtractResponse {
  creditsUsed?: number;
  }

+ export interface AgentResponse {
+ success: boolean;
+ id: string;
+ error?: string;
+ }
+
+ export interface AgentStatusResponse {
+ success: boolean;
+ status: 'processing' | 'completed' | 'failed';
+ error?: string;
+ data?: unknown;
+ expiresAt: string;
+ creditsUsed?: number;
+ }
+
  export interface AgentOptions {
- model: 'FIRE-1';
+ model: 'FIRE-1' | 'v3-beta';
  }

  export interface ConcurrencyCheck {
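The new DocumentMetadata fields add per-request accounting to each returned document. A hedged sketch of reading them; the constructor and scrape() call are assumptions carried over from earlier releases, and the comments read the field names at face value since this diff does not document their semantics:

    import Firecrawl from "firecrawl";

    async function inspectMetadata() {
      const firecrawl = new Firecrawl({ apiKey: "fc-YOUR_API_KEY" });
      const doc = await firecrawl.scrape("https://example.com");

      console.log(doc.metadata?.timezone);                   // new in 4.9.0
      console.log(doc.metadata?.creditsUsed);                 // credits billed for this request
      console.log(doc.metadata?.concurrencyLimited);          // presumably: request waited on the concurrency limit
      console.log(doc.metadata?.concurrencyQueueDurationMs);  // presumably: time spent queued, in milliseconds
    }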
package/src/v2/utils/httpClient.ts CHANGED
@@ -23,7 +23,7 @@ export class HttpClient {
  this.backoffFactor = options.backoffFactor ?? 0.5;
  this.instance = axios.create({
  baseURL: this.apiUrl,
- timeout: options.timeoutMs ?? 60000,
+ timeout: options.timeoutMs ?? 300000,
  headers: {
  "Content-Type": "application/json",
  Authorization: `Bearer ${this.apiKey}`,
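This last hunk raises the default axios request timeout from 60 000 ms (6e4 in the bundles) to 300 000 ms, i.e. from 1 to 5 minutes. An explicit options.timeoutMs still takes precedence inside HttpClient; whether the public client constructor forwards timeoutMs is not shown in this diff, so the snippet below is a hedged sketch rather than documented API:

    import Firecrawl from "firecrawl";

    // Assumption: FirecrawlClientOptions passes timeoutMs through to HttpClient.
    const firecrawl = new Firecrawl({
      apiKey: "fc-YOUR_API_KEY",
      timeoutMs: 60_000, // restore the pre-4.9.0 60-second behaviour; omit for the new 300s default
    });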