@mendable/firecrawl 4.8.3 → 4.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-KNKWC5EH.js → chunk-POKQQMNV.js} +1 -1
- package/dist/index.cjs +103 -10
- package/dist/index.d.cts +51 -2
- package/dist/index.d.ts +51 -2
- package/dist/index.js +104 -11
- package/dist/{package-FYCFMQXV.js → package-YGCUU3YI.js} +1 -1
- package/package.json +1 -1
- package/src/v1/index.ts +1 -0
- package/src/v2/client.ts +36 -0
- package/src/v2/methods/agent.ts +82 -0
- package/src/v2/types.ts +17 -1
|
@@ -8,7 +8,7 @@ var require_package = __commonJS({
|
|
|
8
8
|
"package.json"(exports, module) {
|
|
9
9
|
module.exports = {
|
|
10
10
|
name: "@mendable/firecrawl-js",
|
|
11
|
-
version: "4.
|
|
11
|
+
version: "4.9.1",
|
|
12
12
|
description: "JavaScript SDK for Firecrawl API",
|
|
13
13
|
main: "dist/index.js",
|
|
14
14
|
types: "dist/index.d.ts",
|
package/dist/index.cjs
CHANGED
|
@@ -35,7 +35,7 @@ var require_package = __commonJS({
|
|
|
35
35
|
"package.json"(exports2, module2) {
|
|
36
36
|
module2.exports = {
|
|
37
37
|
name: "@mendable/firecrawl-js",
|
|
38
|
-
version: "4.
|
|
38
|
+
version: "4.9.1",
|
|
39
39
|
description: "JavaScript SDK for Firecrawl API",
|
|
40
40
|
main: "dist/index.js",
|
|
41
41
|
types: "dist/index.d.ts",
|
|
@@ -751,6 +751,67 @@ async function extract(http, args) {
|
|
|
751
751
|
return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
|
|
752
752
|
}
|
|
753
753
|
|
|
754
|
+
// src/v2/methods/agent.ts
|
|
755
|
+
var import_zod_to_json_schema3 = require("zod-to-json-schema");
|
|
756
|
+
function prepareAgentPayload(args) {
|
|
757
|
+
const body = {};
|
|
758
|
+
if (args.urls) body.urls = args.urls;
|
|
759
|
+
body.prompt = args.prompt;
|
|
760
|
+
if (args.schema != null) {
|
|
761
|
+
const s = args.schema;
|
|
762
|
+
const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
|
|
763
|
+
body.schema = isZod ? (0, import_zod_to_json_schema3.zodToJsonSchema)(s) : args.schema;
|
|
764
|
+
}
|
|
765
|
+
if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
|
|
766
|
+
return body;
|
|
767
|
+
}
|
|
768
|
+
async function startAgent(http, args) {
|
|
769
|
+
const payload = prepareAgentPayload(args);
|
|
770
|
+
try {
|
|
771
|
+
const res = await http.post("/v2/agent", payload);
|
|
772
|
+
if (res.status !== 200) throwForBadResponse(res, "agent");
|
|
773
|
+
return res.data;
|
|
774
|
+
} catch (err) {
|
|
775
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "agent");
|
|
776
|
+
throw err;
|
|
777
|
+
}
|
|
778
|
+
}
|
|
779
|
+
async function getAgentStatus(http, jobId) {
|
|
780
|
+
try {
|
|
781
|
+
const res = await http.get(`/v2/agent/${jobId}`);
|
|
782
|
+
if (res.status !== 200) throwForBadResponse(res, "agent status");
|
|
783
|
+
return res.data;
|
|
784
|
+
} catch (err) {
|
|
785
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "agent status");
|
|
786
|
+
throw err;
|
|
787
|
+
}
|
|
788
|
+
}
|
|
789
|
+
async function waitAgent(http, jobId, pollInterval = 2, timeout) {
|
|
790
|
+
const start = Date.now();
|
|
791
|
+
while (true) {
|
|
792
|
+
const status = await getAgentStatus(http, jobId);
|
|
793
|
+
if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
|
|
794
|
+
if (timeout != null && Date.now() - start > timeout * 1e3) return status;
|
|
795
|
+
await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
|
|
796
|
+
}
|
|
797
|
+
}
|
|
798
|
+
async function agent(http, args) {
|
|
799
|
+
const started = await startAgent(http, args);
|
|
800
|
+
const jobId = started.id;
|
|
801
|
+
if (!jobId) return started;
|
|
802
|
+
return waitAgent(http, jobId, args.pollInterval ?? 2, args.timeout);
|
|
803
|
+
}
|
|
804
|
+
async function cancelAgent(http, jobId) {
|
|
805
|
+
try {
|
|
806
|
+
const res = await http.delete(`/v2/agent/${jobId}`);
|
|
807
|
+
if (res.status !== 200) throwForBadResponse(res, "cancel agent");
|
|
808
|
+
return res.data?.success === true;
|
|
809
|
+
} catch (err) {
|
|
810
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "cancel agent");
|
|
811
|
+
throw err;
|
|
812
|
+
}
|
|
813
|
+
}
|
|
814
|
+
|
|
754
815
|
// src/v2/methods/usage.ts
|
|
755
816
|
async function getConcurrency(http) {
|
|
756
817
|
try {
|
|
@@ -1212,6 +1273,38 @@ var FirecrawlClient = class {
|
|
|
1212
1273
|
async extract(args) {
|
|
1213
1274
|
return extract(this.http, args);
|
|
1214
1275
|
}
|
|
1276
|
+
// Agent
|
|
1277
|
+
/**
|
|
1278
|
+
* Start an agent job (async).
|
|
1279
|
+
* @param args Agent request (urls, prompt, schema).
|
|
1280
|
+
* @returns Job id or processing state.
|
|
1281
|
+
*/
|
|
1282
|
+
async startAgent(args) {
|
|
1283
|
+
return startAgent(this.http, args);
|
|
1284
|
+
}
|
|
1285
|
+
/**
|
|
1286
|
+
* Get agent job status/data.
|
|
1287
|
+
* @param jobId Agent job id.
|
|
1288
|
+
*/
|
|
1289
|
+
async getAgentStatus(jobId) {
|
|
1290
|
+
return getAgentStatus(this.http, jobId);
|
|
1291
|
+
}
|
|
1292
|
+
/**
|
|
1293
|
+
* Convenience waiter: start an agent and poll until it finishes.
|
|
1294
|
+
* @param args Agent request plus waiter controls (pollInterval, timeout seconds).
|
|
1295
|
+
* @returns Final agent response.
|
|
1296
|
+
*/
|
|
1297
|
+
async agent(args) {
|
|
1298
|
+
return agent(this.http, args);
|
|
1299
|
+
}
|
|
1300
|
+
/**
|
|
1301
|
+
* Cancel an agent job.
|
|
1302
|
+
* @param jobId Agent job id.
|
|
1303
|
+
* @returns True if cancelled.
|
|
1304
|
+
*/
|
|
1305
|
+
async cancelAgent(jobId) {
|
|
1306
|
+
return cancelAgent(this.http, jobId);
|
|
1307
|
+
}
|
|
1215
1308
|
// Usage
|
|
1216
1309
|
/** Current concurrency usage. */
|
|
1217
1310
|
async getConcurrency() {
|
|
@@ -1251,7 +1344,7 @@ var FirecrawlClient = class {
|
|
|
1251
1344
|
// src/v1/index.ts
|
|
1252
1345
|
var import_axios3 = __toESM(require("axios"), 1);
|
|
1253
1346
|
var zt2 = require("zod");
|
|
1254
|
-
var
|
|
1347
|
+
var import_zod_to_json_schema4 = require("zod-to-json-schema");
|
|
1255
1348
|
|
|
1256
1349
|
// node_modules/typescript-event-target/dist/index.mjs
|
|
1257
1350
|
var e = class extends EventTarget {
|
|
@@ -1323,7 +1416,7 @@ var FirecrawlApp = class {
|
|
|
1323
1416
|
if (jsonData?.extract?.schema) {
|
|
1324
1417
|
let schema = jsonData.extract.schema;
|
|
1325
1418
|
try {
|
|
1326
|
-
schema = (0,
|
|
1419
|
+
schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
|
|
1327
1420
|
} catch (error) {
|
|
1328
1421
|
}
|
|
1329
1422
|
jsonData = {
|
|
@@ -1337,7 +1430,7 @@ var FirecrawlApp = class {
|
|
|
1337
1430
|
if (jsonData?.jsonOptions?.schema) {
|
|
1338
1431
|
let schema = jsonData.jsonOptions.schema;
|
|
1339
1432
|
try {
|
|
1340
|
-
schema = (0,
|
|
1433
|
+
schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
|
|
1341
1434
|
} catch (error) {
|
|
1342
1435
|
}
|
|
1343
1436
|
jsonData = {
|
|
@@ -1400,7 +1493,7 @@ var FirecrawlApp = class {
|
|
|
1400
1493
|
if (jsonData?.scrapeOptions?.extract?.schema) {
|
|
1401
1494
|
let schema = jsonData.scrapeOptions.extract.schema;
|
|
1402
1495
|
try {
|
|
1403
|
-
schema = (0,
|
|
1496
|
+
schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
|
|
1404
1497
|
} catch (error) {
|
|
1405
1498
|
}
|
|
1406
1499
|
jsonData = {
|
|
@@ -1669,7 +1762,7 @@ var FirecrawlApp = class {
|
|
|
1669
1762
|
if (jsonData?.extract?.schema) {
|
|
1670
1763
|
let schema = jsonData.extract.schema;
|
|
1671
1764
|
try {
|
|
1672
|
-
schema = (0,
|
|
1765
|
+
schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
|
|
1673
1766
|
} catch (error) {
|
|
1674
1767
|
}
|
|
1675
1768
|
jsonData = {
|
|
@@ -1683,7 +1776,7 @@ var FirecrawlApp = class {
|
|
|
1683
1776
|
if (jsonData?.jsonOptions?.schema) {
|
|
1684
1777
|
let schema = jsonData.jsonOptions.schema;
|
|
1685
1778
|
try {
|
|
1686
|
-
schema = (0,
|
|
1779
|
+
schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
|
|
1687
1780
|
} catch (error) {
|
|
1688
1781
|
}
|
|
1689
1782
|
jsonData = {
|
|
@@ -1862,7 +1955,7 @@ var FirecrawlApp = class {
|
|
|
1862
1955
|
jsonSchema = void 0;
|
|
1863
1956
|
} else {
|
|
1864
1957
|
try {
|
|
1865
|
-
jsonSchema = (0,
|
|
1958
|
+
jsonSchema = (0, import_zod_to_json_schema4.zodToJsonSchema)(params.schema);
|
|
1866
1959
|
} catch (_) {
|
|
1867
1960
|
jsonSchema = params.schema;
|
|
1868
1961
|
}
|
|
@@ -1926,7 +2019,7 @@ var FirecrawlApp = class {
|
|
|
1926
2019
|
jsonSchema = void 0;
|
|
1927
2020
|
} else {
|
|
1928
2021
|
try {
|
|
1929
|
-
jsonSchema = (0,
|
|
2022
|
+
jsonSchema = (0, import_zod_to_json_schema4.zodToJsonSchema)(params.schema);
|
|
1930
2023
|
} catch (_) {
|
|
1931
2024
|
jsonSchema = params.schema;
|
|
1932
2025
|
}
|
|
@@ -2217,7 +2310,7 @@ var FirecrawlApp = class {
|
|
|
2217
2310
|
if (jsonData?.jsonOptions?.schema) {
|
|
2218
2311
|
let schema = jsonData.jsonOptions.schema;
|
|
2219
2312
|
try {
|
|
2220
|
-
schema = (0,
|
|
2313
|
+
schema = (0, import_zod_to_json_schema4.zodToJsonSchema)(schema);
|
|
2221
2314
|
} catch (error) {
|
|
2222
2315
|
}
|
|
2223
2316
|
jsonData = {
|
package/dist/index.d.cts
CHANGED
|
@@ -283,6 +283,7 @@ interface DocumentMetadata {
|
|
|
283
283
|
scrapeId?: string;
|
|
284
284
|
numPages?: number;
|
|
285
285
|
contentType?: string;
|
|
286
|
+
timezone?: string;
|
|
286
287
|
proxyUsed?: 'basic' | 'stealth';
|
|
287
288
|
cacheState?: 'hit' | 'miss';
|
|
288
289
|
cachedAt?: string;
|
|
@@ -448,8 +449,21 @@ interface ExtractResponse$1 {
|
|
|
448
449
|
expiresAt?: string;
|
|
449
450
|
creditsUsed?: number;
|
|
450
451
|
}
|
|
452
|
+
interface AgentResponse {
|
|
453
|
+
success: boolean;
|
|
454
|
+
id: string;
|
|
455
|
+
error?: string;
|
|
456
|
+
}
|
|
457
|
+
interface AgentStatusResponse {
|
|
458
|
+
success: boolean;
|
|
459
|
+
status: 'processing' | 'completed' | 'failed';
|
|
460
|
+
error?: string;
|
|
461
|
+
data?: unknown;
|
|
462
|
+
expiresAt: string;
|
|
463
|
+
creditsUsed?: number;
|
|
464
|
+
}
|
|
451
465
|
interface AgentOptions$1 {
|
|
452
|
-
model: 'FIRE-1';
|
|
466
|
+
model: 'FIRE-1' | 'v3-beta';
|
|
453
467
|
}
|
|
454
468
|
interface ConcurrencyCheck {
|
|
455
469
|
concurrency: number;
|
|
@@ -567,6 +581,14 @@ declare function prepareExtractPayload(args: {
|
|
|
567
581
|
}): Record<string, unknown>;
|
|
568
582
|
declare function startExtract(http: HttpClient, args: Parameters<typeof prepareExtractPayload>[0]): Promise<ExtractResponse$1>;
|
|
569
583
|
|
|
584
|
+
declare function prepareAgentPayload(args: {
|
|
585
|
+
urls?: string[];
|
|
586
|
+
prompt: string;
|
|
587
|
+
schema?: Record<string, unknown> | ZodTypeAny;
|
|
588
|
+
integration?: string;
|
|
589
|
+
}): Record<string, unknown>;
|
|
590
|
+
declare function startAgent(http: HttpClient, args: Parameters<typeof prepareAgentPayload>[0]): Promise<AgentResponse>;
|
|
591
|
+
|
|
570
592
|
type JobKind = "crawl" | "batch";
|
|
571
593
|
interface WatcherOptions {
|
|
572
594
|
kind?: JobKind;
|
|
@@ -746,6 +768,32 @@ declare class FirecrawlClient {
|
|
|
746
768
|
pollInterval?: number;
|
|
747
769
|
timeout?: number;
|
|
748
770
|
}): Promise<ExtractResponse$1>;
|
|
771
|
+
/**
|
|
772
|
+
* Start an agent job (async).
|
|
773
|
+
* @param args Agent request (urls, prompt, schema).
|
|
774
|
+
* @returns Job id or processing state.
|
|
775
|
+
*/
|
|
776
|
+
startAgent(args: Parameters<typeof startAgent>[1]): Promise<AgentResponse>;
|
|
777
|
+
/**
|
|
778
|
+
* Get agent job status/data.
|
|
779
|
+
* @param jobId Agent job id.
|
|
780
|
+
*/
|
|
781
|
+
getAgentStatus(jobId: string): Promise<AgentStatusResponse>;
|
|
782
|
+
/**
|
|
783
|
+
* Convenience waiter: start an agent and poll until it finishes.
|
|
784
|
+
* @param args Agent request plus waiter controls (pollInterval, timeout seconds).
|
|
785
|
+
* @returns Final agent response.
|
|
786
|
+
*/
|
|
787
|
+
agent(args: Parameters<typeof startAgent>[1] & {
|
|
788
|
+
pollInterval?: number;
|
|
789
|
+
timeout?: number;
|
|
790
|
+
}): Promise<AgentStatusResponse>;
|
|
791
|
+
/**
|
|
792
|
+
* Cancel an agent job.
|
|
793
|
+
* @param jobId Agent job id.
|
|
794
|
+
* @returns True if cancelled.
|
|
795
|
+
*/
|
|
796
|
+
cancelAgent(jobId: string): Promise<boolean>;
|
|
749
797
|
/** Current concurrency usage. */
|
|
750
798
|
getConcurrency(): Promise<ConcurrencyCheck>;
|
|
751
799
|
/** Current credit usage. */
|
|
@@ -811,6 +859,7 @@ interface FirecrawlDocumentMetadata {
|
|
|
811
859
|
articleSection?: string;
|
|
812
860
|
sourceURL?: string;
|
|
813
861
|
statusCode?: number;
|
|
862
|
+
timezone?: string;
|
|
814
863
|
error?: string;
|
|
815
864
|
proxyUsed?: "basic" | "stealth";
|
|
816
865
|
cacheState?: "miss" | "hit";
|
|
@@ -1670,4 +1719,4 @@ declare class Firecrawl extends FirecrawlClient {
|
|
|
1670
1719
|
get v1(): FirecrawlApp;
|
|
1671
1720
|
}
|
|
1672
1721
|
|
|
1673
|
-
export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
|
|
1722
|
+
export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AgentResponse, type AgentStatusResponse, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
|
package/dist/index.d.ts
CHANGED
|
@@ -283,6 +283,7 @@ interface DocumentMetadata {
|
|
|
283
283
|
scrapeId?: string;
|
|
284
284
|
numPages?: number;
|
|
285
285
|
contentType?: string;
|
|
286
|
+
timezone?: string;
|
|
286
287
|
proxyUsed?: 'basic' | 'stealth';
|
|
287
288
|
cacheState?: 'hit' | 'miss';
|
|
288
289
|
cachedAt?: string;
|
|
@@ -448,8 +449,21 @@ interface ExtractResponse$1 {
|
|
|
448
449
|
expiresAt?: string;
|
|
449
450
|
creditsUsed?: number;
|
|
450
451
|
}
|
|
452
|
+
interface AgentResponse {
|
|
453
|
+
success: boolean;
|
|
454
|
+
id: string;
|
|
455
|
+
error?: string;
|
|
456
|
+
}
|
|
457
|
+
interface AgentStatusResponse {
|
|
458
|
+
success: boolean;
|
|
459
|
+
status: 'processing' | 'completed' | 'failed';
|
|
460
|
+
error?: string;
|
|
461
|
+
data?: unknown;
|
|
462
|
+
expiresAt: string;
|
|
463
|
+
creditsUsed?: number;
|
|
464
|
+
}
|
|
451
465
|
interface AgentOptions$1 {
|
|
452
|
-
model: 'FIRE-1';
|
|
466
|
+
model: 'FIRE-1' | 'v3-beta';
|
|
453
467
|
}
|
|
454
468
|
interface ConcurrencyCheck {
|
|
455
469
|
concurrency: number;
|
|
@@ -567,6 +581,14 @@ declare function prepareExtractPayload(args: {
|
|
|
567
581
|
}): Record<string, unknown>;
|
|
568
582
|
declare function startExtract(http: HttpClient, args: Parameters<typeof prepareExtractPayload>[0]): Promise<ExtractResponse$1>;
|
|
569
583
|
|
|
584
|
+
declare function prepareAgentPayload(args: {
|
|
585
|
+
urls?: string[];
|
|
586
|
+
prompt: string;
|
|
587
|
+
schema?: Record<string, unknown> | ZodTypeAny;
|
|
588
|
+
integration?: string;
|
|
589
|
+
}): Record<string, unknown>;
|
|
590
|
+
declare function startAgent(http: HttpClient, args: Parameters<typeof prepareAgentPayload>[0]): Promise<AgentResponse>;
|
|
591
|
+
|
|
570
592
|
type JobKind = "crawl" | "batch";
|
|
571
593
|
interface WatcherOptions {
|
|
572
594
|
kind?: JobKind;
|
|
@@ -746,6 +768,32 @@ declare class FirecrawlClient {
|
|
|
746
768
|
pollInterval?: number;
|
|
747
769
|
timeout?: number;
|
|
748
770
|
}): Promise<ExtractResponse$1>;
|
|
771
|
+
/**
|
|
772
|
+
* Start an agent job (async).
|
|
773
|
+
* @param args Agent request (urls, prompt, schema).
|
|
774
|
+
* @returns Job id or processing state.
|
|
775
|
+
*/
|
|
776
|
+
startAgent(args: Parameters<typeof startAgent>[1]): Promise<AgentResponse>;
|
|
777
|
+
/**
|
|
778
|
+
* Get agent job status/data.
|
|
779
|
+
* @param jobId Agent job id.
|
|
780
|
+
*/
|
|
781
|
+
getAgentStatus(jobId: string): Promise<AgentStatusResponse>;
|
|
782
|
+
/**
|
|
783
|
+
* Convenience waiter: start an agent and poll until it finishes.
|
|
784
|
+
* @param args Agent request plus waiter controls (pollInterval, timeout seconds).
|
|
785
|
+
* @returns Final agent response.
|
|
786
|
+
*/
|
|
787
|
+
agent(args: Parameters<typeof startAgent>[1] & {
|
|
788
|
+
pollInterval?: number;
|
|
789
|
+
timeout?: number;
|
|
790
|
+
}): Promise<AgentStatusResponse>;
|
|
791
|
+
/**
|
|
792
|
+
* Cancel an agent job.
|
|
793
|
+
* @param jobId Agent job id.
|
|
794
|
+
* @returns True if cancelled.
|
|
795
|
+
*/
|
|
796
|
+
cancelAgent(jobId: string): Promise<boolean>;
|
|
749
797
|
/** Current concurrency usage. */
|
|
750
798
|
getConcurrency(): Promise<ConcurrencyCheck>;
|
|
751
799
|
/** Current credit usage. */
|
|
@@ -811,6 +859,7 @@ interface FirecrawlDocumentMetadata {
|
|
|
811
859
|
articleSection?: string;
|
|
812
860
|
sourceURL?: string;
|
|
813
861
|
statusCode?: number;
|
|
862
|
+
timezone?: string;
|
|
814
863
|
error?: string;
|
|
815
864
|
proxyUsed?: "basic" | "stealth";
|
|
816
865
|
cacheState?: "miss" | "hit";
|
|
@@ -1670,4 +1719,4 @@ declare class Firecrawl extends FirecrawlClient {
|
|
|
1670
1719
|
get v1(): FirecrawlApp;
|
|
1671
1720
|
}
|
|
1672
1721
|
|
|
1673
|
-
export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
|
|
1722
|
+
export { type ActionOption, type ActiveCrawl, type ActiveCrawlsResponse, type AgentOptions$1 as AgentOptions, type AgentResponse, type AgentStatusResponse, type AttributesFormat, type BatchScrapeJob, type BatchScrapeOptions, type BatchScrapeResponse$1 as BatchScrapeResponse, type BrandingProfile, type CategoryOption, type ChangeTrackingFormat, type ClickAction, type ConcurrencyCheck, type CrawlErrorsResponse$1 as CrawlErrorsResponse, type CrawlJob, type CrawlOptions, type CrawlResponse$1 as CrawlResponse, type CreditUsage, type CreditUsageHistoricalPeriod, type CreditUsageHistoricalResponse, type Document, type DocumentMetadata, type ErrorDetails, type ExecuteJavascriptAction, type ExtractResponse$1 as ExtractResponse, Firecrawl, FirecrawlApp as FirecrawlAppV1, FirecrawlClient, type FirecrawlClientOptions, type Format, type FormatOption, type FormatString, type JsonFormat, type LocationConfig$1 as LocationConfig, type MapData, type MapOptions, type PDFAction, type PaginationConfig, type PressAction, type QueueStatusResponse$1 as QueueStatusResponse, type ScrapeAction, type ScrapeOptions, type ScreenshotAction, type ScreenshotFormat, type ScrollAction, SdkError, type SearchData, type SearchRequest, type SearchResultImages, type SearchResultNews, type SearchResultWeb, type TokenUsage, type TokenUsageHistoricalPeriod, type TokenUsageHistoricalResponse, type Viewport, type WaitAction, type WebhookConfig, type WriteAction, Firecrawl as default };
|
package/dist/index.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import {
|
|
2
2
|
require_package
|
|
3
|
-
} from "./chunk-
|
|
3
|
+
} from "./chunk-POKQQMNV.js";
|
|
4
4
|
|
|
5
5
|
// src/v2/utils/httpClient.ts
|
|
6
6
|
import axios from "axios";
|
|
@@ -633,6 +633,67 @@ async function extract(http, args) {
|
|
|
633
633
|
return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
|
|
634
634
|
}
|
|
635
635
|
|
|
636
|
+
// src/v2/methods/agent.ts
|
|
637
|
+
import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
|
|
638
|
+
function prepareAgentPayload(args) {
|
|
639
|
+
const body = {};
|
|
640
|
+
if (args.urls) body.urls = args.urls;
|
|
641
|
+
body.prompt = args.prompt;
|
|
642
|
+
if (args.schema != null) {
|
|
643
|
+
const s = args.schema;
|
|
644
|
+
const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
|
|
645
|
+
body.schema = isZod ? zodToJsonSchema3(s) : args.schema;
|
|
646
|
+
}
|
|
647
|
+
if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
|
|
648
|
+
return body;
|
|
649
|
+
}
|
|
650
|
+
async function startAgent(http, args) {
|
|
651
|
+
const payload = prepareAgentPayload(args);
|
|
652
|
+
try {
|
|
653
|
+
const res = await http.post("/v2/agent", payload);
|
|
654
|
+
if (res.status !== 200) throwForBadResponse(res, "agent");
|
|
655
|
+
return res.data;
|
|
656
|
+
} catch (err) {
|
|
657
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "agent");
|
|
658
|
+
throw err;
|
|
659
|
+
}
|
|
660
|
+
}
|
|
661
|
+
async function getAgentStatus(http, jobId) {
|
|
662
|
+
try {
|
|
663
|
+
const res = await http.get(`/v2/agent/${jobId}`);
|
|
664
|
+
if (res.status !== 200) throwForBadResponse(res, "agent status");
|
|
665
|
+
return res.data;
|
|
666
|
+
} catch (err) {
|
|
667
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "agent status");
|
|
668
|
+
throw err;
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
async function waitAgent(http, jobId, pollInterval = 2, timeout) {
|
|
672
|
+
const start = Date.now();
|
|
673
|
+
while (true) {
|
|
674
|
+
const status = await getAgentStatus(http, jobId);
|
|
675
|
+
if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
|
|
676
|
+
if (timeout != null && Date.now() - start > timeout * 1e3) return status;
|
|
677
|
+
await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
|
|
678
|
+
}
|
|
679
|
+
}
|
|
680
|
+
async function agent(http, args) {
|
|
681
|
+
const started = await startAgent(http, args);
|
|
682
|
+
const jobId = started.id;
|
|
683
|
+
if (!jobId) return started;
|
|
684
|
+
return waitAgent(http, jobId, args.pollInterval ?? 2, args.timeout);
|
|
685
|
+
}
|
|
686
|
+
async function cancelAgent(http, jobId) {
|
|
687
|
+
try {
|
|
688
|
+
const res = await http.delete(`/v2/agent/${jobId}`);
|
|
689
|
+
if (res.status !== 200) throwForBadResponse(res, "cancel agent");
|
|
690
|
+
return res.data?.success === true;
|
|
691
|
+
} catch (err) {
|
|
692
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "cancel agent");
|
|
693
|
+
throw err;
|
|
694
|
+
}
|
|
695
|
+
}
|
|
696
|
+
|
|
636
697
|
// src/v2/methods/usage.ts
|
|
637
698
|
async function getConcurrency(http) {
|
|
638
699
|
try {
|
|
@@ -1094,6 +1155,38 @@ var FirecrawlClient = class {
|
|
|
1094
1155
|
async extract(args) {
|
|
1095
1156
|
return extract(this.http, args);
|
|
1096
1157
|
}
|
|
1158
|
+
// Agent
|
|
1159
|
+
/**
|
|
1160
|
+
* Start an agent job (async).
|
|
1161
|
+
* @param args Agent request (urls, prompt, schema).
|
|
1162
|
+
* @returns Job id or processing state.
|
|
1163
|
+
*/
|
|
1164
|
+
async startAgent(args) {
|
|
1165
|
+
return startAgent(this.http, args);
|
|
1166
|
+
}
|
|
1167
|
+
/**
|
|
1168
|
+
* Get agent job status/data.
|
|
1169
|
+
* @param jobId Agent job id.
|
|
1170
|
+
*/
|
|
1171
|
+
async getAgentStatus(jobId) {
|
|
1172
|
+
return getAgentStatus(this.http, jobId);
|
|
1173
|
+
}
|
|
1174
|
+
/**
|
|
1175
|
+
* Convenience waiter: start an agent and poll until it finishes.
|
|
1176
|
+
* @param args Agent request plus waiter controls (pollInterval, timeout seconds).
|
|
1177
|
+
* @returns Final agent response.
|
|
1178
|
+
*/
|
|
1179
|
+
async agent(args) {
|
|
1180
|
+
return agent(this.http, args);
|
|
1181
|
+
}
|
|
1182
|
+
/**
|
|
1183
|
+
* Cancel an agent job.
|
|
1184
|
+
* @param jobId Agent job id.
|
|
1185
|
+
* @returns True if cancelled.
|
|
1186
|
+
*/
|
|
1187
|
+
async cancelAgent(jobId) {
|
|
1188
|
+
return cancelAgent(this.http, jobId);
|
|
1189
|
+
}
|
|
1097
1190
|
// Usage
|
|
1098
1191
|
/** Current concurrency usage. */
|
|
1099
1192
|
async getConcurrency() {
|
|
@@ -1133,7 +1226,7 @@ var FirecrawlClient = class {
|
|
|
1133
1226
|
// src/v1/index.ts
|
|
1134
1227
|
import axios2, { AxiosError } from "axios";
|
|
1135
1228
|
import "zod";
|
|
1136
|
-
import { zodToJsonSchema as
|
|
1229
|
+
import { zodToJsonSchema as zodToJsonSchema4 } from "zod-to-json-schema";
|
|
1137
1230
|
|
|
1138
1231
|
// node_modules/typescript-event-target/dist/index.mjs
|
|
1139
1232
|
var e = class extends EventTarget {
|
|
@@ -1164,7 +1257,7 @@ var FirecrawlApp = class {
|
|
|
1164
1257
|
if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
|
|
1165
1258
|
return process.env.npm_package_version;
|
|
1166
1259
|
}
|
|
1167
|
-
const packageJson = await import("./package-
|
|
1260
|
+
const packageJson = await import("./package-YGCUU3YI.js");
|
|
1168
1261
|
return packageJson.default.version;
|
|
1169
1262
|
} catch (error) {
|
|
1170
1263
|
const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
|
|
@@ -1205,7 +1298,7 @@ var FirecrawlApp = class {
|
|
|
1205
1298
|
if (jsonData?.extract?.schema) {
|
|
1206
1299
|
let schema = jsonData.extract.schema;
|
|
1207
1300
|
try {
|
|
1208
|
-
schema =
|
|
1301
|
+
schema = zodToJsonSchema4(schema);
|
|
1209
1302
|
} catch (error) {
|
|
1210
1303
|
}
|
|
1211
1304
|
jsonData = {
|
|
@@ -1219,7 +1312,7 @@ var FirecrawlApp = class {
|
|
|
1219
1312
|
if (jsonData?.jsonOptions?.schema) {
|
|
1220
1313
|
let schema = jsonData.jsonOptions.schema;
|
|
1221
1314
|
try {
|
|
1222
|
-
schema =
|
|
1315
|
+
schema = zodToJsonSchema4(schema);
|
|
1223
1316
|
} catch (error) {
|
|
1224
1317
|
}
|
|
1225
1318
|
jsonData = {
|
|
@@ -1282,7 +1375,7 @@ var FirecrawlApp = class {
|
|
|
1282
1375
|
if (jsonData?.scrapeOptions?.extract?.schema) {
|
|
1283
1376
|
let schema = jsonData.scrapeOptions.extract.schema;
|
|
1284
1377
|
try {
|
|
1285
|
-
schema =
|
|
1378
|
+
schema = zodToJsonSchema4(schema);
|
|
1286
1379
|
} catch (error) {
|
|
1287
1380
|
}
|
|
1288
1381
|
jsonData = {
|
|
@@ -1551,7 +1644,7 @@ var FirecrawlApp = class {
|
|
|
1551
1644
|
if (jsonData?.extract?.schema) {
|
|
1552
1645
|
let schema = jsonData.extract.schema;
|
|
1553
1646
|
try {
|
|
1554
|
-
schema =
|
|
1647
|
+
schema = zodToJsonSchema4(schema);
|
|
1555
1648
|
} catch (error) {
|
|
1556
1649
|
}
|
|
1557
1650
|
jsonData = {
|
|
@@ -1565,7 +1658,7 @@ var FirecrawlApp = class {
|
|
|
1565
1658
|
if (jsonData?.jsonOptions?.schema) {
|
|
1566
1659
|
let schema = jsonData.jsonOptions.schema;
|
|
1567
1660
|
try {
|
|
1568
|
-
schema =
|
|
1661
|
+
schema = zodToJsonSchema4(schema);
|
|
1569
1662
|
} catch (error) {
|
|
1570
1663
|
}
|
|
1571
1664
|
jsonData = {
|
|
@@ -1744,7 +1837,7 @@ var FirecrawlApp = class {
|
|
|
1744
1837
|
jsonSchema = void 0;
|
|
1745
1838
|
} else {
|
|
1746
1839
|
try {
|
|
1747
|
-
jsonSchema =
|
|
1840
|
+
jsonSchema = zodToJsonSchema4(params.schema);
|
|
1748
1841
|
} catch (_) {
|
|
1749
1842
|
jsonSchema = params.schema;
|
|
1750
1843
|
}
|
|
@@ -1808,7 +1901,7 @@ var FirecrawlApp = class {
|
|
|
1808
1901
|
jsonSchema = void 0;
|
|
1809
1902
|
} else {
|
|
1810
1903
|
try {
|
|
1811
|
-
jsonSchema =
|
|
1904
|
+
jsonSchema = zodToJsonSchema4(params.schema);
|
|
1812
1905
|
} catch (_) {
|
|
1813
1906
|
jsonSchema = params.schema;
|
|
1814
1907
|
}
|
|
@@ -2099,7 +2192,7 @@ var FirecrawlApp = class {
|
|
|
2099
2192
|
if (jsonData?.jsonOptions?.schema) {
|
|
2100
2193
|
let schema = jsonData.jsonOptions.schema;
|
|
2101
2194
|
try {
|
|
2102
|
-
schema =
|
|
2195
|
+
schema = zodToJsonSchema4(schema);
|
|
2103
2196
|
} catch (error) {
|
|
2104
2197
|
}
|
|
2105
2198
|
jsonData = {
|
package/package.json
CHANGED
package/src/v1/index.ts
CHANGED
package/src/v2/client.ts
CHANGED
|
@@ -19,6 +19,7 @@ import {
|
|
|
19
19
|
batchScrape as batchWaiter,
|
|
20
20
|
} from "./methods/batch";
|
|
21
21
|
import { startExtract, getExtractStatus, extract as extractWaiter } from "./methods/extract";
|
|
22
|
+
import { startAgent, getAgentStatus, cancelAgent, agent as agentWaiter } from "./methods/agent";
|
|
22
23
|
import { getConcurrency, getCreditUsage, getQueueStatus, getTokenUsage, getCreditUsageHistorical, getTokenUsageHistorical } from "./methods/usage";
|
|
23
24
|
import type {
|
|
24
25
|
Document,
|
|
@@ -34,6 +35,8 @@ import type {
|
|
|
34
35
|
BatchScrapeResponse,
|
|
35
36
|
BatchScrapeJob,
|
|
36
37
|
ExtractResponse,
|
|
38
|
+
AgentResponse,
|
|
39
|
+
AgentStatusResponse,
|
|
37
40
|
CrawlOptions,
|
|
38
41
|
BatchScrapeOptions,
|
|
39
42
|
PaginationConfig,
|
|
@@ -262,6 +265,39 @@ export class FirecrawlClient {
|
|
|
262
265
|
return extractWaiter(this.http, args);
|
|
263
266
|
}
|
|
264
267
|
|
|
268
|
+
// Agent
|
|
269
|
+
/**
|
|
270
|
+
* Start an agent job (async).
|
|
271
|
+
* @param args Agent request (urls, prompt, schema).
|
|
272
|
+
* @returns Job id or processing state.
|
|
273
|
+
*/
|
|
274
|
+
async startAgent(args: Parameters<typeof startAgent>[1]): Promise<AgentResponse> {
|
|
275
|
+
return startAgent(this.http, args);
|
|
276
|
+
}
|
|
277
|
+
/**
|
|
278
|
+
* Get agent job status/data.
|
|
279
|
+
* @param jobId Agent job id.
|
|
280
|
+
*/
|
|
281
|
+
async getAgentStatus(jobId: string): Promise<AgentStatusResponse> {
|
|
282
|
+
return getAgentStatus(this.http, jobId);
|
|
283
|
+
}
|
|
284
|
+
/**
|
|
285
|
+
* Convenience waiter: start an agent and poll until it finishes.
|
|
286
|
+
* @param args Agent request plus waiter controls (pollInterval, timeout seconds).
|
|
287
|
+
* @returns Final agent response.
|
|
288
|
+
*/
|
|
289
|
+
async agent(args: Parameters<typeof startAgent>[1] & { pollInterval?: number; timeout?: number }): Promise<AgentStatusResponse> {
|
|
290
|
+
return agentWaiter(this.http, args);
|
|
291
|
+
}
|
|
292
|
+
/**
|
|
293
|
+
* Cancel an agent job.
|
|
294
|
+
* @param jobId Agent job id.
|
|
295
|
+
* @returns True if cancelled.
|
|
296
|
+
*/
|
|
297
|
+
async cancelAgent(jobId: string): Promise<boolean> {
|
|
298
|
+
return cancelAgent(this.http, jobId);
|
|
299
|
+
}
|
|
300
|
+
|
|
265
301
|
// Usage
|
|
266
302
|
/** Current concurrency usage. */
|
|
267
303
|
async getConcurrency() {
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
import { type AgentResponse, type AgentStatusResponse } from "../types";
|
|
2
|
+
import { HttpClient } from "../utils/httpClient";
|
|
3
|
+
import { normalizeAxiosError, throwForBadResponse } from "../utils/errorHandler";
|
|
4
|
+
import { zodToJsonSchema } from "zod-to-json-schema";
|
|
5
|
+
import type { ZodTypeAny } from "zod";
|
|
6
|
+
|
|
7
|
+
function prepareAgentPayload(args: {
|
|
8
|
+
urls?: string[];
|
|
9
|
+
prompt: string;
|
|
10
|
+
schema?: Record<string, unknown> | ZodTypeAny;
|
|
11
|
+
integration?: string;
|
|
12
|
+
}): Record<string, unknown> {
|
|
13
|
+
const body: Record<string, unknown> = {};
|
|
14
|
+
if (args.urls) body.urls = args.urls;
|
|
15
|
+
body.prompt = args.prompt;
|
|
16
|
+
if (args.schema != null) {
|
|
17
|
+
const s: any = args.schema;
|
|
18
|
+
const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
|
|
19
|
+
body.schema = isZod ? zodToJsonSchema(s) : args.schema;
|
|
20
|
+
}
|
|
21
|
+
if (args.integration && args.integration.trim()) body.integration = args.integration.trim();
|
|
22
|
+
return body;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export async function startAgent(http: HttpClient, args: Parameters<typeof prepareAgentPayload>[0]): Promise<AgentResponse> {
|
|
26
|
+
const payload = prepareAgentPayload(args);
|
|
27
|
+
try {
|
|
28
|
+
const res = await http.post<AgentResponse>("/v2/agent", payload);
|
|
29
|
+
if (res.status !== 200) throwForBadResponse(res, "agent");
|
|
30
|
+
return res.data;
|
|
31
|
+
} catch (err: any) {
|
|
32
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "agent");
|
|
33
|
+
throw err;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
export async function getAgentStatus(http: HttpClient, jobId: string): Promise<AgentStatusResponse> {
|
|
38
|
+
try {
|
|
39
|
+
const res = await http.get<AgentStatusResponse>(`/v2/agent/${jobId}`);
|
|
40
|
+
if (res.status !== 200) throwForBadResponse(res, "agent status");
|
|
41
|
+
return res.data;
|
|
42
|
+
} catch (err: any) {
|
|
43
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "agent status");
|
|
44
|
+
throw err;
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export async function waitAgent(
|
|
49
|
+
http: HttpClient,
|
|
50
|
+
jobId: string,
|
|
51
|
+
pollInterval = 2,
|
|
52
|
+
timeout?: number
|
|
53
|
+
): Promise<AgentStatusResponse> {
|
|
54
|
+
const start = Date.now();
|
|
55
|
+
while (true) {
|
|
56
|
+
const status = await getAgentStatus(http, jobId);
|
|
57
|
+
if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
|
|
58
|
+
if (timeout != null && Date.now() - start > timeout * 1000) return status;
|
|
59
|
+
await new Promise((r) => setTimeout(r, Math.max(1000, pollInterval * 1000)));
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export async function agent(
|
|
64
|
+
http: HttpClient,
|
|
65
|
+
args: Parameters<typeof prepareAgentPayload>[0] & { pollInterval?: number; timeout?: number }
|
|
66
|
+
): Promise<AgentStatusResponse> {
|
|
67
|
+
const started = await startAgent(http, args);
|
|
68
|
+
const jobId = started.id;
|
|
69
|
+
if (!jobId) return started as unknown as AgentStatusResponse;
|
|
70
|
+
return waitAgent(http, jobId, args.pollInterval ?? 2, args.timeout);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
export async function cancelAgent(http: HttpClient, jobId: string): Promise<boolean> {
|
|
74
|
+
try {
|
|
75
|
+
const res = await http.delete<{ success: boolean }>(`/v2/agent/${jobId}`);
|
|
76
|
+
if (res.status !== 200) throwForBadResponse(res, "cancel agent");
|
|
77
|
+
return res.data?.success === true;
|
|
78
|
+
} catch (err: any) {
|
|
79
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "cancel agent");
|
|
80
|
+
throw err;
|
|
81
|
+
}
|
|
82
|
+
}
|
package/src/v2/types.ts
CHANGED
|
@@ -347,6 +347,7 @@ export interface DocumentMetadata {
|
|
|
347
347
|
scrapeId?: string;
|
|
348
348
|
numPages?: number;
|
|
349
349
|
contentType?: string;
|
|
350
|
+
timezone?: string;
|
|
350
351
|
proxyUsed?: 'basic' | 'stealth';
|
|
351
352
|
cacheState?: 'hit' | 'miss';
|
|
352
353
|
cachedAt?: string;
|
|
@@ -534,8 +535,23 @@ export interface ExtractResponse {
|
|
|
534
535
|
creditsUsed?: number;
|
|
535
536
|
}
|
|
536
537
|
|
|
538
|
+
/**
 * Response returned when an agent job is started (POST /v2/agent).
 *
 * NOTE(review): the `agent` waiter guards on a falsy `id` before polling,
 * which suggests error responses may omit it — confirm whether `id` should
 * be optional here.
 */
export interface AgentResponse {
  success: boolean;
  // Job id used to poll /v2/agent/{id}.
  id: string;
  error?: string;
}
|
|
543
|
+
|
|
544
|
+
export interface AgentStatusResponse {
|
|
545
|
+
success: boolean;
|
|
546
|
+
status: 'processing' | 'completed' | 'failed';
|
|
547
|
+
error?: string;
|
|
548
|
+
data?: unknown;
|
|
549
|
+
expiresAt: string;
|
|
550
|
+
creditsUsed?: number;
|
|
551
|
+
}
|
|
552
|
+
|
|
537
553
|
/**
 * Options selecting which agent model powers a request.
 */
export interface AgentOptions {
  model: 'FIRE-1' | 'v3-beta';
}
|
|
540
556
|
|
|
541
557
|
export interface ConcurrencyCheck {
|