@mendable/firecrawl-js 1.29.2 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +4 -2
- package/LICENSE +0 -0
- package/README.md +85 -78
- package/audit-ci.jsonc +4 -0
- package/dist/chunk-RDDU62K7.js +85 -0
- package/dist/index.cjs +1006 -41
- package/dist/index.d.cts +535 -11
- package/dist/index.d.ts +535 -11
- package/dist/index.js +996 -32
- package/dist/package-5MOU5FLU.js +4 -0
- package/dump.rdb +0 -0
- package/jest.config.js +0 -0
- package/package.json +6 -5
- package/src/__tests__/{v1/e2e_withAuth → e2e/v1}/index.test.ts +1 -0
- package/src/__tests__/e2e/v2/batch.test.ts +74 -0
- package/src/__tests__/e2e/v2/crawl.test.ts +182 -0
- package/src/__tests__/e2e/v2/extract.test.ts +70 -0
- package/src/__tests__/e2e/v2/map.test.ts +55 -0
- package/src/__tests__/e2e/v2/scrape.test.ts +130 -0
- package/src/__tests__/e2e/v2/search.test.ts +247 -0
- package/src/__tests__/e2e/v2/usage.test.ts +36 -0
- package/src/__tests__/e2e/v2/utils/idmux.ts +58 -0
- package/src/__tests__/e2e/v2/watcher.test.ts +96 -0
- package/src/__tests__/unit/v1/monitor-job-status-retry.test.ts +154 -0
- package/src/__tests__/unit/v2/errorHandler.test.ts +19 -0
- package/src/__tests__/unit/v2/scrape.unit.test.ts +11 -0
- package/src/__tests__/unit/v2/validation.test.ts +59 -0
- package/src/index.backup.ts +2146 -0
- package/src/index.ts +27 -2071
- package/src/v1/index.ts +2158 -0
- package/src/v2/client.ts +281 -0
- package/src/v2/methods/batch.ts +131 -0
- package/src/v2/methods/crawl.ts +160 -0
- package/src/v2/methods/extract.ts +86 -0
- package/src/v2/methods/map.ts +37 -0
- package/src/v2/methods/scrape.ts +26 -0
- package/src/v2/methods/search.ts +69 -0
- package/src/v2/methods/usage.ts +39 -0
- package/src/v2/types.ts +308 -0
- package/src/v2/utils/errorHandler.ts +18 -0
- package/src/v2/utils/getVersion.ts +14 -0
- package/src/v2/utils/httpClient.ts +101 -0
- package/src/v2/utils/validation.ts +50 -0
- package/src/v2/watcher.ts +159 -0
- package/tsconfig.json +2 -1
- package/tsup.config.ts +0 -0
- package/dist/package-E7ICGMY6.js +0 -110
package/dist/index.cjs
CHANGED
|
@@ -35,7 +35,7 @@ var require_package = __commonJS({
|
|
|
35
35
|
"package.json"(exports2, module2) {
|
|
36
36
|
module2.exports = {
|
|
37
37
|
name: "@mendable/firecrawl-js",
|
|
38
|
-
version: "
|
|
38
|
+
version: "3.0.1",
|
|
39
39
|
description: "JavaScript SDK for Firecrawl API",
|
|
40
40
|
main: "dist/index.js",
|
|
41
41
|
types: "dist/index.d.ts",
|
|
@@ -51,11 +51,12 @@ var require_package = __commonJS({
|
|
|
51
51
|
build: "tsup",
|
|
52
52
|
"build-and-publish": "npm run build && npm publish --access public",
|
|
53
53
|
"publish-beta": "npm run build && npm publish --access public --tag beta",
|
|
54
|
-
test: "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/
|
|
54
|
+
test: "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/e2e/v2/*.test.ts --detectOpenHandles",
|
|
55
|
+
"test:unit": "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/unit/v2/*.test.ts"
|
|
55
56
|
},
|
|
56
57
|
repository: {
|
|
57
58
|
type: "git",
|
|
58
|
-
url: "git+https://github.com/
|
|
59
|
+
url: "git+https://github.com/firecrawl/firecrawl.git"
|
|
59
60
|
},
|
|
60
61
|
author: "Mendable.ai",
|
|
61
62
|
license: "MIT",
|
|
@@ -66,9 +67,9 @@ var require_package = __commonJS({
|
|
|
66
67
|
"zod-to-json-schema": "^3.23.0"
|
|
67
68
|
},
|
|
68
69
|
bugs: {
|
|
69
|
-
url: "https://github.com/
|
|
70
|
+
url: "https://github.com/firecrawl/firecrawl/issues"
|
|
70
71
|
},
|
|
71
|
-
homepage: "https://github.com/
|
|
72
|
+
homepage: "https://github.com/firecrawl/firecrawl#readme",
|
|
72
73
|
devDependencies: {
|
|
73
74
|
"@jest/globals": "^30.0.5",
|
|
74
75
|
"@types/dotenv": "^8.2.0",
|
|
@@ -107,16 +108,917 @@ var require_package = __commonJS({
|
|
|
107
108
|
});
|
|
108
109
|
|
|
109
110
|
// src/index.ts
|
|
110
|
-
var
|
|
111
|
-
__export(
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
111
|
+
var src_exports = {};
|
|
112
|
+
__export(src_exports, {
|
|
113
|
+
Firecrawl: () => Firecrawl,
|
|
114
|
+
FirecrawlAppV1: () => FirecrawlApp,
|
|
115
|
+
FirecrawlClient: () => FirecrawlClient,
|
|
116
|
+
SdkError: () => SdkError,
|
|
117
|
+
default: () => src_default
|
|
115
118
|
});
|
|
116
|
-
module.exports = __toCommonJS(
|
|
119
|
+
module.exports = __toCommonJS(src_exports);
|
|
120
|
+
|
|
121
|
+
// src/v2/utils/httpClient.ts
|
|
117
122
|
var import_axios = __toESM(require("axios"), 1);
|
|
123
|
+
|
|
124
|
+
// src/v2/utils/getVersion.ts
|
|
125
|
+
function getVersion() {
|
|
126
|
+
try {
|
|
127
|
+
if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
|
|
128
|
+
return process.env.npm_package_version;
|
|
129
|
+
}
|
|
130
|
+
const pkg = require_package();
|
|
131
|
+
return pkg?.version || "3.x.x";
|
|
132
|
+
} catch {
|
|
133
|
+
return "3.x.x";
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
// src/v2/utils/httpClient.ts
|
|
138
|
+
var HttpClient = class {
|
|
139
|
+
instance;
|
|
140
|
+
apiKey;
|
|
141
|
+
apiUrl;
|
|
142
|
+
maxRetries;
|
|
143
|
+
backoffFactor;
|
|
144
|
+
constructor(options) {
|
|
145
|
+
this.apiKey = options.apiKey;
|
|
146
|
+
this.apiUrl = options.apiUrl.replace(/\/$/, "");
|
|
147
|
+
this.maxRetries = options.maxRetries ?? 3;
|
|
148
|
+
this.backoffFactor = options.backoffFactor ?? 0.5;
|
|
149
|
+
this.instance = import_axios.default.create({
|
|
150
|
+
baseURL: this.apiUrl,
|
|
151
|
+
timeout: options.timeoutMs ?? 6e4,
|
|
152
|
+
headers: {
|
|
153
|
+
"Content-Type": "application/json",
|
|
154
|
+
Authorization: `Bearer ${this.apiKey}`
|
|
155
|
+
},
|
|
156
|
+
transitional: { clarifyTimeoutError: true }
|
|
157
|
+
});
|
|
158
|
+
}
|
|
159
|
+
getApiUrl() {
|
|
160
|
+
return this.apiUrl;
|
|
161
|
+
}
|
|
162
|
+
getApiKey() {
|
|
163
|
+
return this.apiKey;
|
|
164
|
+
}
|
|
165
|
+
async request(config) {
|
|
166
|
+
const version = getVersion();
|
|
167
|
+
config.headers = {
|
|
168
|
+
...config.headers || {},
|
|
169
|
+
// origin header for GET/DELETE; for POST we also include in body below
|
|
170
|
+
origin: `js-sdk@${version}`
|
|
171
|
+
};
|
|
172
|
+
let lastError;
|
|
173
|
+
for (let attempt = 0; attempt < this.maxRetries; attempt++) {
|
|
174
|
+
try {
|
|
175
|
+
const cfg = { ...config };
|
|
176
|
+
if (cfg.method && ["post", "put", "patch"].includes(cfg.method.toLowerCase())) {
|
|
177
|
+
const data = cfg.data ?? {};
|
|
178
|
+
cfg.data = { ...data, origin: `js-sdk@${version}` };
|
|
179
|
+
}
|
|
180
|
+
const res = await this.instance.request(cfg);
|
|
181
|
+
if (res.status === 502 && attempt < this.maxRetries - 1) {
|
|
182
|
+
await this.sleep(this.backoffFactor * Math.pow(2, attempt));
|
|
183
|
+
continue;
|
|
184
|
+
}
|
|
185
|
+
return res;
|
|
186
|
+
} catch (err) {
|
|
187
|
+
lastError = err;
|
|
188
|
+
const status = err?.response?.status;
|
|
189
|
+
if (status === 502 && attempt < this.maxRetries - 1) {
|
|
190
|
+
await this.sleep(this.backoffFactor * Math.pow(2, attempt));
|
|
191
|
+
continue;
|
|
192
|
+
}
|
|
193
|
+
throw err;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
throw lastError ?? new Error("Unexpected HTTP client error");
|
|
197
|
+
}
|
|
198
|
+
sleep(seconds) {
|
|
199
|
+
return new Promise((r) => setTimeout(r, seconds * 1e3));
|
|
200
|
+
}
|
|
201
|
+
post(endpoint, body, headers) {
|
|
202
|
+
return this.request({ method: "post", url: endpoint, data: body, headers });
|
|
203
|
+
}
|
|
204
|
+
get(endpoint, headers) {
|
|
205
|
+
return this.request({ method: "get", url: endpoint, headers });
|
|
206
|
+
}
|
|
207
|
+
delete(endpoint, headers) {
|
|
208
|
+
return this.request({ method: "delete", url: endpoint, headers });
|
|
209
|
+
}
|
|
210
|
+
prepareHeaders(idempotencyKey) {
|
|
211
|
+
const headers = {};
|
|
212
|
+
if (idempotencyKey) headers["x-idempotency-key"] = idempotencyKey;
|
|
213
|
+
return headers;
|
|
214
|
+
}
|
|
215
|
+
};
|
|
216
|
+
|
|
217
|
+
// src/v2/types.ts
|
|
218
|
+
var SdkError = class extends Error {
|
|
219
|
+
status;
|
|
220
|
+
code;
|
|
221
|
+
details;
|
|
222
|
+
constructor(message, status, code, details) {
|
|
223
|
+
super(message);
|
|
224
|
+
this.name = "FirecrawlSdkError";
|
|
225
|
+
this.status = status;
|
|
226
|
+
this.code = code;
|
|
227
|
+
this.details = details;
|
|
228
|
+
}
|
|
229
|
+
};
|
|
230
|
+
|
|
231
|
+
// src/v2/utils/validation.ts
|
|
232
|
+
var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"), 1);
|
|
233
|
+
function ensureValidFormats(formats) {
|
|
234
|
+
if (!formats) return;
|
|
235
|
+
for (const fmt of formats) {
|
|
236
|
+
if (typeof fmt === "string") {
|
|
237
|
+
if (fmt === "json") {
|
|
238
|
+
throw new Error("json format must be an object with { type: 'json', prompt, schema }");
|
|
239
|
+
}
|
|
240
|
+
continue;
|
|
241
|
+
}
|
|
242
|
+
if (fmt.type === "json") {
|
|
243
|
+
const j = fmt;
|
|
244
|
+
if (!j.prompt && !j.schema) {
|
|
245
|
+
throw new Error("json format requires either 'prompt' or 'schema' (or both)");
|
|
246
|
+
}
|
|
247
|
+
const maybeSchema = j.schema;
|
|
248
|
+
const isZod = !!maybeSchema && (typeof maybeSchema.safeParse === "function" || typeof maybeSchema.parse === "function") && !!maybeSchema._def;
|
|
249
|
+
if (isZod) {
|
|
250
|
+
try {
|
|
251
|
+
j.schema = (0, import_zod_to_json_schema.default)(maybeSchema);
|
|
252
|
+
} catch {
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
continue;
|
|
256
|
+
}
|
|
257
|
+
if (fmt.type === "screenshot") {
|
|
258
|
+
const s = fmt;
|
|
259
|
+
if (s.quality != null && (typeof s.quality !== "number" || s.quality < 0)) {
|
|
260
|
+
throw new Error("screenshot.quality must be a non-negative number");
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
function ensureValidScrapeOptions(options) {
|
|
266
|
+
if (!options) return;
|
|
267
|
+
if (options.timeout != null && options.timeout <= 0) {
|
|
268
|
+
throw new Error("timeout must be positive");
|
|
269
|
+
}
|
|
270
|
+
if (options.waitFor != null && options.waitFor < 0) {
|
|
271
|
+
throw new Error("waitFor must be non-negative");
|
|
272
|
+
}
|
|
273
|
+
ensureValidFormats(options.formats);
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// src/v2/utils/errorHandler.ts
|
|
277
|
+
var import_axios2 = require("axios");
|
|
278
|
+
function throwForBadResponse(resp, action) {
|
|
279
|
+
const status = resp.status;
|
|
280
|
+
const body = resp.data || {};
|
|
281
|
+
const msg = body?.error || body?.message || `Request failed (${status}) while trying to ${action}`;
|
|
282
|
+
throw new SdkError(msg, status, void 0, body?.details);
|
|
283
|
+
}
|
|
284
|
+
function normalizeAxiosError(err, action) {
|
|
285
|
+
const status = err.response?.status;
|
|
286
|
+
const body = err.response?.data;
|
|
287
|
+
const message = body?.error || err.message || `Request failed${status ? ` (${status})` : ""} while trying to ${action}`;
|
|
288
|
+
const code = body?.code || err.code;
|
|
289
|
+
throw new SdkError(message, status, code, body?.details ?? body);
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
// src/v2/methods/scrape.ts
|
|
293
|
+
async function scrape(http, url, options) {
|
|
294
|
+
if (!url || !url.trim()) {
|
|
295
|
+
throw new Error("URL cannot be empty");
|
|
296
|
+
}
|
|
297
|
+
if (options) ensureValidScrapeOptions(options);
|
|
298
|
+
const payload = { url: url.trim() };
|
|
299
|
+
if (options) Object.assign(payload, options);
|
|
300
|
+
try {
|
|
301
|
+
const res = await http.post("/v2/scrape", payload);
|
|
302
|
+
if (res.status !== 200 || !res.data?.success) {
|
|
303
|
+
throwForBadResponse(res, "scrape");
|
|
304
|
+
}
|
|
305
|
+
return res.data.data || {};
|
|
306
|
+
} catch (err) {
|
|
307
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "scrape");
|
|
308
|
+
throw err;
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
// src/v2/methods/search.ts
|
|
313
|
+
function prepareSearchPayload(req) {
|
|
314
|
+
if (!req.query || !req.query.trim()) throw new Error("Query cannot be empty");
|
|
315
|
+
if (req.limit != null && req.limit <= 0) throw new Error("limit must be positive");
|
|
316
|
+
if (req.timeout != null && req.timeout <= 0) throw new Error("timeout must be positive");
|
|
317
|
+
const payload = {
|
|
318
|
+
query: req.query
|
|
319
|
+
};
|
|
320
|
+
if (req.sources) payload.sources = req.sources;
|
|
321
|
+
if (req.limit != null) payload.limit = req.limit;
|
|
322
|
+
if (req.tbs != null) payload.tbs = req.tbs;
|
|
323
|
+
if (req.location != null) payload.location = req.location;
|
|
324
|
+
if (req.ignoreInvalidURLs != null) payload.ignoreInvalidURLs = req.ignoreInvalidURLs;
|
|
325
|
+
if (req.timeout != null) payload.timeout = req.timeout;
|
|
326
|
+
if (req.scrapeOptions) {
|
|
327
|
+
ensureValidScrapeOptions(req.scrapeOptions);
|
|
328
|
+
payload.scrapeOptions = req.scrapeOptions;
|
|
329
|
+
}
|
|
330
|
+
return payload;
|
|
331
|
+
}
|
|
332
|
+
async function search(http, request) {
|
|
333
|
+
const payload = prepareSearchPayload(request);
|
|
334
|
+
try {
|
|
335
|
+
const res = await http.post("/v2/search", payload);
|
|
336
|
+
if (res.status !== 200 || !res.data?.success) {
|
|
337
|
+
throwForBadResponse(res, "search");
|
|
338
|
+
}
|
|
339
|
+
const data = res.data.data || {};
|
|
340
|
+
const out = {};
|
|
341
|
+
for (const key of Object.keys(data)) {
|
|
342
|
+
const arr = data[key];
|
|
343
|
+
if (Array.isArray(arr)) {
|
|
344
|
+
const results = [];
|
|
345
|
+
for (const item of arr) {
|
|
346
|
+
if (item && typeof item === "object") {
|
|
347
|
+
if ("markdown" in item || "html" in item || "rawHtml" in item || "links" in item || "screenshot" in item || "changeTracking" in item || "summary" in item || "json" in item) {
|
|
348
|
+
results.push(item);
|
|
349
|
+
} else {
|
|
350
|
+
results.push({ url: item.url, title: item.title, description: item.description });
|
|
351
|
+
}
|
|
352
|
+
} else if (typeof item === "string") {
|
|
353
|
+
results.push({ url: item });
|
|
354
|
+
}
|
|
355
|
+
}
|
|
356
|
+
out[key] = results;
|
|
357
|
+
}
|
|
358
|
+
}
|
|
359
|
+
return out;
|
|
360
|
+
} catch (err) {
|
|
361
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "search");
|
|
362
|
+
throw err;
|
|
363
|
+
}
|
|
364
|
+
}
|
|
365
|
+
|
|
366
|
+
// src/v2/methods/map.ts
|
|
367
|
+
function prepareMapPayload(url, options) {
|
|
368
|
+
if (!url || !url.trim()) throw new Error("URL cannot be empty");
|
|
369
|
+
const payload = { url: url.trim() };
|
|
370
|
+
if (options) {
|
|
371
|
+
if (options.sitemap != null) payload.sitemap = options.sitemap;
|
|
372
|
+
if (options.search != null) payload.search = options.search;
|
|
373
|
+
if (options.includeSubdomains != null) payload.includeSubdomains = options.includeSubdomains;
|
|
374
|
+
if (options.limit != null) payload.limit = options.limit;
|
|
375
|
+
if (options.timeout != null) payload.timeout = options.timeout;
|
|
376
|
+
}
|
|
377
|
+
return payload;
|
|
378
|
+
}
|
|
379
|
+
async function map(http, url, options) {
|
|
380
|
+
const payload = prepareMapPayload(url, options);
|
|
381
|
+
try {
|
|
382
|
+
const res = await http.post("/v2/map", payload);
|
|
383
|
+
if (res.status !== 200 || !res.data?.success) {
|
|
384
|
+
throwForBadResponse(res, "map");
|
|
385
|
+
}
|
|
386
|
+
const linksIn = res.data.links || [];
|
|
387
|
+
const links = [];
|
|
388
|
+
for (const item of linksIn) {
|
|
389
|
+
if (typeof item === "string") links.push({ url: item });
|
|
390
|
+
else if (item && typeof item === "object") links.push({ url: item.url, title: item.title, description: item.description });
|
|
391
|
+
}
|
|
392
|
+
return { links };
|
|
393
|
+
} catch (err) {
|
|
394
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "map");
|
|
395
|
+
throw err;
|
|
396
|
+
}
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
// src/v2/methods/crawl.ts
|
|
400
|
+
function prepareCrawlPayload(request) {
|
|
401
|
+
if (!request.url || !request.url.trim()) throw new Error("URL cannot be empty");
|
|
402
|
+
const data = { url: request.url.trim() };
|
|
403
|
+
if (request.prompt) data.prompt = request.prompt;
|
|
404
|
+
if (request.excludePaths) data.excludePaths = request.excludePaths;
|
|
405
|
+
if (request.includePaths) data.includePaths = request.includePaths;
|
|
406
|
+
if (request.maxDiscoveryDepth != null) data.maxDiscoveryDepth = request.maxDiscoveryDepth;
|
|
407
|
+
if (request.sitemap != null) data.sitemap = request.sitemap;
|
|
408
|
+
if (request.ignoreQueryParameters != null) data.ignoreQueryParameters = request.ignoreQueryParameters;
|
|
409
|
+
if (request.limit != null) data.limit = request.limit;
|
|
410
|
+
if (request.crawlEntireDomain != null) data.crawlEntireDomain = request.crawlEntireDomain;
|
|
411
|
+
if (request.allowExternalLinks != null) data.allowExternalLinks = request.allowExternalLinks;
|
|
412
|
+
if (request.allowSubdomains != null) data.allowSubdomains = request.allowSubdomains;
|
|
413
|
+
if (request.delay != null) data.delay = request.delay;
|
|
414
|
+
if (request.maxConcurrency != null) data.maxConcurrency = request.maxConcurrency;
|
|
415
|
+
if (request.webhook != null) data.webhook = request.webhook;
|
|
416
|
+
if (request.scrapeOptions) {
|
|
417
|
+
ensureValidScrapeOptions(request.scrapeOptions);
|
|
418
|
+
data.scrapeOptions = request.scrapeOptions;
|
|
419
|
+
}
|
|
420
|
+
if (request.zeroDataRetention != null) data.zeroDataRetention = request.zeroDataRetention;
|
|
421
|
+
return data;
|
|
422
|
+
}
|
|
423
|
+
async function startCrawl(http, request) {
|
|
424
|
+
const payload = prepareCrawlPayload(request);
|
|
425
|
+
try {
|
|
426
|
+
const res = await http.post("/v2/crawl", payload);
|
|
427
|
+
if (res.status !== 200 || !res.data?.success) {
|
|
428
|
+
throwForBadResponse(res, "start crawl");
|
|
429
|
+
}
|
|
430
|
+
return { id: res.data.id, url: res.data.url };
|
|
431
|
+
} catch (err) {
|
|
432
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "start crawl");
|
|
433
|
+
throw err;
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
async function getCrawlStatus(http, jobId) {
|
|
437
|
+
try {
|
|
438
|
+
const res = await http.get(`/v2/crawl/${jobId}`);
|
|
439
|
+
if (res.status !== 200 || !res.data?.success) {
|
|
440
|
+
throwForBadResponse(res, "get crawl status");
|
|
441
|
+
}
|
|
442
|
+
const body = res.data;
|
|
443
|
+
return {
|
|
444
|
+
status: body.status,
|
|
445
|
+
completed: body.completed ?? 0,
|
|
446
|
+
total: body.total ?? 0,
|
|
447
|
+
creditsUsed: body.creditsUsed,
|
|
448
|
+
expiresAt: body.expiresAt,
|
|
449
|
+
next: body.next ?? null,
|
|
450
|
+
data: body.data || []
|
|
451
|
+
};
|
|
452
|
+
} catch (err) {
|
|
453
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl status");
|
|
454
|
+
throw err;
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
async function cancelCrawl(http, jobId) {
|
|
458
|
+
try {
|
|
459
|
+
const res = await http.delete(`/v2/crawl/${jobId}`);
|
|
460
|
+
if (res.status !== 200) throwForBadResponse(res, "cancel crawl");
|
|
461
|
+
return res.data?.status === "cancelled";
|
|
462
|
+
} catch (err) {
|
|
463
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "cancel crawl");
|
|
464
|
+
throw err;
|
|
465
|
+
}
|
|
466
|
+
}
|
|
467
|
+
async function waitForCrawlCompletion(http, jobId, pollInterval = 2, timeout) {
|
|
468
|
+
const start = Date.now();
|
|
469
|
+
while (true) {
|
|
470
|
+
const status = await getCrawlStatus(http, jobId);
|
|
471
|
+
if (["completed", "failed", "cancelled"].includes(status.status)) return status;
|
|
472
|
+
if (timeout != null && Date.now() - start > timeout * 1e3) {
|
|
473
|
+
throw new Error(`Crawl job ${jobId} did not complete within ${timeout} seconds`);
|
|
474
|
+
}
|
|
475
|
+
await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
async function crawl(http, request, pollInterval = 2, timeout) {
|
|
479
|
+
const started = await startCrawl(http, request);
|
|
480
|
+
return waitForCrawlCompletion(http, started.id, pollInterval, timeout);
|
|
481
|
+
}
|
|
482
|
+
async function getCrawlErrors(http, crawlId) {
|
|
483
|
+
try {
|
|
484
|
+
const res = await http.get(`/v2/crawl/${crawlId}/errors`);
|
|
485
|
+
if (res.status !== 200) throwForBadResponse(res, "get crawl errors");
|
|
486
|
+
const payload = res.data?.data ?? res.data;
|
|
487
|
+
return { errors: payload.errors || [], robotsBlocked: payload.robotsBlocked || [] };
|
|
488
|
+
} catch (err) {
|
|
489
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl errors");
|
|
490
|
+
throw err;
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
async function getActiveCrawls(http) {
|
|
494
|
+
try {
|
|
495
|
+
const res = await http.get(`/v2/crawl/active`);
|
|
496
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get active crawls");
|
|
497
|
+
const crawlsIn = res.data?.crawls || [];
|
|
498
|
+
const crawls = crawlsIn.map((c) => ({ id: c.id, teamId: c.teamId ?? c.team_id, url: c.url, options: c.options ?? null }));
|
|
499
|
+
return { success: true, crawls };
|
|
500
|
+
} catch (err) {
|
|
501
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get active crawls");
|
|
502
|
+
throw err;
|
|
503
|
+
}
|
|
504
|
+
}
|
|
505
|
+
async function crawlParamsPreview(http, url, prompt) {
|
|
506
|
+
if (!url || !url.trim()) throw new Error("URL cannot be empty");
|
|
507
|
+
if (!prompt || !prompt.trim()) throw new Error("Prompt cannot be empty");
|
|
508
|
+
try {
|
|
509
|
+
const res = await http.post("/v2/crawl/params-preview", { url: url.trim(), prompt });
|
|
510
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "crawl params preview");
|
|
511
|
+
const data = res.data.data || {};
|
|
512
|
+
if (res.data.warning) data.warning = res.data.warning;
|
|
513
|
+
return data;
|
|
514
|
+
} catch (err) {
|
|
515
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "crawl params preview");
|
|
516
|
+
throw err;
|
|
517
|
+
}
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
// src/v2/methods/batch.ts
|
|
521
|
+
async function startBatchScrape(http, urls, {
|
|
522
|
+
options,
|
|
523
|
+
webhook,
|
|
524
|
+
appendToId,
|
|
525
|
+
ignoreInvalidURLs,
|
|
526
|
+
maxConcurrency,
|
|
527
|
+
zeroDataRetention,
|
|
528
|
+
integration,
|
|
529
|
+
idempotencyKey
|
|
530
|
+
} = {}) {
|
|
531
|
+
if (!Array.isArray(urls) || urls.length === 0) throw new Error("URLs list cannot be empty");
|
|
532
|
+
const payload = { urls };
|
|
533
|
+
if (options) {
|
|
534
|
+
ensureValidScrapeOptions(options);
|
|
535
|
+
Object.assign(payload, options);
|
|
536
|
+
}
|
|
537
|
+
if (webhook != null) payload.webhook = webhook;
|
|
538
|
+
if (appendToId != null) payload.appendToId = appendToId;
|
|
539
|
+
if (ignoreInvalidURLs != null) payload.ignoreInvalidURLs = ignoreInvalidURLs;
|
|
540
|
+
if (maxConcurrency != null) payload.maxConcurrency = maxConcurrency;
|
|
541
|
+
if (zeroDataRetention != null) payload.zeroDataRetention = zeroDataRetention;
|
|
542
|
+
if (integration != null) payload.integration = integration;
|
|
543
|
+
try {
|
|
544
|
+
const headers = http.prepareHeaders(idempotencyKey);
|
|
545
|
+
const res = await http.post("/v2/batch/scrape", payload, headers);
|
|
546
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "start batch scrape");
|
|
547
|
+
return { id: res.data.id, url: res.data.url, invalidURLs: res.data.invalidURLs || void 0 };
|
|
548
|
+
} catch (err) {
|
|
549
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "start batch scrape");
|
|
550
|
+
throw err;
|
|
551
|
+
}
|
|
552
|
+
}
|
|
553
|
+
async function getBatchScrapeStatus(http, jobId) {
|
|
554
|
+
try {
|
|
555
|
+
const res = await http.get(`/v2/batch/scrape/${jobId}`);
|
|
556
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get batch scrape status");
|
|
557
|
+
const body = res.data;
|
|
558
|
+
return {
|
|
559
|
+
status: body.status,
|
|
560
|
+
completed: body.completed ?? 0,
|
|
561
|
+
total: body.total ?? 0,
|
|
562
|
+
creditsUsed: body.creditsUsed,
|
|
563
|
+
expiresAt: body.expiresAt,
|
|
564
|
+
next: body.next ?? null,
|
|
565
|
+
data: body.data || []
|
|
566
|
+
};
|
|
567
|
+
} catch (err) {
|
|
568
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape status");
|
|
569
|
+
throw err;
|
|
570
|
+
}
|
|
571
|
+
}
|
|
572
|
+
async function cancelBatchScrape(http, jobId) {
|
|
573
|
+
try {
|
|
574
|
+
const res = await http.delete(`/v2/batch/scrape/${jobId}`);
|
|
575
|
+
if (res.status !== 200) throwForBadResponse(res, "cancel batch scrape");
|
|
576
|
+
return res.data?.status === "cancelled";
|
|
577
|
+
} catch (err) {
|
|
578
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "cancel batch scrape");
|
|
579
|
+
throw err;
|
|
580
|
+
}
|
|
581
|
+
}
|
|
582
|
+
async function getBatchScrapeErrors(http, jobId) {
|
|
583
|
+
try {
|
|
584
|
+
const res = await http.get(`/v2/batch/scrape/${jobId}/errors`);
|
|
585
|
+
if (res.status !== 200) throwForBadResponse(res, "get batch scrape errors");
|
|
586
|
+
const payload = res.data?.data ?? res.data;
|
|
587
|
+
return { errors: payload.errors || [], robotsBlocked: payload.robotsBlocked || [] };
|
|
588
|
+
} catch (err) {
|
|
589
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape errors");
|
|
590
|
+
throw err;
|
|
591
|
+
}
|
|
592
|
+
}
|
|
593
|
+
async function waitForBatchCompletion(http, jobId, pollInterval = 2, timeout) {
|
|
594
|
+
const start = Date.now();
|
|
595
|
+
while (true) {
|
|
596
|
+
const status = await getBatchScrapeStatus(http, jobId);
|
|
597
|
+
if (["completed", "failed", "cancelled"].includes(status.status)) return status;
|
|
598
|
+
if (timeout != null && Date.now() - start > timeout * 1e3) {
|
|
599
|
+
throw new Error(`Batch scrape job ${jobId} did not complete within ${timeout} seconds`);
|
|
600
|
+
}
|
|
601
|
+
await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
|
|
602
|
+
}
|
|
603
|
+
}
|
|
604
|
+
async function batchScrape(http, urls, opts = {}) {
|
|
605
|
+
const start = await startBatchScrape(http, urls, opts);
|
|
606
|
+
return waitForBatchCompletion(http, start.id, opts.pollInterval ?? 2, opts.timeout);
|
|
607
|
+
}
|
|
608
|
+
|
|
609
|
+
// src/v2/methods/extract.ts
|
|
610
|
+
var import_zod_to_json_schema2 = require("zod-to-json-schema");
|
|
611
|
+
function prepareExtractPayload(args) {
|
|
612
|
+
const body = {};
|
|
613
|
+
if (args.urls) body.urls = args.urls;
|
|
614
|
+
if (args.prompt != null) body.prompt = args.prompt;
|
|
615
|
+
if (args.schema != null) {
|
|
616
|
+
const s = args.schema;
|
|
617
|
+
const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
|
|
618
|
+
body.schema = isZod ? (0, import_zod_to_json_schema2.zodToJsonSchema)(s) : args.schema;
|
|
619
|
+
}
|
|
620
|
+
if (args.systemPrompt != null) body.systemPrompt = args.systemPrompt;
|
|
621
|
+
if (args.allowExternalLinks != null) body.allowExternalLinks = args.allowExternalLinks;
|
|
622
|
+
if (args.enableWebSearch != null) body.enableWebSearch = args.enableWebSearch;
|
|
623
|
+
if (args.showSources != null) body.showSources = args.showSources;
|
|
624
|
+
if (args.ignoreInvalidURLs != null) body.ignoreInvalidURLs = args.ignoreInvalidURLs;
|
|
625
|
+
if (args.scrapeOptions) {
|
|
626
|
+
ensureValidScrapeOptions(args.scrapeOptions);
|
|
627
|
+
body.scrapeOptions = args.scrapeOptions;
|
|
628
|
+
}
|
|
629
|
+
return body;
|
|
630
|
+
}
|
|
631
|
+
async function startExtract(http, args) {
|
|
632
|
+
const payload = prepareExtractPayload(args);
|
|
633
|
+
try {
|
|
634
|
+
const res = await http.post("/v2/extract", payload);
|
|
635
|
+
if (res.status !== 200) throwForBadResponse(res, "extract");
|
|
636
|
+
return res.data;
|
|
637
|
+
} catch (err) {
|
|
638
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "extract");
|
|
639
|
+
throw err;
|
|
640
|
+
}
|
|
641
|
+
}
|
|
642
|
+
async function getExtractStatus(http, jobId) {
|
|
643
|
+
try {
|
|
644
|
+
const res = await http.get(`/v2/extract/${jobId}`);
|
|
645
|
+
if (res.status !== 200) throwForBadResponse(res, "extract status");
|
|
646
|
+
return res.data;
|
|
647
|
+
} catch (err) {
|
|
648
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "extract status");
|
|
649
|
+
throw err;
|
|
650
|
+
}
|
|
651
|
+
}
|
|
652
|
+
async function waitExtract(http, jobId, pollInterval = 2, timeout) {
|
|
653
|
+
const start = Date.now();
|
|
654
|
+
while (true) {
|
|
655
|
+
const status = await getExtractStatus(http, jobId);
|
|
656
|
+
if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
|
|
657
|
+
if (timeout != null && Date.now() - start > timeout * 1e3) return status;
|
|
658
|
+
await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
|
|
659
|
+
}
|
|
660
|
+
}
|
|
661
|
+
async function extract(http, args) {
|
|
662
|
+
const started = await startExtract(http, args);
|
|
663
|
+
const jobId = started.id;
|
|
664
|
+
if (!jobId) return started;
|
|
665
|
+
return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
|
|
666
|
+
}
|
|
667
|
+
|
|
668
|
+
// src/v2/methods/usage.ts
|
|
669
|
+
async function getConcurrency(http) {
|
|
670
|
+
try {
|
|
671
|
+
const res = await http.get("/v2/concurrency-check");
|
|
672
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get concurrency");
|
|
673
|
+
const d = res.data.data || res.data;
|
|
674
|
+
return { concurrency: d.concurrency, maxConcurrency: d.maxConcurrency ?? d.max_concurrency };
|
|
675
|
+
} catch (err) {
|
|
676
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get concurrency");
|
|
677
|
+
throw err;
|
|
678
|
+
}
|
|
679
|
+
}
|
|
680
|
+
async function getCreditUsage(http) {
|
|
681
|
+
try {
|
|
682
|
+
const res = await http.get("/v2/team/credit-usage");
|
|
683
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get credit usage");
|
|
684
|
+
const d = res.data.data || res.data;
|
|
685
|
+
return { remainingCredits: d.remainingCredits ?? d.remaining_credits ?? 0 };
|
|
686
|
+
} catch (err) {
|
|
687
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get credit usage");
|
|
688
|
+
throw err;
|
|
689
|
+
}
|
|
690
|
+
}
|
|
691
|
+
async function getTokenUsage(http) {
|
|
692
|
+
try {
|
|
693
|
+
const res = await http.get("/v2/team/token-usage");
|
|
694
|
+
if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get token usage");
|
|
695
|
+
return res.data.data || res.data;
|
|
696
|
+
} catch (err) {
|
|
697
|
+
if (err?.isAxiosError) return normalizeAxiosError(err, "get token usage");
|
|
698
|
+
throw err;
|
|
699
|
+
}
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
// src/v2/watcher.ts
|
|
703
|
+
var import_events = require("events");
|
|
704
|
+
var Watcher = class extends import_events.EventEmitter {
  http;
  jobId;
  kind;
  pollInterval;
  timeout;
  ws;
  closed = false;
  /**
   * Watch a crawl or batch-scrape job, preferring WebSocket updates and
   * falling back to HTTP polling. Emits `document`, `snapshot`, `done`, `error`.
   * @param http Transport used for polling and for the API URL/key.
   * @param jobId Job id to watch.
   * @param opts `kind` ("crawl" | other = batch), `pollInterval` (seconds), `timeout` (seconds).
   */
  constructor(http, jobId, opts = {}) {
    super();
    this.http = http;
    this.jobId = jobId;
    this.kind = opts.kind ?? "crawl";
    this.pollInterval = opts.pollInterval ?? 2;
    this.timeout = opts.timeout;
  }
  /** Derive the ws:// (or wss://) endpoint for this job from the API base URL. */
  buildWsUrl() {
    const apiUrl = this.http.getApiUrl();
    const wsBase = apiUrl.replace(/^http/, "ws");
    const path = this.kind === "crawl" ? `/v2/crawl/${this.jobId}` : `/v2/batch/scrape/${this.jobId}`;
    return `${wsBase}${path}`;
  }
  /** Begin watching: try WebSocket first; if construction throws, poll over HTTP. */
  async start() {
    try {
      const url = this.buildWsUrl();
      // The API key rides in the WebSocket subprotocol slot for authentication.
      this.ws = new WebSocket(url, this.http.getApiKey());
      this.attachWsHandlers(this.ws);
    } catch {
      // No WebSocket available in this runtime: fall back to polling.
      void this.pollLoop();
    }
  }
  attachWsHandlers(ws) {
    // Fixed start time for timeout accounting (was `let`, never reassigned).
    const startTs = Date.now();
    const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
    ws.onmessage = (ev) => {
      try {
        const body = typeof ev.data === "string" ? JSON.parse(ev.data) : null;
        if (!body) return;
        const type = body.type;
        if (type === "error") {
          this.emit("error", { status: "failed", data: [], error: body.error, id: this.jobId });
          return;
        }
        if (type === "catchup") {
          // Server replays documents produced before we connected.
          const payload2 = body.data || {};
          this.emitDocuments(payload2.data || []);
          this.emitSnapshot(payload2);
          return;
        }
        if (type === "document") {
          const doc = body.data;
          if (doc) this.emit("document", doc);
          return;
        }
        if (type === "done") {
          this.emit("done", { status: "completed", data: [], id: this.jobId });
          this.close();
          return;
        }
        // Anything else with a status is treated as a plain snapshot frame.
        const payload = body.data || body;
        if (payload && payload.status) this.emitSnapshot(payload);
      } catch {
        // Malformed frame: ignore and keep listening.
      }
      // NOTE: the timeout is only checked when a message arrives, so a silent
      // connection can outlive it until the next frame.
      if (timeoutMs && Date.now() - startTs > timeoutMs) this.close();
    };
    ws.onerror = () => {
      this.emit("error", { status: "failed", data: [], error: "WebSocket error", id: this.jobId });
      this.close();
    };
    ws.onclose = () => {
      // Unexpected close (not initiated via close()): keep the watch alive by polling.
      if (!this.closed) void this.pollLoop();
    };
  }
  /** Re-emit caught-up documents, tagging each with the job id. */
  emitDocuments(docs) {
    for (const doc of docs) this.emit("document", { ...doc, id: this.jobId });
  }
  /** Emit a status snapshot; emit `done` and stop once the job reaches a terminal state. */
  emitSnapshot(payload) {
    const status = payload.status;
    const data = payload.data || [];
    // Crawl and batch snapshots share an identical shape, so no branch on
    // `kind` is needed (the original duplicated the same object literal per kind).
    const snap = {
      status,
      completed: payload.completed ?? 0,
      total: payload.total ?? 0,
      creditsUsed: payload.creditsUsed,
      expiresAt: payload.expiresAt,
      next: payload.next ?? null,
      data
    };
    this.emit("snapshot", snap);
    if (["completed", "failed", "cancelled"].includes(status)) {
      this.emit("done", { status, data, id: this.jobId });
      this.close();
    }
  }
  /** HTTP polling fallback: fetch status every pollInterval (min 1s) until terminal or timeout. */
  async pollLoop() {
    const startTs = Date.now();
    const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
    while (!this.closed) {
      try {
        const snap = this.kind === "crawl" ? await getCrawlStatus(this.http, this.jobId) : await getBatchScrapeStatus(this.http, this.jobId);
        this.emit("snapshot", snap);
        if (["completed", "failed", "cancelled"].includes(snap.status)) {
          this.emit("done", { status: snap.status, data: snap.data, id: this.jobId });
          this.close();
          break;
        }
      } catch {
        // Deliberately best-effort: transient polling errors retry on the next tick.
      }
      if (timeoutMs && Date.now() - startTs > timeoutMs) break;
      await new Promise((r) => setTimeout(r, Math.max(1e3, this.pollInterval * 1e3)));
    }
  }
  /** Stop watching and close the socket if open. Safe to call more than once. */
  close() {
    this.closed = true;
    if (this.ws && this.ws.close) this.ws.close();
  }
};
|
|
829
|
+
|
|
830
|
+
// src/v2/client.ts
|
|
831
|
+
var FirecrawlClient = class {
  // Shared transport for all v2 endpoint calls.
  http;
  /**
   * Create a v2 client.
   * @param options Transport configuration (API key, base URL, timeouts, retries).
   */
  constructor(options = {}) {
    const apiKey = options.apiKey ?? process.env.FIRECRAWL_API_KEY ?? "";
    // Trailing slash is stripped so endpoint paths can always be appended directly.
    const apiUrl = (options.apiUrl ?? process.env.FIRECRAWL_API_URL ?? "https://api.firecrawl.dev").replace(/\/$/, "");
    if (!apiKey) {
      throw new Error("API key is required. Set FIRECRAWL_API_KEY env or pass apiKey.");
    }
    this.http = new HttpClient({
      apiKey,
      apiUrl,
      timeoutMs: options.timeoutMs,
      maxRetries: options.maxRetries,
      backoffFactor: options.backoffFactor
    });
  }
  /**
   * Scrape a single URL.
   * @param url URL to scrape.
   * @param options Optional scrape configuration, forwarded to the v2 scrape endpoint.
   * @returns The scraped result from the scrape endpoint.
   */
  async scrape(url, options) {
    return scrape(this.http, url, options);
  }
  // Search
  /**
   * Search the web and optionally scrape each result.
   * @param query Search query string.
   * @param req Additional search options (sources, limit, scrapeOptions, etc.).
   * @returns Structured search results.
   */
  async search(query, req = {}) {
    return search(this.http, { query, ...req });
  }
  // Map
  /**
   * Map a site to discover URLs (sitemap-aware).
   * @param url Root URL to map.
   * @param options Mapping options (sitemap mode, includeSubdomains, limit, timeout).
   * @returns Discovered links.
   */
  async map(url, options) {
    return map(this.http, url, options);
  }
  // Crawl
  /**
   * Start a crawl job (async).
   * @param url Root URL to crawl.
   * @param req Crawl configuration (paths, limits, scrapeOptions, webhook, etc.).
   * @returns Job id and url.
   */
  async startCrawl(url, req = {}) {
    return startCrawl(this.http, { url, ...req });
  }
  /**
   * Get the status and partial data of a crawl job.
   * @param jobId Crawl job id.
   */
  async getCrawlStatus(jobId) {
    return getCrawlStatus(this.http, jobId);
  }
  /**
   * Cancel a crawl job.
   * @param jobId Crawl job id.
   * @returns True if cancelled.
   */
  async cancelCrawl(jobId) {
    return cancelCrawl(this.http, jobId);
  }
  /**
   * Convenience waiter: start a crawl and poll until it finishes.
   * @param url Root URL to crawl.
   * @param req Crawl configuration plus waiter controls (pollInterval, timeout seconds).
   * @returns Final job snapshot.
   */
  async crawl(url, req = {}) {
    return crawl(this.http, { url, ...req }, req.pollInterval, req.timeout);
  }
  /**
   * Retrieve crawl errors and robots.txt blocks.
   * @param crawlId Crawl job id.
   */
  async getCrawlErrors(crawlId) {
    return getCrawlErrors(this.http, crawlId);
  }
  /**
   * List active crawls for the authenticated team.
   */
  async getActiveCrawls() {
    return getActiveCrawls(this.http);
  }
  /**
   * Preview normalized crawl parameters produced by a natural-language prompt.
   * @param url Root URL.
   * @param prompt Natural-language instruction.
   */
  async crawlParamsPreview(url, prompt) {
    return crawlParamsPreview(this.http, url, prompt);
  }
  // Batch
  /**
   * Start a batch scrape job for multiple URLs (async).
   * @param urls URLs to scrape.
   * @param opts Batch options (scrape options, webhook, concurrency, idempotency key, etc.).
   * @returns Job id and url.
   */
  async startBatchScrape(urls, opts) {
    return startBatchScrape(this.http, urls, opts);
  }
  /**
   * Get the status and partial data of a batch scrape job.
   * @param jobId Batch job id.
   */
  async getBatchScrapeStatus(jobId) {
    return getBatchScrapeStatus(this.http, jobId);
  }
  /**
   * Retrieve batch scrape errors and robots.txt blocks.
   * @param jobId Batch job id.
   */
  async getBatchScrapeErrors(jobId) {
    return getBatchScrapeErrors(this.http, jobId);
  }
  /**
   * Cancel a batch scrape job.
   * @param jobId Batch job id.
   * @returns True if cancelled.
   */
  async cancelBatchScrape(jobId) {
    return cancelBatchScrape(this.http, jobId);
  }
  /**
   * Convenience waiter: start a batch scrape and poll until it finishes.
   * @param urls URLs to scrape.
   * @param opts Batch options plus waiter controls (pollInterval, timeout seconds).
   * @returns Final job snapshot.
   */
  async batchScrape(urls, opts) {
    return batchScrape(this.http, urls, opts);
  }
  // Extract
  /**
   * Start an extract job (async).
   * @param args Extraction request (urls, schema or prompt, flags).
   * @returns Job id or processing state.
   */
  async startExtract(args) {
    return startExtract(this.http, args);
  }
  /**
   * Get extract job status/data.
   * @param jobId Extract job id.
   */
  async getExtractStatus(jobId) {
    return getExtractStatus(this.http, jobId);
  }
  /**
   * Convenience waiter: start an extract and poll until it finishes.
   * @param args Extraction request plus waiter controls (pollInterval, timeout seconds).
   * @returns Final extract response.
   */
  async extract(args) {
    return extract(this.http, args);
  }
  // Usage
  /** Current concurrency usage. */
  async getConcurrency() {
    return getConcurrency(this.http);
  }
  /** Current credit usage. */
  async getCreditUsage() {
    return getCreditUsage(this.http);
  }
  /** Recent token usage. */
  async getTokenUsage() {
    return getTokenUsage(this.http);
  }
  // Watcher
  /**
   * Create a watcher for a crawl or batch job. Emits: `document`, `snapshot`, `done`, `error`.
   * @param jobId Job id.
   * @param opts Watcher options (kind, pollInterval, timeout seconds).
   */
  watcher(jobId, opts = {}) {
    return new Watcher(this.http, jobId, opts);
  }
};
|
|
1017
|
+
|
|
1018
|
+
// src/v1/index.ts
|
|
1019
|
+
var import_axios3 = __toESM(require("axios"), 1);
|
|
118
1020
|
var zt = require("zod");
|
|
119
|
-
var
|
|
1021
|
+
var import_zod_to_json_schema3 = require("zod-to-json-schema");
|
|
120
1022
|
|
|
121
1023
|
// node_modules/typescript-event-target/dist/index.mjs
|
|
122
1024
|
var e = class extends EventTarget {
|
|
@@ -125,7 +1027,7 @@ var e = class extends EventTarget {
|
|
|
125
1027
|
}
|
|
126
1028
|
};
|
|
127
1029
|
|
|
128
|
-
// src/index.ts
|
|
1030
|
+
// src/v1/index.ts
|
|
129
1031
|
var FirecrawlError = class extends Error {
|
|
130
1032
|
statusCode;
|
|
131
1033
|
details;
|
|
@@ -144,10 +1046,16 @@ var FirecrawlApp = class {
|
|
|
144
1046
|
}
|
|
145
1047
|
async getVersion() {
|
|
146
1048
|
try {
|
|
1049
|
+
if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
|
|
1050
|
+
return process.env.npm_package_version;
|
|
1051
|
+
}
|
|
147
1052
|
const packageJson = await Promise.resolve().then(() => __toESM(require_package(), 1));
|
|
148
1053
|
return packageJson.default.version;
|
|
149
1054
|
} catch (error) {
|
|
150
|
-
|
|
1055
|
+
const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
|
|
1056
|
+
if (!isTest) {
|
|
1057
|
+
console.error("Error getting version:", error);
|
|
1058
|
+
}
|
|
151
1059
|
return "1.25.1";
|
|
152
1060
|
}
|
|
153
1061
|
}
|
|
@@ -182,7 +1090,7 @@ var FirecrawlApp = class {
|
|
|
182
1090
|
if (jsonData?.extract?.schema) {
|
|
183
1091
|
let schema = jsonData.extract.schema;
|
|
184
1092
|
try {
|
|
185
|
-
schema = (0,
|
|
1093
|
+
schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
|
|
186
1094
|
} catch (error) {
|
|
187
1095
|
}
|
|
188
1096
|
jsonData = {
|
|
@@ -196,7 +1104,7 @@ var FirecrawlApp = class {
|
|
|
196
1104
|
if (jsonData?.jsonOptions?.schema) {
|
|
197
1105
|
let schema = jsonData.jsonOptions.schema;
|
|
198
1106
|
try {
|
|
199
|
-
schema = (0,
|
|
1107
|
+
schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
|
|
200
1108
|
} catch (error) {
|
|
201
1109
|
}
|
|
202
1110
|
jsonData = {
|
|
@@ -208,7 +1116,7 @@ var FirecrawlApp = class {
|
|
|
208
1116
|
};
|
|
209
1117
|
}
|
|
210
1118
|
try {
|
|
211
|
-
const response = await
|
|
1119
|
+
const response = await import_axios3.default.post(
|
|
212
1120
|
this.apiUrl + `/v1/scrape`,
|
|
213
1121
|
jsonData,
|
|
214
1122
|
{ headers, timeout: params?.timeout !== void 0 ? params.timeout + 5e3 : void 0 }
|
|
@@ -259,7 +1167,7 @@ var FirecrawlApp = class {
|
|
|
259
1167
|
if (jsonData?.scrapeOptions?.extract?.schema) {
|
|
260
1168
|
let schema = jsonData.scrapeOptions.extract.schema;
|
|
261
1169
|
try {
|
|
262
|
-
schema = (0,
|
|
1170
|
+
schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
|
|
263
1171
|
} catch (error) {
|
|
264
1172
|
}
|
|
265
1173
|
jsonData = {
|
|
@@ -480,9 +1388,9 @@ var FirecrawlApp = class {
|
|
|
480
1388
|
* @returns A CrawlWatcher instance to monitor the crawl job.
|
|
481
1389
|
*/
|
|
482
1390
|
async crawlUrlAndWatch(url, params, idempotencyKey) {
|
|
483
|
-
const
|
|
484
|
-
if (
|
|
485
|
-
const id =
|
|
1391
|
+
const crawl2 = await this.asyncCrawlUrl(url, params, idempotencyKey);
|
|
1392
|
+
if (crawl2.success && crawl2.id) {
|
|
1393
|
+
const id = crawl2.id;
|
|
486
1394
|
return new CrawlWatcher(id, this);
|
|
487
1395
|
}
|
|
488
1396
|
throw new FirecrawlError("Crawl job failed to start", 400);
|
|
@@ -528,7 +1436,7 @@ var FirecrawlApp = class {
|
|
|
528
1436
|
if (jsonData?.extract?.schema) {
|
|
529
1437
|
let schema = jsonData.extract.schema;
|
|
530
1438
|
try {
|
|
531
|
-
schema = (0,
|
|
1439
|
+
schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
|
|
532
1440
|
} catch (error) {
|
|
533
1441
|
}
|
|
534
1442
|
jsonData = {
|
|
@@ -542,7 +1450,7 @@ var FirecrawlApp = class {
|
|
|
542
1450
|
if (jsonData?.jsonOptions?.schema) {
|
|
543
1451
|
let schema = jsonData.jsonOptions.schema;
|
|
544
1452
|
try {
|
|
545
|
-
schema = (0,
|
|
1453
|
+
schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
|
|
546
1454
|
} catch (error) {
|
|
547
1455
|
}
|
|
548
1456
|
jsonData = {
|
|
@@ -605,9 +1513,9 @@ var FirecrawlApp = class {
|
|
|
605
1513
|
* @returns A CrawlWatcher instance to monitor the crawl job.
|
|
606
1514
|
*/
|
|
607
1515
|
async batchScrapeUrlsAndWatch(urls, params, idempotencyKey, webhook, ignoreInvalidURLs) {
|
|
608
|
-
const
|
|
609
|
-
if (
|
|
610
|
-
const id =
|
|
1516
|
+
const crawl2 = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey, webhook, ignoreInvalidURLs);
|
|
1517
|
+
if (crawl2.success && crawl2.id) {
|
|
1518
|
+
const id = crawl2.id;
|
|
611
1519
|
return new CrawlWatcher(id, this);
|
|
612
1520
|
}
|
|
613
1521
|
throw new FirecrawlError("Batch scrape job failed to start", 400);
|
|
@@ -721,7 +1629,7 @@ var FirecrawlApp = class {
|
|
|
721
1629
|
jsonSchema = void 0;
|
|
722
1630
|
} else {
|
|
723
1631
|
try {
|
|
724
|
-
jsonSchema = (0,
|
|
1632
|
+
jsonSchema = (0, import_zod_to_json_schema3.zodToJsonSchema)(params.schema);
|
|
725
1633
|
} catch (_) {
|
|
726
1634
|
jsonSchema = params.schema;
|
|
727
1635
|
}
|
|
@@ -785,7 +1693,7 @@ var FirecrawlApp = class {
|
|
|
785
1693
|
jsonSchema = void 0;
|
|
786
1694
|
} else {
|
|
787
1695
|
try {
|
|
788
|
-
jsonSchema = (0,
|
|
1696
|
+
jsonSchema = (0, import_zod_to_json_schema3.zodToJsonSchema)(params.schema);
|
|
789
1697
|
} catch (_) {
|
|
790
1698
|
jsonSchema = params.schema;
|
|
791
1699
|
}
|
|
@@ -849,7 +1757,7 @@ var FirecrawlApp = class {
|
|
|
849
1757
|
* @returns The response from the POST request.
|
|
850
1758
|
*/
|
|
851
1759
|
postRequest(url, data, headers) {
  // When the request body carries its own timeout, pad the HTTP timeout 5s past
  // it so the server-side timeout can fire first; otherwise apply no client timeout.
  return import_axios3.default.post(url, data, { headers, timeout: data?.timeout ? data.timeout + 5e3 : void 0 });
}
|
|
854
1762
|
/**
|
|
855
1763
|
* Sends a GET request to the specified URL.
|
|
@@ -859,9 +1767,9 @@ var FirecrawlApp = class {
|
|
|
859
1767
|
*/
|
|
860
1768
|
async getRequest(url, headers) {
|
|
861
1769
|
try {
|
|
862
|
-
return await
|
|
1770
|
+
return await import_axios3.default.get(url, { headers });
|
|
863
1771
|
} catch (error) {
|
|
864
|
-
if (error instanceof
|
|
1772
|
+
if (error instanceof import_axios3.AxiosError && error.response) {
|
|
865
1773
|
return error.response;
|
|
866
1774
|
} else {
|
|
867
1775
|
throw error;
|
|
@@ -876,9 +1784,9 @@ var FirecrawlApp = class {
|
|
|
876
1784
|
*/
|
|
877
1785
|
async deleteRequest(url, headers) {
|
|
878
1786
|
try {
|
|
879
|
-
return await
|
|
1787
|
+
return await import_axios3.default.delete(url, { headers });
|
|
880
1788
|
} catch (error) {
|
|
881
|
-
if (error instanceof
|
|
1789
|
+
if (error instanceof import_axios3.AxiosError && error.response) {
|
|
882
1790
|
return error.response;
|
|
883
1791
|
} else {
|
|
884
1792
|
throw error;
|
|
@@ -894,15 +1802,18 @@ var FirecrawlApp = class {
|
|
|
894
1802
|
* @returns The final job status or data.
|
|
895
1803
|
*/
|
|
896
1804
|
async monitorJobStatus(id, headers, checkInterval) {
|
|
897
|
-
|
|
898
|
-
|
|
899
|
-
|
|
1805
|
+
let failedTries = 0;
|
|
1806
|
+
let networkRetries = 0;
|
|
1807
|
+
const maxNetworkRetries = 3;
|
|
1808
|
+
while (true) {
|
|
1809
|
+
try {
|
|
900
1810
|
let statusResponse = await this.getRequest(
|
|
901
1811
|
`${this.apiUrl}/v1/crawl/${id}`,
|
|
902
1812
|
headers
|
|
903
1813
|
);
|
|
904
1814
|
if (statusResponse.status === 200) {
|
|
905
1815
|
failedTries = 0;
|
|
1816
|
+
networkRetries = 0;
|
|
906
1817
|
let statusData = statusResponse.data;
|
|
907
1818
|
if (statusData.status === "completed") {
|
|
908
1819
|
if ("data" in statusData) {
|
|
@@ -937,11 +1848,45 @@ var FirecrawlApp = class {
|
|
|
937
1848
|
this.handleError(statusResponse, "check crawl status");
|
|
938
1849
|
}
|
|
939
1850
|
}
|
|
1851
|
+
} catch (error) {
|
|
1852
|
+
if (this.isRetryableError(error) && networkRetries < maxNetworkRetries) {
|
|
1853
|
+
networkRetries++;
|
|
1854
|
+
const backoffDelay = Math.min(1e3 * Math.pow(2, networkRetries - 1), 1e4);
|
|
1855
|
+
await new Promise((resolve) => setTimeout(resolve, backoffDelay));
|
|
1856
|
+
continue;
|
|
1857
|
+
}
|
|
1858
|
+
throw new FirecrawlError(error, 500);
|
|
940
1859
|
}
|
|
941
|
-
} catch (error) {
|
|
942
|
-
throw new FirecrawlError(error, 500);
|
|
943
1860
|
}
|
|
944
1861
|
}
|
|
1862
|
+
/**
 * Determines if an error is retryable (transient network error).
 * @param error - The error to check.
 * @returns True if the error should be retried.
 */
isRetryableError(error) {
  if (error instanceof import_axios3.AxiosError) {
    if (!error.response) {
      // No HTTP response at all: decide purely from the socket-level error
      // code/message. Note this branch returns for response-less AxiosErrors.
      const code = error.code;
      const message = error.message?.toLowerCase() || "";
      return code === "ECONNRESET" || code === "ETIMEDOUT" || code === "ENOTFOUND" || code === "ECONNREFUSED" || message.includes("socket hang up") || message.includes("network error") || message.includes("timeout");
    }
    // Request Timeout / Gateway Timeout responses are treated as transient.
    if (error.response?.status === 408 || error.response?.status === 504) {
      return true;
    }
  }
  // Duck-typed fallback for non-Axios errors (or AxiosErrors that fell through)
  // that still carry code/message/response fields.
  if (error && typeof error === "object") {
    const code = error.code;
    const message = error.message?.toLowerCase() || "";
    if (code === "ECONNRESET" || code === "ETIMEDOUT" || code === "ENOTFOUND" || code === "ECONNREFUSED" || message.includes("socket hang up") || message.includes("network error") || message.includes("timeout")) {
      return true;
    }
    if (error.response?.status === 408 || error.response?.status === 504) {
      return true;
    }
  }
  return false;
}
|
|
945
1890
|
/**
|
|
946
1891
|
* Handles errors from API responses.
|
|
947
1892
|
* @param {AxiosResponse} response - The response from the API.
|
|
@@ -1039,7 +1984,7 @@ var FirecrawlApp = class {
|
|
|
1039
1984
|
if (jsonData?.jsonOptions?.schema) {
|
|
1040
1985
|
let schema = jsonData.jsonOptions.schema;
|
|
1041
1986
|
try {
|
|
1042
|
-
schema = (0,
|
|
1987
|
+
schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
|
|
1043
1988
|
} catch (error) {
|
|
1044
1989
|
}
|
|
1045
1990
|
jsonData = {
|
|
@@ -1394,8 +2339,28 @@ var CrawlWatcher = class extends e {
|
|
|
1394
2339
|
this.ws.close();
|
|
1395
2340
|
}
|
|
1396
2341
|
};
|
|
2342
|
+
|
|
2343
|
+
// src/index.ts
|
|
2344
|
+
var Firecrawl = class extends FirecrawlClient {
|
|
2345
|
+
/** Feature‑frozen v1 client (lazy). */
|
|
2346
|
+
_v1;
|
|
2347
|
+
_v1Opts;
|
|
2348
|
+
/** @param opts API credentials and base URL. */
|
|
2349
|
+
constructor(opts = {}) {
|
|
2350
|
+
super(opts);
|
|
2351
|
+
this._v1Opts = opts;
|
|
2352
|
+
}
|
|
2353
|
+
/** Access the legacy v1 client (instantiated on first access). */
|
|
2354
|
+
get v1() {
|
|
2355
|
+
if (!this._v1) this._v1 = new FirecrawlApp(this._v1Opts);
|
|
2356
|
+
return this._v1;
|
|
2357
|
+
}
|
|
2358
|
+
};
|
|
2359
|
+
var src_default = Firecrawl;
|
|
1397
2360
|
// Annotate the CommonJS export names for ESM import in node:
|
|
1398
2361
|
0 && (module.exports = {
|
|
1399
|
-
|
|
1400
|
-
|
|
2362
|
+
Firecrawl,
|
|
2363
|
+
FirecrawlAppV1,
|
|
2364
|
+
FirecrawlClient,
|
|
2365
|
+
SdkError
|
|
1401
2366
|
});
|