@mendable/firecrawl 1.29.2 → 3.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +4 -2
- package/README.md +85 -78
- package/audit-ci.jsonc +4 -0
- package/dist/chunk-OIZ6OKY4.js +85 -0
- package/dist/index.cjs +1002 -38
- package/dist/index.d.cts +530 -11
- package/dist/index.d.ts +530 -11
- package/dist/index.js +995 -32
- package/dist/package-V5IPFKBE.js +4 -0
- package/package.json +6 -5
- package/src/__tests__/{v1/e2e_withAuth → e2e/v1}/index.test.ts +1 -0
- package/src/__tests__/e2e/v2/batch.test.ts +74 -0
- package/src/__tests__/e2e/v2/crawl.test.ts +182 -0
- package/src/__tests__/e2e/v2/extract.test.ts +70 -0
- package/src/__tests__/e2e/v2/map.test.ts +55 -0
- package/src/__tests__/e2e/v2/scrape.test.ts +130 -0
- package/src/__tests__/e2e/v2/search.test.ts +247 -0
- package/src/__tests__/e2e/v2/usage.test.ts +36 -0
- package/src/__tests__/e2e/v2/utils/idmux.ts +58 -0
- package/src/__tests__/e2e/v2/watcher.test.ts +96 -0
- package/src/__tests__/unit/v1/monitor-job-status-retry.test.ts +154 -0
- package/src/__tests__/unit/v2/errorHandler.test.ts +19 -0
- package/src/__tests__/unit/v2/scrape.unit.test.ts +11 -0
- package/src/__tests__/unit/v2/validation.test.ts +59 -0
- package/src/index.backup.ts +2146 -0
- package/src/index.ts +27 -2071
- package/src/v1/index.ts +2158 -0
- package/src/v2/client.ts +283 -0
- package/src/v2/methods/batch.ts +119 -0
- package/src/v2/methods/crawl.ts +144 -0
- package/src/v2/methods/extract.ts +86 -0
- package/src/v2/methods/map.ts +37 -0
- package/src/v2/methods/scrape.ts +26 -0
- package/src/v2/methods/search.ts +69 -0
- package/src/v2/methods/usage.ts +39 -0
- package/src/v2/types.ts +337 -0
- package/src/v2/utils/errorHandler.ts +18 -0
- package/src/v2/utils/getVersion.ts +14 -0
- package/src/v2/utils/httpClient.ts +99 -0
- package/src/v2/utils/validation.ts +50 -0
- package/src/v2/watcher.ts +159 -0
- package/tsconfig.json +2 -1
- package/dist/package-E7ICGMY6.js +0 -110
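The headline change in 3.x is the new v2 client surface — Firecrawl (default export), FirecrawlClient, SdkError, and FirecrawlAppV1 for the legacy API — compiled into the dist bundles diffed below. The following is a minimal usage sketch assembled from the exports and method signatures visible in this diff; the "formats" option value and any response fields beyond those shown in the compiled code are assumptions.

import Firecrawl, { SdkError } from "@mendable/firecrawl-js";

// Reads FIRECRAWL_API_KEY from the environment if apiKey is omitted.
const firecrawl = new Firecrawl({ apiKey: process.env.FIRECRAWL_API_KEY });

async function main() {
  try {
    // v2 scrape: POST /v2/scrape; the document payload is returned directly.
    const doc = await firecrawl.scrape("https://example.com", { formats: ["markdown"] });
    console.log(doc.markdown);

    // v2 crawl: starts the job, then polls until completed/failed/cancelled.
    const job = await firecrawl.crawl("https://example.com", { limit: 10 });
    console.log(job.status, job.completed, job.total);
  } catch (err) {
    if (err instanceof SdkError) console.error(err.status, err.message);
    else throw err;
  }
}

main();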
package/dist/index.cjs
CHANGED
@@ -35,7 +35,7 @@ var require_package = __commonJS({
 "package.json"(exports2, module2) {
 module2.exports = {
 name: "@mendable/firecrawl-js",
-version: "
+version: "3.0.3",
 description: "JavaScript SDK for Firecrawl API",
 main: "dist/index.js",
 types: "dist/index.d.ts",
@@ -51,11 +51,12 @@ var require_package = __commonJS({
 build: "tsup",
 "build-and-publish": "npm run build && npm publish --access public",
 "publish-beta": "npm run build && npm publish --access public --tag beta",
-test: "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/
+test: "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/e2e/v2/*.test.ts --detectOpenHandles",
+"test:unit": "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/unit/v2/*.test.ts"
 },
 repository: {
 type: "git",
-url: "git+https://github.com/
+url: "git+https://github.com/firecrawl/firecrawl.git"
 },
 author: "Mendable.ai",
 license: "MIT",
@@ -66,9 +67,9 @@ var require_package = __commonJS({
 "zod-to-json-schema": "^3.23.0"
 },
 bugs: {
-url: "https://github.com/
+url: "https://github.com/firecrawl/firecrawl/issues"
 },
-homepage: "https://github.com/
+homepage: "https://github.com/firecrawl/firecrawl#readme",
 devDependencies: {
 "@jest/globals": "^30.0.5",
 "@types/dotenv": "^8.2.0",
@@ -109,14 +110,914 @@ var require_package = __commonJS({
 // src/index.ts
 var index_exports = {};
 __export(index_exports, {
-
-
-
+Firecrawl: () => Firecrawl,
+FirecrawlAppV1: () => FirecrawlApp,
+FirecrawlClient: () => FirecrawlClient,
+SdkError: () => SdkError,
+default: () => index_default
 });
 module.exports = __toCommonJS(index_exports);
+
+// src/v2/utils/httpClient.ts
 var import_axios = __toESM(require("axios"), 1);
+
+// src/v2/utils/getVersion.ts
+function getVersion() {
+try {
+if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
+return process.env.npm_package_version;
+}
+const pkg = require_package();
+return pkg?.version || "3.x.x";
+} catch {
+return "3.x.x";
+}
+}
+
+// src/v2/utils/httpClient.ts
+var HttpClient = class {
+instance;
+apiKey;
+apiUrl;
+maxRetries;
+backoffFactor;
+constructor(options) {
+this.apiKey = options.apiKey;
+this.apiUrl = options.apiUrl.replace(/\/$/, "");
+this.maxRetries = options.maxRetries ?? 3;
+this.backoffFactor = options.backoffFactor ?? 0.5;
+this.instance = import_axios.default.create({
+baseURL: this.apiUrl,
+timeout: options.timeoutMs ?? 6e4,
+headers: {
+"Content-Type": "application/json",
+Authorization: `Bearer ${this.apiKey}`
+},
+transitional: { clarifyTimeoutError: true }
+});
+}
+getApiUrl() {
+return this.apiUrl;
+}
+getApiKey() {
+return this.apiKey;
+}
+async request(config) {
+const version = getVersion();
+config.headers = {
+...config.headers || {}
+};
+let lastError;
+for (let attempt = 0; attempt < this.maxRetries; attempt++) {
+try {
+const cfg = { ...config };
+if (cfg.method && ["post", "put", "patch"].includes(cfg.method.toLowerCase())) {
+const data = cfg.data ?? {};
+cfg.data = { ...data, origin: `js-sdk@${version}` };
+}
+const res = await this.instance.request(cfg);
+if (res.status === 502 && attempt < this.maxRetries - 1) {
+await this.sleep(this.backoffFactor * Math.pow(2, attempt));
+continue;
+}
+return res;
+} catch (err) {
+lastError = err;
+const status = err?.response?.status;
+if (status === 502 && attempt < this.maxRetries - 1) {
+await this.sleep(this.backoffFactor * Math.pow(2, attempt));
+continue;
+}
+throw err;
+}
+}
+throw lastError ?? new Error("Unexpected HTTP client error");
+}
+sleep(seconds) {
+return new Promise((r) => setTimeout(r, seconds * 1e3));
+}
+post(endpoint, body, headers) {
+return this.request({ method: "post", url: endpoint, data: body, headers });
+}
+get(endpoint, headers) {
+return this.request({ method: "get", url: endpoint, headers });
+}
+delete(endpoint, headers) {
+return this.request({ method: "delete", url: endpoint, headers });
+}
+prepareHeaders(idempotencyKey) {
+const headers = {};
+if (idempotencyKey) headers["x-idempotency-key"] = idempotencyKey;
+return headers;
+}
+};
+
+// src/v2/types.ts
+var SdkError = class extends Error {
+status;
+code;
+details;
+constructor(message, status, code, details) {
+super(message);
+this.name = "FirecrawlSdkError";
+this.status = status;
+this.code = code;
+this.details = details;
+}
+};
+
+// src/v2/utils/validation.ts
+var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"), 1);
+function ensureValidFormats(formats) {
+if (!formats) return;
+for (const fmt of formats) {
+if (typeof fmt === "string") {
+if (fmt === "json") {
+throw new Error("json format must be an object with { type: 'json', prompt, schema }");
+}
+continue;
+}
+if (fmt.type === "json") {
+const j = fmt;
+if (!j.prompt && !j.schema) {
+throw new Error("json format requires either 'prompt' or 'schema' (or both)");
+}
+const maybeSchema = j.schema;
+const isZod = !!maybeSchema && (typeof maybeSchema.safeParse === "function" || typeof maybeSchema.parse === "function") && !!maybeSchema._def;
+if (isZod) {
+try {
+j.schema = (0, import_zod_to_json_schema.default)(maybeSchema);
+} catch {
+}
+}
+continue;
+}
+if (fmt.type === "screenshot") {
+const s = fmt;
+if (s.quality != null && (typeof s.quality !== "number" || s.quality < 0)) {
+throw new Error("screenshot.quality must be a non-negative number");
+}
+}
+}
+}
+function ensureValidScrapeOptions(options) {
+if (!options) return;
+if (options.timeout != null && options.timeout <= 0) {
+throw new Error("timeout must be positive");
+}
+if (options.waitFor != null && options.waitFor < 0) {
+throw new Error("waitFor must be non-negative");
+}
+ensureValidFormats(options.formats);
+}
+
+// src/v2/utils/errorHandler.ts
+var import_axios2 = require("axios");
+function throwForBadResponse(resp, action) {
+const status = resp.status;
+const body = resp.data || {};
+const msg = body?.error || body?.message || `Request failed (${status}) while trying to ${action}`;
+throw new SdkError(msg, status, void 0, body?.details);
+}
+function normalizeAxiosError(err, action) {
+const status = err.response?.status;
+const body = err.response?.data;
+const message = body?.error || err.message || `Request failed${status ? ` (${status})` : ""} while trying to ${action}`;
+const code = body?.code || err.code;
+throw new SdkError(message, status, code, body?.details ?? body);
+}
+
+// src/v2/methods/scrape.ts
+async function scrape(http, url, options) {
+if (!url || !url.trim()) {
+throw new Error("URL cannot be empty");
+}
+if (options) ensureValidScrapeOptions(options);
+const payload = { url: url.trim() };
+if (options) Object.assign(payload, options);
+try {
+const res = await http.post("/v2/scrape", payload);
+if (res.status !== 200 || !res.data?.success) {
+throwForBadResponse(res, "scrape");
+}
+return res.data.data || {};
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "scrape");
+throw err;
+}
+}
+
+// src/v2/methods/search.ts
+function prepareSearchPayload(req) {
+if (!req.query || !req.query.trim()) throw new Error("Query cannot be empty");
+if (req.limit != null && req.limit <= 0) throw new Error("limit must be positive");
+if (req.timeout != null && req.timeout <= 0) throw new Error("timeout must be positive");
+const payload = {
+query: req.query
+};
+if (req.sources) payload.sources = req.sources;
+if (req.limit != null) payload.limit = req.limit;
+if (req.tbs != null) payload.tbs = req.tbs;
+if (req.location != null) payload.location = req.location;
+if (req.ignoreInvalidURLs != null) payload.ignoreInvalidURLs = req.ignoreInvalidURLs;
+if (req.timeout != null) payload.timeout = req.timeout;
+if (req.scrapeOptions) {
+ensureValidScrapeOptions(req.scrapeOptions);
+payload.scrapeOptions = req.scrapeOptions;
+}
+return payload;
+}
+async function search(http, request) {
+const payload = prepareSearchPayload(request);
+try {
+const res = await http.post("/v2/search", payload);
+if (res.status !== 200 || !res.data?.success) {
+throwForBadResponse(res, "search");
+}
+const data = res.data.data || {};
+const out = {};
+for (const key of Object.keys(data)) {
+const arr = data[key];
+if (Array.isArray(arr)) {
+const results = [];
+for (const item of arr) {
+if (item && typeof item === "object") {
+if ("markdown" in item || "html" in item || "rawHtml" in item || "links" in item || "screenshot" in item || "changeTracking" in item || "summary" in item || "json" in item) {
+results.push(item);
+} else {
+results.push({ url: item.url, title: item.title, description: item.description });
+}
+} else if (typeof item === "string") {
+results.push({ url: item });
+}
+}
+out[key] = results;
+}
+}
+return out;
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "search");
+throw err;
+}
+}
+
+// src/v2/methods/map.ts
+function prepareMapPayload(url, options) {
+if (!url || !url.trim()) throw new Error("URL cannot be empty");
+const payload = { url: url.trim() };
+if (options) {
+if (options.sitemap != null) payload.sitemap = options.sitemap;
+if (options.search != null) payload.search = options.search;
+if (options.includeSubdomains != null) payload.includeSubdomains = options.includeSubdomains;
+if (options.limit != null) payload.limit = options.limit;
+if (options.timeout != null) payload.timeout = options.timeout;
+}
+return payload;
+}
+async function map(http, url, options) {
+const payload = prepareMapPayload(url, options);
+try {
+const res = await http.post("/v2/map", payload);
+if (res.status !== 200 || !res.data?.success) {
+throwForBadResponse(res, "map");
+}
+const linksIn = res.data.links || [];
+const links = [];
+for (const item of linksIn) {
+if (typeof item === "string") links.push({ url: item });
+else if (item && typeof item === "object") links.push({ url: item.url, title: item.title, description: item.description });
+}
+return { links };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "map");
+throw err;
+}
+}
+
+// src/v2/methods/crawl.ts
+function prepareCrawlPayload(request) {
+if (!request.url || !request.url.trim()) throw new Error("URL cannot be empty");
+const data = { url: request.url.trim() };
+if (request.prompt) data.prompt = request.prompt;
+if (request.excludePaths) data.excludePaths = request.excludePaths;
+if (request.includePaths) data.includePaths = request.includePaths;
+if (request.maxDiscoveryDepth != null) data.maxDiscoveryDepth = request.maxDiscoveryDepth;
+if (request.sitemap != null) data.sitemap = request.sitemap;
+if (request.ignoreQueryParameters != null) data.ignoreQueryParameters = request.ignoreQueryParameters;
+if (request.limit != null) data.limit = request.limit;
+if (request.crawlEntireDomain != null) data.crawlEntireDomain = request.crawlEntireDomain;
+if (request.allowExternalLinks != null) data.allowExternalLinks = request.allowExternalLinks;
+if (request.allowSubdomains != null) data.allowSubdomains = request.allowSubdomains;
+if (request.delay != null) data.delay = request.delay;
+if (request.maxConcurrency != null) data.maxConcurrency = request.maxConcurrency;
+if (request.webhook != null) data.webhook = request.webhook;
+if (request.scrapeOptions) {
+ensureValidScrapeOptions(request.scrapeOptions);
+data.scrapeOptions = request.scrapeOptions;
+}
+if (request.zeroDataRetention != null) data.zeroDataRetention = request.zeroDataRetention;
+return data;
+}
+async function startCrawl(http, request) {
+const payload = prepareCrawlPayload(request);
+try {
+const res = await http.post("/v2/crawl", payload);
+if (res.status !== 200 || !res.data?.success) {
+throwForBadResponse(res, "start crawl");
+}
+return { id: res.data.id, url: res.data.url };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "start crawl");
+throw err;
+}
+}
+async function getCrawlStatus(http, jobId) {
+try {
+const res = await http.get(`/v2/crawl/${jobId}`);
+if (res.status !== 200 || !res.data?.success) {
+throwForBadResponse(res, "get crawl status");
+}
+const body = res.data;
+return {
+status: body.status,
+completed: body.completed ?? 0,
+total: body.total ?? 0,
+creditsUsed: body.creditsUsed,
+expiresAt: body.expiresAt,
+next: body.next ?? null,
+data: body.data || []
+};
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl status");
+throw err;
+}
+}
+async function cancelCrawl(http, jobId) {
+try {
+const res = await http.delete(`/v2/crawl/${jobId}`);
+if (res.status !== 200) throwForBadResponse(res, "cancel crawl");
+return res.data?.status === "cancelled";
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "cancel crawl");
+throw err;
+}
+}
+async function waitForCrawlCompletion(http, jobId, pollInterval = 2, timeout) {
+const start = Date.now();
+while (true) {
+const status = await getCrawlStatus(http, jobId);
+if (["completed", "failed", "cancelled"].includes(status.status)) return status;
+if (timeout != null && Date.now() - start > timeout * 1e3) {
+throw new Error(`Crawl job ${jobId} did not complete within ${timeout} seconds`);
+}
+await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+}
+}
+async function crawl(http, request, pollInterval = 2, timeout) {
+const started = await startCrawl(http, request);
+return waitForCrawlCompletion(http, started.id, pollInterval, timeout);
+}
+async function getCrawlErrors(http, crawlId) {
+try {
+const res = await http.get(`/v2/crawl/${crawlId}/errors`);
+if (res.status !== 200) throwForBadResponse(res, "get crawl errors");
+const payload = res.data?.data ?? res.data;
+return { errors: payload.errors || [], robotsBlocked: payload.robotsBlocked || [] };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl errors");
+throw err;
+}
+}
+async function getActiveCrawls(http) {
+try {
+const res = await http.get(`/v2/crawl/active`);
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get active crawls");
+const crawlsIn = res.data?.crawls || [];
+const crawls = crawlsIn.map((c) => ({ id: c.id, teamId: c.teamId ?? c.team_id, url: c.url, options: c.options ?? null }));
+return { success: true, crawls };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get active crawls");
+throw err;
+}
+}
+async function crawlParamsPreview(http, url, prompt) {
+if (!url || !url.trim()) throw new Error("URL cannot be empty");
+if (!prompt || !prompt.trim()) throw new Error("Prompt cannot be empty");
+try {
+const res = await http.post("/v2/crawl/params-preview", { url: url.trim(), prompt });
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "crawl params preview");
+const data = res.data.data || {};
+if (res.data.warning) data.warning = res.data.warning;
+return data;
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "crawl params preview");
+throw err;
+}
+}
+
+// src/v2/methods/batch.ts
+async function startBatchScrape(http, urls, {
+options,
+webhook,
+appendToId,
+ignoreInvalidURLs,
+maxConcurrency,
+zeroDataRetention,
+integration,
+idempotencyKey
+} = {}) {
+if (!Array.isArray(urls) || urls.length === 0) throw new Error("URLs list cannot be empty");
+const payload = { urls };
+if (options) {
+ensureValidScrapeOptions(options);
+Object.assign(payload, options);
+}
+if (webhook != null) payload.webhook = webhook;
+if (appendToId != null) payload.appendToId = appendToId;
+if (ignoreInvalidURLs != null) payload.ignoreInvalidURLs = ignoreInvalidURLs;
+if (maxConcurrency != null) payload.maxConcurrency = maxConcurrency;
+if (zeroDataRetention != null) payload.zeroDataRetention = zeroDataRetention;
+if (integration != null) payload.integration = integration;
+try {
+const headers = http.prepareHeaders(idempotencyKey);
+const res = await http.post("/v2/batch/scrape", payload, headers);
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "start batch scrape");
+return { id: res.data.id, url: res.data.url, invalidURLs: res.data.invalidURLs || void 0 };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "start batch scrape");
+throw err;
+}
+}
+async function getBatchScrapeStatus(http, jobId) {
+try {
+const res = await http.get(`/v2/batch/scrape/${jobId}`);
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get batch scrape status");
+const body = res.data;
+return {
+status: body.status,
+completed: body.completed ?? 0,
+total: body.total ?? 0,
+creditsUsed: body.creditsUsed,
+expiresAt: body.expiresAt,
+next: body.next ?? null,
+data: body.data || []
+};
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape status");
+throw err;
+}
+}
+async function cancelBatchScrape(http, jobId) {
+try {
+const res = await http.delete(`/v2/batch/scrape/${jobId}`);
+if (res.status !== 200) throwForBadResponse(res, "cancel batch scrape");
+return res.data?.status === "cancelled";
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "cancel batch scrape");
+throw err;
+}
+}
+async function getBatchScrapeErrors(http, jobId) {
+try {
+const res = await http.get(`/v2/batch/scrape/${jobId}/errors`);
+if (res.status !== 200) throwForBadResponse(res, "get batch scrape errors");
+const payload = res.data?.data ?? res.data;
+return { errors: payload.errors || [], robotsBlocked: payload.robotsBlocked || [] };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape errors");
+throw err;
+}
+}
+async function waitForBatchCompletion(http, jobId, pollInterval = 2, timeout) {
+const start = Date.now();
+while (true) {
+const status = await getBatchScrapeStatus(http, jobId);
+if (["completed", "failed", "cancelled"].includes(status.status)) return status;
+if (timeout != null && Date.now() - start > timeout * 1e3) {
+throw new Error(`Batch scrape job ${jobId} did not complete within ${timeout} seconds`);
+}
+await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+}
+}
+async function batchScrape(http, urls, opts = {}) {
+const start = await startBatchScrape(http, urls, opts);
+return waitForBatchCompletion(http, start.id, opts.pollInterval ?? 2, opts.timeout);
+}
+
+// src/v2/methods/extract.ts
+var import_zod_to_json_schema2 = require("zod-to-json-schema");
+function prepareExtractPayload(args) {
+const body = {};
+if (args.urls) body.urls = args.urls;
+if (args.prompt != null) body.prompt = args.prompt;
+if (args.schema != null) {
+const s = args.schema;
+const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
+body.schema = isZod ? (0, import_zod_to_json_schema2.zodToJsonSchema)(s) : args.schema;
+}
+if (args.systemPrompt != null) body.systemPrompt = args.systemPrompt;
+if (args.allowExternalLinks != null) body.allowExternalLinks = args.allowExternalLinks;
+if (args.enableWebSearch != null) body.enableWebSearch = args.enableWebSearch;
+if (args.showSources != null) body.showSources = args.showSources;
+if (args.ignoreInvalidURLs != null) body.ignoreInvalidURLs = args.ignoreInvalidURLs;
+if (args.scrapeOptions) {
+ensureValidScrapeOptions(args.scrapeOptions);
+body.scrapeOptions = args.scrapeOptions;
+}
+return body;
+}
+async function startExtract(http, args) {
+const payload = prepareExtractPayload(args);
+try {
+const res = await http.post("/v2/extract", payload);
+if (res.status !== 200) throwForBadResponse(res, "extract");
+return res.data;
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "extract");
+throw err;
+}
+}
+async function getExtractStatus(http, jobId) {
+try {
+const res = await http.get(`/v2/extract/${jobId}`);
+if (res.status !== 200) throwForBadResponse(res, "extract status");
+return res.data;
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "extract status");
+throw err;
+}
+}
+async function waitExtract(http, jobId, pollInterval = 2, timeout) {
+const start = Date.now();
+while (true) {
+const status = await getExtractStatus(http, jobId);
+if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
+if (timeout != null && Date.now() - start > timeout * 1e3) return status;
+await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+}
+}
+async function extract(http, args) {
+const started = await startExtract(http, args);
+const jobId = started.id;
+if (!jobId) return started;
+return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
+}
+
+// src/v2/methods/usage.ts
+async function getConcurrency(http) {
+try {
+const res = await http.get("/v2/concurrency-check");
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get concurrency");
+const d = res.data.data || res.data;
+return { concurrency: d.concurrency, maxConcurrency: d.maxConcurrency ?? d.max_concurrency };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get concurrency");
+throw err;
+}
+}
+async function getCreditUsage(http) {
+try {
+const res = await http.get("/v2/team/credit-usage");
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get credit usage");
+const d = res.data.data || res.data;
+return { remainingCredits: d.remainingCredits ?? d.remaining_credits ?? 0 };
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get credit usage");
+throw err;
+}
+}
+async function getTokenUsage(http) {
+try {
+const res = await http.get("/v2/team/token-usage");
+if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get token usage");
+return res.data.data || res.data;
+} catch (err) {
+if (err?.isAxiosError) return normalizeAxiosError(err, "get token usage");
+throw err;
+}
+}
+
+// src/v2/watcher.ts
+var import_events = require("events");
+var Watcher = class extends import_events.EventEmitter {
+http;
+jobId;
+kind;
+pollInterval;
+timeout;
+ws;
+closed = false;
+constructor(http, jobId, opts = {}) {
+super();
+this.http = http;
+this.jobId = jobId;
+this.kind = opts.kind ?? "crawl";
+this.pollInterval = opts.pollInterval ?? 2;
+this.timeout = opts.timeout;
+}
+buildWsUrl() {
+const apiUrl = this.http.getApiUrl();
+const wsBase = apiUrl.replace(/^http/, "ws");
+const path = this.kind === "crawl" ? `/v2/crawl/${this.jobId}` : `/v2/batch/scrape/${this.jobId}`;
+return `${wsBase}${path}`;
+}
+async start() {
+try {
+const url = this.buildWsUrl();
+this.ws = new WebSocket(url, this.http.getApiKey());
+this.attachWsHandlers(this.ws);
+} catch {
+this.pollLoop();
+}
+}
+attachWsHandlers(ws) {
+let startTs = Date.now();
+const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
+ws.onmessage = (ev) => {
+try {
+const body = typeof ev.data === "string" ? JSON.parse(ev.data) : null;
+if (!body) return;
+const type = body.type;
+if (type === "error") {
+this.emit("error", { status: "failed", data: [], error: body.error, id: this.jobId });
+return;
+}
+if (type === "catchup") {
+const payload2 = body.data || {};
+this.emitDocuments(payload2.data || []);
+this.emitSnapshot(payload2);
+return;
+}
+if (type === "document") {
+const doc = body.data;
+if (doc) this.emit("document", doc);
+return;
+}
+if (type === "done") {
+this.emit("done", { status: "completed", data: [], id: this.jobId });
+this.close();
+return;
+}
+const payload = body.data || body;
+if (payload && payload.status) this.emitSnapshot(payload);
+} catch {
+}
+if (timeoutMs && Date.now() - startTs > timeoutMs) this.close();
+};
+ws.onerror = () => {
+this.emit("error", { status: "failed", data: [], error: "WebSocket error", id: this.jobId });
+this.close();
+};
+ws.onclose = () => {
+if (!this.closed) this.pollLoop();
+};
+}
+emitDocuments(docs) {
+for (const doc of docs) this.emit("document", { ...doc, id: this.jobId });
+}
+emitSnapshot(payload) {
+const status = payload.status;
+const data = payload.data || [];
+const snap = this.kind === "crawl" ? {
+status,
+completed: payload.completed ?? 0,
+total: payload.total ?? 0,
+creditsUsed: payload.creditsUsed,
+expiresAt: payload.expiresAt,
+next: payload.next ?? null,
+data
+} : {
+status,
+completed: payload.completed ?? 0,
+total: payload.total ?? 0,
+creditsUsed: payload.creditsUsed,
+expiresAt: payload.expiresAt,
+next: payload.next ?? null,
+data
+};
+this.emit("snapshot", snap);
+if (["completed", "failed", "cancelled"].includes(status)) {
+this.emit("done", { status, data, id: this.jobId });
+this.close();
+}
+}
+async pollLoop() {
+const startTs = Date.now();
+const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
+while (!this.closed) {
+try {
+const snap = this.kind === "crawl" ? await getCrawlStatus(this.http, this.jobId) : await getBatchScrapeStatus(this.http, this.jobId);
+this.emit("snapshot", snap);
+if (["completed", "failed", "cancelled"].includes(snap.status)) {
+this.emit("done", { status: snap.status, data: snap.data, id: this.jobId });
+this.close();
+break;
+}
+} catch {
+}
+if (timeoutMs && Date.now() - startTs > timeoutMs) break;
+await new Promise((r) => setTimeout(r, Math.max(1e3, this.pollInterval * 1e3)));
+}
+}
+close() {
+this.closed = true;
+if (this.ws && this.ws.close) this.ws.close();
+}
+};
+
+// src/v2/client.ts
 var zt = require("zod");
-var
+var FirecrawlClient = class {
+http;
+/**
+* Create a v2 client.
+* @param options Transport configuration (API key, base URL, timeouts, retries).
+*/
+constructor(options = {}) {
+const apiKey = options.apiKey ?? process.env.FIRECRAWL_API_KEY ?? "";
+const apiUrl = (options.apiUrl ?? process.env.FIRECRAWL_API_URL ?? "https://api.firecrawl.dev").replace(/\/$/, "");
+if (!apiKey) {
+throw new Error("API key is required. Set FIRECRAWL_API_KEY env or pass apiKey.");
+}
+this.http = new HttpClient({
+apiKey,
+apiUrl,
+timeoutMs: options.timeoutMs,
+maxRetries: options.maxRetries,
+backoffFactor: options.backoffFactor
+});
+}
+async scrape(url, options) {
+return scrape(this.http, url, options);
+}
+// Search
+/**
+* Search the web and optionally scrape each result.
+* @param query Search query string.
+* @param req Additional search options (sources, limit, scrapeOptions, etc.).
+* @returns Structured search results.
+*/
+async search(query, req = {}) {
+return search(this.http, { query, ...req });
+}
+// Map
+/**
+* Map a site to discover URLs (sitemap-aware).
+* @param url Root URL to map.
+* @param options Mapping options (sitemap mode, includeSubdomains, limit, timeout).
+* @returns Discovered links.
+*/
+async map(url, options) {
+return map(this.http, url, options);
+}
+// Crawl
+/**
+* Start a crawl job (async).
+* @param url Root URL to crawl.
+* @param req Crawl configuration (paths, limits, scrapeOptions, webhook, etc.).
+* @returns Job id and url.
+*/
+async startCrawl(url, req = {}) {
+return startCrawl(this.http, { url, ...req });
+}
+/**
+* Get the status and partial data of a crawl job.
+* @param jobId Crawl job id.
+*/
+async getCrawlStatus(jobId) {
+return getCrawlStatus(this.http, jobId);
+}
+/**
+* Cancel a crawl job.
+* @param jobId Crawl job id.
+* @returns True if cancelled.
+*/
+async cancelCrawl(jobId) {
+return cancelCrawl(this.http, jobId);
+}
+/**
+* Convenience waiter: start a crawl and poll until it finishes.
+* @param url Root URL to crawl.
+* @param req Crawl configuration plus waiter controls (pollInterval, timeout seconds).
+* @returns Final job snapshot.
+*/
+async crawl(url, req = {}) {
+return crawl(this.http, { url, ...req }, req.pollInterval, req.timeout);
+}
+/**
+* Retrieve crawl errors and robots.txt blocks.
+* @param crawlId Crawl job id.
+*/
+async getCrawlErrors(crawlId) {
+return getCrawlErrors(this.http, crawlId);
+}
+/**
+* List active crawls for the authenticated team.
+*/
+async getActiveCrawls() {
+return getActiveCrawls(this.http);
+}
+/**
+* Preview normalized crawl parameters produced by a natural-language prompt.
+* @param url Root URL.
+* @param prompt Natural-language instruction.
+*/
+async crawlParamsPreview(url, prompt) {
+return crawlParamsPreview(this.http, url, prompt);
+}
+// Batch
+/**
+* Start a batch scrape job for multiple URLs (async).
+* @param urls URLs to scrape.
+* @param opts Batch options (scrape options, webhook, concurrency, idempotency key, etc.).
+* @returns Job id and url.
+*/
+async startBatchScrape(urls, opts) {
+return startBatchScrape(this.http, urls, opts);
+}
+/**
+* Get the status and partial data of a batch scrape job.
+* @param jobId Batch job id.
+*/
+async getBatchScrapeStatus(jobId) {
+return getBatchScrapeStatus(this.http, jobId);
+}
+/**
+* Retrieve batch scrape errors and robots.txt blocks.
+* @param jobId Batch job id.
+*/
+async getBatchScrapeErrors(jobId) {
+return getBatchScrapeErrors(this.http, jobId);
+}
+/**
+* Cancel a batch scrape job.
+* @param jobId Batch job id.
+* @returns True if cancelled.
+*/
+async cancelBatchScrape(jobId) {
+return cancelBatchScrape(this.http, jobId);
+}
+/**
+* Convenience waiter: start a batch scrape and poll until it finishes.
+* @param urls URLs to scrape.
+* @param opts Batch options plus waiter controls (pollInterval, timeout seconds).
+* @returns Final job snapshot.
+*/
+async batchScrape(urls, opts) {
+return batchScrape(this.http, urls, opts);
+}
+// Extract
+/**
+* Start an extract job (async).
+* @param args Extraction request (urls, schema or prompt, flags).
+* @returns Job id or processing state.
+*/
+async startExtract(args) {
+return startExtract(this.http, args);
+}
+/**
+* Get extract job status/data.
+* @param jobId Extract job id.
+*/
+async getExtractStatus(jobId) {
+return getExtractStatus(this.http, jobId);
+}
+/**
+* Convenience waiter: start an extract and poll until it finishes.
+* @param args Extraction request plus waiter controls (pollInterval, timeout seconds).
+* @returns Final extract response.
+*/
+async extract(args) {
+return extract(this.http, args);
+}
+// Usage
+/** Current concurrency usage. */
+async getConcurrency() {
+return getConcurrency(this.http);
+}
+/** Current credit usage. */
+async getCreditUsage() {
+return getCreditUsage(this.http);
+}
+/** Recent token usage. */
+async getTokenUsage() {
+return getTokenUsage(this.http);
+}
+// Watcher
+/**
+* Create a watcher for a crawl or batch job. Emits: `document`, `snapshot`, `done`, `error`.
+* @param jobId Job id.
+* @param opts Watcher options (kind, pollInterval, timeout seconds).
+*/
+watcher(jobId, opts = {}) {
+return new Watcher(this.http, jobId, opts);
+}
+};
+
+// src/v1/index.ts
+var import_axios3 = __toESM(require("axios"), 1);
+var zt2 = require("zod");
+var import_zod_to_json_schema3 = require("zod-to-json-schema");
 
 // node_modules/typescript-event-target/dist/index.mjs
 var e = class extends EventTarget {
@@ -125,7 +1026,7 @@ var e = class extends EventTarget {
 }
 };
 
-// src/index.ts
+// src/v1/index.ts
 var FirecrawlError = class extends Error {
 statusCode;
 details;
@@ -144,10 +1045,16 @@ var FirecrawlApp = class {
 }
 async getVersion() {
 try {
+if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
+return process.env.npm_package_version;
+}
 const packageJson = await Promise.resolve().then(() => __toESM(require_package(), 1));
 return packageJson.default.version;
 } catch (error) {
-
+const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
+if (!isTest) {
+console.error("Error getting version:", error);
+}
 return "1.25.1";
 }
 }
@@ -182,7 +1089,7 @@ var FirecrawlApp = class {
 if (jsonData?.extract?.schema) {
 let schema = jsonData.extract.schema;
 try {
-schema = (0,
+schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
 } catch (error) {
 }
 jsonData = {
@@ -196,7 +1103,7 @@ var FirecrawlApp = class {
 if (jsonData?.jsonOptions?.schema) {
 let schema = jsonData.jsonOptions.schema;
 try {
-schema = (0,
+schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
 } catch (error) {
 }
 jsonData = {
@@ -208,7 +1115,7 @@ var FirecrawlApp = class {
 };
 }
 try {
-const response = await
+const response = await import_axios3.default.post(
 this.apiUrl + `/v1/scrape`,
 jsonData,
 { headers, timeout: params?.timeout !== void 0 ? params.timeout + 5e3 : void 0 }
@@ -259,7 +1166,7 @@ var FirecrawlApp = class {
 if (jsonData?.scrapeOptions?.extract?.schema) {
 let schema = jsonData.scrapeOptions.extract.schema;
 try {
-schema = (0,
+schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
 } catch (error) {
 }
 jsonData = {
@@ -480,9 +1387,9 @@ var FirecrawlApp = class {
 * @returns A CrawlWatcher instance to monitor the crawl job.
 */
 async crawlUrlAndWatch(url, params, idempotencyKey) {
-const
-if (
-const id =
+const crawl2 = await this.asyncCrawlUrl(url, params, idempotencyKey);
+if (crawl2.success && crawl2.id) {
+const id = crawl2.id;
 return new CrawlWatcher(id, this);
 }
 throw new FirecrawlError("Crawl job failed to start", 400);
@@ -528,7 +1435,7 @@ var FirecrawlApp = class {
 if (jsonData?.extract?.schema) {
 let schema = jsonData.extract.schema;
 try {
-schema = (0,
+schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
 } catch (error) {
 }
 jsonData = {
@@ -542,7 +1449,7 @@ var FirecrawlApp = class {
 if (jsonData?.jsonOptions?.schema) {
 let schema = jsonData.jsonOptions.schema;
 try {
-schema = (0,
+schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
 } catch (error) {
 }
 jsonData = {
@@ -605,9 +1512,9 @@ var FirecrawlApp = class {
 * @returns A CrawlWatcher instance to monitor the crawl job.
 */
 async batchScrapeUrlsAndWatch(urls, params, idempotencyKey, webhook, ignoreInvalidURLs) {
-const
-if (
-const id =
+const crawl2 = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey, webhook, ignoreInvalidURLs);
+if (crawl2.success && crawl2.id) {
+const id = crawl2.id;
 return new CrawlWatcher(id, this);
 }
 throw new FirecrawlError("Batch scrape job failed to start", 400);
@@ -721,7 +1628,7 @@ var FirecrawlApp = class {
 jsonSchema = void 0;
 } else {
 try {
-jsonSchema = (0,
+jsonSchema = (0, import_zod_to_json_schema3.zodToJsonSchema)(params.schema);
 } catch (_) {
 jsonSchema = params.schema;
 }
@@ -785,7 +1692,7 @@ var FirecrawlApp = class {
 jsonSchema = void 0;
 } else {
 try {
-jsonSchema = (0,
+jsonSchema = (0, import_zod_to_json_schema3.zodToJsonSchema)(params.schema);
 } catch (_) {
 jsonSchema = params.schema;
 }
@@ -849,7 +1756,7 @@ var FirecrawlApp = class {
 * @returns The response from the POST request.
 */
 postRequest(url, data, headers) {
-return
+return import_axios3.default.post(url, data, { headers, timeout: data?.timeout ? data.timeout + 5e3 : void 0 });
 }
 /**
 * Sends a GET request to the specified URL.
@@ -859,9 +1766,9 @@ var FirecrawlApp = class {
 */
 async getRequest(url, headers) {
 try {
-return await
+return await import_axios3.default.get(url, { headers });
 } catch (error) {
-if (error instanceof
+if (error instanceof import_axios3.AxiosError && error.response) {
 return error.response;
 } else {
 throw error;
@@ -876,9 +1783,9 @@ var FirecrawlApp = class {
 */
 async deleteRequest(url, headers) {
 try {
-return await
+return await import_axios3.default.delete(url, { headers });
 } catch (error) {
-if (error instanceof
+if (error instanceof import_axios3.AxiosError && error.response) {
 return error.response;
 } else {
 throw error;
@@ -894,15 +1801,18 @@ var FirecrawlApp = class {
 * @returns The final job status or data.
 */
 async monitorJobStatus(id, headers, checkInterval) {
-
-
-
+let failedTries = 0;
+let networkRetries = 0;
+const maxNetworkRetries = 3;
+while (true) {
+try {
 let statusResponse = await this.getRequest(
 `${this.apiUrl}/v1/crawl/${id}`,
 headers
 );
 if (statusResponse.status === 200) {
 failedTries = 0;
+networkRetries = 0;
 let statusData = statusResponse.data;
 if (statusData.status === "completed") {
 if ("data" in statusData) {
@@ -937,11 +1847,45 @@ var FirecrawlApp = class {
 this.handleError(statusResponse, "check crawl status");
 }
 }
+} catch (error) {
+if (this.isRetryableError(error) && networkRetries < maxNetworkRetries) {
+networkRetries++;
+const backoffDelay = Math.min(1e3 * Math.pow(2, networkRetries - 1), 1e4);
+await new Promise((resolve) => setTimeout(resolve, backoffDelay));
+continue;
+}
+throw new FirecrawlError(error, 500);
 }
-} catch (error) {
-throw new FirecrawlError(error, 500);
 }
 }
+/**
+* Determines if an error is retryable (transient network error)
+* @param error - The error to check
+* @returns True if the error should be retried
+*/
+isRetryableError(error) {
+if (error instanceof import_axios3.AxiosError) {
+if (!error.response) {
+const code = error.code;
+const message = error.message?.toLowerCase() || "";
+return code === "ECONNRESET" || code === "ETIMEDOUT" || code === "ENOTFOUND" || code === "ECONNREFUSED" || message.includes("socket hang up") || message.includes("network error") || message.includes("timeout");
+}
+if (error.response?.status === 408 || error.response?.status === 504) {
+return true;
+}
+}
+if (error && typeof error === "object") {
+const code = error.code;
+const message = error.message?.toLowerCase() || "";
+if (code === "ECONNRESET" || code === "ETIMEDOUT" || code === "ENOTFOUND" || code === "ECONNREFUSED" || message.includes("socket hang up") || message.includes("network error") || message.includes("timeout")) {
+return true;
+}
+if (error.response?.status === 408 || error.response?.status === 504) {
+return true;
+}
+}
+return false;
+}
 /**
 * Handles errors from API responses.
 * @param {AxiosResponse} response - The response from the API.
@@ -1039,7 +1983,7 @@ var FirecrawlApp = class {
 if (jsonData?.jsonOptions?.schema) {
 let schema = jsonData.jsonOptions.schema;
 try {
-schema = (0,
+schema = (0, import_zod_to_json_schema3.zodToJsonSchema)(schema);
 } catch (error) {
 }
 jsonData = {
@@ -1394,8 +2338,28 @@ var CrawlWatcher = class extends e {
 this.ws.close();
 }
 };
+
+// src/index.ts
+var Firecrawl = class extends FirecrawlClient {
+/** Feature‑frozen v1 client (lazy). */
+_v1;
+_v1Opts;
+/** @param opts API credentials and base URL. */
+constructor(opts = {}) {
+super(opts);
+this._v1Opts = opts;
+}
+/** Access the legacy v1 client (instantiated on first access). */
+get v1() {
+if (!this._v1) this._v1 = new FirecrawlApp(this._v1Opts);
+return this._v1;
+}
+};
+var index_default = Firecrawl;
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
-
-
+Firecrawl,
+FirecrawlAppV1,
+FirecrawlClient,
+SdkError
 });
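The tail of the diff also wires up the new default export: Firecrawl extends FirecrawlClient and lazily exposes the feature-frozen v1 client as .v1. Below is a short sketch of the watcher and the v1 escape hatch; event names and payload fields come from the Watcher class shown above, while the URL and option values are illustrative.

import Firecrawl from "@mendable/firecrawl-js";

const firecrawl = new Firecrawl({ apiKey: process.env.FIRECRAWL_API_KEY });

async function watchCrawl() {
  // startCrawl returns { id, url } without waiting for completion.
  const { id } = await firecrawl.startCrawl("https://example.com", { limit: 5 });

  // Watcher extends EventEmitter; it tries a WebSocket first and falls back to polling.
  const watcher = firecrawl.watcher(id, { kind: "crawl", pollInterval: 2 });
  watcher.on("document", (doc) => console.log("document", doc));
  watcher.on("snapshot", (snap) => console.log(snap.status, snap.completed, snap.total));
  watcher.on("done", (state) => console.log("finished with status", state.status));
  watcher.on("error", (state) => console.error(state.error));
  await watcher.start();

  // The legacy v1 client (FirecrawlApp) is instantiated on first access.
  const legacyClient = firecrawl.v1;
  console.log(await legacyClient.getVersion());
}

watchCrawl();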