firecrawl 1.29.3 → 3.0.2

This diff shows the published contents of two package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (47)
  1. package/.env.example +4 -2
  2. package/LICENSE +0 -0
  3. package/README.md +85 -78
  4. package/audit-ci.jsonc +4 -0
  5. package/dist/chunk-JFWW4BWA.js +85 -0
  6. package/dist/index.cjs +964 -39
  7. package/dist/index.d.cts +529 -11
  8. package/dist/index.d.ts +529 -11
  9. package/dist/index.js +952 -27
  10. package/dist/package-KYZ3HXR5.js +4 -0
  11. package/dump.rdb +0 -0
  12. package/jest.config.js +0 -0
  13. package/package.json +6 -6
  14. package/src/__tests__/e2e/v2/batch.test.ts +74 -0
  15. package/src/__tests__/e2e/v2/crawl.test.ts +182 -0
  16. package/src/__tests__/e2e/v2/extract.test.ts +70 -0
  17. package/src/__tests__/e2e/v2/map.test.ts +55 -0
  18. package/src/__tests__/e2e/v2/scrape.test.ts +130 -0
  19. package/src/__tests__/e2e/v2/search.test.ts +247 -0
  20. package/src/__tests__/e2e/v2/usage.test.ts +36 -0
  21. package/src/__tests__/e2e/v2/utils/idmux.ts +58 -0
  22. package/src/__tests__/e2e/v2/watcher.test.ts +96 -0
  23. package/src/__tests__/unit/v2/errorHandler.test.ts +19 -0
  24. package/src/__tests__/unit/v2/scrape.unit.test.ts +11 -0
  25. package/src/__tests__/unit/v2/validation.test.ts +59 -0
  26. package/src/index.backup.ts +2146 -0
  27. package/src/index.ts +27 -2134
  28. package/src/v1/index.ts +2158 -0
  29. package/src/v2/client.ts +281 -0
  30. package/src/v2/methods/batch.ts +131 -0
  31. package/src/v2/methods/crawl.ts +160 -0
  32. package/src/v2/methods/extract.ts +86 -0
  33. package/src/v2/methods/map.ts +37 -0
  34. package/src/v2/methods/scrape.ts +26 -0
  35. package/src/v2/methods/search.ts +69 -0
  36. package/src/v2/methods/usage.ts +39 -0
  37. package/src/v2/types.ts +308 -0
  38. package/src/v2/utils/errorHandler.ts +18 -0
  39. package/src/v2/utils/getVersion.ts +14 -0
  40. package/src/v2/utils/httpClient.ts +99 -0
  41. package/src/v2/utils/validation.ts +50 -0
  42. package/src/v2/watcher.ts +159 -0
  43. package/tsconfig.json +2 -1
  44. package/tsup.config.ts +0 -0
  45. package/dist/package-Z6F7JDXI.js +0 -111
  46. package/src/__tests__/{v1/e2e_withAuth → e2e/v1}/index.test.ts +0 -0
  47. package/src/__tests__/{v1/unit → unit/v1}/monitor-job-status-retry.test.ts +0 -0
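
The structural change is visible in the file list: the old client moves to package/src/v1/index.ts, a new v2 client lands under package/src/v2/ (per-method modules, a shared HTTP client, a job watcher), and src/index.ts shrinks to a thin wrapper (+27 −2134). For orientation before the bundled diff below, a minimal usage sketch of the v2 surface (assuming firecrawl@3 is installed; the URL and options are illustrative):

    import Firecrawl from "firecrawl";

    const firecrawl = new Firecrawl({ apiKey: process.env.FIRECRAWL_API_KEY });

    // One-shot scrape (POST /v2/scrape under the hood)
    const doc = await firecrawl.scrape("https://example.com", { formats: ["markdown"] });

    // Start a crawl and poll until it reaches a terminal state
    const job = await firecrawl.crawl("https://example.com", { limit: 10 });
    console.log(job.status, job.data.length);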
package/dist/index.js CHANGED
@@ -1,7 +1,906 @@
- // src/index.ts
- import axios, { AxiosError } from "axios";
+ import {
+   require_package
+ } from "./chunk-JFWW4BWA.js";
+
+ // src/v2/utils/httpClient.ts
+ import axios from "axios";
+
+ // src/v2/utils/getVersion.ts
+ function getVersion() {
+   try {
+     if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
+       return process.env.npm_package_version;
+     }
+     const pkg = require_package();
+     return pkg?.version || "3.x.x";
+   } catch {
+     return "3.x.x";
+   }
+ }
+
+ // src/v2/utils/httpClient.ts
+ var HttpClient = class {
+   instance;
+   apiKey;
+   apiUrl;
+   maxRetries;
+   backoffFactor;
+   constructor(options) {
+     this.apiKey = options.apiKey;
+     this.apiUrl = options.apiUrl.replace(/\/$/, "");
+     this.maxRetries = options.maxRetries ?? 3;
+     this.backoffFactor = options.backoffFactor ?? 0.5;
+     this.instance = axios.create({
+       baseURL: this.apiUrl,
+       timeout: options.timeoutMs ?? 6e4,
+       headers: {
+         "Content-Type": "application/json",
+         Authorization: `Bearer ${this.apiKey}`
+       },
+       transitional: { clarifyTimeoutError: true }
+     });
+   }
+   getApiUrl() {
+     return this.apiUrl;
+   }
+   getApiKey() {
+     return this.apiKey;
+   }
+   async request(config) {
+     const version = getVersion();
+     config.headers = {
+       ...config.headers || {}
+     };
+     let lastError;
+     for (let attempt = 0; attempt < this.maxRetries; attempt++) {
+       try {
+         const cfg = { ...config };
+         if (cfg.method && ["post", "put", "patch"].includes(cfg.method.toLowerCase())) {
+           const data = cfg.data ?? {};
+           cfg.data = { ...data, origin: `js-sdk@${version}` };
+         }
+         const res = await this.instance.request(cfg);
+         if (res.status === 502 && attempt < this.maxRetries - 1) {
+           await this.sleep(this.backoffFactor * Math.pow(2, attempt));
+           continue;
+         }
+         return res;
+       } catch (err) {
+         lastError = err;
+         const status = err?.response?.status;
+         if (status === 502 && attempt < this.maxRetries - 1) {
+           await this.sleep(this.backoffFactor * Math.pow(2, attempt));
+           continue;
+         }
+         throw err;
+       }
+     }
+     throw lastError ?? new Error("Unexpected HTTP client error");
+   }
+   sleep(seconds) {
+     return new Promise((r) => setTimeout(r, seconds * 1e3));
+   }
+   post(endpoint, body, headers) {
+     return this.request({ method: "post", url: endpoint, data: body, headers });
+   }
+   get(endpoint, headers) {
+     return this.request({ method: "get", url: endpoint, headers });
+   }
+   delete(endpoint, headers) {
+     return this.request({ method: "delete", url: endpoint, headers });
+   }
+   prepareHeaders(idempotencyKey) {
+     const headers = {};
+     if (idempotencyKey) headers["x-idempotency-key"] = idempotencyKey;
+     return headers;
+   }
+ };
+
+ // src/v2/types.ts
+ var SdkError = class extends Error {
+   status;
+   code;
+   details;
+   constructor(message, status, code, details) {
+     super(message);
+     this.name = "FirecrawlSdkError";
+     this.status = status;
+     this.code = code;
+     this.details = details;
+   }
+ };
+
+ // src/v2/utils/validation.ts
+ import zodToJsonSchema from "zod-to-json-schema";
+ function ensureValidFormats(formats) {
+   if (!formats) return;
+   for (const fmt of formats) {
+     if (typeof fmt === "string") {
+       if (fmt === "json") {
+         throw new Error("json format must be an object with { type: 'json', prompt, schema }");
+       }
+       continue;
+     }
+     if (fmt.type === "json") {
+       const j = fmt;
+       if (!j.prompt && !j.schema) {
+         throw new Error("json format requires either 'prompt' or 'schema' (or both)");
+       }
+       const maybeSchema = j.schema;
+       const isZod = !!maybeSchema && (typeof maybeSchema.safeParse === "function" || typeof maybeSchema.parse === "function") && !!maybeSchema._def;
+       if (isZod) {
+         try {
+           j.schema = zodToJsonSchema(maybeSchema);
+         } catch {
+         }
+       }
+       continue;
+     }
+     if (fmt.type === "screenshot") {
+       const s = fmt;
+       if (s.quality != null && (typeof s.quality !== "number" || s.quality < 0)) {
+         throw new Error("screenshot.quality must be a non-negative number");
+       }
+     }
+   }
+ }
+ function ensureValidScrapeOptions(options) {
+   if (!options) return;
+   if (options.timeout != null && options.timeout <= 0) {
+     throw new Error("timeout must be positive");
+   }
+   if (options.waitFor != null && options.waitFor < 0) {
+     throw new Error("waitFor must be non-negative");
+   }
+   ensureValidFormats(options.formats);
+ }
+
+ // src/v2/utils/errorHandler.ts
+ import "axios";
+ function throwForBadResponse(resp, action) {
+   const status = resp.status;
+   const body = resp.data || {};
+   const msg = body?.error || body?.message || `Request failed (${status}) while trying to ${action}`;
+   throw new SdkError(msg, status, void 0, body?.details);
+ }
+ function normalizeAxiosError(err, action) {
+   const status = err.response?.status;
+   const body = err.response?.data;
+   const message = body?.error || err.message || `Request failed${status ? ` (${status})` : ""} while trying to ${action}`;
+   const code = body?.code || err.code;
+   throw new SdkError(message, status, code, body?.details ?? body);
+ }
+
+ // src/v2/methods/scrape.ts
+ async function scrape(http, url, options) {
+   if (!url || !url.trim()) {
+     throw new Error("URL cannot be empty");
+   }
+   if (options) ensureValidScrapeOptions(options);
+   const payload = { url: url.trim() };
+   if (options) Object.assign(payload, options);
+   try {
+     const res = await http.post("/v2/scrape", payload);
+     if (res.status !== 200 || !res.data?.success) {
+       throwForBadResponse(res, "scrape");
+     }
+     return res.data.data || {};
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "scrape");
+     throw err;
+   }
+ }
+
+ // src/v2/methods/search.ts
+ function prepareSearchPayload(req) {
+   if (!req.query || !req.query.trim()) throw new Error("Query cannot be empty");
+   if (req.limit != null && req.limit <= 0) throw new Error("limit must be positive");
+   if (req.timeout != null && req.timeout <= 0) throw new Error("timeout must be positive");
+   const payload = {
+     query: req.query
+   };
+   if (req.sources) payload.sources = req.sources;
+   if (req.limit != null) payload.limit = req.limit;
+   if (req.tbs != null) payload.tbs = req.tbs;
+   if (req.location != null) payload.location = req.location;
+   if (req.ignoreInvalidURLs != null) payload.ignoreInvalidURLs = req.ignoreInvalidURLs;
+   if (req.timeout != null) payload.timeout = req.timeout;
+   if (req.scrapeOptions) {
+     ensureValidScrapeOptions(req.scrapeOptions);
+     payload.scrapeOptions = req.scrapeOptions;
+   }
+   return payload;
+ }
+ async function search(http, request) {
+   const payload = prepareSearchPayload(request);
+   try {
+     const res = await http.post("/v2/search", payload);
+     if (res.status !== 200 || !res.data?.success) {
+       throwForBadResponse(res, "search");
+     }
+     const data = res.data.data || {};
+     const out = {};
+     for (const key of Object.keys(data)) {
+       const arr = data[key];
+       if (Array.isArray(arr)) {
+         const results = [];
+         for (const item of arr) {
+           if (item && typeof item === "object") {
+             if ("markdown" in item || "html" in item || "rawHtml" in item || "links" in item || "screenshot" in item || "changeTracking" in item || "summary" in item || "json" in item) {
+               results.push(item);
+             } else {
+               results.push({ url: item.url, title: item.title, description: item.description });
+             }
+           } else if (typeof item === "string") {
+             results.push({ url: item });
+           }
+         }
+         out[key] = results;
+       }
+     }
+     return out;
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "search");
+     throw err;
+   }
+ }
+
+ // src/v2/methods/map.ts
+ function prepareMapPayload(url, options) {
+   if (!url || !url.trim()) throw new Error("URL cannot be empty");
+   const payload = { url: url.trim() };
+   if (options) {
+     if (options.sitemap != null) payload.sitemap = options.sitemap;
+     if (options.search != null) payload.search = options.search;
+     if (options.includeSubdomains != null) payload.includeSubdomains = options.includeSubdomains;
+     if (options.limit != null) payload.limit = options.limit;
+     if (options.timeout != null) payload.timeout = options.timeout;
+   }
+   return payload;
+ }
+ async function map(http, url, options) {
+   const payload = prepareMapPayload(url, options);
+   try {
+     const res = await http.post("/v2/map", payload);
+     if (res.status !== 200 || !res.data?.success) {
+       throwForBadResponse(res, "map");
+     }
+     const linksIn = res.data.links || [];
+     const links = [];
+     for (const item of linksIn) {
+       if (typeof item === "string") links.push({ url: item });
+       else if (item && typeof item === "object") links.push({ url: item.url, title: item.title, description: item.description });
+     }
+     return { links };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "map");
+     throw err;
+   }
+ }
+
+ // src/v2/methods/crawl.ts
+ function prepareCrawlPayload(request) {
+   if (!request.url || !request.url.trim()) throw new Error("URL cannot be empty");
+   const data = { url: request.url.trim() };
+   if (request.prompt) data.prompt = request.prompt;
+   if (request.excludePaths) data.excludePaths = request.excludePaths;
+   if (request.includePaths) data.includePaths = request.includePaths;
+   if (request.maxDiscoveryDepth != null) data.maxDiscoveryDepth = request.maxDiscoveryDepth;
+   if (request.sitemap != null) data.sitemap = request.sitemap;
+   if (request.ignoreQueryParameters != null) data.ignoreQueryParameters = request.ignoreQueryParameters;
+   if (request.limit != null) data.limit = request.limit;
+   if (request.crawlEntireDomain != null) data.crawlEntireDomain = request.crawlEntireDomain;
+   if (request.allowExternalLinks != null) data.allowExternalLinks = request.allowExternalLinks;
+   if (request.allowSubdomains != null) data.allowSubdomains = request.allowSubdomains;
+   if (request.delay != null) data.delay = request.delay;
+   if (request.maxConcurrency != null) data.maxConcurrency = request.maxConcurrency;
+   if (request.webhook != null) data.webhook = request.webhook;
+   if (request.scrapeOptions) {
+     ensureValidScrapeOptions(request.scrapeOptions);
+     data.scrapeOptions = request.scrapeOptions;
+   }
+   if (request.zeroDataRetention != null) data.zeroDataRetention = request.zeroDataRetention;
+   return data;
+ }
+ async function startCrawl(http, request) {
+   const payload = prepareCrawlPayload(request);
+   try {
+     const res = await http.post("/v2/crawl", payload);
+     if (res.status !== 200 || !res.data?.success) {
+       throwForBadResponse(res, "start crawl");
+     }
+     return { id: res.data.id, url: res.data.url };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "start crawl");
+     throw err;
+   }
+ }
+ async function getCrawlStatus(http, jobId) {
+   try {
+     const res = await http.get(`/v2/crawl/${jobId}`);
+     if (res.status !== 200 || !res.data?.success) {
+       throwForBadResponse(res, "get crawl status");
+     }
+     const body = res.data;
+     return {
+       status: body.status,
+       completed: body.completed ?? 0,
+       total: body.total ?? 0,
+       creditsUsed: body.creditsUsed,
+       expiresAt: body.expiresAt,
+       next: body.next ?? null,
+       data: body.data || []
+     };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl status");
+     throw err;
+   }
+ }
+ async function cancelCrawl(http, jobId) {
+   try {
+     const res = await http.delete(`/v2/crawl/${jobId}`);
+     if (res.status !== 200) throwForBadResponse(res, "cancel crawl");
+     return res.data?.status === "cancelled";
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "cancel crawl");
+     throw err;
+   }
+ }
+ async function waitForCrawlCompletion(http, jobId, pollInterval = 2, timeout) {
+   const start = Date.now();
+   while (true) {
+     const status = await getCrawlStatus(http, jobId);
+     if (["completed", "failed", "cancelled"].includes(status.status)) return status;
+     if (timeout != null && Date.now() - start > timeout * 1e3) {
+       throw new Error(`Crawl job ${jobId} did not complete within ${timeout} seconds`);
+     }
+     await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+   }
+ }
+ async function crawl(http, request, pollInterval = 2, timeout) {
+   const started = await startCrawl(http, request);
+   return waitForCrawlCompletion(http, started.id, pollInterval, timeout);
+ }
+ async function getCrawlErrors(http, crawlId) {
+   try {
+     const res = await http.get(`/v2/crawl/${crawlId}/errors`);
+     if (res.status !== 200) throwForBadResponse(res, "get crawl errors");
+     const payload = res.data?.data ?? res.data;
+     return { errors: payload.errors || [], robotsBlocked: payload.robotsBlocked || [] };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl errors");
+     throw err;
+   }
+ }
+ async function getActiveCrawls(http) {
+   try {
+     const res = await http.get(`/v2/crawl/active`);
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get active crawls");
+     const crawlsIn = res.data?.crawls || [];
+     const crawls = crawlsIn.map((c) => ({ id: c.id, teamId: c.teamId ?? c.team_id, url: c.url, options: c.options ?? null }));
+     return { success: true, crawls };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get active crawls");
+     throw err;
+   }
+ }
+ async function crawlParamsPreview(http, url, prompt) {
+   if (!url || !url.trim()) throw new Error("URL cannot be empty");
+   if (!prompt || !prompt.trim()) throw new Error("Prompt cannot be empty");
+   try {
+     const res = await http.post("/v2/crawl/params-preview", { url: url.trim(), prompt });
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "crawl params preview");
+     const data = res.data.data || {};
+     if (res.data.warning) data.warning = res.data.warning;
+     return data;
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "crawl params preview");
+     throw err;
+   }
+ }
+
+ // src/v2/methods/batch.ts
+ async function startBatchScrape(http, urls, {
+   options,
+   webhook,
+   appendToId,
+   ignoreInvalidURLs,
+   maxConcurrency,
+   zeroDataRetention,
+   integration,
+   idempotencyKey
+ } = {}) {
+   if (!Array.isArray(urls) || urls.length === 0) throw new Error("URLs list cannot be empty");
+   const payload = { urls };
+   if (options) {
+     ensureValidScrapeOptions(options);
+     Object.assign(payload, options);
+   }
+   if (webhook != null) payload.webhook = webhook;
+   if (appendToId != null) payload.appendToId = appendToId;
+   if (ignoreInvalidURLs != null) payload.ignoreInvalidURLs = ignoreInvalidURLs;
+   if (maxConcurrency != null) payload.maxConcurrency = maxConcurrency;
+   if (zeroDataRetention != null) payload.zeroDataRetention = zeroDataRetention;
+   if (integration != null) payload.integration = integration;
+   try {
+     const headers = http.prepareHeaders(idempotencyKey);
+     const res = await http.post("/v2/batch/scrape", payload, headers);
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "start batch scrape");
+     return { id: res.data.id, url: res.data.url, invalidURLs: res.data.invalidURLs || void 0 };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "start batch scrape");
+     throw err;
+   }
+ }
+ async function getBatchScrapeStatus(http, jobId) {
+   try {
+     const res = await http.get(`/v2/batch/scrape/${jobId}`);
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get batch scrape status");
+     const body = res.data;
+     return {
+       status: body.status,
+       completed: body.completed ?? 0,
+       total: body.total ?? 0,
+       creditsUsed: body.creditsUsed,
+       expiresAt: body.expiresAt,
+       next: body.next ?? null,
+       data: body.data || []
+     };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape status");
+     throw err;
+   }
+ }
+ async function cancelBatchScrape(http, jobId) {
+   try {
+     const res = await http.delete(`/v2/batch/scrape/${jobId}`);
+     if (res.status !== 200) throwForBadResponse(res, "cancel batch scrape");
+     return res.data?.status === "cancelled";
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "cancel batch scrape");
+     throw err;
+   }
+ }
+ async function getBatchScrapeErrors(http, jobId) {
+   try {
+     const res = await http.get(`/v2/batch/scrape/${jobId}/errors`);
+     if (res.status !== 200) throwForBadResponse(res, "get batch scrape errors");
+     const payload = res.data?.data ?? res.data;
+     return { errors: payload.errors || [], robotsBlocked: payload.robotsBlocked || [] };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape errors");
+     throw err;
+   }
+ }
+ async function waitForBatchCompletion(http, jobId, pollInterval = 2, timeout) {
+   const start = Date.now();
+   while (true) {
+     const status = await getBatchScrapeStatus(http, jobId);
+     if (["completed", "failed", "cancelled"].includes(status.status)) return status;
+     if (timeout != null && Date.now() - start > timeout * 1e3) {
+       throw new Error(`Batch scrape job ${jobId} did not complete within ${timeout} seconds`);
+     }
+     await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+   }
+ }
+ async function batchScrape(http, urls, opts = {}) {
+   const start = await startBatchScrape(http, urls, opts);
+   return waitForBatchCompletion(http, start.id, opts.pollInterval ?? 2, opts.timeout);
+ }
+
+ // src/v2/methods/extract.ts
+ import { zodToJsonSchema as zodToJsonSchema2 } from "zod-to-json-schema";
+ function prepareExtractPayload(args) {
+   const body = {};
+   if (args.urls) body.urls = args.urls;
+   if (args.prompt != null) body.prompt = args.prompt;
+   if (args.schema != null) {
+     const s = args.schema;
+     const isZod = s && (typeof s.safeParse === "function" || typeof s.parse === "function") && s._def;
+     body.schema = isZod ? zodToJsonSchema2(s) : args.schema;
+   }
+   if (args.systemPrompt != null) body.systemPrompt = args.systemPrompt;
+   if (args.allowExternalLinks != null) body.allowExternalLinks = args.allowExternalLinks;
+   if (args.enableWebSearch != null) body.enableWebSearch = args.enableWebSearch;
+   if (args.showSources != null) body.showSources = args.showSources;
+   if (args.ignoreInvalidURLs != null) body.ignoreInvalidURLs = args.ignoreInvalidURLs;
+   if (args.scrapeOptions) {
+     ensureValidScrapeOptions(args.scrapeOptions);
+     body.scrapeOptions = args.scrapeOptions;
+   }
+   return body;
+ }
+ async function startExtract(http, args) {
+   const payload = prepareExtractPayload(args);
+   try {
+     const res = await http.post("/v2/extract", payload);
+     if (res.status !== 200) throwForBadResponse(res, "extract");
+     return res.data;
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "extract");
+     throw err;
+   }
+ }
+ async function getExtractStatus(http, jobId) {
+   try {
+     const res = await http.get(`/v2/extract/${jobId}`);
+     if (res.status !== 200) throwForBadResponse(res, "extract status");
+     return res.data;
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "extract status");
+     throw err;
+   }
+ }
+ async function waitExtract(http, jobId, pollInterval = 2, timeout) {
+   const start = Date.now();
+   while (true) {
+     const status = await getExtractStatus(http, jobId);
+     if (["completed", "failed", "cancelled"].includes(status.status || "")) return status;
+     if (timeout != null && Date.now() - start > timeout * 1e3) return status;
+     await new Promise((r) => setTimeout(r, Math.max(1e3, pollInterval * 1e3)));
+   }
+ }
+ async function extract(http, args) {
+   const started = await startExtract(http, args);
+   const jobId = started.id;
+   if (!jobId) return started;
+   return waitExtract(http, jobId, args.pollInterval ?? 2, args.timeout);
+ }
+
+ // src/v2/methods/usage.ts
+ async function getConcurrency(http) {
+   try {
+     const res = await http.get("/v2/concurrency-check");
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get concurrency");
+     const d = res.data.data || res.data;
+     return { concurrency: d.concurrency, maxConcurrency: d.maxConcurrency ?? d.max_concurrency };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get concurrency");
+     throw err;
+   }
+ }
+ async function getCreditUsage(http) {
+   try {
+     const res = await http.get("/v2/team/credit-usage");
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get credit usage");
+     const d = res.data.data || res.data;
+     return { remainingCredits: d.remainingCredits ?? d.remaining_credits ?? 0 };
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get credit usage");
+     throw err;
+   }
+ }
+ async function getTokenUsage(http) {
+   try {
+     const res = await http.get("/v2/team/token-usage");
+     if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get token usage");
+     return res.data.data || res.data;
+   } catch (err) {
+     if (err?.isAxiosError) return normalizeAxiosError(err, "get token usage");
+     throw err;
+   }
+ }
+
+ // src/v2/watcher.ts
+ import { EventEmitter } from "events";
+ var Watcher = class extends EventEmitter {
+   http;
+   jobId;
+   kind;
+   pollInterval;
+   timeout;
+   ws;
+   closed = false;
+   constructor(http, jobId, opts = {}) {
+     super();
+     this.http = http;
+     this.jobId = jobId;
+     this.kind = opts.kind ?? "crawl";
+     this.pollInterval = opts.pollInterval ?? 2;
+     this.timeout = opts.timeout;
+   }
+   buildWsUrl() {
+     const apiUrl = this.http.getApiUrl();
+     const wsBase = apiUrl.replace(/^http/, "ws");
+     const path = this.kind === "crawl" ? `/v2/crawl/${this.jobId}` : `/v2/batch/scrape/${this.jobId}`;
+     return `${wsBase}${path}`;
+   }
+   async start() {
+     try {
+       const url = this.buildWsUrl();
+       this.ws = new WebSocket(url, this.http.getApiKey());
+       this.attachWsHandlers(this.ws);
+     } catch {
+       this.pollLoop();
+     }
+   }
+   attachWsHandlers(ws) {
+     let startTs = Date.now();
+     const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
+     ws.onmessage = (ev) => {
+       try {
+         const body = typeof ev.data === "string" ? JSON.parse(ev.data) : null;
+         if (!body) return;
+         const type = body.type;
+         if (type === "error") {
+           this.emit("error", { status: "failed", data: [], error: body.error, id: this.jobId });
+           return;
+         }
+         if (type === "catchup") {
+           const payload2 = body.data || {};
+           this.emitDocuments(payload2.data || []);
+           this.emitSnapshot(payload2);
+           return;
+         }
+         if (type === "document") {
+           const doc = body.data;
+           if (doc) this.emit("document", doc);
+           return;
+         }
+         if (type === "done") {
+           this.emit("done", { status: "completed", data: [], id: this.jobId });
+           this.close();
+           return;
+         }
+         const payload = body.data || body;
+         if (payload && payload.status) this.emitSnapshot(payload);
+       } catch {
+       }
+       if (timeoutMs && Date.now() - startTs > timeoutMs) this.close();
+     };
+     ws.onerror = () => {
+       this.emit("error", { status: "failed", data: [], error: "WebSocket error", id: this.jobId });
+       this.close();
+     };
+     ws.onclose = () => {
+       if (!this.closed) this.pollLoop();
+     };
+   }
+   emitDocuments(docs) {
+     for (const doc of docs) this.emit("document", { ...doc, id: this.jobId });
+   }
+   emitSnapshot(payload) {
+     const status = payload.status;
+     const data = payload.data || [];
+     const snap = this.kind === "crawl" ? {
+       status,
+       completed: payload.completed ?? 0,
+       total: payload.total ?? 0,
+       creditsUsed: payload.creditsUsed,
+       expiresAt: payload.expiresAt,
+       next: payload.next ?? null,
+       data
+     } : {
+       status,
+       completed: payload.completed ?? 0,
+       total: payload.total ?? 0,
+       creditsUsed: payload.creditsUsed,
+       expiresAt: payload.expiresAt,
+       next: payload.next ?? null,
+       data
+     };
+     this.emit("snapshot", snap);
+     if (["completed", "failed", "cancelled"].includes(status)) {
+       this.emit("done", { status, data, id: this.jobId });
+       this.close();
+     }
+   }
+   async pollLoop() {
+     const startTs = Date.now();
+     const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
+     while (!this.closed) {
+       try {
+         const snap = this.kind === "crawl" ? await getCrawlStatus(this.http, this.jobId) : await getBatchScrapeStatus(this.http, this.jobId);
+         this.emit("snapshot", snap);
+         if (["completed", "failed", "cancelled"].includes(snap.status)) {
+           this.emit("done", { status: snap.status, data: snap.data, id: this.jobId });
+           this.close();
+           break;
+         }
+       } catch {
+       }
+       if (timeoutMs && Date.now() - startTs > timeoutMs) break;
+       await new Promise((r) => setTimeout(r, Math.max(1e3, this.pollInterval * 1e3)));
+     }
+   }
+   close() {
+     this.closed = true;
+     if (this.ws && this.ws.close) this.ws.close();
+   }
+ };
+
+ // src/v2/client.ts
+ var FirecrawlClient = class {
+   http;
+   /**
+    * Create a v2 client.
+    * @param options Transport configuration (API key, base URL, timeouts, retries).
+    */
+   constructor(options = {}) {
+     const apiKey = options.apiKey ?? process.env.FIRECRAWL_API_KEY ?? "";
+     const apiUrl = (options.apiUrl ?? process.env.FIRECRAWL_API_URL ?? "https://api.firecrawl.dev").replace(/\/$/, "");
+     if (!apiKey) {
+       throw new Error("API key is required. Set FIRECRAWL_API_KEY env or pass apiKey.");
+     }
+     this.http = new HttpClient({
+       apiKey,
+       apiUrl,
+       timeoutMs: options.timeoutMs,
+       maxRetries: options.maxRetries,
+       backoffFactor: options.backoffFactor
+     });
+   }
+   async scrape(url, options) {
+     return scrape(this.http, url, options);
+   }
+   // Search
+   /**
+    * Search the web and optionally scrape each result.
+    * @param query Search query string.
+    * @param req Additional search options (sources, limit, scrapeOptions, etc.).
+    * @returns Structured search results.
+    */
+   async search(query, req = {}) {
+     return search(this.http, { query, ...req });
+   }
+   // Map
+   /**
+    * Map a site to discover URLs (sitemap-aware).
+    * @param url Root URL to map.
+    * @param options Mapping options (sitemap mode, includeSubdomains, limit, timeout).
+    * @returns Discovered links.
+    */
+   async map(url, options) {
+     return map(this.http, url, options);
+   }
+   // Crawl
+   /**
+    * Start a crawl job (async).
+    * @param url Root URL to crawl.
+    * @param req Crawl configuration (paths, limits, scrapeOptions, webhook, etc.).
+    * @returns Job id and url.
+    */
+   async startCrawl(url, req = {}) {
+     return startCrawl(this.http, { url, ...req });
+   }
+   /**
+    * Get the status and partial data of a crawl job.
+    * @param jobId Crawl job id.
+    */
+   async getCrawlStatus(jobId) {
+     return getCrawlStatus(this.http, jobId);
+   }
+   /**
+    * Cancel a crawl job.
+    * @param jobId Crawl job id.
+    * @returns True if cancelled.
+    */
+   async cancelCrawl(jobId) {
+     return cancelCrawl(this.http, jobId);
+   }
+   /**
+    * Convenience waiter: start a crawl and poll until it finishes.
+    * @param url Root URL to crawl.
+    * @param req Crawl configuration plus waiter controls (pollInterval, timeout seconds).
+    * @returns Final job snapshot.
+    */
+   async crawl(url, req = {}) {
+     return crawl(this.http, { url, ...req }, req.pollInterval, req.timeout);
+   }
+   /**
+    * Retrieve crawl errors and robots.txt blocks.
+    * @param crawlId Crawl job id.
+    */
+   async getCrawlErrors(crawlId) {
+     return getCrawlErrors(this.http, crawlId);
+   }
+   /**
+    * List active crawls for the authenticated team.
+    */
+   async getActiveCrawls() {
+     return getActiveCrawls(this.http);
+   }
+   /**
+    * Preview normalized crawl parameters produced by a natural-language prompt.
+    * @param url Root URL.
+    * @param prompt Natural-language instruction.
+    */
+   async crawlParamsPreview(url, prompt) {
+     return crawlParamsPreview(this.http, url, prompt);
+   }
+   // Batch
+   /**
+    * Start a batch scrape job for multiple URLs (async).
+    * @param urls URLs to scrape.
+    * @param opts Batch options (scrape options, webhook, concurrency, idempotency key, etc.).
+    * @returns Job id and url.
+    */
+   async startBatchScrape(urls, opts) {
+     return startBatchScrape(this.http, urls, opts);
+   }
+   /**
+    * Get the status and partial data of a batch scrape job.
+    * @param jobId Batch job id.
+    */
+   async getBatchScrapeStatus(jobId) {
+     return getBatchScrapeStatus(this.http, jobId);
+   }
+   /**
+    * Retrieve batch scrape errors and robots.txt blocks.
+    * @param jobId Batch job id.
+    */
+   async getBatchScrapeErrors(jobId) {
+     return getBatchScrapeErrors(this.http, jobId);
+   }
+   /**
+    * Cancel a batch scrape job.
+    * @param jobId Batch job id.
+    * @returns True if cancelled.
+    */
+   async cancelBatchScrape(jobId) {
+     return cancelBatchScrape(this.http, jobId);
+   }
+   /**
+    * Convenience waiter: start a batch scrape and poll until it finishes.
+    * @param urls URLs to scrape.
+    * @param opts Batch options plus waiter controls (pollInterval, timeout seconds).
+    * @returns Final job snapshot.
+    */
+   async batchScrape(urls, opts) {
+     return batchScrape(this.http, urls, opts);
+   }
+   // Extract
+   /**
+    * Start an extract job (async).
+    * @param args Extraction request (urls, schema or prompt, flags).
+    * @returns Job id or processing state.
+    */
+   async startExtract(args) {
+     return startExtract(this.http, args);
+   }
+   /**
+    * Get extract job status/data.
+    * @param jobId Extract job id.
+    */
+   async getExtractStatus(jobId) {
+     return getExtractStatus(this.http, jobId);
+   }
+   /**
+    * Convenience waiter: start an extract and poll until it finishes.
+    * @param args Extraction request plus waiter controls (pollInterval, timeout seconds).
+    * @returns Final extract response.
+    */
+   async extract(args) {
+     return extract(this.http, args);
+   }
+   // Usage
+   /** Current concurrency usage. */
+   async getConcurrency() {
+     return getConcurrency(this.http);
+   }
+   /** Current credit usage. */
+   async getCreditUsage() {
+     return getCreditUsage(this.http);
+   }
+   /** Recent token usage. */
+   async getTokenUsage() {
+     return getTokenUsage(this.http);
+   }
+   // Watcher
+   /**
+    * Create a watcher for a crawl or batch job. Emits: `document`, `snapshot`, `done`, `error`.
+    * @param jobId Job id.
+    * @param opts Watcher options (kind, pollInterval, timeout seconds).
+    */
+   watcher(jobId, opts = {}) {
+     return new Watcher(this.http, jobId, opts);
+   }
+ };
+
+ // src/v1/index.ts
+ import axios2, { AxiosError } from "axios";
  import "zod";
- import { zodToJsonSchema } from "zod-to-json-schema";
+ import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
 
  // node_modules/typescript-event-target/dist/index.mjs
  var e = class extends EventTarget {
@@ -10,7 +909,7 @@ var e = class extends EventTarget {
    }
  };
 
- // src/index.ts
+ // src/v1/index.ts
  var FirecrawlError = class extends Error {
    statusCode;
    details;
@@ -29,10 +928,16 @@ var FirecrawlApp = class {
    }
    async getVersion() {
      try {
-       const packageJson = await import("./package-Z6F7JDXI.js");
+       if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
+         return process.env.npm_package_version;
+       }
+       const packageJson = await import("./package-KYZ3HXR5.js");
        return packageJson.default.version;
      } catch (error) {
-       console.error("Error getting version:", error);
+       const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
+       if (!isTest) {
+         console.error("Error getting version:", error);
+       }
        return "1.25.1";
      }
    }
@@ -67,7 +972,7 @@ var FirecrawlApp = class {
    if (jsonData?.extract?.schema) {
      let schema = jsonData.extract.schema;
      try {
-       schema = zodToJsonSchema(schema);
+       schema = zodToJsonSchema3(schema);
      } catch (error) {
      }
      jsonData = {
@@ -81,7 +986,7 @@ var FirecrawlApp = class {
    if (jsonData?.jsonOptions?.schema) {
      let schema = jsonData.jsonOptions.schema;
      try {
-       schema = zodToJsonSchema(schema);
+       schema = zodToJsonSchema3(schema);
      } catch (error) {
      }
      jsonData = {
@@ -93,7 +998,7 @@ var FirecrawlApp = class {
      };
    }
    try {
-     const response = await axios.post(
+     const response = await axios2.post(
        this.apiUrl + `/v1/scrape`,
        jsonData,
        { headers, timeout: params?.timeout !== void 0 ? params.timeout + 5e3 : void 0 }
@@ -144,7 +1049,7 @@ var FirecrawlApp = class {
    if (jsonData?.scrapeOptions?.extract?.schema) {
      let schema = jsonData.scrapeOptions.extract.schema;
      try {
-       schema = zodToJsonSchema(schema);
+       schema = zodToJsonSchema3(schema);
      } catch (error) {
      }
      jsonData = {
@@ -365,9 +1270,9 @@ var FirecrawlApp = class {
   * @returns A CrawlWatcher instance to monitor the crawl job.
   */
  async crawlUrlAndWatch(url, params, idempotencyKey) {
-   const crawl = await this.asyncCrawlUrl(url, params, idempotencyKey);
-   if (crawl.success && crawl.id) {
-     const id = crawl.id;
+   const crawl2 = await this.asyncCrawlUrl(url, params, idempotencyKey);
+   if (crawl2.success && crawl2.id) {
+     const id = crawl2.id;
      return new CrawlWatcher(id, this);
    }
    throw new FirecrawlError("Crawl job failed to start", 400);
@@ -413,7 +1318,7 @@ var FirecrawlApp = class {
    if (jsonData?.extract?.schema) {
      let schema = jsonData.extract.schema;
      try {
-       schema = zodToJsonSchema(schema);
+       schema = zodToJsonSchema3(schema);
      } catch (error) {
      }
      jsonData = {
@@ -427,7 +1332,7 @@ var FirecrawlApp = class {
    if (jsonData?.jsonOptions?.schema) {
      let schema = jsonData.jsonOptions.schema;
      try {
-       schema = zodToJsonSchema(schema);
+       schema = zodToJsonSchema3(schema);
      } catch (error) {
      }
      jsonData = {
@@ -490,9 +1395,9 @@ var FirecrawlApp = class {
   * @returns A CrawlWatcher instance to monitor the crawl job.
   */
  async batchScrapeUrlsAndWatch(urls, params, idempotencyKey, webhook, ignoreInvalidURLs) {
-   const crawl = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey, webhook, ignoreInvalidURLs);
-   if (crawl.success && crawl.id) {
-     const id = crawl.id;
+   const crawl2 = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey, webhook, ignoreInvalidURLs);
+   if (crawl2.success && crawl2.id) {
+     const id = crawl2.id;
      return new CrawlWatcher(id, this);
    }
    throw new FirecrawlError("Batch scrape job failed to start", 400);
@@ -606,7 +1511,7 @@ var FirecrawlApp = class {
      jsonSchema = void 0;
    } else {
      try {
-       jsonSchema = zodToJsonSchema(params.schema);
+       jsonSchema = zodToJsonSchema3(params.schema);
      } catch (_) {
        jsonSchema = params.schema;
      }
@@ -670,7 +1575,7 @@ var FirecrawlApp = class {
      jsonSchema = void 0;
    } else {
      try {
-       jsonSchema = zodToJsonSchema(params.schema);
+       jsonSchema = zodToJsonSchema3(params.schema);
      } catch (_) {
        jsonSchema = params.schema;
      }
@@ -734,7 +1639,7 @@ var FirecrawlApp = class {
   * @returns The response from the POST request.
   */
  postRequest(url, data, headers) {
-   return axios.post(url, data, { headers, timeout: data?.timeout ? data.timeout + 5e3 : void 0 });
+   return axios2.post(url, data, { headers, timeout: data?.timeout ? data.timeout + 5e3 : void 0 });
  }
  /**
   * Sends a GET request to the specified URL.
@@ -744,7 +1649,7 @@ var FirecrawlApp = class {
   */
  async getRequest(url, headers) {
    try {
-     return await axios.get(url, { headers });
+     return await axios2.get(url, { headers });
    } catch (error) {
      if (error instanceof AxiosError && error.response) {
        return error.response;
@@ -761,7 +1666,7 @@ var FirecrawlApp = class {
   */
  async deleteRequest(url, headers) {
    try {
-     return await axios.delete(url, { headers });
+     return await axios2.delete(url, { headers });
    } catch (error) {
      if (error instanceof AxiosError && error.response) {
        return error.response;
@@ -961,7 +1866,7 @@ var FirecrawlApp = class {
    if (jsonData?.jsonOptions?.schema) {
      let schema = jsonData.jsonOptions.schema;
      try {
-       schema = zodToJsonSchema(schema);
+       schema = zodToJsonSchema3(schema);
      } catch (error) {
      }
      jsonData = {
@@ -1316,8 +2221,28 @@ var CrawlWatcher = class extends e {
      this.ws.close();
    }
  };
+
+ // src/index.ts
+ var Firecrawl = class extends FirecrawlClient {
+   /** Feature-frozen v1 client (lazy). */
+   _v1;
+   _v1Opts;
+   /** @param opts API credentials and base URL. */
+   constructor(opts = {}) {
+     super(opts);
+     this._v1Opts = opts;
+   }
+   /** Access the legacy v1 client (instantiated on first access). */
+   get v1() {
+     if (!this._v1) this._v1 = new FirecrawlApp(this._v1Opts);
+     return this._v1;
+   }
+ };
+ var src_default = Firecrawl;
  export {
-   CrawlWatcher,
-   FirecrawlError,
-   FirecrawlApp as default
+   Firecrawl,
+   FirecrawlApp as FirecrawlAppV1,
+   FirecrawlClient,
+   SdkError,
+   src_default as default
  };
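
The rewritten export block at the end sums up the breaking change: the default export is now the v2 Firecrawl class, CrawlWatcher and FirecrawlError are no longer exported from this entry point, and the legacy client stays reachable as FirecrawlAppV1 or through the lazy v1 getter. A hedged migration sketch (the v1 method name scrapeUrl comes from the 1.x API; the URL is illustrative):

    import Firecrawl, { SdkError } from "firecrawl";

    const app = new Firecrawl({ apiKey: process.env.FIRECRAWL_API_KEY });

    try {
      // New v2 call: map() resolves to { links }
      const { links } = await app.map("https://example.com");
      // Feature-frozen v1 client, created on first access of app.v1
      const legacyDoc = await app.v1.scrapeUrl("https://example.com");
    } catch (err) {
      // v2 methods normalize failures into SdkError (status, code, details)
      if (err instanceof SdkError) console.error(err.status, err.message);
    }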