@mendable/firecrawl-js 1.29.3 → 3.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/.env.example +4 -2
  2. package/LICENSE +0 -0
  3. package/README.md +85 -78
  4. package/audit-ci.jsonc +4 -0
  5. package/dist/chunk-RDDU62K7.js +85 -0
  6. package/dist/index.cjs +965 -38
  7. package/dist/index.d.cts +529 -11
  8. package/dist/index.d.ts +529 -11
  9. package/dist/index.js +954 -27
  10. package/dist/package-5MOU5FLU.js +4 -0
  11. package/dump.rdb +0 -0
  12. package/jest.config.js +0 -0
  13. package/package.json +6 -6
  14. package/src/__tests__/e2e/v2/batch.test.ts +74 -0
  15. package/src/__tests__/e2e/v2/crawl.test.ts +182 -0
  16. package/src/__tests__/e2e/v2/extract.test.ts +70 -0
  17. package/src/__tests__/e2e/v2/map.test.ts +55 -0
  18. package/src/__tests__/e2e/v2/scrape.test.ts +130 -0
  19. package/src/__tests__/e2e/v2/search.test.ts +247 -0
  20. package/src/__tests__/e2e/v2/usage.test.ts +36 -0
  21. package/src/__tests__/e2e/v2/utils/idmux.ts +58 -0
  22. package/src/__tests__/e2e/v2/watcher.test.ts +96 -0
  23. package/src/__tests__/unit/v2/errorHandler.test.ts +19 -0
  24. package/src/__tests__/unit/v2/scrape.unit.test.ts +11 -0
  25. package/src/__tests__/unit/v2/validation.test.ts +59 -0
  26. package/src/index.backup.ts +2146 -0
  27. package/src/index.ts +27 -2134
  28. package/src/v1/index.ts +2158 -0
  29. package/src/v2/client.ts +281 -0
  30. package/src/v2/methods/batch.ts +131 -0
  31. package/src/v2/methods/crawl.ts +160 -0
  32. package/src/v2/methods/extract.ts +86 -0
  33. package/src/v2/methods/map.ts +37 -0
  34. package/src/v2/methods/scrape.ts +26 -0
  35. package/src/v2/methods/search.ts +69 -0
  36. package/src/v2/methods/usage.ts +39 -0
  37. package/src/v2/types.ts +308 -0
  38. package/src/v2/utils/errorHandler.ts +18 -0
  39. package/src/v2/utils/getVersion.ts +14 -0
  40. package/src/v2/utils/httpClient.ts +101 -0
  41. package/src/v2/utils/validation.ts +50 -0
  42. package/src/v2/watcher.ts +159 -0
  43. package/tsconfig.json +2 -1
  44. package/tsup.config.ts +0 -0
  45. package/dist/package-Z6F7JDXI.js +0 -111
  46. /package/src/__tests__/{v1/e2e_withAuth → e2e/v1}/index.test.ts +0 -0
  47. /package/src/__tests__/{v1/unit → unit/v1}/monitor-job-status-retry.test.ts +0 -0
package/dist/index.js CHANGED
@@ -1,7 +1,908 @@
1
- // src/index.ts
2
- import axios, { AxiosError } from "axios";
1
+ import {
2
+ require_package
3
+ } from "./chunk-RDDU62K7.js";
4
+
5
+ // src/v2/utils/httpClient.ts
6
+ import axios from "axios";
7
+
8
+ // src/v2/utils/getVersion.ts
9
/**
 * Resolve the SDK version string for request tagging.
 * Order of preference: the npm-provided `npm_package_version` env var,
 * then the bundled package.json, then the "3.x.x" placeholder on any failure.
 * @returns {string} Version string; never throws.
 */
function getVersion() {
  try {
    const envVersion =
      typeof process !== "undefined" && process.env
        ? process.env.npm_package_version
        : undefined;
    if (envVersion) return envVersion;
    return require_package()?.version || "3.x.x";
  } catch {
    // require_package may be unavailable in some bundling setups
    return "3.x.x";
  }
}
20
+
21
+ // src/v2/utils/httpClient.ts
22
/**
 * Thin axios wrapper used by all v2 methods.
 * Injects the bearer token, tags every request with the SDK origin
 * (header for GET/DELETE, body field for POST/PUT/PATCH), and retries
 * 502 responses with exponential backoff.
 */
var HttpClient = class {
  instance;
  apiKey;
  apiUrl;
  maxRetries;
  backoffFactor;
  /**
   * @param options Transport config: apiKey, apiUrl, timeoutMs, maxRetries, backoffFactor.
   */
  constructor(options) {
    this.apiKey = options.apiKey;
    // normalize away a single trailing slash so path concatenation is predictable
    this.apiUrl = options.apiUrl.replace(/\/$/, "");
    this.maxRetries = options.maxRetries ?? 3;
    this.backoffFactor = options.backoffFactor ?? 0.5;
    this.instance = axios.create({
      baseURL: this.apiUrl,
      timeout: options.timeoutMs ?? 6e4,
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      transitional: { clarifyTimeoutError: true }
    });
  }
  getApiUrl() {
    return this.apiUrl;
  }
  getApiKey() {
    return this.apiKey;
  }
  /**
   * Execute a request with 502-retry semantics.
   * Non-502 responses/errors pass through immediately.
   */
  async request(config) {
    const origin = `js-sdk@${getVersion()}`;
    // origin header for GET/DELETE; POST-family requests also carry it in the body
    config.headers = { ...(config.headers || {}), origin };
    let lastError;
    let attempt = 0;
    while (attempt < this.maxRetries) {
      const cfg = { ...config };
      const method = cfg.method ? cfg.method.toLowerCase() : "";
      if (method === "post" || method === "put" || method === "patch") {
        cfg.data = { ...(cfg.data ?? {}), origin };
      }
      const retriesLeft = attempt < this.maxRetries - 1;
      try {
        const res = await this.instance.request(cfg);
        if (res.status !== 502 || !retriesLeft) return res;
      } catch (err) {
        lastError = err;
        if (err?.response?.status !== 502 || !retriesLeft) throw err;
      }
      // exponential backoff before the next 502 retry
      await this.sleep(this.backoffFactor * 2 ** attempt);
      attempt++;
    }
    throw lastError ?? new Error("Unexpected HTTP client error");
  }
  sleep(seconds) {
    return new Promise((resolve) => setTimeout(resolve, seconds * 1e3));
  }
  post(endpoint, body, headers) {
    return this.request({ method: "post", url: endpoint, data: body, headers });
  }
  get(endpoint, headers) {
    return this.request({ method: "get", url: endpoint, headers });
  }
  delete(endpoint, headers) {
    return this.request({ method: "delete", url: endpoint, headers });
  }
  /** Build optional per-request headers (currently only the idempotency key). */
  prepareHeaders(idempotencyKey) {
    return idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {};
  }
};
100
+
101
+ // src/v2/types.ts
102
/**
 * Error type surfaced by all v2 SDK wrappers.
 * Carries the HTTP status, an optional machine-readable code, and any
 * structured details returned by the API.
 */
var SdkError = class extends Error {
  status;
  code;
  details;
  /**
   * @param message Human-readable description.
   * @param status HTTP status code, if known.
   * @param code Machine-readable error code, if provided by the API.
   * @param details Structured error payload from the API, if any.
   */
  constructor(message, status, code, details) {
    super(message);
    this.name = "FirecrawlSdkError";
    Object.assign(this, { status, code, details });
  }
};
114
+
115
+ // src/v2/utils/validation.ts
116
+ import zodToJsonSchema from "zod-to-json-schema";
117
/**
 * Validate the `formats` array of a scrape request.
 * - The string shorthand "json" is rejected (the object form is required).
 * - Object json formats need `prompt` and/or `schema`; Zod schemas are
 *   converted to JSON Schema in place (best-effort; failures are ignored).
 * - Screenshot quality must be a non-negative number when present.
 * @throws Error on any invalid format entry.
 */
function ensureValidFormats(formats) {
  if (!formats) return;
  for (const fmt of formats) {
    if (typeof fmt === "string") {
      if (fmt === "json") {
        throw new Error("json format must be an object with { type: 'json', prompt, schema }");
      }
    } else if (fmt.type === "json") {
      const jsonFmt = fmt;
      if (!jsonFmt.prompt && !jsonFmt.schema) {
        throw new Error("json format requires either 'prompt' or 'schema' (or both)");
      }
      const schema = jsonFmt.schema;
      // duck-type Zod: parse/safeParse methods plus the internal _def marker
      const looksLikeZod =
        !!schema &&
        (typeof schema.safeParse === "function" || typeof schema.parse === "function") &&
        !!schema._def;
      if (looksLikeZod) {
        try {
          jsonFmt.schema = zodToJsonSchema(schema);
        } catch {
          // best-effort conversion; keep the original schema on failure
        }
      }
    } else if (fmt.type === "screenshot") {
      const shot = fmt;
      if (shot.quality != null && (typeof shot.quality !== "number" || shot.quality < 0)) {
        throw new Error("screenshot.quality must be a non-negative number");
      }
    }
  }
}
/**
 * Validate shared scrape options (timeout, waitFor, formats).
 * @throws Error when a constraint is violated.
 */
function ensureValidScrapeOptions(options) {
  if (!options) return;
  if (options.timeout != null && options.timeout <= 0) {
    throw new Error("timeout must be positive");
  }
  if (options.waitFor != null && options.waitFor < 0) {
    throw new Error("waitFor must be non-negative");
  }
  ensureValidFormats(options.formats);
}
159
+
160
+ // src/v2/utils/errorHandler.ts
161
+ import "axios";
162
/**
 * Raise an SdkError for a response that resolved without throwing but
 * signals failure (non-200 or success=false).
 * @param resp Axios-style response ({ status, data }).
 * @param action Short description of the attempted operation, used in messages.
 * @throws SdkError always.
 */
function throwForBadResponse(resp, action) {
  const status = resp.status;
  const body = resp.data || {};
  const fallback = `Request failed (${status}) while trying to ${action}`;
  const message = body?.error || body?.message || fallback;
  throw new SdkError(message, status, void 0, body?.details);
}
/**
 * Convert an axios transport error into an SdkError.
 * Despite the name this always throws; callers use `return normalizeAxiosError(...)`
 * purely to satisfy control-flow analysis.
 * @throws SdkError always.
 */
function normalizeAxiosError(err, action) {
  const status = err.response?.status;
  const body = err.response?.data;
  const statusPart = status ? ` (${status})` : "";
  const fallback = `Request failed${statusPart} while trying to ${action}`;
  const message = body?.error || err.message || fallback;
  throw new SdkError(message, status, body?.code || err.code, body?.details ?? body);
}
175
+
176
+ // src/v2/methods/scrape.ts
177
// src/v2/methods/scrape.ts
/**
 * Scrape a single URL via POST /v2/scrape.
 * @param http HttpClient instance.
 * @param url Target URL (whitespace is trimmed; must be non-empty).
 * @param options Optional scrape options, validated and merged into the payload.
 * @returns The scraped document (empty object if the API returned no data).
 * @throws Error for an empty URL; SdkError for API/transport failures.
 */
async function scrape(http, url, options) {
  if (!url || !url.trim()) {
    throw new Error("URL cannot be empty");
  }
  if (options) ensureValidScrapeOptions(options);
  const payload = { url: url.trim(), ...(options || {}) };
  try {
    const res = await http.post("/v2/scrape", payload);
    if (res.status !== 200 || !res.data?.success) {
      throwForBadResponse(res, "scrape");
    }
    return res.data.data || {};
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "scrape");
    throw err;
  }
}
195
+
196
+ // src/v2/methods/search.ts
197
// src/v2/methods/search.ts
/**
 * Validate a search request and build the wire payload.
 * @throws Error on empty query or non-positive limit/timeout.
 */
function prepareSearchPayload(req) {
  const { query, sources, limit, tbs, location, ignoreInvalidURLs, timeout, scrapeOptions } = req;
  if (!query || !query.trim()) throw new Error("Query cannot be empty");
  if (limit != null && limit <= 0) throw new Error("limit must be positive");
  if (timeout != null && timeout <= 0) throw new Error("timeout must be positive");
  const payload = { query };
  if (sources) payload.sources = sources;
  if (limit != null) payload.limit = limit;
  if (tbs != null) payload.tbs = tbs;
  if (location != null) payload.location = location;
  if (ignoreInvalidURLs != null) payload.ignoreInvalidURLs = ignoreInvalidURLs;
  if (timeout != null) payload.timeout = timeout;
  if (scrapeOptions) {
    ensureValidScrapeOptions(scrapeOptions);
    payload.scrapeOptions = scrapeOptions;
  }
  return payload;
}
/**
 * Search the web via POST /v2/search and normalize the grouped results.
 * Items carrying scraped-document keys pass through untouched; otherwise
 * only { url, title, description } are kept. Bare strings become { url }.
 * @returns Object keyed by source (e.g. "web") with arrays of results.
 */
async function search(http, request) {
  const payload = prepareSearchPayload(request);
  try {
    const res = await http.post("/v2/search", payload);
    if (res.status !== 200 || !res.data?.success) {
      throwForBadResponse(res, "search");
    }
    const grouped = res.data.data || {};
    // any of these keys marks an item as a full scraped document
    const docKeys = ["markdown", "html", "rawHtml", "links", "screenshot", "changeTracking", "summary", "json"];
    const out = {};
    for (const [source, items] of Object.entries(grouped)) {
      if (!Array.isArray(items)) continue;
      const results = [];
      for (const item of items) {
        if (item && typeof item === "object") {
          const isDocument = docKeys.some((k) => k in item);
          results.push(isDocument ? item : { url: item.url, title: item.title, description: item.description });
        } else if (typeof item === "string") {
          results.push({ url: item });
        }
      }
      out[source] = results;
    }
    return out;
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "search");
    throw err;
  }
}
249
+
250
+ // src/v2/methods/map.ts
251
// src/v2/methods/map.ts
/**
 * Validate a map request and build the wire payload.
 * @throws Error on empty URL.
 */
function prepareMapPayload(url, options) {
  if (!url || !url.trim()) throw new Error("URL cannot be empty");
  const payload = { url: url.trim() };
  if (options) {
    for (const key of ["sitemap", "search", "includeSubdomains", "limit", "timeout"]) {
      if (options[key] != null) payload[key] = options[key];
    }
  }
  return payload;
}
/**
 * Map a site via POST /v2/map; normalizes links to { url, title?, description? }.
 * @returns { links: Array<{url, title?, description?}> }
 */
async function map(http, url, options) {
  const payload = prepareMapPayload(url, options);
  try {
    const res = await http.post("/v2/map", payload);
    if (res.status !== 200 || !res.data?.success) {
      throwForBadResponse(res, "map");
    }
    const links = (res.data.links || []).flatMap((item) => {
      if (typeof item === "string") return [{ url: item }];
      if (item && typeof item === "object") {
        return [{ url: item.url, title: item.title, description: item.description }];
      }
      return [];
    });
    return { links };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "map");
    throw err;
  }
}
282
+
283
+ // src/v2/methods/crawl.ts
284
// src/v2/methods/crawl.ts
/**
 * Validate a crawl request and build the wire payload.
 * Truthy-gated fields: prompt, excludePaths, includePaths, scrapeOptions.
 * Null-gated fields: everything else.
 * @throws Error on empty URL.
 */
function prepareCrawlPayload(request) {
  if (!request.url || !request.url.trim()) throw new Error("URL cannot be empty");
  const data = { url: request.url.trim() };
  if (request.prompt) data.prompt = request.prompt;
  if (request.excludePaths) data.excludePaths = request.excludePaths;
  if (request.includePaths) data.includePaths = request.includePaths;
  for (const key of [
    "maxDiscoveryDepth",
    "sitemap",
    "ignoreQueryParameters",
    "limit",
    "crawlEntireDomain",
    "allowExternalLinks",
    "allowSubdomains",
    "delay",
    "maxConcurrency",
    "webhook"
  ]) {
    if (request[key] != null) data[key] = request[key];
  }
  if (request.scrapeOptions) {
    ensureValidScrapeOptions(request.scrapeOptions);
    data.scrapeOptions = request.scrapeOptions;
  }
  if (request.zeroDataRetention != null) data.zeroDataRetention = request.zeroDataRetention;
  return data;
}
/**
 * Start a crawl job (async) via POST /v2/crawl.
 * @returns { id, url } job handle.
 */
async function startCrawl(http, request) {
  const payload = prepareCrawlPayload(request);
  try {
    const res = await http.post("/v2/crawl", payload);
    if (res.status !== 200 || !res.data?.success) {
      throwForBadResponse(res, "start crawl");
    }
    const { id, url } = res.data;
    return { id, url };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "start crawl");
    throw err;
  }
}
/**
 * Fetch the status snapshot of a crawl job via GET /v2/crawl/:id.
 * Missing counters default to 0, `next` to null, `data` to [].
 */
async function getCrawlStatus(http, jobId) {
  try {
    const res = await http.get(`/v2/crawl/${jobId}`);
    if (res.status !== 200 || !res.data?.success) {
      throwForBadResponse(res, "get crawl status");
    }
    const { status, completed, total, creditsUsed, expiresAt, next, data } = res.data;
    return {
      status,
      completed: completed ?? 0,
      total: total ?? 0,
      creditsUsed,
      expiresAt,
      next: next ?? null,
      data: data || []
    };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl status");
    throw err;
  }
}
341
/**
 * Cancel a crawl job via DELETE /v2/crawl/:id.
 * @returns true iff the API reports status "cancelled".
 */
async function cancelCrawl(http, jobId) {
  try {
    const res = await http.delete(`/v2/crawl/${jobId}`);
    if (res.status !== 200) throwForBadResponse(res, "cancel crawl");
    return res.data?.status === "cancelled";
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "cancel crawl");
    throw err;
  }
}
/**
 * Poll a crawl job until it reaches a terminal state.
 * @param pollInterval Seconds between polls (floored at 1s).
 * @param timeout Optional overall budget in seconds; exceeded budget throws.
 * @returns Final status snapshot.
 */
async function waitForCrawlCompletion(http, jobId, pollInterval = 2, timeout) {
  const startedAt = Date.now();
  for (;;) {
    const snapshot = await getCrawlStatus(http, jobId);
    const state = snapshot.status;
    if (state === "completed" || state === "failed" || state === "cancelled") {
      return snapshot;
    }
    if (timeout != null && Date.now() - startedAt > timeout * 1e3) {
      throw new Error(`Crawl job ${jobId} did not complete within ${timeout} seconds`);
    }
    await new Promise((tick) => setTimeout(tick, Math.max(1e3, pollInterval * 1e3)));
  }
}
/**
 * Convenience: start a crawl and wait for it to finish.
 */
async function crawl(http, request, pollInterval = 2, timeout) {
  const { id } = await startCrawl(http, request);
  return waitForCrawlCompletion(http, id, pollInterval, timeout);
}
366
/**
 * Fetch per-URL crawl errors and robots.txt blocks via GET /v2/crawl/:id/errors.
 * Accepts both enveloped ({ data: {...} }) and flat response bodies.
 */
async function getCrawlErrors(http, crawlId) {
  try {
    const res = await http.get(`/v2/crawl/${crawlId}/errors`);
    if (res.status !== 200) throwForBadResponse(res, "get crawl errors");
    const body = res.data?.data ?? res.data;
    return { errors: body.errors || [], robotsBlocked: body.robotsBlocked || [] };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get crawl errors");
    throw err;
  }
}
/**
 * List the team's active crawls via GET /v2/crawl/active.
 * Normalizes snake_case team_id to teamId.
 */
async function getActiveCrawls(http) {
  try {
    const res = await http.get(`/v2/crawl/active`);
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get active crawls");
    const crawls = (res.data?.crawls || []).map((c) => ({
      id: c.id,
      teamId: c.teamId ?? c.team_id,
      url: c.url,
      options: c.options ?? null
    }));
    return { success: true, crawls };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get active crawls");
    throw err;
  }
}
/**
 * Preview the crawl parameters derived from a natural-language prompt
 * via POST /v2/crawl/params-preview. A server-side warning, when present,
 * is attached to the returned params object.
 * @throws Error on empty URL or prompt.
 */
async function crawlParamsPreview(http, url, prompt) {
  if (!url || !url.trim()) throw new Error("URL cannot be empty");
  if (!prompt || !prompt.trim()) throw new Error("Prompt cannot be empty");
  try {
    const res = await http.post("/v2/crawl/params-preview", { url: url.trim(), prompt });
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "crawl params preview");
    const data = res.data.data || {};
    if (res.data.warning) data.warning = res.data.warning;
    return data;
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "crawl params preview");
    throw err;
  }
}
403
+
404
+ // src/v2/methods/batch.ts
405
// src/v2/methods/batch.ts
/**
 * Start a batch scrape job (async) via POST /v2/batch/scrape.
 * @param urls Non-empty list of URLs.
 * @param opts Scrape options plus webhook/concurrency/idempotency controls.
 * @returns { id, url, invalidURLs? } job handle.
 * @throws Error on an empty URL list; SdkError for API/transport failures.
 */
async function startBatchScrape(http, urls, {
  options,
  webhook,
  appendToId,
  ignoreInvalidURLs,
  maxConcurrency,
  zeroDataRetention,
  integration,
  idempotencyKey
} = {}) {
  if (!Array.isArray(urls) || urls.length === 0) throw new Error("URLs list cannot be empty");
  const payload = { urls };
  if (options) {
    ensureValidScrapeOptions(options);
    Object.assign(payload, options);
  }
  const extras = { webhook, appendToId, ignoreInvalidURLs, maxConcurrency, zeroDataRetention, integration };
  for (const [key, value] of Object.entries(extras)) {
    if (value != null) payload[key] = value;
  }
  try {
    const headers = http.prepareHeaders(idempotencyKey);
    const res = await http.post("/v2/batch/scrape", payload, headers);
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "start batch scrape");
    return { id: res.data.id, url: res.data.url, invalidURLs: res.data.invalidURLs || void 0 };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "start batch scrape");
    throw err;
  }
}
/**
 * Fetch the status snapshot of a batch job via GET /v2/batch/scrape/:id.
 * Missing counters default to 0, `next` to null, `data` to [].
 */
async function getBatchScrapeStatus(http, jobId) {
  try {
    const res = await http.get(`/v2/batch/scrape/${jobId}`);
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get batch scrape status");
    const { status, completed, total, creditsUsed, expiresAt, next, data } = res.data;
    return {
      status,
      completed: completed ?? 0,
      total: total ?? 0,
      creditsUsed,
      expiresAt,
      next: next ?? null,
      data: data || []
    };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape status");
    throw err;
  }
}
/**
 * Cancel a batch job via DELETE /v2/batch/scrape/:id.
 * @returns true iff the API reports status "cancelled".
 */
async function cancelBatchScrape(http, jobId) {
  try {
    const res = await http.delete(`/v2/batch/scrape/${jobId}`);
    if (res.status !== 200) throwForBadResponse(res, "cancel batch scrape");
    return res.data?.status === "cancelled";
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "cancel batch scrape");
    throw err;
  }
}
/**
 * Fetch per-URL batch errors and robots.txt blocks.
 * Accepts both enveloped and flat response bodies.
 */
async function getBatchScrapeErrors(http, jobId) {
  try {
    const res = await http.get(`/v2/batch/scrape/${jobId}/errors`);
    if (res.status !== 200) throwForBadResponse(res, "get batch scrape errors");
    const body = res.data?.data ?? res.data;
    return { errors: body.errors || [], robotsBlocked: body.robotsBlocked || [] };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get batch scrape errors");
    throw err;
  }
}
/**
 * Poll a batch job until it reaches a terminal state.
 * @param pollInterval Seconds between polls (floored at 1s).
 * @param timeout Optional overall budget in seconds; exceeded budget throws.
 */
async function waitForBatchCompletion(http, jobId, pollInterval = 2, timeout) {
  const startedAt = Date.now();
  for (;;) {
    const snapshot = await getBatchScrapeStatus(http, jobId);
    const state = snapshot.status;
    if (state === "completed" || state === "failed" || state === "cancelled") return snapshot;
    if (timeout != null && Date.now() - startedAt > timeout * 1e3) {
      throw new Error(`Batch scrape job ${jobId} did not complete within ${timeout} seconds`);
    }
    await new Promise((tick) => setTimeout(tick, Math.max(1e3, pollInterval * 1e3)));
  }
}
/**
 * Convenience: start a batch scrape and wait for it to finish.
 */
async function batchScrape(http, urls, opts = {}) {
  const { id } = await startBatchScrape(http, urls, opts);
  return waitForBatchCompletion(http, id, opts.pollInterval ?? 2, opts.timeout);
}
492
+
493
+ // src/v2/methods/extract.ts
494
+ import { zodToJsonSchema as zodToJsonSchema2 } from "zod-to-json-schema";
495
/**
 * Build the wire payload for POST /v2/extract.
 * Zod schemas are converted to JSON Schema; scrapeOptions are validated.
 */
function prepareExtractPayload(args) {
  const body = {};
  if (args.urls) body.urls = args.urls;
  if (args.prompt != null) body.prompt = args.prompt;
  if (args.schema != null) {
    const candidate = args.schema;
    // duck-type Zod: parse/safeParse methods plus the internal _def marker
    const zodLike =
      candidate &&
      (typeof candidate.safeParse === "function" || typeof candidate.parse === "function") &&
      candidate._def;
    body.schema = zodLike ? zodToJsonSchema2(candidate) : args.schema;
  }
  for (const key of ["systemPrompt", "allowExternalLinks", "enableWebSearch", "showSources", "ignoreInvalidURLs"]) {
    if (args[key] != null) body[key] = args[key];
  }
  if (args.scrapeOptions) {
    ensureValidScrapeOptions(args.scrapeOptions);
    body.scrapeOptions = args.scrapeOptions;
  }
  return body;
}
/**
 * Kick off an extract job; returns the raw response body
 * (may already contain the result, or a job id to poll).
 */
async function startExtract(http, args) {
  const payload = prepareExtractPayload(args);
  try {
    const res = await http.post("/v2/extract", payload);
    if (res.status !== 200) throwForBadResponse(res, "extract");
    return res.data;
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "extract");
    throw err;
  }
}
/** Fetch the current state of an extract job via GET /v2/extract/:id. */
async function getExtractStatus(http, jobId) {
  try {
    const res = await http.get(`/v2/extract/${jobId}`);
    if (res.status !== 200) throwForBadResponse(res, "extract status");
    return res.data;
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "extract status");
    throw err;
  }
}
/**
 * Poll an extract job until it is terminal.
 * NOTE: on timeout the latest (possibly non-terminal) snapshot is
 * returned rather than thrown.
 */
async function waitExtract(http, jobId, pollInterval = 2, timeout) {
  const begunAt = Date.now();
  for (;;) {
    const snapshot = await getExtractStatus(http, jobId);
    const state = snapshot.status || "";
    if (state === "completed" || state === "failed" || state === "cancelled") return snapshot;
    if (timeout != null && Date.now() - begunAt > timeout * 1e3) return snapshot;
    await new Promise((tick) => setTimeout(tick, Math.max(1e3, pollInterval * 1e3)));
  }
}
/**
 * Extract structured data: start the job and, when the API returns a job
 * id, poll it to completion; otherwise the immediate response is final.
 */
async function extract(http, args) {
  const started = await startExtract(http, args);
  if (!started.id) return started;
  return waitExtract(http, started.id, args.pollInterval ?? 2, args.timeout);
}
551
+
552
+ // src/v2/methods/usage.ts
553
// src/v2/methods/usage.ts
/**
 * Fetch current vs maximum concurrency via GET /v2/concurrency-check.
 * Accepts both enveloped and flat bodies, and snake_case max_concurrency.
 */
async function getConcurrency(http) {
  try {
    const res = await http.get("/v2/concurrency-check");
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get concurrency");
    const body = res.data.data || res.data;
    return { concurrency: body.concurrency, maxConcurrency: body.maxConcurrency ?? body.max_concurrency };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get concurrency");
    throw err;
  }
}
/**
 * Fetch remaining credits via GET /v2/team/credit-usage.
 * Falls back to snake_case remaining_credits, defaulting to 0.
 */
async function getCreditUsage(http) {
  try {
    const res = await http.get("/v2/team/credit-usage");
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get credit usage");
    const body = res.data.data || res.data;
    return { remainingCredits: body.remainingCredits ?? body.remaining_credits ?? 0 };
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get credit usage");
    throw err;
  }
}
/**
 * Fetch token usage via GET /v2/team/token-usage; returns the body as-is
 * (enveloped `data` preferred).
 */
async function getTokenUsage(http) {
  try {
    const res = await http.get("/v2/team/token-usage");
    if (res.status !== 200 || !res.data?.success) throwForBadResponse(res, "get token usage");
    return res.data.data || res.data;
  } catch (err) {
    if (err?.isAxiosError) return normalizeAxiosError(err, "get token usage");
    throw err;
  }
}
585
+
586
+ // src/v2/watcher.ts
587
+ import { EventEmitter } from "events";
588
/**
 * Streams crawl/batch-scrape progress as events: "document", "snapshot",
 * "error", "done". Prefers a WebSocket connection and falls back to HTTP
 * polling when the socket cannot be constructed or closes before the job
 * is marked done.
 */
var Watcher = class extends EventEmitter {
  http;
  jobId;
  kind;
  pollInterval;
  timeout;
  ws;
  closed = false;
  /**
   * @param http HttpClient used for both the WS handshake and polling.
   * @param jobId Crawl or batch job id.
   * @param opts { kind: "crawl"|"batch", pollInterval (s), timeout (s) }.
   */
  constructor(http, jobId, opts = {}) {
    super();
    this.http = http;
    this.jobId = jobId;
    this.kind = opts.kind ?? "crawl";
    this.pollInterval = opts.pollInterval ?? 2;
    this.timeout = opts.timeout;
  }
  /** Derive the ws(s):// status URL from the configured API base URL. */
  buildWsUrl() {
    const base = this.http.getApiUrl().replace(/^http/, "ws");
    const path = this.kind === "crawl" ? `/v2/crawl/${this.jobId}` : `/v2/batch/scrape/${this.jobId}`;
    return `${base}${path}`;
  }
  /** Open the WebSocket; on construction failure fall back to polling. */
  async start() {
    try {
      this.ws = new WebSocket(this.buildWsUrl(), this.http.getApiKey());
      this.attachWsHandlers(this.ws);
    } catch {
      this.pollLoop();
    }
  }
  attachWsHandlers(ws) {
    const startTs = Date.now();
    const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
    ws.onmessage = (ev) => {
      try {
        const body = typeof ev.data === "string" ? JSON.parse(ev.data) : null;
        if (!body) return;
        switch (body.type) {
          case "error":
            this.emit("error", { status: "failed", data: [], error: body.error, id: this.jobId });
            return;
          case "catchup": {
            const snapshot = body.data || {};
            this.emitDocuments(snapshot.data || []);
            this.emitSnapshot(snapshot);
            return;
          }
          case "document":
            if (body.data) this.emit("document", body.data);
            return;
          case "done":
            this.emit("done", { status: "completed", data: [], id: this.jobId });
            this.close();
            return;
          default: {
            // unrecognized frame: treat as a status snapshot when possible
            const payload = body.data || body;
            if (payload && payload.status) this.emitSnapshot(payload);
          }
        }
      } catch {
        // malformed frame: ignore and fall through to the timeout check
      }
      if (timeoutMs && Date.now() - startTs > timeoutMs) this.close();
    };
    ws.onerror = () => {
      this.emit("error", { status: "failed", data: [], error: "WebSocket error", id: this.jobId });
      this.close();
    };
    ws.onclose = () => {
      // premature close: switch to HTTP polling
      if (!this.closed) this.pollLoop();
    };
  }
  /** Emit each document with the job id attached. */
  emitDocuments(docs) {
    docs.forEach((doc) => this.emit("document", { ...doc, id: this.jobId }));
  }
  /** Emit a normalized snapshot and, on a terminal status, "done" + close. */
  emitSnapshot(payload) {
    const status = payload.status;
    const data = payload.data || [];
    // crawl and batch snapshots currently share the same shape
    const snap = {
      status,
      completed: payload.completed ?? 0,
      total: payload.total ?? 0,
      creditsUsed: payload.creditsUsed,
      expiresAt: payload.expiresAt,
      next: payload.next ?? null,
      data
    };
    this.emit("snapshot", snap);
    if (status === "completed" || status === "failed" || status === "cancelled") {
      this.emit("done", { status, data, id: this.jobId });
      this.close();
    }
  }
  /** HTTP polling fallback; swallows transient errors and retries. */
  async pollLoop() {
    const begunAt = Date.now();
    const timeoutMs = this.timeout ? this.timeout * 1e3 : void 0;
    while (!this.closed) {
      try {
        const snap = this.kind === "crawl"
          ? await getCrawlStatus(this.http, this.jobId)
          : await getBatchScrapeStatus(this.http, this.jobId);
        this.emit("snapshot", snap);
        if (["completed", "failed", "cancelled"].includes(snap.status)) {
          this.emit("done", { status: snap.status, data: snap.data, id: this.jobId });
          this.close();
          break;
        }
      } catch {
        // transient polling failure: retry on the next tick
      }
      if (timeoutMs && Date.now() - begunAt > timeoutMs) break;
      await new Promise((tick) => setTimeout(tick, Math.max(1e3, this.pollInterval * 1e3)));
    }
  }
  /** Stop watching and close the socket if one is open. */
  close() {
    this.closed = true;
    if (this.ws && this.ws.close) this.ws.close();
  }
};
713
+
714
+ // src/v2/client.ts
715
+ var FirecrawlClient = class {
716
+ http;
717
+ /**
718
+ * Create a v2 client.
719
+ * @param options Transport configuration (API key, base URL, timeouts, retries).
720
+ */
721
+ constructor(options = {}) {
722
+ const apiKey = options.apiKey ?? process.env.FIRECRAWL_API_KEY ?? "";
723
+ const apiUrl = (options.apiUrl ?? process.env.FIRECRAWL_API_URL ?? "https://api.firecrawl.dev").replace(/\/$/, "");
724
+ if (!apiKey) {
725
+ throw new Error("API key is required. Set FIRECRAWL_API_KEY env or pass apiKey.");
726
+ }
727
+ this.http = new HttpClient({
728
+ apiKey,
729
+ apiUrl,
730
+ timeoutMs: options.timeoutMs,
731
+ maxRetries: options.maxRetries,
732
+ backoffFactor: options.backoffFactor
733
+ });
734
+ }
735
+ async scrape(url, options) {
736
+ return scrape(this.http, url, options);
737
+ }
738
+ // Search
739
+ /**
740
+ * Search the web and optionally scrape each result.
741
+ * @param query Search query string.
742
+ * @param req Additional search options (sources, limit, scrapeOptions, etc.).
743
+ * @returns Structured search results.
744
+ */
745
+ async search(query, req = {}) {
746
+ return search(this.http, { query, ...req });
747
+ }
748
+ // Map
749
+ /**
750
+ * Map a site to discover URLs (sitemap-aware).
751
+ * @param url Root URL to map.
752
+ * @param options Mapping options (sitemap mode, includeSubdomains, limit, timeout).
753
+ * @returns Discovered links.
754
+ */
755
+ async map(url, options) {
756
+ return map(this.http, url, options);
757
+ }
758
+ // Crawl
759
+ /**
760
+ * Start a crawl job (async).
761
+ * @param url Root URL to crawl.
762
+ * @param req Crawl configuration (paths, limits, scrapeOptions, webhook, etc.).
763
+ * @returns Job id and url.
764
+ */
765
+ async startCrawl(url, req = {}) {
766
+ return startCrawl(this.http, { url, ...req });
767
+ }
768
+ /**
769
+ * Get the status and partial data of a crawl job.
770
+ * @param jobId Crawl job id.
771
+ */
772
+ async getCrawlStatus(jobId) {
773
+ return getCrawlStatus(this.http, jobId);
774
+ }
775
+ /**
776
+ * Cancel a crawl job.
777
+ * @param jobId Crawl job id.
778
+ * @returns True if cancelled.
779
+ */
780
+ async cancelCrawl(jobId) {
781
+ return cancelCrawl(this.http, jobId);
782
+ }
783
+ /**
784
+ * Convenience waiter: start a crawl and poll until it finishes.
785
+ * @param url Root URL to crawl.
786
+ * @param req Crawl configuration plus waiter controls (pollInterval, timeout seconds).
787
+ * @returns Final job snapshot.
788
+ */
789
+ async crawl(url, req = {}) {
790
+ return crawl(this.http, { url, ...req }, req.pollInterval, req.timeout);
791
+ }
792
+ /**
793
+ * Retrieve crawl errors and robots.txt blocks.
794
+ * @param crawlId Crawl job id.
795
+ */
796
+ async getCrawlErrors(crawlId) {
797
+ return getCrawlErrors(this.http, crawlId);
798
+ }
799
+ /**
800
+ * List active crawls for the authenticated team.
801
+ */
802
+ async getActiveCrawls() {
803
+ return getActiveCrawls(this.http);
804
+ }
805
+ /**
806
+ * Preview normalized crawl parameters produced by a natural-language prompt.
807
+ * @param url Root URL.
808
+ * @param prompt Natural-language instruction.
809
+ */
810
+ async crawlParamsPreview(url, prompt) {
811
+ return crawlParamsPreview(this.http, url, prompt);
812
+ }
813
+ // Batch
814
+ /**
815
+ * Start a batch scrape job for multiple URLs (async).
816
+ * @param urls URLs to scrape.
817
+ * @param opts Batch options (scrape options, webhook, concurrency, idempotency key, etc.).
818
+ * @returns Job id and url.
819
+ */
820
+ async startBatchScrape(urls, opts) {
821
+ return startBatchScrape(this.http, urls, opts);
822
+ }
823
+ /**
824
+ * Get the status and partial data of a batch scrape job.
825
+ * @param jobId Batch job id.
826
+ */
827
+ async getBatchScrapeStatus(jobId) {
828
+ return getBatchScrapeStatus(this.http, jobId);
829
+ }
830
+ /**
831
+ * Retrieve batch scrape errors and robots.txt blocks.
832
+ * @param jobId Batch job id.
833
+ */
834
+ async getBatchScrapeErrors(jobId) {
835
+ return getBatchScrapeErrors(this.http, jobId);
836
+ }
837
+ /**
838
+ * Cancel a batch scrape job.
839
+ * @param jobId Batch job id.
840
+ * @returns True if cancelled.
841
+ */
842
+ async cancelBatchScrape(jobId) {
843
+ return cancelBatchScrape(this.http, jobId);
844
+ }
845
+ /**
846
+ * Convenience waiter: start a batch scrape and poll until it finishes.
847
+ * @param urls URLs to scrape.
848
+ * @param opts Batch options plus waiter controls (pollInterval, timeout seconds).
849
+ * @returns Final job snapshot.
850
+ */
851
+ async batchScrape(urls, opts) {
852
+ return batchScrape(this.http, urls, opts);
853
+ }
854
+ // Extract
855
+ /**
856
+ * Start an extract job (async).
857
+ * @param args Extraction request (urls, schema or prompt, flags).
858
+ * @returns Job id or processing state.
859
+ */
860
+ async startExtract(args) {
861
+ return startExtract(this.http, args);
862
+ }
863
+ /**
864
+ * Get extract job status/data.
865
+ * @param jobId Extract job id.
866
+ */
867
+ async getExtractStatus(jobId) {
868
+ return getExtractStatus(this.http, jobId);
869
+ }
870
+ /**
871
+ * Convenience waiter: start an extract and poll until it finishes.
872
+ * @param args Extraction request plus waiter controls (pollInterval, timeout seconds).
873
+ * @returns Final extract response.
874
+ */
875
+ async extract(args) {
876
+ return extract(this.http, args);
877
+ }
878
+ // Usage
879
+ /** Current concurrency usage. */
880
+ async getConcurrency() {
881
+ return getConcurrency(this.http);
882
+ }
883
+ /** Current credit usage. */
884
+ async getCreditUsage() {
885
+ return getCreditUsage(this.http);
886
+ }
887
+ /** Recent token usage. */
888
+ async getTokenUsage() {
889
+ return getTokenUsage(this.http);
890
+ }
891
+ // Watcher
892
+ /**
893
+ * Create a watcher for a crawl or batch job. Emits: `document`, `snapshot`, `done`, `error`.
894
+ * @param jobId Job id.
895
+ * @param opts Watcher options (kind, pollInterval, timeout seconds).
896
+ */
897
+ watcher(jobId, opts = {}) {
898
+ return new Watcher(this.http, jobId, opts);
899
+ }
900
+ };
901
+
902
+ // src/v1/index.ts
903
+ import axios2, { AxiosError } from "axios";
3
904
  import "zod";
4
- import { zodToJsonSchema } from "zod-to-json-schema";
905
+ import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
5
906
 
6
907
  // node_modules/typescript-event-target/dist/index.mjs
7
908
  var e = class extends EventTarget {
@@ -10,7 +911,7 @@ var e = class extends EventTarget {
10
911
  }
11
912
  };
12
913
 
13
- // src/index.ts
914
+ // src/v1/index.ts
14
915
  var FirecrawlError = class extends Error {
15
916
  statusCode;
16
917
  details;
@@ -29,10 +930,16 @@ var FirecrawlApp = class {
29
930
  }
30
931
  async getVersion() {
31
932
  try {
32
- const packageJson = await import("./package-Z6F7JDXI.js");
933
+ if (typeof process !== "undefined" && process.env && process.env.npm_package_version) {
934
+ return process.env.npm_package_version;
935
+ }
936
+ const packageJson = await import("./package-5MOU5FLU.js");
33
937
  return packageJson.default.version;
34
938
  } catch (error) {
35
- console.error("Error getting version:", error);
939
+ const isTest = typeof process !== "undefined" && (process.env.JEST_WORKER_ID != null || false);
940
+ if (!isTest) {
941
+ console.error("Error getting version:", error);
942
+ }
36
943
  return "1.25.1";
37
944
  }
38
945
  }
@@ -67,7 +974,7 @@ var FirecrawlApp = class {
67
974
  if (jsonData?.extract?.schema) {
68
975
  let schema = jsonData.extract.schema;
69
976
  try {
70
- schema = zodToJsonSchema(schema);
977
+ schema = zodToJsonSchema3(schema);
71
978
  } catch (error) {
72
979
  }
73
980
  jsonData = {
@@ -81,7 +988,7 @@ var FirecrawlApp = class {
81
988
  if (jsonData?.jsonOptions?.schema) {
82
989
  let schema = jsonData.jsonOptions.schema;
83
990
  try {
84
- schema = zodToJsonSchema(schema);
991
+ schema = zodToJsonSchema3(schema);
85
992
  } catch (error) {
86
993
  }
87
994
  jsonData = {
@@ -93,7 +1000,7 @@ var FirecrawlApp = class {
93
1000
  };
94
1001
  }
95
1002
  try {
96
- const response = await axios.post(
1003
+ const response = await axios2.post(
97
1004
  this.apiUrl + `/v1/scrape`,
98
1005
  jsonData,
99
1006
  { headers, timeout: params?.timeout !== void 0 ? params.timeout + 5e3 : void 0 }
@@ -144,7 +1051,7 @@ var FirecrawlApp = class {
144
1051
  if (jsonData?.scrapeOptions?.extract?.schema) {
145
1052
  let schema = jsonData.scrapeOptions.extract.schema;
146
1053
  try {
147
- schema = zodToJsonSchema(schema);
1054
+ schema = zodToJsonSchema3(schema);
148
1055
  } catch (error) {
149
1056
  }
150
1057
  jsonData = {
@@ -365,9 +1272,9 @@ var FirecrawlApp = class {
365
1272
  * @returns A CrawlWatcher instance to monitor the crawl job.
366
1273
  */
367
1274
  async crawlUrlAndWatch(url, params, idempotencyKey) {
368
- const crawl = await this.asyncCrawlUrl(url, params, idempotencyKey);
369
- if (crawl.success && crawl.id) {
370
- const id = crawl.id;
1275
+ const crawl2 = await this.asyncCrawlUrl(url, params, idempotencyKey);
1276
+ if (crawl2.success && crawl2.id) {
1277
+ const id = crawl2.id;
371
1278
  return new CrawlWatcher(id, this);
372
1279
  }
373
1280
  throw new FirecrawlError("Crawl job failed to start", 400);
@@ -413,7 +1320,7 @@ var FirecrawlApp = class {
413
1320
  if (jsonData?.extract?.schema) {
414
1321
  let schema = jsonData.extract.schema;
415
1322
  try {
416
- schema = zodToJsonSchema(schema);
1323
+ schema = zodToJsonSchema3(schema);
417
1324
  } catch (error) {
418
1325
  }
419
1326
  jsonData = {
@@ -427,7 +1334,7 @@ var FirecrawlApp = class {
427
1334
  if (jsonData?.jsonOptions?.schema) {
428
1335
  let schema = jsonData.jsonOptions.schema;
429
1336
  try {
430
- schema = zodToJsonSchema(schema);
1337
+ schema = zodToJsonSchema3(schema);
431
1338
  } catch (error) {
432
1339
  }
433
1340
  jsonData = {
@@ -490,9 +1397,9 @@ var FirecrawlApp = class {
490
1397
  * @returns A CrawlWatcher instance to monitor the crawl job.
491
1398
  */
492
1399
  async batchScrapeUrlsAndWatch(urls, params, idempotencyKey, webhook, ignoreInvalidURLs) {
493
- const crawl = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey, webhook, ignoreInvalidURLs);
494
- if (crawl.success && crawl.id) {
495
- const id = crawl.id;
1400
+ const crawl2 = await this.asyncBatchScrapeUrls(urls, params, idempotencyKey, webhook, ignoreInvalidURLs);
1401
+ if (crawl2.success && crawl2.id) {
1402
+ const id = crawl2.id;
496
1403
  return new CrawlWatcher(id, this);
497
1404
  }
498
1405
  throw new FirecrawlError("Batch scrape job failed to start", 400);
@@ -606,7 +1513,7 @@ var FirecrawlApp = class {
606
1513
  jsonSchema = void 0;
607
1514
  } else {
608
1515
  try {
609
- jsonSchema = zodToJsonSchema(params.schema);
1516
+ jsonSchema = zodToJsonSchema3(params.schema);
610
1517
  } catch (_) {
611
1518
  jsonSchema = params.schema;
612
1519
  }
@@ -670,7 +1577,7 @@ var FirecrawlApp = class {
670
1577
  jsonSchema = void 0;
671
1578
  } else {
672
1579
  try {
673
- jsonSchema = zodToJsonSchema(params.schema);
1580
+ jsonSchema = zodToJsonSchema3(params.schema);
674
1581
  } catch (_) {
675
1582
  jsonSchema = params.schema;
676
1583
  }
@@ -734,7 +1641,7 @@ var FirecrawlApp = class {
734
1641
  * @returns The response from the POST request.
735
1642
  */
736
1643
  postRequest(url, data, headers) {
737
- return axios.post(url, data, { headers, timeout: data?.timeout ? data.timeout + 5e3 : void 0 });
1644
+ return axios2.post(url, data, { headers, timeout: data?.timeout ? data.timeout + 5e3 : void 0 });
738
1645
  }
739
1646
  /**
740
1647
  * Sends a GET request to the specified URL.
@@ -744,7 +1651,7 @@ var FirecrawlApp = class {
744
1651
  */
745
1652
  async getRequest(url, headers) {
746
1653
  try {
747
- return await axios.get(url, { headers });
1654
+ return await axios2.get(url, { headers });
748
1655
  } catch (error) {
749
1656
  if (error instanceof AxiosError && error.response) {
750
1657
  return error.response;
@@ -761,7 +1668,7 @@ var FirecrawlApp = class {
761
1668
  */
762
1669
  async deleteRequest(url, headers) {
763
1670
  try {
764
- return await axios.delete(url, { headers });
1671
+ return await axios2.delete(url, { headers });
765
1672
  } catch (error) {
766
1673
  if (error instanceof AxiosError && error.response) {
767
1674
  return error.response;
@@ -961,7 +1868,7 @@ var FirecrawlApp = class {
961
1868
  if (jsonData?.jsonOptions?.schema) {
962
1869
  let schema = jsonData.jsonOptions.schema;
963
1870
  try {
964
- schema = zodToJsonSchema(schema);
1871
+ schema = zodToJsonSchema3(schema);
965
1872
  } catch (error) {
966
1873
  }
967
1874
  jsonData = {
@@ -1316,8 +2223,28 @@ var CrawlWatcher = class extends e {
1316
2223
  this.ws.close();
1317
2224
  }
1318
2225
  };
2226
+
2227
+ // src/index.ts
2228
+ var Firecrawl = class extends FirecrawlClient {
2229
+ /** Feature‑frozen v1 client (lazy). */
2230
+ _v1;
2231
+ _v1Opts;
2232
+ /** @param opts API credentials and base URL. */
2233
+ constructor(opts = {}) {
2234
+ super(opts);
2235
+ this._v1Opts = opts;
2236
+ }
2237
+ /** Access the legacy v1 client (instantiated on first access). */
2238
+ get v1() {
2239
+ if (!this._v1) this._v1 = new FirecrawlApp(this._v1Opts);
2240
+ return this._v1;
2241
+ }
2242
+ };
2243
+ var src_default = Firecrawl;
1319
2244
  export {
1320
- CrawlWatcher,
1321
- FirecrawlError,
1322
- FirecrawlApp as default
2245
+ Firecrawl,
2246
+ FirecrawlApp as FirecrawlAppV1,
2247
+ FirecrawlClient,
2248
+ SdkError,
2249
+ src_default as default
1323
2250
  };