firecrawl 1.1.0 → 1.3.0

This diff compares the publicly released contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -1,11 +1,13 @@
- import { AxiosResponse, AxiosRequestHeaders } from "axios";
- import { TypedEventTarget } from "typescript-event-target";
+ import { AxiosRequestHeaders, AxiosResponse } from 'axios';
+ import { ZodSchema } from 'zod';
+ import { TypedEventTarget } from 'typescript-event-target';
+
  /**
   * Configuration interface for FirecrawlApp.
   * @param apiKey - Optional API key for authentication.
   * @param apiUrl - Optional base URL of the API; defaults to 'https://api.firecrawl.dev'.
   */
- export interface FirecrawlAppConfig {
+ interface FirecrawlAppConfig {
      apiKey?: string | null;
      apiUrl?: string | null;
  }
@@ -13,7 +15,7 @@ export interface FirecrawlAppConfig {
   * Metadata for a Firecrawl document.
   * Includes various optional properties for document metadata.
   */
- export interface FirecrawlDocumentMetadata {
+ interface FirecrawlDocumentMetadata {
      title?: string;
      description?: string;
      language?: string;
@@ -52,12 +54,13 @@ export interface FirecrawlDocumentMetadata {
   * Document interface for Firecrawl.
   * Represents a document retrieved or processed by Firecrawl.
   */
- export interface FirecrawlDocument {
+ interface FirecrawlDocument {
      url?: string;
      markdown?: string;
      html?: string;
      rawHtml?: string;
      links?: string[];
+     extract?: Record<any, any>;
      screenshot?: string;
      metadata?: FirecrawlDocumentMetadata;
  }
@@ -65,12 +68,17 @@ export interface FirecrawlDocument {
   * Parameters for scraping operations.
   * Defines the options and configurations available for scraping web content.
   */
- export interface ScrapeParams {
-     formats: ("markdown" | "html" | "rawHtml" | "content" | "links" | "screenshot" | "full@scrennshot")[];
+ interface ScrapeParams {
+     formats: ("markdown" | "html" | "rawHtml" | "content" | "links" | "screenshot" | "extract" | "full@scrennshot")[];
      headers?: Record<string, string>;
      includeTags?: string[];
      excludeTags?: string[];
      onlyMainContent?: boolean;
+     extract?: {
+         prompt?: string;
+         schema?: ZodSchema | any;
+         systemPrompt?: string;
+     };
      waitFor?: number;
      timeout?: number;
  }
@@ -78,7 +86,7 @@ export interface ScrapeParams {
   * Response interface for scraping operations.
   * Defines the structure of the response received after a scraping operation.
   */
- export interface ScrapeResponse extends FirecrawlDocument {
+ interface ScrapeResponse extends FirecrawlDocument {
      success: true;
      warning?: string;
      error?: string;
@@ -87,7 +95,7 @@ export interface ScrapeResponse extends FirecrawlDocument {
   * Parameters for crawling operations.
   * Includes options for both scraping and mapping during a crawl.
   */
- export interface CrawlParams {
+ interface CrawlParams {
      includePaths?: string[];
      excludePaths?: string[];
      maxDepth?: number;
@@ -96,12 +104,13 @@ export interface CrawlParams {
      allowExternalLinks?: boolean;
      ignoreSitemap?: boolean;
      scrapeOptions?: ScrapeParams;
+     webhook?: string;
  }
  /**
   * Response interface for crawling operations.
   * Defines the structure of the response received after initiating a crawl.
   */
- export interface CrawlResponse {
+ interface CrawlResponse {
      id?: string;
      url?: string;
      success: true;
@@ -111,22 +120,21 @@ export interface CrawlResponse {
   * Response interface for job status checks.
   * Provides detailed status of a crawl job including progress and results.
   */
- export interface CrawlStatusResponse {
+ interface CrawlStatusResponse {
      success: true;
-     total: number;
+     status: "scraping" | "completed" | "failed" | "cancelled";
      completed: number;
+     total: number;
      creditsUsed: number;
      expiresAt: Date;
-     status: "scraping" | "completed" | "failed";
-     next: string;
-     data?: FirecrawlDocument[];
-     error?: string;
+     next?: string;
+     data: FirecrawlDocument[];
  }
  /**
   * Parameters for mapping operations.
   * Defines options for mapping URLs during a crawl.
   */
- export interface MapParams {
+ interface MapParams {
      search?: string;
      ignoreSitemap?: boolean;
      includeSubdomains?: boolean;
@@ -136,7 +144,7 @@ export interface MapParams {
   * Response interface for mapping operations.
   * Defines the structure of the response received after a mapping operation.
   */
- export interface MapResponse {
+ interface MapResponse {
      success: true;
      links?: string[];
      error?: string;
@@ -145,7 +153,7 @@ export interface MapResponse {
   * Error response interface.
   * Defines the structure of the response received when an error occurs.
   */
- export interface ErrorResponse {
+ interface ErrorResponse {
      success: false;
      error: string;
  }
@@ -153,7 +161,7 @@ export interface ErrorResponse {
   * Main class for interacting with the Firecrawl API.
   * Provides methods for scraping, searching, crawling, and mapping web content.
   */
- export default class FirecrawlApp {
+ declare class FirecrawlApp {
      apiKey: string;
      apiUrl: string;
      /**
@@ -188,9 +196,10 @@ export default class FirecrawlApp {
      /**
       * Checks the status of a crawl job using the Firecrawl API.
       * @param id - The ID of the crawl operation.
+      * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
       * @returns The response containing the job status.
       */
-     checkCrawlStatus(id?: string): Promise<CrawlStatusResponse | ErrorResponse>;
+     checkCrawlStatus(id?: string, getAllData?: boolean): Promise<CrawlStatusResponse | ErrorResponse>;
      crawlUrlAndWatch(url: string, params?: CrawlParams, idempotencyKey?: string): Promise<CrawlWatcher>;
      mapUrl(url: string, params?: MapParams): Promise<MapResponse | ErrorResponse>;
      /**
@@ -222,7 +231,7 @@ export default class FirecrawlApp {
       * @param checkUrl - Optional URL to check the status (used for v1 API)
       * @returns The final job status or data.
       */
-     monitorJobStatus(id: string, headers: AxiosRequestHeaders, checkInterval: number): Promise<CrawlStatusResponse>;
+     monitorJobStatus(id: string, headers: AxiosRequestHeaders, checkInterval: number): Promise<CrawlStatusResponse | ErrorResponse>;
      /**
       * Handles errors from API responses.
       * @param {AxiosResponse} response - The response from the API.
@@ -242,11 +251,12 @@ interface CrawlWatcherEvents {
          error: string;
      }>;
  }
- export declare class CrawlWatcher extends TypedEventTarget<CrawlWatcherEvents> {
+ declare class CrawlWatcher extends TypedEventTarget<CrawlWatcherEvents> {
      private ws;
      data: FirecrawlDocument[];
      status: CrawlStatusResponse["status"];
      constructor(id: string, app: FirecrawlApp);
      close(): void;
  }
- export {};
+
+ export { type CrawlParams, type CrawlResponse, type CrawlStatusResponse, CrawlWatcher, type ErrorResponse, type FirecrawlAppConfig, type FirecrawlDocument, type FirecrawlDocumentMetadata, type MapParams, type MapResponse, type ScrapeParams, type ScrapeResponse, FirecrawlApp as default };
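
The notable API-surface changes in these typings: `ScrapeParams` gains an `extract` option (prompt, Zod or raw JSON schema, system prompt) plus a matching `"extract"` format, `FirecrawlDocument` gains an `extract` field for the structured result, `CrawlParams` gains `webhook`, and `checkCrawlStatus` gains a `getAllData` pagination flag. A minimal sketch of the new extract flow against these typings — the URL, schema fields, and environment variable are illustrative, not part of the package:

```ts
import FirecrawlApp from "firecrawl";
import { z } from "zod";

const app = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY });

// "extract" rides alongside the other formats; per dist/index.js below,
// a Zod schema is converted to JSON Schema client-side before the POST.
const result = await app.scrapeUrl("https://example.com", {
  formats: ["markdown", "extract"],
  extract: {
    schema: z.object({ title: z.string(), summary: z.string() }), // illustrative shape
    prompt: "Extract the page title and a one-line summary.",
  },
});

if (result.success) {
  // Narrowed to ScrapeResponse; extract is typed as Record<any, any>.
  console.log(result.extract);
}
```
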
package/dist/index.js ADDED
@@ -0,0 +1,375 @@
+ // src/index.ts
+ import axios from "axios";
+ import { zodToJsonSchema } from "zod-to-json-schema";
+ import { WebSocket } from "isows";
+ import { TypedEventTarget } from "typescript-event-target";
+ var FirecrawlApp = class {
+   apiKey;
+   apiUrl;
+   /**
+    * Initializes a new instance of the FirecrawlApp class.
+    * @param config - Configuration options for the FirecrawlApp instance.
+    */
+   constructor({ apiKey = null, apiUrl = null }) {
+     if (typeof apiKey !== "string") {
+       throw new Error("No API key provided");
+     }
+     this.apiKey = apiKey;
+     this.apiUrl = apiUrl || "https://api.firecrawl.dev";
+   }
+   /**
+    * Scrapes a URL using the Firecrawl API.
+    * @param url - The URL to scrape.
+    * @param params - Additional parameters for the scrape request.
+    * @returns The response from the scrape operation.
+    */
+   async scrapeUrl(url, params) {
+     const headers = {
+       "Content-Type": "application/json",
+       Authorization: `Bearer ${this.apiKey}`
+     };
+     let jsonData = { url, ...params };
+     if (jsonData?.extract?.schema) {
+       let schema = jsonData.extract.schema;
+       try {
+         schema = zodToJsonSchema(schema);
+       } catch (error) {
+       }
+       jsonData = {
+         ...jsonData,
+         extract: {
+           ...jsonData.extract,
+           schema
+         }
+       };
+     }
+     try {
+       const response = await axios.post(
+         this.apiUrl + `/v1/scrape`,
+         jsonData,
+         { headers }
+       );
+       if (response.status === 200) {
+         const responseData = response.data;
+         if (responseData.success) {
+           return {
+             success: true,
+             warning: responseData.warning,
+             error: responseData.error,
+             ...responseData.data
+           };
+         } else {
+           throw new Error(`Failed to scrape URL. Error: ${responseData.error}`);
+         }
+       } else {
+         this.handleError(response, "scrape URL");
+       }
+     } catch (error) {
+       throw new Error(error.message);
+     }
+     return { success: false, error: "Internal server error." };
+   }
+   /**
+    * This method is intended to search for a query using the Firecrawl API. However, it is not supported in version 1 of the API.
+    * @param query - The search query string.
+    * @param params - Additional parameters for the search.
+    * @returns Throws an error advising to use version 0 of the API.
+    */
+   async search(query, params) {
+     throw new Error("Search is not supported in v1, please update FirecrawlApp() initialization to use v0.");
+   }
+   /**
+    * Initiates a crawl job for a URL using the Firecrawl API.
+    * @param url - The URL to crawl.
+    * @param params - Additional parameters for the crawl request.
+    * @param pollInterval - Time in seconds for job status checks.
+    * @param idempotencyKey - Optional idempotency key for the request.
+    * @returns The response from the crawl operation.
+    */
+   async crawlUrl(url, params, pollInterval = 2, idempotencyKey) {
+     const headers = this.prepareHeaders(idempotencyKey);
+     let jsonData = { url, ...params };
+     try {
+       const response = await this.postRequest(
+         this.apiUrl + `/v1/crawl`,
+         jsonData,
+         headers
+       );
+       if (response.status === 200) {
+         const id = response.data.id;
+         return this.monitorJobStatus(id, headers, pollInterval);
+       } else {
+         this.handleError(response, "start crawl job");
+       }
+     } catch (error) {
+       if (error.response?.data?.error) {
+         throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ""}`);
+       } else {
+         throw new Error(error.message);
+       }
+     }
+     return { success: false, error: "Internal server error." };
+   }
+   async asyncCrawlUrl(url, params, idempotencyKey) {
+     const headers = this.prepareHeaders(idempotencyKey);
+     let jsonData = { url, ...params };
+     try {
+       const response = await this.postRequest(
+         this.apiUrl + `/v1/crawl`,
+         jsonData,
+         headers
+       );
+       if (response.status === 200) {
+         return response.data;
+       } else {
+         this.handleError(response, "start crawl job");
+       }
+     } catch (error) {
+       if (error.response?.data?.error) {
+         throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ""}`);
+       } else {
+         throw new Error(error.message);
+       }
+     }
+     return { success: false, error: "Internal server error." };
+   }
+   /**
+    * Checks the status of a crawl job using the Firecrawl API.
+    * @param id - The ID of the crawl operation.
+    * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
+    * @returns The response containing the job status.
+    */
+   async checkCrawlStatus(id, getAllData = false) {
+     if (!id) {
+       throw new Error("No crawl ID provided");
+     }
+     const headers = this.prepareHeaders();
+     try {
+       const response = await this.getRequest(
+         `${this.apiUrl}/v1/crawl/${id}`,
+         headers
+       );
+       if (response.status === 200) {
+         let allData = response.data.data;
+         if (getAllData && response.data.status === "completed") {
+           let statusData = response.data;
+           if ("data" in statusData) {
+             let data = statusData.data;
+             while ("next" in statusData) {
+               statusData = (await this.getRequest(statusData.next, headers)).data;
+               data = data.concat(statusData.data);
+             }
+             allData = data;
+           }
+         }
+         return {
+           success: response.data.success,
+           status: response.data.status,
+           total: response.data.total,
+           completed: response.data.completed,
+           creditsUsed: response.data.creditsUsed,
+           expiresAt: new Date(response.data.expiresAt),
+           next: response.data.next,
+           data: allData,
+           error: response.data.error
+         };
+       } else {
+         this.handleError(response, "check crawl status");
+       }
+     } catch (error) {
+       throw new Error(error.message);
+     }
+     return { success: false, error: "Internal server error." };
+   }
+   async crawlUrlAndWatch(url, params, idempotencyKey) {
+     const crawl = await this.asyncCrawlUrl(url, params, idempotencyKey);
+     if (crawl.success && crawl.id) {
+       const id = crawl.id;
+       return new CrawlWatcher(id, this);
+     }
+     throw new Error("Crawl job failed to start");
+   }
+   async mapUrl(url, params) {
+     const headers = this.prepareHeaders();
+     let jsonData = { url, ...params };
+     try {
+       const response = await this.postRequest(
+         this.apiUrl + `/v1/map`,
+         jsonData,
+         headers
+       );
+       if (response.status === 200) {
+         return response.data;
+       } else {
+         this.handleError(response, "map");
+       }
+     } catch (error) {
+       throw new Error(error.message);
+     }
+     return { success: false, error: "Internal server error." };
+   }
+   /**
+    * Prepares the headers for an API request.
+    * @param idempotencyKey - Optional key to ensure idempotency.
+    * @returns The prepared headers.
+    */
+   prepareHeaders(idempotencyKey) {
+     return {
+       "Content-Type": "application/json",
+       Authorization: `Bearer ${this.apiKey}`,
+       ...idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {}
+     };
+   }
+   /**
+    * Sends a POST request to the specified URL.
+    * @param url - The URL to send the request to.
+    * @param data - The data to send in the request.
+    * @param headers - The headers for the request.
+    * @returns The response from the POST request.
+    */
+   postRequest(url, data, headers) {
+     return axios.post(url, data, { headers });
+   }
+   /**
+    * Sends a GET request to the specified URL.
+    * @param url - The URL to send the request to.
+    * @param headers - The headers for the request.
+    * @returns The response from the GET request.
+    */
+   getRequest(url, headers) {
+     return axios.get(url, { headers });
+   }
+   /**
+    * Monitors the status of a crawl job until completion or failure.
+    * @param id - The ID of the crawl operation.
+    * @param headers - The headers for the request.
+    * @param checkInterval - Interval in seconds for job status checks.
+    * @param checkUrl - Optional URL to check the status (used for v1 API)
+    * @returns The final job status or data.
+    */
+   async monitorJobStatus(id, headers, checkInterval) {
+     while (true) {
+       let statusResponse = await this.getRequest(
+         `${this.apiUrl}/v1/crawl/${id}`,
+         headers
+       );
+       if (statusResponse.status === 200) {
+         let statusData = statusResponse.data;
+         if (statusData.status === "completed") {
+           if ("data" in statusData) {
+             let data = statusData.data;
+             while ("next" in statusData) {
+               statusResponse = await this.getRequest(statusData.next, headers);
+               statusData = statusResponse.data;
+               data = data.concat(statusData.data);
+             }
+             statusData.data = data;
+             return statusData;
+           } else {
+             throw new Error("Crawl job completed but no data was returned");
+           }
+         } else if (["active", "paused", "pending", "queued", "waiting", "scraping"].includes(statusData.status)) {
+           checkInterval = Math.max(checkInterval, 2);
+           await new Promise(
+             (resolve) => setTimeout(resolve, checkInterval * 1e3)
+           );
+         } else {
+           throw new Error(
+             `Crawl job failed or was stopped. Status: ${statusData.status}`
+           );
+         }
+       } else {
+         this.handleError(statusResponse, "check crawl status");
+       }
+     }
+   }
+   /**
+    * Handles errors from API responses.
+    * @param {AxiosResponse} response - The response from the API.
+    * @param {string} action - The action being performed when the error occurred.
+    */
+   handleError(response, action) {
+     if ([402, 408, 409, 500].includes(response.status)) {
+       const errorMessage = response.data.error || "Unknown error occurred";
+       throw new Error(
+         `Failed to ${action}. Status code: ${response.status}. Error: ${errorMessage}`
+       );
+     } else {
+       throw new Error(
+         `Unexpected error occurred while trying to ${action}. Status code: ${response.status}`
+       );
+     }
+   }
+ };
+ var CrawlWatcher = class extends TypedEventTarget {
+   ws;
+   data;
+   status;
+   constructor(id, app) {
+     super();
+     this.ws = new WebSocket(`${app.apiUrl}/v1/crawl/${id}`, app.apiKey);
+     this.status = "scraping";
+     this.data = [];
+     const messageHandler = (msg) => {
+       if (msg.type === "done") {
+         this.status = "completed";
+         this.dispatchTypedEvent("done", new CustomEvent("done", {
+           detail: {
+             status: this.status,
+             data: this.data
+           }
+         }));
+       } else if (msg.type === "error") {
+         this.status = "failed";
+         this.dispatchTypedEvent("error", new CustomEvent("error", {
+           detail: {
+             status: this.status,
+             data: this.data,
+             error: msg.error
+           }
+         }));
+       } else if (msg.type === "catchup") {
+         this.status = msg.data.status;
+         this.data.push(...msg.data.data ?? []);
+         for (const doc of this.data) {
+           this.dispatchTypedEvent("document", new CustomEvent("document", {
+             detail: doc
+           }));
+         }
+       } else if (msg.type === "document") {
+         this.dispatchTypedEvent("document", new CustomEvent("document", {
+           detail: msg.data
+         }));
+       }
+     };
+     this.ws.onmessage = ((ev) => {
+       if (typeof ev.data !== "string") {
+         this.ws.close();
+         return;
+       }
+       const msg = JSON.parse(ev.data);
+       messageHandler(msg);
+     }).bind(this);
+     this.ws.onclose = ((ev) => {
+       const msg = JSON.parse(ev.reason);
+       messageHandler(msg);
+     }).bind(this);
+     this.ws.onerror = ((_) => {
+       this.status = "failed";
+       this.dispatchTypedEvent("error", new CustomEvent("error", {
+         detail: {
+           status: this.status,
+           data: this.data,
+           error: "WebSocket error"
+         }
+       }));
+     }).bind(this);
+   }
+   close() {
+     this.ws.close();
+   }
+ };
+ export {
+   CrawlWatcher,
+   FirecrawlApp as default
+ };
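
Two behaviors in the bundle above are worth calling out: `checkCrawlStatus(id, true)` follows the `next` links until the full document list is assembled, and `CrawlWatcher` turns the `/v1/crawl/<id>` WebSocket into typed `document`, `done`, and `error` events. A sketch of the watcher — the target URL and crawl options are placeholders:

```ts
import FirecrawlApp from "firecrawl";

const app = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY });

// crawlUrlAndWatch starts the job via asyncCrawlUrl, then opens the socket.
const watcher = await app.crawlUrlAndWatch("https://example.com", {
  maxDepth: 1, // placeholder option from CrawlParams
});

watcher.addEventListener("document", (ev) => {
  console.log("document:", ev.detail.url); // ev.detail is a FirecrawlDocument
});
watcher.addEventListener("done", (ev) => {
  console.log(`done: ${ev.detail.data.length} documents`);
  watcher.close();
});
watcher.addEventListener("error", (ev) => {
  console.error("crawl failed:", ev.detail.error);
});
```

Note that a `catchup` message re-dispatches every document accumulated so far, so a `document` listener attached mid-crawl may see earlier documents again.
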
package/package.json CHANGED
@@ -1,22 +1,19 @@
  {
    "name": "firecrawl",
-   "version": "1.1.0",
+   "version": "1.3.0",
    "description": "JavaScript SDK for Firecrawl API",
-   "main": "build/cjs/index.js",
-   "types": "types/index.d.ts",
-   "type": "module",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
    "exports": {
-     "require": {
-       "types": "./types/index.d.ts",
-       "default": "./build/cjs/index.js"
-     },
-     "import": {
-       "types": "./types/index.d.ts",
-       "default": "./build/esm/index.js"
+     "./package.json": "./package.json",
+     ".": {
+       "import": "./dist/index.js",
+       "default": "./dist/index.cjs"
      }
    },
+   "type": "module",
    "scripts": {
-     "build": "tsc --module commonjs --moduleResolution node10 --outDir build/cjs/ && echo '{\"type\": \"commonjs\"}' > build/cjs/package.json && npx tsc --module NodeNext --moduleResolution NodeNext --outDir build/esm/ && echo '{\"type\": \"module\"}' > build/esm/package.json",
+     "build": "tsup",
      "build-and-publish": "npm run build && npm publish --access public",
      "publish-beta": "npm run build && npm publish --access public --tag beta",
      "test": "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/v1/**/*.test.ts"
@@ -29,10 +26,8 @@
    "license": "MIT",
    "dependencies": {
      "axios": "^1.6.8",
-     "dotenv": "^16.4.5",
      "isows": "^1.0.4",
      "typescript-event-target": "^1.1.1",
-     "uuid": "^9.0.1",
      "zod": "^3.23.8",
      "zod-to-json-schema": "^3.23.0"
    },
@@ -41,6 +36,8 @@
    },
    "homepage": "https://github.com/mendableai/firecrawl#readme",
    "devDependencies": {
+     "uuid": "^9.0.1",
+     "dotenv": "^16.4.5",
      "@jest/globals": "^29.7.0",
      "@types/axios": "^0.14.0",
      "@types/dotenv": "^8.2.0",
@@ -50,6 +47,7 @@
      "@types/uuid": "^9.0.8",
      "jest": "^29.7.0",
      "ts-jest": "^29.2.2",
+     "tsup": "^8.2.4",
      "typescript": "^5.4.5"
    },
    "keywords": [