firecrawl-mcp 1.4.0 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -21
- package/dist/index.js +4 -11
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,16 +1,17 @@
-#
+# Firecrawl MCP Server
 
-
+A Model Context Protocol (MCP) server implementation that integrates with [Firecrawl](https://github.com/mendableai/firecrawl) for web scraping capabilities.
 
-
+Big thanks to [@vrknetha](https://github.com/vrknetha), [@cawstudios](https://caw.tech) for the initial implementation!
 
 ## Features
 
-
-
+- Scrape, crawl, search, extract and batch scrape support
+- Web scraping with JS rendering
 - URL discovery and crawling
 - Web search with content extraction
 - Automatic retries with exponential backoff
+- Efficient batch processing with built-in rate limiting
 - Credit usage monitoring for cloud API
 - Comprehensive logging system
 - Support for cloud and self-hosted FireCrawl instances
@@ -19,29 +20,18 @@ A Model Context Protocol (MCP) server implementation that integrates with FireCr
 
 ## Installation
 
-
-
-### 1. Installation
-
-Choose one of these methods:
+### Running with npx
 
 ```bash
-
-npx -y firecrawl-mcp
-
-# Global installation
-npm install -g firecrawl-mcp
+env FIRECRAWL_API_KEY=fc-YOUR_API_KEY npx -y firecrawl-mcp
 ```
 
-###
-
-To install FireCrawl for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@mendableai/mcp-server-firecrawl):
+### Manual Installation
 
 ```bash
-
+npm install -g firecrawl-mcp
 ```
 
-
 ### Running on Cursor
 
 Configuring Cursor 🖥️
@@ -57,10 +47,39 @@ To configure FireCrawl MCP in Cursor:
 - Type: "command"
 - Command: `env FIRECRAWL_API_KEY=your-api-key npx -y firecrawl-mcp`
 
+> If you are using Windows and are running into issues, try `cmd /c "set FIRECRAWL_API_KEY=your-api-key && npx -y firecrawl-mcp"`
+
 Replace `your-api-key` with your FireCrawl API key.
 
 After adding, refresh the MCP server list to see the new tools. The Composer Agent will automatically use FireCrawl MCP when appropriate, but you can explicitly request it by describing your web scraping needs. Access the Composer via Command+L (Mac), select "Agent" next to the submit button, and enter your query.
 
+### Running on Windsurf
+
+Add this to your `./codeium/windsurf/model_config.json`:
+
+```json
+{
+  "mcpServers": {
+    "mcp-server-firecrawl": {
+      "command": "npx",
+      "args": ["-y", "firecrawl-mcp"],
+      "env": {
+        "FIRECRAWL_API_KEY": "YOUR_API_KEY_HERE"
+      }
+    }
+  }
+}
+```
+
+
+### Installing via Smithery (Legacy)
+
+To install FireCrawl for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@mendableai/mcp-server-firecrawl):
+
+```bash
+npx -y @smithery/cli install @mendableai/mcp-server-firecrawl --client claude
+```
+
 ## Configuration
 
 ### Environment Variables
@@ -130,7 +149,7 @@ Add this to your `claude_desktop_config.json`:
   "mcpServers": {
     "mcp-server-firecrawl": {
       "command": "npx",
-      "args": ["-y", "mcp
+      "args": ["-y", "firecrawl-mcp"],
       "env": {
         "FIRECRAWL_API_KEY": "YOUR_API_KEY_HERE",
 
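The client configurations added in this README revision (Cursor, Windsurf, Claude Desktop) all amount to the same thing: launch `firecrawl-mcp` over stdio with `FIRECRAWL_API_KEY` in its environment. A minimal Node sketch of that equivalence, not taken from the package itself (the placeholder key and the echo of stdout are illustrative only):

```js
// Hypothetical illustration of what an MCP client does with the configs above.
import { spawn } from 'node:child_process';

const server = spawn('npx', ['-y', 'firecrawl-mcp'], {
  // Placeholder key, as in the README examples.
  env: { ...process.env, FIRECRAWL_API_KEY: 'fc-YOUR_API_KEY' },
  // MCP stdio servers exchange JSON-RPC messages over stdin/stdout.
  stdio: ['pipe', 'pipe', 'inherit'],
});

// A real client would frame and parse JSON-RPC here; this just echoes the stream.
server.stdout.on('data', (chunk) => process.stdout.write(chunk));
```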
package/dist/index.js
CHANGED
@@ -513,11 +513,7 @@ const DEEP_RESEARCH_TOOL = {
             maxUrls: {
                 type: 'number',
                 description: 'Maximum number of URLs to analyze (1-1000)',
-            },
-            __experimental_streamSteps: {
-                type: 'boolean',
-                description: 'Experimental flag for streaming steps',
-            },
+            }
         },
         required: ['query'],
     },
@@ -1044,14 +1040,11 @@ ${result.markdown ? `\nContent:\n${result.markdown}` : ''}`)
             // Format the results
             const formattedResponse = {
                 finalAnalysis: response.data.finalAnalysis,
-                activities: response.activities,
-                sources: response.sources,
-                currentDepth: response.currentDepth,
-                maxDepth: response.maxDepth,
-                summaries: response.summaries || [],
+                activities: response.data.activities,
+                sources: response.data.sources,
             };
             return {
-                content: [{ type: 'text', text:
+                content: [{ type: 'text', text: formattedResponse.finalAnalysis }],
                 isError: false,
             };
         }
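Net effect of the `dist/index.js` changes: the experimental `__experimental_streamSteps` flag is dropped from the deep research tool schema, and the result formatting now reads `activities` and `sources` from `response.data` while surfacing only `finalAnalysis` as the returned text. A condensed sketch of the resulting logic (the function name and the exact shape of `response` are assumptions based on the diff):

```js
// Condensed sketch of the deep research result formatting after this change;
// the field access mirrors the diff above.
function formatDeepResearchResult(response) {
  const formattedResponse = {
    finalAnalysis: response.data.finalAnalysis,
    activities: response.data.activities, // previously read from response.activities
    sources: response.data.sources,       // previously read from response.sources
  };
  return {
    // Only the final analysis is returned as text content.
    content: [{ type: 'text', text: formattedResponse.finalAnalysis }],
    isError: false,
  };
}
```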
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "firecrawl-mcp",
-  "version": "1.4.0",
+  "version": "1.4.2",
   "description": "MCP server for FireCrawl web scraping integration. Supports both cloud and self-hosted instances. Features include web scraping, batch processing, structured data extraction, and LLM-powered content analysis.",
   "type": "module",
   "bin": {