firecrawl-mcp 1.3.2 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -19,9 +19,17 @@ A Model Context Protocol (MCP) server implementation that integrates with FireCr

  ## Installation

- ### Manual Installation
+ ## Quick Start
+
+ ### 1. Installation
+
+ Choose one of these methods:

  ```bash
+ # Using npx (recommended)
+ npx -y firecrawl-mcp
+
+ # Global installation
  npm install -g firecrawl-mcp
  ```

@@ -33,6 +41,26 @@ To install FireCrawl for Claude Desktop automatically via [Smithery](https://smi
  npx -y @smithery/cli install @mendableai/mcp-server-firecrawl --client claude
  ```

+
+ ### Running on Cursor
+
+ Configuring Cursor 🖥️
+ Note: Requires Cursor version 0.45.6+
+
+ To configure FireCrawl MCP in Cursor:
+
+ 1. Open Cursor Settings
+ 2. Go to Features > MCP Servers
+ 3. Click "+ Add New MCP Server"
+ 4. Enter the following:
+    - Name: "firecrawl-mcp" (or your preferred name)
+    - Type: "command"
+    - Command: `env FIRECRAWL_API_KEY=your-api-key npx -y firecrawl-mcp`
+
+ Replace `your-api-key` with your FireCrawl API key.
+
+ After adding, refresh the MCP server list to see the new tools. The Composer Agent will automatically use FireCrawl MCP when appropriate, but you can explicitly request it by describing your web scraping needs. Access the Composer via Command+L (Mac), select "Agent" next to the submit button, and enter your query.
+
  ## Configuration

  ### Environment Variables
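The Cursor steps above register the server through a command string. For MCP clients that are configured with a JSON file instead, such as Claude Desktop's `claude_desktop_config.json`, the equivalent entry would look roughly like the sketch below; the entry name and the `YOUR_API_KEY` placeholder are illustrative and not part of this diff.

```json
{
  "mcpServers": {
    "firecrawl-mcp": {
      "command": "npx",
      "args": ["-y", "firecrawl-mcp"],
      "env": {
        "FIRECRAWL_API_KEY": "YOUR_API_KEY"
      }
    }
  }
}
```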
package/dist/index.js CHANGED
@@ -492,6 +492,36 @@ const EXTRACT_TOOL = {
          required: ['urls'],
      },
  };
+ const DEEP_RESEARCH_TOOL = {
+     name: 'firecrawl_deep_research',
+     description: 'Conduct deep research on a query using web crawling, search, and AI analysis.',
+     inputSchema: {
+         type: 'object',
+         properties: {
+             query: {
+                 type: 'string',
+                 description: 'The query to research',
+             },
+             maxDepth: {
+                 type: 'number',
+                 description: 'Maximum depth of research iterations (1-10)',
+             },
+             timeLimit: {
+                 type: 'number',
+                 description: 'Time limit in seconds (30-300)',
+             },
+             maxUrls: {
+                 type: 'number',
+                 description: 'Maximum number of URLs to analyze (1-1000)',
+             },
+             __experimental_streamSteps: {
+                 type: 'boolean',
+                 description: 'Experimental flag for streaming steps',
+             },
+         },
+         required: ['query'],
+     },
+ };
  // Type guards
  function isScrapeOptions(args) {
      return (typeof args === 'object' &&
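The schema above requires only `query`; `maxDepth`, `timeLimit`, and `maxUrls` are optional and bounded as noted in their descriptions. As a rough illustration (the values are made up, not taken from the package), an arguments object a client might pass to this tool could look like:

```js
// Illustrative arguments for the firecrawl_deep_research tool; only `query` is required.
const deepResearchArgs = {
  query: 'What MCP servers exist for web scraping?',
  maxDepth: 3, // research iterations, allowed range 1-10
  timeLimit: 120, // seconds, allowed range 30-300
  maxUrls: 50, // URLs to analyze, allowed range 1-1000
};
```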
@@ -539,8 +569,8 @@ function isExtractOptions(args) {
  }
  // Server implementation
  const server = new Server({
-     name: 'fire-crawl',
-     version: '0.1.0',
+     name: 'firecrawl-mcp',
+     version: '1.3.2',
  }, {
      capabilities: {
          tools: {},
@@ -672,6 +702,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
          CHECK_CRAWL_STATUS_TOOL,
          SEARCH_TOOL,
          EXTRACT_TOOL,
+         DEEP_RESEARCH_TOOL,
      ],
  }));
  server.setRequestHandler(CallToolRequestSchema, async (request) => {
@@ -973,6 +1004,65 @@ ${result.markdown ? `\nContent:\n${result.markdown}` : ''}`)
                  };
              }
          }
+         case 'firecrawl_deep_research': {
+             if (!args || typeof args !== 'object' || !('query' in args)) {
+                 throw new Error('Invalid arguments for firecrawl_deep_research');
+             }
+             try {
+                 const researchStartTime = Date.now();
+                 server.sendLoggingMessage({
+                     level: 'info',
+                     data: `Starting deep research for query: ${args.query}`,
+                 });
+                 const response = await client.deepResearch(args.query, {
+                     maxDepth: args.maxDepth,
+                     timeLimit: args.timeLimit,
+                     maxUrls: args.maxUrls,
+                 },
+                 // Activity callback
+                 (activity) => {
+                     server.sendLoggingMessage({
+                         level: 'info',
+                         data: `Research activity: ${activity.message} (Depth: ${activity.depth})`,
+                     });
+                 },
+                 // Source callback
+                 (source) => {
+                     server.sendLoggingMessage({
+                         level: 'info',
+                         data: `Research source found: ${source.url}${source.title ? ` - ${source.title}` : ''}`,
+                     });
+                 });
+                 // Log performance metrics
+                 server.sendLoggingMessage({
+                     level: 'info',
+                     data: `Deep research completed in ${Date.now() - researchStartTime}ms`,
+                 });
+                 if (!response.success) {
+                     throw new Error(response.error || 'Deep research failed');
+                 }
+                 // Format the results
+                 const formattedResponse = {
+                     finalAnalysis: response.data.finalAnalysis,
+                     activities: response.activities,
+                     sources: response.sources,
+                     currentDepth: response.currentDepth,
+                     maxDepth: response.maxDepth,
+                     summaries: response.summaries || [],
+                 };
+                 return {
+                     content: [{ type: 'text', text: JSON.stringify(formattedResponse, null, 2) }],
+                     isError: false,
+                 };
+             }
+             catch (error) {
+                 const errorMessage = error instanceof Error ? error.message : String(error);
+                 return {
+                     content: [{ type: 'text', text: errorMessage }],
+                     isError: true,
+                 };
+             }
+         }
          default:
              return {
                  content: [{ type: 'text', text: `Unknown tool: ${name}` }],
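Outside the MCP handler, the same call shape can be exercised directly against the Firecrawl client. The sketch below mirrors the handler's `deepResearch` call (query, options object, activity callback, source callback); the `main` wrapper, the example query, and the option values are illustrative and assume a valid `FIRECRAWL_API_KEY` in the environment.

```js
import FirecrawlApp from '@mendable/firecrawl-js';

async function main() {
  const client = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY });

  // Same argument order as the handler above: query, options, activity callback, source callback.
  const response = await client.deepResearch(
    'current state of open-source web crawlers',
    { maxDepth: 3, timeLimit: 120, maxUrls: 50 },
    (activity) => console.error(`activity: ${activity.message} (depth ${activity.depth})`),
    (source) => console.error(`source: ${source.url}`)
  );

  if (response.success) {
    console.log(response.data.finalAnalysis);
  }
}

main().catch(console.error);
```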
New file ADDED
@@ -0,0 +1,58 @@
+ import { jest } from '@jest/globals';
+ // Set test timeout
+ jest.setTimeout(30000);
+ // Create mock responses
+ const mockSearchResponse = {
+   success: true,
+   data: [
+     {
+       url: 'https://example.com',
+       title: 'Test Page',
+       description: 'Test Description',
+       markdown: '# Test Content',
+       actions: null,
+     },
+   ],
+ };
+ const mockBatchScrapeResponse = {
+   success: true,
+   id: 'test-batch-id',
+ };
+ const mockBatchStatusResponse = {
+   success: true,
+   status: 'completed',
+   completed: 1,
+   total: 1,
+   creditsUsed: 1,
+   expiresAt: new Date(),
+   data: [
+     {
+       url: 'https://example.com',
+       title: 'Test Page',
+       description: 'Test Description',
+       markdown: '# Test Content',
+       actions: null,
+     },
+   ],
+ };
+ // Create mock instance methods
+ const mockSearch = jest.fn().mockImplementation(async () => mockSearchResponse);
+ const mockAsyncBatchScrapeUrls = jest
+   .fn()
+   .mockImplementation(async () => mockBatchScrapeResponse);
+ const mockCheckBatchScrapeStatus = jest
+   .fn()
+   .mockImplementation(async () => mockBatchStatusResponse);
+ // Create mock instance
+ const mockInstance = {
+   apiKey: 'test-api-key',
+   apiUrl: 'test-api-url',
+   search: mockSearch,
+   asyncBatchScrapeUrls: mockAsyncBatchScrapeUrls,
+   checkBatchScrapeStatus: mockCheckBatchScrapeStatus,
+ };
+ // Mock the module
+ jest.mock('@mendable/firecrawl-js', () => ({
+   __esModule: true,
+   default: jest.fn().mockImplementation(() => mockInstance),
+ }));
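Because the module mock replaces the default export of `@mendable/firecrawl-js`, any code under test that constructs a client receives `mockInstance` and its canned responses. A minimal sketch of a test that could sit in the same file as the setup above; the test name and assertions are illustrative, not part of this diff.

```js
// Assumes this lives in the same file as the mocks above, so `mockSearch` is in scope.
import { describe, expect, test } from '@jest/globals';
import FirecrawlApp from '@mendable/firecrawl-js';

describe('mocked Firecrawl client', () => {
  test('search resolves with the canned response', async () => {
    const client = new FirecrawlApp({ apiKey: 'test-api-key' });
    const result = await client.search('example query');

    expect(result.success).toBe(true);
    expect(result.data[0].url).toBe('https://example.com');
    expect(mockSearch).toHaveBeenCalledWith('example query');
  });
});
```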