n8n-nodes-firecrawl-latest 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. package/LICENSE.md +19 -0
  2. package/README.md +232 -0
  3. package/dist/credentials/FirecrawlApi.credentials.js +22 -0
  4. package/dist/icons/flames-icon.svg +144 -0
  5. package/dist/nodes/Firecrawl/FireCrawlScraper.node.js +156 -0
  6. package/dist/nodes/Firecrawl/resources/batchScrape/batchScrape.methods.js +253 -0
  7. package/dist/nodes/Firecrawl/resources/batchScrape/batchScrape.properties.js +205 -0
  8. package/dist/nodes/Firecrawl/resources/crawler/crawler.methods.js +281 -0
  9. package/dist/nodes/Firecrawl/resources/crawler/crawler.properties.js +313 -0
  10. package/dist/nodes/Firecrawl/resources/deepResearch/deepResearch.methods.js +171 -0
  11. package/dist/nodes/Firecrawl/resources/deepResearch/deepResearch.properties.js +200 -0
  12. package/dist/nodes/Firecrawl/resources/extract/extract.methods.js +424 -0
  13. package/dist/nodes/Firecrawl/resources/extract/extract.properties.js +339 -0
  14. package/dist/nodes/Firecrawl/resources/llmsText/llmsText.methods.js +124 -0
  15. package/dist/nodes/Firecrawl/resources/llmsText/llmsText.properties.js +87 -0
  16. package/dist/nodes/Firecrawl/resources/map/map.methods.js +52 -0
  17. package/dist/nodes/Firecrawl/resources/map/map.properties.js +22 -0
  18. package/dist/nodes/Firecrawl/resources/scrape/scrape.methods.js +203 -0
  19. package/dist/nodes/Firecrawl/resources/scrape/scrape.properties.js +348 -0
  20. package/dist/nodes/HttpBin/HttpBin.node.js +59 -0
  21. package/dist/nodes/HttpBin/HttpVerbDescription.js +246 -0
  22. package/dist/nodes/HttpBin/httpbin.svg +18 -0
  23. package/index.js +7 -0
  24. package/package.json +58 -0
package/LICENSE.md ADDED
@@ -0,0 +1,19 @@
+ Copyright 2022 n8n
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is furnished to do
+ so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,232 @@
+ # FireCrawl Scraper Custom Nodes for n8n
+
+ This package contains custom n8n nodes that integrate with the FireCrawl API for web scraping, crawling, extraction, and data analysis.
+
+ ## Features
+
+ The FireCrawl Scraper node provides several powerful resources for web data extraction:
+
+ - **Scrape**: Scrape a single URL with various output formats and data extraction capabilities
+ - **Batch Scrape**: Process multiple URLs in a batch operation
+ - **Crawler**: Crawl an entire website starting from a specific URL
+ - **Extract**: Extract structured data from web pages using AI
+ - **Map**: Map URLs and extract structured data
+ - **LLMs.txt**: Generate LLMs.txt files from websites for LLM training and analysis
+ - **Deep Research**: AI-powered deep research on any topic
+
+ ## Installation
+
+ ### Option 1: Using Docker Compose
+
+ This package includes a Docker Compose configuration for easy setup with n8n:
+
+ 1. Clone this repository
+ 2. Create a `.env` file with your FireCrawl API key:
+ ```
+ FIRECRAWL_API_KEY=your_api_key_here
+ ```
+ 3. Run Docker Compose:
+ ```bash
+ docker-compose up -d
+ ```
+ 4. Access n8n at `http://localhost:5678`
+
+ ### Option 2: Installing in an existing n8n instance
+
+ ```bash
+ npm install n8n-nodes-firecrawl-scraper-custom
+ ```
+
+ ## Configuration
+
+ Before using the nodes, you need to set up the FireCrawl API credentials in n8n:
+
+ 1. Go to **Settings** > **Credentials**
+ 2. Click on **New Credential**
+ 3. Select **FireCrawl API**
+ 4. Enter your API key
+ 5. Save the credential
+
+ ## Usage
+
+ ### Scrape Resource
+
+ The Scrape resource allows you to extract content from a single URL.
+
+ **Key Features:**
+ - Multiple output formats (Markdown, HTML, Screenshots, etc.)
+ - Change tracking between scrapes
+ - Structured data extraction
+ - Page action support for dynamic content
+ - FIRE-1 agent integration for advanced capabilities
+
+ **Example Configuration:**
+ 1. Add the **FireCrawl Scraper** node
+ 2. Select **Scrape** as the resource
+ 3. Enter the URL to scrape
+ 4. Select output formats
+ 5. Enable additional options as needed
+
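Under the hood, the Scrape resource wraps Firecrawl's hosted scraping API. For orientation, a minimal sketch of the roughly equivalent direct call follows; the endpoint and body fields are taken from Firecrawl's public v1 documentation rather than from this package's source, so treat them as assumptions.

```js
// Hypothetical direct equivalent of the Scrape configuration above (Node 18+ fetch).
// Endpoint and field names follow Firecrawl's public v1 docs, not this package's code.
const response = await fetch('https://api.firecrawl.dev/v1/scrape', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}`,
  },
  body: JSON.stringify({
    url: 'https://example.com',
    formats: ['markdown', 'html'],
  }),
});
const result = await response.json();
console.log(result.success, result.data?.markdown?.slice(0, 200));
```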
+ ### Batch Scrape Resource
+
+ Process multiple URLs in a batch operation.
+
+ **Key Features:**
+ - Batch processing of multiple URLs
+ - Synchronous or asynchronous operation modes
+ - Data extraction across multiple pages
+
+ ### Crawler Resource
+
+ Crawl an entire website starting from a specific URL.
+
+ **Key Features:**
+ - Configurable crawl depth and limits
+ - Path inclusion and exclusion patterns
+ - LLM extraction during crawling
+ - Change tracking
+
+ ### Extract Resource
+
+ Extract specific structured data from web pages using AI.
+
+ **Key Features:**
+ - Schema-based extraction
+ - Simple prompt-based extraction
+ - Multiple URL processing
+ - V2 support with FIRE-1 agent
+
+ ### Deep Research Resource
+
+ Perform AI-powered deep research on any topic.
+
+ **Key Features:**
+ - Multi-depth research capabilities
+ - Customizable system and analysis prompts
+ - JSON structured output
+ - Activity tracking
+
+ ### LLMs.txt Resource
+
+ Generate LLMs.txt files from websites.
+
+ **Key Features:**
+ - Create LLMs.txt files for LLM training
+ - Configurable crawl limits
+ - Full text generation option
+
+ ### Map Resource
+
+ Map URLs and extract structured data.
+
+ **Key Features:**
+ - Site mapping capabilities
+ - Structure discovery
+
+ ## Resource Details
+
+ ### Scrape Resource Options
+
+ | Option | Description |
+ |--------|-------------|
+ | URL | The URL to scrape |
+ | Output Formats | Formats to return (Markdown, HTML, Screenshots, etc.) |
+ | Track Changes | Track differences between scrapes |
+ | Include Extract | Extract structured data |
+ | Include Page Actions | Perform actions before scraping |
+ | Use FIRE-1 Agent | Enable advanced agent capabilities |
+ | Location Settings | Specify geographic location |
+
+ ### Crawler Resource Options
+
+ | Option | Description |
+ |--------|-------------|
+ | URL | The starting URL to crawl |
+ | Limit | Maximum number of pages to crawl |
+ | Maximum Depth | How deep to crawl (1-10) |
+ | Include/Exclude Paths | Regular expressions for paths |
+ | Operation Mode | Synchronous or asynchronous |
+ | Enable LLM Extraction | Extract data during crawling |
+
+ ### Extract Resource Options
+
+ | Option | Description |
+ |--------|-------------|
+ | URL(s) | URLs to extract data from |
+ | Version | API version (V1 or V2 with FIRE-1) |
+ | Extraction Method | Simple or schema-based |
+ | Operation Mode | Single, batch, or URL-less |
+ | Enable Web Search | Follow external links for context |
+ | Track Changes | Track differences between extractions |
+
+ ## Examples
+
+ ### Example: Scraping a Website and Extracting Structured Data
+
+ ```
+ [n8n Workflow]
+ 1. FireCrawl Scraper (Scrape)
+    - URL: https://example.com
+    - Output Formats: Markdown, HTML
+    - Include Extract: Yes
+    - Extraction Method: Schema Based
+    - Schema: JSON schema defining product information
+ ```
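The "JSON schema defining product information" referenced in the workflow above is not included in this diff. A hypothetical example of what such a schema could look like, written as the plain object a schema-based extraction typically accepts (property names are illustrative only):

```js
// Hypothetical product-information schema for schema-based extraction.
// None of these property names come from the package itself.
const productSchema = {
  type: 'object',
  properties: {
    name: { type: 'string', description: 'Product name' },
    price: { type: 'number', description: 'Price in the page currency' },
    currency: { type: 'string' },
    inStock: { type: 'boolean' },
    features: { type: 'array', items: { type: 'string' } },
  },
  required: ['name', 'price'],
};

module.exports = { productSchema };
```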
+
+ ### Example: Crawling a Website and Generating a Report
+
+ ```
+ [n8n Workflow]
+ 1. FireCrawl Scraper (Crawler)
+    - URL: https://example.com
+    - Limit: 100
+    - Maximum Depth: 3
+    - Enable LLM Extraction: Yes
+ 2. Google Sheets
+    - Action: Append
+    - Sheet: Crawl Results
+ ```
+
+ ### Example: Performing Deep Research on a Topic
+
+ ```
+ [n8n Workflow]
+ 1. FireCrawl Scraper (Deep Research)
+    - Query: "Latest advancements in renewable energy"
+    - Maximum Depth: 7
+    - Wait for Completion: Yes
+ 2. Text Formatter
+    - Format research results
+ 3. Email
+    - Send formatted research
+ ```
+
+ ## Error Handling
+
+ The nodes implement error handling with the option to continue workflow execution on failures. Each response includes a `success` field indicating whether the operation succeeded, along with detailed error messages when applicable.
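A minimal sketch of how a downstream n8n Code node might branch on that `success` field; the field name comes from the description above, while the `error` property and everything else is illustrative:

```js
// n8n Code node (Run Once for All Items) placed after the FireCrawl Scraper node.
// Splits successful responses from failed ones based on the `success` field.
const succeeded = [];
const failed = [];

for (const item of $input.all()) {
  if (item.json.success) {
    succeeded.push(item);
  } else {
    // The `error` property name is an assumption; keep the original payload for debugging.
    failed.push({ json: { error: item.json.error ?? 'Unknown error', original: item.json } });
  }
}

// Pass only the successful items downstream; route `failed` elsewhere if needed.
return succeeded;
```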
+
+ ## Development
+
+ ### Building the Package
+
+ To build the package:
+
+ ```bash
+ npm run build
+ ```
+
+ ### Testing
+
+ ```bash
+ npm run test
+ ```
+
+ ## License
+
+ [MIT](LICENSE)
+
+ ## Support
+
+ For support with the FireCrawl API, visit [FireCrawl Documentation](https://docs.firecrawl.dev).
+
+ For issues with these custom nodes, please open an issue on GitHub.
package/dist/credentials/FirecrawlApi.credentials.js ADDED
@@ -0,0 +1,22 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.FirecrawlApi = void 0;
+ class FirecrawlApi {
+     constructor() {
+         this.name = 'firecrawlApi';
+         this.displayName = 'Firecrawl API';
+         this.documentationUrl = 'https://firecrawl.dev/docs';
+         this.properties = [
+             {
+                 displayName: 'API Key',
+                 name: 'apiKey',
+                 type: 'string',
+                 typeOptions: { password: true },
+                 default: '',
+                 required: true,
+                 description: 'The API key for authenticating with Firecrawl API. Get your API key from firecrawl.dev',
+             },
+         ];
+     }
+ }
+ exports.FirecrawlApi = FirecrawlApi;
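For context, a resource handler running inside the node's execute() context would typically load this credential and send the key as a Bearer token. The sketch below assumes the standard n8n `getCredentials` and `helpers.httpRequest` APIs and Firecrawl's public v1 scrape endpoint; none of it is taken from the package's own resource files, which are not shown in this diff.

```js
// Hypothetical helper used from within an n8n execute() context (`this` is IExecuteFunctions).
// The endpoint URL and request body follow Firecrawl's public v1 docs, not this package's source.
async function firecrawlScrape(url) {
  const credentials = await this.getCredentials('firecrawlApi');
  return this.helpers.httpRequest({
    method: 'POST',
    url: 'https://api.firecrawl.dev/v1/scrape',
    headers: { Authorization: `Bearer ${credentials.apiKey}` },
    body: { url, formats: ['markdown'] },
    json: true,
  });
}
```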
package/dist/icons/flames-icon.svg ADDED
@@ -0,0 +1,144 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 91.2 122.9">
+ <!-- Generator: Adobe Illustrator 29.3.1, SVG Export Plug-In . SVG Version: 2.1.0 Build 151) -->
+ <defs>
+ <style>
+ .st0 {
+ fill: url(#linear-gradient2);
+ }
+
+ .st1 {
+ fill: url(#radial-gradient10);
+ }
+
+ .st2 {
+ fill: url(#linear-gradient1);
+ }
+
+ .st3 {
+ fill: url(#radial-gradient);
+ }
+
+ .st4 {
+ fill: url(#radial-gradient5);
+ }
+
+ .st5 {
+ fill: url(#radial-gradient1);
+ }
+
+ .st6 {
+ fill: url(#radial-gradient7);
+ }
+
+ .st7 {
+ fill: url(#radial-gradient8);
+ }
+
+ .st8 {
+ fill: url(#linear-gradient3);
+ }
+
+ .st9 {
+ fill: url(#radial-gradient6);
+ }
+
+ .st10 {
+ fill: url(#radial-gradient2);
+ }
+
+ .st11 {
+ fill: url(#radial-gradient4);
+ }
+
+ .st12 {
+ fill: url(#radial-gradient3);
+ }
+
+ .st13 {
+ fill: url(#radial-gradient9);
+ }
+
+ .st14 {
+ fill: url(#linear-gradient);
+ }
+ </style>
+ <radialGradient id="radial-gradient" cx="-18.7" cy="138" fx="-18.7" fy="138" r="4.3" gradientTransform="translate(-207.3 -3483.3) rotate(-179.5) scale(17.1 -25.8)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff953d"/>
+ <stop offset="1" stop-color="#ff5141"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient1" cx="-38.1" cy="136.8" fx="-38.1" fy="136.8" r="4.3" gradientTransform="translate(573.8 -2351.6) rotate(-157.9) scale(10.3 -17.8)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ce5327"/>
+ <stop offset="1" stop-color="#ce5327" stop-opacity="0"/>
+ </radialGradient>
+ <linearGradient id="linear-gradient" x1="306" y1="-333.4" x2="306" y2="-311.3" gradientTransform="translate(-260.4 -211.7) scale(1 -1)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff7583"/>
+ <stop offset="1" stop-color="#ff7583" stop-opacity="0"/>
+ </linearGradient>
+ <radialGradient id="radial-gradient2" cx="123.6" cy="113.6" fx="123.6" fy="113.6" r="4.3" gradientTransform="translate(-1540.6 2564.4) rotate(24) scale(3.1 -25.7)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ffaa7b"/>
+ <stop offset="1" stop-color="#ffaa7b" stop-opacity="0"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient3" cx="103.5" cy="161.5" fx="103.5" fy="161.5" r="4.3" gradientTransform="translate(-825.6 -199.1) rotate(77.3) scale(3.8 -4.8)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff5e47"/>
+ <stop offset="1" stop-color="#ff5e47" stop-opacity="0"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient4" cx="33.7" cy="152.6" fx="33.7" fy="152.6" r="4.3" gradientTransform="translate(-1396.2 -290.6) rotate(87.9) scale(10.1 -9.4)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff2f3c"/>
+ <stop offset="1" stop-color="#ff2f3c" stop-opacity="0"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient5" cx="175" cy="82.1" fx="175" fy="82.1" r="4.3" gradientTransform="translate(-489 182.7) rotate(25.3) scale(2.3 -4.6)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff846c"/>
+ <stop offset="1" stop-color="#ff846c" stop-opacity="0"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient6" cx="94.5" cy="879.8" fx="94.5" fy="879.8" r="4.3" gradientTransform="translate(-306.9 -351.6) rotate(113.2) scale(2.3 -.5)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ffa682"/>
+ <stop offset="1" stop-color="#ffa682" stop-opacity="0"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient7" cx="-38.1" cy="150" fx="-38.1" fy="150" r="4.3" gradientTransform="translate(65.2 -1814.3) rotate(-168.6) scale(10 -12.5)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ffda2f"/>
+ <stop offset="1" stop-color="#ff8e41"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient8" cx="30.5" cy="133.7" fx="30.5" fy="133.7" r="4.3" gradientTransform="translate(-1658.1 214.1) rotate(69) scale(14.1 -12.3)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#fd5639"/>
+ <stop offset="1" stop-color="#fe5533" stop-opacity="0"/>
+ </radialGradient>
+ <radialGradient id="radial-gradient9" cx="-26.5" cy="140.5" fx="-26.5" fy="140.5" r="4.3" gradientTransform="translate(-537.1 -3247.1) rotate(174.2) scale(9.8 -24)" gradientUnits="userSpaceOnUse">
+ <stop offset=".6" stop-color="#d7812d" stop-opacity="0"/>
+ <stop offset="1" stop-color="#d7812d"/>
+ </radialGradient>
+ <linearGradient id="linear-gradient1" x1="308.3" y1="-249.5" x2="308.3" y2="-265.3" gradientTransform="translate(-260.4 -211.7) scale(1 -1)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#f95131"/>
+ <stop offset="1" stop-color="#f95131" stop-opacity="0"/>
+ </linearGradient>
+ <radialGradient id="radial-gradient10" cx="-1" cy="165.9" fx="-1" fy="165.9" r="4.3" gradientTransform="translate(-1095.3 -1456.8) rotate(143.1) scale(16.1 -11.6)" gradientUnits="userSpaceOnUse">
+ <stop offset=".8" stop-color="#f18a52" stop-opacity="0"/>
+ <stop offset="1" stop-color="#f18a52"/>
+ </radialGradient>
+ <linearGradient id="linear-gradient2" x1="300.6" y1="-221.4" x2="268.7" y2="-301.6" gradientTransform="translate(-260.4 -211.7) scale(1 -1)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff7558"/>
+ <stop offset="1" stop-color="#f38758"/>
+ </linearGradient>
+ <linearGradient id="linear-gradient3" x1="278.4" y1="-238.9" x2="261.2" y2="-288.1" gradientTransform="translate(-260.4 -211.7) scale(1 -1)" gradientUnits="userSpaceOnUse">
+ <stop offset="0" stop-color="#ff815b"/>
+ <stop offset="1" stop-color="#ff9c6d"/>
+ </linearGradient>
+ </defs>
+ <path class="st3" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st5" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st14" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st10" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st12" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st11" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st4" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <path class="st9" d="M30.7,30.5c6.7-10.4,10.6-20,12.7-26.8.8-2.6,4.1-3.7,6.1-1.8,29.8,28.1,39.6,49.6,41.5,74.5,1.4,24-10.4,45.4-42.1,45.4S-2.7,101,.3,68.1c1.8-19.7,9.4-34.6,15.5-43.6,1.9-2.8,5.9-2.8,8.1-.3l5.5,6.4c.4.4,1.1.4,1.4,0Z"/>
+ <g>
+ <path class="st6" d="M20.4,78c5.7-13.5,17.3-28.3,25.1-36.4,2.2-2.3,5.8-2.3,8.1-.2,15,13.8,19.6,26.4,23.4,40.9,4,15.2,0,39.6-30.1,39.6s-34.8-24.2-26.6-43.9Z"/>
+ <path class="st7" d="M20.4,78c5.7-13.5,17.3-28.3,25.1-36.4,2.2-2.3,5.8-2.3,8.1-.2,15,13.8,19.6,26.4,23.4,40.9,4,15.2,0,39.6-30.1,39.6s-34.8-24.2-26.6-43.9Z"/>
+ <path class="st13" d="M20.4,78c5.7-13.5,17.3-28.3,25.1-36.4,2.2-2.3,5.8-2.3,8.1-.2,15,13.8,19.6,26.4,23.4,40.9,4,15.2,0,39.6-30.1,39.6s-34.8-24.2-26.6-43.9Z"/>
+ </g>
+ <path class="st2" d="M20.4,78c5.7-13.5,17.3-28.3,25.1-36.4,2.2-2.3,5.8-2.3,8.1-.2,15,13.8,19.6,26.4,23.4,40.9,4,15.2,0,39.6-30.1,39.6s-34.8-24.2-26.6-43.9Z"/>
+ <path class="st1" d="M20.4,78c5.7-13.5,17.3-28.3,25.1-36.4,2.2-2.3,5.8-2.3,8.1-.2,15,13.8,19.6,26.4,23.4,40.9,4,15.2,0,39.6-30.1,39.6s-34.8-24.2-26.6-43.9Z"/>
+ <path class="st0" d="M27.5,41.8c10.1-14.7,15.9-30.6,17.7-34.6-2.6,20.1-10.2,34.9-24.6,53.8-11.5,15.2-13.6,28.2-13.5,31.3-3.6-24.1,7.8-32.2,20.3-50.6Z"/>
+ <path class="st8" d="M18.8,25.9C13.2,32.9,2,52.3,2.2,74c5.6-22.2,18.6-29.3,16.6-48.1Z"/>
+ </svg>
package/dist/nodes/Firecrawl/FireCrawlScraper.node.js ADDED
@@ -0,0 +1,156 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+         desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+     Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+     o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+     var ownKeys = function(o) {
+         ownKeys = Object.getOwnPropertyNames || function (o) {
+             var ar = [];
+             for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+             return ar;
+         };
+         return ownKeys(o);
+     };
+     return function (mod) {
+         if (mod && mod.__esModule) return mod;
+         var result = {};
+         if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+         __setModuleDefault(result, mod);
+         return result;
+     };
+ })();
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.FireCrawlScraper = void 0;
+ // Import resource properties
+ const crawlerProperties = __importStar(require("./resources/crawler/crawler.properties"));
+ const extractProperties = __importStar(require("./resources/extract/extract.properties"));
+ const mapProperties = __importStar(require("./resources/map/map.properties"));
+ const scrapeProperties = __importStar(require("./resources/scrape/scrape.properties"));
+ const llmsTextProperties = __importStar(require("./resources/llmsText/llmsText.properties"));
+ const batchScrapeProperties = __importStar(require("./resources/batchScrape/batchScrape.properties"));
+ const deepResearchProperties = __importStar(require("./resources/deepResearch/deepResearch.properties"));
+ // Import resource methods
+ const crawler_methods_1 = require("./resources/crawler/crawler.methods");
+ const extract_methods_1 = require("./resources/extract/extract.methods");
+ const map_methods_1 = require("./resources/map/map.methods");
+ const scrape_methods_1 = require("./resources/scrape/scrape.methods");
+ const llmsText_methods_1 = require("./resources/llmsText/llmsText.methods");
+ const batchScrape_methods_1 = require("./resources/batchScrape/batchScrape.methods");
+ const deepResearch_methods_1 = require("./resources/deepResearch/deepResearch.methods");
+ class FireCrawlScraper {
+     constructor() {
+         this.description = {
+             displayName: 'FireCrawl Scraper',
+             name: 'fireCrawlScraper',
+             icon: 'file:../../icons/flames-icon.svg',
+             group: ['transform'],
+             version: 1,
+             subtitle: '={{$parameter["resource"]}}',
+             description: 'FireCrawl API for web scraping and crawling',
+             defaults: {
+                 name: 'FireCrawl Scraper',
+             },
+             inputs: ['main'],
+             outputs: ['main'],
+             credentials: [
+                 {
+                     name: 'firecrawlApi',
+                     required: true,
+                 },
+             ],
+             // Define resources
+             properties: [
+                 // Resource selection
+                 {
+                     displayName: 'Resource',
+                     name: 'resource',
+                     type: 'options',
+                     noDataExpression: true,
+                     default: 'scrape',
+                     options: [
+                         {
+                             name: 'Batch Scrape',
+                             value: 'batchScrape',
+                             description: 'Scrape multiple URLs in a batch operation',
+                         },
+                         {
+                             name: 'Crawler',
+                             value: 'crawler',
+                             description: 'Crawl a website and extract data from multiple pages',
+                         },
+                         {
+                             name: 'Deep Research',
+                             value: 'deepResearch',
+                             description: 'AI-powered deep research and analysis on any topic',
+                         },
+                         {
+                             name: 'Extract',
+                             value: 'extract',
+                             description: 'Extract specific information from scraped content',
+                         },
+                         {
+                             name: 'LLMs.txt',
+                             value: 'llmsText',
+                             description: 'Generate LLMs.txt files from any website for LLM training and analysis',
+                         },
+                         {
+                             name: 'Map',
+                             value: 'map',
+                             description: 'Map scraped data to a structured format',
+                         },
+                         {
+                             name: 'Scrape',
+                             value: 'scrape',
+                             description: 'Scrape a single URL using FireCrawl',
+                         },
+                     ],
+                 },
+                 // Add resource-specific properties
+                 ...scrapeProperties.scrapeProperties,
+                 ...mapProperties.mapProperties,
+                 ...crawlerProperties.crawlerProperties,
+                 ...extractProperties.extractProperties,
+                 ...llmsTextProperties.llmsTextProperties,
+                 ...batchScrapeProperties.batchScrapeProperties,
+                 ...deepResearchProperties.deepResearchProperties,
+             ],
+         };
+     }
+     // Execute method to handle all resources
+     async execute() {
+         const resource = this.getNodeParameter('resource', 0);
+         // Route execution to the appropriate resource handler
+         switch (resource) {
+             case 'scrape':
+                 return scrape_methods_1.scrapeMethods.execute.call(this);
+             case 'batchScrape':
+                 return batchScrape_methods_1.batchScrapeMethods.execute.call(this);
+             case 'map':
+                 return map_methods_1.mapMethods.execute.call(this);
+             case 'crawler':
+                 return crawler_methods_1.crawlerMethods.execute.call(this);
+             case 'extract':
+                 return extract_methods_1.extractMethods.execute.call(this);
+             case 'llmsText':
+                 return llmsText_methods_1.llmsTextMethods.execute.call(this);
+             case 'deepResearch':
+                 return deepResearch_methods_1.deepResearchMethods.execute.call(this);
+             default:
+                 throw new Error(`The resource "${resource}" is not known!`);
+         }
+     }
+ }
+ exports.FireCrawlScraper = FireCrawlScraper;
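The resource modules the switch statement dispatches to (for example `scrape.methods.js`) appear in the file list above but not in this excerpt. Since the router invokes them as `scrapeMethods.execute.call(this)`, each module presumably exports an object whose `execute` method runs with the node's execution context and returns n8n items. A hypothetical sketch of that shape, with illustrative names and parameters only:

```js
// Hypothetical shape of a resource module such as scrape.methods.js.
// The router above calls scrapeMethods.execute.call(this), so `this` is the
// node's execution context; nothing here is taken from the package's source.
const scrapeMethods = {
  async execute() {
    const items = this.getInputData();
    const returnData = [];
    for (let i = 0; i < items.length; i++) {
      // 'url' as a parameter name is an assumption based on the README tables.
      const url = this.getNodeParameter('url', i);
      // Reuse a request helper like the firecrawlScrape sketch shown earlier.
      const result = await firecrawlScrape.call(this, url);
      returnData.push({ json: result });
    }
    // An n8n execute() returns an array of output branches, each an array of items.
    return [returnData];
  },
};

module.exports = { scrapeMethods };
```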