n8n-nodes-crawl4ai-plus 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/LICENSE +23 -0
  2. package/LICENSE.md +19 -0
  3. package/README.md +157 -0
  4. package/dist/credentials/Crawl4aiApi.credentials.d.ts +7 -0
  5. package/dist/credentials/Crawl4aiApi.credentials.js +242 -0
  6. package/dist/credentials/Crawl4aiApi.credentials.js.map +1 -0
  7. package/dist/nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.d.ts +5 -0
  8. package/dist/nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.js +37 -0
  9. package/dist/nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.js.map +1 -0
  10. package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlMultipleUrls.operation.d.ts +4 -0
  11. package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlMultipleUrls.operation.js +299 -0
  12. package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlMultipleUrls.operation.js.map +1 -0
  13. package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlSingleUrl.operation.d.ts +4 -0
  14. package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlSingleUrl.operation.js +324 -0
  15. package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlSingleUrl.operation.js.map +1 -0
  16. package/dist/nodes/Crawl4aiBasicCrawler/actions/operations.d.ts +8 -0
  17. package/dist/nodes/Crawl4aiBasicCrawler/actions/operations.js +67 -0
  18. package/dist/nodes/Crawl4aiBasicCrawler/actions/operations.js.map +1 -0
  19. package/dist/nodes/Crawl4aiBasicCrawler/actions/processRawHtml.operation.d.ts +4 -0
  20. package/dist/nodes/Crawl4aiBasicCrawler/actions/processRawHtml.operation.js +148 -0
  21. package/dist/nodes/Crawl4aiBasicCrawler/actions/processRawHtml.operation.js.map +1 -0
  22. package/dist/nodes/Crawl4aiBasicCrawler/actions/router.d.ts +2 -0
  23. package/dist/nodes/Crawl4aiBasicCrawler/actions/router.js +37 -0
  24. package/dist/nodes/Crawl4aiBasicCrawler/actions/router.js.map +1 -0
  25. package/dist/nodes/Crawl4aiBasicCrawler/crawl4ai.svg +17 -0
  26. package/dist/nodes/Crawl4aiBasicCrawler/helpers/apiClient.d.ts +15 -0
  27. package/dist/nodes/Crawl4aiBasicCrawler/helpers/apiClient.js +226 -0
  28. package/dist/nodes/Crawl4aiBasicCrawler/helpers/apiClient.js.map +1 -0
  29. package/dist/nodes/Crawl4aiBasicCrawler/helpers/formatters.d.ts +5 -0
  30. package/dist/nodes/Crawl4aiBasicCrawler/helpers/formatters.js +81 -0
  31. package/dist/nodes/Crawl4aiBasicCrawler/helpers/formatters.js.map +1 -0
  32. package/dist/nodes/Crawl4aiBasicCrawler/helpers/interfaces.d.ts +189 -0
  33. package/dist/nodes/Crawl4aiBasicCrawler/helpers/interfaces.js +3 -0
  34. package/dist/nodes/Crawl4aiBasicCrawler/helpers/interfaces.js.map +1 -0
  35. package/dist/nodes/Crawl4aiBasicCrawler/helpers/utils.d.ts +8 -0
  36. package/dist/nodes/Crawl4aiBasicCrawler/helpers/utils.js +97 -0
  37. package/dist/nodes/Crawl4aiBasicCrawler/helpers/utils.js.map +1 -0
  38. package/dist/nodes/Crawl4aiContentExtractor/Crawl4aiContentExtractor.node.d.ts +5 -0
  39. package/dist/nodes/Crawl4aiContentExtractor/Crawl4aiContentExtractor.node.js +38 -0
  40. package/dist/nodes/Crawl4aiContentExtractor/Crawl4aiContentExtractor.node.js.map +1 -0
  41. package/dist/nodes/Crawl4aiContentExtractor/actions/cssExtractor.operation.d.ts +4 -0
  42. package/dist/nodes/Crawl4aiContentExtractor/actions/cssExtractor.operation.js +336 -0
  43. package/dist/nodes/Crawl4aiContentExtractor/actions/cssExtractor.operation.js.map +1 -0
  44. package/dist/nodes/Crawl4aiContentExtractor/actions/jsonExtractor.operation.d.ts +4 -0
  45. package/dist/nodes/Crawl4aiContentExtractor/actions/jsonExtractor.operation.js +369 -0
  46. package/dist/nodes/Crawl4aiContentExtractor/actions/jsonExtractor.operation.js.map +1 -0
  47. package/dist/nodes/Crawl4aiContentExtractor/actions/llmExtractor.operation.d.ts +4 -0
  48. package/dist/nodes/Crawl4aiContentExtractor/actions/llmExtractor.operation.js +786 -0
  49. package/dist/nodes/Crawl4aiContentExtractor/actions/llmExtractor.operation.js.map +1 -0
  50. package/dist/nodes/Crawl4aiContentExtractor/actions/operations.d.ts +8 -0
  51. package/dist/nodes/Crawl4aiContentExtractor/actions/operations.js +76 -0
  52. package/dist/nodes/Crawl4aiContentExtractor/actions/operations.js.map +1 -0
  53. package/dist/nodes/Crawl4aiContentExtractor/actions/regexExtractor.operation.d.ts +4 -0
  54. package/dist/nodes/Crawl4aiContentExtractor/actions/regexExtractor.operation.js +437 -0
  55. package/dist/nodes/Crawl4aiContentExtractor/actions/regexExtractor.operation.js.map +1 -0
  56. package/dist/nodes/Crawl4aiContentExtractor/actions/router.d.ts +2 -0
  57. package/dist/nodes/Crawl4aiContentExtractor/actions/router.js +37 -0
  58. package/dist/nodes/Crawl4aiContentExtractor/actions/router.js.map +1 -0
  59. package/dist/nodes/Crawl4aiContentExtractor/crawl4ai.svg +17 -0
  60. package/dist/nodes/Crawl4aiContentExtractor/helpers/apiClient.d.ts +1 -0
  61. package/dist/nodes/Crawl4aiContentExtractor/helpers/apiClient.js +7 -0
  62. package/dist/nodes/Crawl4aiContentExtractor/helpers/apiClient.js.map +1 -0
  63. package/dist/nodes/Crawl4aiContentExtractor/helpers/formatters.d.ts +1 -0
  64. package/dist/nodes/Crawl4aiContentExtractor/helpers/formatters.js +8 -0
  65. package/dist/nodes/Crawl4aiContentExtractor/helpers/formatters.js.map +1 -0
  66. package/dist/nodes/Crawl4aiContentExtractor/helpers/interfaces.d.ts +1 -0
  67. package/dist/nodes/Crawl4aiContentExtractor/helpers/interfaces.js +3 -0
  68. package/dist/nodes/Crawl4aiContentExtractor/helpers/interfaces.js.map +1 -0
  69. package/dist/nodes/Crawl4aiContentExtractor/helpers/utils.d.ts +6 -0
  70. package/dist/nodes/Crawl4aiContentExtractor/helpers/utils.js +89 -0
  71. package/dist/nodes/Crawl4aiContentExtractor/helpers/utils.js.map +1 -0
  72. package/dist/tsconfig.tsbuildinfo +1 -0
  73. package/index.js +14 -0
  74. package/package.json +70 -0
package/LICENSE ADDED
@@ -0,0 +1,23 @@
+ MIT License
+
+ Copyright (c) 2025 Heictor Hsiao (Original Author)
+ Copyright (c) 2025 Matias Lopez (First Maintainer)
+ Copyright (c) 2025 Max Soukhomlinov (Current Maintainer)
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/LICENSE.md ADDED
@@ -0,0 +1,19 @@
+ Copyright 2022 n8n
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is furnished to do
+ so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,157 @@
+ # Crawl4AI Plus for n8n
+
+ > **Enhanced fork** with comprehensive Crawl4AI v0.7.x support, including regex extraction, multi-browser support, and 22+ LLM providers.
+
+ ## Project History & Attribution
+
+ This is a maintained fork with enhanced features for Crawl4AI 0.7.x.
+
+ ### Fork Chain
+ - **Original author**: [Heictor Hsiao](https://github.com/golfamigo) - [golfamigo/n8n-nodes-crawl4j](https://github.com/golfamigo/n8n-nodes-crawl4j)
+ - **First maintainer**: [Matias Lopez](https://github.com/qmatiaslopez) - [qmatiaslopez/n8n-nodes-crawl4j](https://github.com/qmatiaslopez/n8n-nodes-crawl4j)
+ - **Current maintainer**: [Max Soukhomlinov](https://github.com/msoukhomlinov) - [msoukhomlinov/n8n-nodes-crawl4ai-plus](https://github.com/msoukhomlinov/n8n-nodes-crawl4ai-plus)
+
+ All credit for the original implementation goes to **Heictor Hsiao** and **Matias Lopez**.
+
+ ## What's New in Plus
+
+ This enhanced fork includes Priority 1 features for Crawl4AI 0.7.x:
+
+ - ✅ **Regex Extraction** - NEW! 21 built-in patterns (email, phone, URL, credit cards, etc.)
+ - ✅ **Multi-Browser Support** - Chromium, Firefox, and WebKit
+ - ✅ **Enhanced Cache Modes** - 5 modes (ENABLED, DISABLED, READ_ONLY, WRITE_ONLY, BYPASS)
+ - ✅ **22+ LLM Providers** - OpenAI, Anthropic, Google, DeepSeek, Groq, Ollama, and more
+ - ✅ **Dynamic Content** - wait_for parameter for JavaScript-heavy sites
+ - ✅ **External LiteLLM Proxy** - Connect to custom LLM endpoints
+ - ✅ **Better Error Handling** - Exposed status codes and detailed error messages
+
+ ---
+
+ This project provides n8n integration for Crawl4AI, a powerful web crawling and data extraction tool. It consists of two main nodes:
+
+ 1. **Crawl4AI: Basic Crawler** - For general web crawling and content extraction
+ 2. **Crawl4AI: Content Extractor** - For extracting structured data using CSS selectors, LLM, JSON, or regex patterns
+
+ ## Features
+
+ ### Basic Crawler Node
+
+ - **Crawl Single URL** - Extract content from a single web page
+ - **Crawl Multiple URLs** - Process multiple web pages in one operation
+ - **Process Raw HTML** - Extract content from raw HTML without crawling
+
+ ### Content Extractor Node
+
+ - **CSS Selector Extractor** - Extract structured data using CSS selectors
+ - **LLM Extractor** - Use AI to extract structured data from webpages
+ - **JSON Extractor** - Extract and process JSON data from web pages
+ - **Regex Extractor** - Extract data using 21 built-in patterns or custom regex (NEW! See the sketch after this list)
+
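To give a feel for what the Regex Extractor does, here is a minimal, illustrative TypeScript sketch: a custom pattern alongside an email-style pattern, applied to already-crawled page text. The pattern names and shapes are examples only, not the node's actual parameter names.

```typescript
// Illustrative only: custom patterns in the spirit of the Regex Extractor,
// applied to already-crawled page text with plain regex matching.
const patterns: Record<string, RegExp> = {
  email: /[\w.+-]+@[\w-]+\.[\w.-]+/g, // rough stand-in for the built-in email pattern
  invoiceId: /INV-\d{4}-\d{6}/g,      // hypothetical custom pattern
};

const pageText = 'Contact billing@example.com about INV-2025-000123.';

for (const [label, regex] of Object.entries(patterns)) {
  console.log(label, pageText.match(regex) ?? []);
  // email [ 'billing@example.com' ]
  // invoiceId [ 'INV-2025-000123' ]
}
```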
+ ## Installation
+
+ 1. Clone this repository into your n8n custom nodes directory
+ 2. Run `npm install` to install dependencies
+ 3. Restart your n8n instance
+
+ ## Usage
+
+ ### Setting up credentials
+
+ Before using the nodes, you need to set up Crawl4AI API credentials:
+
+ 1. Go to **Settings > Credentials > New**
+ 2. Select **Crawl4AI API**
+ 3. Configure connection settings:
+    - **Connection Mode**: Direct or Docker
+    - **Authentication**: Configure as needed
+    - **LLM Settings**: Enable and configure if needed for AI extraction
+
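If the Docker connection fails, it can help to sanity-check the server outside n8n first. Below is a minimal sketch, assuming the standard Crawl4AI Docker health endpoint and optional Bearer-token auth; adjust the URL and token to match your credential values.

```typescript
// Minimal connectivity check against a Crawl4AI Docker server (Node 18+).
// Assumes the standard /health endpoint and optional Bearer-token auth.
const dockerUrl = 'http://crawl4ai:11235'; // same value as the Docker Server URL credential
const apiToken = process.env.CRAWL4AI_TOKEN ?? '';

async function checkConnection(): Promise<void> {
  const res = await fetch(`${dockerUrl}/health`, {
    headers: apiToken ? { Authorization: `Bearer ${apiToken}` } : {},
  });
  console.log(res.ok ? 'Crawl4AI server is reachable' : `Server returned ${res.status}`);
}

checkConnection().catch(console.error);
```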
+ ### Basic Crawler Usage
+
+ The Basic Crawler node allows you to crawl web pages and extract their content:
+
+ 1. Add the "Crawl4AI: Basic Crawler" node to your workflow
+ 2. Select an operation (Crawl Single URL, Crawl Multiple URLs, or Process Raw HTML)
+ 3. Configure the required parameters
+ 4. Run the workflow to extract content
+
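Each crawled page comes back as an n8n item. As a reading aid, the TypeScript sketch below approximates the fields based on Crawl4AI's result model; the node's exact output keys may differ.

```typescript
// Approximate shape of one crawl result item, based on Crawl4AI's result model.
// The node's exact output keys may differ; treat this as a reading aid only.
interface CrawlResultItem {
  url: string;
  success: boolean;
  statusCode?: number; // surfaced in this fork for better error handling
  markdown?: string;   // page content converted to markdown
  html?: string;       // raw or cleaned HTML
  links?: { internal: string[]; external: string[] };
}

const example: CrawlResultItem = {
  url: 'https://example.com',
  success: true,
  statusCode: 200,
  markdown: '# Example Domain\n...',
  links: { internal: [], external: ['https://www.iana.org/domains/example'] },
};
console.log(example.success);
```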
+ ### Content Extractor Usage
+
+ The Content Extractor node allows you to extract structured data from web pages:
+
+ 1. Add the "Crawl4AI: Content Extractor" node to your workflow
+ 2. Select an extraction method (CSS Selector, LLM, JSON, or Regex)
+ 3. Configure the extraction parameters
+ 4. Run the workflow to extract structured data
+
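For CSS extraction, it helps to think in terms of a schema: a base selector that matches each item, plus per-field selectors. The sketch below uses the schema style of Crawl4AI's JsonCssExtractionStrategy; the node's UI collects equivalent values, and the selectors here are invented for illustration.

```typescript
// A CSS extraction schema in the style of Crawl4AI's JsonCssExtractionStrategy.
// Selectors are invented for illustration; the node's UI collects equivalent values.
const productSchema = {
  name: 'Products',
  baseSelector: 'div.product-card', // one extracted item per match
  fields: [
    { name: 'title', selector: 'h2.title', type: 'text' },
    { name: 'price', selector: 'span.price', type: 'text' },
    { name: 'link', selector: 'a', type: 'attribute', attribute: 'href' },
  ],
};

console.log(JSON.stringify(productSchema, null, 2));
```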
+ ## Configuration Options
+
+ ### Browser Options
+
+ - **Headless Mode**: Run browser in headless mode
+ - **Enable JavaScript**: Enable JavaScript execution
+ - **Viewport Size**: Set browser viewport dimensions
+ - **Timeout**: Maximum time to wait for page load
+ - **User Agent**: Override browser user agent
+
+ ### Crawler Options
+
+ - **Cache Mode**: Control caching behavior
+ - **JavaScript Code**: Execute custom JS on the page
+ - **CSS Selector**: Focus crawling on specific elements
+ - **Excluded Tags**: Skip specific HTML tags
+ - **Check Robots.txt**: Respect robots.txt rules
+ - **Word Count Threshold**: Filter content by word count
+
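Taken together, a typical crawler configuration might look like the sketch below. Property names are descriptive stand-ins, not the node's internal parameter names.

```typescript
// Illustrative bundle of the crawler options listed above.
// Property names are descriptive stand-ins, not the node's internal parameters.
const crawlerOptions = {
  cacheMode: 'BYPASS', // ENABLED | DISABLED | READ_ONLY | WRITE_ONLY | BYPASS
  jsCode: 'window.scrollTo(0, document.body.scrollHeight);', // custom JS run on the page
  cssSelector: 'article.main',     // focus crawling on this element
  excludedTags: ['nav', 'footer'], // skip these HTML tags
  checkRobotsTxt: true,            // respect robots.txt rules
  wordCountThreshold: 10,          // drop text blocks shorter than this
};

console.log(crawlerOptions.cacheMode);
```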
+ ### LLM Extraction Options
+
+ - **Extraction Instructions**: Instructions for the AI
+ - **Schema Fields**: Define structured data schema
+ - **LLM Provider**: Choose AI model provider
+ - **Temperature**: Control randomness of AI responses
+
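The same options, sketched as one hypothetical configuration. Field names are illustrative; the provider string follows the LiteLLM-style `provider/model` convention used by Crawl4AI.

```typescript
// Hypothetical LLM extraction configuration mirroring the options above.
// Field names are illustrative; the provider string follows the LiteLLM-style
// "provider/model" convention used by Crawl4AI.
const llmExtraction = {
  instructions: 'Extract the article author, publication date, and a one-sentence summary.',
  schemaFields: [
    { name: 'author', type: 'string', description: 'Byline author name' },
    { name: 'publishedAt', type: 'string', description: 'Publication date in ISO format' },
    { name: 'summary', type: 'string', description: 'One-sentence summary of the article' },
  ],
  provider: 'openai/gpt-4o-mini',
  temperature: 0, // keep extraction deterministic
};

console.log(llmExtraction.provider);
```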
+ ## Project Structure
+
+ ```
+ nodes/
+ ├── Crawl4aiBasicCrawler/
+ │   ├── Crawl4aiBasicCrawler.node.ts       # Main node file
+ │   ├── crawl4ai.svg                       # Icon
+ │   ├── actions/
+ │   │   ├── operations.ts                  # Operations definition
+ │   │   ├── router.ts                      # Router handler
+ │   │   ├── crawlSingleUrl.operation.ts    # Single URL crawl operation
+ │   │   ├── crawlMultipleUrls.operation.ts # Multiple URL crawl operation
+ │   │   └── processRawHtml.operation.ts    # Raw HTML processing operation
+ │   └── helpers/
+ │       ├── interfaces.ts                  # Interface definitions
+ │       ├── utils.ts                       # Common utilities
+ │       ├── apiClient.ts                   # API client
+ │       └── formatters.ts                  # Formatting tools
+
+ └── Crawl4aiContentExtractor/
+     ├── Crawl4aiContentExtractor.node.ts   # Main node file
+     ├── crawl4ai.svg                       # Icon
+     ├── actions/
+     │   ├── operations.ts                  # Operations definition
+     │   ├── router.ts                      # Router handler
+     │   ├── cssExtractor.operation.ts      # CSS selector extraction operation
+     │   ├── llmExtractor.operation.ts      # LLM extraction operation
+     │   ├── jsonExtractor.operation.ts     # JSON extraction operation
+     │   └── regexExtractor.operation.ts    # Regex extraction operation (NEW)
+     └── helpers/
+         ├── interfaces.ts                  # Interface definitions
+         ├── utils.ts                       # Common utilities
+         ├── apiClient.ts                   # API client
+         └── formatters.ts                  # Formatting tools
+
+ credentials/
+ └── Crawl4aiApi.credentials.ts             # Credentials definition
+ ```
+
+ ## License
+
+ MIT
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request.
package/dist/credentials/Crawl4aiApi.credentials.d.ts ADDED
@@ -0,0 +1,7 @@
+ import { ICredentialType, INodeProperties } from 'n8n-workflow';
+ export declare class Crawl4aiApi implements ICredentialType {
+     name: string;
+     displayName: string;
+     documentationUrl: string;
+     properties: INodeProperties[];
+ }
package/dist/credentials/Crawl4aiApi.credentials.js ADDED
@@ -0,0 +1,242 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.Crawl4aiApi = void 0;
+ class Crawl4aiApi {
+     constructor() {
+         this.name = 'crawl4aiApi';
+         this.displayName = 'Crawl4AI API';
+         this.documentationUrl = 'https://github.com/unclecode/crawl4ai';
+         this.properties = [
+             {
+                 displayName: 'Connection Mode',
+                 name: 'connectionMode',
+                 type: 'options',
+                 options: [
+                     {
+                         name: 'Direct Python Package',
+                         value: 'direct',
+                         description: 'Use Crawl4AI directly as a Python package'
+                     },
+                     {
+                         name: 'Docker Client',
+                         value: 'docker',
+                         description: 'Connect to a Crawl4AI Docker container'
+                     },
+                 ],
+                 default: 'docker',
+                 description: 'The mode to connect to Crawl4AI'
+             },
+             {
+                 displayName: 'Docker Server URL',
+                 name: 'dockerUrl',
+                 type: 'string',
+                 default: 'http://crawl4ai:11235',
+                 placeholder: 'http://crawl4ai:11235',
+                 description: 'The URL of the Crawl4AI Docker server',
+                 displayOptions: {
+                     show: {
+                         connectionMode: ['docker'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Authentication Type',
+                 name: 'authenticationType',
+                 type: 'options',
+                 options: [
+                     {
+                         name: 'No Authentication',
+                         value: 'none',
+                         description: 'No authentication is required'
+                     },
+                     {
+                         name: 'Token Authentication',
+                         value: 'token',
+                         description: 'Use an API token for authentication'
+                     },
+                     {
+                         name: 'Username/Password Authentication',
+                         value: 'basic',
+                         description: 'Use username and password for authentication'
+                     },
+                 ],
+                 default: 'token',
+                 description: 'The authentication method to use',
+                 displayOptions: {
+                     show: {
+                         connectionMode: ['docker'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'API Token',
+                 name: 'apiToken',
+                 type: 'string',
+                 typeOptions: {
+                     password: true,
+                 },
+                 default: '',
+                 description: 'The API token for Docker server authentication',
+                 displayOptions: {
+                     show: {
+                         connectionMode: ['docker'],
+                         authenticationType: ['token'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Username',
+                 name: 'username',
+                 type: 'string',
+                 default: '',
+                 description: 'The username for Docker server authentication',
+                 displayOptions: {
+                     show: {
+                         connectionMode: ['docker'],
+                         authenticationType: ['basic'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Password',
+                 name: 'password',
+                 type: 'string',
+                 typeOptions: {
+                     password: true,
+                 },
+                 default: '',
+                 description: 'The password for Docker server authentication',
+                 displayOptions: {
+                     show: {
+                         connectionMode: ['docker'],
+                         authenticationType: ['basic'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Enable LLM Features',
+                 name: 'enableLlm',
+                 type: 'boolean',
+                 default: false,
+                 description: 'Whether to enable LLM-based features',
+             },
+             {
+                 displayName: 'LLM Provider',
+                 name: 'llmProvider',
+                 type: 'options',
+                 options: [
+                     {
+                         name: 'OpenAI',
+                         value: 'openai',
+                     },
+                     {
+                         name: 'Ollama',
+                         value: 'ollama',
+                     },
+                     {
+                         name: 'Groq',
+                         value: 'groq',
+                     },
+                     {
+                         name: 'Anthropic',
+                         value: 'anthropic',
+                     },
+                     {
+                         name: 'Other',
+                         value: 'other',
+                     },
+                 ],
+                 default: 'openai',
+                 description: 'The LLM provider to use for LLM-based features',
+                 displayOptions: {
+                     show: {
+                         enableLlm: [true],
+                     },
+                 },
+             },
+             {
+                 displayName: 'API Key',
+                 name: 'apiKey',
+                 type: 'string',
+                 typeOptions: {
+                     password: true,
+                 },
+                 default: '',
+                 description: 'The API key for the LLM provider',
+                 displayOptions: {
+                     show: {
+                         enableLlm: [true],
+                         llmProvider: ['openai', 'groq', 'anthropic'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Ollama URL',
+                 name: 'ollamaUrl',
+                 type: 'string',
+                 default: 'http://localhost:11434',
+                 description: 'The URL for Ollama server',
+                 displayOptions: {
+                     show: {
+                         enableLlm: [true],
+                         llmProvider: ['ollama'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Custom Provider',
+                 name: 'customProvider',
+                 type: 'string',
+                 default: '',
+                 placeholder: 'custom/llama-3-70b or provider/model',
+                 description: 'The custom provider in format "provider/model". Use "custom/" prefix for external LiteLLM proxies or custom endpoints (e.g., custom/llama-3-70b)',
+                 displayOptions: {
+                     show: {
+                         enableLlm: [true],
+                         llmProvider: ['other'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Custom Base URL',
+                 name: 'customBaseUrl',
+                 type: 'string',
+                 default: '',
+                 placeholder: 'https://litellm-proxy.company.com/v1',
+                 description: 'The base URL for your custom LLM provider, external LiteLLM proxy server, or custom inference endpoint. Required for external providers.',
+                 displayOptions: {
+                     show: {
+                         enableLlm: [true],
+                         llmProvider: ['other'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Custom Provider API Key',
+                 name: 'customApiKey',
+                 type: 'string',
+                 typeOptions: {
+                     password: true,
+                 },
+                 default: '',
+                 description: 'The API key for the custom provider or LiteLLM proxy server',
+                 displayOptions: {
+                     show: {
+                         enableLlm: [true],
+                         llmProvider: ['other'],
+                     },
+                 },
+             },
+             {
+                 displayName: 'Cache Directory',
+                 name: 'cacheDir',
+                 type: 'string',
+                 default: '',
+                 placeholder: '/path/to/cache',
+                 description: 'The directory to store cache files (leave empty for default)',
+             },
+         ];
+     }
+ }
+ exports.Crawl4aiApi = Crawl4aiApi;
+ //# sourceMappingURL=Crawl4aiApi.credentials.js.map
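To make the Docker-mode fields above concrete, here is a hedged sketch of the kind of request an API client could build from them. The `/crawl` endpoint and minimal payload follow the Crawl4AI Docker API; the package's actual apiClient.js (listed but not shown here) may differ in detail.

```typescript
// Sketch of a Docker-mode request assembled from the credential fields above.
// Endpoint and payload follow the Crawl4AI Docker API; the package's own
// apiClient may differ in detail.
interface Crawl4aiCredentials {
  dockerUrl: string;
  authenticationType: 'none' | 'token' | 'basic';
  apiToken?: string;
}

async function crawl(creds: Crawl4aiCredentials, urls: string[]): Promise<unknown> {
  const headers: Record<string, string> = { 'Content-Type': 'application/json' };
  if (creds.authenticationType === 'token' && creds.apiToken) {
    headers.Authorization = `Bearer ${creds.apiToken}`;
  }
  const res = await fetch(`${creds.dockerUrl}/crawl`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ urls }),
  });
  if (!res.ok) throw new Error(`Crawl4AI request failed with status ${res.status}`);
  return res.json();
}

// Example: crawl(credentials, ['https://example.com'])
```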
package/dist/credentials/Crawl4aiApi.credentials.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"Crawl4aiApi.credentials.js","sourceRoot":"","sources":["../../credentials/Crawl4aiApi.credentials.ts"],"names":[],"mappings":";;;AAEA,MAAa,WAAW;IAAxB;QACC,SAAI,GAAG,aAAa,CAAC;QACrB,gBAAW,GAAG,cAAc,CAAC;QAC7B,qBAAgB,GAAG,uCAAuC,CAAC;QAC3D,eAAU,GAAsB;YAE/B;gBACC,WAAW,EAAE,iBAAiB;gBAC9B,IAAI,EAAE,gBAAgB;gBACtB,IAAI,EAAE,SAAS;gBACf,OAAO,EAAE;oBACR;wBACC,IAAI,EAAE,uBAAuB;wBAC7B,KAAK,EAAE,QAAQ;wBACf,WAAW,EAAE,2CAA2C;qBACxD;oBACD;wBACC,IAAI,EAAE,eAAe;wBACrB,KAAK,EAAE,QAAQ;wBACf,WAAW,EAAE,wCAAwC;qBACrD;iBACD;gBACD,OAAO,EAAE,QAAQ;gBACjB,WAAW,EAAE,iCAAiC;aAC9C;YAED;gBACC,WAAW,EAAE,mBAAmB;gBAChC,IAAI,EAAE,WAAW;gBACjB,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,uBAAuB;gBAChC,WAAW,EAAE,uBAAuB;gBACpC,WAAW,EAAE,uCAAuC;gBACpD,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,cAAc,EAAE,CAAC,QAAQ,CAAC;qBAC1B;iBACD;aACD;YACD;gBACC,WAAW,EAAE,qBAAqB;gBAClC,IAAI,EAAE,oBAAoB;gBAC1B,IAAI,EAAE,SAAS;gBACf,OAAO,EAAE;oBACR;wBACC,IAAI,EAAE,mBAAmB;wBACzB,KAAK,EAAE,MAAM;wBACb,WAAW,EAAE,+BAA+B;qBAC5C;oBACD;wBACC,IAAI,EAAE,sBAAsB;wBAC5B,KAAK,EAAE,OAAO;wBACd,WAAW,EAAE,qCAAqC;qBAClD;oBACD;wBACC,IAAI,EAAE,kCAAkC;wBACxC,KAAK,EAAE,OAAO;wBACd,WAAW,EAAE,8CAA8C;qBAC3D;iBACD;gBACD,OAAO,EAAE,OAAO;gBAChB,WAAW,EAAE,kCAAkC;gBAC/C,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,cAAc,EAAE,CAAC,QAAQ,CAAC;qBAC1B;iBACD;aACD;YACD;gBACC,WAAW,EAAE,WAAW;gBACxB,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE;oBACZ,QAAQ,EAAE,IAAI;iBACd;gBACD,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,gDAAgD;gBAC7D,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,cAAc,EAAE,CAAC,QAAQ,CAAC;wBAC1B,kBAAkB,EAAE,CAAC,OAAO,CAAC;qBAC7B;iBACD;aACD;YACD;gBACC,WAAW,EAAE,UAAU;gBACvB,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,+CAA+C;gBAC5D,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,cAAc,EAAE,CAAC,QAAQ,CAAC;wBAC1B,kBAAkB,EAAE,CAAC,OAAO,CAAC;qBAC7B;iBACD;aACD;YACD;gBACC,WAAW,EAAE,UAAU;gBACvB,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE;oBACZ,QAAQ,EAAE,IAAI;iBACd;gBACD,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,+CAA+C;gBAC5D,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,cAAc,EAAE,CAAC,QAAQ,CAAC;wBAC1B,kBAAkB,EAAE,CAAC,OAAO,CAAC;qBAC7B;iBACD;aACD;YAED;gBACC,WAAW,EAAE,qBAAqB;gBAClC,IAAI,EAAE,WAAW;gBACjB,IAAI,EAAE,SAAS;gBACf,OAAO,EAAE,KAAK;gBACd,WAAW,EAAE,sCAAsC;aACnD;YACD;gBACC,WAAW,EAAE,cAAc;gBAC3B,IAAI,EAAE,aAAa;gBACnB,IAAI,EAAE,SAAS;gBACf,OAAO,EAAE;oBACR;wBACC,IAAI,EAAE,QAAQ;wBACd,KAAK,EAAE,QAAQ;qBACf;oBACD;wBACC,IAAI,EAAE,QAAQ;wBACd,KAAK,EAAE,QAAQ;qBACf;oBACD;wBACC,IAAI,EAAE,MAAM;wBACZ,KAAK,EAAE,MAAM;qBACb;oBACD;wBACC,IAAI,EAAE,WAAW;wBACjB,KAAK,EAAE,WAAW;qBAClB;oBACD;wBACC,IAAI,EAAE,OAAO;wBACb,KAAK,EAAE,OAAO;qBACd;iBACD;gBACD,OAAO,EAAE,QAAQ;gBACjB,WAAW,EAAE,gDAAgD;gBAC7D,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,SAAS,EAAE,CAAC,IAAI,CAAC;qBACjB;iBACD;aACD;YACD;gBACC,WAAW,EAAE,SAAS;gBACtB,IAAI,EAAE,QAAQ;gBACd,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE;oBACZ,QAAQ,EAAE,IAAI;iBACd;gBACD,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,kCAAkC;gBAC/C,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,SAAS,EAAE,CAAC,IAAI,CAAC;wBACjB,WAAW,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,CAAC;qBAC5C;iBACD;aACD;YACD;gBACC,WAAW,EAAE,YAAY;gBACzB,IAAI,EAAE,WAAW;gBACjB,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,wBAAwB;gBACjC,WAAW,EAAE,2BAA2B;gBACxC,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,SAAS,EAAE,CAAC,IAAI,CAAC;wBACjB,WAAW,EAAE,CAAC,QAAQ,CAAC;qBACvB;iBACD;aACD;YACD;gBACC,WAAW,EAAE,iBAAiB;gBAC9B,IAAI,EAAE,gBAAgB;gBACtB,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,sCAAsC;gBACnD,WAAW,EAAE,kJAAkJ;gBAC/J,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,SAAS,EAAE,CAAC,IAAI,CAAC;wBACjB,WAAW,EAAE,CAAC,OAAO,CAAC;qBACtB;iBACD;aACD;YACD;gBACC,WAAW,EAAE,iBAAiB;gBAC9B,IAAI,EAAE,eAAe;gBACrB,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,sCAAsC;gB
ACnD,WAAW,EAAE,0IAA0I;gBACvJ,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,SAAS,EAAE,CAAC,IAAI,CAAC;wBACjB,WAAW,EAAE,CAAC,OAAO,CAAC;qBACtB;iBACD;aACD;YACD;gBACC,WAAW,EAAE,yBAAyB;gBACtC,IAAI,EAAE,cAAc;gBACpB,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE;oBACZ,QAAQ,EAAE,IAAI;iBACd;gBACD,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,6DAA6D;gBAC1E,cAAc,EAAE;oBACf,IAAI,EAAE;wBACL,SAAS,EAAE,CAAC,IAAI,CAAC;wBACjB,WAAW,EAAE,CAAC,OAAO,CAAC;qBACtB;iBACD;aACD;YAED;gBACC,WAAW,EAAE,iBAAiB;gBAC9B,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,gBAAgB;gBAC7B,WAAW,EAAE,8DAA8D;aAC3E;SACD,CAAC;IACH,CAAC;CAAA;AA9OD,kCA8OC"}
package/dist/nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.d.ts ADDED
@@ -0,0 +1,5 @@
+ import { IExecuteFunctions, INodeExecutionData, INodeType, INodeTypeDescription } from 'n8n-workflow';
+ export declare class Crawl4aiBasicCrawler implements INodeType {
+     description: INodeTypeDescription;
+     execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
+ }
package/dist/nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.js ADDED
@@ -0,0 +1,37 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.Crawl4aiBasicCrawler = void 0;
+ const router_1 = require("./actions/router");
+ const operations_1 = require("./actions/operations");
+ class Crawl4aiBasicCrawler {
+     constructor() {
+         this.description = {
+             displayName: 'Crawl4AI: Basic Crawler',
+             name: 'crawl4aiBasicCrawler',
+             icon: 'file:crawl4ai.svg',
+             group: ['transform'],
+             version: 1,
+             subtitle: '={{$parameter["operation"]}}',
+             description: 'Crawl websites using Crawl4AI',
+             defaults: {
+                 name: 'Crawl4AI: Basic Crawler',
+             },
+             inputs: ['main'],
+             outputs: ['main'],
+             credentials: [
+                 {
+                     name: 'crawl4aiApi',
+                     required: true,
+                 },
+             ],
+             properties: [
+                 ...operations_1.description,
+             ],
+         };
+     }
+     async execute() {
+         return await router_1.router.call(this);
+     }
+ }
+ exports.Crawl4aiBasicCrawler = Crawl4aiBasicCrawler;
+ //# sourceMappingURL=Crawl4aiBasicCrawler.node.js.map
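The node itself stays thin: `execute()` delegates to the router, which dispatches on the selected operation. A minimal sketch of that pattern follows, with illustrative names; the actual router.js is listed above but not shown here.

```typescript
// Minimal sketch of the router pattern the node delegates to: read the selected
// operation and dispatch to the matching *.operation module. Names are illustrative.
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

type OperationHandler = (this: IExecuteFunctions) => Promise<INodeExecutionData[][]>;

const handlers: Record<string, OperationHandler> = {
  // crawlSingleUrl, crawlMultipleUrls, processRawHtml would be registered here,
  // each imported from its *.operation module.
};

export async function router(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
  const operation = this.getNodeParameter('operation', 0) as string;
  const handler = handlers[operation];
  if (!handler) {
    throw new Error(`Unknown operation: ${operation}`);
  }
  return handler.call(this);
}
```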
package/dist/nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"Crawl4aiBasicCrawler.node.js","sourceRoot":"","sources":["../../../nodes/Crawl4aiBasicCrawler/Crawl4aiBasicCrawler.node.ts"],"names":[],"mappings":";;;AAQA,6CAA0C;AAC1C,qDAA4E;AAG5E,MAAa,oBAAoB;IAAjC;QACC,gBAAW,GAAyB;YACnC,WAAW,EAAE,yBAAyB;YACtC,IAAI,EAAE,sBAAsB;YAC5B,IAAI,EAAE,mBAAmB;YACzB,KAAK,EAAE,CAAC,WAAW,CAAC;YACpB,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,8BAA8B;YACxC,WAAW,EAAE,+BAA+B;YAC5C,QAAQ,EAAE;gBACT,IAAI,EAAE,yBAAyB;aAC/B;YACD,MAAM,EAAE,CAAC,MAAM,CAAC;YAChB,OAAO,EAAE,CAAC,MAAM,CAAC;YACjB,WAAW,EAAE;gBACZ;oBACC,IAAI,EAAE,aAAa;oBACnB,QAAQ,EAAE,IAAI;iBACd;aACD;YACD,UAAU,EAAE;gBACX,GAAG,wBAAqB;aACxB;SACD,CAAC;IAKH,CAAC;IAHA,KAAK,CAAC,OAAO;QACZ,OAAO,MAAM,eAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAChC,CAAC;CACD;AA5BD,oDA4BC"}
package/dist/nodes/Crawl4aiBasicCrawler/actions/crawlMultipleUrls.operation.d.ts ADDED
@@ -0,0 +1,4 @@
+ import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
+ import type { Crawl4aiNodeOptions } from '../helpers/interfaces';
+ export declare const description: INodeProperties[];
+ export declare function execute(this: IExecuteFunctions, items: INodeExecutionData[], nodeOptions: Crawl4aiNodeOptions): Promise<INodeExecutionData[]>;