@n8n/ai-workflow-builder 1.3.1 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/build.tsbuildinfo +1 -1
- package/dist/multi-agent-workflow-subgraphs.d.ts +1 -0
- package/dist/multi-agent-workflow-subgraphs.js +6 -1
- package/dist/multi-agent-workflow-subgraphs.js.map +1 -1
- package/dist/prompts/agents/builder.prompt.js +5 -17
- package/dist/prompts/agents/builder.prompt.js.map +1 -1
- package/dist/prompts/agents/configurator.prompt.js +36 -5
- package/dist/prompts/agents/configurator.prompt.js.map +1 -1
- package/dist/prompts/agents/discovery.prompt.js +63 -10
- package/dist/prompts/agents/discovery.prompt.js.map +1 -1
- package/dist/prompts/chains/categorization.prompt.d.ts +5 -2
- package/dist/prompts/chains/categorization.prompt.js +14 -5
- package/dist/prompts/chains/categorization.prompt.js.map +1 -1
- package/dist/prompts/legacy-agent.prompt.js +2 -9
- package/dist/prompts/legacy-agent.prompt.js.map +1 -1
- package/dist/prompts/shared/node-guidance/index.d.ts +1 -0
- package/dist/prompts/shared/node-guidance/index.js +6 -0
- package/dist/prompts/shared/node-guidance/index.js.map +1 -0
- package/dist/prompts/shared/node-guidance/structured-output-parser.d.ts +2 -0
- package/dist/prompts/shared/node-guidance/structured-output-parser.js +35 -0
- package/dist/prompts/shared/node-guidance/structured-output-parser.js.map +1 -0
- package/dist/tools/best-practices/data-persistence.d.ts +7 -0
- package/dist/tools/best-practices/data-persistence.js +192 -0
- package/dist/tools/best-practices/data-persistence.js.map +1 -0
- package/dist/tools/best-practices/data-transformation.js +0 -16
- package/dist/tools/best-practices/data-transformation.js.map +1 -1
- package/dist/tools/best-practices/document-processing.js +3 -6
- package/dist/tools/best-practices/document-processing.js.map +1 -1
- package/dist/tools/best-practices/index.js +6 -2
- package/dist/tools/best-practices/index.js.map +1 -1
- package/dist/tools/best-practices/notification.js +35 -129
- package/dist/tools/best-practices/notification.js.map +1 -1
- package/dist/tools/best-practices/scraping-and-research.js +1 -4
- package/dist/tools/best-practices/scraping-and-research.js.map +1 -1
- package/dist/tools/best-practices/triage.js +66 -128
- package/dist/tools/best-practices/triage.js.map +1 -1
- package/dist/tools/get-best-practices.tool.d.ts +4 -3
- package/dist/types/categorization.d.ts +1 -0
- package/dist/types/categorization.js +2 -0
- package/dist/types/categorization.js.map +1 -1
- package/dist/types/index.d.ts +1 -0
- package/dist/types/index.js.map +1 -1
- package/dist/types/node-guidance.d.ts +7 -0
- package/dist/types/node-guidance.js +3 -0
- package/dist/types/node-guidance.js.map +1 -0
- package/dist/workflow-builder-agent.js +1 -0
- package/dist/workflow-builder-agent.js.map +1 -1
- package/package.json +5 -5
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DataPersistenceBestPractices = void 0;
|
|
4
|
+
const categorization_1 = require("../../types/categorization");
|
|
5
|
+
class DataPersistenceBestPractices {
|
|
6
|
+
technique = categorization_1.WorkflowTechnique.DATA_PERSISTENCE;
|
|
7
|
+
version = '1.0.0';
|
|
8
|
+
documentation = `# Best Practices: Data Persistence
|
|
9
|
+
|
|
10
|
+
## Overview
|
|
11
|
+
|
|
12
|
+
Data persistence involves storing, updating, or retrieving records from durable storage systems. This technique is essential when you need to maintain data beyond the lifetime of a single workflow execution, or when you need to access existing data that users have stored in their spreadsheets, tables, or databases as part of your workflow logic.
|
|
13
|
+
|
|
14
|
+
## When to Use Data Persistence
|
|
15
|
+
|
|
16
|
+
Use data persistence when you need to:
|
|
17
|
+
- Store workflow results for later retrieval or audit trails
|
|
18
|
+
- Maintain records that multiple workflows can access and update
|
|
19
|
+
- Create a centralized data repository for your automation
|
|
20
|
+
- Archive historical data for reporting or compliance
|
|
21
|
+
- Build data that persists across workflow executions
|
|
22
|
+
- Track changes or maintain state over time
|
|
23
|
+
- Store raw form inputs
|
|
24
|
+
|
|
25
|
+
## Choosing the Right Storage Node
|
|
26
|
+
|
|
27
|
+
### Data Table (n8n-nodes-base.dataTable) - PREFERRED
|
|
28
|
+
|
|
29
|
+
**Best for:** Quick setup, small to medium amounts of data
|
|
30
|
+
|
|
31
|
+
Advantages:
|
|
32
|
+
- No credentials or external configuration required
|
|
33
|
+
- Built directly into n8n
|
|
34
|
+
- Fast and reliable for small to medium datasets
|
|
35
|
+
- Ideal for prototyping and internal workflows
|
|
36
|
+
- No additional costs or external dependencies
|
|
37
|
+
|
|
38
|
+
When to use:
|
|
39
|
+
- Internal workflow data storage
|
|
40
|
+
- Temporary or staging data
|
|
41
|
+
- Admin/audit trails
|
|
42
|
+
- Simple record keeping
|
|
43
|
+
- Development and testing
|
|
44
|
+
|
|
45
|
+
### Google Sheets (n8n-nodes-base.googleSheets)
|
|
46
|
+
|
|
47
|
+
**Best for:** Collaboration, reporting, easy data sharing
|
|
48
|
+
|
|
49
|
+
Advantages:
|
|
50
|
+
- Familiar spreadsheet interface for non-technical users
|
|
51
|
+
- Easy to share and collaborate on data
|
|
52
|
+
- Built-in visualization and formula capabilities
|
|
53
|
+
- Good for reporting and dashboards
|
|
54
|
+
- Accessible from anywhere
|
|
55
|
+
|
|
56
|
+
When to use:
|
|
57
|
+
- Data needs to be viewed/edited by multiple people
|
|
58
|
+
- Non-technical users need access to data
|
|
59
|
+
- Integration with other Google Workspace tools
|
|
60
|
+
- Simple data structures without complex relationships
|
|
61
|
+
- Workflow needs access to existing spreadsheets in Google Sheets
|
|
62
|
+
|
|
63
|
+
Pitfalls:
|
|
64
|
+
- API rate limits can affect high-volume workflows
|
|
65
|
+
- Not suitable for frequently changing data
|
|
66
|
+
- Performance degrades with very large datasets (>10k rows)
|
|
67
|
+
|
|
68
|
+
### Airtable (n8n-nodes-base.airtable)
|
|
69
|
+
|
|
70
|
+
**Best for:** Structured data with relationships, rich field types
|
|
71
|
+
|
|
72
|
+
Advantages:
|
|
73
|
+
- Supports relationships between tables
|
|
74
|
+
- Rich field types (attachments, select, links, etc.)
|
|
75
|
+
- Better structure than spreadsheets
|
|
76
|
+
|
|
77
|
+
When to use:
|
|
78
|
+
- Data has relationships or references between records
|
|
79
|
+
- Need structured database-like features
|
|
80
|
+
- Managing projects, tasks, or inventory
|
|
81
|
+
- Workflow needs access to existing data in Airtable
|
|
82
|
+
|
|
83
|
+
Pitfalls:
|
|
84
|
+
- Requires Airtable account and API key
|
|
85
|
+
- Schema changes require careful planning
|
|
86
|
+
|
|
87
|
+
## Storage Patterns
|
|
88
|
+
|
|
89
|
+
### Immediate Storage Pattern
|
|
90
|
+
|
|
91
|
+
Store data immediately after collection or generation:
|
|
92
|
+
|
|
93
|
+
\`\`\`mermaid
|
|
94
|
+
flowchart LR
|
|
95
|
+
Trigger --> Process_Data["Process Data"]
|
|
96
|
+
Process_Data --> Storage_Node["Storage Node"]
|
|
97
|
+
Storage_Node --> Continue_Workflow["Continue Workflow"]
|
|
98
|
+
\`\`\`
|
|
99
|
+
|
|
100
|
+
Best for: Raw data preservation, audit trails, form submissions
|
|
101
|
+
|
|
102
|
+
### Batch Storage Pattern
|
|
103
|
+
|
|
104
|
+
Collect multiple items and store them together:
|
|
105
|
+
|
|
106
|
+
\`\`\`mermaid
|
|
107
|
+
flowchart LR
|
|
108
|
+
Trigger --> Loop_Split["Loop/Split"]
|
|
109
|
+
Loop_Split --> Process["Process"]
|
|
110
|
+
Process --> Aggregate["Aggregate"]
|
|
111
|
+
Aggregate --> Storage_Node["Storage Node"]
|
|
112
|
+
\`\`\`
|
|
113
|
+
|
|
114
|
+
Best for: Processing lists, batch operations, scheduled aggregations
|
|
115
|
+
|
|
116
|
+
### Update Pattern
|
|
117
|
+
|
|
118
|
+
Retrieve, modify, and update existing records:
|
|
119
|
+
|
|
120
|
+
\`\`\`mermaid
|
|
121
|
+
flowchart LR
|
|
122
|
+
Trigger --> Retrieve["Retrieve from Storage"]
|
|
123
|
+
Retrieve --> Modify["Modify"]
|
|
124
|
+
Modify --> Update_Storage["Update Storage Node"]
|
|
125
|
+
\`\`\`
|
|
126
|
+
|
|
127
|
+
Best for: Maintaining state, updating records, tracking changes
|
|
128
|
+
|
|
129
|
+
### Lookup Pattern
|
|
130
|
+
|
|
131
|
+
Query storage to retrieve specific records:
|
|
132
|
+
|
|
133
|
+
\`\`\`mermaid
|
|
134
|
+
flowchart LR
|
|
135
|
+
Trigger --> Query_Storage["Query Storage Node"]
|
|
136
|
+
Query_Storage --> Use_Data["Use Retrieved Data"]
|
|
137
|
+
Use_Data --> Continue_Workflow["Continue Workflow"]
|
|
138
|
+
\`\`\`
|
|
139
|
+
|
|
140
|
+
Best for: Enrichment, validation, conditional logic based on stored data
|
|
141
|
+
|
|
142
|
+
## Key Considerations
|
|
143
|
+
|
|
144
|
+
### Data Structure
|
|
145
|
+
|
|
146
|
+
- **Plan your schema ahead:** Define what fields you need before creating storage
|
|
147
|
+
- **Use consistent field names:** Match field names across your workflow for easy mapping
|
|
148
|
+
- **Consider data types:** Ensure your storage supports the data types you need
|
|
149
|
+
- **Think about relationships:** If data is related, consider Airtable or use multiple tables
|
|
150
|
+
|
|
151
|
+
### Performance
|
|
152
|
+
|
|
153
|
+
- **Batch operations when possible:** Multiple small writes are slower than batch operations
|
|
154
|
+
- **Use appropriate operations:** Use "append" for new records, "update" for modifications
|
|
155
|
+
- **Consider API limits:** Google Sheets has rate limits; plan accordingly for high-volume workflows
|
|
156
|
+
|
|
157
|
+
### Data Integrity
|
|
158
|
+
|
|
159
|
+
- **Store raw data first:** Keep unmodified input before transformations
|
|
160
|
+
- **Handle errors gracefully:** Use error handling to prevent data loss on failures
|
|
161
|
+
- **Validate before storing:** Ensure data quality before persistence
|
|
162
|
+
- **Avoid duplicates:** Use unique identifiers or upsert operations when appropriate
|
|
163
|
+
|
|
164
|
+
## Important Distinctions
|
|
165
|
+
|
|
166
|
+
### Storage vs. Transformation
|
|
167
|
+
|
|
168
|
+
- **Set/Merge nodes are NOT storage:** They transform data in memory only
|
|
169
|
+
- **Storage happens explicitly:** Data won't persist unless you explicitly write it to storage
|
|
170
|
+
|
|
171
|
+
### Temporary vs. Persistent Storage
|
|
172
|
+
|
|
173
|
+
- **NOT covered by this technique:** Redis, caching, session storage, in-memory operations
|
|
174
|
+
- **This technique covers:** Durable storage that persists beyond workflow execution
|
|
175
|
+
- **Focus on permanence:** Use these nodes when you need data to survive restarts and be queryable later
|
|
176
|
+
|
|
177
|
+
## Common Pitfalls to Avoid
|
|
178
|
+
|
|
179
|
+
### Not Handling Duplicates
|
|
180
|
+
|
|
181
|
+
Without proper unique identifiers or upsert logic, you may create duplicate records. Use unique IDs or check for existing records before inserting.
|
|
182
|
+
|
|
183
|
+
### Ignoring Storage Limits
|
|
184
|
+
|
|
185
|
+
Each storage system has limits (row counts, API rates, file sizes). Design your workflow to work within these constraints or implement pagination/batching.
|
|
186
|
+
`;
|
|
187
|
+
getDocumentation() {
|
|
188
|
+
return this.documentation;
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
exports.DataPersistenceBestPractices = DataPersistenceBestPractices;
|
|
192
|
+
//# sourceMappingURL=data-persistence.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"data-persistence.js","sourceRoot":"","sources":["../../../src/tools/best-practices/data-persistence.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,4BAA4B;IAC/B,SAAS,GAAG,kCAAiB,CAAC,gBAAgB,CAAC;IAC/C,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkLjC,CAAC;IAED,gBAAgB;QACf,OAAO,IAAI,CAAC,aAAa,CAAC;IAC3B,CAAC;CACD;AA3LD,oEA2LC"}
|
|
@@ -69,22 +69,6 @@ class DataTransformationBestPractices {
|
|
|
69
69
|
|
|
70
70
|
#### Code Node (n8n-nodes-base.code)
|
|
71
71
|
|
|
72
|
-
**Built-in Nodes vs. Code Node**
|
|
73
|
-
- Prefer basic built-in nodes (Edit Fields, Filter, Split Out, Summarize, Aggregate, etc.) over Code node. Use Code only for complex logic that can't be achieved otherwise.
|
|
74
|
-
- Rule of thumb: if the goal can be achieved with fewer than 5 basic nodes, use basic nodes
|
|
75
|
-
|
|
76
|
-
**When NOT to Use**: Code node may be slower than core nodes (like Edit Fields, If, Switch, Split Out, Aggregate, etc.) as Code nodes run in a sandboxed environment. Avoid the code node where possible — it should only be used for complex transformations that can't be done with other nodes. For example, DO NOT use it for:
|
|
77
|
-
- Adding or removing fields from items (use the 'edit fields' node instead)
|
|
78
|
-
- Single-line data transformations of item fields (use the 'edit fields' node instead)
|
|
79
|
-
- Filtering items based on their fields (use the 'filter' node instead)
|
|
80
|
-
- Pivoting or summarizing data across multiple items (use the 'summarize' node instead)
|
|
81
|
-
- Splitting arrays inside items out into multiple items (use the 'split out' node instead)
|
|
82
|
-
- Aggregating multiple items into a single item (use the 'aggregate' node instead)
|
|
83
|
-
- Sorting items in an array based on their fields (use the 'Sort' node instead)
|
|
84
|
-
- Generating HTML from text or formatting text as HTML (use the 'HTML' node set to operation 'Generate HTML Template' or 'Convert to HTML Table' instead)
|
|
85
|
-
|
|
86
|
-
**When to Use**: Complex transformations impossible with built-in nodes
|
|
87
|
-
|
|
88
72
|
**Execution Modes**:
|
|
89
73
|
- "Run Once per Item": Process each item independently
|
|
90
74
|
- "Run Once for All Items": Access entire dataset (for aggregation)
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"data-transformation.js","sourceRoot":"","sources":["../../../src/tools/best-practices/data-transformation.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,+BAA+B;IAClC,SAAS,GAAG,kCAAiB,CAAC,mBAAmB,CAAC;IAClD,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG
|
|
1
|
+
{"version":3,"file":"data-transformation.js","sourceRoot":"","sources":["../../../src/tools/best-practices/data-transformation.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,+BAA+B;IAClC,SAAS,GAAG,kCAAiB,CAAC,mBAAmB,CAAC;IAClD,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAoIjC,CAAC;IAED,gBAAgB;QACf,OAAO,IAAI,CAAC,aAAa,CAAC;IAC3B,CAAC;CACD;AA7ID,0EA6IC"}
|
|
@@ -103,14 +103,12 @@ For varied or complex documents:
|
|
|
103
103
|
Option 1 - Using Document Loader (Recommended for binary files):
|
|
104
104
|
1. Pass binary data directly to Document Loader node (set Data Source to "Binary")
|
|
105
105
|
2. Connect to AI Agent or LLM Chain for processing
|
|
106
|
-
3.
|
|
107
|
-
4. Validate extracted fields before processing
|
|
106
|
+
3. Validate extracted fields before processing
|
|
108
107
|
|
|
109
108
|
Option 2 - Using text extraction:
|
|
110
109
|
1. Extract raw text using Extract from File or OCR
|
|
111
110
|
2. Pass to AI Agent or LLM Chain with structured prompt
|
|
112
|
-
3.
|
|
113
|
-
4. Validate extracted fields before processing
|
|
111
|
+
3. Validate extracted fields before processing
|
|
114
112
|
|
|
115
113
|
Example system prompt structure:
|
|
116
114
|
"Extract the following fields from the document: [field list]. Return as JSON with this schema: [schema example]"
|
|
@@ -198,7 +196,6 @@ Configuration: Include structured output tools for consistent results
|
|
|
198
196
|
|
|
199
197
|
**LLM Chain (@n8n/n8n-nodes-langchain.chainLlm)**
|
|
200
198
|
Purpose: Document classification and data extraction
|
|
201
|
-
Use with: Structured Output Parser for JSON consistency
|
|
202
199
|
|
|
203
200
|
**Document Loader (@n8n/n8n-nodes-langchain.documentLoader)**
|
|
204
201
|
Purpose: Load and process documents directly from binary data for AI processing
|
|
@@ -238,7 +235,7 @@ Modes: Use "Pass Through" to preserve binary from one branch
|
|
|
238
235
|
**Edit Fields (Set) (n8n-nodes-base.set)**
|
|
239
236
|
Purpose: Better choice for combining data from separate/independent branches
|
|
240
237
|
Use for: Adding fields from different sources, preserving binary while adding processed data
|
|
241
|
-
Configuration: Set common fields and use "Include Other Input Fields"
|
|
238
|
+
Configuration: Set common fields and use "Include Other Input Fields" ON to preserve existing data including binary
|
|
242
239
|
|
|
243
240
|
**Execute Workflow Trigger (n8n-nodes-base.executeWorkflowTrigger)**
|
|
244
241
|
Purpose: Start point for sub-workflows that are called by other workflows
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"document-processing.js","sourceRoot":"","sources":["../../../src/tools/best-practices/document-processing.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,+BAA+B;IAClC,SAAS,GAAG,kCAAiB,CAAC,mBAAmB,CAAC;IAClD,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG
|
|
1
|
+
{"version":3,"file":"document-processing.js","sourceRoot":"","sources":["../../../src/tools/best-practices/document-processing.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,+BAA+B;IAClC,SAAS,GAAG,kCAAiB,CAAC,mBAAmB,CAAC;IAClD,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;2DA8TyB,CAAC;IAE3D,gBAAgB;QACf,OAAO,IAAI,CAAC,aAAa,CAAC;IAC3B,CAAC;CACD;AAvUD,0EAuUC"}
|
|
@@ -5,23 +5,27 @@ const categorization_1 = require("../../types/categorization");
|
|
|
5
5
|
const chatbot_1 = require("./chatbot");
|
|
6
6
|
const content_generation_1 = require("./content-generation");
|
|
7
7
|
const data_extraction_1 = require("./data-extraction");
|
|
8
|
+
const data_persistence_1 = require("./data-persistence");
|
|
8
9
|
const data_transformation_1 = require("./data-transformation");
|
|
9
10
|
const document_processing_1 = require("./document-processing");
|
|
10
11
|
const form_input_1 = require("./form-input");
|
|
12
|
+
const notification_1 = require("./notification");
|
|
11
13
|
const scraping_and_research_1 = require("./scraping-and-research");
|
|
14
|
+
const triage_1 = require("./triage");
|
|
12
15
|
exports.documentation = {
|
|
13
16
|
[categorization_1.WorkflowTechnique.SCRAPING_AND_RESEARCH]: new scraping_and_research_1.ScrapingAndResearchBestPractices(),
|
|
14
17
|
[categorization_1.WorkflowTechnique.CHATBOT]: new chatbot_1.ChatbotBestPractices(),
|
|
15
18
|
[categorization_1.WorkflowTechnique.CONTENT_GENERATION]: new content_generation_1.ContentGenerationBestPractices(),
|
|
16
19
|
[categorization_1.WorkflowTechnique.DATA_ANALYSIS]: undefined,
|
|
17
20
|
[categorization_1.WorkflowTechnique.DATA_EXTRACTION]: new data_extraction_1.DataExtractionBestPractices(),
|
|
21
|
+
[categorization_1.WorkflowTechnique.DATA_PERSISTENCE]: new data_persistence_1.DataPersistenceBestPractices(),
|
|
18
22
|
[categorization_1.WorkflowTechnique.DATA_TRANSFORMATION]: new data_transformation_1.DataTransformationBestPractices(),
|
|
19
23
|
[categorization_1.WorkflowTechnique.DOCUMENT_PROCESSING]: new document_processing_1.DocumentProcessingBestPractices(),
|
|
20
24
|
[categorization_1.WorkflowTechnique.ENRICHMENT]: undefined,
|
|
21
25
|
[categorization_1.WorkflowTechnique.FORM_INPUT]: new form_input_1.FormInputBestPractices(),
|
|
22
26
|
[categorization_1.WorkflowTechnique.KNOWLEDGE_BASE]: undefined,
|
|
23
|
-
[categorization_1.WorkflowTechnique.NOTIFICATION]:
|
|
24
|
-
[categorization_1.WorkflowTechnique.TRIAGE]:
|
|
27
|
+
[categorization_1.WorkflowTechnique.NOTIFICATION]: new notification_1.NotificationBestPractices(),
|
|
28
|
+
[categorization_1.WorkflowTechnique.TRIAGE]: new triage_1.TriageBestPractices(),
|
|
25
29
|
[categorization_1.WorkflowTechnique.HUMAN_IN_THE_LOOP]: undefined,
|
|
26
30
|
[categorization_1.WorkflowTechnique.MONITORING]: undefined,
|
|
27
31
|
[categorization_1.WorkflowTechnique.SCHEDULING]: undefined,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/tools/best-practices/index.ts"],"names":[],"mappings":";;;AACA,2DAAuF;AAEvF,uCAAiD;AACjD,6DAAsE;AAEtE,uDAAgE;AAChE,+DAAwE;AACxE,+DAAwE;AAExE,6CAAsD;
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/tools/best-practices/index.ts"],"names":[],"mappings":";;;AACA,2DAAuF;AAEvF,uCAAiD;AACjD,6DAAsE;AAEtE,uDAAgE;AAChE,yDAAkE;AAClE,+DAAwE;AACxE,+DAAwE;AAExE,6CAAsD;AAItD,iDAA2D;AAC3D,mEAA2E;AAE3E,qCAA+C;AAElC,QAAA,aAAa,GAAqE;IAC9F,CAAC,kCAAiB,CAAC,qBAAqB,CAAC,EAAE,IAAI,wDAAgC,EAAE;IACjF,CAAC,kCAAiB,CAAC,OAAO,CAAC,EAAE,IAAI,8BAAoB,EAAE;IACvD,CAAC,kCAAiB,CAAC,kBAAkB,CAAC,EAAE,IAAI,mDAA8B,EAAE;IAC5E,CAAC,kCAAiB,CAAC,aAAa,CAAC,EAAE,SAAS;IAC5C,CAAC,kCAAiB,CAAC,eAAe,CAAC,EAAE,IAAI,6CAA2B,EAAE;IACtE,CAAC,kCAAiB,CAAC,gBAAgB,CAAC,EAAE,IAAI,+CAA4B,EAAE;IACxE,CAAC,kCAAiB,CAAC,mBAAmB,CAAC,EAAE,IAAI,qDAA+B,EAAE;IAC9E,CAAC,kCAAiB,CAAC,mBAAmB,CAAC,EAAE,IAAI,qDAA+B,EAAE;IAC9E,CAAC,kCAAiB,CAAC,UAAU,CAAC,EAAE,SAAS;IACzC,CAAC,kCAAiB,CAAC,UAAU,CAAC,EAAE,IAAI,mCAAsB,EAAE;IAC5D,CAAC,kCAAiB,CAAC,cAAc,CAAC,EAAE,SAAS;IAC7C,CAAC,kCAAiB,CAAC,YAAY,CAAC,EAAE,IAAI,wCAAyB,EAAE;IACjE,CAAC,kCAAiB,CAAC,MAAM,CAAC,EAAE,IAAI,4BAAmB,EAAE;IACrD,CAAC,kCAAiB,CAAC,iBAAiB,CAAC,EAAE,SAAS;IAChD,CAAC,kCAAiB,CAAC,UAAU,CAAC,EAAE,SAAS;IACzC,CAAC,kCAAiB,CAAC,UAAU,CAAC,EAAE,SAAS;CACzC,CAAC"}
|
|
@@ -9,45 +9,50 @@ class NotificationBestPractices {
|
|
|
9
9
|
|
|
10
10
|
## Workflow Design
|
|
11
11
|
|
|
12
|
-
Structure notification workflows in a clear sequence
|
|
12
|
+
Structure notification workflows in a clear sequence. Keep each part modular with nodes dedicated to specific purposes.
|
|
13
13
|
|
|
14
|
-
|
|
14
|
+
\`\`\`mermaid
|
|
15
|
+
graph LR
|
|
16
|
+
A[Trigger] --> B[Data Retrieval/Processing]
|
|
17
|
+
B --> C[Condition Check]
|
|
18
|
+
C --> D[Notification Action]
|
|
19
|
+
D --> E[Post-Notification: logging/tracking]
|
|
20
|
+
\`\`\`
|
|
21
|
+
|
|
22
|
+
Choose between event-based triggers (webhooks, form submissions, CRM events) for immediate notifications or scheduled triggers (Cron) for periodic condition monitoring.
|
|
15
23
|
|
|
16
24
|
CRITICAL: Multi-channel notifications should branch from a single condition check to multiple notification nodes in parallel, not duplicate the entire workflow. This enables easy extension and maintenance.
|
|
17
25
|
|
|
18
26
|
Example pattern:
|
|
19
|
-
|
|
20
|
-
|
|
27
|
+
\`\`\`mermaid
|
|
28
|
+
graph LR
|
|
29
|
+
A[Webhook/Schedule Trigger] --> B[Fetch Data]
|
|
30
|
+
B --> C[If: threshold exceeded]
|
|
31
|
+
C -->|true| D[Email]
|
|
32
|
+
C -->|true| E[Slack]
|
|
33
|
+
C -->|true| F[SMS]
|
|
34
|
+
C -->|false| G[End/Log]
|
|
35
|
+
\`\`\`
|
|
36
|
+
Result: Single workflow handles all channels efficiently with consistent logic
|
|
21
37
|
|
|
22
38
|
## Condition Logic & Filtering
|
|
23
39
|
|
|
24
|
-
Use IF nodes for simple
|
|
40
|
+
Use IF nodes for simple checks without code. For complex conditions (multiple fields, array filtering), use Code nodes to script the logic and filter items that need alerts.
|
|
25
41
|
|
|
26
42
|
Always include empty notification prevention - check that alert-worthy items exist (items.length > 0) before proceeding to notification nodes. Route the false branch to end the workflow or log "no alert needed".
|
|
27
43
|
|
|
28
|
-
Store threshold values in environment variables or external sources (Google Sheets, database) rather than hardcoding. This enables adjustment without workflow modification.
|
|
29
|
-
|
|
30
44
|
## Message Construction
|
|
31
45
|
|
|
32
|
-
Use expressions to inject dynamic data into messages. The expression \`{{ $json.fieldName }}\` pulls data from input items.
|
|
46
|
+
Use expressions to inject dynamic data into messages. The expression \`{{ $json.fieldName }}\` pulls data from input items.
|
|
33
47
|
|
|
34
48
|
Format messages appropriately for each channel:
|
|
35
49
|
- Email: Support HTML or plain text, use clear subject lines
|
|
36
50
|
- Slack: Use markdown-like formatting, \\n for newlines
|
|
37
51
|
- SMS: Keep concise due to character limits, plain text only
|
|
38
52
|
|
|
39
|
-
## Authentication & Permissions
|
|
40
|
-
|
|
41
|
-
Configure proper credentials for each service:
|
|
42
|
-
- Email: SMTP settings with correct host/port/auth (use App Passwords for Gmail)
|
|
43
|
-
- Slack: Bot token with chat:write scope, bot must be invited to target channel
|
|
44
|
-
- SMS (Twilio): Account SID, Auth Token, verified phone numbers
|
|
45
|
-
|
|
46
|
-
Store sensitive information in n8n credentials system or environment variables, never hardcode in workflows.
|
|
47
|
-
|
|
48
53
|
## Alert Management
|
|
49
54
|
|
|
50
|
-
|
|
55
|
+
Consider alert aggregation - send one message listing multiple items rather than individual alerts.
|
|
51
56
|
|
|
52
57
|
Add logging nodes to track sent notifications for audit trails and duplicate prevention. Consider using error handling paths with Continue on Fail settings for redundancy.
|
|
53
58
|
|
|
@@ -55,20 +60,25 @@ Add logging nodes to track sent notifications for audit trails and duplicate pre
|
|
|
55
60
|
|
|
56
61
|
### Trigger Nodes
|
|
57
62
|
|
|
63
|
+
**Service-specific triggers** (e.g., n8n-nodes-base.googleSheetsTrigger, n8n-nodes-base.crmTrigger):
|
|
64
|
+
- Purpose: Direct integration with specific services for event-based notifications
|
|
65
|
+
- Use cases: New row in Google Sheets, CRM record updates
|
|
66
|
+
- When to use: When specific trigger node is available
|
|
67
|
+
|
|
58
68
|
**Webhook** (n8n-nodes-base.webhook):
|
|
59
69
|
- Purpose: Event-based notifications triggered by external systems
|
|
60
70
|
- Use cases: Form submissions, CRM events, API webhooks
|
|
61
|
-
-
|
|
71
|
+
- When to use: When there is no dedicated trigger node and external service supports webhooks
|
|
62
72
|
|
|
63
73
|
**Schedule Trigger** (n8n-nodes-base.scheduleTrigger):
|
|
64
74
|
- Purpose: Periodic monitoring and batch notifications
|
|
65
75
|
- Use cases: Daily reports, threshold monitoring, scheduled alerts
|
|
66
|
-
-
|
|
76
|
+
- When to use: For regular checks rather than immediate alerts, or as a polling mechanism when webhooks are not available
|
|
67
77
|
|
|
68
|
-
**
|
|
69
|
-
- Purpose:
|
|
70
|
-
- Use cases:
|
|
71
|
-
-
|
|
78
|
+
**Form Trigger** (n8n-nodes-base.formTrigger):
|
|
79
|
+
- Purpose: User-submitted data triggering notifications
|
|
80
|
+
- Use cases: Contact forms, feedback submissions
|
|
81
|
+
- When to use: For workflows initiated by user input via forms
|
|
72
82
|
|
|
73
83
|
### Notification Nodes
|
|
74
84
|
|
|
@@ -99,13 +109,12 @@ Add logging nodes to track sent notifications for audit trails and duplicate pre
|
|
|
99
109
|
**HTTP Request** (n8n-nodes-base.httpRequest):
|
|
100
110
|
- Purpose: Custom webhooks (Microsoft Teams, Discord, custom APIs)
|
|
101
111
|
- Use cases: Integration with services without dedicated nodes
|
|
102
|
-
- Best practice: Test webhook URLs before production
|
|
103
112
|
|
|
104
113
|
### Logic & Processing
|
|
105
114
|
|
|
106
115
|
**IF** (n8n-nodes-base.if):
|
|
107
116
|
- Purpose: Simple threshold checks and condition routing
|
|
108
|
-
- Use cases: Check if
|
|
117
|
+
- Use cases: Check if notification criteria met
|
|
109
118
|
- Best practice: Include empty notification prevention (items.length > 0)
|
|
110
119
|
|
|
111
120
|
**Switch** (n8n-nodes-base.switch):
|
|
@@ -113,113 +122,10 @@ Add logging nodes to track sent notifications for audit trails and duplicate pre
|
|
|
113
122
|
- Use cases: Different channels for different alert levels
|
|
114
123
|
- Best practice: Always define Default case for unexpected values
|
|
115
124
|
|
|
116
|
-
**Function** (n8n-nodes-base.function):
|
|
117
|
-
- Purpose: Complex filtering and data transformation
|
|
118
|
-
- Use cases: Array filtering, complex conditions, message formatting
|
|
119
|
-
- Best practice: Keep logic focused and well-documented
|
|
120
|
-
|
|
121
|
-
**Merge** (n8n-nodes-base.merge):
|
|
122
|
-
- Purpose: Combine parallel notification branches
|
|
123
|
-
- Use cases: Track all notification attempts, consolidate logs
|
|
124
|
-
- Best practice: Use after parallel notification nodes
|
|
125
|
-
|
|
126
|
-
### Data Sources
|
|
127
|
-
|
|
128
|
-
**Database Nodes**:
|
|
129
|
-
- Postgres (n8n-nodes-base.postgres)
|
|
130
|
-
- MySQL (n8n-nodes-base.mySql)
|
|
131
|
-
- MongoDB (n8n-nodes-base.mongoDb)
|
|
132
|
-
|
|
133
|
-
Purpose: Fetch metrics, thresholds, and historical data
|
|
134
|
-
Best practice: Use queries with proper indexing for performance
|
|
135
|
-
|
|
136
|
-
**Google Sheets** (n8n-nodes-base.googleSheets):
|
|
137
|
-
- Purpose: Configuration storage and logging
|
|
138
|
-
- Use cases: Store thresholds, log notifications, track cooldowns
|
|
139
|
-
- Best practice: Use for non-critical configurations that need easy updates
|
|
140
|
-
|
|
141
|
-
**HTTP Request** (n8n-nodes-base.httpRequest):
|
|
142
|
-
- Purpose: API data retrieval
|
|
143
|
-
- Use cases: Fetch metrics from monitoring APIs, get user preferences
|
|
144
|
-
- Best practice: Handle API errors gracefully
|
|
145
|
-
|
|
146
|
-
### Utility Nodes
|
|
147
|
-
|
|
148
125
|
**Set** (n8n-nodes-base.set):
|
|
149
126
|
- Purpose: Prepare alert messages and structure data
|
|
150
127
|
- Use cases: Format notification content, add metadata
|
|
151
128
|
- Best practice: Use to centralize message construction logic
|
|
152
|
-
|
|
153
|
-
**Wait** (n8n-nodes-base.wait):
|
|
154
|
-
- Purpose: Delays between notifications
|
|
155
|
-
- Use cases: Rate limiting, cooldown periods, retry logic
|
|
156
|
-
- Best practice: Use for preventing notification floods
|
|
157
|
-
|
|
158
|
-
**Split In Batches** (n8n-nodes-base.splitInBatches):
|
|
159
|
-
- Purpose: Handle large datasets without overwhelming recipients
|
|
160
|
-
- Use cases: Bulk notifications with rate limiting
|
|
161
|
-
- Best practice: Combine with Wait node for controlled sending
|
|
162
|
-
|
|
163
|
-
## Common Pitfalls to Avoid
|
|
164
|
-
|
|
165
|
-
### Authentication Failures
|
|
166
|
-
**Problem**: Invalid or expired credentials are the most common cause of failed notifications.
|
|
167
|
-
|
|
168
|
-
**Solution**:
|
|
169
|
-
- Regularly verify API keys, OAuth tokens, and SMTP passwords
|
|
170
|
-
- Ensure bots have proper permissions (Slack bots need channel membership)
|
|
171
|
-
- Use n8n credentials system, never hardcode sensitive data
|
|
172
|
-
- Test authentication in isolation before deploying
|
|
173
|
-
|
|
174
|
-
### Notification Floods
|
|
175
|
-
**Problem**: Without proper controls, a threshold breach can trigger hundreds of identical alerts.
|
|
176
|
-
|
|
177
|
-
**Solution**:
|
|
178
|
-
- Implement cooldown periods using Wait node or tracking last alert time
|
|
179
|
-
- Use alert aggregation - send one message listing multiple items
|
|
180
|
-
- Use deduplication logic to prevent identical alerts
|
|
181
|
-
- Consider exponential backoff for repeated alerts
|
|
182
|
-
- Store last notification timestamp in database/sheets
|
|
183
|
-
|
|
184
|
-
### Incorrect Channel Configuration
|
|
185
|
-
**Problem**: Notifications fail due to misconfigured channels.
|
|
186
|
-
|
|
187
|
-
**Solution**:
|
|
188
|
-
- Slack requires channel IDs (starting with C) not names
|
|
189
|
-
- Email requires verified sender addresses
|
|
190
|
-
- SMS needs international format (+1234567890)
|
|
191
|
-
- Test each channel with sample data before production
|
|
192
|
-
- Validate configuration in node settings
|
|
193
|
-
|
|
194
|
-
### Data Type Mismatches
|
|
195
|
-
**Problem**: String-to-number comparisons fail silently ("5" > "10" is lexicographically true).
|
|
196
|
-
|
|
197
|
-
**Solution**:
|
|
198
|
-
- Always convert data types before comparisons
|
|
199
|
-
- Use Number() or parseInt() for numeric comparisons
|
|
200
|
-
- Escape special characters in messages to prevent formatting breaks
|
|
201
|
-
- Validate data types early in the workflow
|
|
202
|
-
|
|
203
|
-
### Missing Error Handling
|
|
204
|
-
**Problem**: A single failed notification can stop the entire workflow.
|
|
205
|
-
|
|
206
|
-
**Solution**:
|
|
207
|
-
- Configure error workflows using Error Trigger node
|
|
208
|
-
- Use "Continue on Fail" setting for redundancy
|
|
209
|
-
- Implement fallback channels (if Slack fails, send email)
|
|
210
|
-
- Log failed notification attempts for debugging
|
|
211
|
-
- Add retry logic with exponential backoff
|
|
212
|
-
|
|
213
|
-
### Rate Limit Violations
|
|
214
|
-
**Problem**: External services have posting limits that can block notifications.
|
|
215
|
-
|
|
216
|
-
**Solution**:
|
|
217
|
-
- Add delays between bulk sends using Wait node
|
|
218
|
-
- Monitor API quotas and adjust trigger frequency
|
|
219
|
-
- Use BCC for bulk emails when appropriate
|
|
220
|
-
- Implement batch processing with Split In Batches
|
|
221
|
-
- Check service documentation for rate limits
|
|
222
|
-
- Use webhook aggregation where possible
|
|
223
129
|
`;
|
|
224
130
|
getDocumentation() {
|
|
225
131
|
return this.documentation;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"notification.js","sourceRoot":"","sources":["../../../src/tools/best-practices/notification.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,yBAAyB;IAC5B,SAAS,GAAG,kCAAiB,CAAC,YAAY,CAAC;IAC3C,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG
|
|
1
|
+
{"version":3,"file":"notification.js","sourceRoot":"","sources":["../../../src/tools/best-practices/notification.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,yBAAyB;IAC5B,SAAS,GAAG,kCAAiB,CAAC,YAAY,CAAC;IAC3C,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAyHjC,CAAC;IAED,gBAAgB;QACf,OAAO,IAAI,CAAC,aAAa,CAAC;IAC3B,CAAC;CACD;AAlID,8DAkIC"}
|
|
@@ -34,10 +34,7 @@ Purpose: Fetches web pages or API data for scraping and research workflows
|
|
|
34
34
|
Pitfalls:
|
|
35
35
|
|
|
36
36
|
- Depending on the data that the user wishes to scrape/research, it may be against the terms of service to attempt to
|
|
37
|
-
fetch it from the site directly. Using scraping nodes is the best way to get around this
|
|
38
|
-
|
|
39
|
-
Pitfalls:
|
|
40
|
-
|
|
37
|
+
fetch it from the site directly. Using scraping nodes is the best way to get around this
|
|
41
38
|
- Double-check URL formatting, query parameters, and ensure all required fields are present to avoid bad request errors
|
|
42
39
|
- Be aware of 429 rate limiting errors when the service receives too many requests - implement batching or use "Retry on
|
|
43
40
|
Fail" feature
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"scraping-and-research.js","sourceRoot":"","sources":["../../../src/tools/best-practices/scraping-and-research.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,gCAAgC;IACnC,SAAS,GAAG,kCAAiB,CAAC,qBAAqB,CAAC;IACpD,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG
|
|
1
|
+
{"version":3,"file":"scraping-and-research.js","sourceRoot":"","sources":["../../../src/tools/best-practices/scraping-and-research.ts"],"names":[],"mappings":";;;AACA,2DAA2D;AAE3D,MAAa,gCAAgC;IACnC,SAAS,GAAG,kCAAiB,CAAC,qBAAqB,CAAC;IACpD,OAAO,GAAG,OAAO,CAAC;IAEV,aAAa,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgJjC,CAAC;IAED,gBAAgB;QACf,OAAO,IAAI,CAAC,aAAa,CAAC;IAC3B,CAAC;CACD;AAzJD,4EAyJC"}
|