agentic-qe 2.5.6 → 2.5.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/n8n/n8n-base-agent.md +376 -0
- package/.claude/agents/n8n/n8n-bdd-scenario-tester.md +613 -0
- package/.claude/agents/n8n/n8n-chaos-tester.md +654 -0
- package/.claude/agents/n8n/n8n-ci-orchestrator.md +850 -0
- package/.claude/agents/n8n/n8n-compliance-validator.md +685 -0
- package/.claude/agents/n8n/n8n-expression-validator.md +560 -0
- package/.claude/agents/n8n/n8n-integration-test.md +602 -0
- package/.claude/agents/n8n/n8n-monitoring-validator.md +589 -0
- package/.claude/agents/n8n/n8n-node-validator.md +455 -0
- package/.claude/agents/n8n/n8n-performance-tester.md +630 -0
- package/.claude/agents/n8n/n8n-security-auditor.md +786 -0
- package/.claude/agents/n8n/n8n-trigger-test.md +500 -0
- package/.claude/agents/n8n/n8n-unit-tester.md +633 -0
- package/.claude/agents/n8n/n8n-version-comparator.md +567 -0
- package/.claude/agents/n8n/n8n-workflow-executor.md +392 -0
- package/.claude/skills/n8n-expression-testing/SKILL.md +434 -0
- package/.claude/skills/n8n-integration-testing-patterns/SKILL.md +540 -0
- package/.claude/skills/n8n-security-testing/SKILL.md +599 -0
- package/.claude/skills/n8n-trigger-testing-strategies/SKILL.md +541 -0
- package/.claude/skills/n8n-workflow-testing-fundamentals/SKILL.md +447 -0
- package/CHANGELOG.md +127 -0
- package/README.md +7 -4
- package/dist/agents/BaseAgent.d.ts +142 -0
- package/dist/agents/BaseAgent.d.ts.map +1 -1
- package/dist/agents/BaseAgent.js +372 -2
- package/dist/agents/BaseAgent.js.map +1 -1
- package/dist/agents/TestGeneratorAgent.d.ts +5 -0
- package/dist/agents/TestGeneratorAgent.d.ts.map +1 -1
- package/dist/agents/TestGeneratorAgent.js +38 -0
- package/dist/agents/TestGeneratorAgent.js.map +1 -1
- package/dist/agents/index.d.ts +1 -1
- package/dist/agents/index.d.ts.map +1 -1
- package/dist/agents/index.js.map +1 -1
- package/dist/agents/n8n/N8nAPIClient.d.ts +121 -0
- package/dist/agents/n8n/N8nAPIClient.d.ts.map +1 -0
- package/dist/agents/n8n/N8nAPIClient.js +367 -0
- package/dist/agents/n8n/N8nAPIClient.js.map +1 -0
- package/dist/agents/n8n/N8nAuditPersistence.d.ts +120 -0
- package/dist/agents/n8n/N8nAuditPersistence.d.ts.map +1 -0
- package/dist/agents/n8n/N8nAuditPersistence.js +473 -0
- package/dist/agents/n8n/N8nAuditPersistence.js.map +1 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.d.ts +159 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.js +697 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nBaseAgent.d.ts +126 -0
- package/dist/agents/n8n/N8nBaseAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nBaseAgent.js +446 -0
- package/dist/agents/n8n/N8nBaseAgent.js.map +1 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.d.ts +164 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.js +610 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.d.ts +205 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.js +729 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.d.ts +228 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.js +986 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nContractTesterAgent.d.ts +213 -0
- package/dist/agents/n8n/N8nContractTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nContractTesterAgent.js +989 -0
- package/dist/agents/n8n/N8nContractTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.d.ts +99 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.js +632 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.d.ts +238 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.js +956 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.d.ts +242 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.js +992 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.d.ts +104 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.js +653 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.js.map +1 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.d.ts +210 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.js +669 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.d.ts +142 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.js +1090 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.d.ts +198 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.js +653 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.d.ts +245 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.js +952 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.d.ts +325 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.js +1187 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.d.ts +91 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.js +825 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nTestHarness.d.ts +131 -0
- package/dist/agents/n8n/N8nTestHarness.d.ts.map +1 -0
- package/dist/agents/n8n/N8nTestHarness.js +456 -0
- package/dist/agents/n8n/N8nTestHarness.js.map +1 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.d.ts +119 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.js +652 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.js.map +1 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.d.ts +130 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.js +522 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.d.ts +201 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.js +645 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.d.ts +120 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.js +347 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.js.map +1 -0
- package/dist/agents/n8n/index.d.ts +119 -0
- package/dist/agents/n8n/index.d.ts.map +1 -0
- package/dist/agents/n8n/index.js +298 -0
- package/dist/agents/n8n/index.js.map +1 -0
- package/dist/agents/n8n/types.d.ts +486 -0
- package/dist/agents/n8n/types.d.ts.map +1 -0
- package/dist/agents/n8n/types.js +8 -0
- package/dist/agents/n8n/types.js.map +1 -0
- package/dist/cli/init/agents.d.ts.map +1 -1
- package/dist/cli/init/agents.js +29 -0
- package/dist/cli/init/agents.js.map +1 -1
- package/dist/cli/init/skills.d.ts.map +1 -1
- package/dist/cli/init/skills.js +7 -1
- package/dist/cli/init/skills.js.map +1 -1
- package/dist/core/memory/HNSWVectorMemory.js +1 -1
- package/dist/core/memory/RuVectorPatternStore.d.ts +90 -0
- package/dist/core/memory/RuVectorPatternStore.d.ts.map +1 -1
- package/dist/core/memory/RuVectorPatternStore.js +209 -0
- package/dist/core/memory/RuVectorPatternStore.js.map +1 -1
- package/dist/learning/FederatedManager.d.ts +232 -0
- package/dist/learning/FederatedManager.d.ts.map +1 -0
- package/dist/learning/FederatedManager.js +489 -0
- package/dist/learning/FederatedManager.js.map +1 -0
- package/dist/learning/HNSWPatternAdapter.d.ts +117 -0
- package/dist/learning/HNSWPatternAdapter.d.ts.map +1 -0
- package/dist/learning/HNSWPatternAdapter.js +262 -0
- package/dist/learning/HNSWPatternAdapter.js.map +1 -0
- package/dist/learning/LearningEngine.d.ts +27 -0
- package/dist/learning/LearningEngine.d.ts.map +1 -1
- package/dist/learning/LearningEngine.js +75 -1
- package/dist/learning/LearningEngine.js.map +1 -1
- package/dist/learning/PatternCurator.d.ts +217 -0
- package/dist/learning/PatternCurator.d.ts.map +1 -0
- package/dist/learning/PatternCurator.js +393 -0
- package/dist/learning/PatternCurator.js.map +1 -0
- package/dist/learning/index.d.ts +6 -0
- package/dist/learning/index.d.ts.map +1 -1
- package/dist/learning/index.js +16 -1
- package/dist/learning/index.js.map +1 -1
- package/dist/learning/types.d.ts +4 -0
- package/dist/learning/types.d.ts.map +1 -1
- package/dist/mcp/server-instructions.d.ts +1 -1
- package/dist/mcp/server-instructions.js +1 -1
- package/dist/memory/HNSWPatternStore.d.ts +176 -0
- package/dist/memory/HNSWPatternStore.d.ts.map +1 -0
- package/dist/memory/HNSWPatternStore.js +392 -0
- package/dist/memory/HNSWPatternStore.js.map +1 -0
- package/dist/memory/index.d.ts +8 -0
- package/dist/memory/index.d.ts.map +1 -0
- package/dist/memory/index.js +13 -0
- package/dist/memory/index.js.map +1 -0
- package/dist/providers/HybridRouter.d.ts +85 -4
- package/dist/providers/HybridRouter.d.ts.map +1 -1
- package/dist/providers/HybridRouter.js +332 -10
- package/dist/providers/HybridRouter.js.map +1 -1
- package/dist/providers/LLMBaselineTracker.d.ts +120 -0
- package/dist/providers/LLMBaselineTracker.d.ts.map +1 -0
- package/dist/providers/LLMBaselineTracker.js +305 -0
- package/dist/providers/LLMBaselineTracker.js.map +1 -0
- package/dist/providers/OpenRouterProvider.d.ts +26 -0
- package/dist/providers/OpenRouterProvider.d.ts.map +1 -1
- package/dist/providers/OpenRouterProvider.js +75 -6
- package/dist/providers/OpenRouterProvider.js.map +1 -1
- package/dist/providers/RuVectorClient.d.ts +259 -0
- package/dist/providers/RuVectorClient.d.ts.map +1 -0
- package/dist/providers/RuVectorClient.js +416 -0
- package/dist/providers/RuVectorClient.js.map +1 -0
- package/dist/providers/RuvllmPatternCurator.d.ts +116 -0
- package/dist/providers/RuvllmPatternCurator.d.ts.map +1 -0
- package/dist/providers/RuvllmPatternCurator.js +323 -0
- package/dist/providers/RuvllmPatternCurator.js.map +1 -0
- package/dist/providers/RuvllmProvider.d.ts +233 -1
- package/dist/providers/RuvllmProvider.d.ts.map +1 -1
- package/dist/providers/RuvllmProvider.js +781 -11
- package/dist/providers/RuvllmProvider.js.map +1 -1
- package/dist/providers/index.d.ts +5 -1
- package/dist/providers/index.d.ts.map +1 -1
- package/dist/providers/index.js +12 -2
- package/dist/providers/index.js.map +1 -1
- package/dist/utils/ruvllm-loader.d.ts +98 -1
- package/dist/utils/ruvllm-loader.d.ts.map +1 -1
- package/dist/utils/ruvllm-loader.js.map +1 -1
- package/docs/reference/agents.md +91 -2
- package/docs/reference/skills.md +97 -2
- package/package.json +2 -2
package/dist/agents/n8n/N8nIdempotencyTesterAgent.js
@@ -0,0 +1,992 @@
"use strict";
/**
 * N8n Idempotency Tester Agent
 *
 * Tests workflows for idempotency, concurrency safety, and duplicate handling:
 * - Duplicate trigger detection and handling
 * - Parallel execution safety analysis
 * - Race condition detection patterns
 * - Locking/mutex pattern analysis
 * - Deduplication key validation
 * - Concurrent webhook handling
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.N8nIdempotencyTesterAgent = void 0;
const N8nBaseAgent_1 = require("./N8nBaseAgent");
const N8nTestHarness_1 = require("./N8nTestHarness");
// Non-idempotent operation patterns
const NON_IDEMPOTENT_PATTERNS = {
    // Counter operations
    'increment': {
        reason: 'Counter increments are not idempotent - repeated calls increase value',
        suggestion: 'Use SET with absolute value or implement idempotency key tracking',
    },
    'decrement': {
        reason: 'Counter decrements are not idempotent - repeated calls decrease value',
        suggestion: 'Use SET with absolute value or track processed requests',
    },
    // Append operations
    'append': {
        reason: 'Append operations add duplicate data on retry',
        suggestion: 'Use UPSERT with unique key or check existence before append',
    },
    'push': {
        reason: 'Push to array creates duplicates on retry',
        suggestion: 'Use SET with deduplication or addToSet operation',
    },
    // Insert operations
    'insert': {
        reason: 'INSERT creates duplicate records on retry without unique constraint',
        suggestion: 'Use UPSERT/INSERT...ON CONFLICT or add idempotency key',
    },
    'create': {
        reason: 'CREATE operations may fail or duplicate on retry',
        suggestion: 'Use UPSERT pattern or implement existence check',
    },
    // Send operations
    'sendEmail': {
        reason: 'Email sends are not idempotent - duplicates annoy recipients',
        suggestion: 'Track sent message IDs or use email service idempotency keys',
    },
    'sendMessage': {
        reason: 'Message sends create duplicates on retry',
        suggestion: 'Use message deduplication ID or track sent messages',
    },
    'sendNotification': {
        reason: 'Notifications duplicate on retry',
        suggestion: 'Implement notification deduplication by content hash or ID',
    },
    // Payment operations
    'charge': {
        reason: 'Payment charges are critically non-idempotent - double charges',
        suggestion: 'ALWAYS use payment provider idempotency key',
    },
    'transfer': {
        reason: 'Money transfers must be idempotent to prevent double-transfers',
        suggestion: 'Use transfer ID as idempotency key with payment provider',
    },
    // Queue operations
    'publish': {
        reason: 'Message publishing may create duplicates',
        suggestion: 'Use message deduplication ID in queue configuration',
    },
    'enqueue': {
        reason: 'Queue operations may duplicate messages',
        suggestion: 'Implement message-level deduplication',
    },
};
// Operations that typically support idempotent patterns
const IDEMPOTENT_OPERATIONS = [
    'get', 'read', 'fetch', 'lookup', 'search', 'query', 'list',
    'set', 'put', 'update', 'upsert', 'replace', 'patch',
    'delete', 'remove', // Delete by ID is idempotent
];
// Node types with high concurrency risk
const HIGH_CONCURRENCY_RISK_NODES = [
    'n8n-nodes-base.postgres',
    'n8n-nodes-base.mysql',
    'n8n-nodes-base.mongodb',
    'n8n-nodes-base.redis',
    'n8n-nodes-base.googleSheets',
    'n8n-nodes-base.airtable',
    'n8n-nodes-base.notion',
    'n8n-nodes-base.stripe',
    'n8n-nodes-base.shopify',
];
/**
 * N8n Idempotency Tester Agent
 *
 * Analyzes workflows for idempotency issues, concurrency risks,
 * and duplicate handling patterns.
 */
class N8nIdempotencyTesterAgent extends N8nBaseAgent_1.N8nBaseAgent {
    constructor(config) {
        const capabilities = [
            {
                name: 'idempotency-testing',
                version: '1.0.0',
                description: 'Test workflow idempotency and duplicate handling',
                parameters: {},
            },
            {
                name: 'concurrency-analysis',
                version: '1.0.0',
                description: 'Analyze concurrency risks and race conditions',
                parameters: {},
            },
            {
                name: 'duplicate-detection',
                version: '1.0.0',
                description: 'Detect duplicate trigger handling patterns',
                parameters: {},
            },
            {
                name: 'locking-pattern-analysis',
                version: '1.0.0',
                description: 'Analyze locking and mutex patterns',
                parameters: {},
            },
            {
                name: 'concurrent-execution-testing',
                version: '1.0.0',
                description: 'Execute workflow concurrently to verify idempotency',
                parameters: {},
            },
        ];
        super({
            ...config,
            type: 'n8n-idempotency-tester',
            capabilities: [...capabilities, ...(config.capabilities || [])],
        });
    }
    async performTask(task) {
        const idempotencyTask = task;
        if (idempotencyTask.type !== 'idempotency-test') {
            throw new Error(`Unsupported task type: ${idempotencyTask.type}`);
        }
        return this.testIdempotency(idempotencyTask.target, idempotencyTask.workflow, idempotencyTask.options);
    }
    /**
     * Run idempotency tests on a workflow
     */
    async testIdempotency(workflowId, providedWorkflow, options) {
        const startTime = Date.now();
        // Get workflow
        let workflow;
        if (providedWorkflow) {
            workflow = providedWorkflow;
        }
        else {
            workflow = await this.getWorkflow(workflowId);
        }
        const opts = options || {};
        // Run all analyses
        const dedupKeyAnalysis = opts.checkDedupKeys !== false
            ? this.analyzeDedupKeys(workflow)
            : [];
        const concurrencyRisks = opts.analyzeConcurrency !== false
            ? this.analyzeConcurrencyRisks(workflow)
            : [];
        const parallelExecution = opts.checkParallelPaths !== false
            ? this.analyzeParallelExecution(workflow)
            : { hasParallelPaths: false, parallelBranches: [], sharedStateRisks: [], recommendations: [] };
        const webhookDuplicates = opts.analyzeWebhooks !== false
            ? this.analyzeWebhookDuplicates(workflow)
            : [];
        const nonIdempotentOperations = this.findNonIdempotentOperations(workflow);
        const lockingPatterns = opts.checkLocking !== false
            ? this.analyzeLockingPatterns(workflow)
            : { hasLocking: false, lockNodes: [], lockScope: 'none', recommendations: [] };
        // NEW: Run concurrent execution test if requested
        let concurrentTestResult;
        if (opts.runConcurrentTest) {
            concurrentTestResult = await this.runConcurrentExecutionTest(workflowId, opts.concurrency || 3, opts.testInput, opts.assertIdenticalOutput);
            // Update concurrency risks based on actual test results
            if (concurrentTestResult.sideEffectAnalysis.duplicatesCreated) {
                concurrencyRisks.push({
                    nodeId: 'workflow',
                    nodeName: 'Workflow',
                    nodeType: 'workflow',
                    riskLevel: 'high',
                    risks: [{
                        type: 'duplicate-side-effect',
                        description: 'Concurrent execution created duplicate side effects',
                        severity: 'critical',
                        pattern: 'Observed during active testing',
                    }],
                    mitigations: ['Implement idempotency key tracking', 'Add deduplication at entry point'],
                });
            }
            if (concurrentTestResult.sideEffectAnalysis.resourceConflicts) {
                concurrencyRisks.push({
                    nodeId: 'workflow',
                    nodeName: 'Workflow',
                    nodeType: 'workflow',
                    riskLevel: 'high',
                    risks: [{
                        type: 'race-condition-risk',
                        description: 'Concurrent execution caused resource conflicts',
                        severity: 'critical',
                        pattern: 'Observed during active testing',
                    }],
                    mitigations: ['Implement distributed locking', 'Use optimistic concurrency control'],
                });
            }
        }
        // Calculate overall score
        const overallScore = this.calculateIdempotencyScore(dedupKeyAnalysis, concurrencyRisks, parallelExecution, webhookDuplicates, nonIdempotentOperations, lockingPatterns);
        // Determine if workflow is idempotent
        const isIdempotent = overallScore >= 80 && nonIdempotentOperations.length === 0;
        // Generate recommendations
        const recommendations = this.generateRecommendations(dedupKeyAnalysis, concurrencyRisks, parallelExecution, webhookDuplicates, nonIdempotentOperations, lockingPatterns);
        return {
            workflowId: workflow.id?.toString() || workflowId,
            workflowName: workflow.name,
            overallScore,
            isIdempotent,
            dedupKeyAnalysis,
            concurrencyRisks,
            parallelExecution,
            webhookDuplicates,
            nonIdempotentOperations,
            lockingPatterns,
            recommendations,
            testDuration: Date.now() - startTime,
            concurrentTestResult,
        };
    }
    /**
     * Analyze deduplication keys in workflow
     */
    analyzeDedupKeys(workflow) {
        const results = [];
        for (const node of workflow.nodes) {
            // Check nodes that should have dedup keys
            if (this.shouldHaveDedupKey(node)) {
                const analysis = this.analyzeNodeDedupKey(node);
                results.push(analysis);
            }
        }
        return results;
    }
    shouldHaveDedupKey(node) {
        const nodeType = node.type.toLowerCase();
        // Webhook triggers and data modification nodes should have dedup
        return (nodeType.includes('webhook') ||
            nodeType.includes('trigger') ||
            HIGH_CONCURRENCY_RISK_NODES.some(t => nodeType.includes(t.replace('n8n-nodes-base.', ''))));
    }
    analyzeNodeDedupKey(node) {
        const params = node.parameters || {};
        const issues = [];
        const recommendations = [];
        // Look for dedup key patterns
        let dedupKeyExpression;
        let hasDedupKey = false;
        let keyStrength = 'none';
        // Check for common dedup key parameter names
        const dedupKeyParams = ['idempotencyKey', 'dedupKey', 'uniqueKey', 'requestId', 'messageId'];
        for (const param of dedupKeyParams) {
            if (params[param]) {
                hasDedupKey = true;
                dedupKeyExpression = String(params[param]);
                break;
            }
        }
        // Check webhook for headers that might be dedup keys
        if (node.type.toLowerCase().includes('webhook')) {
            const webhookParams = params;
            if (webhookParams.headerAuth || webhookParams.authentication) {
                // May have delivery ID in headers
                recommendations.push('Consider extracting X-Delivery-ID or similar header for deduplication');
            }
        }
        // Evaluate key strength
        if (hasDedupKey && dedupKeyExpression) {
            if (dedupKeyExpression.includes('$json') && dedupKeyExpression.includes('id')) {
                keyStrength = 'strong';
            }
            else if (dedupKeyExpression.includes('uuid') || dedupKeyExpression.includes('$execution')) {
                keyStrength = 'strong';
            }
            else if (dedupKeyExpression.includes('timestamp') || dedupKeyExpression.includes('Date')) {
                keyStrength = 'weak';
                issues.push('Timestamp-based dedup keys may have collision risk');
            }
            else {
                keyStrength = 'weak';
                issues.push('Dedup key expression may not be unique enough');
            }
        }
        if (!hasDedupKey) {
            issues.push('No deduplication key configured');
            recommendations.push('Add idempotency key using unique identifier from input data');
        }
        return {
            nodeId: node.id,
            nodeName: node.name,
            hasDedupKey,
            dedupKeyExpression,
            keyStrength,
            issues,
            recommendations,
        };
    }
    /**
     * Analyze concurrency risks in workflow
     */
    analyzeConcurrencyRisks(workflow) {
        const results = [];
        for (const node of workflow.nodes) {
            const risks = this.analyzeNodeConcurrencyRisks(node, workflow);
            if (risks.risks.length > 0 || risks.riskLevel !== 'none') {
                results.push(risks);
            }
        }
        return results;
    }
    analyzeNodeConcurrencyRisks(node, workflow) {
        const risks = [];
        const mitigations = [];
        const params = node.parameters || {};
        // Check for high-risk node types
        const isHighRiskNode = HIGH_CONCURRENCY_RISK_NODES.some(t => node.type.toLowerCase().includes(t.replace('n8n-nodes-base.', '')));
        // Check for write operations
        const operation = params.operation?.toLowerCase() || '';
        const resource = params.resource?.toLowerCase() || '';
        // Detect non-atomic operations
        if (operation === 'update' || operation === 'upsert') {
            if (!this.hasOptimisticLocking(params)) {
                risks.push({
                    type: 'race-condition-risk',
                    description: `Update operation without optimistic locking in ${node.name}`,
                    severity: isHighRiskNode ? 'high' : 'medium',
                    pattern: 'read-modify-write without version check',
                });
                mitigations.push('Add version field or ETag check for optimistic locking');
            }
        }
        // Check for counter operations
        if (operation.includes('increment') || operation.includes('decrement')) {
            risks.push({
                type: 'counter-increment-unsafe',
                description: `Counter operation may cause race condition in ${node.name}`,
                severity: 'high',
                pattern: 'concurrent counter modification',
            });
            mitigations.push('Use atomic increment operation or implement distributed lock');
        }
        // Check for insert without unique constraint
        if (operation === 'insert' || operation === 'create') {
            risks.push({
                type: 'duplicate-side-effect',
                description: `Insert may create duplicates on concurrent execution in ${node.name}`,
                severity: 'medium',
                pattern: 'insert without uniqueness guarantee',
            });
            mitigations.push('Use UPSERT or add unique constraint with ON CONFLICT handling');
        }
        // Determine risk level
        let riskLevel = 'none';
        if (risks.some(r => r.severity === 'critical')) {
            riskLevel = 'high';
        }
        else if (risks.some(r => r.severity === 'high')) {
            riskLevel = 'high';
        }
        else if (risks.some(r => r.severity === 'medium')) {
            riskLevel = 'medium';
        }
        else if (risks.length > 0) {
            riskLevel = 'low';
        }
        return {
            nodeId: node.id,
            nodeName: node.name,
            nodeType: node.type,
            riskLevel,
            risks,
            mitigations,
        };
    }
    hasOptimisticLocking(params) {
        // Check for version/etag fields in conditions
        const conditions = JSON.stringify(params).toLowerCase();
        return (conditions.includes('version') ||
            conditions.includes('etag') ||
            conditions.includes('_rev') ||
            conditions.includes('updatedAt'));
    }
    /**
     * Analyze parallel execution paths
     */
    analyzeParallelExecution(workflow) {
        const parallelBranches = [];
        const sharedStateRisks = [];
        const recommendations = [];
        // Find nodes with multiple outputs (split points)
        const splitNodes = workflow.nodes.filter(node => {
            const connections = workflow.connections[node.name];
            if (!connections || !connections.main)
                return false;
            // Check if node has multiple output branches
            return connections.main.length > 1 || connections.main.some(output => output && output.length > 1);
        });
        if (splitNodes.length === 0) {
            return {
                hasParallelPaths: false,
                parallelBranches: [],
                sharedStateRisks: [],
                recommendations: [],
            };
        }
        // Analyze each split point
        for (const splitNode of splitNodes) {
            const branches = this.traceBranches(splitNode, workflow);
            parallelBranches.push(...branches);
        }
        // Find shared state risks between branches
        const resourcesByBranch = new Map();
        for (const branch of parallelBranches) {
            resourcesByBranch.set(branch.branchId, new Set(branch.resourcesAccessed));
        }
        // Check for overlapping resources
        const branchIds = Array.from(resourcesByBranch.keys());
        for (let i = 0; i < branchIds.length; i++) {
            for (let j = i + 1; j < branchIds.length; j++) {
                const branch1Resources = resourcesByBranch.get(branchIds[i]);
                const branch2Resources = resourcesByBranch.get(branchIds[j]);
                for (const resource of branch1Resources) {
                    if (branch2Resources.has(resource)) {
                        sharedStateRisks.push({
                            resource,
                            accessedBy: [branchIds[i], branchIds[j]],
                            riskType: 'write-write-conflict',
                            severity: 'high',
                        });
                    }
                }
            }
        }
        if (sharedStateRisks.length > 0) {
            recommendations.push('Consider serializing access to shared resources');
            recommendations.push('Implement distributed locking for concurrent resource access');
            recommendations.push('Use optimistic locking with retry for conflict resolution');
        }
        return {
            hasParallelPaths: parallelBranches.length > 0,
            parallelBranches,
            sharedStateRisks,
            recommendations,
        };
    }
    traceBranches(splitNode, workflow) {
        const branches = [];
        const connections = workflow.connections[splitNode.name];
        if (!connections || !connections.main)
            return branches;
        let branchIndex = 0;
        for (const outputConnections of connections.main) {
            if (!outputConnections)
                continue;
            for (const connection of outputConnections) {
                const branchNodes = [];
                const resourcesAccessed = [];
                // Trace this branch
                this.traceBranchNodes(connection.node, workflow, branchNodes, resourcesAccessed, new Set());
                if (branchNodes.length > 0) {
                    branches.push({
                        branchId: `${splitNode.name}-branch-${branchIndex++}`,
                        nodes: branchNodes,
                        resourcesAccessed,
                    });
                }
            }
        }
        return branches;
    }
    traceBranchNodes(nodeName, workflow, nodes, resources, visited) {
        if (visited.has(nodeName))
            return;
        visited.add(nodeName);
        const node = workflow.nodes.find(n => n.name === nodeName);
        if (!node)
            return;
        nodes.push(nodeName);
        // Extract resources accessed
        const params = node.parameters || {};
        if (params.table)
            resources.push(`table:${params.table}`);
        if (params.collection)
            resources.push(`collection:${params.collection}`);
        if (params.sheetName)
            resources.push(`sheet:${params.sheetName}`);
        if (params.database)
            resources.push(`database:${params.database}`);
        // Continue to next nodes
        const connections = workflow.connections[nodeName];
        if (!connections || !connections.main)
            return;
        for (const outputConnections of connections.main) {
            if (!outputConnections)
                continue;
            for (const connection of outputConnections) {
                this.traceBranchNodes(connection.node, workflow, nodes, resources, visited);
            }
        }
    }
    /**
     * Analyze webhook duplicate handling
     */
    analyzeWebhookDuplicates(workflow) {
        const results = [];
        const webhookNodes = workflow.nodes.filter(node => node.type.toLowerCase().includes('webhook'));
        for (const webhook of webhookNodes) {
            const analysis = this.analyzeWebhookNode(webhook, workflow);
            results.push(analysis);
        }
        return results;
    }
    analyzeWebhookNode(node, workflow) {
        const params = node.parameters || {};
        const recommendations = [];
        // Check for delivery ID handling
        const hasDeliveryIdCheck = this.checkForDeliveryIdHandling(node, workflow);
        if (!hasDeliveryIdCheck) {
            recommendations.push('Extract and validate X-Delivery-ID or similar header for deduplication');
        }
        // Check for timestamp validation
        const hasTimestampValidation = this.checkForTimestampValidation(node, workflow);
        if (!hasTimestampValidation) {
            recommendations.push('Add timestamp validation to reject stale webhook deliveries');
        }
        // Check for replay protection
        const hasReplayProtection = hasDeliveryIdCheck || hasTimestampValidation;
        if (!hasReplayProtection) {
            recommendations.push('Implement replay protection using nonce or sliding window');
        }
        // Determine duplicate handling strategy
        let duplicateHandlingStrategy = 'none';
        if (hasDeliveryIdCheck) {
            duplicateHandlingStrategy = 'ignore';
        }
        return {
            webhookNodeId: node.id,
            hasDeliveryIdCheck,
            hasTimestampValidation,
            hasReplayProtection,
            duplicateHandlingStrategy,
            recommendations,
        };
    }
    checkForDeliveryIdHandling(node, workflow) {
        // Check if there's a node after webhook that extracts delivery ID
        const connections = workflow.connections[node.name];
        if (!connections || !connections.main)
            return false;
        // Look for IF/Switch nodes checking for headers
        for (const outputConnections of connections.main) {
            if (!outputConnections)
                continue;
            for (const connection of outputConnections) {
                const nextNode = workflow.nodes.find(n => n.name === connection.node);
                if (nextNode) {
                    const params = JSON.stringify(nextNode.parameters || {}).toLowerCase();
                    if (params.includes('delivery') ||
                        params.includes('idempotency') ||
                        params.includes('x-request-id')) {
                        return true;
                    }
                }
            }
        }
        return false;
    }
    checkForTimestampValidation(node, workflow) {
        // Similar check for timestamp validation
        const connections = workflow.connections[node.name];
        if (!connections || !connections.main)
            return false;
        for (const outputConnections of connections.main) {
            if (!outputConnections)
                continue;
            for (const connection of outputConnections) {
                const nextNode = workflow.nodes.find(n => n.name === connection.node);
                if (nextNode) {
                    const params = JSON.stringify(nextNode.parameters || {}).toLowerCase();
                    if (params.includes('timestamp') &&
                        (params.includes('compare') || params.includes('greater') || params.includes('less'))) {
                        return true;
                    }
                }
            }
        }
        return false;
    }
    /**
     * Find non-idempotent operations
     */
    findNonIdempotentOperations(workflow) {
        const results = [];
        for (const node of workflow.nodes) {
            const params = node.parameters || {};
            const operation = params.operation?.toLowerCase() || '';
            const method = params.method?.toLowerCase() || '';
            // Check operation against non-idempotent patterns
            for (const [pattern, info] of Object.entries(NON_IDEMPOTENT_PATTERNS)) {
                if (operation.includes(pattern) || method.includes(pattern) || node.name.toLowerCase().includes(pattern)) {
                    results.push({
                        nodeId: node.id,
                        nodeName: node.name,
                        operationType: operation || method || pattern,
                        reason: info.reason,
                        canBeMadeIdempotent: true,
                        suggestion: info.suggestion,
                    });
                    break;
                }
            }
            // Check for idempotent operations (no issue)
            const isIdempotent = IDEMPOTENT_OPERATIONS.some(op => operation.includes(op) || method.includes(op));
            if (!isIdempotent && !results.some(r => r.nodeId === node.id)) {
                // Unknown operation - flag for review
                if (operation && !['execute', 'run', 'process'].includes(operation)) {
                    // Don't flag generic operations, but flag specific ones we don't recognize
                }
            }
        }
        return results;
    }
    /**
     * Analyze locking patterns
     */
    analyzeLockingPatterns(workflow) {
        const lockNodes = [];
        const recommendations = [];
        let hasLocking = false;
        let lockType = 'none';
        let lockScope = 'none';
        for (const node of workflow.nodes) {
            const params = node.parameters || {};
            const paramsStr = JSON.stringify(params).toLowerCase();
            // Check for Redis locks
            if (node.type.toLowerCase().includes('redis')) {
                if (paramsStr.includes('setnx') || paramsStr.includes('lock')) {
                    hasLocking = true;
                    lockType = 'pessimistic';
                    lockScope = 'resource';
                    lockNodes.push(node.name);
                }
            }
            // Check for optimistic locking patterns
            if (paramsStr.includes('version') ||
                paramsStr.includes('etag') ||
                paramsStr.includes('_rev')) {
                hasLocking = true;
                lockType = 'optimistic';
                lockScope = 'resource';
                lockNodes.push(node.name);
            }
            // Check for database locks
            if (paramsStr.includes('for update') || paramsStr.includes('lock')) {
                hasLocking = true;
                lockType = 'pessimistic';
                lockScope = 'resource';
                lockNodes.push(node.name);
            }
        }
        if (!hasLocking) {
            recommendations.push('Consider implementing locking for concurrent workflow executions');
            recommendations.push('Use Redis SETNX for distributed locks');
            recommendations.push('Implement optimistic locking with version fields for database operations');
        }
        else {
            if (lockType === 'pessimistic') {
                recommendations.push('Ensure locks have TTL to prevent deadlocks');
                recommendations.push('Implement lock retry with exponential backoff');
            }
        }
        return {
            hasLocking,
            lockType: hasLocking ? lockType : undefined,
            lockNodes,
            lockScope,
            recommendations,
        };
    }
    /**
     * Calculate overall idempotency score
     */
    calculateIdempotencyScore(dedupKeyAnalysis, concurrencyRisks, parallelExecution, webhookDuplicates, nonIdempotentOperations, lockingPatterns) {
        let score = 100;
        // Deduct for missing dedup keys
        for (const analysis of dedupKeyAnalysis) {
            if (!analysis.hasDedupKey) {
                score -= 10;
            }
            else if (analysis.keyStrength === 'weak') {
                score -= 5;
            }
        }
        // Deduct for concurrency risks
        for (const risk of concurrencyRisks) {
            if (risk.riskLevel === 'high') {
                score -= 15;
            }
            else if (risk.riskLevel === 'medium') {
                score -= 8;
            }
            else if (risk.riskLevel === 'low') {
                score -= 3;
            }
        }
        // Deduct for parallel execution without protection
        if (parallelExecution.hasParallelPaths) {
            score -= 5 * parallelExecution.sharedStateRisks.length;
        }
        // Deduct for webhook without duplicate handling
        for (const webhook of webhookDuplicates) {
            if (!webhook.hasReplayProtection) {
                score -= 10;
            }
        }
        // Deduct for non-idempotent operations
        score -= 10 * nonIdempotentOperations.length;
        // Bonus for having locking
        if (lockingPatterns.hasLocking) {
            score += 5;
        }
        return Math.max(0, Math.min(100, score));
    }
    /**
     * Generate recommendations
     */
    generateRecommendations(dedupKeyAnalysis, concurrencyRisks, parallelExecution, webhookDuplicates, nonIdempotentOperations, lockingPatterns) {
        const recommendations = [];
        // High-priority: non-idempotent operations
        if (nonIdempotentOperations.length > 0) {
            recommendations.push(`CRITICAL: ${nonIdempotentOperations.length} non-idempotent operations detected - fix before production`);
            for (const op of nonIdempotentOperations.slice(0, 3)) {
                recommendations.push(` - ${op.nodeName}: ${op.suggestion}`);
            }
        }
        // Webhook handling
        const unprotectedWebhooks = webhookDuplicates.filter(w => !w.hasReplayProtection);
        if (unprotectedWebhooks.length > 0) {
            recommendations.push(`Add replay protection to ${unprotectedWebhooks.length} webhook trigger(s)`);
        }
        // Concurrency risks
        const highRisks = concurrencyRisks.filter(r => r.riskLevel === 'high');
        if (highRisks.length > 0) {
            recommendations.push(`Address ${highRisks.length} high concurrency risk node(s)`);
        }
        // Parallel execution
        if (parallelExecution.sharedStateRisks.length > 0) {
            recommendations.push('Implement synchronization for shared resources in parallel branches');
        }
        // Locking patterns
        recommendations.push(...lockingPatterns.recommendations);
        // Dedup keys
        const missingDedup = dedupKeyAnalysis.filter(d => !d.hasDedupKey);
        if (missingDedup.length > 0) {
            recommendations.push(`Add deduplication keys to ${missingDedup.length} node(s)`);
        }
        return [...new Set(recommendations)]; // Remove duplicates
    }
    // ============================================================================
    // Active Concurrent Execution Testing
    // ============================================================================
    /**
     * Run concurrent execution test to verify idempotency
     * This actually executes the workflow multiple times simultaneously
     */
    async runConcurrentExecutionTest(workflowId, concurrency, testInput, assertIdenticalOutput) {
        const harness = new N8nTestHarness_1.N8nTestHarness(this.n8nConfig);
        try {
            // Configure concurrent execution
            const config = {
                concurrency,
                staggerMs: 0, // No stagger - true concurrent execution
                inputVariations: Array(concurrency).fill(testInput || {}),
                timeout: 60000,
            };
            // Execute concurrently
            const result = await harness.executeConcurrently(workflowId, config);
            // Analyze results
            const executionResults = result.executions.map(exec => ({
                index: exec.index,
                executionId: exec.executionId,
                status: exec.status,
                duration: exec.duration,
                outputHash: this.hashOutput(exec.output),
                error: exec.error,
            }));
            // Check for side effects
            const sideEffectAnalysis = this.analyzeSideEffects(result);
            // Check if all outputs are identical
            const allOutputsIdentical = result.allIdentical;
            // Generate summary
            const summary = this.generateConcurrentTestSummary(concurrency, result.executions.filter(e => e.status === 'success').length, allOutputsIdentical, sideEffectAnalysis, result.timing);
            // Emit event
            this.emitEvent('idempotency.concurrent-test.completed', {
                workflowId,
                concurrency,
                allSucceeded: result.executions.every(e => e.status === 'success'),
                allIdentical: allOutputsIdentical,
                differences: result.differences.length,
            });
            return {
                executed: true,
                concurrency,
                allSucceeded: result.executions.every(e => e.status === 'success'),
                allOutputsIdentical,
                executionResults,
                differences: result.differences,
                sideEffectAnalysis,
                summary,
            };
        }
        catch (error) {
            return {
                executed: false,
                concurrency,
                allSucceeded: false,
                allOutputsIdentical: false,
                executionResults: [],
                differences: [],
                sideEffectAnalysis: {
                    duplicatesCreated: false,
                    resourceConflicts: false,
                    dataCorruption: false,
                    details: [`Test execution failed: ${error.message}`],
                },
                summary: `Concurrent test failed: ${error.message}`,
            };
        }
        finally {
            await harness.cleanup();
        }
    }
    /**
     * Analyze side effects from concurrent execution
     */
    analyzeSideEffects(result) {
        const details = [];
        let duplicatesCreated = false;
        let resourceConflicts = false;
        let dataCorruption = false;
        // Check for duplicate outputs (same data created multiple times)
        const outputHashes = new Map();
        for (const exec of result.executions) {
            const hash = this.hashOutput(exec.output);
            outputHashes.set(hash, (outputHashes.get(hash) || 0) + 1);
        }
        // If all outputs are identical, that's good for idempotency
        // But we need to check if they indicate duplicate side effects
        if (result.allIdentical && result.executions.length > 1) {
            // Check if outputs contain duplicate indicators
            const firstOutput = result.executions[0]?.output;
            if (firstOutput) {
                const outputStr = JSON.stringify(firstOutput);
                if (outputStr.includes('duplicate') || outputStr.includes('already exists')) {
                    duplicatesCreated = true;
                    details.push('Output indicates duplicate detection triggered');
                }
            }
        }
        // Check for errors that indicate conflicts
        for (const exec of result.executions) {
            if (exec.error) {
                if (exec.error.toLowerCase().includes('conflict') ||
                    exec.error.toLowerCase().includes('locked') ||
                    exec.error.toLowerCase().includes('deadlock')) {
                    resourceConflicts = true;
                    details.push(`Resource conflict detected: ${exec.error}`);
                }
                if (exec.error.toLowerCase().includes('corrupt') ||
                    exec.error.toLowerCase().includes('invalid state')) {
                    dataCorruption = true;
                    details.push(`Data corruption detected: ${exec.error}`);
                }
            }
        }
        // Check for differences that indicate non-idempotent behavior
        if (result.differences.length > 0) {
            // Analyze types of differences
            const valueChanges = result.differences.filter(d => d.differenceType === 'value-changed');
            if (valueChanges.length > 0) {
                details.push(`${valueChanges.length} output value(s) differed between executions`);
                // Check if differences are in fields that should be idempotent
                for (const diff of valueChanges.slice(0, 5)) {
                    const fieldLower = diff.fieldPath.toLowerCase();
                    if (fieldLower.includes('count') ||
                        fieldLower.includes('total') ||
                        fieldLower.includes('sum')) {
                        duplicatesCreated = true;
                        details.push(`Counter field '${diff.fieldPath}' changed: may indicate duplicate processing`);
                    }
                    if (fieldLower.includes('id') && !fieldLower.includes('timestamp')) {
                        duplicatesCreated = true;
                        details.push(`ID field '${diff.fieldPath}' differs: may indicate duplicate record creation`);
                    }
                }
            }
        }
        // If no issues detected, add positive message
        if (!duplicatesCreated && !resourceConflicts && !dataCorruption) {
            details.push('No side effect issues detected during concurrent execution');
        }
        return {
            duplicatesCreated,
            resourceConflicts,
            dataCorruption,
            details,
        };
    }
    /**
     * Generate summary of concurrent test
     */
    generateConcurrentTestSummary(concurrency, successCount, allIdentical, sideEffects, timing) {
        const parts = [];
        parts.push(`Executed ${concurrency} concurrent workflow instances`);
        parts.push(`${successCount}/${concurrency} succeeded`);
        if (allIdentical) {
            parts.push('All outputs were identical (good for idempotency)');
        }
        else {
            parts.push('⚠️ Outputs differed between executions');
        }
        if (sideEffects.duplicatesCreated) {
            parts.push('⚠️ Duplicate side effects detected');
        }
        if (sideEffects.resourceConflicts) {
            parts.push('⚠️ Resource conflicts occurred');
        }
        if (sideEffects.dataCorruption) {
            parts.push('❌ Data corruption detected');
        }
        parts.push(`Timing: avg ${timing.avgMs}ms, min ${timing.minMs}ms, max ${timing.maxMs}ms`);
        return parts.join('. ');
    }
    /**
     * Hash output for comparison
     */
    hashOutput(output) {
        const str = JSON.stringify(output, Object.keys(output).sort());
        let hash = 0;
        for (let i = 0; i < str.length; i++) {
            const char = str.charCodeAt(i);
            hash = ((hash << 5) - hash) + char;
            hash = hash & hash;
        }
        return Math.abs(hash).toString(16).padStart(8, '0');
    }
    /**
     * Quick idempotency check - runs a fast concurrent test
     */
    async quickIdempotencyCheck(workflowId, testInput) {
        const result = await this.testIdempotency(workflowId, undefined, {
            runConcurrentTest: true,
            concurrency: 2,
            testInput,
            assertIdenticalOutput: true,
        });
        const issues = [];
        if (result.nonIdempotentOperations.length > 0) {
            issues.push(...result.nonIdempotentOperations.map(op => op.reason));
        }
        if (result.concurrentTestResult && !result.concurrentTestResult.allOutputsIdentical) {
            issues.push('Concurrent executions produced different outputs');
        }
        if (result.concurrentTestResult?.sideEffectAnalysis.duplicatesCreated) {
            issues.push('Concurrent execution created duplicate side effects');
        }
        return {
            isIdempotent: result.isIdempotent,
            concurrentTestPassed: result.concurrentTestResult?.allSucceeded && result.concurrentTestResult?.allOutputsIdentical || false,
            issues,
        };
    }
}
exports.N8nIdempotencyTesterAgent = N8nIdempotencyTesterAgent;
//# sourceMappingURL=N8nIdempotencyTesterAgent.js.map
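
For orientation, the sketch below shows how the new idempotency tester might be driven from application code. It is a minimal, hypothetical example derived only from the method signatures visible in this hunk: the require path, the n8nConfig fields, and any base-agent lifecycle requirements come from N8nBaseAgent, which is not shown here, so treat them as assumptions rather than documented API.

// Hypothetical usage sketch - require path and config fields are assumptions.
const { N8nIdempotencyTesterAgent } = require('agentic-qe/dist/agents/n8n');

async function checkWorkflowIdempotency() {
    // Config shape is defined by N8nBaseAgent (outside this hunk); the fields
    // below are illustrative placeholders.
    const agent = new N8nIdempotencyTesterAgent({
        n8nConfig: { baseUrl: 'http://localhost:5678', apiKey: process.env.N8N_API_KEY },
        capabilities: [],
    });

    // Static analysis only: scores dedup keys, concurrency risks, webhook
    // replay protection, and known non-idempotent operations.
    const report = await agent.testIdempotency('my-workflow-id', undefined, {
        runConcurrentTest: false,
    });
    console.log(report.overallScore, report.isIdempotent);
    console.log(report.recommendations.join('\n'));

    // Active check: runs the workflow twice concurrently via N8nTestHarness
    // and requires identical outputs.
    const quick = await agent.quickIdempotencyCheck('my-workflow-id', { orderId: 'ord_1' });
    if (!quick.isIdempotent) {
        console.warn('Idempotency issues:', quick.issues);
    }
}

checkWorkflowIdempotency().catch(console.error);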