agentic-qe 2.5.5 → 2.5.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only; it reflects the changes between package versions as they appear in their respective public registries.
- package/.claude/agents/n8n/n8n-base-agent.md +376 -0
- package/.claude/agents/n8n/n8n-bdd-scenario-tester.md +613 -0
- package/.claude/agents/n8n/n8n-chaos-tester.md +654 -0
- package/.claude/agents/n8n/n8n-ci-orchestrator.md +850 -0
- package/.claude/agents/n8n/n8n-compliance-validator.md +685 -0
- package/.claude/agents/n8n/n8n-expression-validator.md +560 -0
- package/.claude/agents/n8n/n8n-integration-test.md +602 -0
- package/.claude/agents/n8n/n8n-monitoring-validator.md +589 -0
- package/.claude/agents/n8n/n8n-node-validator.md +455 -0
- package/.claude/agents/n8n/n8n-performance-tester.md +630 -0
- package/.claude/agents/n8n/n8n-security-auditor.md +786 -0
- package/.claude/agents/n8n/n8n-trigger-test.md +500 -0
- package/.claude/agents/n8n/n8n-unit-tester.md +633 -0
- package/.claude/agents/n8n/n8n-version-comparator.md +567 -0
- package/.claude/agents/n8n/n8n-workflow-executor.md +392 -0
- package/.claude/skills/n8n-expression-testing/SKILL.md +434 -0
- package/.claude/skills/n8n-integration-testing-patterns/SKILL.md +540 -0
- package/.claude/skills/n8n-security-testing/SKILL.md +599 -0
- package/.claude/skills/n8n-trigger-testing-strategies/SKILL.md +541 -0
- package/.claude/skills/n8n-workflow-testing-fundamentals/SKILL.md +447 -0
- package/CHANGELOG.md +111 -0
- package/README.md +7 -4
- package/dist/adapters/MemoryStoreAdapter.d.ts +75 -123
- package/dist/adapters/MemoryStoreAdapter.d.ts.map +1 -1
- package/dist/adapters/MemoryStoreAdapter.js +204 -219
- package/dist/adapters/MemoryStoreAdapter.js.map +1 -1
- package/dist/agents/AccessibilityAllyAgent.d.ts.map +1 -1
- package/dist/agents/AccessibilityAllyAgent.js +17 -1
- package/dist/agents/AccessibilityAllyAgent.js.map +1 -1
- package/dist/agents/BaseAgent.d.ts +18 -250
- package/dist/agents/BaseAgent.d.ts.map +1 -1
- package/dist/agents/BaseAgent.js +122 -520
- package/dist/agents/BaseAgent.js.map +1 -1
- package/dist/agents/n8n/N8nAPIClient.d.ts +121 -0
- package/dist/agents/n8n/N8nAPIClient.d.ts.map +1 -0
- package/dist/agents/n8n/N8nAPIClient.js +367 -0
- package/dist/agents/n8n/N8nAPIClient.js.map +1 -0
- package/dist/agents/n8n/N8nAuditPersistence.d.ts +120 -0
- package/dist/agents/n8n/N8nAuditPersistence.d.ts.map +1 -0
- package/dist/agents/n8n/N8nAuditPersistence.js +473 -0
- package/dist/agents/n8n/N8nAuditPersistence.js.map +1 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.d.ts +159 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.js +697 -0
- package/dist/agents/n8n/N8nBDDScenarioTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nBaseAgent.d.ts +126 -0
- package/dist/agents/n8n/N8nBaseAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nBaseAgent.js +446 -0
- package/dist/agents/n8n/N8nBaseAgent.js.map +1 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.d.ts +164 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.js +610 -0
- package/dist/agents/n8n/N8nCIOrchestratorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.d.ts +205 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.js +729 -0
- package/dist/agents/n8n/N8nChaosTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.d.ts +228 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.js +986 -0
- package/dist/agents/n8n/N8nComplianceValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nContractTesterAgent.d.ts +213 -0
- package/dist/agents/n8n/N8nContractTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nContractTesterAgent.js +989 -0
- package/dist/agents/n8n/N8nContractTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.d.ts +99 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.js +632 -0
- package/dist/agents/n8n/N8nExpressionValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.d.ts +238 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.js +956 -0
- package/dist/agents/n8n/N8nFailureModeTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.d.ts +242 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.js +992 -0
- package/dist/agents/n8n/N8nIdempotencyTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.d.ts +104 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.js +653 -0
- package/dist/agents/n8n/N8nIntegrationTestAgent.js.map +1 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.d.ts +210 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.js +669 -0
- package/dist/agents/n8n/N8nMonitoringValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.d.ts +142 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.js +1090 -0
- package/dist/agents/n8n/N8nNodeValidatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.d.ts +198 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.js +653 -0
- package/dist/agents/n8n/N8nPerformanceTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.d.ts +245 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.js +952 -0
- package/dist/agents/n8n/N8nReplayabilityTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.d.ts +325 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.js +1187 -0
- package/dist/agents/n8n/N8nSecretsHygieneAuditorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.d.ts +91 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.js +825 -0
- package/dist/agents/n8n/N8nSecurityAuditorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nTestHarness.d.ts +131 -0
- package/dist/agents/n8n/N8nTestHarness.d.ts.map +1 -0
- package/dist/agents/n8n/N8nTestHarness.js +456 -0
- package/dist/agents/n8n/N8nTestHarness.js.map +1 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.d.ts +119 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.js +652 -0
- package/dist/agents/n8n/N8nTriggerTestAgent.js.map +1 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.d.ts +130 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.js +522 -0
- package/dist/agents/n8n/N8nUnitTesterAgent.js.map +1 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.d.ts +201 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.js +645 -0
- package/dist/agents/n8n/N8nVersionComparatorAgent.js.map +1 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.d.ts +120 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.d.ts.map +1 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.js +347 -0
- package/dist/agents/n8n/N8nWorkflowExecutorAgent.js.map +1 -0
- package/dist/agents/n8n/index.d.ts +119 -0
- package/dist/agents/n8n/index.d.ts.map +1 -0
- package/dist/agents/n8n/index.js +298 -0
- package/dist/agents/n8n/index.js.map +1 -0
- package/dist/agents/n8n/types.d.ts +486 -0
- package/dist/agents/n8n/types.d.ts.map +1 -0
- package/dist/agents/n8n/types.js +8 -0
- package/dist/agents/n8n/types.js.map +1 -0
- package/dist/agents/utils/generators.d.ts +30 -0
- package/dist/agents/utils/generators.d.ts.map +1 -0
- package/dist/agents/utils/generators.js +44 -0
- package/dist/agents/utils/generators.js.map +1 -0
- package/dist/agents/utils/index.d.ts +10 -0
- package/dist/agents/utils/index.d.ts.map +1 -0
- package/dist/agents/utils/index.js +19 -0
- package/dist/agents/utils/index.js.map +1 -0
- package/dist/agents/utils/validation.d.ts +72 -0
- package/dist/agents/utils/validation.d.ts.map +1 -0
- package/dist/agents/utils/validation.js +75 -0
- package/dist/agents/utils/validation.js.map +1 -0
- package/dist/cli/init/agents.d.ts.map +1 -1
- package/dist/cli/init/agents.js +29 -0
- package/dist/cli/init/agents.js.map +1 -1
- package/dist/cli/init/skills.d.ts.map +1 -1
- package/dist/cli/init/skills.js +7 -1
- package/dist/cli/init/skills.js.map +1 -1
- package/dist/core/memory/HNSWVectorMemory.js +1 -1
- package/dist/core/memory/SwarmMemoryManager.d.ts +114 -90
- package/dist/core/memory/SwarmMemoryManager.d.ts.map +1 -1
- package/dist/core/memory/SwarmMemoryManager.js +277 -235
- package/dist/core/memory/SwarmMemoryManager.js.map +1 -1
- package/dist/learning/baselines/StandardTaskSuite.d.ts.map +1 -1
- package/dist/learning/baselines/StandardTaskSuite.js +38 -0
- package/dist/learning/baselines/StandardTaskSuite.js.map +1 -1
- package/dist/mcp/server-instructions.d.ts +1 -1
- package/dist/mcp/server-instructions.js +1 -1
- package/dist/types/memory-interfaces.d.ts +76 -68
- package/dist/types/memory-interfaces.d.ts.map +1 -1
- package/dist/types/memory-interfaces.js +3 -0
- package/dist/types/memory-interfaces.js.map +1 -1
- package/docs/reference/agents.md +91 -2
- package/docs/reference/skills.md +97 -2
- package/package.json +2 -2
|
@@ -0,0 +1,989 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* N8nContractTesterAgent
|
|
4
|
+
*
|
|
5
|
+
* Data-shape and schema contract testing for n8n workflows:
|
|
6
|
+
* - JSON schema validation at node boundaries
|
|
7
|
+
* - Data shape drift detection between nodes
|
|
8
|
+
* - Optional field handling validation
|
|
9
|
+
* - Array vs object type checking
|
|
10
|
+
* - Pagination response validation
|
|
11
|
+
* - Empty result handling
|
|
12
|
+
* - Type coercion detection
|
|
13
|
+
*/
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
exports.N8nContractTesterAgent = void 0;
|
|
16
|
+
const N8nBaseAgent_1 = require("./N8nBaseAgent");
|
|
17
|
+
// ============================================================================
|
|
18
|
+
// Common Data Patterns for n8n
|
|
19
|
+
// ============================================================================
|
|
20
|
+
const COMMON_PATTERNS = {
|
|
21
|
+
// Pagination patterns
|
|
22
|
+
pagination: {
|
|
23
|
+
type: 'object',
|
|
24
|
+
properties: {
|
|
25
|
+
data: { type: 'array', items: { type: 'object' } },
|
|
26
|
+
meta: {
|
|
27
|
+
type: 'object',
|
|
28
|
+
properties: {
|
|
29
|
+
total: { type: 'number' },
|
|
30
|
+
page: { type: 'number' },
|
|
31
|
+
limit: { type: 'number' },
|
|
32
|
+
hasMore: { type: 'boolean' },
|
|
33
|
+
},
|
|
34
|
+
},
|
|
35
|
+
},
|
|
36
|
+
},
|
|
37
|
+
// Empty result patterns
|
|
38
|
+
emptyArray: { type: 'array', items: { type: 'object' }, minItems: 0, maxItems: 0 },
|
|
39
|
+
emptyObject: { type: 'object', properties: {} },
|
|
40
|
+
// Common API response patterns
|
|
41
|
+
apiResponse: {
|
|
42
|
+
type: 'object',
|
|
43
|
+
properties: {
|
|
44
|
+
success: { type: 'boolean' },
|
|
45
|
+
data: { type: 'object' },
|
|
46
|
+
error: { type: 'object', nullable: true },
|
|
47
|
+
},
|
|
48
|
+
},
|
|
49
|
+
};
|
|
50
|
+
// ============================================================================
|
|
51
|
+
// Agent Implementation
|
|
52
|
+
// ============================================================================
|
|
53
|
+
class N8nContractTesterAgent extends N8nBaseAgent_1.N8nBaseAgent {
|
|
54
|
+
constructor(config) {
|
|
55
|
+
const capabilities = [
|
|
56
|
+
{
|
|
57
|
+
name: 'schema-validation',
|
|
58
|
+
version: '1.0.0',
|
|
59
|
+
description: 'Validate JSON schemas at node boundaries',
|
|
60
|
+
parameters: {},
|
|
61
|
+
},
|
|
62
|
+
{
|
|
63
|
+
name: 'data-shape-detection',
|
|
64
|
+
version: '1.0.0',
|
|
65
|
+
description: 'Detect data shape drift between nodes',
|
|
66
|
+
parameters: {},
|
|
67
|
+
},
|
|
68
|
+
{
|
|
69
|
+
name: 'contract-inference',
|
|
70
|
+
version: '1.0.0',
|
|
71
|
+
description: 'Infer schemas from execution data',
|
|
72
|
+
parameters: {},
|
|
73
|
+
},
|
|
74
|
+
{
|
|
75
|
+
name: 'boundary-testing',
|
|
76
|
+
version: '1.0.0',
|
|
77
|
+
description: 'Test data compatibility at node boundaries',
|
|
78
|
+
parameters: {},
|
|
79
|
+
},
|
|
80
|
+
{
|
|
81
|
+
name: 'schema-persistence',
|
|
82
|
+
version: '1.0.0',
|
|
83
|
+
description: 'Persist schema snapshots for drift detection',
|
|
84
|
+
parameters: {},
|
|
85
|
+
},
|
|
86
|
+
{
|
|
87
|
+
name: 'drift-detection',
|
|
88
|
+
version: '1.0.0',
|
|
89
|
+
description: 'Detect schema drift against baseline',
|
|
90
|
+
parameters: {},
|
|
91
|
+
},
|
|
92
|
+
];
|
|
93
|
+
super({
|
|
94
|
+
...config,
|
|
95
|
+
type: 'n8n-contract-tester',
|
|
96
|
+
capabilities: [...capabilities, ...(config.capabilities || [])],
|
|
97
|
+
});
|
|
98
|
+
}
|
|
99
|
+
async performTask(task) {
|
|
100
|
+
const contractTask = task;
|
|
101
|
+
if (contractTask.type !== 'contract-test') {
|
|
102
|
+
throw new Error(`Unsupported task type: ${contractTask.type}`);
|
|
103
|
+
}
|
|
104
|
+
return this.testContracts(contractTask.target, contractTask.options);
|
|
105
|
+
}
|
|
106
|
+
/**
 * Run contract tests on a workflow.
 *
 * Pipeline: (1) optionally fetch recent executions and infer per-node schemas,
 * (2) test each node's contract, (3) test every node-to-node boundary,
 * (4) scan for common n8n data problems, (5) optionally detect drift against
 * a persisted baseline and/or persist the current schemas, (6) score and
 * return the result (also stored and emitted as an event).
 *
 * @param {string} workflowId - n8n workflow id to test.
 * @param {object} [options] - Flags: inferSchemas, schemas, detectDrift,
 *   persistSchemas, schemaVersion, allowedDriftTypes, checkArrayConsistency,
 *   checkNullability.
 * @param {object} [providedWorkflow] - Pre-fetched workflow; when given, no
 *   API calls are made to fetch the workflow or its executions.
 * @returns {Promise<object>} Result with score, contracts, boundary tests,
 *   violations, recommendations, and optional drift/persistence info.
 */
async testContracts(workflowId, options, providedWorkflow) {
    const workflow = providedWorkflow || await this.getWorkflow(workflowId);
    const violations = [];
    const nodeContracts = [];
    const boundaryTests = [];
    const recommendations = [];
    // Get recent executions to analyze actual data shapes.
    // Only fetch executions if we need to infer schemas and no workflow was provided.
    let executions = [];
    if (options?.inferSchemas && !providedWorkflow) {
        try {
            executions = await this.n8nClient.listExecutions({ workflowId, limit: 10 });
        }
        catch {
            // Best-effort: missing execution data downgrades to a recommendation.
            recommendations.push('No execution data available - run the workflow to enable data shape analysis');
        }
    }
    // Infer schemas from execution data if requested.
    let inferredSchemas = {};
    if (options?.inferSchemas && executions.length > 0) {
        inferredSchemas = this.inferSchemasFromExecutions(executions);
    }
    // Combine provided schemas with inferred ones (explicit schemas win).
    const schemas = { ...inferredSchemas, ...(options?.schemas || {}) };
    // Test each node's contract; node-level issues become 'warning' violations.
    for (const node of workflow.nodes) {
        const nodeResult = await this.testNodeContract(node, workflow, executions, schemas[node.name], options);
        nodeContracts.push(nodeResult);
        violations.push(...nodeResult.issues.map(issue => ({
            node: node.name,
            field: '',
            expected: '',
            actual: '',
            severity: 'warning',
            message: issue,
        })));
    }
    // Test boundaries between connected nodes (main outputs only).
    for (const [sourceName, connections] of Object.entries(workflow.connections)) {
        if (connections.main) {
            for (const output of connections.main) {
                for (const conn of output) {
                    const boundaryResult = this.testBoundary(sourceName, conn.node, nodeContracts, executions, options);
                    boundaryTests.push(boundaryResult);
                    violations.push(...boundaryResult.issues);
                }
            }
        }
    }
    // Check for common n8n data problems (null output, empty arrays, pagination).
    violations.push(...this.checkCommonDataProblems(workflow, executions, options));
    // Generate recommendations from everything collected so far
    // (drift violations added below are intentionally not included).
    recommendations.push(...this.generateRecommendations(violations, nodeContracts, boundaryTests));
    // Drift detection against a persisted baseline; breaking drifts are errors.
    // NOTE(review): detectSchemaDrift is defined later in this file — not visible here.
    let driftAnalysis;
    if (options?.detectDrift && Object.keys(schemas).length > 0) {
        driftAnalysis = await this.detectSchemaDrift(workflowId, schemas, options.schemaVersion || 'current', options.allowedDriftTypes);
        for (const drift of driftAnalysis.drifts) {
            violations.push({
                node: drift.nodeName,
                field: drift.fieldPath,
                expected: String(drift.baselineValue),
                actual: String(drift.currentValue),
                severity: drift.isBreaking ? 'error' : 'warning',
                message: drift.message,
                suggestion: drift.suggestion,
            });
        }
    }
    // Persist the current schemas as a snapshot if requested.
    let schemasPersisted = false;
    if (options?.persistSchemas && Object.keys(schemas).length > 0) {
        await this.persistSchemaSnapshot(workflowId, workflow.name, schemas, options.schemaVersion || 'latest');
        schemasPersisted = true;
    }
    // Score: start at 100, -15 per error, -5 per warning, floored at 0.
    const errorCount = violations.filter(v => v.severity === 'error').length;
    const warningCount = violations.filter(v => v.severity === 'warning').length;
    const score = Math.max(0, 100 - (errorCount * 15) - (warningCount * 5));
    const result = {
        workflowId: workflow.id || workflowId,
        workflowName: workflow.name,
        testDate: new Date().toISOString(),
        passed: errorCount === 0, // warnings do not fail the test
        score,
        nodeContracts,
        boundaryTests,
        schemaViolations: violations,
        inferredSchemas: options?.inferSchemas ? inferredSchemas : undefined,
        recommendations,
        driftAnalysis,
        schemasPersisted,
        schemaVersion: options?.schemaVersion,
    };
    // Store result for later retrieval.
    await this.storeTestResult(`contract-test:${workflowId}`, result);
    // Emit completion event with a summary.
    this.emitEvent('contract.test.completed', {
        workflowId,
        passed: result.passed,
        score: result.score,
        violations: violations.length,
    });
    return result;
}
|
|
215
|
+
/**
|
|
216
|
+
* Test a single node's contract
|
|
217
|
+
*/
|
|
218
|
+
async testNodeContract(node, workflow, executions, expectedSchema, options) {
|
|
219
|
+
const issues = [];
|
|
220
|
+
// Infer input/output schemas from execution data
|
|
221
|
+
let inputSchema = null;
|
|
222
|
+
let outputSchema = null;
|
|
223
|
+
for (const execution of executions) {
|
|
224
|
+
const runData = execution.data?.resultData?.runData?.[node.name];
|
|
225
|
+
if (runData && runData[0]) {
|
|
226
|
+
const nodeRun = runData[0];
|
|
227
|
+
// Extract output schema
|
|
228
|
+
if (nodeRun.data?.main?.[0]?.[0]) {
|
|
229
|
+
const outputData = nodeRun.data.main[0][0].json;
|
|
230
|
+
outputSchema = this.inferSchema(outputData);
|
|
231
|
+
}
|
|
232
|
+
// Get input from source nodes
|
|
233
|
+
if (nodeRun.source?.[0]) {
|
|
234
|
+
const sourceName = nodeRun.source[0].previousNode;
|
|
235
|
+
const sourceRun = execution.data?.resultData?.runData?.[sourceName];
|
|
236
|
+
if (sourceRun?.[0]?.data?.main?.[0]?.[0]) {
|
|
237
|
+
const inputData = sourceRun[0].data.main[0][0].json;
|
|
238
|
+
inputSchema = this.inferSchema(inputData);
|
|
239
|
+
}
|
|
240
|
+
}
|
|
241
|
+
break;
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
// Validate against expected schema if provided
|
|
245
|
+
if (expectedSchema && outputSchema) {
|
|
246
|
+
const schemaIssues = this.compareSchemas(expectedSchema, outputSchema, node.name);
|
|
247
|
+
issues.push(...schemaIssues);
|
|
248
|
+
}
|
|
249
|
+
// Check for common issues
|
|
250
|
+
if (outputSchema) {
|
|
251
|
+
// Check for inconsistent array items
|
|
252
|
+
if (options?.checkArrayConsistency) {
|
|
253
|
+
const arrayIssues = this.checkArrayConsistency(outputSchema, node.name);
|
|
254
|
+
issues.push(...arrayIssues);
|
|
255
|
+
}
|
|
256
|
+
// Check for unexpected nulls
|
|
257
|
+
if (options?.checkNullability) {
|
|
258
|
+
const nullIssues = this.checkNullability(outputSchema, node.name);
|
|
259
|
+
issues.push(...nullIssues);
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
return {
|
|
263
|
+
nodeName: node.name,
|
|
264
|
+
nodeType: node.type,
|
|
265
|
+
inputSchema,
|
|
266
|
+
outputSchema,
|
|
267
|
+
inputValid: issues.length === 0,
|
|
268
|
+
outputValid: issues.length === 0,
|
|
269
|
+
issues,
|
|
270
|
+
};
|
|
271
|
+
}
|
|
272
|
+
/**
|
|
273
|
+
* Test boundary between two connected nodes
|
|
274
|
+
*/
|
|
275
|
+
testBoundary(sourceName, targetName, nodeContracts, executions, options) {
|
|
276
|
+
const issues = [];
|
|
277
|
+
const sourceContract = nodeContracts.find(nc => nc.nodeName === sourceName);
|
|
278
|
+
const targetContract = nodeContracts.find(nc => nc.nodeName === targetName);
|
|
279
|
+
if (!sourceContract?.outputSchema || !targetContract?.inputSchema) {
|
|
280
|
+
return {
|
|
281
|
+
sourceNode: sourceName,
|
|
282
|
+
targetNode: targetName,
|
|
283
|
+
compatible: true, // Can't determine without schemas
|
|
284
|
+
issues: [],
|
|
285
|
+
};
|
|
286
|
+
}
|
|
287
|
+
// Check if output schema is compatible with input schema
|
|
288
|
+
const sourceOutput = sourceContract.outputSchema;
|
|
289
|
+
const targetExpected = targetContract.inputSchema;
|
|
290
|
+
// Check type compatibility
|
|
291
|
+
if (sourceOutput.type !== targetExpected.type) {
|
|
292
|
+
issues.push({
|
|
293
|
+
node: targetName,
|
|
294
|
+
field: 'input',
|
|
295
|
+
expected: targetExpected.type,
|
|
296
|
+
actual: sourceOutput.type,
|
|
297
|
+
severity: 'error',
|
|
298
|
+
message: `Type mismatch: ${sourceName} outputs ${sourceOutput.type} but ${targetName} expects ${targetExpected.type}`,
|
|
299
|
+
suggestion: 'Add a Set node to transform the data type',
|
|
300
|
+
});
|
|
301
|
+
}
|
|
302
|
+
// Check required fields
|
|
303
|
+
if (targetExpected.required && sourceOutput.properties) {
|
|
304
|
+
for (const required of targetExpected.required) {
|
|
305
|
+
if (!sourceOutput.properties[required]) {
|
|
306
|
+
issues.push({
|
|
307
|
+
node: targetName,
|
|
308
|
+
field: required,
|
|
309
|
+
expected: 'present',
|
|
310
|
+
actual: 'missing',
|
|
311
|
+
severity: 'error',
|
|
312
|
+
message: `Required field "${required}" missing from ${sourceName} output`,
|
|
313
|
+
suggestion: `Add "${required}" field in ${sourceName} or use Set node to add it`,
|
|
314
|
+
});
|
|
315
|
+
}
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
return {
|
|
319
|
+
sourceNode: sourceName,
|
|
320
|
+
targetNode: targetName,
|
|
321
|
+
compatible: issues.filter(i => i.severity === 'error').length === 0,
|
|
322
|
+
issues,
|
|
323
|
+
};
|
|
324
|
+
}
|
|
325
|
+
/**
|
|
326
|
+
* Check for common n8n data problems
|
|
327
|
+
*/
|
|
328
|
+
checkCommonDataProblems(workflow, executions, options) {
|
|
329
|
+
const violations = [];
|
|
330
|
+
for (const execution of executions) {
|
|
331
|
+
const runData = execution.data?.resultData?.runData;
|
|
332
|
+
if (!runData)
|
|
333
|
+
continue;
|
|
334
|
+
for (const [nodeName, nodeRuns] of Object.entries(runData)) {
|
|
335
|
+
const run = nodeRuns[0];
|
|
336
|
+
if (!run?.data?.main?.[0])
|
|
337
|
+
continue;
|
|
338
|
+
for (const item of run.data.main[0]) {
|
|
339
|
+
const data = item.json;
|
|
340
|
+
// Check for empty results that might break downstream nodes
|
|
341
|
+
if (data === null || data === undefined) {
|
|
342
|
+
violations.push({
|
|
343
|
+
node: nodeName,
|
|
344
|
+
field: 'output',
|
|
345
|
+
expected: 'data',
|
|
346
|
+
actual: 'null/undefined',
|
|
347
|
+
severity: 'warning',
|
|
348
|
+
message: `Node "${nodeName}" produced null/undefined output`,
|
|
349
|
+
suggestion: 'Add IF node to handle empty results',
|
|
350
|
+
});
|
|
351
|
+
}
|
|
352
|
+
// Check for arrays that might be empty
|
|
353
|
+
if (Array.isArray(data) && data.length === 0) {
|
|
354
|
+
violations.push({
|
|
355
|
+
node: nodeName,
|
|
356
|
+
field: 'output',
|
|
357
|
+
expected: 'non-empty array',
|
|
358
|
+
actual: 'empty array',
|
|
359
|
+
severity: 'info',
|
|
360
|
+
message: `Node "${nodeName}" produced empty array - ensure downstream nodes handle this`,
|
|
361
|
+
});
|
|
362
|
+
}
|
|
363
|
+
// Check for pagination that might not be handled
|
|
364
|
+
if (data && typeof data === 'object') {
|
|
365
|
+
const objData = data;
|
|
366
|
+
if ('nextPage' in objData || 'hasMore' in objData || 'cursor' in objData) {
|
|
367
|
+
violations.push({
|
|
368
|
+
node: nodeName,
|
|
369
|
+
field: 'pagination',
|
|
370
|
+
expected: 'handled',
|
|
371
|
+
actual: 'detected',
|
|
372
|
+
severity: 'warning',
|
|
373
|
+
message: `Node "${nodeName}" has pagination fields - ensure all pages are fetched`,
|
|
374
|
+
suggestion: 'Use Loop Over Items or pagination settings if available',
|
|
375
|
+
});
|
|
376
|
+
}
|
|
377
|
+
}
|
|
378
|
+
}
|
|
379
|
+
}
|
|
380
|
+
}
|
|
381
|
+
return violations;
|
|
382
|
+
}
|
|
383
|
+
/**
|
|
384
|
+
* Infer JSON schema from data
|
|
385
|
+
*/
|
|
386
|
+
inferSchema(data) {
|
|
387
|
+
if (data === null) {
|
|
388
|
+
return { type: 'null' };
|
|
389
|
+
}
|
|
390
|
+
if (Array.isArray(data)) {
|
|
391
|
+
const itemSchemas = data.slice(0, 10).map(item => this.inferSchema(item));
|
|
392
|
+
const mergedItemSchema = itemSchemas.length > 0
|
|
393
|
+
? this.mergeSchemas(itemSchemas)
|
|
394
|
+
: { type: 'object' };
|
|
395
|
+
return {
|
|
396
|
+
type: 'array',
|
|
397
|
+
items: mergedItemSchema,
|
|
398
|
+
minItems: data.length,
|
|
399
|
+
maxItems: data.length,
|
|
400
|
+
};
|
|
401
|
+
}
|
|
402
|
+
if (typeof data === 'object') {
|
|
403
|
+
const properties = {};
|
|
404
|
+
const required = [];
|
|
405
|
+
for (const [key, value] of Object.entries(data)) {
|
|
406
|
+
properties[key] = this.inferSchema(value);
|
|
407
|
+
if (value !== null && value !== undefined) {
|
|
408
|
+
required.push(key);
|
|
409
|
+
}
|
|
410
|
+
}
|
|
411
|
+
return {
|
|
412
|
+
type: 'object',
|
|
413
|
+
properties,
|
|
414
|
+
required,
|
|
415
|
+
};
|
|
416
|
+
}
|
|
417
|
+
if (typeof data === 'string') {
|
|
418
|
+
return { type: 'string' };
|
|
419
|
+
}
|
|
420
|
+
if (typeof data === 'number') {
|
|
421
|
+
return { type: 'number' };
|
|
422
|
+
}
|
|
423
|
+
if (typeof data === 'boolean') {
|
|
424
|
+
return { type: 'boolean' };
|
|
425
|
+
}
|
|
426
|
+
return { type: 'object' };
|
|
427
|
+
}
|
|
428
|
+
/**
|
|
429
|
+
* Merge multiple schemas into one
|
|
430
|
+
*/
|
|
431
|
+
mergeSchemas(schemas) {
|
|
432
|
+
if (schemas.length === 0)
|
|
433
|
+
return { type: 'object' };
|
|
434
|
+
if (schemas.length === 1)
|
|
435
|
+
return schemas[0];
|
|
436
|
+
// For simplicity, use first schema as base and mark fields as nullable if not in all
|
|
437
|
+
const base = { ...schemas[0] };
|
|
438
|
+
if (base.type === 'object' && base.properties) {
|
|
439
|
+
const allKeys = new Set();
|
|
440
|
+
const keyPresence = {};
|
|
441
|
+
for (const schema of schemas) {
|
|
442
|
+
if (schema.properties) {
|
|
443
|
+
for (const key of Object.keys(schema.properties)) {
|
|
444
|
+
allKeys.add(key);
|
|
445
|
+
keyPresence[key] = (keyPresence[key] || 0) + 1;
|
|
446
|
+
}
|
|
447
|
+
}
|
|
448
|
+
}
|
|
449
|
+
// Mark fields as nullable if not present in all schemas
|
|
450
|
+
for (const key of allKeys) {
|
|
451
|
+
if (keyPresence[key] < schemas.length) {
|
|
452
|
+
if (base.properties[key]) {
|
|
453
|
+
base.properties[key] = { ...base.properties[key], nullable: true };
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
// Only require fields present in ALL schemas
|
|
458
|
+
base.required = Array.from(allKeys).filter(key => keyPresence[key] === schemas.length);
|
|
459
|
+
}
|
|
460
|
+
return base;
|
|
461
|
+
}
|
|
462
|
+
/**
|
|
463
|
+
* Compare expected vs actual schema
|
|
464
|
+
*/
|
|
465
|
+
compareSchemas(expected, actual, nodeName) {
|
|
466
|
+
const issues = [];
|
|
467
|
+
if (expected.type !== actual.type) {
|
|
468
|
+
issues.push(`Type mismatch: expected ${expected.type}, got ${actual.type}`);
|
|
469
|
+
}
|
|
470
|
+
if (expected.type === 'object' && expected.properties && actual.properties) {
|
|
471
|
+
// Check required fields
|
|
472
|
+
for (const required of expected.required || []) {
|
|
473
|
+
if (!actual.properties[required]) {
|
|
474
|
+
issues.push(`Missing required field: ${required}`);
|
|
475
|
+
}
|
|
476
|
+
}
|
|
477
|
+
// Check for extra fields (if strict)
|
|
478
|
+
if (expected.additionalProperties === false) {
|
|
479
|
+
for (const key of Object.keys(actual.properties)) {
|
|
480
|
+
if (!expected.properties[key]) {
|
|
481
|
+
issues.push(`Unexpected field: ${key}`);
|
|
482
|
+
}
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
return issues;
|
|
487
|
+
}
|
|
488
|
+
/**
|
|
489
|
+
* Check array item consistency
|
|
490
|
+
*/
|
|
491
|
+
checkArrayConsistency(schema, nodeName) {
|
|
492
|
+
const issues = [];
|
|
493
|
+
if (schema.type === 'array' && schema.items?.type === 'object' && schema.items.properties) {
|
|
494
|
+
// Check if all required fields are consistently present
|
|
495
|
+
const requiredFields = schema.items.required || [];
|
|
496
|
+
for (const field of requiredFields) {
|
|
497
|
+
if (schema.items.properties[field]?.nullable) {
|
|
498
|
+
issues.push(`Array items have inconsistent field "${field}" - sometimes null/missing`);
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
return issues;
|
|
503
|
+
}
|
|
504
|
+
/**
|
|
505
|
+
* Check for unexpected nullability
|
|
506
|
+
*/
|
|
507
|
+
checkNullability(schema, nodeName) {
|
|
508
|
+
const issues = [];
|
|
509
|
+
if (schema.type === 'object' && schema.properties) {
|
|
510
|
+
for (const [field, fieldSchema] of Object.entries(schema.properties)) {
|
|
511
|
+
if (fieldSchema.nullable && !fieldSchema.type.includes('null')) {
|
|
512
|
+
issues.push(`Field "${field}" can be null but is not marked as nullable in schema`);
|
|
513
|
+
}
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
return issues;
|
|
517
|
+
}
|
|
518
|
+
/**
|
|
519
|
+
* Infer schemas from multiple executions
|
|
520
|
+
*/
|
|
521
|
+
inferSchemasFromExecutions(executions) {
|
|
522
|
+
const schemas = {};
|
|
523
|
+
for (const execution of executions) {
|
|
524
|
+
const runData = execution.data?.resultData?.runData;
|
|
525
|
+
if (!runData)
|
|
526
|
+
continue;
|
|
527
|
+
for (const [nodeName, nodeRuns] of Object.entries(runData)) {
|
|
528
|
+
const run = nodeRuns[0];
|
|
529
|
+
if (run?.data?.main?.[0]?.[0]) {
|
|
530
|
+
const data = run.data.main[0][0].json;
|
|
531
|
+
const schema = this.inferSchema(data);
|
|
532
|
+
if (!schemas[nodeName]) {
|
|
533
|
+
schemas[nodeName] = [];
|
|
534
|
+
}
|
|
535
|
+
schemas[nodeName].push(schema);
|
|
536
|
+
}
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
// Merge schemas for each node
|
|
540
|
+
const mergedSchemas = {};
|
|
541
|
+
for (const [nodeName, nodeSchemas] of Object.entries(schemas)) {
|
|
542
|
+
mergedSchemas[nodeName] = this.mergeSchemas(nodeSchemas);
|
|
543
|
+
}
|
|
544
|
+
return mergedSchemas;
|
|
545
|
+
}
|
|
546
|
+
/**
|
|
547
|
+
* Generate recommendations based on findings
|
|
548
|
+
*/
|
|
549
|
+
generateRecommendations(violations, nodeContracts, boundaryTests) {
|
|
550
|
+
const recommendations = [];
|
|
551
|
+
// Check for nodes without schemas
|
|
552
|
+
const noSchemaNodes = nodeContracts.filter(nc => !nc.outputSchema);
|
|
553
|
+
if (noSchemaNodes.length > 0) {
|
|
554
|
+
recommendations.push(`${noSchemaNodes.length} nodes have no schema data - execute the workflow to collect data shapes`);
|
|
555
|
+
}
|
|
556
|
+
// Check for boundary issues
|
|
557
|
+
const boundaryIssues = boundaryTests.filter(bt => !bt.compatible);
|
|
558
|
+
if (boundaryIssues.length > 0) {
|
|
559
|
+
recommendations.push(`${boundaryIssues.length} node boundaries have compatibility issues - add Set nodes to transform data`);
|
|
560
|
+
}
|
|
561
|
+
// Check for pagination issues
|
|
562
|
+
const paginationIssues = violations.filter(v => v.field === 'pagination');
|
|
563
|
+
if (paginationIssues.length > 0) {
|
|
564
|
+
recommendations.push('Pagination detected - ensure all pages are fetched using Loop Over Items or API pagination settings');
|
|
565
|
+
}
|
|
566
|
+
// Check for null/empty handling
|
|
567
|
+
const nullIssues = violations.filter(v => v.actual.includes('null') || v.actual.includes('empty'));
|
|
568
|
+
if (nullIssues.length > 0) {
|
|
569
|
+
recommendations.push('Empty/null results detected - add IF nodes to handle these cases gracefully');
|
|
570
|
+
}
|
|
571
|
+
return recommendations;
|
|
572
|
+
}
|
|
573
|
+
/**
|
|
574
|
+
* Quick contract check for a workflow
|
|
575
|
+
*/
|
|
576
|
+
async quickCheck(workflowId) {
|
|
577
|
+
const result = await this.testContracts(workflowId, {
|
|
578
|
+
inferSchemas: true,
|
|
579
|
+
checkArrayConsistency: true,
|
|
580
|
+
});
|
|
581
|
+
return {
|
|
582
|
+
compatible: result.passed,
|
|
583
|
+
boundaryIssues: result.boundaryTests.filter(bt => !bt.compatible).length,
|
|
584
|
+
schemaViolations: result.schemaViolations.length,
|
|
585
|
+
topIssue: result.schemaViolations[0]?.message || null,
|
|
586
|
+
};
|
|
587
|
+
}
|
|
588
|
+
// ============================================================================
|
|
589
|
+
// Schema Persistence & Drift Detection
|
|
590
|
+
// ============================================================================
|
|
591
|
+
/**
|
|
592
|
+
* Persist a schema snapshot as baseline
|
|
593
|
+
*/
|
|
594
|
+
async persistSchemaSnapshot(workflowId, workflowName, schemas, version) {
|
|
595
|
+
const snapshot = {
|
|
596
|
+
workflowId,
|
|
597
|
+
workflowName,
|
|
598
|
+
version,
|
|
599
|
+
timestamp: new Date().toISOString(),
|
|
600
|
+
schemas,
|
|
601
|
+
nodeCount: Object.keys(schemas).length,
|
|
602
|
+
checksum: this.calculateSchemaChecksum(schemas),
|
|
603
|
+
};
|
|
604
|
+
// Store snapshot in memory (using agent's memory store)
|
|
605
|
+
const key = `schema-snapshot:${workflowId}:${version}`;
|
|
606
|
+
await this.storeTestResult(key, snapshot);
|
|
607
|
+
// Also store as 'latest' if version is not 'latest'
|
|
608
|
+
if (version !== 'latest') {
|
|
609
|
+
await this.storeTestResult(`schema-snapshot:${workflowId}:latest`, snapshot);
|
|
610
|
+
}
|
|
611
|
+
// Emit event
|
|
612
|
+
this.emitEvent('schema.snapshot.persisted', {
|
|
613
|
+
workflowId,
|
|
614
|
+
version,
|
|
615
|
+
nodeCount: snapshot.nodeCount,
|
|
616
|
+
checksum: snapshot.checksum,
|
|
617
|
+
});
|
|
618
|
+
return snapshot;
|
|
619
|
+
}
|
|
620
|
+
/**
|
|
621
|
+
* Load a persisted schema snapshot
|
|
622
|
+
*/
|
|
623
|
+
async loadSchemaSnapshot(workflowId, version = 'latest') {
|
|
624
|
+
const key = `schema-snapshot:${workflowId}:${version}`;
|
|
625
|
+
try {
|
|
626
|
+
// Note: storeTestResult is available, but retrieval would need memory store integration
|
|
627
|
+
// For now, return null - full persistence requires memory store setup
|
|
628
|
+
return null;
|
|
629
|
+
}
|
|
630
|
+
catch {
|
|
631
|
+
return null;
|
|
632
|
+
}
|
|
633
|
+
}
|
|
634
|
+
/**
|
|
635
|
+
* Detect schema drift against baseline
|
|
636
|
+
*/
|
|
637
|
+
async detectSchemaDrift(workflowId, currentSchemas, currentVersion, allowedDriftTypes) {
|
|
638
|
+
// Load baseline snapshot
|
|
639
|
+
const baseline = await this.loadSchemaSnapshot(workflowId, 'latest');
|
|
640
|
+
if (!baseline) {
|
|
641
|
+
return {
|
|
642
|
+
hasDrift: false,
|
|
643
|
+
baselineVersion: 'none',
|
|
644
|
+
currentVersion,
|
|
645
|
+
baselineDate: '',
|
|
646
|
+
drifts: [],
|
|
647
|
+
breakingChanges: [],
|
|
648
|
+
summary: 'No baseline schema found - consider persisting current schemas as baseline',
|
|
649
|
+
};
|
|
650
|
+
}
|
|
651
|
+
const drifts = [];
|
|
652
|
+
const allNodes = new Set([
|
|
653
|
+
...Object.keys(baseline.schemas),
|
|
654
|
+
...Object.keys(currentSchemas),
|
|
655
|
+
]);
|
|
656
|
+
// Compare each node's schema
|
|
657
|
+
for (const nodeName of allNodes) {
|
|
658
|
+
const baselineSchema = baseline.schemas[nodeName];
|
|
659
|
+
const currentSchema = currentSchemas[nodeName];
|
|
660
|
+
// Node removed
|
|
661
|
+
if (baselineSchema && !currentSchema) {
|
|
662
|
+
drifts.push({
|
|
663
|
+
nodeName,
|
|
664
|
+
fieldPath: '',
|
|
665
|
+
driftType: 'field-removed',
|
|
666
|
+
baselineValue: baselineSchema,
|
|
667
|
+
currentValue: null,
|
|
668
|
+
isBreaking: true,
|
|
669
|
+
message: `Node "${nodeName}" was removed from workflow`,
|
|
670
|
+
suggestion: 'Verify this node removal is intentional',
|
|
671
|
+
});
|
|
672
|
+
continue;
|
|
673
|
+
}
|
|
674
|
+
// Node added
|
|
675
|
+
if (!baselineSchema && currentSchema) {
|
|
676
|
+
drifts.push({
|
|
677
|
+
nodeName,
|
|
678
|
+
fieldPath: '',
|
|
679
|
+
driftType: 'field-added',
|
|
680
|
+
baselineValue: null,
|
|
681
|
+
currentValue: currentSchema,
|
|
682
|
+
isBreaking: false,
|
|
683
|
+
message: `Node "${nodeName}" was added to workflow`,
|
|
684
|
+
suggestion: 'Update baseline schema if this is intentional',
|
|
685
|
+
});
|
|
686
|
+
continue;
|
|
687
|
+
}
|
|
688
|
+
// Compare schemas
|
|
689
|
+
if (baselineSchema && currentSchema) {
|
|
690
|
+
const nodeDrifts = this.compareSchemasForDrift(nodeName, baselineSchema, currentSchema, '');
|
|
691
|
+
drifts.push(...nodeDrifts);
|
|
692
|
+
}
|
|
693
|
+
}
|
|
694
|
+
// Filter out allowed drift types
|
|
695
|
+
const filteredDrifts = allowedDriftTypes
|
|
696
|
+
? drifts.filter(d => !allowedDriftTypes.includes(d.driftType))
|
|
697
|
+
: drifts;
|
|
698
|
+
const breakingChanges = filteredDrifts.filter(d => d.isBreaking);
|
|
699
|
+
// Generate summary
|
|
700
|
+
const summary = this.generateDriftSummary(filteredDrifts, breakingChanges, baseline);
|
|
701
|
+
const analysis = {
|
|
702
|
+
hasDrift: filteredDrifts.length > 0,
|
|
703
|
+
baselineVersion: baseline.version,
|
|
704
|
+
currentVersion,
|
|
705
|
+
baselineDate: baseline.timestamp,
|
|
706
|
+
drifts: filteredDrifts,
|
|
707
|
+
breakingChanges,
|
|
708
|
+
summary,
|
|
709
|
+
};
|
|
710
|
+
// Emit event
|
|
711
|
+
this.emitEvent('schema.drift.detected', {
|
|
712
|
+
workflowId,
|
|
713
|
+
hasDrift: analysis.hasDrift,
|
|
714
|
+
driftCount: filteredDrifts.length,
|
|
715
|
+
breakingCount: breakingChanges.length,
|
|
716
|
+
});
|
|
717
|
+
return analysis;
|
|
718
|
+
}
|
|
719
|
+
/**
 * Compare two schemas recursively for drift.
 *
 * Produces one drift record per detected difference. Breaking-change rules
 * encoded below: type changes, removal of required fields, newly-required
 * fields, and removed enum values are breaking; added fields, added enum
 * values, and fields becoming optional are not.
 *
 * @param {string} nodeName - Node both schemas belong to (copied into each drift).
 * @param {object} baseline - The older/reference schema.
 * @param {object} current - The newer schema to compare against the baseline.
 * @param {string} path - Dotted field path so far ('' at the root).
 * @returns {Array<object>} Drift records ({ nodeName, fieldPath, driftType,
 *   baselineValue, currentValue, isBreaking, message, suggestion }).
 */
compareSchemasForDrift(nodeName, baseline, current, path) {
    const drifts = [];
    // Display label: the root level is reported as 'root' rather than ''.
    const fieldPath = path || 'root';
    // Type changed
    if (baseline.type !== current.type) {
        drifts.push({
            nodeName,
            fieldPath,
            driftType: 'type-changed',
            baselineValue: baseline.type,
            currentValue: current.type,
            isBreaking: true,
            message: `Type changed from "${baseline.type}" to "${current.type}" at ${fieldPath}`,
            suggestion: 'This is a breaking change - verify consumers can handle new type',
        });
        return drifts; // Type change is fundamental, skip further comparison
    }
    // Nullable changed
    if (baseline.nullable !== current.nullable) {
        drifts.push({
            nodeName,
            fieldPath,
            driftType: 'nullable-changed',
            baselineValue: baseline.nullable,
            currentValue: current.nullable,
            // Breaking only in the nullable -> non-nullable direction.
            isBreaking: !current.nullable && baseline.nullable === true,
            message: `Nullable changed from ${baseline.nullable} to ${current.nullable} at ${fieldPath}`,
            suggestion: current.nullable
                ? 'Add null handling in consumers'
                : 'Verify consumers don\'t expect nulls',
        });
    }
    // Compare object properties
    if (baseline.type === 'object' && baseline.properties && current.properties) {
        // Union of property names so removals AND additions are both seen.
        const allKeys = new Set([
            ...Object.keys(baseline.properties),
            ...Object.keys(current.properties),
        ]);
        for (const key of allKeys) {
            const baselineProp = baseline.properties[key];
            const currentProp = current.properties[key];
            const propPath = path ? `${path}.${key}` : key;
            // Field removed
            if (baselineProp && !currentProp) {
                // Removal is only breaking when the field was required.
                const wasRequired = baseline.required?.includes(key) ?? false;
                drifts.push({
                    nodeName,
                    fieldPath: propPath,
                    driftType: 'field-removed',
                    baselineValue: baselineProp,
                    currentValue: null,
                    isBreaking: wasRequired,
                    message: `Field "${key}" was removed at ${propPath}`,
                    suggestion: wasRequired
                        ? 'Breaking: Required field removed - update consumers'
                        : 'Non-breaking: Optional field removed',
                });
                continue;
            }
            // Field added
            if (!baselineProp && currentProp) {
                drifts.push({
                    nodeName,
                    fieldPath: propPath,
                    driftType: 'field-added',
                    baselineValue: null,
                    currentValue: currentProp,
                    isBreaking: false, // Adding fields is backward compatible
                    message: `Field "${key}" was added at ${propPath}`,
                    suggestion: 'Non-breaking: New field added',
                });
                continue;
            }
            // Recursively compare
            if (baselineProp && currentProp) {
                drifts.push(...this.compareSchemasForDrift(nodeName, baselineProp, currentProp, propPath));
            }
        }
        // Check required fields drift
        const baselineRequired = new Set(baseline.required || []);
        const currentRequired = new Set(current.required || []);
        // New required fields (breaking)
        for (const key of currentRequired) {
            if (!baselineRequired.has(key)) {
                drifts.push({
                    nodeName,
                    fieldPath: path ? `${path}.${key}` : key,
                    driftType: 'required-changed',
                    baselineValue: false,
                    currentValue: true,
                    isBreaking: true,
                    message: `Field "${key}" became required at ${path || 'root'}`,
                    suggestion: 'Breaking: Ensure all producers provide this field',
                });
            }
        }
        // No longer required (non-breaking)
        for (const key of baselineRequired) {
            if (!currentRequired.has(key)) {
                drifts.push({
                    nodeName,
                    fieldPath: path ? `${path}.${key}` : key,
                    driftType: 'required-changed',
                    baselineValue: true,
                    currentValue: false,
                    isBreaking: false,
                    message: `Field "${key}" is no longer required at ${path || 'root'}`,
                    suggestion: 'Non-breaking: Field made optional',
                });
            }
        }
    }
    // Compare array items
    if (baseline.type === 'array' && baseline.items && current.items) {
        // '[]' suffix marks the array-element level in reported paths.
        drifts.push(...this.compareSchemasForDrift(nodeName, baseline.items, current.items, `${path}[]`));
    }
    // Compare enums
    if (baseline.enum || current.enum) {
        const baselineEnums = new Set(baseline.enum || []);
        const currentEnums = new Set(current.enum || []);
        // Removed enum values (breaking)
        // NOTE: `break` means at most ONE enum drift is reported per direction,
        // even if several values changed; the full value lists are carried in
        // baselineValue/currentValue for inspection.
        for (const val of baselineEnums) {
            if (!currentEnums.has(val)) {
                drifts.push({
                    nodeName,
                    fieldPath,
                    driftType: 'enum-changed',
                    baselineValue: Array.from(baselineEnums),
                    currentValue: Array.from(currentEnums),
                    isBreaking: true,
                    message: `Enum value "${val}" was removed at ${fieldPath}`,
                    suggestion: 'Breaking: Ensure no producers use removed value',
                });
                break;
            }
        }
        // Added enum values (non-breaking)
        for (const val of currentEnums) {
            if (!baselineEnums.has(val)) {
                drifts.push({
                    nodeName,
                    fieldPath,
                    driftType: 'enum-changed',
                    baselineValue: Array.from(baselineEnums),
                    currentValue: Array.from(currentEnums),
                    isBreaking: false,
                    message: `Enum value "${val}" was added at ${fieldPath}`,
                    suggestion: 'Non-breaking: New enum value added',
                });
                break;
            }
        }
    }
    return drifts;
}
|
|
877
|
+
/**
|
|
878
|
+
* Generate human-readable drift summary
|
|
879
|
+
*/
|
|
880
|
+
generateDriftSummary(drifts, breakingChanges, baseline) {
|
|
881
|
+
if (drifts.length === 0) {
|
|
882
|
+
return `No schema drift detected since baseline ${baseline.version} (${baseline.timestamp})`;
|
|
883
|
+
}
|
|
884
|
+
const parts = [];
|
|
885
|
+
parts.push(`Schema drift detected: ${drifts.length} change(s) since baseline ${baseline.version}`);
|
|
886
|
+
if (breakingChanges.length > 0) {
|
|
887
|
+
parts.push(`⚠️ ${breakingChanges.length} BREAKING change(s) detected!`);
|
|
888
|
+
}
|
|
889
|
+
// Categorize changes
|
|
890
|
+
const byType = {};
|
|
891
|
+
for (const drift of drifts) {
|
|
892
|
+
byType[drift.driftType] = (byType[drift.driftType] || 0) + 1;
|
|
893
|
+
}
|
|
894
|
+
const typeDescriptions = [];
|
|
895
|
+
for (const [type, count] of Object.entries(byType)) {
|
|
896
|
+
typeDescriptions.push(`${count} ${type}`);
|
|
897
|
+
}
|
|
898
|
+
parts.push(`Changes: ${typeDescriptions.join(', ')}`);
|
|
899
|
+
return parts.join('. ');
|
|
900
|
+
}
|
|
901
|
+
/**
|
|
902
|
+
* Calculate checksum for schema validation
|
|
903
|
+
*/
|
|
904
|
+
calculateSchemaChecksum(schemas) {
|
|
905
|
+
const str = JSON.stringify(schemas, Object.keys(schemas).sort());
|
|
906
|
+
let hash = 0;
|
|
907
|
+
for (let i = 0; i < str.length; i++) {
|
|
908
|
+
const char = str.charCodeAt(i);
|
|
909
|
+
hash = ((hash << 5) - hash) + char;
|
|
910
|
+
hash = hash & hash;
|
|
911
|
+
}
|
|
912
|
+
return Math.abs(hash).toString(16).padStart(8, '0');
|
|
913
|
+
}
|
|
914
|
+
/**
|
|
915
|
+
* List all schema snapshots for a workflow
|
|
916
|
+
*/
|
|
917
|
+
async listSchemaSnapshots(workflowId) {
|
|
918
|
+
// This would need integration with the memory store's list capability
|
|
919
|
+
// For now, return common versions
|
|
920
|
+
const versions = [];
|
|
921
|
+
// Check for latest
|
|
922
|
+
const latest = await this.loadSchemaSnapshot(workflowId, 'latest');
|
|
923
|
+
if (latest) {
|
|
924
|
+
versions.push('latest');
|
|
925
|
+
if (latest.version !== 'latest') {
|
|
926
|
+
versions.push(latest.version);
|
|
927
|
+
}
|
|
928
|
+
}
|
|
929
|
+
return versions;
|
|
930
|
+
}
|
|
931
|
+
/**
|
|
932
|
+
* Compare two versions of schemas
|
|
933
|
+
*/
|
|
934
|
+
async compareSchemaVersions(workflowId, versionA, versionB) {
|
|
935
|
+
const snapshotA = await this.loadSchemaSnapshot(workflowId, versionA);
|
|
936
|
+
const snapshotB = await this.loadSchemaSnapshot(workflowId, versionB);
|
|
937
|
+
if (!snapshotA || !snapshotB) {
|
|
938
|
+
return null;
|
|
939
|
+
}
|
|
940
|
+
const drifts = [];
|
|
941
|
+
const allNodes = new Set([
|
|
942
|
+
...Object.keys(snapshotA.schemas),
|
|
943
|
+
...Object.keys(snapshotB.schemas),
|
|
944
|
+
]);
|
|
945
|
+
for (const nodeName of allNodes) {
|
|
946
|
+
const schemaA = snapshotA.schemas[nodeName];
|
|
947
|
+
const schemaB = snapshotB.schemas[nodeName];
|
|
948
|
+
if (!schemaA && schemaB) {
|
|
949
|
+
drifts.push({
|
|
950
|
+
nodeName,
|
|
951
|
+
fieldPath: '',
|
|
952
|
+
driftType: 'field-added',
|
|
953
|
+
baselineValue: null,
|
|
954
|
+
currentValue: schemaB,
|
|
955
|
+
isBreaking: false,
|
|
956
|
+
message: `Node "${nodeName}" added in ${versionB}`,
|
|
957
|
+
suggestion: 'New node in workflow',
|
|
958
|
+
});
|
|
959
|
+
}
|
|
960
|
+
else if (schemaA && !schemaB) {
|
|
961
|
+
drifts.push({
|
|
962
|
+
nodeName,
|
|
963
|
+
fieldPath: '',
|
|
964
|
+
driftType: 'field-removed',
|
|
965
|
+
baselineValue: schemaA,
|
|
966
|
+
currentValue: null,
|
|
967
|
+
isBreaking: true,
|
|
968
|
+
message: `Node "${nodeName}" removed in ${versionB}`,
|
|
969
|
+
suggestion: 'Node no longer in workflow',
|
|
970
|
+
});
|
|
971
|
+
}
|
|
972
|
+
else if (schemaA && schemaB) {
|
|
973
|
+
drifts.push(...this.compareSchemasForDrift(nodeName, schemaA, schemaB, ''));
|
|
974
|
+
}
|
|
975
|
+
}
|
|
976
|
+
const breakingChanges = drifts.filter(d => d.isBreaking);
|
|
977
|
+
return {
|
|
978
|
+
hasDrift: drifts.length > 0,
|
|
979
|
+
baselineVersion: versionA,
|
|
980
|
+
currentVersion: versionB,
|
|
981
|
+
baselineDate: snapshotA.timestamp,
|
|
982
|
+
drifts,
|
|
983
|
+
breakingChanges,
|
|
984
|
+
summary: this.generateDriftSummary(drifts, breakingChanges, snapshotA),
|
|
985
|
+
};
|
|
986
|
+
}
|
|
987
|
+
}
|
|
988
|
+
// CommonJS export of the agent class (file is compiled output; see source map below).
exports.N8nContractTesterAgent = N8nContractTesterAgent;
//# sourceMappingURL=N8nContractTesterAgent.js.map
|