flow-walker-cli 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +299 -0
- package/CLAUDE.md +81 -0
- package/LICENSE +21 -0
- package/README.md +247 -0
- package/package.json +21 -0
- package/src/agent-bridge.ts +189 -0
- package/src/capture.ts +102 -0
- package/src/cli.ts +352 -0
- package/src/command-schema.ts +178 -0
- package/src/errors.ts +63 -0
- package/src/fingerprint.ts +82 -0
- package/src/flow-parser.ts +222 -0
- package/src/graph.ts +73 -0
- package/src/push.ts +170 -0
- package/src/reporter.ts +211 -0
- package/src/run-schema.ts +71 -0
- package/src/runner.ts +391 -0
- package/src/safety.ts +74 -0
- package/src/types.ts +82 -0
- package/src/validate.ts +115 -0
- package/src/walker.ts +656 -0
- package/src/yaml-writer.ts +194 -0
- package/tests/capture.test.ts +75 -0
- package/tests/command-schema.test.ts +133 -0
- package/tests/errors.test.ts +93 -0
- package/tests/fingerprint.test.ts +85 -0
- package/tests/flow-parser.test.ts +264 -0
- package/tests/graph.test.ts +111 -0
- package/tests/reporter.test.ts +188 -0
- package/tests/run-schema.test.ts +138 -0
- package/tests/runner.test.ts +150 -0
- package/tests/safety.test.ts +115 -0
- package/tests/validate.test.ts +193 -0
- package/tests/yaml-writer.test.ts +146 -0
- package/tsconfig.json +15 -0
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
import { writeFileSync, mkdirSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import type { Flow, FlowStep, ScreenEdge, ScreenNode } from './types.ts';
|
|
4
|
+
import type { NavigationGraph } from './graph.ts';
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Generate YAML flow files from a navigation graph.
|
|
8
|
+
* Output matches sora's format: name, description, setup, steps[].
|
|
9
|
+
*/
|
|
10
|
+
export function generateFlows(graph: NavigationGraph): Flow[] {
|
|
11
|
+
const flows: Flow[] = [];
|
|
12
|
+
|
|
13
|
+
// Find root screens (screens with no incoming edges, or the first screen added)
|
|
14
|
+
const allTargets = new Set(graph.edges.map(e => e.target));
|
|
15
|
+
const roots = [...graph.nodes.values()].filter(n => !allTargets.has(n.id));
|
|
16
|
+
|
|
17
|
+
// If no clear root, use the first node
|
|
18
|
+
if (roots.length === 0 && graph.nodes.size > 0) {
|
|
19
|
+
roots.push(graph.nodes.values().next().value!);
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
// For each root, generate flows for each outgoing path
|
|
23
|
+
for (const root of roots) {
|
|
24
|
+
const outEdges = graph.edgesFrom(root.id);
|
|
25
|
+
|
|
26
|
+
if (outEdges.length === 0) {
|
|
27
|
+
// Single-screen flow
|
|
28
|
+
flows.push(buildSingleScreenFlow(root));
|
|
29
|
+
continue;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// One flow per outgoing branch from root
|
|
33
|
+
for (const edge of outEdges) {
|
|
34
|
+
const targetNode = graph.nodes.get(edge.target);
|
|
35
|
+
if (!targetNode) continue;
|
|
36
|
+
|
|
37
|
+
const flow = buildBranchFlow(root, edge, targetNode, graph);
|
|
38
|
+
flows.push(flow);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
return flows;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
function buildSingleScreenFlow(screen: ScreenNode): Flow {
|
|
46
|
+
return {
|
|
47
|
+
name: screen.name,
|
|
48
|
+
description: `Screen: ${screen.name} (${screen.elementCount} interactive elements)`,
|
|
49
|
+
setup: 'normal',
|
|
50
|
+
steps: [
|
|
51
|
+
{
|
|
52
|
+
name: `Snapshot ${screen.name}`,
|
|
53
|
+
assert: { interactive_count: { min: Math.max(1, screen.elementCount - 2) } },
|
|
54
|
+
screenshot: screen.name,
|
|
55
|
+
},
|
|
56
|
+
],
|
|
57
|
+
};
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
function buildBranchFlow(
|
|
61
|
+
root: ScreenNode,
|
|
62
|
+
firstEdge: ScreenEdge,
|
|
63
|
+
firstTarget: ScreenNode,
|
|
64
|
+
graph: NavigationGraph,
|
|
65
|
+
): Flow {
|
|
66
|
+
const steps: FlowStep[] = [];
|
|
67
|
+
const flowName = firstTarget.name;
|
|
68
|
+
|
|
69
|
+
// Step 1: snapshot root screen
|
|
70
|
+
steps.push({
|
|
71
|
+
name: `Verify ${root.name}`,
|
|
72
|
+
assert: { interactive_count: { min: Math.max(1, root.elementCount - 2) } },
|
|
73
|
+
screenshot: root.name,
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
// Step 2: press to navigate
|
|
77
|
+
steps.push({
|
|
78
|
+
name: `Press ${firstEdge.element.text || firstEdge.element.type}`,
|
|
79
|
+
press: {
|
|
80
|
+
type: firstEdge.element.type,
|
|
81
|
+
hint: firstEdge.element.text || undefined,
|
|
82
|
+
},
|
|
83
|
+
screenshot: firstTarget.name,
|
|
84
|
+
});
|
|
85
|
+
|
|
86
|
+
// Step 3: assert target screen
|
|
87
|
+
steps.push({
|
|
88
|
+
name: `Verify ${firstTarget.name}`,
|
|
89
|
+
assert: { interactive_count: { min: Math.max(1, firstTarget.elementCount - 2) } },
|
|
90
|
+
});
|
|
91
|
+
|
|
92
|
+
// Follow one more level of edges from target (if any)
|
|
93
|
+
const subEdges = graph.edgesFrom(firstTarget.id);
|
|
94
|
+
for (const subEdge of subEdges.slice(0, 3)) { // cap at 3 sub-branches per flow
|
|
95
|
+
const subTarget = graph.nodes.get(subEdge.target);
|
|
96
|
+
if (!subTarget || subTarget.id === root.id) continue;
|
|
97
|
+
|
|
98
|
+
steps.push({
|
|
99
|
+
name: `Press ${subEdge.element.text || subEdge.element.type}`,
|
|
100
|
+
press: {
|
|
101
|
+
type: subEdge.element.type,
|
|
102
|
+
hint: subEdge.element.text || undefined,
|
|
103
|
+
},
|
|
104
|
+
screenshot: subTarget.name,
|
|
105
|
+
});
|
|
106
|
+
|
|
107
|
+
steps.push({
|
|
108
|
+
name: `Back from ${subTarget.name}`,
|
|
109
|
+
back: true,
|
|
110
|
+
});
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
// Final step: back to root
|
|
114
|
+
steps.push({
|
|
115
|
+
name: `Back to ${root.name}`,
|
|
116
|
+
back: true,
|
|
117
|
+
assert: { interactive_count: { min: Math.max(1, root.elementCount - 2) } },
|
|
118
|
+
});
|
|
119
|
+
|
|
120
|
+
return {
|
|
121
|
+
name: flowName,
|
|
122
|
+
description: `${root.name} → ${firstTarget.name} navigation flow`,
|
|
123
|
+
setup: 'normal',
|
|
124
|
+
steps,
|
|
125
|
+
};
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
/** Serialize a Flow to YAML string (no library needed for this simple structure) */
|
|
129
|
+
export function toYaml(flow: Flow): string {
|
|
130
|
+
const lines: string[] = [];
|
|
131
|
+
|
|
132
|
+
lines.push(`# E2E Flow: ${flow.name}`);
|
|
133
|
+
lines.push('');
|
|
134
|
+
lines.push(`name: ${flow.name}`);
|
|
135
|
+
lines.push(`description: ${flow.description}`);
|
|
136
|
+
lines.push(`setup: ${flow.setup}`);
|
|
137
|
+
lines.push('');
|
|
138
|
+
lines.push('steps:');
|
|
139
|
+
|
|
140
|
+
for (const step of flow.steps) {
|
|
141
|
+
lines.push(` - name: ${step.name}`);
|
|
142
|
+
|
|
143
|
+
if (step.press) {
|
|
144
|
+
const parts = [`type: ${step.press.type}`];
|
|
145
|
+
if (step.press.hint) parts.push(`hint: "${step.press.hint}"`);
|
|
146
|
+
lines.push(` press: { ${parts.join(', ')} }`);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
if (step.scroll) {
|
|
150
|
+
lines.push(` scroll: ${step.scroll}`);
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
if (step.back) {
|
|
154
|
+
lines.push(` back: true`);
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
if (step.assert) {
|
|
158
|
+
const parts: string[] = [];
|
|
159
|
+
if (step.assert.interactive_count) {
|
|
160
|
+
parts.push(`interactive_count: { min: ${step.assert.interactive_count.min} }`);
|
|
161
|
+
}
|
|
162
|
+
if (step.assert.text) {
|
|
163
|
+
parts.push(`text: "${step.assert.text}"`);
|
|
164
|
+
}
|
|
165
|
+
if (parts.length > 0) {
|
|
166
|
+
lines.push(` assert:`);
|
|
167
|
+
for (const part of parts) {
|
|
168
|
+
lines.push(` ${part}`);
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
if (step.screenshot) {
|
|
174
|
+
lines.push(` screenshot: ${step.screenshot}`);
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
return lines.join('\n') + '\n';
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
/** Write all flows to the output directory */
|
|
182
|
+
export function writeFlows(flows: Flow[], outputDir: string): string[] {
|
|
183
|
+
mkdirSync(outputDir, { recursive: true });
|
|
184
|
+
const written: string[] = [];
|
|
185
|
+
|
|
186
|
+
for (const flow of flows) {
|
|
187
|
+
const filename = `${flow.name}.yaml`;
|
|
188
|
+
const filepath = join(outputDir, filename);
|
|
189
|
+
writeFileSync(filepath, toYaml(flow));
|
|
190
|
+
written.push(filepath);
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
return written;
|
|
194
|
+
}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { describe, it } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { existsSync, mkdirSync, rmdirSync, rmSync } from 'node:fs';
|
|
4
|
+
import { join } from 'node:path';
|
|
5
|
+
import { ensureDir } from '../src/capture.ts';
|
|
6
|
+
|
|
7
|
+
describe('ensureDir', () => {
|
|
8
|
+
const testBase = join(import.meta.dirname!, '..', '.test-tmp');
|
|
9
|
+
|
|
10
|
+
it('creates directory if it does not exist', () => {
|
|
11
|
+
const dir = join(testBase, 'ensureDir-new');
|
|
12
|
+
try { rmdirSync(dir, { recursive: true } as any); } catch {}
|
|
13
|
+
ensureDir(dir);
|
|
14
|
+
assert.ok(existsSync(dir));
|
|
15
|
+
rmdirSync(dir, { recursive: true } as any);
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
it('does not throw if directory already exists', () => {
|
|
19
|
+
const dir = join(testBase, 'ensureDir-exists');
|
|
20
|
+
mkdirSync(dir, { recursive: true });
|
|
21
|
+
assert.doesNotThrow(() => ensureDir(dir));
|
|
22
|
+
rmdirSync(dir, { recursive: true } as any);
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
it('creates nested directories', () => {
|
|
26
|
+
const dir = join(testBase, 'ensureDir-nested', 'a', 'b');
|
|
27
|
+
try { rmdirSync(join(testBase, 'ensureDir-nested'), { recursive: true } as any); } catch {}
|
|
28
|
+
ensureDir(dir);
|
|
29
|
+
assert.ok(existsSync(dir));
|
|
30
|
+
rmdirSync(join(testBase, 'ensureDir-nested'), { recursive: true } as any);
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
// Cleanup test base
|
|
34
|
+
it('cleanup', () => {
|
|
35
|
+
try { rmdirSync(testBase, { recursive: true } as any); } catch {}
|
|
36
|
+
assert.ok(true);
|
|
37
|
+
});
|
|
38
|
+
});
|
|
39
|
+
|
|
40
|
+
describe('capture module exports', () => {
|
|
41
|
+
it('exports screenshot function', async () => {
|
|
42
|
+
const mod = await import('../src/capture.ts');
|
|
43
|
+
assert.equal(typeof mod.screenshot, 'function');
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
it('exports startRecording function', async () => {
|
|
47
|
+
const mod = await import('../src/capture.ts');
|
|
48
|
+
assert.equal(typeof mod.startRecording, 'function');
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
it('exports stopRecording function', async () => {
|
|
52
|
+
const mod = await import('../src/capture.ts');
|
|
53
|
+
assert.equal(typeof mod.stopRecording, 'function');
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
it('exports startLogcat function', async () => {
|
|
57
|
+
const mod = await import('../src/capture.ts');
|
|
58
|
+
assert.equal(typeof mod.startLogcat, 'function');
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
it('exports stopLogcat function', async () => {
|
|
62
|
+
const mod = await import('../src/capture.ts');
|
|
63
|
+
assert.equal(typeof mod.stopLogcat, 'function');
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
it('exports getDeviceName function', async () => {
|
|
67
|
+
const mod = await import('../src/capture.ts');
|
|
68
|
+
assert.equal(typeof mod.getDeviceName, 'function');
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
it('exports ensureDir function', async () => {
|
|
72
|
+
const mod = await import('../src/capture.ts');
|
|
73
|
+
assert.equal(typeof mod.ensureDir, 'function');
|
|
74
|
+
});
|
|
75
|
+
});
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { describe, it } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { COMMAND_SCHEMAS, getCommandSchema, getSchemaEnvelope, SCHEMA_VERSION } from '../src/command-schema.ts';
|
|
4
|
+
|
|
5
|
+
describe('COMMAND_SCHEMAS', () => {
|
|
6
|
+
it('contains walk, run, report, and schema commands', () => {
|
|
7
|
+
const names = COMMAND_SCHEMAS.map(s => s.name);
|
|
8
|
+
assert.ok(names.includes('walk'));
|
|
9
|
+
assert.ok(names.includes('run'));
|
|
10
|
+
assert.ok(names.includes('report'));
|
|
11
|
+
assert.ok(names.includes('schema'));
|
|
12
|
+
});
|
|
13
|
+
|
|
14
|
+
it('has exactly 4 commands', () => {
|
|
15
|
+
assert.equal(COMMAND_SCHEMAS.length, 4);
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
it('every schema has required fields', () => {
|
|
19
|
+
for (const schema of COMMAND_SCHEMAS) {
|
|
20
|
+
assert.ok(schema.name, `missing name`);
|
|
21
|
+
assert.ok(schema.description, `${schema.name}: missing description`);
|
|
22
|
+
assert.ok(Array.isArray(schema.args), `${schema.name}: args not array`);
|
|
23
|
+
assert.ok(Array.isArray(schema.flags), `${schema.name}: flags not array`);
|
|
24
|
+
assert.ok(typeof schema.exitCodes === 'object', `${schema.name}: exitCodes not object`);
|
|
25
|
+
assert.ok(Array.isArray(schema.examples), `${schema.name}: examples not array`);
|
|
26
|
+
assert.ok(schema.examples.length > 0, `${schema.name}: no examples`);
|
|
27
|
+
}
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
it('walk schema has --json and --dry-run flags', () => {
|
|
31
|
+
const walk = getCommandSchema('walk');
|
|
32
|
+
assert.ok(walk);
|
|
33
|
+
const flagNames = walk.flags.map(f => f.name);
|
|
34
|
+
assert.ok(flagNames.includes('--json'));
|
|
35
|
+
assert.ok(flagNames.includes('--dry-run'));
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
it('run schema has required flow arg', () => {
|
|
39
|
+
const run = getCommandSchema('run');
|
|
40
|
+
assert.ok(run);
|
|
41
|
+
assert.ok(run.args.some(a => a.name === 'flow' && a.required));
|
|
42
|
+
});
|
|
43
|
+
|
|
44
|
+
it('run schema has --dry-run flag', () => {
|
|
45
|
+
const run = getCommandSchema('run');
|
|
46
|
+
assert.ok(run);
|
|
47
|
+
assert.ok(run.flags.some(f => f.name === '--dry-run'));
|
|
48
|
+
});
|
|
49
|
+
|
|
50
|
+
it('run schema has exit code 1 for failing steps', () => {
|
|
51
|
+
const run = getCommandSchema('run');
|
|
52
|
+
assert.ok(run);
|
|
53
|
+
assert.ok(run.exitCodes['1']);
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
it('report schema has required run-dir arg', () => {
|
|
57
|
+
const report = getCommandSchema('report');
|
|
58
|
+
assert.ok(report);
|
|
59
|
+
assert.ok(report.args.some(a => a.name === 'run-dir' && a.required));
|
|
60
|
+
});
|
|
61
|
+
|
|
62
|
+
it('schema command has optional command arg', () => {
|
|
63
|
+
const schema = getCommandSchema('schema');
|
|
64
|
+
assert.ok(schema);
|
|
65
|
+
assert.ok(schema.args.some(a => a.name === 'command' && !a.required));
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
it('all exit codes include 0 and 2', () => {
|
|
69
|
+
for (const schema of COMMAND_SCHEMAS) {
|
|
70
|
+
assert.ok(schema.exitCodes['0'], `${schema.name}: missing exit code 0`);
|
|
71
|
+
assert.ok(schema.exitCodes['2'], `${schema.name}: missing exit code 2`);
|
|
72
|
+
}
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
it('every flag has a type field', () => {
|
|
76
|
+
const validTypes = ['string', 'boolean', 'integer', 'path'];
|
|
77
|
+
for (const schema of COMMAND_SCHEMAS) {
|
|
78
|
+
for (const flag of schema.flags) {
|
|
79
|
+
assert.ok(validTypes.includes(flag.type), `${schema.name} flag ${flag.name}: invalid type "${flag.type}"`);
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
it('every arg has a type field', () => {
|
|
85
|
+
const validTypes = ['string', 'path', 'integer'];
|
|
86
|
+
for (const schema of COMMAND_SCHEMAS) {
|
|
87
|
+
for (const arg of schema.args) {
|
|
88
|
+
assert.ok(validTypes.includes(arg.type), `${schema.name} arg ${arg.name}: invalid type "${arg.type}"`);
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
});
|
|
92
|
+
|
|
93
|
+
it('boolean flags have no default (they default to false)', () => {
|
|
94
|
+
for (const schema of COMMAND_SCHEMAS) {
|
|
95
|
+
for (const flag of schema.flags) {
|
|
96
|
+
if (flag.type === 'boolean') {
|
|
97
|
+
assert.equal(flag.default, undefined, `${schema.name} flag ${flag.name}: boolean should not have default`);
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
});
|
|
102
|
+
});
|
|
103
|
+
|
|
104
|
+
describe('getSchemaEnvelope', () => {
|
|
105
|
+
it('returns version and commands', () => {
|
|
106
|
+
const envelope = getSchemaEnvelope();
|
|
107
|
+
assert.ok(envelope.version);
|
|
108
|
+
assert.ok(Array.isArray(envelope.commands));
|
|
109
|
+
assert.equal(envelope.commands.length, COMMAND_SCHEMAS.length);
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
it('version matches SCHEMA_VERSION', () => {
|
|
113
|
+
const envelope = getSchemaEnvelope();
|
|
114
|
+
assert.equal(envelope.version, SCHEMA_VERSION);
|
|
115
|
+
});
|
|
116
|
+
|
|
117
|
+
it('version is semver format', () => {
|
|
118
|
+
assert.match(SCHEMA_VERSION, /^\d+\.\d+\.\d+$/);
|
|
119
|
+
});
|
|
120
|
+
});
|
|
121
|
+
|
|
122
|
+
describe('getCommandSchema', () => {
|
|
123
|
+
it('returns schema for known command', () => {
|
|
124
|
+
const schema = getCommandSchema('run');
|
|
125
|
+
assert.ok(schema);
|
|
126
|
+
assert.equal(schema.name, 'run');
|
|
127
|
+
});
|
|
128
|
+
|
|
129
|
+
it('returns undefined for unknown command', () => {
|
|
130
|
+
const schema = getCommandSchema('nonexistent');
|
|
131
|
+
assert.equal(schema, undefined);
|
|
132
|
+
});
|
|
133
|
+
});
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import { describe, it } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { FlowWalkerError, ErrorCodes, formatError } from '../src/errors.ts';
|
|
4
|
+
|
|
5
|
+
describe('FlowWalkerError', () => {
|
|
6
|
+
it('has code, message, and diagnosticId', () => {
|
|
7
|
+
const err = new FlowWalkerError(ErrorCodes.INVALID_INPUT, 'bad input');
|
|
8
|
+
assert.equal(err.code, 'INVALID_INPUT');
|
|
9
|
+
assert.equal(err.message, 'bad input');
|
|
10
|
+
assert.equal(typeof err.diagnosticId, 'string');
|
|
11
|
+
assert.equal(err.diagnosticId.length, 8);
|
|
12
|
+
});
|
|
13
|
+
|
|
14
|
+
it('includes hint when provided', () => {
|
|
15
|
+
const err = new FlowWalkerError(ErrorCodes.FILE_NOT_FOUND, 'not found', 'check path');
|
|
16
|
+
assert.equal(err.hint, 'check path');
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
it('generates unique diagnosticId per instance', () => {
|
|
20
|
+
const err1 = new FlowWalkerError(ErrorCodes.COMMAND_FAILED, 'a');
|
|
21
|
+
const err2 = new FlowWalkerError(ErrorCodes.COMMAND_FAILED, 'b');
|
|
22
|
+
assert.notEqual(err1.diagnosticId, err2.diagnosticId);
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
it('toJSON returns structured error envelope', () => {
|
|
26
|
+
const err = new FlowWalkerError(ErrorCodes.DEVICE_ERROR, 'device gone', 'check adb');
|
|
27
|
+
const json = err.toJSON();
|
|
28
|
+
assert.equal(json.error.code, 'DEVICE_ERROR');
|
|
29
|
+
assert.equal(json.error.message, 'device gone');
|
|
30
|
+
assert.equal(json.error.hint, 'check adb');
|
|
31
|
+
assert.equal(json.error.diagnosticId, err.diagnosticId);
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
it('toJSON omits hint when not provided', () => {
|
|
35
|
+
const err = new FlowWalkerError(ErrorCodes.COMMAND_FAILED, 'fail');
|
|
36
|
+
const json = err.toJSON();
|
|
37
|
+
assert.equal(json.error.hint, undefined);
|
|
38
|
+
assert.ok(!('hint' in json.error));
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
it('is instanceof Error', () => {
|
|
42
|
+
const err = new FlowWalkerError(ErrorCodes.INVALID_ARGS, 'x');
|
|
43
|
+
assert.ok(err instanceof Error);
|
|
44
|
+
});
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
describe('ErrorCodes', () => {
|
|
48
|
+
it('has all required codes', () => {
|
|
49
|
+
assert.ok(ErrorCodes.INVALID_ARGS);
|
|
50
|
+
assert.ok(ErrorCodes.INVALID_INPUT);
|
|
51
|
+
assert.ok(ErrorCodes.FILE_NOT_FOUND);
|
|
52
|
+
assert.ok(ErrorCodes.FLOW_PARSE_ERROR);
|
|
53
|
+
assert.ok(ErrorCodes.STEP_FAILED);
|
|
54
|
+
assert.ok(ErrorCodes.DEVICE_ERROR);
|
|
55
|
+
assert.ok(ErrorCodes.COMMAND_FAILED);
|
|
56
|
+
});
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
describe('formatError', () => {
|
|
60
|
+
it('formats FlowWalkerError as JSON', () => {
|
|
61
|
+
const err = new FlowWalkerError(ErrorCodes.INVALID_INPUT, 'bad', 'fix it');
|
|
62
|
+
const out = formatError(err, true);
|
|
63
|
+
const parsed = JSON.parse(out);
|
|
64
|
+
assert.equal(parsed.error.code, 'INVALID_INPUT');
|
|
65
|
+
assert.equal(parsed.error.message, 'bad');
|
|
66
|
+
assert.equal(parsed.error.hint, 'fix it');
|
|
67
|
+
assert.ok(parsed.error.diagnosticId);
|
|
68
|
+
});
|
|
69
|
+
|
|
70
|
+
it('formats FlowWalkerError as human text', () => {
|
|
71
|
+
const err = new FlowWalkerError(ErrorCodes.FILE_NOT_FOUND, 'missing', 'check path');
|
|
72
|
+
const out = formatError(err, false);
|
|
73
|
+
assert.ok(out.includes('FILE_NOT_FOUND'));
|
|
74
|
+
assert.ok(out.includes('missing'));
|
|
75
|
+
assert.ok(out.includes('Hint: check path'));
|
|
76
|
+
assert.ok(out.includes(err.diagnosticId));
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
it('wraps unknown errors with COMMAND_FAILED code', () => {
|
|
80
|
+
const out = formatError('something broke', true);
|
|
81
|
+
const parsed = JSON.parse(out);
|
|
82
|
+
assert.equal(parsed.error.code, 'COMMAND_FAILED');
|
|
83
|
+
assert.ok(parsed.error.message.includes('something broke'));
|
|
84
|
+
assert.ok(parsed.error.diagnosticId);
|
|
85
|
+
});
|
|
86
|
+
|
|
87
|
+
it('wraps Error objects with COMMAND_FAILED code', () => {
|
|
88
|
+
const out = formatError(new Error('native error'), true);
|
|
89
|
+
const parsed = JSON.parse(out);
|
|
90
|
+
assert.equal(parsed.error.code, 'COMMAND_FAILED');
|
|
91
|
+
assert.ok(parsed.error.message.includes('native error'));
|
|
92
|
+
});
|
|
93
|
+
});
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { describe, it } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { computeFingerprint, deriveScreenName } from '../src/fingerprint.ts';
|
|
4
|
+
import type { SnapshotElement } from '../src/types.ts';
|
|
5
|
+
|
|
6
|
+
function el(type: string, text: string = '', flutterType?: string): SnapshotElement {
|
|
7
|
+
return { ref: '@e1', type, text, flutterType, enabled: true };
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
describe('computeFingerprint', () => {
|
|
11
|
+
it('returns deterministic hash for identical element sets', () => {
|
|
12
|
+
const elements = [el('button', 'Save'), el('button', 'Cancel'), el('textfield', 'Name')];
|
|
13
|
+
const hash1 = computeFingerprint(elements);
|
|
14
|
+
const hash2 = computeFingerprint(elements);
|
|
15
|
+
assert.equal(hash1, hash2, 'same elements should produce identical hash');
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
it('produces same hash regardless of element order', () => {
|
|
19
|
+
const a = [el('button', 'A'), el('textfield', 'B')];
|
|
20
|
+
const b = [el('textfield', 'B'), el('button', 'A')];
|
|
21
|
+
assert.equal(computeFingerprint(a), computeFingerprint(b));
|
|
22
|
+
});
|
|
23
|
+
|
|
24
|
+
it('ignores text content — same types with different text produce same hash', () => {
|
|
25
|
+
const a = [el('button', 'Save'), el('button', 'Next')];
|
|
26
|
+
const b = [el('button', 'Delete'), el('button', 'Back')];
|
|
27
|
+
assert.equal(computeFingerprint(a), computeFingerprint(b));
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
it('different element types produce different hashes', () => {
|
|
31
|
+
const a = [el('button', 'Go'), el('button', 'Stop')];
|
|
32
|
+
const b = [el('textfield', 'Name'), el('textfield', 'Email')];
|
|
33
|
+
assert.notEqual(computeFingerprint(a), computeFingerprint(b));
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
it('uses flutterType when available for fingerprinting', () => {
|
|
37
|
+
const withFlutter = [el('button', 'Go', 'ElevatedButton')];
|
|
38
|
+
const withoutFlutter = [el('button', 'Go')];
|
|
39
|
+
assert.notEqual(computeFingerprint(withFlutter), computeFingerprint(withoutFlutter));
|
|
40
|
+
});
|
|
41
|
+
|
|
42
|
+
it('bucketing: minor count differences produce same hash', () => {
|
|
43
|
+
// 2 buttons vs 3 buttons both bucket to 2
|
|
44
|
+
const two = [el('button', 'A'), el('button', 'B')];
|
|
45
|
+
const three = [el('button', 'A'), el('button', 'B'), el('button', 'C')];
|
|
46
|
+
assert.equal(computeFingerprint(two), computeFingerprint(three));
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
it('bucketing: large count differences produce different hash', () => {
|
|
50
|
+
// 1 button vs 8 buttons
|
|
51
|
+
const one = [el('button', 'A')];
|
|
52
|
+
const eight = Array.from({ length: 8 }, (_, i) => el('button', String(i)));
|
|
53
|
+
assert.notEqual(computeFingerprint(one), computeFingerprint(eight));
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
it('returns a 12-char hex string', () => {
|
|
57
|
+
const hash = computeFingerprint([el('button', 'X')]);
|
|
58
|
+
assert.match(hash, /^[0-9a-f]{12}$/);
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
it('handles empty element list', () => {
|
|
62
|
+
const hash = computeFingerprint([]);
|
|
63
|
+
assert.match(hash, /^[0-9a-f]{12}$/);
|
|
64
|
+
});
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
describe('deriveScreenName', () => {
|
|
68
|
+
it('uses first short text as screen name', () => {
|
|
69
|
+
const elements = [el('button', 'Settings'), el('button', 'Profile')];
|
|
70
|
+
const name = deriveScreenName(elements);
|
|
71
|
+
assert.equal(name, 'settings');
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
it('converts to kebab-case', () => {
|
|
75
|
+
const elements = [el('button', 'My Profile Page')];
|
|
76
|
+
const name = deriveScreenName(elements);
|
|
77
|
+
assert.equal(name, 'my-profile-page');
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
it('falls back to type-based name when no text', () => {
|
|
81
|
+
const elements = [el('button', ''), el('button', '')];
|
|
82
|
+
const name = deriveScreenName(elements);
|
|
83
|
+
assert.match(name, /^screen-button-2$/);
|
|
84
|
+
});
|
|
85
|
+
});
|