qa360 2.0.11 → 2.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/ai.js +26 -14
- package/dist/commands/ask.d.ts +75 -23
- package/dist/commands/ask.js +413 -265
- package/dist/commands/crawl.d.ts +24 -0
- package/dist/commands/crawl.js +121 -0
- package/dist/commands/history.js +38 -3
- package/dist/commands/init.d.ts +89 -95
- package/dist/commands/init.js +282 -200
- package/dist/commands/run.d.ts +1 -0
- package/dist/core/adapters/playwright-ui.d.ts +45 -7
- package/dist/core/adapters/playwright-ui.js +365 -59
- package/dist/core/assertions/engine.d.ts +51 -0
- package/dist/core/assertions/engine.js +530 -0
- package/dist/core/assertions/index.d.ts +11 -0
- package/dist/core/assertions/index.js +11 -0
- package/dist/core/assertions/types.d.ts +121 -0
- package/dist/core/assertions/types.js +37 -0
- package/dist/core/crawler/index.d.ts +57 -0
- package/dist/core/crawler/index.js +281 -0
- package/dist/core/crawler/journey-generator.d.ts +49 -0
- package/dist/core/crawler/journey-generator.js +412 -0
- package/dist/core/crawler/page-analyzer.d.ts +88 -0
- package/dist/core/crawler/page-analyzer.js +709 -0
- package/dist/core/crawler/selector-generator.d.ts +34 -0
- package/dist/core/crawler/selector-generator.js +240 -0
- package/dist/core/crawler/types.d.ts +353 -0
- package/dist/core/crawler/types.js +6 -0
- package/dist/core/generation/crawler-pack-generator.d.ts +44 -0
- package/dist/core/generation/crawler-pack-generator.js +231 -0
- package/dist/core/generation/index.d.ts +2 -0
- package/dist/core/generation/index.js +2 -0
- package/dist/core/index.d.ts +3 -0
- package/dist/core/index.js +4 -0
- package/dist/core/types/pack-v1.d.ts +90 -0
- package/dist/index.js +6 -2
- package/examples/accessibility.yml +39 -16
- package/examples/api-basic.yml +19 -14
- package/examples/complete.yml +134 -42
- package/examples/fullstack.yml +66 -31
- package/examples/security.yml +47 -15
- package/examples/ui-basic.yml +16 -12
- package/package.json +3 -2
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* QA360 Assertions Engine - Type Definitions
|
|
3
|
+
*
|
|
4
|
+
* Comprehensive assertions for UI testing
|
|
5
|
+
*/
|
|
6
|
+
/**
|
|
7
|
+
* Assertion error
|
|
8
|
+
*/
|
|
9
|
+
export class AssertionError extends Error {
|
|
10
|
+
assertion;
|
|
11
|
+
actual;
|
|
12
|
+
expected;
|
|
13
|
+
constructor(message, assertion, actual, expected) {
|
|
14
|
+
super(message);
|
|
15
|
+
this.name = 'AssertionError';
|
|
16
|
+
this.assertion = assertion;
|
|
17
|
+
this.actual = actual;
|
|
18
|
+
this.expected = expected;
|
|
19
|
+
}
|
|
20
|
+
toString() {
|
|
21
|
+
const expected = this.expected !== undefined ? ` expected: ${JSON.stringify(this.expected)}` : '';
|
|
22
|
+
const actual = this.actual !== undefined ? ` actual: ${JSON.stringify(this.actual)}` : '';
|
|
23
|
+
return `${this.message}${expected}${actual}`;
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
/**
 * Soft assertion collection
 *
 * Aggregates the results of a batch of soft assertions into a single error;
 * the message lists one line per failed assertion.
 */
export class SoftAssertionError extends Error {
    results;
    constructor(results) {
        const failed = results.filter((result) => !result.passed);
        const lines = failed.map((failure) => {
            // Prefer the runtime error text, then the assertion's own message,
            // then its type as a last resort.
            const detail = failure.error || failure.assertion.message || failure.assertion.type;
            return ` - ${detail}`;
        });
        super(`${failed.length} soft assertion(s) failed:\n${lines.join('\n')}`);
        this.name = 'SoftAssertionError';
        this.results = results;
    }
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* QA360 Web Crawler
|
|
3
|
+
*
|
|
4
|
+
* Automatically discovers and analyzes web applications for E2E test generation
|
|
5
|
+
*/
|
|
6
|
+
import type { CrawlOptions, CrawlResult, PageDefinition, CrawlProgress, CrawlEventHandler } from './types.js';
|
|
7
|
+
/**
 * Web Crawler class
 *
 * Breadth-first crawler: starts at `CrawlOptions.baseUrl`, analyzes each
 * reachable same-origin page, and produces a `CrawlResult` (site map, user
 * journeys, forms, accessibility issues).
 */
export declare class WebCrawler {
    /** Effective crawl options: user-supplied values merged over defaults. */
    private options;
    /** Single optional handler notified of crawl progress events. */
    private eventHandler?;
    /** Per-page analyzer used to inspect each visited URL. */
    private analyzer;
    /** Normalized URLs already visited or queued (dedupe set). */
    private visited;
    /** Pending URLs (with their depth) still to crawl. */
    private queue;
    /** Pages successfully analyzed so far. */
    private pages;
    /** Per-page failures collected during the crawl. */
    private errors;
    /** Non-fatal notices, e.g. pages skipped for depth. */
    private warnings;
    /** Timestamp (epoch ms) of when crawl() started. */
    private startTime;
    /** Built-in option defaults applied before user options. */
    private readonly defaultOptions;
    constructor(options: CrawlOptions);
    /**
     * Set event handler for progress updates.
     * A later call replaces any previously registered handler.
     */
    on(event: CrawlEventHandler): void;
    /**
     * Emit event to handler (no-op when none is registered)
     */
    private emit;
    /**
     * Normalize URL for comparison (origin + pathname; hash, query string
     * and trailing slash removed)
     */
    private normalizeUrl;
    /**
     * Check if URL should be excluded (exclude patterns, asset/API links,
     * or a different origin than baseUrl)
     */
    private isExcluded;
    /**
     * Start crawling; resolves with the aggregated result. Per-page failures
     * are collected in the result rather than thrown.
     */
    crawl(): Promise<CrawlResult>;
    /**
     * Build site map from crawled pages
     */
    private buildSiteMap;
}
|
|
47
|
+
/**
 * Crawl a web application and return results
 *
 * One-shot convenience wrapper around WebCrawler with no progress reporting.
 */
export declare function crawlWebsite(options: CrawlOptions): Promise<CrawlResult>;
/**
 * Crawl a website with progress callback
 *
 * `onProgress` is invoked for page-start and page-complete events; the
 * `page` argument is provided only on page completion.
 */
export declare function crawlWebsiteWithProgress(options: CrawlOptions, onProgress: (progress: CrawlProgress, page?: PageDefinition) => void): Promise<CrawlResult>;
|
|
55
|
+
export * from './types.js';
|
|
56
|
+
export { generateSelector, generateSelectorFromElement } from './selector-generator.js';
|
|
57
|
+
export { generateJourneys } from './journey-generator.js';
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* QA360 Web Crawler
|
|
3
|
+
*
|
|
4
|
+
* Automatically discovers and analyzes web applications for E2E test generation
|
|
5
|
+
*/
|
|
6
|
+
import { PageAnalyzer } from './page-analyzer.js';
|
|
7
|
+
import { JourneyGenerator } from './journey-generator.js';
|
|
8
|
+
/**
 * Web Crawler class
 *
 * Breadth-first crawler: starting from options.baseUrl it analyzes each page
 * with PageAnalyzer, follows same-origin links up to maxDepth/maxPages, then
 * builds a site map and derives user journeys from it.
 */
export class WebCrawler {
    options;
    eventHandler;
    analyzer;
    // Crawling state
    visited = new Set();
    queue = [];
    pages = [];
    errors = [];
    warnings = [];
    startTime = 0;
    // Default options
    defaultOptions = {
        baseUrl: '',
        maxDepth: 3,
        maxPages: 50,
        followLinks: true,
        discoverForms: true,
        discoverButtons: true,
        excludePatterns: [],
        timeout: 30000,
        screenshots: false,
        headless: true,
        waitForNetworkIdle: true,
    };
    constructor(options) {
        // User-supplied options win over the defaults.
        this.options = { ...this.defaultOptions, ...options };
        this.analyzer = new PageAnalyzer(this.options);
    }
    /**
     * Set event handler for progress updates.
     * Only one handler is kept; a later call replaces the previous one.
     */
    on(event) {
        this.eventHandler = event;
    }
    /**
     * Emit event to handler (no-op when no handler is registered)
     */
    emit(event, data) {
        this.eventHandler?.(event, data);
    }
    /**
     * Normalize URL for comparison: origin + pathname with the hash, query
     * string and trailing slash removed. Unparseable input is returned as-is.
     */
    normalizeUrl(url) {
        try {
            const parsed = new URL(url);
            // Remove hash and trailing slash
            return `${parsed.origin}${parsed.pathname.replace(/\/$/, '')}`;
        }
        catch {
            return url;
        }
    }
    /**
     * Check if URL should be excluded from the crawl.
     *
     * A URL is excluded when it matches a user-supplied excludePatterns
     * regex, one of the built-in excludes (binary assets, API endpoints,
     * mailto:/tel:/javascript: links, any URL containing '#'), or lives on a
     * different origin than baseUrl. Unparseable URLs are excluded.
     */
    isExcluded(url) {
        const normalized = this.normalizeUrl(url);
        // Check user-supplied exclude patterns against both URL forms.
        for (const pattern of this.options.excludePatterns || []) {
            try {
                const regex = new RegExp(pattern);
                if (regex.test(normalized) || regex.test(url)) {
                    return true;
                }
            }
            catch {
                // Invalid regex, skip
            }
        }
        // Always exclude certain patterns.
        // NOTE(review): the '#' entry excludes ANY URL containing a fragment,
        // even though normalizeUrl strips fragments — confirm this is intended
        // (pages linked only via "/path#section" will never be crawled).
        const defaultExcludes = [
            /\.pdf$/,
            /\.zip$/,
            /\.jpg$/,
            /\.png$/,
            /\.gif$/,
            /\.svg$/,
            /\/api\//,
            /\/graphql/,
            /mailto:/,
            /tel:/,
            'javascript:',
            '#',
        ];
        for (const exclude of defaultExcludes) {
            if (typeof exclude === 'string' ? url.includes(exclude) : exclude.test(url)) {
                return true;
            }
        }
        // Exclude external URLs (different origin than baseUrl).
        try {
            const baseOrigin = new URL(this.options.baseUrl).origin;
            const urlOrigin = new URL(url).origin;
            if (urlOrigin !== baseOrigin) {
                return true;
            }
        }
        catch {
            return true;
        }
        return false;
    }
    /**
     * Start crawling.
     *
     * Resets all state, then performs a breadth-first traversal from baseUrl
     * bounded by maxPages and maxDepth. Per-page analyzer failures are
     * collected into `errors` and reported in the result rather than thrown.
     */
    async crawl() {
        this.startTime = Date.now();
        this.visited.clear();
        this.queue = [];
        this.pages = [];
        this.errors = [];
        this.warnings = [];
        // Start with base URL
        const baseUrl = this.normalizeUrl(this.options.baseUrl);
        this.queue.push({ url: this.options.baseUrl, depth: 0 });
        this.visited.add(baseUrl);
        // Crawl loop
        const maxPages = this.options.maxPages ?? 50;
        const maxDepth = this.options.maxDepth ?? 3;
        while (this.queue.length > 0 && this.pages.length < maxPages) {
            const { url, depth } = this.queue.shift();
            // Defensive depth check: links are only enqueued while
            // depth < maxDepth, so this should not normally trigger.
            if (depth > maxDepth) {
                this.warnings.push(`Skipped ${url} - max depth reached`);
                continue;
            }
            // Emit progress
            this.emit('page-start', {
                currentPage: url,
                completed: this.pages.length,
                total: Math.min(maxPages, this.pages.length + this.queue.length),
                progress: (this.pages.length / maxPages) * 100,
                depth,
            });
            // Analyze page
            try {
                const page = await this.analyzer.analyze(url, depth);
                this.pages.push(page);
                this.emit('page-complete', page);
                // Queue internal, non-excluded links we have not seen yet.
                if ((this.options.followLinks ?? true) && depth < maxDepth) {
                    for (const link of page.elements.links) {
                        if (!link.internal) {
                            continue;
                        }
                        // Normalize once; the set stores normalized URLs but
                        // the queue keeps the original (fragment/query intact).
                        const normalized = this.normalizeUrl(link.url);
                        if (!this.visited.has(normalized) && !this.isExcluded(link.url)) {
                            this.visited.add(normalized);
                            this.queue.push({ url: link.url, depth: depth + 1 });
                        }
                    }
                }
            }
            catch (error) {
                const errorMsg = error instanceof Error ? error.message : String(error);
                this.errors.push({ page: url, error: errorMsg });
                this.emit('page-error', { page: url, error: errorMsg });
            }
        }
        // Clean up analyzer
        await this.analyzer.cleanup();
        // Build site map
        const siteMap = this.buildSiteMap();
        // Generate user journeys
        const journeyGenerator = new JourneyGenerator(siteMap);
        const userJourneys = journeyGenerator.generateJourneys();
        // Emit journeys as discovered
        for (const journey of userJourneys) {
            this.emit('journey-discovered', journey);
        }
        // Report pages with an accessibility score below 90; minor-impact
        // violations are filtered out of the report.
        const accessibilityIssues = this.pages
            .filter(p => p.accessibility && p.accessibility.score < 90)
            .map(p => ({
            page: p.url,
            score: p.accessibility.score,
            violations: p.accessibility.violations.filter(v => v.impact !== 'minor'),
        }));
        // Collect all forms
        const forms = this.pages.flatMap(p => p.elements.forms);
        const duration = Date.now() - this.startTime;
        this.emit('complete', { duration, pagesFound: this.pages.length });
        return {
            // "Success" = nothing failed, or at least one page was crawled.
            success: this.errors.length === 0 || this.pages.length > 0,
            siteMap,
            userJourneys,
            forms,
            accessibilityIssues,
            warnings: this.warnings,
            errors: this.errors,
        };
    }
    /**
     * Build site map from crawled pages: groups pages by depth, detects
     * orphans (depth > 0 pages no internal link points to), tallies page
     * types, links and forms, and computes crawl statistics.
     */
    buildSiteMap() {
        const baseUrl = this.options.baseUrl;
        // Group pages by depth
        const pagesByDepth = {};
        for (const page of this.pages) {
            if (!pagesByDepth[page.depth]) {
                pagesByDepth[page.depth] = [];
            }
            pagesByDepth[page.depth].push(page);
        }
        // Find orphan pages (no internal links to them)
        const linkedUrls = new Set();
        for (const page of this.pages) {
            for (const link of page.elements.links) {
                if (link.internal) {
                    linkedUrls.add(this.normalizeUrl(link.url));
                }
            }
        }
        const orphans = this.pages.filter(p => !linkedUrls.has(this.normalizeUrl(p.url)) && p.depth > 0);
        // Page type distribution
        const pageTypeDistribution = {};
        for (const page of this.pages) {
            pageTypeDistribution[page.pageType] = (pageTypeDistribution[page.pageType] || 0) + 1;
        }
        // Total counts
        const totalLinks = this.pages.reduce((sum, p) => sum + p.elements.links.length, 0);
        const totalForms = this.pages.reduce((sum, p) => sum + p.elements.forms.length, 0);
        // Calculate stats. Guard the empty-crawl case: Math.max() with no
        // arguments returns -Infinity, which would corrupt the metadata when
        // every page failed to load.
        const avgLoadTime = this.pages.length > 0
            ? this.pages.reduce((sum, p) => sum + p.loadTime, 0) / this.pages.length
            : 0;
        const maxDepth = this.pages.length > 0
            ? Math.max(...this.pages.map(p => p.depth))
            : 0;
        return {
            baseUrl,
            pages: this.pages,
            pagesByDepth,
            orphans,
            pageTypeDistribution,
            totalLinks,
            totalForms,
            metadata: {
                pagesCrawled: this.pages.length,
                pagesSkipped: this.warnings.length,
                pagesFailed: this.errors.length,
                duration: Date.now() - this.startTime,
                avgLoadTime: Math.round(avgLoadTime),
                maxDepth,
            },
        };
    }
}
|
|
259
|
+
/**
 * Crawl a web application and return results.
 *
 * Thin convenience wrapper: constructs a WebCrawler for the given options
 * and runs a single crawl with no progress reporting.
 */
export async function crawlWebsite(options) {
    return new WebCrawler(options).crawl();
}
|
|
266
|
+
/**
 * Crawl a website with progress callback.
 *
 * Forwards 'page-start' and 'page-complete' events to `onProgress`; all
 * other events are ignored.
 *
 * NOTE(review): for 'page-complete' the emitted payload is the analyzed page
 * itself and it is forwarded as BOTH arguments, so the `progress` position
 * receives a PageDefinition for that event — confirm callers expect this.
 */
export async function crawlWebsiteWithProgress(options, onProgress) {
    const crawler = new WebCrawler(options);
    crawler.on((event, payload) => {
        const isComplete = event === 'page-complete';
        if (event === 'page-start' || isComplete) {
            onProgress(payload, isComplete ? payload : undefined);
        }
    });
    return crawler.crawl();
}
|
|
278
|
+
// Re-export types
|
|
279
|
+
export * from './types.js';
|
|
280
|
+
export { generateSelector, generateSelectorFromElement } from './selector-generator.js';
|
|
281
|
+
export { generateJourneys } from './journey-generator.js';
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* QA360 Journey Generator
|
|
3
|
+
*
|
|
4
|
+
* Automatically generates user journeys from crawled pages
|
|
5
|
+
*/
|
|
6
|
+
import type { SiteMap, UserJourney } from './types.js';
|
|
7
|
+
/**
 * Journey Generator class
 *
 * Derives candidate user journeys (authentication, navigation, form
 * submission, search, transactions) from a crawled SiteMap.
 */
export declare class JourneyGenerator {
    /** Site map produced by the crawler; the source of pages and elements. */
    private siteMap;
    constructor(siteMap: SiteMap);
    /**
     * Generate all user journeys from the site map
     */
    generateJourneys(): UserJourney[];
    /**
     * Generate authentication journeys (login, signup)
     */
    private generateAuthJourneys;
    /**
     * Generate login flow steps
     */
    private generateLoginSteps;
    /**
     * Generate signup flow steps
     */
    private generateSignupSteps;
    /**
     * Generate navigation journeys
     */
    private generateNavigationJourneys;
    /**
     * Generate form submission journeys
     */
    private generateFormJourneys;
    /**
     * Generate search journeys
     */
    private generateSearchJourneys;
    /**
     * Generate transaction journeys (checkout, booking, etc.)
     */
    private generateTransactionJourneys;
}
|
|
46
|
+
/**
 * Generate journeys from a site map
 *
 * Presumably a stateless convenience wrapper around JourneyGenerator —
 * confirm against the implementation.
 */
export declare function generateJourneys(siteMap: SiteMap): UserJourney[];
|