datagrok-tools 6.1.14 → 6.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/bin/commands/report.js +61 -0
- package/bin/commands/test.js +43 -2
- package/bin/utils/playwright-runner.js +259 -0
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,13 @@
|
|
|
1
1
|
# Datagrok-tools changelog
|
|
2
2
|
|
|
3
|
+
## 6.2.1 (2026-05-05)
|
|
4
|
+
|
|
5
|
+
* Reports: `grok report attach <ticket> <file>` — upload a file as a JIRA issue attachment via REST v2 multipart POST.
|
|
6
|
+
|
|
7
|
+
## 6.2.0 (2026-05-04)
|
|
8
|
+
|
|
9
|
+
* `grok test` — Playwright support: when a package's `package.json` declares `"playwrightTests": "<path>"`, `grok test` runs `npx playwright test` against that directory in addition to the existing Puppeteer pass and merges results into a single `test-report.csv`. Auth is unified with the Puppeteer pass (dev key from `~/.grok/config.yaml` → session token → cookie + `localStorage` injection — no login form). Optional `DATAGROK_DEV_KEY_2` env var enables a second-user identity for specs that need it (`DATAGROK_AUTH_TOKEN_2` exposed to specs). New `--skip-playwright` flag opts out of the Playwright pass for a single run.
|
|
10
|
+
|
|
3
11
|
## 6.1.14 (2026-05-01)
|
|
4
12
|
|
|
5
13
|
* Reports: `grok report comment` now converts Markdown body to JIRA wiki markup before POSTing, fixing rendered headings/list/HTML-entity mismatches in JIRA UI.
|
package/bin/commands/report.js
CHANGED
|
@@ -29,6 +29,8 @@ async function report(args) {
|
|
|
29
29
|
return await handleComment(args);
|
|
30
30
|
case 'label':
|
|
31
31
|
return await handleLabel(args);
|
|
32
|
+
case 'attach':
|
|
33
|
+
return await handleAttach(args);
|
|
32
34
|
default:
|
|
33
35
|
return false;
|
|
34
36
|
}
|
|
@@ -590,6 +592,65 @@ async function handleComment(args) {
|
|
|
590
592
|
return false;
|
|
591
593
|
}
|
|
592
594
|
}
|
|
595
|
+
async function handleAttach(args) {
  // `grok report attach <ticket> <file>`: upload a file as a JIRA issue
  // attachment via the REST v2 multipart endpoint.
  // Returns true on success, false on any validation/transport/HTTP failure.
  const ticket = args._[2];
  const filePath = args._[3];
  if (!ticket || !filePath) {
    color.error('Usage: grok report attach <ticket-key> <file> [--jira-url <url>]');
    return false;
  }
  if (!_fs.default.existsSync(filePath)) {
    color.error(`File not found: ${filePath}`);
    return false;
  }
  const user = process.env.JIRA_USER;
  const token = process.env.JIRA_TOKEN;
  if (!user || !token) {
    color.error('JIRA_USER and JIRA_TOKEN env vars are required for `grok report attach`.');
    return false;
  }
  const base = resolveJiraBase(args);
  const url = `${base}/rest/api/2/issue/${encodeURIComponent(ticket)}/attachments`;

  // node-fetch v2 has no built-in FormData and the codebase doesn't depend on
  // form-data. curl is universally available in deploy targets and handles
  // multipart upload natively. JIRA REST v2 needs the X-Atlassian-Token
  // anti-CSRF header; the field name must be `file`.
  const {
    spawnSync
  } = require('child_process');
  // Credentials go through a curl config read from stdin (`-K -`) rather than
  // `-u user:token` argv, so the token is not visible to `ps` on shared hosts.
  // The `-F` path is quoted (with `"` and `\` escaped) so commas/semicolons in
  // the file name aren't parsed as curl form options.
  const quotedPath = filePath.replace(/([\\"])/g, '\\$1');
  const r = spawnSync('curl', ['-sS', '-K', '-', '-X', 'POST', '-H', 'X-Atlassian-Token: no-check', '-F', `file=@"${quotedPath}"`, '-w', '\n%{http_code}\n', url], {
    input: `user = "${`${user}:${token}`.replace(/([\\"])/g, '\\$1')}"\n`,
    encoding: 'utf8',
    timeout: 120_000,
    maxBuffer: 4 * 1024 * 1024
  });
  if (r.error) {
    color.error(`curl spawn failed: ${r.error.message}`);
    return false;
  }
  if (r.status !== 0) {
    color.error(`curl exit ${r.status}: ${(r.stderr || '').slice(0, 400)}`);
    return false;
  }
  // Stdout layout from curl `-w`: <body>\n<http_code>\n
  const {
    httpCode,
    body
  } = parseCurlWriteOut(r.stdout || '');
  if (httpCode !== '200' && httpCode !== '201') {
    color.error(`JIRA attachment POST failed (HTTP ${httpCode}): ${body.slice(0, 400)}`);
    return false;
  }
  // JIRA returns a JSON array of created attachment metadata; surface the id
  // on stdout so scripts can capture it. Tolerate non-JSON bodies.
  let id = null;
  try {
    const parsed = JSON.parse(body);
    if (Array.isArray(parsed) && parsed[0] && (parsed[0].id || parsed[0].Id)) id = String(parsed[0].id || parsed[0].Id);
  } catch (_) {/* ignored — JIRA usually returns JSON, but tolerate */}
  const fileName = _path.default.basename(filePath);
  color.success(`Attached ${fileName} to ${ticket}` + (id ? ` (id ${id})` : ''));
  if (id) console.log(id);
  return true;
}

/**
 * Splits curl `-w '\n%{http_code}\n'` stdout into `{httpCode, body}`.
 * When the response body is empty (e.g. HTTP 204) the trimmed output is just
 * the status code: the previous `lastIndexOf('\n')` split misread that case as
 * an empty code with the status line as the body.
 */
function parseCurlWriteOut(stdout) {
  const out = stdout.trim();
  const lastNl = out.lastIndexOf('\n');
  if (lastNl < 0) return {
    httpCode: out,
    body: ''
  };
  return {
    httpCode: out.slice(lastNl + 1).trim(),
    body: out.slice(0, lastNl)
  };
}
|
|
593
654
|
async function handleLabel(args) {
|
|
594
655
|
const ticket = args._[2];
|
|
595
656
|
const labels = args._.slice(3).filter(s => s.length > 0);
|
package/bin/commands/test.js
CHANGED
|
@@ -19,13 +19,14 @@ var _build = require("./build");
|
|
|
19
19
|
var Papa = _interopRequireWildcard(require("papaparse"));
|
|
20
20
|
var _testUtils = _interopRequireWildcard(require("../utils/test-utils"));
|
|
21
21
|
var testUtils = _testUtils;
|
|
22
|
+
var playwrightRunner = _interopRequireWildcard(require("../utils/playwright-runner"));
|
|
22
23
|
function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r = new WeakMap(), n = new WeakMap(); return (_interopRequireWildcard = function (e, t) { if (!t && e && e.__esModule) return e; var o, i, f = { __proto__: null, default: e }; if (null === e || "object" != typeof e && "function" != typeof e) return f; if (o = t ? n : r) { if (o.has(e)) return o.get(e); o.set(e, f); } for (const t in e) "default" !== t && {}.hasOwnProperty.call(e, t) && ((i = (o = Object.defineProperty) && Object.getOwnPropertyDescriptor(e, t)) && (i.get || i.set) ? o(f, t, i) : f[t] = e[t]); return f; })(e, t); }
|
|
23
24
|
/* eslint-disable max-len */
|
|
24
25
|
|
|
25
26
|
const execAsync = (0, _util.promisify)(_child_process.exec);
|
|
26
27
|
const execFileAsync = (0, _util.promisify)(_child_process.execFile);
|
|
27
28
|
const testInvocationTimeout = 3600000;
|
|
28
|
-
const availableCommandOptions = ['host', 'package', 'csv', 'gui', 'catchUnhandled', 'platform', 'core', 'report', 'skip-build', 'skip-publish', 'path', 'record', 'verbose', 'benchmark', 'category', 'test', 'stress-test', 'link', 'tag', 'ci-cd', 'debug', 'no-retry', 'dartium', 'f', 'params', 'logfailed'];
|
|
29
|
+
const availableCommandOptions = ['host', 'package', 'csv', 'gui', 'catchUnhandled', 'platform', 'core', 'report', 'skip-build', 'skip-publish', 'path', 'record', 'verbose', 'benchmark', 'category', 'test', 'stress-test', 'link', 'tag', 'ci-cd', 'debug', 'no-retry', 'dartium', 'f', 'params', 'logfailed', 'skip-playwright'];
|
|
29
30
|
const curDir = process.cwd();
|
|
30
31
|
|
|
31
32
|
/** Expands camelCase to space-separated lowercase: "dataManipulation" → "data manipulation" */
|
|
@@ -224,7 +225,47 @@ async function test(args) {
|
|
|
224
225
|
}
|
|
225
226
|
}
|
|
226
227
|
process.env.TARGET_PACKAGE = packageName;
|
|
227
|
-
|
|
228
|
+
let res;
|
|
229
|
+
try {
|
|
230
|
+
res = await runTesting(args);
|
|
231
|
+
} catch (e) {
|
|
232
|
+
// Don't let Puppeteer-side failures (login error, browser crash) skip the
|
|
233
|
+
// Playwright pass — the two suites have independent auth and runtime paths,
|
|
234
|
+
// and we want at least one half of the run reported.
|
|
235
|
+
color.error(`Puppeteer pass failed: ${e?.message || e}`);
|
|
236
|
+
res = {
|
|
237
|
+
failed: true,
|
|
238
|
+
verbosePassed: '',
|
|
239
|
+
verboseSkipped: '',
|
|
240
|
+
verboseFailed: `Puppeteer pass failed: ${e?.message || e}\n`,
|
|
241
|
+
passedAmount: 0,
|
|
242
|
+
skippedAmount: 0,
|
|
243
|
+
failedAmount: 1,
|
|
244
|
+
csv: '',
|
|
245
|
+
error: String(e?.message || e)
|
|
246
|
+
};
|
|
247
|
+
}
|
|
248
|
+
if (!args['skip-playwright']) {
|
|
249
|
+
const ptDir = playwrightRunner.hasPlaywrightTests(curDir);
|
|
250
|
+
if (ptDir) {
|
|
251
|
+
const ptRes = await playwrightRunner.runPlaywrightTests(curDir, ptDir, args, args.host ?? '');
|
|
252
|
+
// mergeBrowsersResults assumes both inputs have a header row; an empty
|
|
253
|
+
// Puppeteer CSV (filter matched zero tests) breaks the merge. Take the
|
|
254
|
+
// Playwright CSV verbatim in that case, otherwise merge.
|
|
255
|
+
if (!res.csv || res.csv.trim().split('\n').length < 2) {
|
|
256
|
+
res.csv = ptRes.csv;
|
|
257
|
+
res.passedAmount += ptRes.passedAmount;
|
|
258
|
+
res.failedAmount += ptRes.failedAmount;
|
|
259
|
+
res.skippedAmount += ptRes.skippedAmount;
|
|
260
|
+
res.failed = res.failed || ptRes.failed;
|
|
261
|
+
res.verbosePassed = (res.verbosePassed || '') + ptRes.verbosePassed;
|
|
262
|
+
res.verboseFailed = (res.verboseFailed || '') + ptRes.verboseFailed;
|
|
263
|
+
res.verboseSkipped = (res.verboseSkipped || '') + ptRes.verboseSkipped;
|
|
264
|
+
} else if (ptRes.csv && ptRes.csv.trim().split('\n').length >= 2) {
|
|
265
|
+
res = await (0, _testUtils.mergeBrowsersResults)([res, ptRes]);
|
|
266
|
+
}
|
|
267
|
+
}
|
|
268
|
+
}
|
|
228
269
|
if (args.csv) {
|
|
229
270
|
res.csv = (0, _testUtils.addColumnToCsv)(res.csv, 'stress_test', args['stress-test'] ?? false);
|
|
230
271
|
res.csv = (0, _testUtils.addColumnToCsv)(res.csv, 'benchmark', args.benchmark ?? false);
|
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.hasPlaywrightTests = hasPlaywrightTests;
|
|
8
|
+
exports.runPlaywrightTests = runPlaywrightTests;
|
|
9
|
+
var _child_process = require("child_process");
|
|
10
|
+
var _fs = _interopRequireDefault(require("fs"));
|
|
11
|
+
var _path = _interopRequireDefault(require("path"));
|
|
12
|
+
var _papaparse = _interopRequireDefault(require("papaparse"));
|
|
13
|
+
var color = _interopRequireWildcard(require("./color-utils"));
|
|
14
|
+
var testUtils = _interopRequireWildcard(require("./test-utils"));
|
|
15
|
+
function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r = new WeakMap(), n = new WeakMap(); return (_interopRequireWildcard = function (e, t) { if (!t && e && e.__esModule) return e; var o, i, f = { __proto__: null, default: e }; if (null === e || "object" != typeof e && "function" != typeof e) return f; if (o = t ? n : r) { if (o.has(e)) return o.get(e); o.set(e, f); } for (const t in e) "default" !== t && {}.hasOwnProperty.call(e, t) && ((i = (o = Object.defineProperty) && Object.getOwnPropertyDescriptor(e, t)) && (i.get || i.set) ? o(f, t, i) : f[t] = e[t]); return f; })(e, t); }
|
|
16
|
+
/**
 * Returns the absolute path of the package's declared Playwright test
 * directory, or null when the package doesn't opt in: no package.json,
 * unparsable JSON, no non-empty `playwrightTests` string, or the declared
 * path doesn't exist on disk.
 */
function hasPlaywrightTests(pkgDir) {
  const manifestPath = _path.default.join(pkgDir, 'package.json');
  if (!_fs.default.existsSync(manifestPath)) return null;
  let manifest = null;
  try {
    manifest = JSON.parse(_fs.default.readFileSync(manifestPath, 'utf-8'));
  } catch {
    return null;
  }
  const declared = manifest.playwrightTests;
  if (typeof declared !== 'string' || declared.length === 0) return null;
  const resolved = _path.default.resolve(pkgDir, declared);
  return _fs.default.existsSync(resolved) ? resolved : null;
}
|
|
31
|
+
/**
 * Recursively walks Playwright's nested suite tree and appends one report row
 * per executed test to `rows` (mutated in place). `category` is the spec
 * file's directory relative to `testDir`, normalized to forward slashes.
 */
function flattenSuites(suites, testDir, pkgName, owner, verbose, rows) {
  if (!suites) return;
  const stamp = new Date().toISOString();
  for (const suite of suites) {
    for (const spec of suite.specs ?? []) {
      const specFile = spec.file || suite.file || '';
      const absoluteSpec = _path.default.isAbsolute(specFile) ? specFile : _path.default.resolve(testDir, specFile);
      const category = _path.default.relative(testDir, _path.default.dirname(absoluteSpec)).replace(/\\/g, '/');
      for (const entry of spec.tests ?? []) {
        // Playwright keeps one result per retry; the last one is the verdict.
        const results = entry.results ?? [];
        const outcome = results[results.length - 1] || undefined;
        if (!outcome) continue;
        const skipped = outcome.status === 'skipped';
        const passed = outcome.status === 'passed';
        let failureText = '';
        if (!passed && !skipped && outcome.errors && outcome.errors.length > 0) failureText = outcome.errors.map(e => e.message || e.stack || '').filter(s => s.length > 0).join('\n');
        let capturedLogs = '';
        if (verbose) {
          const outText = (outcome.stdout || []).map(c => c.text || '').join('');
          const errText = (outcome.stderr || []).map(c => c.text || '').join('');
          capturedLogs = [outText, errText].filter(s => s.length > 0).join('\n');
        }
        rows.push({
          date: stamp,
          category: category,
          name: spec.title,
          success: passed,
          result: failureText,
          ms: Math.round(outcome.duration || 0),
          skipped: skipped,
          logs: capturedLogs,
          owner: owner,
          package: pkgName,
          widgetsDifference: '',
          flaking: false
        });
      }
    }
    if (suite.suites) flattenSuites(suite.suites, testDir, pkgName, owner, verbose, rows);
  }
}
|
|
74
|
+
/**
 * Serializes report rows to CSV. Column order must match what runTesting()
 * actually serializes (see test-utils.ts:485 — `setOrder` is a no-op for the
 * CSV writer, so the order is the natural order Dart's package-test emits).
 * mergeBrowsersResults uses the first CSV's header, so any drift here
 * misaligns Playwright rows.
 */
function rowsToCsv(rows) {
  const columns = ['date', 'category', 'name', 'success', 'ms', 'skipped', 'owner', 'package', 'flaking', 'result', 'logs', 'widgetsDifference'];
  const data = rows.map(row => columns.map(column => row[column]));
  return _papaparse.default.unparse({
    fields: columns,
    data: data
  });
}
|
|
85
|
+
/**
 * Runs `npx playwright test` for a package and converts the JSON report into
 * the same result shape the Puppeteer pass produces, so the caller can merge
 * the two. Auth mirrors the Puppeteer pass: dev key (via testUtils.getDevKey)
 * → session token → exposed to specs as DATAGROK_AUTH_TOKEN (plus an optional
 * second identity from DATAGROK_DEV_KEY_2 as DATAGROK_AUTH_TOKEN_2).
 *
 * @param pkgDir  package root (cwd for the spawned `npx`)
 * @param testDir directory declared in package.json `playwrightTests`
 * @param args    parsed CLI args (gui, verbose, test, category, no-retry, ...)
 * @param hostKey host alias passed to testUtils.getDevKey ('' = default host)
 * @returns result object: {failed, passedAmount, failedAmount, skippedAmount,
 *          verbosePassed, verboseFailed, verboseSkipped, csv}
 */
async function runPlaywrightTests(pkgDir, testDir, args, hostKey) {
  // Template for every early-exit return; spread and override per case.
  const empty = {
    failed: false,
    verbosePassed: '',
    verboseSkipped: '',
    verboseFailed: '',
    passedAmount: 0,
    skippedAmount: 0,
    failedAmount: 0,
    csv: ''
  };
  // Stage 1: resolve host → {api url, dev key} from ~/.grok config.
  let url;
  let key;
  try {
    ({
      url,
      key
    } = testUtils.getDevKey(hostKey));
  } catch (e) {
    color.error(`Playwright: cannot resolve host '${hostKey}': ${e.message || e}`);
    return {
      ...empty,
      failed: true,
      failedAmount: 1,
      verboseFailed: `Playwright: ${e.message || e}\n`
    };
  }
  // Stage 2: exchange the dev key for a session token.
  let token;
  try {
    token = await testUtils.getToken(url, key);
  } catch (e) {
    color.error(`Playwright: cannot exchange dev key for token: ${e.message || e}`);
    return {
      ...empty,
      failed: true,
      failedAmount: 1,
      verboseFailed: `Playwright: ${e.message || e}\n`
    };
  }
  // Stage 3: resolve the browser-facing URL; fall back to stripping the
  // trailing /api from the API url if the platform call fails.
  let webUrl;
  try {
    webUrl = await testUtils.getWebUrl(url, token);
    if (webUrl.endsWith('/')) webUrl = webUrl.slice(0, -1);
  } catch {
    webUrl = url.replace(/\/api\/?$/, '');
  }
  // Optional second-user identity for specs that need two accounts.
  // Failure here is a warning, not fatal — single-user specs still run.
  let token2 = '';
  if (process.env.DATAGROK_DEV_KEY_2 && process.env.DATAGROK_DEV_KEY_2.length > 0) {
    try {
      token2 = await testUtils.getToken(url, process.env.DATAGROK_DEV_KEY_2);
    } catch (e) {
      color.warn(`Playwright: DATAGROK_DEV_KEY_2 set but failed to exchange for token: ${e.message || e}`);
    }
  }
  // The config must live in the declared test directory; without it the
  // spawned run would pick up some unrelated config (or none).
  const configPath = _path.default.join(testDir, 'playwright.config.ts');
  if (!_fs.default.existsSync(configPath)) {
    color.error(`Playwright: ${configPath} not found.`);
    return {
      ...empty,
      failed: true,
      failedAmount: 1,
      verboseFailed: 'Playwright: missing playwright.config.ts\n'
    };
  }
  // Remove any stale report so a failed run can't be misread as fresh output.
  const reportFile = _path.default.join(pkgDir, 'test-playwright-report.json');
  if (_fs.default.existsSync(reportFile)) _fs.default.unlinkSync(reportFile);
  // `--no-install` keeps npx from fetching playwright on the fly — the
  // package is expected to declare it as a dependency.
  const cliArgs = ['--no-install', 'playwright', 'test', `--config=${configPath}`];
  // gui mode: headed browser for a human, no JSON reporter (and therefore no
  // per-test rows — see the early return after the spawn below).
  if (!args.gui) cliArgs.push(`--reporter=json`);else cliArgs.push('--headed');
  if (args.test) cliArgs.push(`--grep=${args.test}`);
  if (args['no-retry']) cliArgs.push('--retries=0');
  // --category narrows the run to a subdirectory when one of that name exists.
  let testDirFinal = testDir;
  if (args.category) {
    const candidate = _path.default.join(testDir, args.category);
    if (_fs.default.existsSync(candidate)) testDirFinal = candidate;
  }
  if (testDirFinal !== testDir) cliArgs.push(testDirFinal);
  // Specs read these: target URL, auth token(s), and where the JSON reporter
  // should write (PLAYWRIGHT_JSON_OUTPUT_NAME is honored by the reporter).
  const env = {
    ...process.env,
    DATAGROK_URL: webUrl,
    DATAGROK_AUTH_TOKEN: token,
    PLAYWRIGHT_JSON_OUTPUT_NAME: reportFile
  };
  if (token2) env.DATAGROK_AUTH_TOKEN_2 = token2;
  color.info(`Playwright: running ${_path.default.relative(pkgDir, testDir) || '.'} against ${webUrl}`);
  // Buffer output for the report-fallback path below; echo stdout only when
  // the user asked for it, but always echo stderr.
  const stdoutChunks = [];
  const stderrChunks = [];
  const exitCode = await new Promise(resolve => {
    const isWin = process.platform === 'win32';
    const child = (0, _child_process.spawn)(isWin ? 'npx.cmd' : 'npx', cliArgs, {
      cwd: pkgDir,
      env: env,
      shell: isWin
    });
    child.stdout.on('data', d => {
      stdoutChunks.push(d);
      if (args.gui || args.verbose) process.stdout.write(d);
    });
    child.stderr.on('data', d => {
      stderrChunks.push(d);
      process.stderr.write(d);
    });
    child.on('error', e => {
      // Spawn failure (npx missing): treat as exit 1 rather than rejecting.
      color.error(`Playwright: failed to spawn npx: ${e.message}`);
      resolve(1);
    });
    child.on('close', code => resolve(code ?? 1));
  });
  // gui mode produced no JSON report; collapse the run to one pass/fail row.
  if (args.gui) {
    return {
      ...empty,
      failed: exitCode !== 0,
      failedAmount: exitCode !== 0 ? 1 : 0,
      passedAmount: exitCode === 0 ? 1 : 0,
      verboseFailed: exitCode !== 0 ? 'Playwright (gui mode) exited non-zero\n' : ''
    };
  }
  // Prefer the report file; fall back to parsing stdout (the JSON reporter
  // writes there when PLAYWRIGHT_JSON_OUTPUT_NAME isn't honored).
  let report;
  if (_fs.default.existsSync(reportFile)) {
    try {
      report = JSON.parse(_fs.default.readFileSync(reportFile, 'utf-8'));
    } catch (e) {
      color.warn(`Playwright: cannot parse JSON report: ${e.message || e}`);
    }
  }
  if (!report) {
    const stdoutText = Buffer.concat(stdoutChunks).toString('utf-8');
    try {
      report = JSON.parse(stdoutText);
    } catch {/* ignore */}
  }
  if (!report) {
    color.error('Playwright: no JSON report produced.');
    const tail = Buffer.concat(stderrChunks).toString('utf-8').slice(-2000);
    return {
      ...empty,
      failed: true,
      failedAmount: 1,
      verboseFailed: `Playwright: no JSON report. stderr tail:\n${tail}\n`
    };
  }
  // Row metadata: owner from package.json author (string or {email}), package
  // name from the same sources the Puppeteer pass uses.
  const pkgJson = JSON.parse(_fs.default.readFileSync(_path.default.join(pkgDir, 'package.json'), 'utf-8'));
  const owner = pkgJson.author && (pkgJson.author.email || pkgJson.author) || '';
  const pkgName = process.env.TARGET_PACKAGE || args.package || pkgJson.name || '';
  const rows = [];
  flattenSuites(report.suites, testDir, pkgName, typeof owner === 'string' ? owner : '', args.verbose === true, rows);
  // Aggregate counters and the verbose summaries the caller concatenates.
  let passedAmount = 0;
  let failedAmount = 0;
  let skippedAmount = 0;
  let verbosePassed = '';
  let verboseFailed = '';
  let verboseSkipped = '';
  for (var r of rows) {
    const line = `${r.category}: ${r.name} (${r.ms} ms)\n`;
    if (r.skipped) {
      skippedAmount++;
      verboseSkipped += line;
    } else if (r.success) {
      passedAmount++;
      verbosePassed += line;
    } else {
      failedAmount++;
      verboseFailed += `${r.category}: ${r.name} (${r.ms} ms) : ${r.result}\n`;
    }
  }
  return {
    failed: failedAmount > 0,
    passedAmount: passedAmount,
    failedAmount: failedAmount,
    skippedAmount: skippedAmount,
    verbosePassed: verbosePassed,
    verboseFailed: verboseFailed,
    verboseSkipped: verboseSkipped,
    csv: rowsToCsv(rows)
  };
}
|
package/package.json
CHANGED