datagrok-tools 6.2.0 → 6.2.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,8 +1,16 @@
1
1
  # Datagrok-tools changelog
2
2
 
3
+ ## 6.2.2 (2026-05-05)
4
+
5
+ * `grok test` — Playwright runner now writes `test-report-playwright.csv` next to the existing merged `test-report.csv`, so CI can ship Playwright rows to a dedicated Datlas reporting bucket without disturbing the legacy `package` flow.
6
+
7
+ ## 6.2.1 (2026-05-05)
8
+
9
+ * Reports: `grok report attach <ticket> <file>` — upload a file as a JIRA issue attachment via REST v2 multipart POST.
10
+
3
11
  ## 6.2.0 (2026-05-04)
4
12
 
5
- * `grok test` — Playwright support: when a package's `package.json` declares `"playwrightTests": "<path>"`, `grok test` runs `npx playwright test` against that directory in addition to the existing Puppeteer pass and merges results into a single `test-report.csv`. Auth is unified with the Puppeteer pass (dev key from `~/.grok/config.yaml` → session token → cookie + `localStorage` injection — no login form). Optional `DATAGROK_DEV_KEY_2` env var enables a second-user identity for specs that need it (`DATAGROK_AUTH_TOKEN_2` exposed to specs). New `--no-playwright` flag opts out of the Playwright pass for a single run.
13
+ * `grok test` — Playwright support: when a package's `package.json` declares `"playwrightTests": "<path>"`, `grok test` runs `npx playwright test` against that directory in addition to the existing Puppeteer pass and merges results into a single `test-report.csv`. Auth is unified with the Puppeteer pass (dev key from `~/.grok/config.yaml` → session token → cookie + `localStorage` injection — no login form). Optional `DATAGROK_DEV_KEY_2` env var enables a second-user identity for specs that need it (`DATAGROK_AUTH_TOKEN_2` exposed to specs). New `--skip-playwright` flag opts out of the Playwright pass for a single run.
6
14
 
7
15
  ## 6.1.14 (2026-05-01)
8
16
 
@@ -29,6 +29,8 @@ async function report(args) {
29
29
  return await handleComment(args);
30
30
  case 'label':
31
31
  return await handleLabel(args);
32
+ case 'attach':
33
+ return await handleAttach(args);
32
34
  default:
33
35
  return false;
34
36
  }
@@ -590,6 +592,65 @@ async function handleComment(args) {
590
592
  return false;
591
593
  }
592
594
  }
595
+ async function handleAttach(args) {
596
+ const ticket = args._[2];
597
+ const filePath = args._[3];
598
+ if (!ticket || !filePath) {
599
+ color.error('Usage: grok report attach <ticket-key> <file> [--jira-url <url>]');
600
+ return false;
601
+ }
602
+ if (!_fs.default.existsSync(filePath)) {
603
+ color.error(`File not found: ${filePath}`);
604
+ return false;
605
+ }
606
+ const user = process.env.JIRA_USER;
607
+ const token = process.env.JIRA_TOKEN;
608
+ if (!user || !token) {
609
+ color.error('JIRA_USER and JIRA_TOKEN env vars are required for `grok report attach`.');
610
+ return false;
611
+ }
612
+ const base = resolveJiraBase(args);
613
+ const url = `${base}/rest/api/2/issue/${encodeURIComponent(ticket)}/attachments`;
614
+
615
+ // node-fetch v2 has no built-in FormData and the codebase doesn't depend on
616
+ // form-data. curl is universally available in deploy targets and handles
617
+ // multipart upload natively. JIRA REST v2 needs the X-Atlassian-Token
618
+ // anti-CSRF header; the field name must be `file`.
619
+ const {
620
+ spawnSync
621
+ } = require('child_process');
622
+ const r = spawnSync('curl', ['-sS', '-X', 'POST', '-u', `${user}:${token}`, '-H', 'X-Atlassian-Token: no-check', '-F', `file=@${filePath}`, '-w', '\n%{http_code}\n', url], {
623
+ encoding: 'utf8',
624
+ timeout: 120_000,
625
+ maxBuffer: 4 * 1024 * 1024
626
+ });
627
+ if (r.error) {
628
+ color.error(`curl spawn failed: ${r.error.message}`);
629
+ return false;
630
+ }
631
+ if (r.status !== 0) {
632
+ color.error(`curl exit ${r.status}: ${(r.stderr || '').slice(0, 400)}`);
633
+ return false;
634
+ }
635
+ // Stdout layout from curl `-w`: <body>\n<http_code>\n
636
+ const out = (r.stdout || '').trim();
637
+ const lastNl = out.lastIndexOf('\n');
638
+ const httpCode = lastNl >= 0 ? out.slice(lastNl + 1).trim() : '';
639
+ const body = lastNl >= 0 ? out.slice(0, lastNl) : out;
640
+ if (httpCode !== '200' && httpCode !== '201') {
641
+ color.error(`JIRA attachment POST failed (HTTP ${httpCode}): ${body.slice(0, 400)}`);
642
+ return false;
643
+ }
644
+ let id = null;
645
+ try {
646
+ const parsed = JSON.parse(body);
647
+ if (Array.isArray(parsed) && parsed[0] && (parsed[0].id || parsed[0].Id)) id = String(parsed[0].id || parsed[0].Id);
648
+ } catch (_) {/* ignored — JIRA normally returns a JSON array of attachment metadata, but tolerate non-JSON bodies */}
649
+ const fileName = _path.default.basename(filePath);
650
+ color.success(`Attached ${fileName} to ${ticket}` + (id ? ` (id ${id})` : ''));
651
+ if (id) console.log(id);
652
+ return true;
653
+ }
593
654
  async function handleLabel(args) {
594
655
  const ticket = args._[2];
595
656
  const labels = args._.slice(3).filter(s => s.length > 0);
@@ -26,7 +26,7 @@ function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r
26
26
  const execAsync = (0, _util.promisify)(_child_process.exec);
27
27
  const execFileAsync = (0, _util.promisify)(_child_process.execFile);
28
28
  const testInvocationTimeout = 3600000;
29
- const availableCommandOptions = ['host', 'package', 'csv', 'gui', 'catchUnhandled', 'platform', 'core', 'report', 'skip-build', 'skip-publish', 'path', 'record', 'verbose', 'benchmark', 'category', 'test', 'stress-test', 'link', 'tag', 'ci-cd', 'debug', 'no-retry', 'dartium', 'f', 'params', 'logfailed', 'no-playwright'];
29
+ const availableCommandOptions = ['host', 'package', 'csv', 'gui', 'catchUnhandled', 'platform', 'core', 'report', 'skip-build', 'skip-publish', 'path', 'record', 'verbose', 'benchmark', 'category', 'test', 'stress-test', 'link', 'tag', 'ci-cd', 'debug', 'no-retry', 'dartium', 'f', 'params', 'logfailed', 'skip-playwright'];
30
30
  const curDir = process.cwd();
31
31
 
32
32
  /** Expands camelCase to space-separated lowercase: "dataManipulation" → "data manipulation" */
@@ -225,8 +225,27 @@ async function test(args) {
225
225
  }
226
226
  }
227
227
  process.env.TARGET_PACKAGE = packageName;
228
- let res = await runTesting(args);
229
- if (!args['no-playwright']) {
228
+ let res;
229
+ try {
230
+ res = await runTesting(args);
231
+ } catch (e) {
232
+ // Don't let Puppeteer-side failures (login error, browser crash) skip the
233
+ // Playwright pass — the two suites have independent auth and runtime paths,
234
+ // and we want at least one half of the run reported.
235
+ color.error(`Puppeteer pass failed: ${e?.message || e}`);
236
+ res = {
237
+ failed: true,
238
+ verbosePassed: '',
239
+ verboseSkipped: '',
240
+ verboseFailed: `Puppeteer pass failed: ${e?.message || e}\n`,
241
+ passedAmount: 0,
242
+ skippedAmount: 0,
243
+ failedAmount: 1,
244
+ csv: '',
245
+ error: String(e?.message || e)
246
+ };
247
+ }
248
+ if (!args['skip-playwright']) {
230
249
  const ptDir = playwrightRunner.hasPlaywrightTests(curDir);
231
250
  if (ptDir) {
232
251
  const ptRes = await playwrightRunner.runPlaywrightTests(curDir, ptDir, args, args.host ?? '');
@@ -72,7 +72,11 @@ function flattenSuites(suites, testDir, pkgName, owner, verbose, rows) {
72
72
  }
73
73
  }
74
74
  function rowsToCsv(rows) {
75
- const header = ['date', 'category', 'name', 'success', 'result', 'ms', 'skipped', 'logs', 'owner', 'package', 'widgetsDifference', 'flaking'];
75
+ // Column order must match what runTesting() actually serializes (see
76
+ // test-utils.ts:485 — `setOrder` is a no-op for the CSV writer, so the order
77
+ // is the natural order Dart's package-test emits). mergeBrowsersResults uses
78
+ // the first CSV's header, so any drift here misaligns Playwright rows.
79
+ const header = ['date', 'category', 'name', 'success', 'ms', 'skipped', 'owner', 'package', 'flaking', 'result', 'logs', 'widgetsDifference'];
76
80
  return _papaparse.default.unparse({
77
81
  fields: header,
78
82
  data: rows.map(r => header.map(h => r[h]))
@@ -242,6 +246,15 @@ async function runPlaywrightTests(pkgDir, testDir, args, hostKey) {
242
246
  verboseFailed += `${r.category}: ${r.name} (${r.ms} ms) : ${r.result}\n`;
243
247
  }
244
248
  }
249
+ const csv = rowsToCsv(rows);
250
+ // Persist a Playwright-only CSV so the pipeline can ship it to the Datlas
251
+ // 'playwright' bucket, separate from the merged Puppeteer+Playwright
252
+ // test-report.csv that feeds the legacy 'package' bucket and JUnit.
253
+ try {
254
+ _fs.default.writeFileSync(_path.default.join(pkgDir, 'test-report-playwright.csv'), csv, 'utf-8');
255
+ } catch (e) {
256
+ color.warn(`Playwright: failed to write test-report-playwright.csv: ${e.message || e}`);
257
+ }
245
258
  return {
246
259
  failed: failedAmount > 0,
247
260
  passedAmount: passedAmount,
@@ -250,6 +263,6 @@ async function runPlaywrightTests(pkgDir, testDir, args, hostKey) {
250
263
  verbosePassed: verbosePassed,
251
264
  verboseFailed: verboseFailed,
252
265
  verboseSkipped: verboseSkipped,
253
- csv: rowsToCsv(rows)
266
+ csv: csv
254
267
  };
255
268
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "datagrok-tools",
3
- "version": "6.2.0",
3
+ "version": "6.2.2",
4
4
  "description": "Utility to upload and publish packages to Datagrok",
5
5
  "homepage": "https://github.com/datagrok-ai/public/tree/master/tools#readme",
6
6
  "dependencies": {