cucumber-reactive-reporter 1.0.10 → 1.1.0

@@ -2,182 +2,304 @@
 
  function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
 
+ var module$1 = require('module');
  var fs = _interopDefault(require('fs'));
  require('fs/promises');
  var ncp = _interopDefault(require('ncp'));
  var path = _interopDefault(require('path'));
  var ut = _interopDefault(require('util'));
 
- // import { createRequire } from 'module';
-
- ncp.limit = 16;
-
- let modulePath = require.resolve("./package.json"); //trick to resolve path to the installed module
-
- /**
- options.filter - a RegExp instance, against which each file name is tested to determine whether to copy it or not, or a function taking single parameter: copied file name, returning true or false, determining whether to copy file or not.
-
- options.transform - a function: function (read, write) { read.pipe(write) } used to apply streaming transforms while copying.
-
- options.clobber - boolean=true. if set to false, ncp will not overwrite destination files that already exist.
-
- options.dereference - boolean=false. If set to true, ncp will follow symbolic links. For example, a symlink in the source tree pointing to a regular file will become a regular file in the destination tree. Broken symlinks will result in errors.
-
- options.stopOnErr - boolean=false. If set to true, ncp will behave like cp -r, and stop on the first error it encounters. By default, ncp continues copying, logging all errors and returning an array.
-
- options.errs - stream. If options.stopOnErr is false, a stream can be provided, and errors will be written to this stream.
- */
-
-
- let cp = (source, destination, options) => {
- return new Promise((resolve, reject) => {
- ncp(source, destination, err => {
- if (err) {
- reject(new Error(err));
- }
-
- resolve();
- });
- });
- };
-
- let _makeSafe = input => {
- input = input.replace(/&/g, '&');
- input = input.replace(/</g, '&lt;');
- input = input.replace(/>/g, '&gt;');
- return input;
- };
  /**
- *
- * @param source path to the cucumber results json
- * @param dest folder path where html report gets written to
- * @param options
+ * Purpose: Normalize cucumber JSON into reporter store state.
+ * Responsibilities:
+ * - Normalize legacy cucumber JSON to a stable feature/scenario/step shape.
+ * - Build feature, scenario, and step maps for the UI store.
+ * Inputs/Outputs: Accepts parsed cucumber JSON; returns store-shaped state.
+ * Invariants: Input must be legacy JSON (features/elements/steps).
+ * See: /agents.md
  */
 
-
- const generate = async (source, dest, options) => {
- options ? true : options = {};
- const CUCUMBER_JSON_PATH = "_cucumber-results.json";
- const SETTINGS_JSON_PATH = "_reporter_settings.json";
- const HTML_PATH = path.join(path.dirname(modulePath), "react"); // "linkTags": [{
- // "pattern": "[a-zA-Z]*-(\\d)*$",
- // "link": "https://bydeluxe.atlassian.net/browse/"
- // }]
- //defaults
-
- const {
- title = "Cucumber Report",
- //report page title
- description = "Cucumber report",
- //description to be set at the page header
- metadata = {},
- linkTags = null
- } = options;
-
- let __dirname = path.resolve();
-
- if (path.isAbsolute(source) === false) {
- source = path.join(__dirname, source);
+ const LEGACY_FORMAT_HELP = ["Unsupported cucumber output format.", "This reporter expects legacy JSON (features/elements/steps).", "If you are using the message formatter, rerun with --format json:<file> or", 'use inputFormat: "auto" to detect message output.'].join(" ");
+ const INPUT_FORMAT_HELP = ['inputFormat must be "legacy-json" or "auto".', 'Use "legacy-json" for --format json:<file> output.', 'Use "auto" to detect and reject message formatter output explicitly.'].join(" ");
+ const ATTACHMENTS_ENCODING_HELP = ['attachmentsEncoding must be "auto", "base64", or "raw".', 'Use "raw" if your cucumber JSON stores text attachments unencoded.', 'Use "base64" if text attachments are base64-encoded.', 'Use "auto" to decode base64-looking text attachments.'].join(" ");
+ const normalizeMimeType = value => String(value ?? "").split(";")[0].trim().toLowerCase();
+ const shouldDecodeEmbedding = mimeType => {
+ if (!mimeType) {
+ return false;
  }
-
- fs.accessSync(source);
-
- if (!dest) {
- dest = path.dirname(source);
- } else {
- if (path.isAbsolute(dest) === false) {
- dest = path.resolve(dest);
+ if (mimeType.startsWith("text/")) {
+ return true;
+ }
+ return mimeType === "application/json" || mimeType === "application/xml";
+ };
+ const looksLikeBase64 = value => {
+ if (typeof value !== "string") {
+ return false;
+ }
+ const trimmed = value.trim();
+ if (!trimmed || trimmed.length % 4 !== 0) {
+ return false;
+ }
+ if (/[^A-Za-z0-9+/=]/.test(trimmed)) {
+ return false;
+ }
+ return true;
+ };
+ const isLikelyText = value => {
+ if (typeof value !== "string") {
+ return false;
+ }
+ if (value.includes("\uFFFD")) {
+ return false;
+ }
+ const sample = value.slice(0, 2000);
+ if (!sample.length) {
+ return true;
+ }
+ let printable = 0;
+ for (const char of sample) {
+ const code = char.charCodeAt(0);
+ if (code === 9 || code === 10 || code === 13) {
+ printable += 1;
+ continue;
+ }
+ if (code >= 32 && code !== 127) {
+ printable += 1;
  }
  }
-
- console.log(`__dirname: ${__dirname}\n` + `html path: ${HTML_PATH}\n` + `source: ${source}\n` + `destination: ${dest}\n` + `title: ${title}\n` + `description: ${description}\n` + `metadata: ${ut.inspect(metadata, false, null)}\n` + `linkTags: ${ut.inspect(linkTags, false, null)}\n`); //validate input json and make a copy
-
- let str = fs.readFileSync(source).toString();
- let obj = JSON.parse(str);
-
- let out = _prepDataForStore(obj);
-
- let modifiedJSON = JSON.stringify(out);
- let destExists = true;
-
- try {
- fs.accessSync(dest);
- } catch (err) {
- destExists = false;
+ return printable / sample.length > 0.85;
+ };
+ const decodeBase64Text = value => {
+ if (!looksLikeBase64(value)) {
+ return null;
  }
-
- if (!destExists) {
- fs.mkdirSync(dest, {
- recursive: true
- });
+ const decoded = Buffer.from(value, "base64").toString("utf8");
+ if (!isLikelyText(decoded)) {
+ return null;
  }
-
- fs.writeFileSync(path.join(dest, CUCUMBER_JSON_PATH), modifiedJSON);
- fs.writeFileSync(path.join(dest, SETTINGS_JSON_PATH), JSON.stringify(options));
- await cp(HTML_PATH, dest); //swap out some tokens in the html
-
- let indexPagePath = path.join(dest, "index.html");
- let htmlStr = fs.readFileSync(indexPagePath, "utf8").toString();
- let modified = htmlStr.replace(/-=title=-/g, _makeSafe(title));
- fs.writeFileSync(indexPagePath, modified, "utf8");
- console.log("done");
+ return decoded;
+ };
+ const normalizeEmbeddings = (embeddings, {
+ attachmentsEncoding
+ }) => {
+ if (!Array.isArray(embeddings)) {
+ return embeddings;
+ }
+ return embeddings.map(embedding => normalizeEmbedding(embedding, {
+ attachmentsEncoding
+ }));
+ };
+ const normalizeEmbedding = (embedding, {
+ attachmentsEncoding
+ }) => {
+ if (!embedding || typeof embedding !== "object") {
+ return embedding;
+ }
+ if (attachmentsEncoding === "raw") {
+ return embedding;
+ }
+ const mimeType = normalizeMimeType(embedding.mime_type ?? embedding.media?.type);
+ if (!shouldDecodeEmbedding(mimeType)) {
+ return embedding;
+ }
+ if (typeof embedding.data !== "string") {
+ return embedding;
+ }
+ // Legacy cucumber JSON embeds text payloads as base64; decode for readable output.
+ const decoded = decodeBase64Text(embedding.data);
+ if (!decoded) {
+ return embedding;
+ }
+ if (mimeType === "application/json") {
+ try {
+ JSON.parse(decoded);
+ } catch (err) {
+ return embedding;
+ }
+ } else if (["application/xml", "text/xml", "text/html"].includes(mimeType)) {
+ if (!decoded.includes("<")) {
+ return embedding;
+ }
+ }
+ return {
+ ...embedding,
+ data: decoded
+ };
+ };
+ const resolveAttachmentsEncoding = ({
+ attachmentsEncoding,
+ cucumberVersion
+ }) => {
+ if (!attachmentsEncoding) {
+ const parsed = parseCucumberMajor(cucumberVersion);
+ if (Number.isFinite(parsed)) {
+ return parsed < 7 ? "raw" : "base64";
+ }
+ return "auto";
+ }
+ if (!["auto", "base64", "raw"].includes(attachmentsEncoding)) {
+ throw new Error(ATTACHMENTS_ENCODING_HELP);
+ }
+ return attachmentsEncoding;
+ };
+ const parseCucumberMajor = cucumberVersion => {
+ if (!cucumberVersion) {
+ return null;
+ }
+ const value = String(cucumberVersion).trim();
+ if (!value) {
+ return null;
+ }
+ const match = value.match(/(\d+)(?:\.\d+)?/);
+ if (!match) {
+ return null;
+ }
+ const major = Number.parseInt(match[1], 10);
+ return Number.isFinite(major) ? major : null;
  };
 
- let _prepDataForStore = data => {
- let state = {};
- state.features = {};
- state.features.list = [];
- state.features.featuresMap = {};
- state.scenarios = {};
- state.scenarios.list = [];
- state.scenarios.scenariosMap = {};
- state.steps = {};
- state.steps.stepsMap = {};
- state.steps.totalDurationNanoSec = 0; //parse
-
+ /**
+ * Convert cucumber JSON into the reporter store shape.
+ * @param {unknown} input parsed cucumber JSON
+ * @returns {Object} normalized state for the UI store
+ * @throws {Error} when input is not legacy cucumber JSON
+ * @example
+ * const state = prepareStoreState(legacyJsonArray);
+ */
+ const prepareStoreState = (input, {
+ inputFormat = "legacy-json",
+ attachmentsEncoding,
+ cucumberVersion
+ } = {}) => {
+ if (!["legacy-json", "auto"].includes(inputFormat)) {
+ throw new Error(INPUT_FORMAT_HELP);
+ }
+ if (inputFormat === "auto" && looksLikeMessageStream(input)) {
+ throw new Error(LEGACY_FORMAT_HELP);
+ }
+ const resolvedEncoding = resolveAttachmentsEncoding({
+ attachmentsEncoding,
+ cucumberVersion
+ });
+ const features = resolveFeatures(input);
+ if (!features) {
+ throw new Error(LEGACY_FORMAT_HELP);
+ }
+ const state = createEmptyState();
  let featureIndex = 0;
- console.time("loadTotal");
-
- for (let f of data) {
- //FEATURE
- //cucumber id field is not guaranteed to be unique for feature/scenario/step
- f.id = `${featureIndex++}_${f.id}`;
-
- _processFeature(state, f); //SCENARIO
-
-
- let numScenarios = f.elements.length; //avoid multiple lookups;
-
- if (f.elements && numScenarios) {
- let sc_index = 0;
-
- for (let sc of f.elements) {
- //need to make scenario id unique as well
- sc_index++;
- let sc_id_arr = sc.id.split(";");
- sc_id_arr[0] = f.id;
-
- if (sc_id_arr.length) {
- sc_id_arr[1] = `${sc_index - 1}_${sc_id_arr[1]}`;
- }
-
- sc.id = sc_id_arr.join(";");
-
- _processScenario(state, f.id, sc); //STEPS
-
-
- for (let st of sc.steps) {
- _processStep(state, sc.id, st);
- }
- }
+ for (const rawFeature of features) {
+ if (!rawFeature) {
+ continue;
  }
+ const feature = normalizeFeature(rawFeature, featureIndex);
+ featureIndex += 1;
+ processFeature(state, feature);
+ processFeatureElements(state, feature, {
+ attachmentsEncoding: resolvedEncoding
+ });
  }
-
- console.timeEnd("loadTotal");
  return state;
  };
-
- let _processFeature = (state, f) => {
+ const createEmptyState = () => ({
+ features: {
+ list: [],
+ featuresMap: {}
+ },
+ scenarios: {
+ list: [],
+ scenariosMap: {}
+ },
+ steps: {
+ stepsMap: {},
+ totalDurationNanoSec: 0
+ }
+ });
+ const looksLikeMessageStream = input => {
+ if (!Array.isArray(input)) {
+ return false;
+ }
+ return input.some(item => {
+ if (!item || typeof item !== "object") {
+ return false;
+ }
+ return "gherkinDocument" in item || "pickle" in item || "testCaseStarted" in item || "testCaseFinished" in item || "envelope" in item;
+ });
+ };
+ const resolveFeatures = input => {
+ if (Array.isArray(input)) {
+ return input;
+ }
+ if (input && Array.isArray(input.features)) {
+ return input.features;
+ }
+ return null;
+ };
+ const normalizeFeature = (feature, index) => {
+ const baseId = feature?.id ?? feature?.name ?? "feature";
+ const elements = normalizeElements(feature);
+ return {
+ ...feature,
+ id: `${index}_${baseId}`,
+ elements,
+ tags: Array.isArray(feature?.tags) ? feature.tags : []
+ };
+ };
+ const normalizeElements = feature => {
+ if (!feature) {
+ return [];
+ }
+ if (Array.isArray(feature.elements)) {
+ return feature.elements;
+ }
+ if (Array.isArray(feature.scenarios)) {
+ return feature.scenarios;
+ }
+ if (Array.isArray(feature.children)) {
+ return flattenChildren(feature.children);
+ }
+ return [];
+ };
+ const flattenChildren = children => {
+ const flattened = [];
+ for (const child of children) {
+ if (!child) {
+ continue;
+ }
+ if (child.scenario) {
+ flattened.push(child.scenario);
+ continue;
+ }
+ if (child.background) {
+ flattened.push(child.background);
+ continue;
+ }
+ if (child.rule && Array.isArray(child.rule.children)) {
+ flattened.push(...flattenChildren(child.rule.children));
+ continue;
+ }
+ if (Array.isArray(child.children)) {
+ flattened.push(...flattenChildren(child.children));
+ continue;
+ }
+ flattened.push(child);
+ }
+ return flattened;
+ };
+ const normalizeScenario = (featureId, scenario, index) => {
+ const baseId = scenario?.id ?? scenario?.name ?? "scenario";
+ const scenarioId = buildScenarioId(featureId, baseId, index);
+ return {
+ ...scenario,
+ id: scenarioId,
+ tags: Array.isArray(scenario?.tags) ? scenario.tags : []
+ };
+ };
+ const buildScenarioId = (featureId, scenarioId, index) => {
+ const parts = String(scenarioId).split(";");
+ const suffix = parts.length > 1 ? parts[1] : parts[0];
+ return `${featureId};${index}_${suffix}`;
+ };
+ const processFeature = (state, feature) => {
  const {
  description,
  elements,
@@ -185,42 +307,34 @@ let _processFeature = (state, f) => {
  keyword,
  line,
  name,
- tags: [...tags],
+ tags,
  uri
- } = f;
- const allTags = [...tags]; //figure out if it has failed stuff
-
+ } = feature;
+ const allTags = Array.isArray(tags) ? [...tags] : [];
  let numFailedScenarios = 0;
  let numSkippedScenarios = 0;
-
- if (elements && elements.length) {
- for (let el of elements) {
- //collect scenario tags
- if (el.tags && el.tags.length) {
- let temp = allTags.map(t => t.name);
- el.tags.forEach(tag => {
- if (temp.includes(tag.name) === false) {
- allTags.push(tag);
- }
- });
+ const elementList = Array.isArray(elements) ? elements : [];
+ for (const element of elementList) {
+ const elementTags = Array.isArray(element?.tags) ? element.tags : [];
+ const seen = allTags.map(tag => tag.name);
+ for (const tag of elementTags) {
+ if (tag?.name && !seen.includes(tag.name)) {
+ allTags.push(tag);
  }
-
- if (el.steps && el.steps.length) {
- for (let step of el.steps) {
- if (step.result && step.result.status === "failed") {
- numFailedScenarios++;
- break;
- }
-
- if (step.result && step.result.status === "skipped") {
- numSkippedScenarios++;
- break;
- }
- }
+ }
+ const steps = Array.isArray(element?.steps) ? element.steps : [];
+ for (const step of steps) {
+ const status = step?.result?.status;
+ if (status === "failed") {
+ numFailedScenarios += 1;
+ break;
+ }
+ if (status === "skipped") {
+ numSkippedScenarios += 1;
+ break;
  }
  }
  }
-
  state.features.list.push(id);
  state.features.featuresMap[id] = {
  id,
@@ -229,20 +343,19 @@ let _processFeature = (state, f) => {
  keyword,
  name,
  line,
- tags,
+ tags: Array.isArray(tags) ? tags : [],
  allTags,
  numFailedScenarios,
  numSkippedScenarios
  };
  };
-
- let _processScenario = (state, featureId, scenario) => {
+ const processScenario = (state, featureId, scenario) => {
  const {
  id,
  keyword,
  line,
  name,
- tags: [...tags],
+ tags,
  type,
  uri
  } = scenario;
@@ -256,13 +369,44 @@ let _processScenario = (state, featureId, scenario) => {
  name,
  passedSteps: 0,
  skippedSteps: 0,
- tags,
+ tags: Array.isArray(tags) ? tags : [],
  type,
  uri
  };
  };
-
- let _processStep = (state, scenarioId, st) => {
+ const processFeatureElements = (state, feature, {
+ attachmentsEncoding
+ }) => {
+ const elements = feature.elements;
+ if (!elements.length) {
+ return;
+ }
+ let scenarioIndex = 0;
+ for (const rawScenario of elements) {
+ if (!rawScenario) {
+ continue;
+ }
+ const scenario = normalizeScenario(feature.id, rawScenario, scenarioIndex);
+ scenarioIndex += 1;
+ processScenario(state, feature.id, scenario);
+ processScenarioSteps(state, scenario, {
+ attachmentsEncoding
+ });
+ }
+ };
+ const processScenarioSteps = (state, scenario, {
+ attachmentsEncoding
+ }) => {
+ const steps = Array.isArray(scenario.steps) ? scenario.steps : [];
+ for (const step of steps) {
+ processStep(state, scenario.id, step, {
+ attachmentsEncoding
+ });
+ }
+ };
+ const processStep = (state, scenarioId, step, {
+ attachmentsEncoding
+ }) => {
  const {
  arguments: args,
  embeddings,
@@ -270,18 +414,22 @@ let _processStep = (state, scenarioId, st) => {
  keyword,
  line,
  name,
- result: {
- duration,
- error_message,
- status
- }
- } = st;
- let location = "";
- if (st.match) location = st.match.location;
- let step = {
- args,
+ result
+ } = step ?? {};
+ const {
  duration,
- embeddings,
+ error_message,
+ status
+ } = result ?? {};
+ const durationValue = typeof duration === "string" ? Number(duration) : duration;
+ const location = step?.match?.location ?? "";
+ const normalizedEmbeddings = normalizeEmbeddings(embeddings, {
+ attachmentsEncoding
+ });
+ const stepData = {
+ args,
+ duration: durationValue,
+ embeddings: normalizedEmbeddings,
  error_message,
  keyword,
  line,
@@ -289,28 +437,177 @@ let _processStep = (state, scenarioId, st) => {
  name,
  status
  };
- if (!state.steps.stepsMap[scenarioId]) state.steps.stepsMap[scenarioId] = {
- steps: []
- };
- state.steps.stepsMap[scenarioId].steps.push(step);
-
- if (isNaN(duration) === false) {
- state.steps.totalDurationNanoSec = state.steps.totalDurationNanoSec + duration;
+ if (!state.steps.stepsMap[scenarioId]) {
+ state.steps.stepsMap[scenarioId] = {
+ steps: []
+ };
  }
-
- if (!hidden || embeddings && embeddings.length) {
+ state.steps.stepsMap[scenarioId].steps.push(stepData);
+ if (Number.isFinite(durationValue)) {
+ state.steps.totalDurationNanoSec += durationValue;
+ }
+ if (!hidden || normalizedEmbeddings && normalizedEmbeddings.length) {
  if (status === "passed") {
- state.scenarios.scenariosMap[scenarioId].passedSteps++;
+ state.scenarios.scenariosMap[scenarioId].passedSteps += 1;
  } else if (status === "skipped") {
- state.scenarios.scenariosMap[scenarioId].skippedSteps++;
+ state.scenarios.scenariosMap[scenarioId].skippedSteps += 1;
  }
  }
-
  if (status === "failed") {
- state.scenarios.scenariosMap[scenarioId].failedSteps++;
+ state.scenarios.scenariosMap[scenarioId].failedSteps += 1;
  }
  };
 
+ /**
+ * Purpose: Generate HTML reports from cucumber JSON output.
+ * Responsibilities:
+ * - Normalize cucumber JSON into store state.
+ * - Copy report assets and write report metadata.
+ * Inputs/Outputs: Reads a cucumber JSON file and writes a report folder.
+ * Invariants: Expects legacy cucumber JSON (features/elements/steps).
+ * See: /agents.md
+ */
+ const require$1 = module$1.createRequire((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('cucumber-reactive-reporter.cjs.js', document.baseURI).href)));
+ ncp.limit = 16;
+ const modulePath = require$1.resolve("./package.json"); //trick to resolve path to the installed module
+
+ /**
+ options.filter - a RegExp instance, against which each file name is tested to determine whether to copy it or not, or a function taking single parameter: copied file name, returning true or false, determining whether to copy file or not.
+
+ options.transform - a function: function (read, write) { read.pipe(write) } used to apply streaming transforms while copying.
+
+ options.clobber - boolean=true. if set to false, ncp will not overwrite destination files that already exist.
+
+ options.dereference - boolean=false. If set to true, ncp will follow symbolic links. For example, a symlink in the source tree pointing to a regular file will become a regular file in the destination tree. Broken symlinks will result in errors.
+
+ options.stopOnErr - boolean=false. If set to true, ncp will behave like cp -r, and stop on the first error it encounters. By default, ncp continues copying, logging all errors and returning an array.
+
+ options.errs - stream. If options.stopOnErr is false, a stream can be provided, and errors will be written to this stream.
+ */
+
+ let cp = (source, destination, options) => {
+ return new Promise((resolve, reject) => {
+ ncp(source, destination, err => {
+ if (err) {
+ reject(new Error(err));
+ }
+ resolve();
+ });
+ });
+ };
+ const _makeSafe = input => {
+ input = input.replace(/&/g, '&amp;');
+ input = input.replace(/</g, '&lt;');
+ input = input.replace(/>/g, '&gt;');
+ return input;
+ };
+
+ /**
+ * Generate a report from cucumber JSON output.
+ * @param {string} source path to the cucumber results JSON file
+ * @param {string} dest folder path where the HTML report gets written
+ * @param {Object} options report configuration overrides
+ * @param {"legacy-json"|"auto"} [options.inputFormat] input JSON format selector
+ * @param {"auto"|"base64"|"raw"} [options.attachmentsEncoding] attachment encoding
+ * @param {string} [options.cucumberVersion] cucumber version (for encoding hints)
+ * @returns {Promise<void>} resolves when report assets are written
+ * @throws {Error} when input JSON is invalid or unsupported
+ * @example
+ * await generate("results/cucumber.json", "reports/out", { title: "Run #1" });
+ */
+ const generate = async (source, dest, options) => {
+ options ? true : options = {};
+ const CUCUMBER_JSON_PATH = "_cucumber-results.json";
+ const SETTINGS_JSON_PATH = "_reporter_settings.json";
+ const HTML_PATH = path.join(path.dirname(modulePath), "react");
+
+ // "linkTags": [{
+ // "pattern": "[a-zA-Z]*-(\\d)*$",
+ // "link": "https://bydeluxe.atlassian.net/browse/"
+
+ // }]
+ //defaults
+ const {
+ title = "Cucumber Report",
+ //report page title
+ description = "Cucumber report",
+ //description to be set at the page header
+ metadata = {},
+ linkTags = null,
+ inputFormat = "legacy-json",
+ attachmentsEncoding,
+ cucumberVersion
+ } = options;
+ let __dirname = path.resolve();
+ if (path.isAbsolute(source) === false) {
+ source = path.join(__dirname, source);
+ }
+ fs.accessSync(source);
+ if (!dest) {
+ dest = path.dirname(source);
+ } else {
+ if (path.isAbsolute(dest) === false) {
+ dest = path.resolve(dest);
+ }
+ }
+ console.log(`__dirname: ${__dirname}\n` + `html path: ${HTML_PATH}\n` + `source: ${source}\n` + `destination: ${dest}\n` + `title: ${title}\n` + `description: ${description}\n` + `metadata: ${ut.inspect(metadata, false, null)}\n` + `linkTags: ${ut.inspect(linkTags, false, null)}\n`);
+
+ //validate input json and make a copy
+ let str = fs.readFileSync(source, "utf8");
+ let obj = parseInputData(source, str);
+ let out = prepareStoreState(obj, {
+ inputFormat,
+ attachmentsEncoding,
+ cucumberVersion
+ });
+ let modifiedJSON = JSON.stringify(out);
+ let destExists = true;
+ try {
+ fs.accessSync(dest);
+ } catch (err) {
+ destExists = false;
+ }
+ if (!destExists) {
+ fs.mkdirSync(dest, {
+ recursive: true
+ });
+ }
+ fs.writeFileSync(path.join(dest, CUCUMBER_JSON_PATH), modifiedJSON);
+ fs.writeFileSync(path.join(dest, SETTINGS_JSON_PATH), JSON.stringify(options));
+ await cp(HTML_PATH, dest);
+ //swap out some tokens in the html
+ let indexPagePath = path.join(dest, "index.html");
+ let htmlStr = fs.readFileSync(indexPagePath, "utf8").toString();
+ let modified = htmlStr.replace(/-=title=-/g, _makeSafe(title));
+ fs.writeFileSync(indexPagePath, modified, "utf8");
+ console.log("done");
+ };
+ const parseInputData = (source, rawText) => {
+ try {
+ return JSON.parse(rawText);
+ } catch (err) {
+ const ndjson = parseNdjson(rawText);
+ if (ndjson) {
+ return ndjson;
+ }
+ throw new Error(`Invalid JSON in ${source}: ${err.message}`);
+ }
+ };
+ const parseNdjson = rawText => {
+ const lines = rawText.split(/\r?\n/).filter(line => line.trim().length);
+ if (!lines.length) {
+ return null;
+ }
+ const items = [];
+ for (const line of lines) {
+ try {
+ items.push(JSON.parse(line));
+ } catch (err) {
+ return null;
+ }
+ }
+ return items;
+ };
  var index = {
  generate: generate
  };
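
For reference, a minimal usage sketch of the options surface visible in this diff. The require name, file paths, and option values below are illustrative assumptions, not taken from the package's documentation:

    const reporter = require('cucumber-reactive-reporter');

    // title, description, metadata and linkTags already existed in 1.0.10;
    // inputFormat, attachmentsEncoding and cucumberVersion are new in 1.1.0.
    await reporter.generate('results/cucumber.json', 'reports/html', {
      title: 'Nightly run',
      inputFormat: 'auto',      // detect and reject message-formatter output explicitly
      cucumberVersion: '7.3.1'  // drives the attachmentsEncoding default: 'raw' before 7, 'base64' from 7 on
      // attachmentsEncoding: 'base64'  // or set the encoding explicitly: 'auto' | 'base64' | 'raw'
    });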