cucumber-reactive-reporter 1.0.11 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,181 +2,304 @@
 
  function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
 
+ var module$1 = require('module');
  var fs = _interopDefault(require('fs'));
+ require('fs/promises');
  var ncp = _interopDefault(require('ncp'));
  var path = _interopDefault(require('path'));
  var ut = _interopDefault(require('util'));
 
- // import { createRequire } from 'module';
-
- ncp.limit = 16;
-
- let modulePath = require.resolve("./package.json"); //trick to resolve path to the installed module
-
- /**
- options.filter - a RegExp instance, against which each file name is tested to determine whether to copy it or not, or a function taking single parameter: copied file name, returning true or false, determining whether to copy file or not.
-
- options.transform - a function: function (read, write) { read.pipe(write) } used to apply streaming transforms while copying.
-
- options.clobber - boolean=true. if set to false, ncp will not overwrite destination files that already exist.
-
- options.dereference - boolean=false. If set to true, ncp will follow symbolic links. For example, a symlink in the source tree pointing to a regular file will become a regular file in the destination tree. Broken symlinks will result in errors.
-
- options.stopOnErr - boolean=false. If set to true, ncp will behave like cp -r, and stop on the first error it encounters. By default, ncp continues copying, logging all errors and returning an array.
-
- options.errs - stream. If options.stopOnErr is false, a stream can be provided, and errors will be written to this stream.
- */
-
-
- let cp = (source, destination, options) => {
- return new Promise((resolve, reject) => {
- ncp(source, destination, err => {
- if (err) {
- reject(new Error(err));
- }
-
- resolve();
- });
- });
- };
-
- let _makeSafe = input => {
- input = input.replace(/&/g, '&');
- input = input.replace(/</g, '&lt;');
- input = input.replace(/>/g, '&gt;');
- return input;
- };
  /**
- *
- * @param source path to the cucumber results json
- * @param dest folder path where html report gets written to
- * @param options
+ * Purpose: Normalize cucumber JSON into reporter store state.
+ * Responsibilities:
+ * - Normalize legacy cucumber JSON to a stable feature/scenario/step shape.
+ * - Build feature, scenario, and step maps for the UI store.
+ * Inputs/Outputs: Accepts parsed cucumber JSON; returns store-shaped state.
+ * Invariants: Input must be legacy JSON (features/elements/steps).
+ * See: /agents.md
  */
 
-
- const generate = async (source, dest, options) => {
- options ? true : options = {};
- const CUCUMBER_JSON_PATH = "_cucumber-results.json";
- const SETTINGS_JSON_PATH = "_reporter_settings.json";
- const HTML_PATH = path.join(path.dirname(modulePath), "react"); // "linkTags": [{
- // "pattern": "[a-zA-Z]*-(\\d)*$",
- // "link": "https://bydeluxe.atlassian.net/browse/"
- // }]
- //defaults
-
- const {
- title = "Cucumber Report",
- //report page title
- description = "Cucumber report",
- //description to be set at the page header
- metadata = {},
- linkTags = null
- } = options;
-
- let __dirname = path.resolve();
-
- if (path.isAbsolute(source) === false) {
- source = path.join(__dirname, source);
+ const LEGACY_FORMAT_HELP = ["Unsupported cucumber output format.", "This reporter expects legacy JSON (features/elements/steps).", "If you are using the message formatter, rerun with --format json:<file> or", 'use inputFormat: "auto" to detect message output.'].join(" ");
+ const INPUT_FORMAT_HELP = ['inputFormat must be "legacy-json" or "auto".', 'Use "legacy-json" for --format json:<file> output.', 'Use "auto" to detect and reject message formatter output explicitly.'].join(" ");
+ const ATTACHMENTS_ENCODING_HELP = ['attachmentsEncoding must be "auto", "base64", or "raw".', 'Use "raw" if your cucumber JSON stores text attachments unencoded.', 'Use "base64" if text attachments are base64-encoded.', 'Use "auto" to decode base64-looking text attachments.'].join(" ");
+ const normalizeMimeType = value => String(value ?? "").split(";")[0].trim().toLowerCase();
+ const shouldDecodeEmbedding = mimeType => {
+ if (!mimeType) {
+ return false;
  }
-
- fs.accessSync(source);
-
- if (!dest) {
- dest = path.dirname(source);
- } else {
- if (path.isAbsolute(dest) === false) {
- dest = path.resolve(dest);
+ if (mimeType.startsWith("text/")) {
+ return true;
+ }
+ return mimeType === "application/json" || mimeType === "application/xml";
+ };
+ const looksLikeBase64 = value => {
+ if (typeof value !== "string") {
+ return false;
+ }
+ const trimmed = value.trim();
+ if (!trimmed || trimmed.length % 4 !== 0) {
+ return false;
+ }
+ if (/[^A-Za-z0-9+/=]/.test(trimmed)) {
+ return false;
+ }
+ return true;
+ };
+ const isLikelyText = value => {
+ if (typeof value !== "string") {
+ return false;
+ }
+ if (value.includes("\uFFFD")) {
+ return false;
+ }
+ const sample = value.slice(0, 2000);
+ if (!sample.length) {
+ return true;
+ }
+ let printable = 0;
+ for (const char of sample) {
+ const code = char.charCodeAt(0);
+ if (code === 9 || code === 10 || code === 13) {
+ printable += 1;
+ continue;
+ }
+ if (code >= 32 && code !== 127) {
+ printable += 1;
  }
  }
-
- console.log(`__dirname: ${__dirname}\n` + `html path: ${HTML_PATH}\n` + `source: ${source}\n` + `destination: ${dest}\n` + `title: ${title}\n` + `description: ${description}\n` + `metadata: ${ut.inspect(metadata, false, null)}\n` + `linkTags: ${ut.inspect(linkTags, false, null)}\n`); //validate input json and make a copy
-
- let str = fs.readFileSync(source).toString();
- let obj = JSON.parse(str);
-
- let out = _prepDataForStore(obj);
-
- let modifiedJSON = JSON.stringify(out);
- let destExists = true;
-
- try {
- fs.accessSync(dest);
- } catch (err) {
- destExists = false;
+ return printable / sample.length > 0.85;
+ };
+ const decodeBase64Text = value => {
+ if (!looksLikeBase64(value)) {
+ return null;
  }
-
- if (!destExists) {
- fs.mkdirSync(dest, {
- recursive: true
- });
+ const decoded = Buffer.from(value, "base64").toString("utf8");
+ if (!isLikelyText(decoded)) {
+ return null;
  }
-
- fs.writeFileSync(path.join(dest, CUCUMBER_JSON_PATH), modifiedJSON);
- fs.writeFileSync(path.join(dest, SETTINGS_JSON_PATH), JSON.stringify(options));
- await cp(HTML_PATH, dest); //swap out some tokens in the html
-
- let indexPagePath = path.join(dest, "index.html");
- let htmlStr = fs.readFileSync(indexPagePath, "utf8").toString();
- let modified = htmlStr.replace(/-=title=-/g, _makeSafe(title));
- fs.writeFileSync(indexPagePath, modified, "utf8");
- console.log("done");
+ return decoded;
+ };
+ const normalizeEmbeddings = (embeddings, {
+ attachmentsEncoding
+ }) => {
+ if (!Array.isArray(embeddings)) {
+ return embeddings;
+ }
+ return embeddings.map(embedding => normalizeEmbedding(embedding, {
+ attachmentsEncoding
+ }));
+ };
+ const normalizeEmbedding = (embedding, {
+ attachmentsEncoding
+ }) => {
+ if (!embedding || typeof embedding !== "object") {
+ return embedding;
+ }
+ if (attachmentsEncoding === "raw") {
+ return embedding;
+ }
+ const mimeType = normalizeMimeType(embedding.mime_type ?? embedding.media?.type);
+ if (!shouldDecodeEmbedding(mimeType)) {
+ return embedding;
+ }
+ if (typeof embedding.data !== "string") {
+ return embedding;
+ }
+ // Legacy cucumber JSON embeds text payloads as base64; decode for readable output.
+ const decoded = decodeBase64Text(embedding.data);
+ if (!decoded) {
+ return embedding;
+ }
+ if (mimeType === "application/json") {
+ try {
+ JSON.parse(decoded);
+ } catch (err) {
+ return embedding;
+ }
+ } else if (["application/xml", "text/xml", "text/html"].includes(mimeType)) {
+ if (!decoded.includes("<")) {
+ return embedding;
+ }
+ }
+ return {
+ ...embedding,
+ data: decoded
+ };
+ };
+ const resolveAttachmentsEncoding = ({
+ attachmentsEncoding,
+ cucumberVersion
+ }) => {
+ if (!attachmentsEncoding) {
+ const parsed = parseCucumberMajor(cucumberVersion);
+ if (Number.isFinite(parsed)) {
+ return parsed < 7 ? "raw" : "base64";
+ }
+ return "auto";
+ }
+ if (!["auto", "base64", "raw"].includes(attachmentsEncoding)) {
+ throw new Error(ATTACHMENTS_ENCODING_HELP);
+ }
+ return attachmentsEncoding;
+ };
+ const parseCucumberMajor = cucumberVersion => {
+ if (!cucumberVersion) {
+ return null;
+ }
+ const value = String(cucumberVersion).trim();
+ if (!value) {
+ return null;
+ }
+ const match = value.match(/(\d+)(?:\.\d+)?/);
+ if (!match) {
+ return null;
+ }
+ const major = Number.parseInt(match[1], 10);
+ return Number.isFinite(major) ? major : null;
  };
 
- let _prepDataForStore = data => {
- let state = {};
- state.features = {};
- state.features.list = [];
- state.features.featuresMap = {};
- state.scenarios = {};
- state.scenarios.list = [];
- state.scenarios.scenariosMap = {};
- state.steps = {};
- state.steps.stepsMap = {};
- state.steps.totalDurationNanoSec = 0; //parse
-
+ /**
+ * Convert cucumber JSON into the reporter store shape.
+ * @param {unknown} input parsed cucumber JSON
+ * @returns {Object} normalized state for the UI store
+ * @throws {Error} when input is not legacy cucumber JSON
+ * @example
+ * const state = prepareStoreState(legacyJsonArray);
+ */
+ const prepareStoreState = (input, {
+ inputFormat = "legacy-json",
+ attachmentsEncoding,
+ cucumberVersion
+ } = {}) => {
+ if (!["legacy-json", "auto"].includes(inputFormat)) {
+ throw new Error(INPUT_FORMAT_HELP);
+ }
+ if (inputFormat === "auto" && looksLikeMessageStream(input)) {
+ throw new Error(LEGACY_FORMAT_HELP);
+ }
+ const resolvedEncoding = resolveAttachmentsEncoding({
+ attachmentsEncoding,
+ cucumberVersion
+ });
+ const features = resolveFeatures(input);
+ if (!features) {
+ throw new Error(LEGACY_FORMAT_HELP);
+ }
+ const state = createEmptyState();
  let featureIndex = 0;
- console.time("loadTotal");
-
- for (let f of data) {
- //FEATURE
- //cucumber id field is not guaranteed to be unique for feature/scenario/step
- f.id = `${featureIndex++}_${f.id}`;
-
- _processFeature(state, f); //SCENARIO
-
-
- let numScenarios = f.elements.length; //avoid multiple lookups;
-
- if (f.elements && numScenarios) {
- let sc_index = 0;
-
- for (let sc of f.elements) {
- //need to make scenario id unique as well
- sc_index++;
- let sc_id_arr = sc.id.split(";");
- sc_id_arr[0] = f.id;
-
- if (sc_id_arr.length) {
- sc_id_arr[1] = `${sc_index - 1}_${sc_id_arr[1]}`;
- }
-
- sc.id = sc_id_arr.join(";");
-
- _processScenario(state, f.id, sc); //STEPS
-
-
- for (let st of sc.steps) {
- _processStep(state, sc.id, st);
- }
- }
+ for (const rawFeature of features) {
+ if (!rawFeature) {
+ continue;
  }
+ const feature = normalizeFeature(rawFeature, featureIndex);
+ featureIndex += 1;
+ processFeature(state, feature);
+ processFeatureElements(state, feature, {
+ attachmentsEncoding: resolvedEncoding
+ });
  }
-
- console.timeEnd("loadTotal");
  return state;
  };
-
- let _processFeature = (state, f) => {
+ const createEmptyState = () => ({
+ features: {
+ list: [],
+ featuresMap: {}
+ },
+ scenarios: {
+ list: [],
+ scenariosMap: {}
+ },
+ steps: {
+ stepsMap: {},
+ totalDurationNanoSec: 0
+ }
+ });
+ const looksLikeMessageStream = input => {
+ if (!Array.isArray(input)) {
+ return false;
+ }
+ return input.some(item => {
+ if (!item || typeof item !== "object") {
+ return false;
+ }
+ return "gherkinDocument" in item || "pickle" in item || "testCaseStarted" in item || "testCaseFinished" in item || "envelope" in item;
+ });
+ };
+ const resolveFeatures = input => {
+ if (Array.isArray(input)) {
+ return input;
+ }
+ if (input && Array.isArray(input.features)) {
+ return input.features;
+ }
+ return null;
+ };
+ const normalizeFeature = (feature, index) => {
+ const baseId = feature?.id ?? feature?.name ?? "feature";
+ const elements = normalizeElements(feature);
+ return {
+ ...feature,
+ id: `${index}_${baseId}`,
+ elements,
+ tags: Array.isArray(feature?.tags) ? feature.tags : []
+ };
+ };
+ const normalizeElements = feature => {
+ if (!feature) {
+ return [];
+ }
+ if (Array.isArray(feature.elements)) {
+ return feature.elements;
+ }
+ if (Array.isArray(feature.scenarios)) {
+ return feature.scenarios;
+ }
+ if (Array.isArray(feature.children)) {
+ return flattenChildren(feature.children);
+ }
+ return [];
+ };
+ const flattenChildren = children => {
+ const flattened = [];
+ for (const child of children) {
+ if (!child) {
+ continue;
+ }
+ if (child.scenario) {
+ flattened.push(child.scenario);
+ continue;
+ }
+ if (child.background) {
+ flattened.push(child.background);
+ continue;
+ }
+ if (child.rule && Array.isArray(child.rule.children)) {
+ flattened.push(...flattenChildren(child.rule.children));
+ continue;
+ }
+ if (Array.isArray(child.children)) {
+ flattened.push(...flattenChildren(child.children));
+ continue;
+ }
+ flattened.push(child);
+ }
+ return flattened;
+ };
+ const normalizeScenario = (featureId, scenario, index) => {
+ const baseId = scenario?.id ?? scenario?.name ?? "scenario";
+ const scenarioId = buildScenarioId(featureId, baseId, index);
+ return {
+ ...scenario,
+ id: scenarioId,
+ tags: Array.isArray(scenario?.tags) ? scenario.tags : []
+ };
+ };
+ const buildScenarioId = (featureId, scenarioId, index) => {
+ const parts = String(scenarioId).split(";");
+ const suffix = parts.length > 1 ? parts[1] : parts[0];
+ return `${featureId};${index}_${suffix}`;
+ };
+ const processFeature = (state, feature) => {
  const {
  description,
  elements,
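
Note: the normalizer added in this hunk consumes legacy cucumber JSON shaped roughly like the hypothetical fragment below (field names taken from the code above; values are illustrative only). When attachmentsEncoding resolves to "auto" or "base64", a base64 text attachment such as "aGVsbG8=" (text/plain for "hello") is decoded back to readable text; with "raw" it is left untouched.

// Hypothetical legacy cucumber JSON fragment, for orientation only.
const legacyJson = [{
  id: "login",
  name: "Login",
  elements: [{
    id: "login;happy-path",
    name: "happy path",
    steps: [{
      keyword: "Given ",
      name: "a registered user",
      result: { status: "passed", duration: 1200000 },
      embeddings: [{ mime_type: "text/plain", data: "aGVsbG8=" }] // decoded to "hello"
    }]
  }]
}];
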
@@ -184,42 +307,34 @@ let _processFeature = (state, f) => {
  keyword,
  line,
  name,
- tags: [...tags],
+ tags,
  uri
- } = f;
- const allTags = [...tags]; //figure out if it has failed stuff
-
+ } = feature;
+ const allTags = Array.isArray(tags) ? [...tags] : [];
  let numFailedScenarios = 0;
  let numSkippedScenarios = 0;
-
- if (elements && elements.length) {
- for (let el of elements) {
- //collect scenario tags
- if (el.tags && el.tags.length) {
- let temp = allTags.map(t => t.name);
- el.tags.forEach(tag => {
- if (temp.includes(tag.name) === false) {
- allTags.push(tag);
- }
- });
+ const elementList = Array.isArray(elements) ? elements : [];
+ for (const element of elementList) {
+ const elementTags = Array.isArray(element?.tags) ? element.tags : [];
+ const seen = allTags.map(tag => tag.name);
+ for (const tag of elementTags) {
+ if (tag?.name && !seen.includes(tag.name)) {
+ allTags.push(tag);
  }
-
- if (el.steps && el.steps.length) {
- for (let step of el.steps) {
- if (step.result && step.result.status === "failed") {
- numFailedScenarios++;
- break;
- }
-
- if (step.result && step.result.status === "skipped") {
- numSkippedScenarios++;
- break;
- }
- }
+ }
+ const steps = Array.isArray(element?.steps) ? element.steps : [];
+ for (const step of steps) {
+ const status = step?.result?.status;
+ if (status === "failed") {
+ numFailedScenarios += 1;
+ break;
+ }
+ if (status === "skipped") {
+ numSkippedScenarios += 1;
+ break;
  }
  }
  }
-
  state.features.list.push(id);
  state.features.featuresMap[id] = {
  id,
@@ -228,20 +343,19 @@ let _processFeature = (state, f) => {
  keyword,
  name,
  line,
- tags,
+ tags: Array.isArray(tags) ? tags : [],
  allTags,
  numFailedScenarios,
  numSkippedScenarios
  };
  };
-
- let _processScenario = (state, featureId, scenario) => {
+ const processScenario = (state, featureId, scenario) => {
  const {
  id,
  keyword,
  line,
  name,
- tags: [...tags],
+ tags,
  type,
  uri
  } = scenario;
@@ -255,13 +369,44 @@ let _processScenario = (state, featureId, scenario) => {
  name,
  passedSteps: 0,
  skippedSteps: 0,
- tags,
+ tags: Array.isArray(tags) ? tags : [],
  type,
  uri
  };
  };
-
- let _processStep = (state, scenarioId, st) => {
+ const processFeatureElements = (state, feature, {
+ attachmentsEncoding
+ }) => {
+ const elements = feature.elements;
+ if (!elements.length) {
+ return;
+ }
+ let scenarioIndex = 0;
+ for (const rawScenario of elements) {
+ if (!rawScenario) {
+ continue;
+ }
+ const scenario = normalizeScenario(feature.id, rawScenario, scenarioIndex);
+ scenarioIndex += 1;
+ processScenario(state, feature.id, scenario);
+ processScenarioSteps(state, scenario, {
+ attachmentsEncoding
+ });
+ }
+ };
+ const processScenarioSteps = (state, scenario, {
+ attachmentsEncoding
+ }) => {
+ const steps = Array.isArray(scenario.steps) ? scenario.steps : [];
+ for (const step of steps) {
+ processStep(state, scenario.id, step, {
+ attachmentsEncoding
+ });
+ }
+ };
+ const processStep = (state, scenarioId, step, {
+ attachmentsEncoding
+ }) => {
  const {
  arguments: args,
  embeddings,
@@ -269,18 +414,22 @@ let _processStep = (state, scenarioId, st) => {
  keyword,
  line,
  name,
- result: {
- duration,
- error_message,
- status
- }
- } = st;
- let location = "";
- if (st.match) location = st.match.location;
- let step = {
- args,
+ result
+ } = step ?? {};
+ const {
  duration,
- embeddings,
+ error_message,
+ status
+ } = result ?? {};
+ const durationValue = typeof duration === "string" ? Number(duration) : duration;
+ const location = step?.match?.location ?? "";
+ const normalizedEmbeddings = normalizeEmbeddings(embeddings, {
+ attachmentsEncoding
+ });
+ const stepData = {
+ args,
+ duration: durationValue,
+ embeddings: normalizedEmbeddings,
  error_message,
  keyword,
  line,
@@ -288,28 +437,177 @@ let _processStep = (state, scenarioId, st) => {
  name,
  status
  };
- if (!state.steps.stepsMap[scenarioId]) state.steps.stepsMap[scenarioId] = {
- steps: []
- };
- state.steps.stepsMap[scenarioId].steps.push(step);
-
- if (isNaN(duration) === false) {
- state.steps.totalDurationNanoSec = state.steps.totalDurationNanoSec + duration;
+ if (!state.steps.stepsMap[scenarioId]) {
+ state.steps.stepsMap[scenarioId] = {
+ steps: []
+ };
  }
-
- if (!hidden || embeddings && embeddings.length) {
+ state.steps.stepsMap[scenarioId].steps.push(stepData);
+ if (Number.isFinite(durationValue)) {
+ state.steps.totalDurationNanoSec += durationValue;
+ }
+ if (!hidden || normalizedEmbeddings && normalizedEmbeddings.length) {
  if (status === "passed") {
- state.scenarios.scenariosMap[scenarioId].passedSteps++;
+ state.scenarios.scenariosMap[scenarioId].passedSteps += 1;
  } else if (status === "skipped") {
- state.scenarios.scenariosMap[scenarioId].skippedSteps++;
+ state.scenarios.scenariosMap[scenarioId].skippedSteps += 1;
  }
  }
-
  if (status === "failed") {
- state.scenarios.scenariosMap[scenarioId].failedSteps++;
+ state.scenarios.scenariosMap[scenarioId].failedSteps += 1;
  }
  };
 
+ /**
+ * Purpose: Generate HTML reports from cucumber JSON output.
+ * Responsibilities:
+ * - Normalize cucumber JSON into store state.
+ * - Copy report assets and write report metadata.
+ * Inputs/Outputs: Reads a cucumber JSON file and writes a report folder.
+ * Invariants: Expects legacy cucumber JSON (features/elements/steps).
+ * See: /agents.md
+ */
+ const require$1 = module$1.createRequire((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('cucumber-reactive-reporter.cjs.js', document.baseURI).href)));
+ ncp.limit = 16;
+ const modulePath = require$1.resolve("./package.json"); //trick to resolve path to the installed module
+
+ /**
+ options.filter - a RegExp instance, against which each file name is tested to determine whether to copy it or not, or a function taking single parameter: copied file name, returning true or false, determining whether to copy file or not.
+
+ options.transform - a function: function (read, write) { read.pipe(write) } used to apply streaming transforms while copying.
+
+ options.clobber - boolean=true. if set to false, ncp will not overwrite destination files that already exist.
+
+ options.dereference - boolean=false. If set to true, ncp will follow symbolic links. For example, a symlink in the source tree pointing to a regular file will become a regular file in the destination tree. Broken symlinks will result in errors.
+
+ options.stopOnErr - boolean=false. If set to true, ncp will behave like cp -r, and stop on the first error it encounters. By default, ncp continues copying, logging all errors and returning an array.
+
+ options.errs - stream. If options.stopOnErr is false, a stream can be provided, and errors will be written to this stream.
+ */
+
+ let cp = (source, destination, options) => {
+ return new Promise((resolve, reject) => {
+ ncp(source, destination, err => {
+ if (err) {
+ reject(new Error(err));
+ }
+ resolve();
+ });
+ });
+ };
+ const _makeSafe = input => {
+ input = input.replace(/&/g, '&amp;');
+ input = input.replace(/</g, '&lt;');
+ input = input.replace(/>/g, '&gt;');
+ return input;
+ };
+
+ /**
+ * Generate a report from cucumber JSON output.
+ * @param {string} source path to the cucumber results JSON file
+ * @param {string} dest folder path where the HTML report gets written
+ * @param {Object} options report configuration overrides
+ * @param {"legacy-json"|"auto"} [options.inputFormat] input JSON format selector
+ * @param {"auto"|"base64"|"raw"} [options.attachmentsEncoding] attachment encoding
+ * @param {string} [options.cucumberVersion] cucumber version (for encoding hints)
+ * @returns {Promise<void>} resolves when report assets are written
+ * @throws {Error} when input JSON is invalid or unsupported
+ * @example
+ * await generate("results/cucumber.json", "reports/out", { title: "Run #1" });
+ */
+ const generate = async (source, dest, options) => {
+ options ? true : options = {};
+ const CUCUMBER_JSON_PATH = "_cucumber-results.json";
+ const SETTINGS_JSON_PATH = "_reporter_settings.json";
+ const HTML_PATH = path.join(path.dirname(modulePath), "react");
+
+ // "linkTags": [{
+ // "pattern": "[a-zA-Z]*-(\\d)*$",
+ // "link": "https://bydeluxe.atlassian.net/browse/"
+
+ // }]
+ //defaults
+ const {
+ title = "Cucumber Report",
+ //report page title
+ description = "Cucumber report",
+ //description to be set at the page header
+ metadata = {},
+ linkTags = null,
+ inputFormat = "legacy-json",
+ attachmentsEncoding,
+ cucumberVersion
+ } = options;
+ let __dirname = path.resolve();
+ if (path.isAbsolute(source) === false) {
+ source = path.join(__dirname, source);
+ }
+ fs.accessSync(source);
+ if (!dest) {
+ dest = path.dirname(source);
+ } else {
+ if (path.isAbsolute(dest) === false) {
+ dest = path.resolve(dest);
+ }
+ }
+ console.log(`__dirname: ${__dirname}\n` + `html path: ${HTML_PATH}\n` + `source: ${source}\n` + `destination: ${dest}\n` + `title: ${title}\n` + `description: ${description}\n` + `metadata: ${ut.inspect(metadata, false, null)}\n` + `linkTags: ${ut.inspect(linkTags, false, null)}\n`);
+
+ //validate input json and make a copy
+ let str = fs.readFileSync(source, "utf8");
+ let obj = parseInputData(source, str);
+ let out = prepareStoreState(obj, {
+ inputFormat,
+ attachmentsEncoding,
+ cucumberVersion
+ });
+ let modifiedJSON = JSON.stringify(out);
+ let destExists = true;
+ try {
+ fs.accessSync(dest);
+ } catch (err) {
+ destExists = false;
+ }
+ if (!destExists) {
+ fs.mkdirSync(dest, {
+ recursive: true
+ });
+ }
+ fs.writeFileSync(path.join(dest, CUCUMBER_JSON_PATH), modifiedJSON);
+ fs.writeFileSync(path.join(dest, SETTINGS_JSON_PATH), JSON.stringify(options));
+ await cp(HTML_PATH, dest);
+ //swap out some tokens in the html
+ let indexPagePath = path.join(dest, "index.html");
+ let htmlStr = fs.readFileSync(indexPagePath, "utf8").toString();
+ let modified = htmlStr.replace(/-=title=-/g, _makeSafe(title));
+ fs.writeFileSync(indexPagePath, modified, "utf8");
+ console.log("done");
+ };
+ const parseInputData = (source, rawText) => {
+ try {
+ return JSON.parse(rawText);
+ } catch (err) {
+ const ndjson = parseNdjson(rawText);
+ if (ndjson) {
+ return ndjson;
+ }
+ throw new Error(`Invalid JSON in ${source}: ${err.message}`);
+ }
+ };
+ const parseNdjson = rawText => {
+ const lines = rawText.split(/\r?\n/).filter(line => line.trim().length);
+ if (!lines.length) {
+ return null;
+ }
+ const items = [];
+ for (const line of lines) {
+ try {
+ items.push(JSON.parse(line));
+ } catch (err) {
+ return null;
+ }
+ }
+ return items;
+ };
  var index = {
  generate: generate
  };
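
Usage sketch for the new options added in 1.1.0 (the require path and export shape are assumptions based on this bundle; option values are illustrative):

const reporter = require("cucumber-reactive-reporter"); // assumed entry point; adjust to your setup

(async () => {
  await reporter.generate("results/cucumber.json", "reports/out", {
    title: "Nightly run",
    inputFormat: "legacy-json",   // or "auto" to reject message-formatter output with a clear error
    attachmentsEncoding: "auto",  // "base64" or "raw" to bypass the decode heuristic
    cucumberVersion: "7.3.1"      // consulted only when attachmentsEncoding is omitted (<7 -> "raw", >=7 -> "base64")
  });
})();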