cucumber-reactive-reporter 1.0.11 → 1.1.0

This diff shows the published contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,178 +1,301 @@
+ import { createRequire } from 'module';
  import fs from 'fs';
+ import 'fs/promises';
  import ncp from 'ncp';
  import path from 'path';
  import ut from 'util';
 
- // import { createRequire } from 'module';
-
- ncp.limit = 16;
-
- let modulePath = require.resolve("./package.json"); //trick to resolve path to the installed module
-
- /**
- options.filter - a RegExp instance, against which each file name is tested to determine whether to copy it or not, or a function taking single parameter: copied file name, returning true or false, determining whether to copy file or not.
-
- options.transform - a function: function (read, write) { read.pipe(write) } used to apply streaming transforms while copying.
-
- options.clobber - boolean=true. if set to false, ncp will not overwrite destination files that already exist.
-
- options.dereference - boolean=false. If set to true, ncp will follow symbolic links. For example, a symlink in the source tree pointing to a regular file will become a regular file in the destination tree. Broken symlinks will result in errors.
-
- options.stopOnErr - boolean=false. If set to true, ncp will behave like cp -r, and stop on the first error it encounters. By default, ncp continues copying, logging all errors and returning an array.
-
- options.errs - stream. If options.stopOnErr is false, a stream can be provided, and errors will be written to this stream.
- */
-
-
- let cp = (source, destination, options) => {
- return new Promise((resolve, reject) => {
- ncp(source, destination, err => {
- if (err) {
- reject(new Error(err));
- }
-
- resolve();
- });
- });
- };
-
- let _makeSafe = input => {
- input = input.replace(/&/g, '&');
- input = input.replace(/</g, '&lt;');
- input = input.replace(/>/g, '&gt;');
- return input;
- };
  /**
- *
- * @param source path to the cucumber results json
- * @param dest folder path where html report gets written to
- * @param options
+ * Purpose: Normalize cucumber JSON into reporter store state.
+ * Responsibilities:
+ * - Normalize legacy cucumber JSON to a stable feature/scenario/step shape.
+ * - Build feature, scenario, and step maps for the UI store.
+ * Inputs/Outputs: Accepts parsed cucumber JSON; returns store-shaped state.
+ * Invariants: Input must be legacy JSON (features/elements/steps).
+ * See: /agents.md
  */
 
-
- const generate = async (source, dest, options) => {
- options ? true : options = {};
- const CUCUMBER_JSON_PATH = "_cucumber-results.json";
- const SETTINGS_JSON_PATH = "_reporter_settings.json";
- const HTML_PATH = path.join(path.dirname(modulePath), "react"); // "linkTags": [{
- // "pattern": "[a-zA-Z]*-(\\d)*$",
- // "link": "https://bydeluxe.atlassian.net/browse/"
- // }]
- //defaults
-
- const {
- title = "Cucumber Report",
- //report page title
- description = "Cucumber report",
- //description to be set at the page header
- metadata = {},
- linkTags = null
- } = options;
-
- let __dirname = path.resolve();
-
- if (path.isAbsolute(source) === false) {
- source = path.join(__dirname, source);
+ const LEGACY_FORMAT_HELP = ["Unsupported cucumber output format.", "This reporter expects legacy JSON (features/elements/steps).", "If you are using the message formatter, rerun with --format json:<file> or", 'use inputFormat: "auto" to detect message output.'].join(" ");
+ const INPUT_FORMAT_HELP = ['inputFormat must be "legacy-json" or "auto".', 'Use "legacy-json" for --format json:<file> output.', 'Use "auto" to detect and reject message formatter output explicitly.'].join(" ");
+ const ATTACHMENTS_ENCODING_HELP = ['attachmentsEncoding must be "auto", "base64", or "raw".', 'Use "raw" if your cucumber JSON stores text attachments unencoded.', 'Use "base64" if text attachments are base64-encoded.', 'Use "auto" to decode base64-looking text attachments.'].join(" ");
+ const normalizeMimeType = value => String(value ?? "").split(";")[0].trim().toLowerCase();
+ const shouldDecodeEmbedding = mimeType => {
+ if (!mimeType) {
+ return false;
  }
-
- fs.accessSync(source);
-
- if (!dest) {
- dest = path.dirname(source);
- } else {
- if (path.isAbsolute(dest) === false) {
- dest = path.resolve(dest);
+ if (mimeType.startsWith("text/")) {
+ return true;
+ }
+ return mimeType === "application/json" || mimeType === "application/xml";
+ };
+ const looksLikeBase64 = value => {
+ if (typeof value !== "string") {
+ return false;
+ }
+ const trimmed = value.trim();
+ if (!trimmed || trimmed.length % 4 !== 0) {
+ return false;
+ }
+ if (/[^A-Za-z0-9+/=]/.test(trimmed)) {
+ return false;
+ }
+ return true;
+ };
+ const isLikelyText = value => {
+ if (typeof value !== "string") {
+ return false;
+ }
+ if (value.includes("\uFFFD")) {
+ return false;
+ }
+ const sample = value.slice(0, 2000);
+ if (!sample.length) {
+ return true;
+ }
+ let printable = 0;
+ for (const char of sample) {
+ const code = char.charCodeAt(0);
+ if (code === 9 || code === 10 || code === 13) {
+ printable += 1;
+ continue;
+ }
+ if (code >= 32 && code !== 127) {
+ printable += 1;
  }
  }
-
- console.log(`__dirname: ${__dirname}\n` + `html path: ${HTML_PATH}\n` + `source: ${source}\n` + `destination: ${dest}\n` + `title: ${title}\n` + `description: ${description}\n` + `metadata: ${ut.inspect(metadata, false, null)}\n` + `linkTags: ${ut.inspect(linkTags, false, null)}\n`); //validate input json and make a copy
-
- let str = fs.readFileSync(source).toString();
- let obj = JSON.parse(str);
-
- let out = _prepDataForStore(obj);
-
- let modifiedJSON = JSON.stringify(out);
- let destExists = true;
-
- try {
- fs.accessSync(dest);
- } catch (err) {
- destExists = false;
+ return printable / sample.length > 0.85;
+ };
+ const decodeBase64Text = value => {
+ if (!looksLikeBase64(value)) {
+ return null;
  }
-
- if (!destExists) {
- fs.mkdirSync(dest, {
- recursive: true
- });
+ const decoded = Buffer.from(value, "base64").toString("utf8");
+ if (!isLikelyText(decoded)) {
+ return null;
  }
-
- fs.writeFileSync(path.join(dest, CUCUMBER_JSON_PATH), modifiedJSON);
- fs.writeFileSync(path.join(dest, SETTINGS_JSON_PATH), JSON.stringify(options));
- await cp(HTML_PATH, dest); //swap out some tokens in the html
-
- let indexPagePath = path.join(dest, "index.html");
- let htmlStr = fs.readFileSync(indexPagePath, "utf8").toString();
- let modified = htmlStr.replace(/-=title=-/g, _makeSafe(title));
- fs.writeFileSync(indexPagePath, modified, "utf8");
- console.log("done");
+ return decoded;
+ };
+ const normalizeEmbeddings = (embeddings, {
+ attachmentsEncoding
+ }) => {
+ if (!Array.isArray(embeddings)) {
+ return embeddings;
+ }
+ return embeddings.map(embedding => normalizeEmbedding(embedding, {
+ attachmentsEncoding
+ }));
+ };
+ const normalizeEmbedding = (embedding, {
+ attachmentsEncoding
+ }) => {
+ if (!embedding || typeof embedding !== "object") {
+ return embedding;
+ }
+ if (attachmentsEncoding === "raw") {
+ return embedding;
+ }
+ const mimeType = normalizeMimeType(embedding.mime_type ?? embedding.media?.type);
+ if (!shouldDecodeEmbedding(mimeType)) {
+ return embedding;
+ }
+ if (typeof embedding.data !== "string") {
+ return embedding;
+ }
+ // Legacy cucumber JSON embeds text payloads as base64; decode for readable output.
+ const decoded = decodeBase64Text(embedding.data);
+ if (!decoded) {
+ return embedding;
+ }
+ if (mimeType === "application/json") {
+ try {
+ JSON.parse(decoded);
+ } catch (err) {
+ return embedding;
+ }
+ } else if (["application/xml", "text/xml", "text/html"].includes(mimeType)) {
+ if (!decoded.includes("<")) {
+ return embedding;
+ }
+ }
+ return {
+ ...embedding,
+ data: decoded
+ };
+ };
+ const resolveAttachmentsEncoding = ({
+ attachmentsEncoding,
+ cucumberVersion
+ }) => {
+ if (!attachmentsEncoding) {
+ const parsed = parseCucumberMajor(cucumberVersion);
+ if (Number.isFinite(parsed)) {
+ return parsed < 7 ? "raw" : "base64";
+ }
+ return "auto";
+ }
+ if (!["auto", "base64", "raw"].includes(attachmentsEncoding)) {
+ throw new Error(ATTACHMENTS_ENCODING_HELP);
+ }
+ return attachmentsEncoding;
+ };
+ const parseCucumberMajor = cucumberVersion => {
+ if (!cucumberVersion) {
+ return null;
+ }
+ const value = String(cucumberVersion).trim();
+ if (!value) {
+ return null;
+ }
+ const match = value.match(/(\d+)(?:\.\d+)?/);
+ if (!match) {
+ return null;
+ }
+ const major = Number.parseInt(match[1], 10);
+ return Number.isFinite(major) ? major : null;
  };
 
- let _prepDataForStore = data => {
- let state = {};
- state.features = {};
- state.features.list = [];
- state.features.featuresMap = {};
- state.scenarios = {};
- state.scenarios.list = [];
- state.scenarios.scenariosMap = {};
- state.steps = {};
- state.steps.stepsMap = {};
- state.steps.totalDurationNanoSec = 0; //parse
-
+ /**
+ * Convert cucumber JSON into the reporter store shape.
+ * @param {unknown} input parsed cucumber JSON
+ * @returns {Object} normalized state for the UI store
+ * @throws {Error} when input is not legacy cucumber JSON
+ * @example
+ * const state = prepareStoreState(legacyJsonArray);
+ */
+ const prepareStoreState = (input, {
+ inputFormat = "legacy-json",
+ attachmentsEncoding,
+ cucumberVersion
+ } = {}) => {
+ if (!["legacy-json", "auto"].includes(inputFormat)) {
+ throw new Error(INPUT_FORMAT_HELP);
+ }
+ if (inputFormat === "auto" && looksLikeMessageStream(input)) {
+ throw new Error(LEGACY_FORMAT_HELP);
+ }
+ const resolvedEncoding = resolveAttachmentsEncoding({
+ attachmentsEncoding,
+ cucumberVersion
+ });
+ const features = resolveFeatures(input);
+ if (!features) {
+ throw new Error(LEGACY_FORMAT_HELP);
+ }
+ const state = createEmptyState();
  let featureIndex = 0;
- console.time("loadTotal");
-
- for (let f of data) {
- //FEATURE
- //cucumber id field is not guaranteed to be unique for feature/scenario/step
- f.id = `${featureIndex++}_${f.id}`;
-
- _processFeature(state, f); //SCENARIO
-
-
- let numScenarios = f.elements.length; //avoid multiple lookups;
-
- if (f.elements && numScenarios) {
- let sc_index = 0;
-
- for (let sc of f.elements) {
- //need to make scenario id unique as well
- sc_index++;
- let sc_id_arr = sc.id.split(";");
- sc_id_arr[0] = f.id;
-
- if (sc_id_arr.length) {
- sc_id_arr[1] = `${sc_index - 1}_${sc_id_arr[1]}`;
- }
-
- sc.id = sc_id_arr.join(";");
-
- _processScenario(state, f.id, sc); //STEPS
-
-
- for (let st of sc.steps) {
- _processStep(state, sc.id, st);
- }
- }
+ for (const rawFeature of features) {
+ if (!rawFeature) {
+ continue;
  }
+ const feature = normalizeFeature(rawFeature, featureIndex);
+ featureIndex += 1;
+ processFeature(state, feature);
+ processFeatureElements(state, feature, {
+ attachmentsEncoding: resolvedEncoding
+ });
  }
-
- console.timeEnd("loadTotal");
  return state;
  };
-
- let _processFeature = (state, f) => {
+ const createEmptyState = () => ({
+ features: {
+ list: [],
+ featuresMap: {}
+ },
+ scenarios: {
+ list: [],
+ scenariosMap: {}
+ },
+ steps: {
+ stepsMap: {},
+ totalDurationNanoSec: 0
+ }
+ });
+ const looksLikeMessageStream = input => {
+ if (!Array.isArray(input)) {
+ return false;
+ }
+ return input.some(item => {
+ if (!item || typeof item !== "object") {
+ return false;
+ }
+ return "gherkinDocument" in item || "pickle" in item || "testCaseStarted" in item || "testCaseFinished" in item || "envelope" in item;
+ });
+ };
+ const resolveFeatures = input => {
+ if (Array.isArray(input)) {
+ return input;
+ }
+ if (input && Array.isArray(input.features)) {
+ return input.features;
+ }
+ return null;
+ };
+ const normalizeFeature = (feature, index) => {
+ const baseId = feature?.id ?? feature?.name ?? "feature";
+ const elements = normalizeElements(feature);
+ return {
+ ...feature,
+ id: `${index}_${baseId}`,
+ elements,
+ tags: Array.isArray(feature?.tags) ? feature.tags : []
+ };
+ };
+ const normalizeElements = feature => {
+ if (!feature) {
+ return [];
+ }
+ if (Array.isArray(feature.elements)) {
+ return feature.elements;
+ }
+ if (Array.isArray(feature.scenarios)) {
+ return feature.scenarios;
+ }
+ if (Array.isArray(feature.children)) {
+ return flattenChildren(feature.children);
+ }
+ return [];
+ };
+ const flattenChildren = children => {
+ const flattened = [];
+ for (const child of children) {
+ if (!child) {
+ continue;
+ }
+ if (child.scenario) {
+ flattened.push(child.scenario);
+ continue;
+ }
+ if (child.background) {
+ flattened.push(child.background);
+ continue;
+ }
+ if (child.rule && Array.isArray(child.rule.children)) {
+ flattened.push(...flattenChildren(child.rule.children));
+ continue;
+ }
+ if (Array.isArray(child.children)) {
+ flattened.push(...flattenChildren(child.children));
+ continue;
+ }
+ flattened.push(child);
+ }
+ return flattened;
+ };
+ const normalizeScenario = (featureId, scenario, index) => {
+ const baseId = scenario?.id ?? scenario?.name ?? "scenario";
+ const scenarioId = buildScenarioId(featureId, baseId, index);
+ return {
+ ...scenario,
+ id: scenarioId,
+ tags: Array.isArray(scenario?.tags) ? scenario.tags : []
+ };
+ };
+ const buildScenarioId = (featureId, scenarioId, index) => {
+ const parts = String(scenarioId).split(";");
+ const suffix = parts.length > 1 ? parts[1] : parts[0];
+ return `${featureId};${index}_${suffix}`;
+ };
+ const processFeature = (state, feature) => {
  const {
  description,
  elements,
@@ -180,42 +303,34 @@ let _processFeature = (state, f) => {
  keyword,
  line,
  name,
- tags: [...tags],
+ tags,
  uri
- } = f;
- const allTags = [...tags]; //figure out if it has failed stuff
-
+ } = feature;
+ const allTags = Array.isArray(tags) ? [...tags] : [];
  let numFailedScenarios = 0;
  let numSkippedScenarios = 0;
-
- if (elements && elements.length) {
- for (let el of elements) {
- //collect scenario tags
- if (el.tags && el.tags.length) {
- let temp = allTags.map(t => t.name);
- el.tags.forEach(tag => {
- if (temp.includes(tag.name) === false) {
- allTags.push(tag);
- }
- });
+ const elementList = Array.isArray(elements) ? elements : [];
+ for (const element of elementList) {
+ const elementTags = Array.isArray(element?.tags) ? element.tags : [];
+ const seen = allTags.map(tag => tag.name);
+ for (const tag of elementTags) {
+ if (tag?.name && !seen.includes(tag.name)) {
+ allTags.push(tag);
  }
-
- if (el.steps && el.steps.length) {
- for (let step of el.steps) {
- if (step.result && step.result.status === "failed") {
- numFailedScenarios++;
- break;
- }
-
- if (step.result && step.result.status === "skipped") {
- numSkippedScenarios++;
- break;
- }
- }
+ }
+ const steps = Array.isArray(element?.steps) ? element.steps : [];
+ for (const step of steps) {
+ const status = step?.result?.status;
+ if (status === "failed") {
+ numFailedScenarios += 1;
+ break;
+ }
+ if (status === "skipped") {
+ numSkippedScenarios += 1;
+ break;
  }
  }
  }
-
  state.features.list.push(id);
  state.features.featuresMap[id] = {
  id,
@@ -224,20 +339,19 @@ let _processFeature = (state, f) => {
  keyword,
  name,
  line,
- tags,
+ tags: Array.isArray(tags) ? tags : [],
  allTags,
  numFailedScenarios,
  numSkippedScenarios
  };
  };
-
- let _processScenario = (state, featureId, scenario) => {
+ const processScenario = (state, featureId, scenario) => {
  const {
  id,
  keyword,
  line,
  name,
- tags: [...tags],
+ tags,
  type,
  uri
  } = scenario;
@@ -251,13 +365,44 @@ let _processScenario = (state, featureId, scenario) => {
  name,
  passedSteps: 0,
  skippedSteps: 0,
- tags,
+ tags: Array.isArray(tags) ? tags : [],
  type,
  uri
  };
  };
-
- let _processStep = (state, scenarioId, st) => {
+ const processFeatureElements = (state, feature, {
+ attachmentsEncoding
+ }) => {
+ const elements = feature.elements;
+ if (!elements.length) {
+ return;
+ }
+ let scenarioIndex = 0;
+ for (const rawScenario of elements) {
+ if (!rawScenario) {
+ continue;
+ }
+ const scenario = normalizeScenario(feature.id, rawScenario, scenarioIndex);
+ scenarioIndex += 1;
+ processScenario(state, feature.id, scenario);
+ processScenarioSteps(state, scenario, {
+ attachmentsEncoding
+ });
+ }
+ };
+ const processScenarioSteps = (state, scenario, {
+ attachmentsEncoding
+ }) => {
+ const steps = Array.isArray(scenario.steps) ? scenario.steps : [];
+ for (const step of steps) {
+ processStep(state, scenario.id, step, {
+ attachmentsEncoding
+ });
+ }
+ };
+ const processStep = (state, scenarioId, step, {
+ attachmentsEncoding
+ }) => {
  const {
  arguments: args,
  embeddings,
@@ -265,18 +410,22 @@ let _processStep = (state, scenarioId, st) => {
  keyword,
  line,
  name,
- result: {
- duration,
- error_message,
- status
- }
- } = st;
- let location = "";
- if (st.match) location = st.match.location;
- let step = {
- args,
+ result
+ } = step ?? {};
+ const {
  duration,
- embeddings,
+ error_message,
+ status
+ } = result ?? {};
+ const durationValue = typeof duration === "string" ? Number(duration) : duration;
+ const location = step?.match?.location ?? "";
+ const normalizedEmbeddings = normalizeEmbeddings(embeddings, {
+ attachmentsEncoding
+ });
+ const stepData = {
+ args,
+ duration: durationValue,
+ embeddings: normalizedEmbeddings,
  error_message,
  keyword,
  line,
@@ -284,28 +433,177 @@ let _processStep = (state, scenarioId, st) => {
  name,
  status
  };
- if (!state.steps.stepsMap[scenarioId]) state.steps.stepsMap[scenarioId] = {
- steps: []
- };
- state.steps.stepsMap[scenarioId].steps.push(step);
-
- if (isNaN(duration) === false) {
- state.steps.totalDurationNanoSec = state.steps.totalDurationNanoSec + duration;
+ if (!state.steps.stepsMap[scenarioId]) {
+ state.steps.stepsMap[scenarioId] = {
+ steps: []
+ };
  }
-
- if (!hidden || embeddings && embeddings.length) {
+ state.steps.stepsMap[scenarioId].steps.push(stepData);
+ if (Number.isFinite(durationValue)) {
+ state.steps.totalDurationNanoSec += durationValue;
+ }
+ if (!hidden || normalizedEmbeddings && normalizedEmbeddings.length) {
  if (status === "passed") {
- state.scenarios.scenariosMap[scenarioId].passedSteps++;
+ state.scenarios.scenariosMap[scenarioId].passedSteps += 1;
  } else if (status === "skipped") {
- state.scenarios.scenariosMap[scenarioId].skippedSteps++;
+ state.scenarios.scenariosMap[scenarioId].skippedSteps += 1;
  }
  }
-
  if (status === "failed") {
- state.scenarios.scenariosMap[scenarioId].failedSteps++;
+ state.scenarios.scenariosMap[scenarioId].failedSteps += 1;
  }
  };
 
+ /**
+ * Purpose: Generate HTML reports from cucumber JSON output.
+ * Responsibilities:
+ * - Normalize cucumber JSON into store state.
+ * - Copy report assets and write report metadata.
+ * Inputs/Outputs: Reads a cucumber JSON file and writes a report folder.
+ * Invariants: Expects legacy cucumber JSON (features/elements/steps).
+ * See: /agents.md
+ */
+ const require = createRequire(import.meta.url);
+ ncp.limit = 16;
+ const modulePath = require.resolve("./package.json"); //trick to resolve path to the installed module
+
+ /**
+ options.filter - a RegExp instance, against which each file name is tested to determine whether to copy it or not, or a function taking single parameter: copied file name, returning true or false, determining whether to copy file or not.
+
+ options.transform - a function: function (read, write) { read.pipe(write) } used to apply streaming transforms while copying.
+
+ options.clobber - boolean=true. if set to false, ncp will not overwrite destination files that already exist.
+
+ options.dereference - boolean=false. If set to true, ncp will follow symbolic links. For example, a symlink in the source tree pointing to a regular file will become a regular file in the destination tree. Broken symlinks will result in errors.
+
+ options.stopOnErr - boolean=false. If set to true, ncp will behave like cp -r, and stop on the first error it encounters. By default, ncp continues copying, logging all errors and returning an array.
+
+ options.errs - stream. If options.stopOnErr is false, a stream can be provided, and errors will be written to this stream.
+ */
+
+ let cp = (source, destination, options) => {
+ return new Promise((resolve, reject) => {
+ ncp(source, destination, err => {
+ if (err) {
+ reject(new Error(err));
+ }
+ resolve();
+ });
+ });
+ };
+ const _makeSafe = input => {
+ input = input.replace(/&/g, '&amp;');
+ input = input.replace(/</g, '&lt;');
+ input = input.replace(/>/g, '&gt;');
+ return input;
+ };
+
+ /**
+ * Generate a report from cucumber JSON output.
+ * @param {string} source path to the cucumber results JSON file
+ * @param {string} dest folder path where the HTML report gets written
+ * @param {Object} options report configuration overrides
+ * @param {"legacy-json"|"auto"} [options.inputFormat] input JSON format selector
+ * @param {"auto"|"base64"|"raw"} [options.attachmentsEncoding] attachment encoding
+ * @param {string} [options.cucumberVersion] cucumber version (for encoding hints)
+ * @returns {Promise<void>} resolves when report assets are written
+ * @throws {Error} when input JSON is invalid or unsupported
+ * @example
+ * await generate("results/cucumber.json", "reports/out", { title: "Run #1" });
+ */
+ const generate = async (source, dest, options) => {
+ options ? true : options = {};
+ const CUCUMBER_JSON_PATH = "_cucumber-results.json";
+ const SETTINGS_JSON_PATH = "_reporter_settings.json";
+ const HTML_PATH = path.join(path.dirname(modulePath), "react");
+
+ // "linkTags": [{
+ // "pattern": "[a-zA-Z]*-(\\d)*$",
+ // "link": "https://bydeluxe.atlassian.net/browse/"
+
+ // }]
+ //defaults
+ const {
+ title = "Cucumber Report",
+ //report page title
+ description = "Cucumber report",
+ //description to be set at the page header
+ metadata = {},
+ linkTags = null,
+ inputFormat = "legacy-json",
+ attachmentsEncoding,
+ cucumberVersion
+ } = options;
+ let __dirname = path.resolve();
+ if (path.isAbsolute(source) === false) {
+ source = path.join(__dirname, source);
+ }
+ fs.accessSync(source);
+ if (!dest) {
+ dest = path.dirname(source);
+ } else {
+ if (path.isAbsolute(dest) === false) {
+ dest = path.resolve(dest);
+ }
+ }
+ console.log(`__dirname: ${__dirname}\n` + `html path: ${HTML_PATH}\n` + `source: ${source}\n` + `destination: ${dest}\n` + `title: ${title}\n` + `description: ${description}\n` + `metadata: ${ut.inspect(metadata, false, null)}\n` + `linkTags: ${ut.inspect(linkTags, false, null)}\n`);
+
+ //validate input json and make a copy
+ let str = fs.readFileSync(source, "utf8");
+ let obj = parseInputData(source, str);
+ let out = prepareStoreState(obj, {
+ inputFormat,
+ attachmentsEncoding,
+ cucumberVersion
+ });
+ let modifiedJSON = JSON.stringify(out);
+ let destExists = true;
+ try {
+ fs.accessSync(dest);
+ } catch (err) {
+ destExists = false;
+ }
+ if (!destExists) {
+ fs.mkdirSync(dest, {
+ recursive: true
+ });
+ }
+ fs.writeFileSync(path.join(dest, CUCUMBER_JSON_PATH), modifiedJSON);
+ fs.writeFileSync(path.join(dest, SETTINGS_JSON_PATH), JSON.stringify(options));
+ await cp(HTML_PATH, dest);
+ //swap out some tokens in the html
+ let indexPagePath = path.join(dest, "index.html");
+ let htmlStr = fs.readFileSync(indexPagePath, "utf8").toString();
+ let modified = htmlStr.replace(/-=title=-/g, _makeSafe(title));
+ fs.writeFileSync(indexPagePath, modified, "utf8");
+ console.log("done");
+ };
+ const parseInputData = (source, rawText) => {
+ try {
+ return JSON.parse(rawText);
+ } catch (err) {
+ const ndjson = parseNdjson(rawText);
+ if (ndjson) {
+ return ndjson;
+ }
+ throw new Error(`Invalid JSON in ${source}: ${err.message}`);
+ }
+ };
+ const parseNdjson = rawText => {
+ const lines = rawText.split(/\r?\n/).filter(line => line.trim().length);
+ if (!lines.length) {
+ return null;
+ }
+ const items = [];
+ for (const line of lines) {
+ try {
+ items.push(JSON.parse(line));
+ } catch (err) {
+ return null;
+ }
+ }
+ return items;
+ };
  var index = {
  generate: generate
  };
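
For consumers upgrading from 1.0.11, the visible API change is the set of new options accepted by generate (inputFormat, attachmentsEncoding, cucumberVersion) plus the NDJSON fallback in parseInputData. A minimal usage sketch, assuming the package's default export still exposes generate as in earlier versions; the file paths and version string are illustrative, not part of this diff:

import reporter from 'cucumber-reactive-reporter';

// Paths are placeholders for your own `--format json:<file>` output and report folder.
await reporter.generate('results/cucumber.json', 'reports/out', {
  title: 'Run #1',
  inputFormat: 'auto',      // detect cucumber message-formatter output and fail with a descriptive error
  cucumberVersion: '6.11.0' // hint only: when attachmentsEncoding is omitted, majors below 7 resolve to "raw"
});

Per resolveAttachmentsEncoding in the diff, an explicit attachmentsEncoding ("auto", "base64", or "raw") takes precedence over the cucumberVersion hint.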