@dev-blinq/cucumber-js 1.0.38 → 1.0.39-stage
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/download-install.js +51 -10
- package/lib/api/gherkin.js +47 -1
- package/lib/api/gherkin.js.map +1 -1
- package/lib/configuration/axios_client.d.ts +1 -0
- package/lib/configuration/axios_client.js +40 -0
- package/lib/configuration/axios_client.js.map +1 -0
- package/lib/formatter/api.d.ts +2 -0
- package/lib/formatter/api.js +53 -0
- package/lib/formatter/api.js.map +1 -0
- package/lib/formatter/bvt_analysis_formatter.d.ts +2 -1
- package/lib/formatter/bvt_analysis_formatter.js +87 -27
- package/lib/formatter/bvt_analysis_formatter.js.map +1 -1
- package/lib/formatter/feature_data_format.d.ts +12 -3
- package/lib/formatter/feature_data_format.js +106 -8
- package/lib/formatter/feature_data_format.js.map +1 -1
- package/lib/formatter/helpers/report_generator.d.ts +29 -1
- package/lib/formatter/helpers/report_generator.js +139 -4
- package/lib/formatter/helpers/report_generator.js.map +1 -1
- package/lib/formatter/helpers/upload_serivce.d.ts +5 -0
- package/lib/formatter/helpers/upload_serivce.js +141 -13
- package/lib/formatter/helpers/upload_serivce.js.map +1 -1
- package/lib/formatter/helpers/uploader.js +94 -11
- package/lib/formatter/helpers/uploader.js.map +1 -1
- package/lib/version.d.ts +1 -1
- package/lib/version.js +1 -1
- package/lib/version.js.map +1 -1
- package/package.json +4 -1
package/bin/download-install.js
CHANGED
|
@@ -1,9 +1,14 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-var-requires */
|
|
2
|
+
/* eslint-disable no-console */
|
|
3
|
+
/* eslint-disable no-undef */
|
|
1
4
|
const { argv } = require('node:process')
|
|
2
5
|
const fs = require('fs')
|
|
3
6
|
const path = require('path')
|
|
4
7
|
const JSZip = require('jszip')
|
|
5
8
|
const { mkdirSync, writeFileSync } = require('node:fs')
|
|
6
9
|
const axios = require('axios').default
|
|
10
|
+
const tunnel = require('tunnel')
|
|
11
|
+
|
|
7
12
|
let token = null
|
|
8
13
|
let extractPath = null
|
|
9
14
|
const getSSoUrl = () => {
|
|
@@ -12,17 +17,40 @@ const getSSoUrl = () => {
|
|
|
12
17
|
return 'http://localhost:5000/api/auth'
|
|
13
18
|
case 'dev':
|
|
14
19
|
return 'https://dev.api.blinq.io/api/auth'
|
|
20
|
+
case 'stage':
|
|
21
|
+
return 'https://stage.api.blinq.io/api/auth'
|
|
15
22
|
default:
|
|
16
23
|
return 'https://api.blinq.io/api/auth'
|
|
17
24
|
}
|
|
18
25
|
}
|
|
19
26
|
|
|
27
|
+
const getProxy = () => {
|
|
28
|
+
if (!process.env.PROXY) {
|
|
29
|
+
return null
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
const proxy = process.env.PROXY
|
|
33
|
+
const url = new URL(proxy)
|
|
34
|
+
const proxyObject = {
|
|
35
|
+
host: url.hostname,
|
|
36
|
+
port: Number(url.port),
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
const { username, password } = url
|
|
40
|
+
|
|
41
|
+
if (username && password) {
|
|
42
|
+
proxyObject.proxyAuth = `${username}:${password}`
|
|
43
|
+
}
|
|
44
|
+
return tunnel.httpsOverHttp({ proxy: proxyObject })
|
|
45
|
+
}
|
|
20
46
|
const getWorkSpaceUrl = () => {
|
|
21
47
|
switch (process.env.NODE_ENV_BLINQ) {
|
|
22
48
|
case 'local':
|
|
23
49
|
return 'http://localhost:6000/api/workspace'
|
|
24
50
|
case 'dev':
|
|
25
51
|
return 'https://dev.api.blinq.io/api/workspace'
|
|
52
|
+
case "stage":
|
|
53
|
+
return 'https://stage.api.blinq.io/api/workspace'
|
|
26
54
|
default:
|
|
27
55
|
return 'https://api.blinq.io/api/workspace'
|
|
28
56
|
}
|
|
@@ -77,26 +105,34 @@ const dirExists = (path) => {
|
|
|
77
105
|
}
|
|
78
106
|
const ssoUrl = getSSoUrl()
|
|
79
107
|
|
|
108
|
+
const getProjectByAccessKey = async (access_key) => {
|
|
109
|
+
const accessKeyUrl = `${ssoUrl}/getProjectByAccessKey`
|
|
110
|
+
const response = await axios.post(accessKeyUrl, {
|
|
111
|
+
access_key,
|
|
112
|
+
httpAgent: getProxy(),
|
|
113
|
+
proxy: false,
|
|
114
|
+
})
|
|
115
|
+
if (response.status !== 200) {
|
|
116
|
+
console.error('Error: Invalid access key')
|
|
117
|
+
process.exit(1)
|
|
118
|
+
}
|
|
119
|
+
return response.data
|
|
120
|
+
};
|
|
121
|
+
|
|
80
122
|
const downloadAndInstall = async (extractPath, token) => {
|
|
81
123
|
if (!dirExists(extractPath)) {
|
|
82
124
|
fs.mkdirSync(extractPath, { recursive: true })
|
|
83
125
|
}
|
|
84
126
|
try {
|
|
85
|
-
const
|
|
86
|
-
const response = await axios.post(accessKeyUrl, {
|
|
87
|
-
access_key: token,
|
|
88
|
-
})
|
|
89
|
-
if(response.status !== 200){
|
|
90
|
-
console.error('Error: Invalid access key')
|
|
91
|
-
process.exit(1)
|
|
92
|
-
}
|
|
127
|
+
const data = await getProjectByAccessKey(token)
|
|
93
128
|
|
|
94
129
|
const workspaceUrl = getWorkSpaceUrl() + '/pull-workspace'
|
|
95
|
-
|
|
96
130
|
const res = await axios.get(workspaceUrl, {
|
|
97
131
|
params: {
|
|
98
|
-
projectId:
|
|
132
|
+
projectId: data.project._id,
|
|
99
133
|
},
|
|
134
|
+
httpAgent: getProxy(),
|
|
135
|
+
proxy: false,
|
|
100
136
|
responseType: 'arraybuffer',
|
|
101
137
|
headers: {
|
|
102
138
|
Authorization: `Bearer ${token}`,
|
|
@@ -104,6 +140,11 @@ const downloadAndInstall = async (extractPath, token) => {
|
|
|
104
140
|
},
|
|
105
141
|
})
|
|
106
142
|
|
|
143
|
+
if (res.status !== 200) {
|
|
144
|
+
console.error('Error: Unable to fetch workspace')
|
|
145
|
+
process.exit(1)
|
|
146
|
+
}
|
|
147
|
+
|
|
107
148
|
const zip = await JSZip.loadAsync(res.data)
|
|
108
149
|
for (const filename of Object.keys(zip.files)) {
|
|
109
150
|
const fileData = zip.files[filename]
|
package/lib/api/gherkin.js
CHANGED
|
@@ -9,17 +9,33 @@ const gherkin_utils_1 = require("@cucumber/gherkin-utils");
|
|
|
9
9
|
const pickle_filter_1 = __importDefault(require("../pickle_filter"));
|
|
10
10
|
const helpers_1 = require("../cli/helpers");
|
|
11
11
|
const feature_data_format_1 = require("../formatter/feature_data_format");
|
|
12
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
12
13
|
async function getFilteredPicklesAndErrors({ newId, cwd, logger, unexpandedFeaturePaths, featurePaths, coordinates, onEnvelope, }) {
|
|
13
14
|
const gherkinQuery = new gherkin_utils_1.Query();
|
|
14
15
|
const parseErrors = [];
|
|
15
16
|
let variables, fakeData, pickleIndex = 0;
|
|
17
|
+
let dataFunction = null;
|
|
18
|
+
let functionVars = null;
|
|
19
|
+
let mjsDataFiles = null;
|
|
20
|
+
let projectDir = process.cwd();
|
|
21
|
+
if (featurePaths.length > 0) {
|
|
22
|
+
projectDir = node_path_1.default.join(node_path_1.default.dirname(featurePaths[0]), '..', '..');
|
|
23
|
+
}
|
|
16
24
|
await gherkinFromPaths(featurePaths, {
|
|
17
25
|
newId,
|
|
18
26
|
relativeTo: cwd,
|
|
19
27
|
defaultDialect: coordinates.defaultDialect,
|
|
20
28
|
}, (envelope) => {
|
|
21
29
|
if (envelope.source) {
|
|
22
|
-
|
|
30
|
+
let newDataAfterExamplesModify = envelope.source.data;
|
|
31
|
+
const functionMatch = envelope.source.data.match(/@data:function:(.*?)\.(.*)/);
|
|
32
|
+
if (functionMatch) {
|
|
33
|
+
dataFunction = functionMatch[2];
|
|
34
|
+
const { newData, mjsData } = (0, feature_data_format_1.generateExamplesFromFunction)(envelope.source.data, featurePaths[0], dataFunction, functionMatch[1]);
|
|
35
|
+
newDataAfterExamplesModify = newData;
|
|
36
|
+
mjsDataFiles = mjsData;
|
|
37
|
+
}
|
|
38
|
+
const data = (0, feature_data_format_1.generateTestData)(newDataAfterExamplesModify, undefined, undefined, projectDir);
|
|
23
39
|
envelope.source.data = data.newContent;
|
|
24
40
|
variables = data.variables;
|
|
25
41
|
fakeData = data.otherFakeData;
|
|
@@ -28,6 +44,31 @@ async function getFilteredPicklesAndErrors({ newId, cwd, logger, unexpandedFeatu
|
|
|
28
44
|
envelope.gherkinDocument.feature.children =
|
|
29
45
|
envelope.gherkinDocument.feature.children.map((scenario) => {
|
|
30
46
|
if (scenario.scenario) {
|
|
47
|
+
if (dataFunction) {
|
|
48
|
+
const { tableHeader, tableBody } = scenario.scenario.examples[0];
|
|
49
|
+
functionVars = {
|
|
50
|
+
previous: tableHeader.cells.map((cell, index) => ({
|
|
51
|
+
header: cell.value,
|
|
52
|
+
value: tableBody[0].cells[index].value,
|
|
53
|
+
})),
|
|
54
|
+
new: [],
|
|
55
|
+
};
|
|
56
|
+
const generateResult = (0, feature_data_format_1.generateExamplesFromFunctionGherkin)(tableHeader.cells, tableBody[0].cells, mjsDataFiles);
|
|
57
|
+
functionVars.new = generateResult;
|
|
58
|
+
generateResult.map(({ value }, index) => (scenario.scenario.examples[0].tableBody[0].cells[index].value = value));
|
|
59
|
+
}
|
|
60
|
+
let fakeDataIdx = 0;
|
|
61
|
+
scenario.scenario.examples.forEach((example) => {
|
|
62
|
+
example.tableBody.forEach((row) => {
|
|
63
|
+
row.cells.forEach((cell, index) => {
|
|
64
|
+
if (fakeDataIdx < fakeData.length &&
|
|
65
|
+
fakeData[fakeDataIdx].var === cell.value) {
|
|
66
|
+
cell.value = fakeData[fakeDataIdx].fake;
|
|
67
|
+
fakeDataIdx++;
|
|
68
|
+
}
|
|
69
|
+
});
|
|
70
|
+
});
|
|
71
|
+
});
|
|
31
72
|
scenario.scenario.steps = scenario.scenario.steps.map((step) => {
|
|
32
73
|
step.text = (0, feature_data_format_1.generateTestData)(step.text, variables, fakeData).newContent;
|
|
33
74
|
return step;
|
|
@@ -38,6 +79,11 @@ async function getFilteredPicklesAndErrors({ newId, cwd, logger, unexpandedFeatu
|
|
|
38
79
|
}
|
|
39
80
|
if (envelope.pickle) {
|
|
40
81
|
envelope.pickle.steps = envelope.pickle.steps.map((step) => {
|
|
82
|
+
if (functionVars) {
|
|
83
|
+
functionVars.new.forEach(({ value }, index) => {
|
|
84
|
+
step.text = step.text.replace(functionVars.previous[index].value, value);
|
|
85
|
+
});
|
|
86
|
+
}
|
|
41
87
|
const generateData = (0, feature_data_format_1.generateTestData)(step.text, variables, fakeData);
|
|
42
88
|
step.text = generateData.newContent;
|
|
43
89
|
pickleIndex =
|
package/lib/api/gherkin.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"gherkin.js","sourceRoot":"","sources":["../../src/api/gherkin.ts"],"names":[],"mappings":";;;;;;AAAA,+DAGkC;AASlC,2DAA+D;AAC/D,qEAA2C;AAC3C,4CAA6C;AAG7C,0EAAmE;AAQ5D,KAAK,UAAU,2BAA2B,CAAC,EAChD,KAAK,EACL,GAAG,EACH,MAAM,EACN,sBAAsB,EACtB,YAAY,EACZ,WAAW,EACX,UAAU,GASX;IAIC,MAAM,YAAY,GAAG,IAAI,qBAAY,EAAE,CAAA;IACvC,MAAM,WAAW,GAAiB,EAAE,CAAA;IACpC,IAAI,SAAc,EAChB,QAGG,EACH,WAAW,GAAG,CAAC,CAAA;IAEjB,MAAM,gBAAgB,CACpB,YAAY,EACZ;QACE,KAAK;QACL,UAAU,EAAE,GAAG;QACf,cAAc,EAAE,WAAW,CAAC,cAAc;KAC3C,EACD,CAAC,QAAQ,EAAE,EAAE;QACX,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,GAAG,IAAA,sCAAgB,EAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;YACnD,QAAQ,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,UAAU,CAAA;YACtC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAA;YAC1B,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAA;SAC9B;QAED,IAAI,QAAQ,CAAC,eAAe,IAAI,QAAQ,CAAC,eAAe,CAAC,OAAO,EAAE;YAChE,QAAQ,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ;gBACvC,QAAQ,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE;oBACzD,IAAI,QAAQ,CAAC,QAAQ,EAAE;wBACrB,QAAQ,CAAC,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;4BAC7D,IAAI,CAAC,IAAI,GAAG,IAAA,sCAAgB,EAC1B,IAAI,CAAC,IAAI,EACT,SAAS,EACT,QAAQ,CACT,CAAC,UAAU,CAAA;4BACZ,OAAO,IAAI,CAAA;wBACb,CAAC,CAAC,CAAA;qBACH;oBACD,OAAO,QAAQ,CAAA;gBACjB,CAAC,CAAC,CAAA;SACL;QAED,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,QAAQ,CAAC,MAAM,CAAC,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;gBACzD,MAAM,YAAY,GAAG,IAAA,sCAAgB,EAAC,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAA;gBACrE,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC,UAAU,CAAA;gBACnC,WAAW;oBACT,YAAY,CAAC,SAAS,GAAG,WAAW;wBAClC,CAAC,CAAC,YAAY,CAAC,SAAS;wBACxB,CAAC,CAAC,WAAW,CAAA;gBACjB,OAAO,IAAI,CAAA;YACb,CAAC,CAAC,CAAA;YAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE;gBACpC,QAAQ,CAAC,KAAK,EAAE,CAAA;aACjB;YACD,WAAW,GAAG,CAAC,CAAA;SAChB;QAED,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;QAC7B,IAAI,QAAQ,CAAC,UAAU,EAAE;YACvB,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;SACtC;QACD,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAG,QAAQ,CAAC,CAAA;IAC
xB,CAAC,CACF,CAAA;IACD,MAAM,YAAY,GAAG,IAAI,uBAAY,CAAC;QACpC,GAAG;QACH,YAAY,EAAE,sBAAsB;QACpC,KAAK,EAAE,WAAW,CAAC,KAAK;QACxB,aAAa,EAAE,WAAW,CAAC,aAAa;KACzC,CAAC,CAAA;IACF,MAAM,eAAe,GAAyB,YAAY;SACvD,UAAU,EAAE;SACZ,MAAM,CAAC,CAAC,MAAM,EAAE,EAAE;QACjB,MAAM,eAAe,GAAG,YAAY;aACjC,mBAAmB,EAAE;aACrB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,MAAM,CAAC,GAAG,CAAC,CAAA;QACxC,OAAO,YAAY,CAAC,OAAO,CAAC,EAAE,eAAe,EAAE,MAAM,EAAE,CAAC,CAAA;IAC1D,CAAC,CAAC;SACD,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE;QACd,MAAM,eAAe,GAAG,YAAY;aACjC,mBAAmB,EAAE;aACrB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,MAAM,CAAC,GAAG,CAAC,CAAA;QACxC,MAAM,QAAQ,GAAG,YAAY,CAAC,WAAW,CACvC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAChD,CAAA;QACD,OAAO;YACL,eAAe;YACf,QAAQ;YACR,MAAM;SACP,CAAA;IACH,CAAC,CAAC,CAAA;IACJ,IAAA,sBAAY,EAAC,eAAe,EAAE,WAAW,CAAC,KAAK,EAAE,MAAM,CAAC,CAAA;IACxD,OAAO;QACL,eAAe;QACf,WAAW;KACZ,CAAA;AACH,CAAC;AArHD,kEAqHC;AAED,KAAK,UAAU,gBAAgB,CAC7B,KAAe,EACf,OAA8B,EAC9B,UAAwC;IAExC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,oBAAoB,GAAG,gCAAc,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;QACrE,oBAAoB,CAAC,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;QAC3C,oBAAoB,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;QACvC,oBAAoB,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;AACJ,CAAC","sourcesContent":["import {\n GherkinStreams,\n IGherkinStreamOptions,\n} from '@cucumber/gherkin-streams'\nimport {\n Envelope,\n GherkinDocument,\n IdGenerator,\n Location,\n ParseError,\n Pickle,\n} from '@cucumber/messages'\nimport { Query as GherkinQuery } from '@cucumber/gherkin-utils'\nimport PickleFilter from '../pickle_filter'\nimport { orderPickles } from '../cli/helpers'\nimport { ISourcesCoordinates } from './types'\nimport { ILogger } from '../logger'\nimport { generateTestData } from '../formatter/feature_data_format'\n\ninterface PickleWithDocument {\n gherkinDocument: GherkinDocument\n location: Location\n pickle: Pickle\n}\n\nexport async function 
getFilteredPicklesAndErrors({\n newId,\n cwd,\n logger,\n unexpandedFeaturePaths,\n featurePaths,\n coordinates,\n onEnvelope,\n}: {\n newId: IdGenerator.NewId\n cwd: string\n logger: ILogger\n unexpandedFeaturePaths: string[]\n featurePaths: string[]\n coordinates: ISourcesCoordinates\n onEnvelope?: (envelope: Envelope) => void\n}): Promise<{\n filteredPickles: PickleWithDocument[]\n parseErrors: ParseError[]\n}> {\n const gherkinQuery = new GherkinQuery()\n const parseErrors: ParseError[] = []\n let variables: any,\n fakeData: {\n var: string\n fake: string\n }[],\n pickleIndex = 0\n\n await gherkinFromPaths(\n featurePaths,\n {\n newId,\n relativeTo: cwd,\n defaultDialect: coordinates.defaultDialect,\n },\n (envelope) => {\n if (envelope.source) {\n const data = generateTestData(envelope.source.data)\n envelope.source.data = data.newContent\n variables = data.variables\n fakeData = data.otherFakeData\n }\n\n if (envelope.gherkinDocument && envelope.gherkinDocument.feature) {\n envelope.gherkinDocument.feature.children =\n envelope.gherkinDocument.feature.children.map((scenario) => {\n if (scenario.scenario) {\n scenario.scenario.steps = scenario.scenario.steps.map((step) => {\n step.text = generateTestData(\n step.text,\n variables,\n fakeData\n ).newContent\n return step\n })\n }\n return scenario\n })\n }\n\n if (envelope.pickle) {\n envelope.pickle.steps = envelope.pickle.steps.map((step) => {\n const generateData = generateTestData(step.text, variables, fakeData)\n step.text = generateData.newContent\n pickleIndex =\n generateData.fakeIndex > pickleIndex\n ? 
generateData.fakeIndex\n : pickleIndex\n return step\n })\n\n for (let i = 0; i < pickleIndex; i++) {\n fakeData.shift()\n }\n pickleIndex = 0\n }\n\n gherkinQuery.update(envelope)\n if (envelope.parseError) {\n parseErrors.push(envelope.parseError)\n }\n onEnvelope?.(envelope)\n }\n )\n const pickleFilter = new PickleFilter({\n cwd,\n featurePaths: unexpandedFeaturePaths,\n names: coordinates.names,\n tagExpression: coordinates.tagExpression,\n })\n const filteredPickles: PickleWithDocument[] = gherkinQuery\n .getPickles()\n .filter((pickle) => {\n const gherkinDocument = gherkinQuery\n .getGherkinDocuments()\n .find((doc) => doc.uri === pickle.uri)\n return pickleFilter.matches({ gherkinDocument, pickle })\n })\n .map((pickle) => {\n const gherkinDocument = gherkinQuery\n .getGherkinDocuments()\n .find((doc) => doc.uri === pickle.uri)\n const location = gherkinQuery.getLocation(\n pickle.astNodeIds[pickle.astNodeIds.length - 1]\n )\n return {\n gherkinDocument,\n location,\n pickle,\n }\n })\n orderPickles(filteredPickles, coordinates.order, logger)\n return {\n filteredPickles,\n parseErrors,\n }\n}\n\nasync function gherkinFromPaths(\n paths: string[],\n options: IGherkinStreamOptions,\n onEnvelope: (envelope: Envelope) => void\n): Promise<void> {\n return new Promise((resolve, reject) => {\n const gherkinMessageStream = GherkinStreams.fromPaths(paths, options)\n gherkinMessageStream.on('data', onEnvelope)\n gherkinMessageStream.on('end', resolve)\n gherkinMessageStream.on('error', reject)\n })\n}\n"]}
|
|
1
|
+
{"version":3,"file":"gherkin.js","sourceRoot":"","sources":["../../src/api/gherkin.ts"],"names":[],"mappings":";;;;;;AAAA,+DAGkC;AASlC,2DAA+D;AAC/D,qEAA2C;AAC3C,4CAA6C;AAG7C,0EAIyC;AACzC,0DAA4B;AAkBrB,KAAK,UAAU,2BAA2B,CAAC,EAChD,KAAK,EACL,GAAG,EACH,MAAM,EACN,sBAAsB,EACtB,YAAY,EACZ,WAAW,EACX,UAAU,GASX;IAIC,MAAM,YAAY,GAAG,IAAI,qBAAY,EAAE,CAAA;IACvC,MAAM,WAAW,GAAiB,EAAE,CAAA;IACpC,IAAI,SAAc,EAChB,QAGG,EACH,WAAW,GAAG,CAAC,CAAA;IACjB,IAAI,YAAY,GAAkB,IAAI,CAAA;IACtC,IAAI,YAAY,GAAwB,IAAI,CAAA;IAC5C,IAAI,YAAY,GAAQ,IAAI,CAAA;IAC5B,IAAI,UAAU,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;IAC9B,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE;QAC3B,UAAU,GAAG,mBAAI,CAAC,IAAI,CAAC,mBAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;KAClE;IACD,MAAM,gBAAgB,CACpB,YAAY,EACZ;QACE,KAAK;QACL,UAAU,EAAE,GAAG;QACf,cAAc,EAAE,WAAW,CAAC,cAAc;KAC3C,EACD,CAAC,QAAQ,EAAE,EAAE;QACX,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,IAAI,0BAA0B,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAA;YACrD,MAAM,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAC9C,4BAA4B,CAC7B,CAAA;YAED,IAAI,aAAa,EAAE;gBACjB,YAAY,GAAG,aAAa,CAAC,CAAC,CAAC,CAAA;gBAC/B,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,IAAA,kDAA4B,EACvD,QAAQ,CAAC,MAAM,CAAC,IAAI,EACpB,YAAY,CAAC,CAAC,CAAC,EACf,YAAY,EACZ,aAAa,CAAC,CAAC,CAAC,CACjB,CAAA;gBACD,0BAA0B,GAAG,OAAO,CAAA;gBACpC,YAAY,GAAG,OAAO,CAAA;aACvB;YAED,MAAM,IAAI,GAAG,IAAA,sCAAgB,EAC3B,0BAA0B,EAC1B,SAAS,EACT,SAAS,EACT,UAAU,CACX,CAAA;YACD,QAAQ,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,UAAU,CAAA;YACtC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAA;YAC1B,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAA;SAC9B;QAED,IAAI,QAAQ,CAAC,eAAe,IAAI,QAAQ,CAAC,eAAe,CAAC,OAAO,EAAE;YAChE,QAAQ,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ;gBACvC,QAAQ,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE;oBACzD,IAAI,QAAQ,CAAC,QAAQ,EAAE;wBACrB,IAAI,YAAY,EAAE;4BAChB,MAAM,EAAE,WAAW,EAAE,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;4BAEhE,YAAY,GAAG;gCACb,QAAQ,EAAE,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC;oCAChD,MAAM,EAAE,IAAI,CAAC,KAAK;oCAClB,KAAK,EAAE,SAAS,CAAC,CAAC,
CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK;iCACvC,CAAC,CAAC;gCACH,GAAG,EAAE,EAAE;6BACR,CAAA;4BAED,MAAM,cAAc,GAAG,IAAA,yDAAmC,EACxD,WAAW,CAAC,KAAK,EACjB,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,EAClB,YAAY,CACb,CAAA;4BAED,YAAY,CAAC,GAAG,GAAG,cAAc,CAAA;4BAEjC,cAAc,CAAC,GAAG,CAChB,CAAC,EAAE,KAAK,EAAE,EAAE,KAAK,EAAE,EAAE,CACnB,CAAC,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,CAC/C,KAAK,CACN,CAAC,KAAK,GAAG,KAAK,CAAC,CACnB,CAAA;yBACF;wBACD,IAAI,WAAW,GAAG,CAAC,CAAA;wBACnB,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;4BAC7C,OAAO,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;gCAChC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;oCAChC,IACE,WAAW,GAAG,QAAQ,CAAC,MAAM;wCAC7B,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,KAAK,IAAI,CAAC,KAAK,EACxC;wCACA,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,WAAW,CAAC,CAAC,IAAI,CAAA;wCACvC,WAAW,EAAE,CAAA;qCACd;gCACH,CAAC,CAAC,CAAA;4BACJ,CAAC,CAAC,CAAA;wBACJ,CAAC,CAAC,CAAA;wBACF,QAAQ,CAAC,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;4BAC7D,IAAI,CAAC,IAAI,GAAG,IAAA,sCAAgB,EAC1B,IAAI,CAAC,IAAI,EACT,SAAS,EACT,QAAQ,CACT,CAAC,UAAU,CAAA;4BACZ,OAAO,IAAI,CAAA;wBACb,CAAC,CAAC,CAAA;qBACH;oBACD,OAAO,QAAQ,CAAA;gBACjB,CAAC,CAAC,CAAA;SACL;QAED,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,QAAQ,CAAC,MAAM,CAAC,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;gBACzD,IAAI,YAAY,EAAE;oBAChB,YAAY,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,EAAE,EAAE,KAAK,EAAE,EAAE;wBAC5C,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAC3B,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,KAAK,EAClC,KAAK,CACN,CAAA;oBACH,CAAC,CAAC,CAAA;iBACH;gBACD,MAAM,YAAY,GAAG,IAAA,sCAAgB,EAAC,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAA;gBACrE,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC,UAAU,CAAA;gBACnC,WAAW;oBACT,YAAY,CAAC,SAAS,GAAG,WAAW;wBAClC,CAAC,CAAC,YAAY,CAAC,SAAS;wBACxB,CAAC,CAAC,WAAW,CAAA;gBACjB,OAAO,IAAI,CAAA;YACb,CAAC,CAAC,CAAA;YAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE;gBACpC,QAAQ,CAAC,KAAK,EAAE,CAAA;aACjB;YACD,WAAW,GAAG,CAAC,CAAA;SAChB;QA
ED,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;QAC7B,IAAI,QAAQ,CAAC,UAAU,EAAE;YACvB,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;SACtC;QACD,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAG,QAAQ,CAAC,CAAA;IACxB,CAAC,CACF,CAAA;IACD,MAAM,YAAY,GAAG,IAAI,uBAAY,CAAC;QACpC,GAAG;QACH,YAAY,EAAE,sBAAsB;QACpC,KAAK,EAAE,WAAW,CAAC,KAAK;QACxB,aAAa,EAAE,WAAW,CAAC,aAAa;KACzC,CAAC,CAAA;IACF,MAAM,eAAe,GAAyB,YAAY;SACvD,UAAU,EAAE;SACZ,MAAM,CAAC,CAAC,MAAM,EAAE,EAAE;QACjB,MAAM,eAAe,GAAG,YAAY;aACjC,mBAAmB,EAAE;aACrB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,MAAM,CAAC,GAAG,CAAC,CAAA;QACxC,OAAO,YAAY,CAAC,OAAO,CAAC,EAAE,eAAe,EAAE,MAAM,EAAE,CAAC,CAAA;IAC1D,CAAC,CAAC;SACD,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE;QACd,MAAM,eAAe,GAAG,YAAY;aACjC,mBAAmB,EAAE;aACrB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,MAAM,CAAC,GAAG,CAAC,CAAA;QACxC,MAAM,QAAQ,GAAG,YAAY,CAAC,WAAW,CACvC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAChD,CAAA;QACD,OAAO;YACL,eAAe;YACf,QAAQ;YACR,MAAM;SACP,CAAA;IACH,CAAC,CAAC,CAAA;IACJ,IAAA,sBAAY,EAAC,eAAe,EAAE,WAAW,CAAC,KAAK,EAAE,MAAM,CAAC,CAAA;IACxD,OAAO;QACL,eAAe;QACf,WAAW;KACZ,CAAA;AACH,CAAC;AAjMD,kEAiMC;AAED,KAAK,UAAU,gBAAgB,CAC7B,KAAe,EACf,OAA8B,EAC9B,UAAwC;IAExC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,oBAAoB,GAAG,gCAAc,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;QACrE,oBAAoB,CAAC,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;QAC3C,oBAAoB,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;QACvC,oBAAoB,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;AACJ,CAAC","sourcesContent":["import {\n GherkinStreams,\n IGherkinStreamOptions,\n} from '@cucumber/gherkin-streams'\nimport {\n Envelope,\n GherkinDocument,\n IdGenerator,\n Location,\n ParseError,\n Pickle,\n} from '@cucumber/messages'\nimport { Query as GherkinQuery } from '@cucumber/gherkin-utils'\nimport PickleFilter from '../pickle_filter'\nimport { orderPickles } from '../cli/helpers'\nimport { ISourcesCoordinates } from './types'\nimport { ILogger } from '../logger'\nimport {\n generateExamplesFromFunction,\n 
generateTestData,\n generateExamplesFromFunctionGherkin,\n} from '../formatter/feature_data_format'\nimport path from 'node:path'\ninterface PickleWithDocument {\n gherkinDocument: GherkinDocument\n location: Location\n pickle: Pickle\n}\n\ninterface FunctionVars {\n previous: {\n header: string\n value: any\n }[]\n new: {\n header: string\n value: any\n }[]\n}\n\nexport async function getFilteredPicklesAndErrors({\n newId,\n cwd,\n logger,\n unexpandedFeaturePaths,\n featurePaths,\n coordinates,\n onEnvelope,\n}: {\n newId: IdGenerator.NewId\n cwd: string\n logger: ILogger\n unexpandedFeaturePaths: string[]\n featurePaths: string[]\n coordinates: ISourcesCoordinates\n onEnvelope?: (envelope: Envelope) => void\n}): Promise<{\n filteredPickles: PickleWithDocument[]\n parseErrors: ParseError[]\n}> {\n const gherkinQuery = new GherkinQuery()\n const parseErrors: ParseError[] = []\n let variables: any,\n fakeData: {\n var: string\n fake: string\n }[],\n pickleIndex = 0\n let dataFunction: string | null = null\n let functionVars: FunctionVars | null = null\n let mjsDataFiles: any = null\n let projectDir = process.cwd()\n if (featurePaths.length > 0) {\n projectDir = path.join(path.dirname(featurePaths[0]), '..', '..')\n }\n await gherkinFromPaths(\n featurePaths,\n {\n newId,\n relativeTo: cwd,\n defaultDialect: coordinates.defaultDialect,\n },\n (envelope) => {\n if (envelope.source) {\n let newDataAfterExamplesModify = envelope.source.data\n const functionMatch = envelope.source.data.match(\n /@data:function:(.*?)\\.(.*)/\n )\n\n if (functionMatch) {\n dataFunction = functionMatch[2]\n const { newData, mjsData } = generateExamplesFromFunction(\n envelope.source.data,\n featurePaths[0],\n dataFunction,\n functionMatch[1]\n )\n newDataAfterExamplesModify = newData\n mjsDataFiles = mjsData\n }\n\n const data = generateTestData(\n newDataAfterExamplesModify,\n undefined,\n undefined,\n projectDir\n )\n envelope.source.data = data.newContent\n variables = data.variables\n 
fakeData = data.otherFakeData\n }\n\n if (envelope.gherkinDocument && envelope.gherkinDocument.feature) {\n envelope.gherkinDocument.feature.children =\n envelope.gherkinDocument.feature.children.map((scenario) => {\n if (scenario.scenario) {\n if (dataFunction) {\n const { tableHeader, tableBody } = scenario.scenario.examples[0]\n\n functionVars = {\n previous: tableHeader.cells.map((cell, index) => ({\n header: cell.value,\n value: tableBody[0].cells[index].value,\n })),\n new: [],\n }\n\n const generateResult = generateExamplesFromFunctionGherkin(\n tableHeader.cells,\n tableBody[0].cells,\n mjsDataFiles\n )\n\n functionVars.new = generateResult\n\n generateResult.map(\n ({ value }, index) =>\n (scenario.scenario.examples[0].tableBody[0].cells[\n index\n ].value = value)\n )\n }\n let fakeDataIdx = 0\n scenario.scenario.examples.forEach((example) => {\n example.tableBody.forEach((row) => {\n row.cells.forEach((cell, index) => {\n if (\n fakeDataIdx < fakeData.length &&\n fakeData[fakeDataIdx].var === cell.value\n ) {\n cell.value = fakeData[fakeDataIdx].fake\n fakeDataIdx++\n }\n })\n })\n })\n scenario.scenario.steps = scenario.scenario.steps.map((step) => {\n step.text = generateTestData(\n step.text,\n variables,\n fakeData\n ).newContent\n return step\n })\n }\n return scenario\n })\n }\n\n if (envelope.pickle) {\n envelope.pickle.steps = envelope.pickle.steps.map((step) => {\n if (functionVars) {\n functionVars.new.forEach(({ value }, index) => {\n step.text = step.text.replace(\n functionVars.previous[index].value,\n value\n )\n })\n }\n const generateData = generateTestData(step.text, variables, fakeData)\n step.text = generateData.newContent\n pickleIndex =\n generateData.fakeIndex > pickleIndex\n ? 
generateData.fakeIndex\n : pickleIndex\n return step\n })\n\n for (let i = 0; i < pickleIndex; i++) {\n fakeData.shift()\n }\n pickleIndex = 0\n }\n\n gherkinQuery.update(envelope)\n if (envelope.parseError) {\n parseErrors.push(envelope.parseError)\n }\n onEnvelope?.(envelope)\n }\n )\n const pickleFilter = new PickleFilter({\n cwd,\n featurePaths: unexpandedFeaturePaths,\n names: coordinates.names,\n tagExpression: coordinates.tagExpression,\n })\n const filteredPickles: PickleWithDocument[] = gherkinQuery\n .getPickles()\n .filter((pickle) => {\n const gherkinDocument = gherkinQuery\n .getGherkinDocuments()\n .find((doc) => doc.uri === pickle.uri)\n return pickleFilter.matches({ gherkinDocument, pickle })\n })\n .map((pickle) => {\n const gherkinDocument = gherkinQuery\n .getGherkinDocuments()\n .find((doc) => doc.uri === pickle.uri)\n const location = gherkinQuery.getLocation(\n pickle.astNodeIds[pickle.astNodeIds.length - 1]\n )\n return {\n gherkinDocument,\n location,\n pickle,\n }\n })\n orderPickles(filteredPickles, coordinates.order, logger)\n return {\n filteredPickles,\n parseErrors,\n }\n}\n\nasync function gherkinFromPaths(\n paths: string[],\n options: IGherkinStreamOptions,\n onEnvelope: (envelope: Envelope) => void\n): Promise<void> {\n return new Promise((resolve, reject) => {\n const gherkinMessageStream = GherkinStreams.fromPaths(paths, options)\n gherkinMessageStream.on('data', onEnvelope)\n gherkinMessageStream.on('end', resolve)\n gherkinMessageStream.on('error', reject)\n })\n}\n"]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare const axiosClient: import("axios").AxiosInstance;
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.axiosClient = void 0;
|
|
7
|
+
/* eslint-disable no-console */
|
|
8
|
+
const axios_1 = __importDefault(require("axios"));
|
|
9
|
+
const tunnel_1 = __importDefault(require("tunnel"));
|
|
10
|
+
const getProxy = () => {
|
|
11
|
+
if (!process.env.PROXY) {
|
|
12
|
+
return null;
|
|
13
|
+
}
|
|
14
|
+
const proxy = process.env.PROXY;
|
|
15
|
+
const url = new URL(proxy);
|
|
16
|
+
const proxyObject = {
|
|
17
|
+
host: url.hostname,
|
|
18
|
+
port: Number(url.port),
|
|
19
|
+
};
|
|
20
|
+
const { username, password } = url;
|
|
21
|
+
if (username && password) {
|
|
22
|
+
proxyObject.proxyAuth = `${username}:${password}`;
|
|
23
|
+
}
|
|
24
|
+
return tunnel_1.default.httpsOverHttp({ proxy: proxyObject });
|
|
25
|
+
};
|
|
26
|
+
const createAxiosClient = () => {
|
|
27
|
+
try {
|
|
28
|
+
const agent = getProxy();
|
|
29
|
+
return axios_1.default.create({
|
|
30
|
+
httpsAgent: agent,
|
|
31
|
+
proxy: false,
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
catch (error) {
|
|
35
|
+
console.log(error.message);
|
|
36
|
+
throw new Error(`Error creating axios client ${error instanceof Error ? error.message : error.response.data}`);
|
|
37
|
+
}
|
|
38
|
+
};
|
|
39
|
+
exports.axiosClient = createAxiosClient();
|
|
40
|
+
//# sourceMappingURL=axios_client.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"axios_client.js","sourceRoot":"","sources":["../../src/configuration/axios_client.ts"],"names":[],"mappings":";;;;;;AAAA,+BAA+B;AAC/B,kDAAyB;AACzB,oDAA6C;AAG7C,MAAM,QAAQ,GAAG,GAAiB,EAAE;IAClC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE;QACtB,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,KAAK,GAAkB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAA;IAC9C,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,CAAA;IAC1B,MAAM,WAAW,GAAiB;QAChC,IAAI,EAAE,GAAG,CAAC,QAAQ;QAClB,IAAI,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC;KACvB,CAAA;IAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,GAAG,CAAA;IAElC,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,WAAW,CAAC,SAAS,GAAG,GAAG,QAAQ,IAAI,QAAQ,EAAE,CAAA;KAClD;IACD,OAAO,gBAAM,CAAC,aAAa,CAAC,EAAE,KAAK,EAAE,WAAW,EAAE,CAAC,CAAA;AACrD,CAAC,CAAA;AAED,MAAM,iBAAiB,GAAG,GAAG,EAAE;IAC7B,IAAI;QACF,MAAM,KAAK,GAAmB,QAAQ,EAAE,CAAA;QACxC,OAAO,eAAK,CAAC,MAAM,CAAC;YAClB,UAAU,EAAE,KAAK;YACjB,KAAK,EAAE,KAAK;SACb,CAAC,CAAA;KACH;IAAC,OAAO,KAAK,EAAE;QACd,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;QAC1B,MAAM,IAAI,KAAK,CACb,+BACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAC1D,EAAE,CACH,CAAA;KACF;AACH,CAAC,CAAA;AAEY,QAAA,WAAW,GAAG,iBAAiB,EAAE,CAAA","sourcesContent":["/* eslint-disable no-console */\nimport axios from 'axios'\nimport tunnel, { ProxyOptions } from 'tunnel'\nimport { Agent } from 'http'\n\nconst getProxy = (): Agent | null => {\n if (!process.env.PROXY) {\n return null\n }\n\n const proxy: string | null = process.env.PROXY\n const url = new URL(proxy)\n const proxyObject: ProxyOptions = {\n host: url.hostname,\n port: Number(url.port),\n }\n\n const { username, password } = url\n\n if (username && password) {\n proxyObject.proxyAuth = `${username}:${password}`\n }\n return tunnel.httpsOverHttp({ proxy: proxyObject })\n}\n\nconst createAxiosClient = () => {\n try {\n const agent: string | Agent = getProxy()\n return axios.create({\n httpsAgent: agent,\n proxy: false,\n })\n } catch (error) {\n console.log(error.message)\n throw new Error(\n `Error creating axios client 
${\n error instanceof Error ? error.message : error.response.data\n }`\n )\n }\n}\n\nexport const axiosClient = createAxiosClient()\n"]}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getProjectByAccessKey = void 0;
|
|
7
|
+
const axios_1 = __importDefault(require("axios"));
|
|
8
|
+
const tunnel_1 = __importDefault(require("tunnel"));
|
|
9
|
+
/**
 * Resolve the SSO/auth API base URL for the current deployment environment.
 * Driven by NODE_ENV_BLINQ; any unrecognized or unset value falls back to
 * the production endpoint.
 */
const getSSoUrl = () => {
    const env = process.env.NODE_ENV_BLINQ;
    if (env === 'local') {
        return 'http://localhost:5000/api/auth';
    }
    if (env === 'dev') {
        return 'https://dev.api.blinq.io/api/auth';
    }
    if (env === 'stage') {
        return 'https://stage.api.blinq.io/api/auth';
    }
    // Production default.
    return 'https://api.blinq.io/api/auth';
};
|
|
21
|
+
/**
 * Build an HTTP(S) tunneling agent from the PROXY environment variable.
 * Returns null when no proxy is configured. Credentials embedded in the
 * proxy URL (user:pass@host) are forwarded as proxyAuth.
 */
const getProxy = () => {
    const proxyUrl = process.env.PROXY;
    if (!proxyUrl) {
        return null;
    }
    const parsed = new URL(proxyUrl);
    const proxyOptions = {
        host: parsed.hostname,
        port: Number(parsed.port),
    };
    if (parsed.username && parsed.password) {
        //@ts-ignore
        proxyOptions.proxyAuth = `${parsed.username}:${parsed.password}`;
    }
    // HTTPS requests tunneled over an HTTP proxy (CONNECT).
    return tunnel_1.default.httpsOverHttp({ proxy: proxyOptions });
};
|
|
38
|
+
/**
 * Look up the project associated with an access key via the SSO service.
 * Prints an error and exits the process when the key is rejected.
 *
 * @param {string} access_key - the project access key (TOKEN).
 * @returns the response payload (expected to contain `project`).
 */
const getProjectByAccessKey = async (access_key) => {
    const ssoUrl = getSSoUrl();
    const accessKeyUrl = `${ssoUrl}/getProjectByAccessKey`;
    let response;
    try {
        // BUGFIX: the agent/proxy settings were previously placed inside
        // axios.post's SECOND argument, which is the request payload — so they
        // were serialized into the POST body instead of configuring the client.
        // They belong in the config (third) argument. Also use `httpsAgent`:
        // the SSO endpoints are https and tunnel.httpsOverHttp() produces an
        // agent for https requests (matching axios_client.ts in this package).
        response = await axios_1.default.post(accessKeyUrl, { access_key }, {
            httpsAgent: getProxy(),
            proxy: false,
        });
    }
    catch (err) {
        // axios rejects on non-2xx statuses by default, so the old
        // `response.status !== 200` check was unreachable; handle the
        // rejection here to keep the intended report-and-exit behavior.
        console.error('Error: Invalid access key');
        process.exit(1);
    }
    if (response.status !== 200) {
        // Defensive: only reachable if a custom validateStatus is ever added.
        console.error('Error: Invalid access key');
        process.exit(1);
    }
    return response.data;
};
|
|
52
|
+
exports.getProjectByAccessKey = getProjectByAccessKey;
|
|
53
|
+
//# sourceMappingURL=api.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"api.js","sourceRoot":"","sources":["../../src/formatter/api.ts"],"names":[],"mappings":";;;;;;AAAA,kDAAyB;AACzB,oDAA2B;AAE3B,MAAM,SAAS,GAAG,GAAG,EAAE;IACnB,QAAQ,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE;QAClC,KAAK,OAAO;YACV,OAAO,gCAAgC,CAAA;QACzC,KAAK,KAAK;YACR,OAAO,mCAAmC,CAAA;QAC5C,KAAK,OAAO;YACV,OAAO,qCAAqC,CAAA;QAC9C;YACE,OAAO,+BAA+B,CAAA;KACzC;AACH,CAAC,CAAA;AACD,MAAM,QAAQ,GAAG,GAAG,EAAE;IACpB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE;QACtB,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,KAAK,CAAA;IAC/B,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,CAAA;IAC1B,MAAM,WAAW,GAAG;QAClB,IAAI,EAAE,GAAG,CAAC,QAAQ;QAClB,IAAI,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC;KACvB,CAAA;IAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,GAAG,CAAA;IAElC,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,YAAY;QACZ,WAAW,CAAC,SAAS,GAAG,GAAG,QAAQ,IAAI,QAAQ,EAAE,CAAA;KAClD;IACD,OAAO,gBAAM,CAAC,aAAa,CAAC,EAAE,KAAK,EAAE,WAAW,EAAE,CAAC,CAAA;AACrD,CAAC,CAAA;AAED,MAAM,qBAAqB,GAAG,KAAK,EAAE,UAAiB,EAAE,EAAE;IACxD,MAAM,MAAM,GAAG,SAAS,EAAE,CAAA;IAC1B,MAAM,YAAY,GAAG,GAAG,MAAM,wBAAwB,CAAA;IACtD,MAAM,QAAQ,GAAG,MAAM,eAAK,CAAC,IAAI,CAAC,YAAY,EAAE;QAC9C,UAAU;QACV,SAAS,EAAE,QAAQ,EAAE;QACrB,KAAK,EAAE,KAAK;KACb,CAAC,CAAA;IACF,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,OAAO,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAA;QAC1C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,OAAO,QAAQ,CAAC,IAAI,CAAA;AACtB,CAAC,CAAC;AAEO,sDAAqB","sourcesContent":["import axios from 'axios'\nimport tunnel from 'tunnel'\n\nconst getSSoUrl = () => {\n switch (process.env.NODE_ENV_BLINQ) {\n case 'local':\n return 'http://localhost:5000/api/auth'\n case 'dev':\n return 'https://dev.api.blinq.io/api/auth'\n case 'stage':\n return 'https://stage.api.blinq.io/api/auth'\n default:\n return 'https://api.blinq.io/api/auth'\n }\n }\n const getProxy = () => {\n if (!process.env.PROXY) {\n return null\n }\n \n const proxy = process.env.PROXY\n const url = new URL(proxy)\n const proxyObject = {\n host: url.hostname,\n port: Number(url.port),\n }\n \n const { username, password } = 
url\n \n if (username && password) {\n //@ts-ignore\n proxyObject.proxyAuth = `${username}:${password}`\n }\n return tunnel.httpsOverHttp({ proxy: proxyObject })\n }\n \n const getProjectByAccessKey = async (access_key:string) => {\n const ssoUrl = getSSoUrl()\n const accessKeyUrl = `${ssoUrl}/getProjectByAccessKey`\n const response = await axios.post(accessKeyUrl, {\n access_key,\n httpAgent: getProxy(),\n proxy: false,\n })\n if (response.status !== 200) {\n console.error('Error: Invalid access key')\n process.exit(1)\n }\n return response.data\n };\n\n export { getProjectByAccessKey };"]}
|
|
@@ -14,5 +14,6 @@ export default class BVTAnalysisFormatter extends Formatter {
|
|
|
14
14
|
private uploadFinalReport;
|
|
15
15
|
private retrain;
|
|
16
16
|
private call_cucumber_client;
|
|
17
|
-
private
|
|
17
|
+
private getAppDataDir;
|
|
18
18
|
}
|
|
19
|
+
export declare function logReportLink(runId: string, projectId: string): void;
|
|
@@ -3,14 +3,18 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
3
3
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.logReportLink = void 0;
|
|
6
7
|
const child_process_1 = require("child_process");
|
|
8
|
+
const fs_1 = require("fs");
|
|
9
|
+
const promises_1 = require("fs/promises");
|
|
7
10
|
const path_1 = __importDefault(require("path"));
|
|
11
|
+
const tmp_1 = require("tmp");
|
|
8
12
|
const _1 = __importDefault(require("."));
|
|
9
13
|
const value_checker_1 = require("../value_checker");
|
|
10
14
|
const report_generator_1 = __importDefault(require("./helpers/report_generator"));
|
|
11
15
|
const uploader_1 = __importDefault(require("./helpers/uploader"));
|
|
12
|
-
const
|
|
13
|
-
const
|
|
16
|
+
const os_1 = __importDefault(require("os"));
|
|
17
|
+
const api_1 = require("./api");
|
|
14
18
|
//User token
|
|
15
19
|
const TOKEN = process.env.TOKEN;
|
|
16
20
|
class BVTAnalysisFormatter extends _1.default {
|
|
@@ -23,7 +27,7 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
23
27
|
throw new Error('TOKEN must be set');
|
|
24
28
|
}
|
|
25
29
|
options.eventBroadcaster.on('envelope', async (envelope) => {
|
|
26
|
-
this.reportGenerator.handleMessage(envelope);
|
|
30
|
+
await this.reportGenerator.handleMessage(envelope);
|
|
27
31
|
if ((0, value_checker_1.doesHaveValue)(envelope.meta) &&
|
|
28
32
|
(0, value_checker_1.doesHaveValue)(envelope.meta.runName)) {
|
|
29
33
|
this.runName = envelope.meta.runName;
|
|
@@ -35,7 +39,7 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
35
39
|
await this.analyzeReport(report);
|
|
36
40
|
}
|
|
37
41
|
else {
|
|
38
|
-
await this.uploadReport(report)
|
|
42
|
+
// await this.uploadReport(report)
|
|
39
43
|
}
|
|
40
44
|
this.exit = true;
|
|
41
45
|
}
|
|
@@ -51,7 +55,14 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
51
55
|
async finished() {
|
|
52
56
|
await new Promise((resolve) => {
|
|
53
57
|
const checkInterval = setInterval(() => {
|
|
54
|
-
|
|
58
|
+
let anyRem;
|
|
59
|
+
if (process.env.UPLOADING_TEST_CASE) {
|
|
60
|
+
anyRem = JSON.parse(process.env.UPLOADING_TEST_CASE);
|
|
61
|
+
}
|
|
62
|
+
else {
|
|
63
|
+
anyRem = undefined;
|
|
64
|
+
}
|
|
65
|
+
if (this.exit && (!anyRem || anyRem.length === 0)) {
|
|
55
66
|
clearInterval(checkInterval);
|
|
56
67
|
resolve(null);
|
|
57
68
|
}
|
|
@@ -59,13 +70,18 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
59
70
|
});
|
|
60
71
|
}
|
|
61
72
|
async analyzeReport(report) {
|
|
62
|
-
if (report.result.status === 'PASSED'
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
process.exit(0);
|
|
73
|
+
if (report.result.status === 'PASSED' ||
|
|
74
|
+
process.env.NO_RETRAIN === 'false') {
|
|
75
|
+
if (report.result.status === 'PASSED') {
|
|
76
|
+
this.log('No test failed. No need to retrain\n');
|
|
67
77
|
}
|
|
68
|
-
process.
|
|
78
|
+
if (process.env.NO_RETRAIN === 'false') {
|
|
79
|
+
this.log('Retraining is skipped since the failed step contains an API request\n');
|
|
80
|
+
}
|
|
81
|
+
// const uploadSuccessful = await this.uploadFinalReport(report)
|
|
82
|
+
// process.exit(0)
|
|
83
|
+
this.exit = true;
|
|
84
|
+
return;
|
|
69
85
|
}
|
|
70
86
|
//checking if the type of report.result is JsonResultFailed or not
|
|
71
87
|
this.log('Some tests failed, starting the retraining...\n');
|
|
@@ -98,6 +114,7 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
98
114
|
return {
|
|
99
115
|
result: finalResult,
|
|
100
116
|
testCases: finalTestCases,
|
|
117
|
+
env: report.env,
|
|
101
118
|
};
|
|
102
119
|
}
|
|
103
120
|
async processTestCase(testCase, report) {
|
|
@@ -105,7 +122,7 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
105
122
|
return testCase;
|
|
106
123
|
}
|
|
107
124
|
const failedTestSteps = testCase.steps
|
|
108
|
-
.map((step, i) => (step.result.status
|
|
125
|
+
.map((step, i) => (step.result.status === 'FAILED' ? i : null))
|
|
109
126
|
.filter((i) => i !== null);
|
|
110
127
|
const retrainStats = await this.retrain(failedTestSteps, testCase);
|
|
111
128
|
if (!retrainStats) {
|
|
@@ -120,7 +137,7 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
120
137
|
let success = true;
|
|
121
138
|
try {
|
|
122
139
|
const { projectId, runId } = await this.uploader.uploadRun(finalReport, this.runName);
|
|
123
|
-
|
|
140
|
+
logReportLink(runId, projectId);
|
|
124
141
|
}
|
|
125
142
|
catch (err) {
|
|
126
143
|
this.log('Error uploading report\n');
|
|
@@ -129,12 +146,25 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
129
146
|
}
|
|
130
147
|
success = false;
|
|
131
148
|
}
|
|
149
|
+
finally {
|
|
150
|
+
try {
|
|
151
|
+
(0, fs_1.writeFileSync)(path_1.default.join(this.reportGenerator.reportFolder, 'report.json'), JSON.stringify(finalReport, null, 2), 'utf-8');
|
|
152
|
+
}
|
|
153
|
+
catch (e) {
|
|
154
|
+
console.error('failed to write report.json to local disk');
|
|
155
|
+
}
|
|
156
|
+
}
|
|
132
157
|
//this.log(JSON.stringify(finalReport, null, 2))
|
|
133
158
|
return success;
|
|
134
159
|
}
|
|
135
160
|
    /**
     * Gate retraining behind a project-validity check, then delegate the actual
     * retraining run to the cucumber client process.
     *
     * @param failedTestCases - indices of the failed steps to retrain.
     * @param testCase - the failed test case being retrained.
     * @returns the retrain stats produced by call_cucumber_client.
     */
    async retrain(failedTestCases, testCase) {
        // TOKEN is the module-level access key; resolves the owning project.
        const data = await (0, api_1.getProjectByAccessKey)(TOKEN);
        const currentTimestampInSeconds = Math.floor(Date.now() / 1000);
        // NOTE(review): `expriration_date` (sic) is the field name as returned by
        // the SSO API — presumably a Unix timestamp in seconds; confirm against
        // the server schema before "fixing" the spelling here.
        if (data.project.expriration_date < currentTimestampInSeconds) {
            console.log('Warning: Your project has expired, retraining is restricted. Please contact sales.');
            // Hard-stop the whole run: expired projects may not retrain.
            process.exit(1);
        }
        return await this.call_cucumber_client(failedTestCases, testCase);
    }
|
|
139
169
|
async call_cucumber_client(stepsToRetrain, testCase) {
|
|
140
170
|
return new Promise((resolve, reject) => {
|
|
@@ -143,14 +173,21 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
143
173
|
process.cwd(),
|
|
144
174
|
path_1.default.join(process.cwd(), testCase.uri),
|
|
145
175
|
`${testCase.scenarioName}`,
|
|
176
|
+
'undefined',
|
|
146
177
|
`${stepsToRetrain.join(',')}`,
|
|
147
178
|
];
|
|
148
179
|
if (process.env.BLINQ_ENV) {
|
|
149
180
|
args.push(`--env=${process.env.BLINQ_ENV}`);
|
|
150
181
|
}
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
182
|
+
if (!(0, fs_1.existsSync)(path_1.default.join(this.getAppDataDir(), 'blinq.io', '.temp'))) {
|
|
183
|
+
(0, promises_1.mkdir)(path_1.default.join(this.getAppDataDir(), 'blinq.io', '.temp'), {
|
|
184
|
+
recursive: true,
|
|
185
|
+
});
|
|
186
|
+
}
|
|
187
|
+
(0, tmp_1.tmpName)(async (err, name) => {
|
|
188
|
+
const tempFile = path_1.default.join(this.getAppDataDir(), 'blinq.io', '.temp', path_1.default.basename(name));
|
|
189
|
+
console.log('File path: ', tempFile);
|
|
190
|
+
await (0, promises_1.writeFile)(tempFile, '', 'utf-8');
|
|
154
191
|
args.push(`--temp-file=${tempFile}`);
|
|
155
192
|
const cucumberClient = (0, child_process_1.spawn)('node', [cucumber_client_path, ...args], {
|
|
156
193
|
env: {
|
|
@@ -163,10 +200,11 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
163
200
|
cucumberClient.stderr.on('data', (data) => {
|
|
164
201
|
console.error(data.toString());
|
|
165
202
|
});
|
|
166
|
-
cucumberClient.on('close', (code) => {
|
|
203
|
+
cucumberClient.on('close', async (code) => {
|
|
167
204
|
if (code === 0) {
|
|
168
205
|
const reportData = (0, fs_1.readFileSync)(tempFile, 'utf-8');
|
|
169
206
|
const retrainStats = JSON.parse(reportData);
|
|
207
|
+
await (0, promises_1.unlink)(tempFile);
|
|
170
208
|
resolve(retrainStats);
|
|
171
209
|
}
|
|
172
210
|
else {
|
|
@@ -177,17 +215,39 @@ class BVTAnalysisFormatter extends _1.default {
|
|
|
177
215
|
});
|
|
178
216
|
});
|
|
179
217
|
}
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
reportLinkBaseUrl = 'http://localhost:3000';
|
|
218
|
+
getAppDataDir() {
|
|
219
|
+
if (process.env.BLINQ_APPDATA_DIR) {
|
|
220
|
+
return process.env.BLINQ_APPDATA_DIR;
|
|
184
221
|
}
|
|
185
|
-
|
|
186
|
-
|
|
222
|
+
let appDataDir;
|
|
223
|
+
switch (process.platform) {
|
|
224
|
+
case 'win32':
|
|
225
|
+
appDataDir = process.env.APPDATA;
|
|
226
|
+
break;
|
|
227
|
+
case 'darwin':
|
|
228
|
+
appDataDir = path_1.default.join(os_1.default.homedir(), 'Library', 'Application Support');
|
|
229
|
+
break;
|
|
230
|
+
default:
|
|
231
|
+
appDataDir = path_1.default.join(os_1.default.homedir(), '.config');
|
|
232
|
+
break;
|
|
187
233
|
}
|
|
188
|
-
|
|
189
|
-
this.log(`Report link: ${reportLink}\n`);
|
|
234
|
+
return appDataDir;
|
|
190
235
|
}
|
|
191
236
|
}
|
|
192
237
|
exports.default = BVTAnalysisFormatter;
|
|
238
|
+
/**
 * Print the web-app link for a completed run's report.
 * The base URL is selected from NODE_ENV_BLINQ; production is the default.
 *
 * @param {string} runId - identifier of the uploaded run.
 * @param {string} projectId - identifier of the owning project.
 */
function logReportLink(runId, projectId) {
    // Map deployment environment to its web-app base URL.
    const baseUrlByEnv = new Map([
        ['local', 'http://localhost:3000'],
        ['dev', 'https://dev.app.blinq.io'],
        ['stage', 'https://stage.app.blinq.io'],
    ]);
    const reportLinkBaseUrl = baseUrlByEnv.get(process.env.NODE_ENV_BLINQ) ?? 'https://app.blinq.io';
    const reportLink = `${reportLinkBaseUrl}/${projectId}/run-report/${runId}`;
    console.log(`Report link: ${reportLink}\n`);
}
|
|
252
|
+
exports.logReportLink = logReportLink;
|
|
193
253
|
//# sourceMappingURL=bvt_analysis_formatter.js.map
|