@zohodesk/testinglibrary 0.4.77-n18-experimental → 0.4.78-n18-experimental
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/common/data-generator/steps/DataGenerator.spec.js +35 -0
- package/build/core/dataGenerator/DataGenerator.js +84 -0
- package/build/core/dataGenerator/DataGeneratorHelper.js +49 -0
- package/build/core/playwright/runner/SpawnRunner.js +3 -0
- package/build/core/playwright/setup/config-creator.js +19 -16
- package/build/core/playwright/setup/config-utils.js +1 -2
- package/build/utils/commonUtils.js +17 -0
- package/build/utils/fileUtils.js +20 -0
- package/npm-shrinkwrap.json +2096 -255
- package/package.json +3 -2
- package/build/test/Test.js +0 -13
- package/test-results/.last-run.json +0 -4
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
var _testinglibrary = require("@zohodesk/testinglibrary");
|
|
5
|
+
var _DataGenerator = _interopRequireDefault(require("@zohodesk/testinglibrary/DataGenerator"));
|
|
6
|
+
const {
|
|
7
|
+
Given,
|
|
8
|
+
When,
|
|
9
|
+
Then
|
|
10
|
+
} = (0, _testinglibrary.createBdd)();
|
|
11
|
+
const dataGenerator = new _DataGenerator.default();
|
|
12
|
+
Given('generate a {string} entity {string} with generator {string}', async ({
|
|
13
|
+
page,
|
|
14
|
+
context,
|
|
15
|
+
i18N,
|
|
16
|
+
cacheLayer,
|
|
17
|
+
executionContext
|
|
18
|
+
}, module, entityName, generatorName, dataTable) => {
|
|
19
|
+
const testInfo = _testinglibrary.test.info();
|
|
20
|
+
const scenarioName = testInfo.title.split('/').pop() || 'Unknown Scenario';
|
|
21
|
+
const generatedData = await dataGenerator.generate(testInfo, executionContext.actorInfo, "template", generatorName, scenarioName, dataTable ? dataTable.hashes() : []);
|
|
22
|
+
await cacheLayer.set(entityName, generatedData.data);
|
|
23
|
+
});
|
|
24
|
+
Given('generate a {string} entity {string} with API {string}', async ({
|
|
25
|
+
page,
|
|
26
|
+
context,
|
|
27
|
+
i18N,
|
|
28
|
+
cacheLayer,
|
|
29
|
+
executionContext
|
|
30
|
+
}, module, entityName, operationId, dataTable) => {
|
|
31
|
+
const testInfo = _testinglibrary.test.info();
|
|
32
|
+
const scenarioName = testInfo.title.split('/').pop() || 'Unknown Scenario';
|
|
33
|
+
const generatedData = await dataGenerator.generate(testInfo, executionContext.actorInfo, "API", operationId, scenarioName, dataTable ? dataTable.hashes() : []);
|
|
34
|
+
await cacheLayer.set(entityName, generatedData.data);
|
|
35
|
+
});
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.default = void 0;
|
|
8
|
+
var _path = _interopRequireDefault(require("path"));
|
|
9
|
+
var _fs = _interopRequireDefault(require("fs"));
|
|
10
|
+
var _logger = require("../../utils/logger");
|
|
11
|
+
var _DataGeneratorHelper = require("./DataGeneratorHelper");
|
|
12
|
+
// Babel private-method runtime helpers (generated output). The WeakSet
// "brand" records which objects were constructed by DataGenerator so the
// module-level private functions below can only be invoked on real instances.

// Registers instance `e` in private-member set `a`, guarding against double init.
function _classPrivateMethodInitSpec(e, a) { _checkPrivateRedeclaration(e, a), a.add(e); }
// Throws if the instance already carries this private brand.
function _checkPrivateRedeclaration(e, t) { if (t.has(e)) throw new TypeError("Cannot initialize the same private elements twice on an object"); }
// Access guard: returns the private member when the brand matches, else throws.
function _assertClassBrand(e, t, n) { if ("function" == typeof e ? e === t : e.has(t)) return arguments.length < 3 ? t : n; throw new TypeError("Private element is not present on this object"); }
// Brand set for DataGenerator's private methods.
var _DataGenerator_brand = /*#__PURE__*/new WeakSet();
|
|
16
|
+
/**
 * Client for the external data-generation service.
 *
 * Resolves a set of generator templates (from generators.json or from an API
 * operation id), applies scenario data-table overrides, builds the service
 * payload, and POSTs it to `DG_SERVICE_DOMAIN + DG_SERVICE_API_PATH`.
 */
class DataGenerator {
  constructor() {
    // Brand this instance so the module-level private methods accept it.
    _classPrivateMethodInitSpec(this, _DataGenerator_brand);
  }
  /**
   * Generates test data for one scenario.
   *
   * @param testInfo Playwright test info; `testInfo.file` drives the
   *   generators.json lookup in template mode.
   * @param actorInfo Actor credentials/config; `actorInfo['data-generator']`
   *   seeds the service payload downstream.
   * @param {string} generatorType 'API' to build an operation-id generator;
   *   any other value triggers the generators.json template lookup.
   * @param {string} generatorName Template name or API operation id.
   * @param {string} scenarioName Human-readable scenario label for the service.
   * @param {Array<object>} dataTable Parameter rows applied to matching generators.
   * @returns {Promise<object>} Raw service response (callers read `.data`).
   * @throws Re-throws any lookup/network failure after logging it to console.
   */
  async generate(testInfo, actorInfo, generatorType, generatorName, scenarioName, dataTable) {
    try {
      let generators;
      if (generatorType === 'API') {
        generators = await _assertClassBrand(_DataGenerator_brand, this, _generateAPIGenerator).call(this, generatorName);
      } else {
        generators = await _assertClassBrand(_DataGenerator_brand, this, _getGenerator).call(this, testInfo, generatorName);
      }
      // Merge any scenario data-table values into the generator templates.
      const processedGenerators = await (0, _DataGeneratorHelper.processGenerator)(generators, dataTable);
      const apiPayload = await _assertClassBrand(_DataGenerator_brand, this, _constructApiPayload).call(this, scenarioName, processedGenerators, actorInfo);
      // Service endpoint is assembled purely from environment variables.
      const response = await (0, _DataGeneratorHelper.makeRequest)(process.env.DG_SERVICE_DOMAIN + process.env.DG_SERVICE_API_PATH, apiPayload);
      _logger.Logger.log(_logger.Logger.INFO_TYPE, `Generated response for the generator: ${generatorName} for scenario: ${scenarioName}, Response: ${JSON.stringify(response)}`);
      return response;
    } catch (error) {
      console.error('Data Generation failed for the generator: ', generatorName, "\n\nError response :", error);
      throw error;
    }
  }
}
|
|
39
|
+
/**
 * Looks up a named template generator from the module's generators.json.
 *
 * The file is expected two directories above the authored feature file, at
 * `data-generators/generators.json`, shaped as `{ "generators": { name: def } }`.
 *
 * @param {object} testInfo Playwright test info; only `testInfo.file` is read.
 * @param {string} generatorName Key to look up under `generators`.
 * @returns {Promise<object>} The generator definition.
 * @throws {Error} When the file, the `generators` map, or the named generator
 *   cannot be found.
 */
async function _getGenerator(testInfo, generatorName) {
  let generator = null;
  let generatorFilePath = await (0, _DataGeneratorHelper.getGeneratorFilePath)(testInfo.file);
  generatorFilePath = _path.default.join(generatorFilePath, "../../data-generators/generators.json");
  if (_fs.default.existsSync(generatorFilePath)) {
    const data = _fs.default.readFileSync(generatorFilePath, 'utf8');
    const generatorObj = JSON.parse(data);
    // BUGFIX: was `generatorName || generatorObj.generators`, which crashed
    // with a TypeError when the JSON had no `generators` key; both conditions
    // must hold before indexing into the map.
    if (generatorName && generatorObj.generators) {
      generator = generatorObj.generators[generatorName] || null;
    }
  }
  if (!generator) {
    throw new Error(`Generator "${generatorName}" could not be found in the path located at "${generatorFilePath}"`);
  }
  return generator;
}
|
|
55
|
+
/**
 * Builds a single-element generator list for direct API-mode generation.
 *
 * @param {string} operationId Service operation id; doubles as the
 *   generator's display name.
 * @returns {Promise<Array<object>>} One dynamic generator descriptor whose
 *   dataPath selects the whole response body.
 */
async function _generateAPIGenerator(operationId) {
  const apiGenerator = {
    type: "dynamic",
    generatorOperationId: operationId,
    dataPath: "$.response.body:$",
    name: operationId
  };
  return [apiGenerator];
}
|
|
63
|
+
/**
 * Builds the request body for the data-generation service.
 *
 * Starts from the actor's `data-generator` config, layers in the scenario
 * name and processed templates, forces the actor's credentials onto any
 * `account` section, and stamps IAM/host environment details from env vars.
 *
 * NOTE(review): nested objects taken from `actorInfo['data-generator']`
 * (account, environmentDetails) are mutated in place — confirm callers do
 * not reuse that config across scenarios.
 *
 * @param {string} scenarioName Scenario label sent to the service.
 * @param {Array<object>} processedGenerators Templates with params applied.
 * @param {object} actorInfo Actor record (email, password, 'data-generator').
 * @returns {Promise<object>} The assembled service payload.
 * @throws {TypeError} When `process.env.domain` is not a valid absolute URL.
 */
async function _constructApiPayload(scenarioName, processedGenerators, actorInfo) {
  const dataGeneratorConfig = actorInfo['data-generator'] || {};
  const apiPayload = {
    scenario_name: scenarioName,
    data_generation_templates: processedGenerators,
    ...dataGeneratorConfig
  };
  const account = apiPayload.account;
  if (account) {
    // Credentials always come from the actor, overriding anything in config.
    account.email = actorInfo.email;
    account.password = actorInfo.password;
  }
  // Default to {} so environment details are always present on the payload.
  // (A previous `if (environmentDetails)` guard was always true after the
  // `|| {}` fallback and has been removed as dead code.)
  const environmentDetails = apiPayload.environmentDetails || {};
  environmentDetails.iam_url = process.env.DG_IAM_DOMAIN;
  const domainUrl = new URL(process.env.domain);
  environmentDetails.host = domainUrl.origin;
  apiPayload.environmentDetails = environmentDetails;
  return apiPayload;
}
|
|
84
|
+
var _default = exports.default = DataGenerator;
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.getGeneratorFilePath = getGeneratorFilePath;
|
|
7
|
+
exports.makeRequest = makeRequest;
|
|
8
|
+
exports.processGenerator = processGenerator;
|
|
9
|
+
//Create payload for the generators
|
|
10
|
+
/**
 * Applies scenario data-table overrides onto the generator templates.
 *
 * Each data-table row is matched to a generator by its DG_API_NAME column;
 * every other non-empty column becomes a named parameter on that generator.
 * Generators are mutated in place and the same elements are returned.
 *
 * @param {Array<object>} generators Generator templates to enrich.
 * @param {Array<object>|null|undefined} dataTable Rows from the Gherkin table.
 * @returns {Promise<Array<object>>} The (possibly enriched) generators.
 */
async function processGenerator(generators, dataTable) {
  if (!dataTable) {
    return generators;
  }
  return generators.map(generator => {
    dataTable.forEach(row => {
      const generatorName = row.DG_API_NAME;
      if (generatorName === generator.name) {
        // BUGFIX: params were initialised as an array ([]) while being used as
        // a string-keyed map below; JSON.stringify drops non-index properties
        // of arrays, so every parameter silently vanished from the payload
        // sent by makeRequest. Initialise as a plain object instead.
        generator.params = generator.params ? generator.params : {};

        // The API name row is only used for matching the template
        // Filter out DG_API_NAME and collect other values
        Object.entries(row).filter(([key]) => key !== 'DG_API_NAME').forEach(([key, value]) => {
          if (value !== '') {
            generator.params[key] = value;
          }
        });
      }
    });
    return generator;
  });
}
|
|
32
|
+
/**
 * POSTs `payload` as JSON to `url` and returns the parsed JSON response.
 *
 * @param {string} url Fully-qualified endpoint URL.
 * @param {object} payload Serialisable request body.
 * @returns {Promise<object>} Parsed JSON body of a successful response.
 * @throws {Error} When the response is not ok; the message carries the HTTP
 *   status and the raw response text.
 */
async function makeRequest(url, payload) {
  const requestOptions = {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(payload)
  };
  const response = await fetch(url, requestOptions);
  if (response.ok) {
    return response.json();
  }
  const errorBody = await response.text();
  throw new Error(`HTTP error! status: ${response.status}, body: ${errorBody}`);
}
|
|
46
|
+
/**
 * Maps a generated feature-spec path back to its authored location by
 * removing the ".features-gen/" segment from the path.
 *
 * @param {string} featureFile Path of the generated feature file.
 * @returns {Promise<string>} Path with the generated-output segment stripped.
 */
async function getGeneratorFilePath(featureFile) {
  const GENERATED_DIR_SEGMENT = ".features-gen/";
  // String.replace with a string pattern removes only the first occurrence,
  // matching the original behaviour.
  return featureFile.replace(GENERATED_DIR_SEGMENT, "");
}
|
|
@@ -8,6 +8,7 @@ var _logger = require("../../../utils/logger");
|
|
|
8
8
|
var _child_process = require("child_process");
|
|
9
9
|
var _RunnerHelper = _interopRequireDefault(require("./RunnerHelper"));
|
|
10
10
|
var _Runner = _interopRequireDefault(require("./Runner"));
|
|
11
|
+
var _commonUtils = require("../../../utils/commonUtils");
|
|
11
12
|
class SpawnRunner extends _Runner.default {
|
|
12
13
|
constructor(runnerObj) {
|
|
13
14
|
super(runnerObj);
|
|
@@ -30,6 +31,8 @@ class SpawnRunner extends _Runner.default {
|
|
|
30
31
|
});
|
|
31
32
|
}
|
|
32
33
|
runPreprocessing() {
|
|
34
|
+
//The function below is called to copy the data generator spec files to the current project
|
|
35
|
+
(0, _commonUtils.copyCommonSpecs)();
|
|
33
36
|
const {
|
|
34
37
|
tagArgs
|
|
35
38
|
} = this.runnerObj;
|
|
@@ -44,12 +44,6 @@ const testDir = (0, _configUtils.getTestDir)(bddMode, {
|
|
|
44
44
|
outputDir: _path.default.join(process.cwd(), 'uat', '.features-gen'),
|
|
45
45
|
uatPath: _path.default.join(process.cwd(), 'uat')
|
|
46
46
|
});
|
|
47
|
-
const smokeTestDir = (0, _configUtils.getTestDir)(bddMode, {
|
|
48
|
-
featureFilesFolder: _path.default.join(process.cwd(), 'uat', 'smokeTest', '**', '*.feature'),
|
|
49
|
-
stepDefinitionsFolder: _path.default.join(process.cwd(), 'uat', 'smokeTest', '**', '*smokeTest.spec.js'),
|
|
50
|
-
outputDir: _path.default.join(process.cwd(), 'uat', '.features-smoke-gen'),
|
|
51
|
-
uatPath: _path.default.join(process.cwd(), 'uat')
|
|
52
|
-
});
|
|
53
47
|
const use = {
|
|
54
48
|
trace,
|
|
55
49
|
video,
|
|
@@ -77,14 +71,23 @@ function getPlaywrightConfig() {
|
|
|
77
71
|
storageState: isAuthMode ? (0, _readConfigFile.getAuthFilePath)(_path.default.resolve(process.cwd(), authFilePath)) : {}
|
|
78
72
|
};
|
|
79
73
|
const dependencies = isAuthMode ? ['setup'] : [];
|
|
80
|
-
const smokeTestProject =
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
74
|
+
const smokeTestProject = isSmokeTest ? smokeTestConfig() : [];
|
|
75
|
+
function smokeTestConfig() {
|
|
76
|
+
const smokeTestDir = (0, _configUtils.getTestDir)(bddMode, {
|
|
77
|
+
featureFilesFolder: _path.default.join(process.cwd(), 'uat', 'smokeTest', '**', '*.feature'),
|
|
78
|
+
stepDefinitionsFolder: _path.default.join(process.cwd(), 'uat', '**', 'steps', '*.spec.js'),
|
|
79
|
+
outputDir: _path.default.join(process.cwd(), 'uat', '.features-smoke-gen'),
|
|
80
|
+
uatPath: _path.default.join(process.cwd(), 'uat', 'smokeTest')
|
|
81
|
+
});
|
|
82
|
+
return [{
|
|
83
|
+
name: 'smokeTest',
|
|
84
|
+
testDir: smokeTestDir,
|
|
85
|
+
use: {
|
|
86
|
+
...commonConfig
|
|
87
|
+
},
|
|
88
|
+
dependencies: dependencies
|
|
89
|
+
}];
|
|
90
|
+
}
|
|
88
91
|
const playwrightConfig = {
|
|
89
92
|
testDir,
|
|
90
93
|
globalTimeout: globalTimeout || 3600000,
|
|
@@ -101,11 +104,11 @@ function getPlaywrightConfig() {
|
|
|
101
104
|
testMatch: /.*\.setup\.js/,
|
|
102
105
|
testDir: _path.default.join(process.cwd(), 'uat'),
|
|
103
106
|
teardown: 'cleanup'
|
|
104
|
-
}, ...
|
|
107
|
+
}, ...smokeTestProject, {
|
|
105
108
|
name: 'cleanup',
|
|
106
109
|
testMatch: /.*\.teardown\.js/,
|
|
107
110
|
testDir: _path.default.join(process.cwd(), 'uat')
|
|
108
|
-
}, ...projects] : [...projects, ...
|
|
111
|
+
}, ...projects] : [...projects, ...smokeTestProject],
|
|
109
112
|
...uatConfig
|
|
110
113
|
};
|
|
111
114
|
return playwrightConfig;
|
|
@@ -162,9 +162,8 @@ function getModulePathForFeatureFiles(moduleList) {
|
|
|
162
162
|
if ((0, _fileUtils.checkIfFolderExistsWithPattern)(modulePath)) {
|
|
163
163
|
validModuleList.push(_path.default.join(modulePath, '**', '*.feature'));
|
|
164
164
|
} else {
|
|
165
|
-
_logger.Logger.log(_logger.Logger.FAILURE_TYPE, `Module ${moduleName} does not exist, Please check the module name`);
|
|
166
165
|
validModuleList = [];
|
|
167
|
-
|
|
166
|
+
throw new Error(`Module ${moduleName} does not exist. We have not triggered the execution for this module`);
|
|
168
167
|
}
|
|
169
168
|
});
|
|
170
169
|
return validModuleList;
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.copyCommonSpecs = copyCommonSpecs;
|
|
8
|
+
var _fileUtils = require("./fileUtils");
|
|
9
|
+
var _path = _interopRequireDefault(require("path"));
|
|
10
|
+
/**
 * Copies the library's bundled common spec files into the consuming
 * project's `uat/modules/.testingLib-common` folder, replacing any
 * previous copy.
 */
function copyCommonSpecs() {
  // require.resolve points at build/index.js; the common specs live in the
  // sibling `common` directory one level up from that entry file.
  const entryFile = require.resolve("@zohodesk/testinglibrary");
  const sourceDirectory = _path.default.resolve(entryFile, '../', 'common');
  const targetDirectory = _path.default.resolve(process.cwd(), 'uat', 'modules', '.testingLib-common');
  // Delete first so stale specs from a previous library version never linger.
  (0, _fileUtils.deleteFolder)(targetDirectory);
  (0, _fileUtils.copyDirectory)(sourceDirectory, targetDirectory);
}
|
package/build/utils/fileUtils.js
CHANGED
|
@@ -6,6 +6,7 @@ Object.defineProperty(exports, "__esModule", {
|
|
|
6
6
|
});
|
|
7
7
|
exports.checkIfFileExists = checkIfFileExists;
|
|
8
8
|
exports.checkIfFolderExistsWithPattern = checkIfFolderExistsWithPattern;
|
|
9
|
+
exports.copyDirectory = copyDirectory;
|
|
9
10
|
exports.deleteFile = deleteFile;
|
|
10
11
|
exports.deleteFolder = deleteFolder;
|
|
11
12
|
exports.readFileContents = readFileContents;
|
|
@@ -86,4 +87,23 @@ function checkIfFolderExistsWithPattern(folderPath) {
|
|
|
86
87
|
_logger.Logger.error(err);
|
|
87
88
|
return false;
|
|
88
89
|
}
|
|
90
|
+
}
|
|
91
|
+
/**
 * Recursively copies the contents of `src` into `dest`.
 *
 * Creates `dest` (with any missing parents) when absent, then walks the
 * source entries: subdirectories recurse, regular files are copied with
 * copyFileSync (overwriting any existing file of the same name).
 *
 * @param {string} src Existing source directory.
 * @param {string} dest Destination directory (created if needed).
 */
function copyDirectory(src, dest) {
  if (!_fs.default.existsSync(dest)) {
    _fs.default.mkdirSync(dest, { recursive: true });
  }
  const dirents = _fs.default.readdirSync(src, { withFileTypes: true });
  dirents.forEach(dirent => {
    const from = _path.default.join(src, dirent.name);
    const to = _path.default.join(dest, dirent.name);
    if (dirent.isDirectory()) {
      copyDirectory(from, to);
    } else {
      _fs.default.copyFileSync(from, to);
    }
  });
}
|