vue-ninja 0.3.0 → 0.4.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/vue-ninja.js +1 -62
- package/package.json +20 -20
- package/rollup.config.ts +19 -0
- package/dist/coverage.js +0 -44
- package/dist/e2e.js +0 -55
- package/dist/hash-checker.js +0 -82
- package/dist/jest.js +0 -72
- package/dist/lint.js +0 -51
- package/dist/logger.js +0 -23
- package/dist/models.js +0 -2
- package/dist/submit-results.js +0 -131
- package/dist/utils.js +0 -64
- package/dist/vitest.js +0 -68
package/dist/vue-ninja.js
CHANGED
@@ -1,63 +1,2 @@
  #!/usr/bin/env node
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var command_line_args_1 = __importDefault(require("command-line-args"));
- var rimraf_1 = __importDefault(require("rimraf"));
- var vitest_1 = __importDefault(require("./vitest"));
- var hash_checker_1 = __importDefault(require("./hash-checker"));
- var coverage_1 = __importDefault(require("./coverage"));
- var lint_1 = __importDefault(require("./lint"));
- var e2e_1 = __importDefault(require("./e2e"));
- var submit_results_1 = __importDefault(require("./submit-results"));
- var logger_1 = __importDefault(require("./logger"));
- // parse cli options
- var options = (0, command_line_args_1.default)([
-     { name: 'local', type: Boolean },
-     { name: 'local-server', type: Boolean },
-     { name: 'verbose', type: Boolean },
-     { name: 'skip-check', type: Boolean }
- ]);
- // clean up the directory
- rimraf_1.default.sync('results/*');
- var results = {
-     coverage: {
-         covered: 100,
-         total: 100
-     },
-     e2e: {
-         failed: 0,
-         success: 1
-     },
-     tslint: {
-         errors: 0
-     }
- };
- (0, hash_checker_1.default)(options)
-     .then(function (exercise) { return (results.exercise = exercise); })
-     .then(function () { return logger_1.default.debug(options, 'Exercise: ', results.exercise.title); })
-     .then(function () { return (0, vitest_1.default)(options); })
-     .then(function (vitestResults) { return (results.vitest = vitestResults); })
-     .then(function () { return logger_1.default.debug(options, 'Vitest results: ', JSON.stringify(results.vitest)); })
-     .then(function () { return (0, coverage_1.default)(options); })
-     .then(function (coverageResults) { return (results.coverage = coverageResults); })
-     .then(function () { return logger_1.default.debug(options, 'Coverage results: ', JSON.stringify(results.coverage)); })
-     .then(function () { return (0, lint_1.default)(options); })
-     .then(function (tslintResults) { return (results.tslint = tslintResults); })
-     .then(function () { return logger_1.default.debug(options, 'Linter results: ', JSON.stringify(results.tslint)); })
-     .then(function () { return (0, e2e_1.default)(options); })
-     .then(function (e2eResults) { return (results.e2e = e2eResults); })
-     .then(function () { return logger_1.default.debug(options, 'E2e results: ', JSON.stringify(results.e2e)); })
-     .then(function () { return (0, submit_results_1.default)(results, options); })
-     .catch(function (error) {
-     logger_1.default.error('Try to fix it and retry!');
-     if (options.verbose) {
-         logger_1.default.error(error);
-     }
-     if (options.local) {
-         logger_1.default.error('Running locally, we exit');
-         process.exit(1);
-     }
- });
+ "use strict";var e=require("command-line-args"),r=require("rimraf"),t=require("fs"),s=require("crypto"),n=require("child_process"),o=require("prompt"),i=require("util"),a=require("glob"),u=require("chalk"),c=require("axios");function l(e){return e&&"object"==typeof e&&"default"in e?e:{default:e}}function d(e){if(e&&e.__esModule)return e;var r=Object.create(null);return e&&Object.keys(e).forEach((function(t){if("default"!==t){var s=Object.getOwnPropertyDescriptor(e,t);Object.defineProperty(r,t,s.get?s:{enumerable:!0,get:function(){return e[t]}})}})),r.default=e,Object.freeze(r)}var h=l(e),g=l(r),f=l(t),m=l(s),p=l(n),y=d(o),w=l(i),v=l(a),b=l(u),k=l(c);const j={log:console.log,error:e=>console.log(b.default.red(e)),warn:e=>console.log(b.default.yellow(e)),success:e=>console.log(b.default.green(e)),debug:(e,r,t)=>{e.verbose&&(t?console.log(b.default.gray(r),b.default.gray(t)):console.log(b.default.gray(r)))}},x=w.default.promisify(p.default.exec),E=w.default.promisify(f.default.readFile),P=e=>E(e,"utf8"),S=e=>x(e,{maxBuffer:1024e3}),O=e=>{const r=f.default.readFileSync(e),t=m.default.createHash("sha256");return t.update(r),t.digest("hex")};function q(e,r,t,s){const n=JSON.parse(e);if(!n)return j.error("An error occurred during the unit tests."),j.error("Run `npm run test:unit` to see the problem."),s();const o={failed:n.numFailedTests?n.numFailedTests:n.numFailedTestSuites,success:n.numPassedTests};return 0!==o.failed?function(e){const r=function(e){if(void 0===e.failed)return 0;const r=100-5*e.failed;return r>=0?r:0}(e);j.warn("Unit tests score: "+r+"%"),j.warn("Looks like you have "+(e.failed?e.failed:"a few")+" unit test(s) failing."),j.warn("Launch `npm run test:unit` and try to fix them.")}(o):j.success("Unit tests score: 100%"),t(o)}const J=e=>new Promise(((r,t)=>(j.log("Starting unit tests..."),S("npm run test:unit:ci").then((()=>P("results/vitest-results.json")),(r=>(j.debug(e,"Vitest test failed",r),P("results/vitest-results.json")))).then((e=>q(e,0,r,t)),(r=>(j.debug(e,"An error occurred while reading vitest results: ",r),function(e){return j.error("Error while running unit tests."),j.error("Maybe your code doesn't compile?"),j.error("Launch `npm run test:unit` to see more."),e()}(t)))))));function N(){return e="./{cypress,src}/**/*.@(cy|spec).ts",new Promise(((r,t)=>{v.default(e,{nodir:!0},((e,s)=>{e?(j.error("Could not find the spec files."),t(e)):r(s)}))}));var e}function A(){return N().then((e=>function(e){return e.map((e=>({name:e,hash:O(e)})))}(e)))}const C=e=>new Promise(((r,t)=>(j.log("Starting code coverage..."),P("coverage/coverage-summary.json").then((e=>function(e,r){const t=e.replace(/\\/g,"/"),s=JSON.parse(t),n={covered:s.total.lines.covered,total:s.total.lines.total},o=function(e){const r=100+e.covered-e.total;return r<0?0:r}(n);100!==o?(j.warn("Code coverage score: "+o+"%"),j.warn("Looks like you don't have a perfect code coverage."),j.warn("Maybe you skipped/deleted some unit tests?"),j.warn("Or maybe you added some code to try something, and this is not covered by a unit test?"),j.warn("To have more info open `coverage/index.html`"),j.warn("You should see which file has a problem.")):j.success("Code coverage score: 100%"),r(n)}(e,r)),(r=>{j.error("Error while reading the code coverage results. Try to run `npm run test:unit`."),j.debug(e,"Error while reading the code coverage results was: ",r),t()}))))),T="results/cypress-results.json";function L(){return S(`npx mochawesome-merge ./results/mochawesome/*.json -o ${T}`)}const $=e=>new Promise(((r,t)=>(j.log("Starting end to end tests..."),S("npm run test:e2e:ci").then((()=>L()),(r=>(j.debug(e,"Error while running e2e was: ",r),L()))).then((()=>P(T)),(()=>P(T))).then((e=>function(e){const r=JSON.parse(e),t=r.stats.failures,s=r.stats.passes,n={success:s,failed:t};return t?(j.warn("Looks like you have "+t+" e2e tests failing."),j.warn("Try to launch the tests using `npm run test:e2e`."),j.warn(`E2e tests score: ${Math.round(100*s/(s+t))}%`)):j.success("E2e tests score: 100%"),n}(e))).then((e=>r(e)),(r=>(j.debug(e,"Error while reading e2e results: ",r),j.error("Error while running end-to-end tests."),j.error("Check if your app is running with no error in the browser console,"),j.error("and try to launch the tests using `npm run test:e2e`."),t())))))),B="https://vue-exercises.ninja-squad.com";function M(e,r,t){if((r+=1)>3)return j.error("Aborting authentication after 3 failures"),Promise.reject();return(e=>new Promise(((r,t)=>{y.start(),y.get(e,((e,s)=>{e?t():r(s)}))})))({properties:{email:{required:!0},password:{hidden:!0,required:!0}}}).then((r=>k.default.post(`${e["local-server"]?"http://localhost:8080":B}/api/authentications`,r))).then((s=>201!==s.status?(j.error("Authentication failed, try again."),j.debug(e,`Authentication request returned with status: ${s.status}`),M(e,r,t)):(j.debug(e,"Authentication succeeded and we can store credentials"),function(e,r){return j.success("Authentication success"),e.token=r,e}(t,s.data.token))),(s=>(j.error("Authentication failed, try again."),j.debug(e,"An error occurred during authentication: ",s),M(e,r,t))))}function F(e,r,t){let s=100-5*r.vitest.failed-r.tslint.errors-(r.coverage.total-r.coverage.covered)-r.e2e.failed;var n;return s<10&&(s=10),t.local?(100!==s&&(j.error("Score is not perfect! "+s+"%"),n=1,process.exit(n)),j.success("Exercise score: "+s+"%")):100!==s?j.warn("Score is not perfect yet: "+s+"%"):j.success("Perfect score, congrats! "+s+"%"),{metadata:e,score:s}}function V(e,r){const t={Authorization:`Bearer: ${r.token}`},s=e["local-server"]?"http://localhost:8080":B,n={score:r.score};return k.default.post(`${s}/api/packs/${r.pack}/exercises/${r.id}/scores`,n,{headers:t}).then((t=>{if(401===t.status){j.error("Authentication failed. Please enter your credentials again.");return M(e,0,r).then((r=>V(e,r)),(r=>(j.error("Error while authenticating to submit results."),j.debug(e,"Error was: ",r),Promise.reject())))}return 201!==t.status?(j.error("Error while sending the results."),j.debug(e,JSON.stringify(t.status),JSON.stringify(t.data)),Promise.reject()):(j.log("Score submitted."),Promise.resolve(r))}),(r=>(j.error("Error while sending the results."),j.error("Maybe check your connection?"),j.debug(e,"Error while sending the result was: ",r),Promise.reject())))}const _=(e,r)=>P("./exercise.json").then((e=>JSON.parse(e)),(e=>(j.error("Looks like you have no exercise.json file."),j.debug(r,"An error occurred while reading exercise.json: ",e),Promise.reject()))).then((t=>F(t,e,r)),(e=>(j.error("Looks like you have an incorrect JSON in the exercise.json file."),j.debug(r,"An error occurred while parsing exercise.json: ",e),Promise.reject()))).then((({metadata:e,score:t})=>{e.score=t;return r.local?e:M(r,0,e)})).then((e=>r.local?e:V(r,e))),z=h.default([{name:"local",type:Boolean},{name:"local-server",type:Boolean},{name:"verbose",type:Boolean},{name:"skip-check",type:Boolean}]);g.default.sync("results/*");const H={coverage:{covered:100,total:100},e2e:{failed:0,success:1},tslint:{errors:0}};(e=>new Promise(((r,t)=>{j.log("Checking submission..."),P("./exercise.json").then((t=>{const s=JSON.parse(t);return j.success("Pack V"+s.pack+" - Exercise "+s.id+" - "+s.title),e["skip-check"]?r(s):s}),(r=>(j.error("Looks like you have no exercise.json file."),j.debug(e,"An error occurred while reading exercise.json: ",r),Promise.reject()))).then((e=>Promise.all([e,A()]))).then((([r,t])=>{j.debug(e,"Exercise :",JSON.stringify(r)),j.debug(e,"Hashes :",JSON.stringify(t));const s=[],n=[];return r.specs.forEach((e=>{const r=function(e,r){return e.find((e=>-1!==e.name.indexOf(r)))}(t,e.name);r?r.hash!==e.hash&&s.push(e.name):n.push(e.name)})),0!==s.length||0!==n.length?(0!==s.length&&(j.error("It looks like you forgot to update these spec files"),j.error("or that you modified them manually:"),s.forEach((e=>j.error(" - "+e)))),0!==n.length&&(j.error("It looks like you forgot to add these spec files:"),n.forEach((e=>j.error(" - "+e)))),j.error("Copy the new specs and try again."),Promise.reject()):r}),(r=>(j.debug(e,"Error while computing the hash of the spec files. ",r),Promise.reject(r)))).then((e=>r(e)),(e=>t(e)))})))(z).then((e=>H.exercise=e)).then((()=>j.debug(z,"Exercise: ",H.exercise.title))).then((()=>J(z))).then((e=>H.vitest=e)).then((()=>j.debug(z,"Vitest results: ",JSON.stringify(H.vitest)))).then((()=>C(z))).then((e=>H.coverage=e)).then((()=>j.debug(z,"Coverage results: ",JSON.stringify(H.coverage)))).then((()=>(e=>new Promise(((r,t)=>(j.log("Starting code analysis..."),S("npm run lint -- --no-fix --format=json --output-file=results/lint-results.json").then((()=>P("results/lint-results.json")),(()=>P("results/lint-results.json"))).then((e=>e?JSON.parse(e):Promise.reject()),(r=>{j.error("Error while running `npm run lint`."),j.error("Try to run `npm run lint`."),j.debug(e,"Error while running lint was: ",r),t()})).then((e=>{if(!e)return t();const s={errors:0};if(s.errors=e.map((e=>e.errorCount+e.warningCount)).reduce(((e,r)=>e+r),0),0!==s.errors){const e=Math.max(0,100-s.errors);j.warn("Code quality score: "+e+"%"),j.warn("Looks like you have some issues in your code."),j.warn("Try to run `npm run lint` to see what you can fix.")}else j.success("Code quality score: 100%");r(s)}),(r=>{j.error("Error while reading lint results."),j.error("Try to run `npm run lint`."),j.debug(e,"Error while reading lint errors was: ",r),t()}))))))(z))).then((e=>H.tslint=e)).then((()=>j.debug(z,"Linter results: ",JSON.stringify(H.tslint)))).then((()=>$(z))).then((e=>H.e2e=e)).then((()=>j.debug(z,"E2e results: ",JSON.stringify(H.e2e)))).then((()=>_(H,z))).catch((e=>{j.error("Try to fix it and retry!"),z.verbose&&j.error(e),z.local&&(j.error("Running locally, we exit"),process.exit(1))}));
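
For readability, here is a minimal TypeScript sketch (editorial, not shipped with the package; names are illustrative) of the score formula that the minified bundle above applies before submitting results. It mirrors `computeScore` in the removed dist/submit-results.js further down and function `F` in the new bundle: 5 points per failed unit test, 1 point per lint issue, 1 point per uncovered line, 1 point per failed e2e test, floored at 10.

interface Results {
  vitest: { failed: number; success: number };
  tslint: { errors: number };
  coverage: { covered: number; total: number };
  e2e: { failed: number; success: number };
}

function computeScore(results: Results): number {
  const score =
    100 -
    results.vitest.failed * 5 -
    results.tslint.errors -
    (results.coverage.total - results.coverage.covered) -
    results.e2e.failed;
  return score < 10 ? 10 : score; // the CLI never reports below 10%
}
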
package/package.json
CHANGED
@@ -1,42 +1,42 @@
  {
    "name": "vue-ninja",
-   "version": "0.3.0",
+   "version": "0.4.0-beta.2",
    "description": "CLI for the exercises of \"Become a ninja with Vue\"",
    "main": "dist/vue-ninja.js",
    "scripts": {
-     "build": "
-     "test": "
-     "
-     "
+     "build": "rollup --config rollup.config.ts --configPlugin @rollup/plugin-typescript --failAfterWarnings",
+     "test": "vitest",
+     "lint": "eslint {src,tests}/**/* --ext .ts",
+     "type-check": "tsc --noEmit"
    },
    "author": "",
    "license": "ISC",
    "dependencies": {
+     "axios": "0.27.2",
      "chalk": "4.1.2",
      "command-line-args": "5.2.1",
      "glob": "8.0.3",
-     "hash-files": "1.1.1",
      "prompt": "1.3.0",
-     "request": "2.88.2",
      "rimraf": "3.0.2"
    },
    "devDependencies": {
-     "@
-     "@typescript-eslint/parser": "5.27.0",
+     "@rollup/plugin-typescript": "8.4.0",
      "@types/command-line-args": "5.2.0",
-     "@types/glob": "
-     "@types/
-     "@types/node": "16.11.38",
-     "@types/request": "2.48.8",
+     "@types/glob": "8.0.0",
+     "@types/node": "16.11.56",
      "@types/rimraf": "3.0.2",
-     "
-     "eslint": "
+     "@typescript-eslint/eslint-plugin": "5.36.1",
+     "@typescript-eslint/parser": "5.36.1",
+     "create-vue": "3.3.2",
+     "eslint": "8.23.0",
      "eslint-config-prettier": "8.5.0",
-     "eslint-plugin-prettier": "4.
-     "
-     "
-     "
-     "
+     "eslint-plugin-prettier": "4.2.1",
+     "prettier": "2.7.1",
+     "rollup": "2.79.0",
+     "rollup-plugin-preserve-shebangs": "0.2.0",
+     "rollup-plugin-terser": "7.0.2",
+     "typescript": "4.8.2",
+     "vitest": "0.22.1"
    },
    "bin": {
      "vue-ninja": "dist/vue-ninja.js"
package/rollup.config.ts
ADDED
@@ -0,0 +1,19 @@
+ import type { RollupOptions } from 'rollup';
+ import typescript from '@rollup/plugin-typescript';
+ import { terser } from 'rollup-plugin-terser';
+ import { preserveShebangs } from "rollup-plugin-preserve-shebangs";
+
+ const config: RollupOptions = {
+   input: 'src/vue-ninja.ts',
+   output: {
+     file: 'dist/vue-ninja.js',
+     format: 'cjs',
+     // minify
+     plugins: [terser()],
+   },
+   external: ['command-line-args', 'rimraf', 'axios', 'chalk', 'prompt', 'glob', 'fs', 'crypto', 'child_process', 'util'],
+   // we need to keep the shebang to make the file executable
+   // we need the typescript plugin
+   plugins: [preserveShebangs(), typescript()]
+ };
+ export default config;
package/dist/coverage.js
DELETED
@@ -1,44 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var logger_1 = __importDefault(require("./logger"));
- var utils_1 = require("./utils");
- var RESULT_FILE = 'coverage/coverage-summary.json';
- function computeScore(coverageResults) {
-     var score = 100 + coverageResults.covered - coverageResults.total;
-     return score < 0 ? 0 : score;
- }
- function handleCoverageResults(data, resolve) {
-     // temporary workaround for https://github.com/istanbuljs/istanbul-reports/pull/20/files:
-     // the file paths are not properly encoded in the JSON, and so we transform backslashes to slashes.
-     // since everything else in the file is just hard-coded keys and numbers, this should work fine.
-     var dataEncoded = data.replace(/\\/g, '/');
-     var results = JSON.parse(dataEncoded);
-     var coverageResults = { covered: results.total.lines.covered, total: results.total.lines.total };
-     var score = computeScore(coverageResults);
-     if (score !== 100) {
-         logger_1.default.warn('Code coverage score: ' + score + '%');
-         logger_1.default.warn("Looks like you don't have a perfect code coverage.");
-         logger_1.default.warn('Maybe you skipped/deleted some unit tests?');
-         logger_1.default.warn('Or maybe you added some code to try something, and this is not covered by a unit test?');
-         logger_1.default.warn('To have more info open `coverage/index.html`');
-         logger_1.default.warn('You should see which file has a problem.');
-     }
-     else {
-         logger_1.default.success('Code coverage score: 100%');
-     }
-     resolve(coverageResults);
- }
- var coverage = function (options) {
-     return new Promise(function (resolve, reject) {
-         logger_1.default.log('Starting code coverage...');
-         return (0, utils_1.readFile)(RESULT_FILE).then(function (data) { return handleCoverageResults(data, resolve); }, function (error) {
-             logger_1.default.error('Error while reading the code coverage results. Try to run `npm run test:unit`.');
-             logger_1.default.debug(options, 'Error while reading the code coverage results was: ', error);
-             reject();
-         });
-     });
- };
- exports.default = coverage;
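
As a worked example of the coverage scoring in the deleted coverage.js (kept unchanged in the new bundle): each uncovered line costs one point and the score is floored at 0. A minimal sketch, not part of the package:

function coverageScore(covered: number, total: number): number {
  const score = 100 + covered - total;
  return score < 0 ? 0 : score;
}

coverageScore(95, 100); // 95: five uncovered lines cost five points
coverageScore(0, 150);  // 0: the score never goes negative
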
package/dist/e2e.js
DELETED
@@ -1,55 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var logger_1 = __importDefault(require("./logger"));
- var utils_1 = require("./utils");
- var RESULT_FILE = 'results/cypress-results.json';
- function warnUserOnFailure() {
-     logger_1.default.error('Error while running end-to-end tests.');
-     logger_1.default.error('Check if your app is running with no error in the browser console,');
-     logger_1.default.error('and try to launch the tests using `npm run test:e2e`.');
- }
- function handleCypressResults(data) {
-     var results = JSON.parse(data);
-     var failed = results.stats.failures;
-     var success = results.stats.passes;
-     var cypressResults = { success: success, failed: failed };
-     if (failed) {
-         logger_1.default.warn('Looks like you have ' + failed + ' e2e tests failing.');
-         logger_1.default.warn('Try to launch the tests using `npm run test:e2e`.');
-         logger_1.default.warn("E2e tests score: ".concat(Math.round((success * 100) / (success + failed)), "%"));
-     }
-     else {
-         logger_1.default.success('E2e tests score: 100%');
-     }
-     return cypressResults;
- }
- function mergeReports() {
-     return (0, utils_1.exec)("npx mochawesome-merge ./results/mochawesome/*.json -o ".concat(RESULT_FILE));
- }
- var e2e = function (options) {
-     return new Promise(function (resolve, reject) {
-         logger_1.default.log('Starting end to end tests...');
-         // cypress exit code is 1 if a test fail
-         // so we'll go into the error handler even if the execution went well
-         // and we try to read the results in both cases
-         // mode=production is a hacky way to ensure integration tests pass.
-         return (0, utils_1.exec)('npm run test:e2e:ci')
-             .then(
-         // each spec produces a report, so we need to merge them
-         function () { return mergeReports(); }, function (error) {
-             logger_1.default.debug(options, 'Error while running e2e was: ', error);
-             return mergeReports();
-         })
-             .then(function () { return (0, utils_1.readFile)(RESULT_FILE); }, function () { return (0, utils_1.readFile)(RESULT_FILE); })
-             .then(function (data) { return handleCypressResults(data); })
-             .then(function (results) { return resolve(results); }, function (error) {
-             logger_1.default.debug(options, 'Error while reading e2e results: ', error);
-             warnUserOnFailure();
-             return reject();
-         });
-     });
- };
- exports.default = e2e;
package/dist/hash-checker.js
DELETED
@@ -1,82 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var hash_files_1 = __importDefault(require("hash-files"));
- var utils_1 = require("./utils");
- var logger_1 = __importDefault(require("./logger"));
- function findSpecIfItExists(hashes, spec) {
-     return hashes.find(function (hash) {
-         return hash.name.indexOf(spec) !== -1;
-     });
- }
- function listSpecs() {
-     return (0, utils_1.listFiles)('./{cypress,src}/**/*.@(cy|spec).ts');
- }
- function computeHash(specs) {
-     return specs.map(function (name) {
-         return { name: name, hash: hash_files_1.default.sync({ files: [name] }) };
-     });
- }
- function computeHashesOfSpecs() {
-     return listSpecs().then(function (specs) { return computeHash(specs); });
- }
- var hashChecker = function (options) {
-     return new Promise(function (resolve, reject) {
-         logger_1.default.log('Checking submission...');
-         (0, utils_1.readFile)('./exercise.json')
-             .then(function (exerciseData) {
-             var exercise = JSON.parse(exerciseData);
-             logger_1.default.success('Pack V' + exercise.pack + ' - Exercise ' + exercise.id + ' - ' + exercise.title);
-             if (options['skip-check']) {
-                 return resolve(exercise);
-             }
-             return exercise;
-         }, function (error) {
-             logger_1.default.error('Looks like you have no exercise.json file.');
-             logger_1.default.debug(options, 'An error occurred while reading exercise.json: ', error);
-             return Promise.reject();
-         })
-             // then compute the hash of every spec
-             .then(function (exercise) { return Promise.all([exercise, computeHashesOfSpecs()]); })
-             // then decide if the submission is correct or not
-             .then(function (_a) {
-             var exercise = _a[0], hashes = _a[1];
-             logger_1.default.debug(options, 'Exercise :', JSON.stringify(exercise));
-             logger_1.default.debug(options, 'Hashes :', JSON.stringify(hashes));
-             var specsInError = [];
-             var specsNotFound = [];
-             exercise.specs.forEach(function (file) {
-                 var spec = findSpecIfItExists(hashes, file.name);
-                 if (spec) {
-                     if (spec.hash !== file.hash) {
-                         specsInError.push(file.name);
-                     }
-                 }
-                 else {
-                     specsNotFound.push(file.name);
-                 }
-             });
-             if (specsInError.length !== 0 || specsNotFound.length !== 0) {
-                 if (specsInError.length !== 0) {
-                     logger_1.default.error('It looks like you forgot to update these spec files');
-                     logger_1.default.error('or that you modified them manually:');
-                     specsInError.forEach(function (spec) { return logger_1.default.error(' - ' + spec); });
-                 }
-                 if (specsNotFound.length !== 0) {
-                     logger_1.default.error('It looks like you forgot to add these spec files:');
-                     specsNotFound.forEach(function (spec) { return logger_1.default.error(' - ' + spec); });
-                 }
-                 logger_1.default.error('Copy the new specs and try again.');
-                 return Promise.reject();
-             }
-             return exercise;
-         }, function (error) {
-             logger_1.default.debug(options, 'Error while computing the hash of the spec files. ', error);
-             return Promise.reject(error);
-         })
-             .then(function (exercise) { return resolve(exercise); }, function (error) { return reject(error); });
-     });
- };
- exports.default = hashChecker;
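
The deleted hash-checker.js (and its replacement in the new bundle, which switches from hash-files to a sha256 digest from node's crypto module) reads an exercise.json file with roughly this shape. The interface below is an editorial sketch derived from the fields accessed in the code above and in submit-results.js; it is not a type shipped by the package, and the primitive types are an assumption:

interface ExerciseMetadata {
  pack: string;    // exact primitive types are an assumption
  id: string;
  title: string;
  specs: Array<{ name: string; hash: string }>; // expected hash of each spec file
  score?: number;  // added by submit-results before posting
  token?: string;  // added after authentication
}
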
package/dist/jest.js
DELETED
@@ -1,72 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var utils_1 = require("./utils");
- var logger_1 = __importDefault(require("./logger"));
- var RESULT_FILE = 'results/jest-results.json';
- function computeScore(jestResults) {
-     // eslint-disable-next-line no-undefined
-     if (jestResults.failed === undefined) {
-         return 0;
-     }
-     var score = 100 - jestResults.failed * 5;
-     return score >= 0 ? score : 0;
- }
- function warnUserThatSomeTestsFailed(jestResults) {
-     var score = computeScore(jestResults);
-     logger_1.default.warn('Unit tests score: ' + score + '%');
-     logger_1.default.warn('Looks like you have ' + (jestResults.failed ? jestResults.failed : 'a few') + ' unit test(s) failing.');
-     logger_1.default.warn('Launch `npm run test:unit` and try to fix them.');
- }
- function warnUserThatJestFailed(reject) {
-     logger_1.default.error('Error while running unit tests.');
-     logger_1.default.error("Maybe you code doesn't compile?");
-     logger_1.default.error('Launch `npm run test:unit` to see more.');
-     return reject();
- }
- function handleJestResults(data, options, resolve, reject) {
-     var results = JSON.parse(data);
-     if (!results) {
-         logger_1.default.error('An error occurred during the unit tests.');
-         logger_1.default.error('Run `npm run test:unit` to see the problem.');
-         return reject();
-     }
-     var failed = results.numRuntimeErrorTestSuites
-         ? // we have failed test suites because of compilation errors
-             results.numFailedTests + results.numRuntimeErrorTestSuites
-         : // we just have failed tests
-             results.numFailedTests;
-     var jestResults = {
-         failed: failed,
-         success: results.numPassedTests
-     };
-     if (jestResults.failed !== 0) {
-         warnUserThatSomeTestsFailed(jestResults);
-     }
-     else {
-         logger_1.default.success('Unit tests score: 100%');
-     }
-     return resolve(jestResults);
- }
- var jest = function (options) {
-     return new Promise(function (resolve, reject) {
-         logger_1.default.log('Starting unit tests...');
-         // jest exit code is 1 if a test fails
-         // so we'll go into the error handler even if the execution went well
-         // and we try to read the results in both cases
-         return ((0, utils_1.exec)('npm run test:unit -- --no-cache --run-in-band --collect-coverage --reporters=default --reporters=jest-stare')
-             .then(function () { return (0, utils_1.readFile)(RESULT_FILE); }, function (error) {
-             logger_1.default.debug(options, 'Jest test failed', error);
-             return (0, utils_1.readFile)(RESULT_FILE);
-         })
-             // if results, great we handle them
-             // if not we warn the user
-             .then(function (data) { return handleJestResults(data, options, resolve, reject); }, function (error) {
-             logger_1.default.debug(options, 'An error occurred while reading jest results: ', error);
-             return warnUserThatJestFailed(reject);
-         }));
-     });
- };
- exports.default = jest;
package/dist/lint.js
DELETED
@@ -1,51 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var logger_1 = __importDefault(require("./logger"));
- var utils_1 = require("./utils");
- var RESULT_FILE = 'results/lint-results.json';
- var lint = function (options) {
-     return new Promise(function (resolve, reject) {
-         logger_1.default.log('Starting code analysis...');
-         return (0, utils_1.exec)("npm run lint -- --no-fix --format=json --output-file=".concat(RESULT_FILE))
-             .then(function () { return (0, utils_1.readFile)(RESULT_FILE); },
-         // eslint will error if there is a warning
-         function () { return (0, utils_1.readFile)(RESULT_FILE); })
-             .then(function (data) {
-             if (data) {
-                 return JSON.parse(data);
-             }
-             return Promise.reject();
-         }, function (error) {
-             logger_1.default.error('Error while running `npm run lint`.');
-             logger_1.default.error('Try to run `npm run lint`.');
-             logger_1.default.debug(options, 'Error while running lint was: ', error);
-             reject();
-         })
-             .then(function (results) {
-             if (!results) {
-                 return reject();
-             }
-             var lintResults = { errors: 0 };
-             lintResults.errors = results.map(function (file) { return file.errorCount + file.warningCount; }).reduce(function (a, b) { return a + b; }, 0);
-             if (lintResults.errors !== 0) {
-                 var score = Math.max(0, 100 - lintResults.errors);
-                 logger_1.default.warn('Code quality score: ' + score + '%');
-                 logger_1.default.warn('Looks like you have some issues in your code.');
-                 logger_1.default.warn('Try to run `npm run lint` to see what you can fix.');
-             }
-             else {
-                 logger_1.default.success('Code quality score: 100%');
-             }
-             resolve(lintResults);
-         }, function (error) {
-             logger_1.default.error('Error while reading lint results.');
-             logger_1.default.error('Try to run `npm run lint`.');
-             logger_1.default.debug(options, 'Error while reading lint errors was: ', error);
-             reject();
-         });
-     });
- };
- exports.default = lint;
package/dist/logger.js
DELETED
@@ -1,23 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var chalk_1 = __importDefault(require("chalk"));
- var logger = {
-     log: console.log,
-     error: function (value) { return console.log(chalk_1.default.red(value)); },
-     warn: function (value) { return console.log(chalk_1.default.yellow(value)); },
-     success: function (value) { return console.log(chalk_1.default.green(value)); },
-     debug: function (options, value, arg) {
-         if (options.verbose) {
-             if (arg) {
-                 console.log(chalk_1.default.gray(value), chalk_1.default.gray(arg));
-             }
-             else {
-                 console.log(chalk_1.default.gray(value));
-             }
-         }
-     }
- };
- exports.default = logger;
package/dist/models.js
DELETED
package/dist/submit-results.js
DELETED
@@ -1,131 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var logger_1 = __importDefault(require("./logger"));
- var utils_1 = require("./utils");
- var BASE_PATH = 'https://vue-exercises.ninja-squad.com';
- var LOCALHOST = 'http://localhost:8080';
- function storeCredentials(metadata, token) {
-     logger_1.default.success('Authentication success');
-     metadata.token = token;
-     return metadata;
- }
- function login(options, loginAttempts, metadata) {
-     // eslint-disable-next-line no-param-reassign
-     loginAttempts = loginAttempts + 1;
-     if (loginAttempts > 3) {
-         logger_1.default.error('Aborting authentication after 3 failures');
-         return Promise.reject();
-     }
-     var schema = {
-         properties: {
-             email: {
-                 required: true
-             },
-             password: {
-                 hidden: true,
-                 required: true
-             }
-         }
-     };
-     return (0, utils_1.prompt)(schema)
-         .then(function (result) { return (0, utils_1.post)("".concat(options['local-server'] ? LOCALHOST : BASE_PATH, "/api/authentications"), result); })
-         .then(function (_a) {
-         var response = _a.response, body = _a.body;
-         if (response.statusCode !== 201) {
-             logger_1.default.error('Authentication failed, try again.');
-             logger_1.default.debug(options, 'Authentication request returned with status: ', JSON.stringify(response.statusCode));
-             return login(options, loginAttempts, metadata);
-         }
-         logger_1.default.debug(options, 'Authentication succeeded and we can store credentials');
-         return storeCredentials(metadata, body.token);
-     }, function (error) {
-         logger_1.default.error('Authentication failed, try again.');
-         logger_1.default.debug(options, 'An error occurred during authentication: ', error);
-         return login(options, loginAttempts, metadata);
-     });
- }
- function computeScore(metadata, results, options) {
-     var score = 100 - results.vitest.failed * 5 - results.tslint.errors - (results.coverage.total - results.coverage.covered) - results.e2e.failed;
-     if (score < 10) {
-         score = 10;
-     }
-     if (options.local) {
-         // if we are running locally we want to return a failure
-         if (score !== 100) {
-             logger_1.default.error('Score is not perfect! ' + score + '%');
-             (0, utils_1.exitProcessWithError)(1);
-         }
-         logger_1.default.success('Exercise score: ' + score + '%');
-     }
-     else if (score !== 100) {
-         logger_1.default.warn('Score is not perfect yet: ' + score + '%');
-     }
-     else {
-         logger_1.default.success('Perfect score, congrats! ' + score + '%');
-     }
-     return { metadata: metadata, score: score };
- }
- function postScore(options, metadata) {
-     var headers = { Authorization: "Bearer: ".concat(metadata.token) };
-     var path = options['local-server'] ? LOCALHOST : BASE_PATH;
-     var bodyToSubmit = { score: metadata.score };
-     return (0, utils_1.post)("".concat(path, "/api/packs/").concat(metadata.pack, "/exercises/").concat(metadata.id, "/scores"), bodyToSubmit, headers).then(function (_a) {
-         var response = _a.response, body = _a.body;
-         // if authentication error
-         if (response.statusCode === 401) {
-             logger_1.default.error('Authentication failed. Please enter your credentials again.');
-             var loginAttempts = 0;
-             // we try to reconnect 3 times
-             // then submit or fail
-             return login(options, loginAttempts, metadata).then(function (updatedMetadata) { return postScore(options, updatedMetadata); }, function (error) {
-                 logger_1.default.error('Error while authenticating to submit results.');
-                 logger_1.default.debug(options, 'Error was: ', error);
-                 return Promise.reject();
-             });
-         }
-         else if (response.statusCode !== 201) {
-             logger_1.default.error('Error while sending the results.');
-             logger_1.default.debug(options, JSON.stringify(response.statusCode), JSON.stringify(body));
-             return Promise.reject();
-         }
-         logger_1.default.log('Score submitted.');
-         return Promise.resolve(metadata);
-     }, function (error) {
-         logger_1.default.error('Error while sending the results.');
-         logger_1.default.error('Maybe check your connection?');
-         logger_1.default.debug(options, 'Error while sending the result was: ', error);
-         return Promise.reject();
-     });
- }
- var submitResults = function (results, options) {
-     return (0, utils_1.readFile)('./exercise.json')
-         .then(function (data) { return JSON.parse(data); }, function (error) {
-         logger_1.default.error('Looks like you have no exercise.json file.');
-         logger_1.default.debug(options, 'An error occurred while reading exercise.json: ', error);
-         return Promise.reject();
-     })
-         .then(function (metadata) { return computeScore(metadata, results, options); }, function (error) {
-         logger_1.default.error('Looks like you have an incorrect JSON in the exercise.json file.');
-         logger_1.default.debug(options, 'An error occurred while parsing exercise.json: ', error);
-         return Promise.reject();
-     })
-         .then(function (_a) {
-         var metadata = _a.metadata, score = _a.score;
-         metadata.score = score;
-         var loginAttempts = 0;
-         if (options.local) {
-             return metadata;
-         }
-         return login(options, loginAttempts, metadata);
-     })
-         .then(function (metadata) {
-         if (!options.local) {
-             return postScore(options, metadata);
-         }
-         return metadata;
-     });
- };
- exports.default = submitResults;
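
The score submission itself is a single POST: the deleted code above goes through request, while the new bundle uses axios. A minimal sketch of the equivalent call (illustrative, not the shipped implementation):

import axios from 'axios';

const BASE_PATH = 'https://vue-exercises.ninja-squad.com';

async function postScore(metadata: { pack: string; id: string; token: string; score: number },
                         localServer = false): Promise<void> {
  const base = localServer ? 'http://localhost:8080' : BASE_PATH;
  await axios.post(
    `${base}/api/packs/${metadata.pack}/exercises/${metadata.id}/scores`,
    { score: metadata.score },
    // note: the header value really is "Bearer: <token>", colon included, as in the source above
    { headers: { Authorization: `Bearer: ${metadata.token}` } }
  );
}
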
package/dist/utils.js
DELETED
@@ -1,64 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.exitProcessWithError = exports.listFiles = exports.post = exports.prompt = exports.exec = exports.readFile = void 0;
- var fs_1 = __importDefault(require("fs"));
- var child_process_1 = __importDefault(require("child_process"));
- var request_1 = __importDefault(require("request"));
- var prompt_1 = __importDefault(require("prompt"));
- var util_1 = __importDefault(require("util"));
- var glob_1 = __importDefault(require("glob"));
- var logger_1 = __importDefault(require("./logger"));
- var execAsync = util_1.default.promisify(child_process_1.default.exec);
- var readFileAsync = util_1.default.promisify(fs_1.default.readFile);
- var readFile = function (file) { return readFileAsync(file, 'utf8'); };
- exports.readFile = readFile;
- var exec = function (cmd) { return execAsync(cmd, { maxBuffer: 1024 * 1000 }); };
- exports.exec = exec;
- var prompt = function (schema) {
-     return new Promise(function (resolve, reject) {
-         prompt_1.default.start();
-         prompt_1.default.get(schema, function (err, result) {
-             if (err) {
-                 reject();
-             }
-             else {
-                 resolve(result);
-             }
-         });
-     });
- };
- exports.prompt = prompt;
- var post = function (url, json, headers) {
-     return new Promise(function (resolve, reject) {
-         request_1.default.post({ url: url, json: json, headers: headers }, function (err, response, body) {
-             if (err) {
-                 reject(err);
-             }
-             else {
-                 resolve({ response: response, body: body });
-             }
-         });
-     });
- };
- exports.post = post;
- var listFiles = function (pattern) {
-     return new Promise(function (resolve, reject) {
-         (0, glob_1.default)(pattern, function (err, files) {
-             if (err) {
-                 logger_1.default.error('Could not find the spec files.');
-                 reject(err);
-             }
-             else {
-                 resolve(files);
-             }
-         });
-     });
- };
- exports.listFiles = listFiles;
- var exitProcessWithError = function (code) {
-     process.exit(code);
- };
- exports.exitProcessWithError = exitProcessWithError;
package/dist/vitest.js
DELETED
@@ -1,68 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- var utils_1 = require("./utils");
- var logger_1 = __importDefault(require("./logger"));
- var RESULT_FILE = 'results/vitest-results.json';
- function computeScore(vitestResults) {
-     // eslint-disable-next-line no-undefined
-     if (vitestResults.failed === undefined) {
-         return 0;
-     }
-     var score = 100 - vitestResults.failed * 5;
-     return score >= 0 ? score : 0;
- }
- function warnUserThatSomeTestsFailed(vitestResults) {
-     var score = computeScore(vitestResults);
-     logger_1.default.warn('Unit tests score: ' + score + '%');
-     logger_1.default.warn('Looks like you have ' + (vitestResults.failed ? vitestResults.failed : 'a few') + ' unit test(s) failing.');
-     logger_1.default.warn('Launch `npm run test:unit` and try to fix them.');
- }
- function warnUserThatVitestFailed(reject) {
-     logger_1.default.error('Error while running unit tests.');
-     logger_1.default.error("Maybe your code doesn't compile?");
-     logger_1.default.error('Launch `npm run test:unit` to see more.');
-     return reject();
- }
- function handleVitestResults(data, options, resolve, reject) {
-     var results = JSON.parse(data);
-     if (!results) {
-         logger_1.default.error('An error occurred during the unit tests.');
-         logger_1.default.error('Run `npm run test:unit` to see the problem.');
-         return reject();
-     }
-     var failed = results.numFailedTests ? results.numFailedTests : results.numFailedTestSuites;
-     var vitestResults = {
-         failed: failed,
-         success: results.numPassedTests
-     };
-     if (vitestResults.failed !== 0) {
-         warnUserThatSomeTestsFailed(vitestResults);
-     }
-     else {
-         logger_1.default.success('Unit tests score: 100%');
-     }
-     return resolve(vitestResults);
- }
- var vitest = function (options) {
-     return new Promise(function (resolve, reject) {
-         logger_1.default.log('Starting unit tests...');
-         // vitest exit code is 1 if a test fails
-         // so we'll go into the error handler even if the execution went well
-         // and we try to read the results in both cases
-         return ((0, utils_1.exec)("npm run test:unit:ci")
-             .then(function () { return (0, utils_1.readFile)(RESULT_FILE); }, function (error) {
-             logger_1.default.debug(options, 'Vitest test failed', error);
-             return (0, utils_1.readFile)(RESULT_FILE);
-         })
-             // if results, great we handle them
-             // if not we warn the user
-             .then(function (data) { return handleVitestResults(data, options, resolve, reject); }, function (error) {
-             logger_1.default.debug(options, 'An error occurred while reading vitest results: ', error);
-             return warnUserThatVitestFailed(reject);
-         }));
-     });
- };
- exports.default = vitest;