contensis-cli 1.1.1 → 1.1.2-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/factories/RequestHandlerFactory.js +203 -0
- package/dist/factories/RequestHandlerFactory.js.map +7 -0
- package/dist/localisation/en-GB.js +28 -5
- package/dist/localisation/en-GB.js.map +2 -2
- package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js +119 -0
- package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js.map +7 -0
- package/dist/providers/GitHubCliModuleProvider.js +107 -0
- package/dist/providers/GitHubCliModuleProvider.js.map +7 -0
- package/dist/providers/HttpProvider.js +72 -0
- package/dist/providers/HttpProvider.js.map +7 -0
- package/dist/providers/ManifestProvider.js +53 -0
- package/dist/providers/ManifestProvider.js.map +7 -0
- package/dist/providers/file-provider.js +11 -0
- package/dist/providers/file-provider.js.map +2 -2
- package/dist/services/ContensisAuthService.js +19 -11
- package/dist/services/ContensisAuthService.js.map +2 -2
- package/dist/services/ContensisCliService.js +27 -14
- package/dist/services/ContensisCliService.js.map +2 -2
- package/dist/services/ContensisDevService.js +10 -59
- package/dist/services/ContensisDevService.js.map +3 -3
- package/dist/util/debug.js +29 -0
- package/dist/util/debug.js.map +7 -0
- package/dist/util/fetch.js +65 -0
- package/dist/util/fetch.js.map +7 -0
- package/dist/util/index.js.map +1 -1
- package/dist/util/logger.js.map +2 -2
- package/dist/version.js +1 -1
- package/dist/version.js.map +1 -1
- package/package.json +4 -1
- package/src/factories/RequestHandlerFactory.ts +225 -0
- package/src/localisation/en-GB.ts +54 -12
- package/src/mappers/DevRequests-to-RequestHanderCliArgs.ts +145 -0
- package/src/providers/GitHubCliModuleProvider.ts +114 -0
- package/src/providers/HttpProvider.ts +50 -0
- package/src/providers/ManifestProvider.ts +43 -0
- package/src/providers/file-provider.ts +9 -0
- package/src/services/ContensisAuthService.ts +23 -14
- package/src/services/ContensisCliService.ts +30 -15
- package/src/services/ContensisDevService.ts +19 -85
- package/src/util/debug.ts +1 -0
- package/src/util/fetch.ts +74 -0
- package/src/util/index.ts +1 -1
- package/src/util/logger.ts +0 -1
- package/src/version.ts +1 -1
- package/dist/mappers/DevRequests-to-RequestHanderSiteConfigYaml.js +0 -56
- package/dist/mappers/DevRequests-to-RequestHanderSiteConfigYaml.js.map +0 -7
- package/src/mappers/DevRequests-to-RequestHanderSiteConfigYaml.ts +0 -44
package/dist/providers/GitHubCliModuleProvider.js

@@ -0,0 +1,107 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var GitHubCliModuleProvider_exports = {};
+__export(GitHubCliModuleProvider_exports, {
+  default: () => GitHubCliModuleProvider_default
+});
+module.exports = __toCommonJS(GitHubCliModuleProvider_exports);
+var import_os = __toESM(require("os"));
+var import_adm_zip = __toESM(require("adm-zip"));
+var import_HttpProvider = __toESM(require("../providers/HttpProvider"));
+var import_file_provider = require("./file-provider");
+var import_fetch = require("../util/fetch");
+class GitHubCliModuleProvider {
+  http;
+  repo;
+  baseUrl = "https://api.github.com/repos";
+  get releases_url() {
+    return `${this.baseUrl}/${this.repo}/releases`;
+  }
+  get latest_release_url() {
+    return `${this.baseUrl}/${this.repo}/releases/latest`;
+  }
+  download;
+  constructor(repo) {
+    this.http = new import_HttpProvider.default();
+    this.repo = repo;
+  }
+  async FindLatestRelease() {
+    const { http, latest_release_url, releases_url } = this;
+    const responses = await Promise.all([
+      http.get(latest_release_url, {
+        doRetry: (0, import_fetch.doRetry)({ silent: true })
+      }),
+      http.get(releases_url)
+    ]);
+    const [latestErr, latest, latestResponse] = responses[0];
+    const [releasesErr, releases] = responses[1];
+    if (releasesErr) {
+      throw new Error(`Unable to get releases`, { cause: releasesErr });
+    } else if (!releases || releases.length === 0)
+      throw new Error(`No releases available`);
+    else if (latestErr && !latest) {
+      if ((latestResponse == null ? void 0 : latestResponse.status) === 404 && (releases == null ? void 0 : releases.length)) {
+        const release = releases.find((r) => r.prerelease) || releases[0];
+        if (release) {
+          return release;
+        }
+      }
+    } else {
+      return latest;
+    }
+  }
+  async DownloadRelease(release, {
+    path,
+    platforms,
+    unzip = true
+  }) {
+    const platform = platforms.find((p) => p[0] === import_os.default.platform()) || [
+      import_os.default.platform(),
+      import_os.default.platform()
+    ];
+    const asset = release.assets.find(
+      (r) => r.name.toLowerCase().includes(platform[1])
+    );
+    if (asset) {
+      const filePath = (0, import_file_provider.joinPath)(path, asset.name);
+      (0, import_file_provider.removeDirectory)(path);
+      (0, import_file_provider.checkDir)(filePath);
+      await this.http.downloadFile(asset.browser_download_url, filePath);
+      if (unzip && asset.name.endsWith(".zip")) {
+        const zipFile = new import_adm_zip.default(filePath);
+        zipFile.extractAllTo(path);
+        (0, import_file_provider.removeFile)(filePath);
+      }
+    } else
+      throw new Error(
+        `no asset found in release ${release.tag_name} for platform ${import_os.default.platform()}
+${release.html_url}`
+      );
+  }
+}
+var GitHubCliModuleProvider_default = GitHubCliModuleProvider;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {});
+//# sourceMappingURL=GitHubCliModuleProvider.js.map

package/dist/providers/GitHubCliModuleProvider.js.map

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../src/providers/GitHubCliModuleProvider.ts"],
+  "sourcesContent": ["…"],
+  "mappings": "…",
+  "names": ["HttpProvider", "os", "Zip"]
+}

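For reference, the new provider is constructed with an `owner/repo` slug and exposes `FindLatestRelease` and `DownloadRelease`, as seen in the compiled output above. A minimal usage sketch in TypeScript, assuming the `~` path alias used in the package source; the repo slug, target path, and platform asset names here are illustrative only:

```typescript
import GitHubCliModuleProvider from '~/providers/GitHubCliModuleProvider';

// Hypothetical repo slug, for illustration only
const provider = new GitHubCliModuleProvider('contensis/cli-example-module');

const installExample = async () => {
  // Resolves the latest release, falling back to a prerelease (or the newest
  // release) when the /releases/latest endpoint returns 404
  const release = await provider.FindLatestRelease();
  if (!release) return;

  // Downloads the os-specific asset and unzips it into the target path
  await provider.DownloadRelease(release, {
    path: 'cli-modules/example', // hypothetical install path
    unzip: true,
    platforms: [
      ['win32', 'windows'],
      ['darwin', 'macos'],
      ['linux', 'linux'],
    ],
  });
};
```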
package/dist/providers/HttpProvider.js

@@ -0,0 +1,72 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to2, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to2, key) && key !== except)
+        __defProp(to2, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to2;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var HttpProvider_exports = {};
+__export(HttpProvider_exports, {
+  default: () => HttpProvider_default
+});
+module.exports = __toCommonJS(HttpProvider_exports);
+var import_await_to_js = __toESM(require("await-to-js"));
+var import_fs = __toESM(require("fs"));
+var import_stream = require("stream");
+var import_promises = require("stream/promises");
+var import_util = require("../util");
+var import_fetch = require("../util/fetch");
+class HttpProvider {
+  constructor() {
+  }
+  async get(url, init = {}) {
+    return this.fetch(url, { method: "GET", ...init });
+  }
+  async fetch(uri, init = {}) {
+    const [error, response] = await (0, import_await_to_js.default)((0, import_fetch.enhancedFetch)(uri, init));
+    if (response && !error) {
+      const [bodyError, text] = await (0, import_await_to_js.default)(response.text());
+      if (bodyError)
+        return [bodyError, void 0, response];
+      if ((0, import_util.isJson)(text)) {
+        const err = !response.status || !response.ok ? (0, import_util.tryParse)(text) : void 0;
+        const payload = response.status && response.ok ? (0, import_util.tryParse)(text) : void 0;
+        return [err, payload, response];
+      }
+      return [
+        response.ok ? null : new Error(text),
+        response.ok ? text : void 0,
+        response
+      ];
+    }
+    return [error, void 0, response];
+  }
+  async downloadFile(url, destination) {
+    const res = await fetch(url);
+    if (res.ok && res.body !== null) {
+      const fileStream = import_fs.default.createWriteStream(destination, { flags: "wx" });
+      await (0, import_promises.finished)(import_stream.Readable.fromWeb(res.body).pipe(fileStream));
+    }
+  }
+}
+var HttpProvider_default = HttpProvider;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {});
+//# sourceMappingURL=HttpProvider.js.map

package/dist/providers/HttpProvider.js.map

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../src/providers/HttpProvider.ts"],
+  "sourcesContent": ["…"],
+  "mappings": "…",
+  "names": ["to", "fs"]
+}

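The provider's `get`/`fetch` methods resolve to an `[error, payload, response]` tuple rather than throwing, and `downloadFile` streams a URL to disk, per the source embedded in the map above. A rough usage sketch; the URLs, file paths, and generic type argument are placeholders, not values from the package:

```typescript
import HttpProvider from '~/providers/HttpProvider';

const http = new HttpProvider();

const example = async () => {
  // JSON bodies are parsed with tryParse; non-ok responses surface as `err`
  const [err, releases, response] = await http.get<any[]>(
    'https://api.github.com/repos/owner/repo/releases' // placeholder URL
  );
  if (err) console.error(response?.status, err);
  else console.log(`fetched ${releases?.length ?? 0} items`);

  // Streams the response body to a local file (opened with flags: 'wx')
  await http.downloadFile(
    'https://example.com/asset.zip', // placeholder URL
    'downloads/asset.zip' // placeholder destination
  );
};
```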
package/dist/providers/ManifestProvider.js

@@ -0,0 +1,53 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var ManifestProvider_exports = {};
+__export(ManifestProvider_exports, {
+  default: () => ManifestProvider_default
+});
+module.exports = __toCommonJS(ManifestProvider_exports);
+var import_util = require("../util");
+var import_file_provider = require("./file-provider");
+const MANIFEST_PATH = (0, import_file_provider.appPath)("cli-manifest.json");
+class ManifestProvider {
+  manifest;
+  constructor() {
+    const manifest = (0, import_util.tryParse)((0, import_file_provider.readFile)(MANIFEST_PATH));
+    this.manifest = manifest || {};
+  }
+  get() {
+    return this.manifest;
+  }
+  getModule(name) {
+    var _a;
+    return (_a = this.manifest) == null ? void 0 : _a[name];
+  }
+  writeModule(name, moduleInfo) {
+    if (this.manifest)
+      this.manifest[name] = moduleInfo;
+    else
+      this.manifest = {
+        [name]: moduleInfo
+      };
+    (0, import_file_provider.writeFile)(MANIFEST_PATH, JSON.stringify(this.manifest, null, 2));
+  }
+}
+var ManifestProvider_default = ManifestProvider;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {});
+//# sourceMappingURL=ManifestProvider.js.map

package/dist/providers/ManifestProvider.js.map

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../src/providers/ManifestProvider.ts"],
+  "sourcesContent": ["…"],
+  "mappings": "…",
+  "names": []
+}

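ManifestProvider persists a `cli-manifest.json` keyed by module name under the CLI's app root. A brief sketch of how it might be read and written, following the `CliModule` type in the embedded source; the module name and field values below are illustrative:

```typescript
import ManifestProvider from '~/providers/ManifestProvider';

const manifest = new ManifestProvider();

// Look up a previously recorded module entry (undefined if not present)
const existing = manifest.getModule('example-module');

// Record (or update) a module entry; this rewrites cli-manifest.json on disk
manifest.writeModule('example-module', {
  github: 'owner/example-module', // repo slug, illustrative
  version: '1.0.0',
  cmd: 'example-module',
});
```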
package/dist/providers/file-provider.js

@@ -28,10 +28,12 @@ __export(file_provider_exports, {
   appRootDir: () => appRootDir,
   checkDir: () => checkDir,
   cwdPath: () => cwdPath,
+  joinPath: () => joinPath,
   moveFile: () => moveFile,
   readFile: () => readFile,
   readFileAsJSON: () => readFileAsJSON,
   readFiles: () => readFiles,
+  removeDirectory: () => removeDirectory,
   removeFile: () => removeFile,
   writeFile: () => writeFile
 });
@@ -77,6 +79,12 @@ const removeFile = (filePath) => {
     import_fs.default.rmSync(directoryPath);
   }
 };
+const removeDirectory = (filePath) => {
+  const directoryPath = appPath(filePath);
+  if (import_fs.default.existsSync(directoryPath)) {
+    import_fs.default.rmSync(directoryPath, { force: true, recursive: true });
+  }
+};
 const moveFile = (file, fromPath, toPath) => {
   const from = import_path.default.join(appRootDir, `${fromPath}${file}`);
   const to = import_path.default.join(appRootDir, `${toPath}${file}`);
@@ -104,6 +112,7 @@ const checkDir = (filePath) => {
 };
 const appPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(appRootDir, filePath);
 const cwdPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(process.cwd(), filePath);
+const joinPath = import_path.default.join;
 const detectFileType = (fromFile) => {
   const fileData = readFile(fromFile);
   if (!fileData)
@@ -148,10 +157,12 @@ const readFileAsJSON = async (fromFile) => {
   appRootDir,
   checkDir,
   cwdPath,
+  joinPath,
   moveFile,
   readFile,
   readFileAsJSON,
   readFiles,
+  removeDirectory,
   removeFile,
   writeFile
 });

package/dist/providers/file-provider.js.map

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/providers/file-provider.ts"],
-  "sourcesContent": ["…"],
-  "mappings": "…",
+  "sourcesContent": ["…"],
+  "mappings": "…",
   "names": ["path", "fs"]
 }

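The two new helpers are thin wrappers: `joinPath` re-exports `path.join`, and `removeDirectory` resolves its argument against the CLI's app root before removing it recursively. A short sketch of how they combine with the existing `checkDir`, mirroring the download flow in GitHubCliModuleProvider; the paths shown are hypothetical:

```typescript
import { checkDir, joinPath, removeDirectory } from '~/providers/file-provider';

// Build a download target inside an app-root-relative module directory
const target = joinPath('cli-modules/example', 'asset.zip'); // hypothetical path

// Clear any previous download, then ensure the parent directory exists
removeDirectory('cli-modules/example');
checkDir(target);
```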
package/dist/services/ContensisAuthService.js

@@ -24,6 +24,13 @@ module.exports = __toCommonJS(ContensisAuthService_exports);
 var import_client = require("contensis-management-api/lib/client");
 class ContensisAuthService {
   client;
+  credentials;
+  get clientType() {
+    return this.credentials.clientType;
+  }
+  get clientDetails() {
+    return this.credentials.clientDetails;
+  }
   constructor({
     clientId = "",
     clientSecret = "",
@@ -33,35 +40,36 @@ class ContensisAuthService {
     projectId,
     rootUrl
   }) {
-
-
-      credentials = {
+    if (clientId && clientSecret)
+      this.credentials = {
         clientType: "client_credentials",
         clientDetails: {
           clientId,
           clientSecret
         }
       };
-
-      credentials = {
+    else if (username && password)
+      this.credentials = {
         clientType: "contensis_classic",
         clientDetails: {
           username,
           password
         }
       };
-
-      credentials = {
+    else if (refreshToken)
+      this.credentials = {
         clientType: "contensis_classic_refresh_token",
         clientDetails: {
           refreshToken
         }
       };
-
-      credentials = {
-
+    else
+      this.credentials = {
+        clientType: "none",
+        clientDetails: { refreshToken: "" }
+      };
     this.client = import_client.NodejsClient.create({
-      ...credentials,
+      ...this.credentials,
       projectId,
       rootUrl
     });

package/dist/services/ContensisAuthService.js.map

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/services/ContensisAuthService.ts"],
-  "sourcesContent": ["…"],
-  "mappings": "…",
+  "sourcesContent": ["…"],
+  "mappings": "…",
   "names": []
 }

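The constructor now stores the resolved grant on `this.credentials` and exposes it through the new `clientType`/`clientDetails` getters, choosing `client_credentials`, `contensis_classic`, `contensis_classic_refresh_token`, or `none` depending on which inputs are supplied. A sketch of that selection; the ids, secrets, token, and CMS URL below are placeholders:

```typescript
import ContensisAuthService from '~/services/ContensisAuthService';

// Supplying a client id and secret selects the client_credentials grant
const withKeys = new ContensisAuthService({
  clientId: '00000000-0000-0000-0000-000000000000', // placeholder
  clientSecret: 'xxxxxxxx', // placeholder
  projectId: 'website',
  rootUrl: 'https://cms-example.cloud.contensis.com', // placeholder CMS url
});
console.log(withKeys.clientType); // "client_credentials"

// With only a refresh token, the classic refresh-token grant is used instead
const withToken = new ContensisAuthService({
  refreshToken: 'xxxxxxxx', // placeholder
  projectId: 'website',
  rootUrl: 'https://cms-example.cloud.contensis.com',
});
console.log(withToken.clientType); // "contensis_classic_refresh_token"
```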
package/dist/services/ContensisCliService.js

@@ -43,12 +43,13 @@ var import_CredentialProvider = __toESM(require("../providers/CredentialProvider
 var import_util = require("../util");
 var import_console = require("../util/console.printer");
 var import_csv = require("../util/csv.formatter");
-var import_xml = require("../util/xml.formatter");
 var import_json = require("../util/json.formatter");
+var import_xml = require("../util/xml.formatter");
+var import_debug = require("../util/debug");
 var import_diff = require("../util/diff");
+var import_find = require("../util/find");
 var import_logger = require("../util/logger");
 var import_timers = require("../util/timers");
-var import_find = require("../util/find");
 let insecurePasswordWarningShown = false;
 class ContensisCli {
   static quit = (error) => {
@@ -64,6 +65,7 @@ class ContensisCli {
   contensis;
   contensisOpts;
   currentProject;
+  debug = (0, import_debug.isDebug)();
   sourceAlias;
   targetEnv;
   urls;
@@ -1611,7 +1613,7 @@ Components:`));
     const contensis = await this.ConnectContensis();
     if (contensis) {
       const [err, blocks] = await contensis.blocks.GetBlocks();
-      if (Array.isArray(blocks)) {
+      if (Array.isArray(blocks) && blocks.length) {
         await this.HandleFormattingAndOutput(blocks, () => {
           log.success(messages.blocks.list(currentEnv, env.currentProject));
           for (const {
@@ -1634,8 +1636,7 @@ Components:`));
         return blocks;
       }
       if (err) {
-        log.error(messages.blocks.noList(currentEnv));
-        log.error((0, import_json.jsonFormatter)(err));
+        log.error(messages.blocks.noList(currentEnv, env.currentProject));
       }
     }
   };
@@ -1648,10 +1649,27 @@
         branch,
         version
       );
-      if (blocks) {
+      if (err || (blocks == null ? void 0 : blocks.length) === 0) {
+        log.warning(
+          messages.blocks.noGet(
+            blockId,
+            branch,
+            version,
+            currentEnv,
+            env.currentProject
+          )
+        );
+        log.help(messages.blocks.noGetTip());
+      } else if (blocks) {
         await this.HandleFormattingAndOutput(blocks, () => {
           log.success(
-            messages.blocks.get(
+            messages.blocks.get(
+              blockId,
+              branch,
+              version,
+              currentEnv,
+              env.currentProject
+            )
           );
           for (const block of blocks)
             (0, import_console.printBlockVersion)(
@@ -1667,10 +1685,6 @@
         });
         return blocks;
       }
-      if (err) {
-        log.error(messages.blocks.noList(currentEnv, env.currentProject));
-        log.error((0, import_json.jsonFormatter)(err));
-      }
     }
   };
   PushBlock = async (block) => {
@@ -1915,9 +1929,7 @@
     const { currentEnv, env, log, messages } = this;
     const contensis = await this.ConnectContensis();
     if (contensis) {
-      const [err, renderers] = await contensis.renderers.GetRenderers(
-        rendererId
-      );
+      const [err, renderers] = await contensis.renderers.GetRenderers();
       if (Array.isArray(renderers)) {
         await this.HandleFormattingAndOutput(renderers, () => {
           log.success(messages.renderers.list(currentEnv, env.currentProject));
@@ -1944,6 +1956,7 @@
           );
         }
       });
+      return renderers;
     }
     if (err) {
       log.error(messages.renderers.noList(currentEnv, env.currentProject));