contensis-cli 1.0.0-beta.89 → 1.0.0-beta.90
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.js +3 -0
- package/dist/commands/dev.js.map +2 -2
- package/dist/localisation/en-GB.js +14 -6
- package/dist/localisation/en-GB.js.map +2 -2
- package/dist/mappers/DevInit-to-CIWorkflow.js +127 -0
- package/dist/mappers/DevInit-to-CIWorkflow.js.map +7 -0
- package/dist/mappers/DevInit-to-RolePermissions.js +54 -0
- package/dist/mappers/DevInit-to-RolePermissions.js.map +7 -0
- package/dist/mappers/DevRequests-to-RequestHanderSiteConfigYaml.js +56 -0
- package/dist/mappers/DevRequests-to-RequestHanderSiteConfigYaml.js.map +7 -0
- package/dist/models/DevService.d.js +17 -0
- package/dist/models/DevService.d.js.map +7 -0
- package/dist/services/ContensisDevService.js +57 -64
- package/dist/services/ContensisDevService.js.map +2 -2
- package/dist/services/ContensisRoleService.js +7 -12
- package/dist/services/ContensisRoleService.js.map +2 -2
- package/dist/util/diff.js +63 -0
- package/dist/util/diff.js.map +3 -3
- package/dist/util/dotenv.js +57 -0
- package/dist/util/dotenv.js.map +7 -0
- package/dist/util/git.js +8 -1
- package/dist/util/git.js.map +2 -2
- package/dist/util/logger.js +12 -5
- package/dist/util/logger.js.map +2 -2
- package/dist/util/yaml.js +45 -0
- package/dist/util/yaml.js.map +7 -0
- package/dist/version.js +1 -1
- package/dist/version.js.map +1 -1
- package/package.json +3 -1
- package/src/commands/dev.ts +1 -0
- package/src/localisation/en-GB.ts +18 -8
- package/src/mappers/DevInit-to-CIWorkflow.ts +150 -0
- package/src/mappers/DevInit-to-RolePermissions.ts +33 -0
- package/src/mappers/DevRequests-to-RequestHanderSiteConfigYaml.ts +44 -0
- package/src/models/DevService.d.ts +5 -0
- package/src/services/ContensisDevService.ts +66 -64
- package/src/services/ContensisRoleService.ts +7 -15
- package/src/util/diff.ts +96 -0
- package/src/util/dotenv.ts +37 -0
- package/src/util/git.ts +19 -7
- package/src/util/logger.ts +11 -5
- package/src/util/yaml.ts +13 -0
- package/src/version.ts +1 -1
package/dist/services/ContensisDevService.js
CHANGED
@@ -34,11 +34,15 @@ var import_inquirer = __toESM(require("inquirer"));
 var import_path = __toESM(require("path"));
 var import_yaml = require("yaml");
 var import_ContensisRoleService = __toESM(require("./ContensisRoleService"));
-var
+var import_DevRequests_to_RequestHanderSiteConfigYaml = require("../mappers/DevRequests-to-RequestHanderSiteConfigYaml");
+var import_DevInit_to_RolePermissions = require("../mappers/DevInit-to-RolePermissions");
 var import_file_provider = require("../providers/file-provider");
 var import_json = require("../util/json.formatter");
 var import_git = require("../util/git");
 var import_find = require("../util/find");
+var import_dotenv = require("../util/dotenv");
+var import_DevInit_to_CIWorkflow = require("../mappers/DevInit-to-CIWorkflow");
+var import_diff = require("../util/diff");
 class ContensisDev extends import_ContensisRoleService.default {
 constructor(args, outputOpts, contensisOpts = {}) {
 super(args, outputOpts, contensisOpts);
@@ -62,11 +66,9 @@ class ContensisDev extends import_ContensisRoleService.default {
 let ciFileName = git.ciFileName;
 const devKeyName = `${git.name} development`;
 const devKeyDescription = `${git.name} [contensis-cli]`;
-const devKeyPermissions = { blocks: [] };
 let existingDevKey = apiKeyExists(devKeyName);
 const deployKeyName = `${git.name} deployment`;
 const deployKeyDescription = `${git.name} deploy [contensis-cli]`;
-const deployKeyPermissions = { blocks: ["push", "release"] };
 let existingDeployKey = apiKeyExists(deployKeyName);
 const blockId = git.name;
 const errors = [];
@@ -98,30 +100,31 @@
 ({ ciFileName } = await import_inquirer.default.prompt([
 {
 type: "list",
-message:
-${log.infoText(
-`Tell us which GitHub workflow builds the container image after each push:`
-)}`,
+message: messages.devinit.ciMultipleChoices(),
 name: "ciFileName",
 choices: workflowFiles,
 default: workflowFiles.find((f) => f.includes("docker"))
 }
 ]));
 log.raw("");
+git.ciFileName = ciFileName;
 }
 log.raw(log.infoText(messages.devinit.ciDetails(ciFileName)));
-
-
-
-
-
-
-
-
-
-
-
-
+const mappedWorkflow = (0, import_DevInit_to_CIWorkflow.mapCIWorkflowContent)(this, git);
+log.help(messages.devinit.ciIntro(git));
+if (!dryRun) {
+const { confirm } = await import_inquirer.default.prompt([
+{
+type: "confirm",
+message: messages.devinit.confirm(),
+name: "confirm",
+default: false
+}
+]);
+log.raw("");
+if (!confirm)
+return;
+}
 const { accessToken } = await import_inquirer.default.prompt([
 {
 type: "input",
@@ -159,10 +162,7 @@ ${log.infoText(
 let existingDevRole = (0, import_find.findByIdOrName)(roles || [], devKeyName, true);
 existingDevRole = await this.CreateOrUpdateRole(
 existingDevRole,
-devKeyName,
-devKeyDescription,
-{ apiKeys: [devKeyName] },
-devKeyPermissions
+(0, import_DevInit_to_RolePermissions.devKeyRole)(devKeyName, devKeyDescription)
 );
 checkpoint("dev key role assigned");
 log.success(messages.devinit.createDevKey(devKeyName, true));
@@ -173,63 +173,51 @@
 );
 existingDeployRole = await this.CreateOrUpdateRole(
 existingDeployRole,
-deployKeyName,
-deployKeyDescription,
-{ apiKeys: [deployKeyName] },
-deployKeyPermissions
+(0, import_DevInit_to_RolePermissions.deployKeyRole)(deployKeyName, deployKeyDescription)
 );
 checkpoint("deploy key role assigned");
 log.success(messages.devinit.createDeployKey(deployKeyName, true));
 checkpoint("api keys done");
 }
-const
+const envContentsToAdd = {
 ALIAS: currentEnv,
-PROJECT: currentProject
-ACCESS_TOKEN: accessToken
+PROJECT: currentProject
 };
+if (accessToken)
+envContentsToAdd["ACCESS_TOKEN"] = accessToken;
 const envFilePath = `${projectHome}/.env`;
 const existingEnvFile = (0, import_file_provider.readFile)(envFilePath);
-const
-
-
-
-
-
-
-
-updatedEnvKeys.push(k);
-}
-envFileLines.push(newline || ln);
-}
-for (const addKey of existingFileLines.filter(
-(efl) => !updatedEnvKeys.find(
-(uek) => {
-var _a;
-return uek.startsWith(`${(_a = efl.split("=")) == null ? void 0 : _a[0]}=`);
-}
-) && Object.keys(envContents).find((ck) => {
-var _a;
-return ck === ((_a = efl.split("=")) == null ? void 0 : _a[0]);
-})
-).map((fl) => {
-var _a;
-return (_a = fl.split("=")) == null ? void 0 : _a[0];
-})) {
-envFileLines.push(`${addKey}=${envContents[addKey]}`);
-}
+const envFileLines = (0, import_dotenv.mergeDotEnvFileContents)(
+(existingEnvFile || "").split("\n").filter((l) => !!l),
+envContentsToAdd
+);
+const envDiff = (0, import_diff.diffFileContent)(
+existingEnvFile || "",
+envFileLines.join("\n")
+);
 if (dryRun) {
+if (envDiff) {
+log.info(`updating .env file ${envFilePath}: ${envDiff}`);
+log.raw("");
+}
 checkpoint("skip .env file update (dry-run)");
-log.info(`.env file`);
-log.object(envFileLines);
 } else {
+if (envDiff)
+log.info(`updating .env file ${envFilePath}`);
 (0, import_file_provider.writeFile)(envFilePath, envFileLines.join("\n"));
 checkpoint(".env file updated");
 log.success(messages.devinit.writeEnvFile());
 }
 if (dryRun) {
+if (mappedWorkflow == null ? void 0 : mappedWorkflow.diff) {
+log.info(`updating${ciFileName} file: ${mappedWorkflow.diff}`);
+log.raw("");
+}
 checkpoint("skip CI file update (dry-run)");
-log.info(`${ciFileName} file`);
 } else {
+if (mappedWorkflow == null ? void 0 : mappedWorkflow.diff)
+log.info(`updating${ciFileName} file`);
+(0, import_file_provider.writeFile)(git.ciFilePath, [].join("\n"));
 log.success(messages.devinit.writeCiFile(`./${ciFileName}`));
 log.info(
 messages.devinit.ciBlockTip(blockId, currentEnv, currentProject)
@@ -244,8 +232,13 @@ ${log.infoText(
 existingDeployKey == null ? void 0 : existingDeployKey.sharedSecret
 )
 );
-
-
+if (dryRun) {
+log.success(messages.devinit.dryRun());
+log.help(messages.devinit.noChanges());
+} else {
+log.success(messages.devinit.success());
+log.help(messages.devinit.startProjectTip());
+}
 }
 };
 ExecRequestHandler = async (blockIds, overrideArgs) => {
@@ -255,7 +248,7 @@ ${log.infoText(
 const exe = "Zengenti.Contensis.RequestHandler.LocalDevelopment";
 const exePath = import_path.default.join(exeHome, exe);
 const siteConfigPath = import_path.default.join(import_file_provider.appRootDir, "site_config.yaml");
-const siteConfig = await (0,
+const siteConfig = await (0, import_DevRequests_to_RequestHanderSiteConfigYaml.mapSiteConfigYaml)(this);
 (0, import_file_provider.writeFile)("site_config.yaml", (0, import_yaml.stringify)(siteConfig));
 const args = overrideArgs ? typeof (overrideArgs == null ? void 0 : overrideArgs[0]) === "string" && overrideArgs[0].includes(" ", 2) ? overrideArgs[0].split(" ") : overrideArgs : [];
 if (!args.find((a) => a === "-c"))
package/dist/services/ContensisDevService.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/services/ContensisDevService.ts"],
-"sourcesContent": [ …previous embedded source of src/services/ContensisDevService.ts… ],
-"mappings": " …previous source map mappings… ",
+"sourcesContent": [ …updated embedded source of src/services/ContensisDevService.ts… ],
+"mappings": " …updated source map mappings… ",
 "names": ["ContensisRole", "inquirer", "to", "path"]
 }
package/dist/services/ContensisRoleService.js
CHANGED
@@ -55,31 +55,26 @@ class ContensisRole extends import_ContensisCliService.default {
 return key;
 }
 };
-CreateOrUpdateRole = async (existingRole,
+CreateOrUpdateRole = async (existingRole, role) => {
 const { contensis, currentEnv, messages } = this;
 if (!contensis)
 throw new Error("shouldnt be here");
 if (existingRole) {
 const [err, updated] = await contensis.roles.UpdateRole(existingRole.id, {
 ...existingRole,
-
-permissions
+...role
 });
 if (err)
-throw new Error(messages.roles.failedSet(currentEnv, name), {
+throw new Error(messages.roles.failedSet(currentEnv, role.name), {
 cause: err
 });
 return updated;
 } else {
-const [err, created] = await contensis.roles.CreateRole(
-
-
-enabled: true,
-assignments,
-permissions
-});
+const [err, created] = await contensis.roles.CreateRole(
+role
+);
 if (err)
-throw new Error(messages.roles.failedCreate(currentEnv, name), {
+throw new Error(messages.roles.failedCreate(currentEnv, role.name), {
 cause: err
 });
 return created;
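The change above collapses CreateOrUpdateRole's positional arguments (name, description, assignments, permissions) into a single role object built by the new DevInit-to-RolePermissions mappers. A minimal TypeScript sketch of the new call shape; the devKeyRole body here is illustrative only, reconstructed from the values the old positional call passed, and is not the package's actual mapper:

```typescript
// Illustrative only - approximates what devKeyRole(name, description) returns,
// based on the arguments the beta.89 CreateOrUpdateRole call used to receive.
const devKeyRole = (name: string, description: string) => ({
  name,
  description,
  enabled: true,
  assignments: { apiKeys: [name] },
  permissions: { blocks: [] },
});

// beta.90 call shape: the second argument is now the whole role payload.
// existingDevRole = await this.CreateOrUpdateRole(
//   existingDevRole,
//   devKeyRole(devKeyName, devKeyDescription)
// );
```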
package/dist/services/ContensisRoleService.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/services/ContensisRoleService.ts"],
-"sourcesContent": [ …previous embedded source of src/services/ContensisRoleService.ts… ],
-"mappings": " …previous source map mappings… ",
+"sourcesContent": [ …updated embedded source of src/services/ContensisRoleService.ts… ],
+"mappings": " …updated source map mappings… ",
 "names": ["ContensisCli"]
 }
package/dist/util/diff.js
CHANGED
@@ -1,7 +1,9 @@
 "use strict";
+var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
 for (var name in all)
@@ -15,12 +17,19 @@ var __copyProps = (to, from, except, desc) => {
 }
 return to;
 };
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+mod
+));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var diff_exports = {};
 __export(diff_exports, {
+diffFileContent: () => diffFileContent,
 diffLogStrings: () => diffLogStrings
 });
 module.exports = __toCommonJS(diff_exports);
+var import_chalk = __toESM(require("chalk"));
+var import_diff = require("diff");
 const diffLogStrings = (updates, previous) => {
 const lastFewLines = previous.split("\n").slice(-10);
 const incomingLines = updates.split("\n");
@@ -32,8 +41,62 @@ const diffLogStrings = (updates, previous) => {
 const differentFromPos = Math.max(...incomingLineIndices) + 1 || 0;
 return incomingLines.slice(differentFromPos).join("\n");
 };
+const diffFileContent = (existingContent, newContent) => {
+const diff = (0, import_diff.diffLines)(existingContent, newContent, { newlineIsToken: true });
+const diffRanges = addDiffPositionInfo(diff);
+const output = [];
+const lnSpaceLength = Math.max(
+...diffRanges.map((d) => d.startLineNumber.toString().length)
+);
+const lnSpaces = Array(lnSpaceLength).join(" ");
+for (let i = 0; i < diffRanges.length; i++) {
+const part = diffRanges[i];
+if (part.added || part.removed) {
+const colour = part.added ? "green" : part.removed ? "red" : "grey";
+if (part.value !== "\n")
+output.push(
+`
+${part.value.split("\n").map(
+(ln, idx) => ln.trim() !== "" ? `${part.startLineNumber ? part.startLineNumber + idx : lnSpaces}${part.added ? "+" : part.removed ? "-" : " "} ${import_chalk.default[colour](`${ln}`)}` : ln
+).join("\n")}`
+);
+}
+}
+return output.join("");
+};
+const addDiffPositionInfo = (diff) => {
+const diffRanges = [];
+let lineNumber = 0;
+let column = 0;
+for (let partIndex = 0; partIndex < diff.length; partIndex++) {
+const part = diff[partIndex];
+const startLineNumber = lineNumber;
+const startColumn = column;
+const substring = part.value;
+const lines = substring.split("\n");
+lines.forEach((line, lineIndex) => {
+if (lineIndex === 0) {
+column += line.length;
+} else if (lineIndex > 0) {
+lineNumber += 1;
+column = line.length;
+}
+});
+if (part.added === true || part.removed === true) {
+diffRanges.push({
+startLineNumber: startLineNumber + 1,
+startColumn,
+endLineNumber: lineNumber,
+endColumn: column,
+...part
+});
+}
+}
+return diffRanges;
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+diffFileContent,
 diffLogStrings
 });
 //# sourceMappingURL=diff.js.map
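A short, hypothetical usage sketch of the new diffFileContent helper added above (the import path uses the package's src alias and the input strings are made up):

```typescript
import { diffFileContent } from '~/util/diff';

// Made-up file contents: compare the current .env with the merged result and
// print the coloured, line-numbered +/- summary shown during `dev init --dry-run`.
const existing = 'ALIAS=example\nPROJECT=website\n';
const updated = 'ALIAS=example\nPROJECT=intranet\nACCESS_TOKEN=xyz\n';

const summary = diffFileContent(existing, updated);
if (summary) console.log(`updating .env file:${summary}`);
```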
package/dist/util/diff.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/util/diff.ts"],
-"sourcesContent": [ …previous embedded source of src/util/diff.ts… ],
-"mappings": " …previous source map mappings… ",
-"names": []
+"sourcesContent": [ …updated embedded source of src/util/diff.ts… ],
+"mappings": " …updated source map mappings… ",
+"names": ["chalk"]
 }
package/dist/util/dotenv.js
ADDED
@@ -0,0 +1,57 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+for (var name in all)
+__defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+if (from && typeof from === "object" || typeof from === "function") {
+for (let key of __getOwnPropNames(from))
+if (!__hasOwnProp.call(to, key) && key !== except)
+__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+}
+return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var dotenv_exports = {};
+__export(dotenv_exports, {
+mergeDotEnvFileContents: () => mergeDotEnvFileContents
+});
+module.exports = __toCommonJS(dotenv_exports);
+const mergeDotEnvFileContents = (existingFileLines, envContentsToAdd) => {
+const envFileLines = [];
+if (existingFileLines.length === 0) {
+envFileLines.push(
+...Object.entries(envContentsToAdd).map(([k, v]) => `${k}=${v}`)
+);
+} else {
+const updatedEnvKeys = [];
+for (const ln of existingFileLines) {
+let newline = "";
+for (const [k, v] of Object.entries(envContentsToAdd))
+if (ln.startsWith(`${k}=`)) {
+newline = `${k}=${v}`;
+updatedEnvKeys.push(k);
+}
+if (newline || ln)
+envFileLines.push(newline || ln);
+}
+for (const addKey of Object.keys(envContentsToAdd).filter(
+(efl) => !updatedEnvKeys.find((uek) => {
+var _a;
+return uek.startsWith(`${(_a = efl.split("=")) == null ? void 0 : _a[0]}`);
+})
+)) {
+envFileLines.push(`${addKey}=${envContentsToAdd[addKey]}`);
+}
+}
+return envFileLines;
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+mergeDotEnvFileContents
+});
+//# sourceMappingURL=dotenv.js.map
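A hypothetical usage sketch of the new mergeDotEnvFileContents helper, with made-up values; the import path uses the package's src alias and the expected result follows the merge rules visible in the source above (existing keys updated in place, new keys appended):

```typescript
import { mergeDotEnvFileContents } from '~/util/dotenv';

// Made-up existing .env lines: PROJECT is updated in place,
// ACCESS_TOKEN did not exist before so it is appended at the end.
const existingLines = ['ALIAS=example', 'PROJECT=website'];

const merged = mergeDotEnvFileContents(existingLines, {
  ALIAS: 'example',
  PROJECT: 'intranet',
  ACCESS_TOKEN: 'xyz',
});
// merged => ['ALIAS=example', 'PROJECT=intranet', 'ACCESS_TOKEN=xyz']
```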
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/util/dotenv.ts"],
|
|
4
|
+
"sourcesContent": ["import { EnvContentsToAdd } from \"~/models/DevService\";\n\nexport const mergeDotEnvFileContents = (\n existingFileLines: string[],\n envContentsToAdd: EnvContentsToAdd\n): string[] => {\n const envFileLines: string[] = []; // the new .env file\n if (existingFileLines.length === 0) {\n // There is no env file, just create one from envContentsToAdd\n envFileLines.push(\n ...Object.entries(envContentsToAdd).map(([k, v]) => `${k}=${v}`)\n );\n } else {\n const updatedEnvKeys: string[] = [];\n // Find lines in env that already exist for the keys in envContentsToAdd\n // update them if they exist and add them to envFileLines\n for (const ln of existingFileLines) {\n let newline = '';\n for (const [k, v] of Object.entries(envContentsToAdd))\n if (ln.startsWith(`${k}=`)) {\n newline = `${k}=${v}`;\n updatedEnvKeys.push(k);\n }\n // Ensure an updated line or other lines from the existing env file are re-added\n if (newline || ln) envFileLines.push(newline || ln);\n }\n\n // Add the envContentsToAdd lines to the file that did not previously exist or had an update\n for (const addKey of Object.keys(envContentsToAdd).filter(\n efl =>\n !updatedEnvKeys.find(uek => uek.startsWith(`${efl.split('=')?.[0]}`))\n ) as (keyof typeof envContentsToAdd)[]) {\n envFileLines.push(`${addKey}=${envContentsToAdd[addKey]}`);\n }\n }\n return envFileLines;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,0BAA0B,CACrC,mBACA,qBACa;AACb,QAAM,eAAyB,CAAC;AAChC,MAAI,kBAAkB,WAAW,GAAG;AAElC,iBAAa;AAAA,MACX,GAAG,OAAO,QAAQ,gBAAgB,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,KAAK,GAAG;AAAA,IACjE;AAAA,EACF,OAAO;AACL,UAAM,iBAA2B,CAAC;AAGlC,eAAW,MAAM,mBAAmB;AAClC,UAAI,UAAU;AACd,iBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,gBAAgB;AAClD,YAAI,GAAG,WAAW,GAAG,IAAI,GAAG;AAC1B,oBAAU,GAAG,KAAK;AAClB,yBAAe,KAAK,CAAC;AAAA,QACvB;AAEF,UAAI,WAAW;AAAI,qBAAa,KAAK,WAAW,EAAE;AAAA,IACpD;AAGA,eAAW,UAAU,OAAO,KAAK,gBAAgB,EAAE;AAAA,MACjD,SACE,CAAC,eAAe,KAAK,SAAI;AA9BjC;AA8BoC,mBAAI,WAAW,IAAG,SAAI,MAAM,GAAG,MAAb,mBAAiB,IAAI;AAAA,OAAC;AAAA,IACxE,GAAwC;AACtC,mBAAa,KAAK,GAAG,UAAU,iBAAiB,SAAS;AAAA,IAC3D;AAAA,EACF;AACA,SAAO;AACT;",
+  "names": []
+}
package/dist/util/git.js
CHANGED
@@ -36,12 +36,19 @@ var import_file_provider = require("../providers/file-provider");
 const GITLAB_CI_FILENAME = ".gitlab-ci.yml";
 class GitHelper {
   gitRepoPath;
+  ciFile;
   config = {};
   info;
   home;
+  set ciFileName(fileName) {
+    this.ciFile = fileName;
+  }
   get ciFileName() {
     var _a;
-    return this.workflows ? this.type === "github" ? this.workflows.length > 1 ? "[multiple workflows]" : (_a = this.workflows) == null ? void 0 : _a[0] : GITLAB_CI_FILENAME : "[unknown]";
+    return this.ciFile || (this.workflows ? this.type === "github" ? this.workflows.length > 1 ? "[multiple workflows]" : (_a = this.workflows) == null ? void 0 : _a[0] : GITLAB_CI_FILENAME : "[unknown]");
+  }
+  get ciFilePath() {
+    return `${this.gitRepoPath}/${this.ciFileName}`;
   }
   get name() {
     var _a, _b;
package/dist/util/git.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/util/git.ts"],
-
"sourcesContent": ["import giturl from 'giturl';\nimport hostedGitInfo from 'hosted-git-info';\nimport parseGitConfig from 'parse-git-config';\nimport path from 'path';\n\nimport { linuxSlash } from './os';\nimport { readFile, readFiles } from '~/providers/file-provider';\n\nconst GITLAB_CI_FILENAME = '.gitlab-ci.yml';\n\ntype GitConfig = parseGitConfig.Config;\n\nexport type GitTypes = hostedGitInfo.Hosts;\n\nexport class GitHelper {\n private gitRepoPath: string;\n config = {} as GitConfig;\n info: hostedGitInfo | undefined;\n home: string | undefined;\n\n get ciFileName() {\n return this.workflows\n
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,6BAA0B;AAC1B,8BAA2B;AAC3B,kBAAiB;AAEjB,gBAA2B;AAC3B,2BAAoC;AAEpC,MAAM,qBAAqB;AAMpB,MAAM,UAAU;AAAA,EACb;AAAA,
+
"sourcesContent": ["import giturl from 'giturl';\nimport hostedGitInfo from 'hosted-git-info';\nimport parseGitConfig from 'parse-git-config';\nimport path from 'path';\n\nimport { linuxSlash } from './os';\nimport { readFile, readFiles } from '~/providers/file-provider';\n\nconst GITLAB_CI_FILENAME = '.gitlab-ci.yml';\n\ntype GitConfig = parseGitConfig.Config;\n\nexport type GitTypes = hostedGitInfo.Hosts;\n\nexport class GitHelper {\n private gitRepoPath: string;\n private ciFile?: string;\n\n config = {} as GitConfig;\n info: hostedGitInfo | undefined;\n home: string | undefined;\n\n set ciFileName(fileName: string) {\n this.ciFile = fileName;\n }\n\n get ciFileName() {\n return (\n this.ciFile ||\n (this.workflows\n ? this.type === 'github'\n ? this.workflows.length > 1\n ? '[multiple workflows]'\n : this.workflows?.[0]\n : GITLAB_CI_FILENAME\n : '[unknown]')\n );\n }\n get ciFilePath() {\n return `${this.gitRepoPath}/${this.ciFileName}`;\n }\n get name() {\n return (\n this.info?.project || this.home?.split('/').pop() || '[set arg --name]'\n );\n }\n get originUrl() {\n return this.config.remote.origin.url;\n }\n get secretsUri() {\n return `${\n this.type === 'github'\n ? `${this.home}/settings/secrets/actions`\n : `${this.home}/-/settings/ci_cd`\n }`;\n }\n get type() {\n return this.info?.type || this.hostType();\n }\n get workflows() {\n return this.type === 'github'\n ? this.githubWorkflows()\n : this.gitlabWorkflow();\n }\n constructor(gitRepoPath: string = process.cwd()) {\n this.gitRepoPath = gitRepoPath;\n this.config = this.gitConfig();\n this.home = giturl.parse(this.originUrl);\n this.info = this.gitInfo();\n // console.log(this.config);\n // console.log(this.home);\n // console.log(this.info);\n }\n gitcwd = () => path.join(this.gitRepoPath);\n gitInfo = (url: string = this.originUrl) => hostedGitInfo.fromUrl(url);\n hostType = (url: string = this.originUrl): GitTypes => {\n if (url.includes('github.com')) return 'github';\n return 'gitlab';\n // if (url.includes('gitlab.com')) return 'gl';\n // if (url.includes('gitlab.zengenti.com')) return 'gl';\n };\n gitConfig = (cwd = this.gitRepoPath) => {\n // Find .git/config in project cwd\n const config = parseGitConfig.sync({\n path: '.git/config',\n expandKeys: true,\n });\n // console.log(cwd, config);\n if (Object.keys(config || {}).length) return config;\n\n // Recursively check the directory heirarchy for existance of a .git/config\n const pathParts = linuxSlash(cwd).split('/');\n for (let i = 1; i <= pathParts.length; i++) {\n const relPath = `${Array(i).fill('..').join('/')}/.git/config`;\n // Does not appear to work when using a shortened cwd, using relative path instead\n const config = parseGitConfig.sync({\n path: relPath,\n expandKeys: true,\n });\n // console.log(relPath, config);\n if (Object.keys(config || {}).length) {\n this.gitRepoPath = path.join(\n this.gitRepoPath,\n Array(i).fill('..').join('/')\n );\n return config;\n }\n }\n return config;\n };\n githubWorkflows = () => {\n const workflowPath = path.join(this.gitcwd(), '.github/workflows');\n const workflowFiles = readFiles(workflowPath, false);\n // console.log('gh workflows: ', workflowFiles);\n const addFolderSuffix = (files: string[]) =>\n files.map(f => `.github/workflows/${f}`);\n\n if (workflowFiles.some(f => f.includes('build')))\n return addFolderSuffix(workflowFiles.filter(f => f.includes('build')));\n return addFolderSuffix(workflowFiles);\n };\n gitlabWorkflow = (ciFileName = GITLAB_CI_FILENAME) => {\n const workflowPath = this.gitcwd();\n 
const workflowFilePath = path.join(workflowPath, ciFileName);\n const workflowFile = readFile(workflowFilePath);\n // console.log(ciFileName, workflowFile);\n\n return workflowFile;\n };\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,6BAA0B;AAC1B,8BAA2B;AAC3B,kBAAiB;AAEjB,gBAA2B;AAC3B,2BAAoC;AAEpC,MAAM,qBAAqB;AAMpB,MAAM,UAAU;AAAA,EACb;AAAA,EACA;AAAA,EAER,SAAS,CAAC;AAAA,EACV;AAAA,EACA;AAAA,EAEA,IAAI,WAAW,UAAkB;AAC/B,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,aAAa;AA1BnB;AA2BI,WACE,KAAK,WACJ,KAAK,YACF,KAAK,SAAS,WACZ,KAAK,UAAU,SAAS,IACtB,0BACA,UAAK,cAAL,mBAAiB,KACnB,qBACF;AAAA,EAER;AAAA,EACA,IAAI,aAAa;AACf,WAAO,GAAG,KAAK,eAAe,KAAK;AAAA,EACrC;AAAA,EACA,IAAI,OAAO;AAzCb;AA0CI,aACE,UAAK,SAAL,mBAAW,cAAW,UAAK,SAAL,mBAAW,MAAM,KAAK,UAAS;AAAA,EAEzD;AAAA,EACA,IAAI,YAAY;AACd,WAAO,KAAK,OAAO,OAAO,OAAO;AAAA,EACnC;AAAA,EACA,IAAI,aAAa;AACf,WAAO,GACL,KAAK,SAAS,WACV,GAAG,KAAK,kCACR,GAAG,KAAK;AAAA,EAEhB;AAAA,EACA,IAAI,OAAO;AAxDb;AAyDI,aAAO,UAAK,SAAL,mBAAW,SAAQ,KAAK,SAAS;AAAA,EAC1C;AAAA,EACA,IAAI,YAAY;AACd,WAAO,KAAK,SAAS,WACjB,KAAK,gBAAgB,IACrB,KAAK,eAAe;AAAA,EAC1B;AAAA,EACA,YAAY,cAAsB,QAAQ,IAAI,GAAG;AAC/C,SAAK,cAAc;AACnB,SAAK,SAAS,KAAK,UAAU;AAC7B,SAAK,OAAO,cAAAA,QAAO,MAAM,KAAK,SAAS;AACvC,SAAK,OAAO,KAAK,QAAQ;AAAA,EAI3B;AAAA,EACA,SAAS,MAAM,YAAAC,QAAK,KAAK,KAAK,WAAW;AAAA,EACzC,UAAU,CAAC,MAAc,KAAK,cAAc,uBAAAC,QAAc,QAAQ,GAAG;AAAA,EACrE,WAAW,CAAC,MAAc,KAAK,cAAwB;AACrD,QAAI,IAAI,SAAS,YAAY;AAAG,aAAO;AACvC,WAAO;AAAA,EAGT;AAAA,EACA,YAAY,CAAC,MAAM,KAAK,gBAAgB;AAEtC,UAAM,SAAS,wBAAAC,QAAe,KAAK;AAAA,MACjC,MAAM;AAAA,MACN,YAAY;AAAA,IACd,CAAC;AAED,QAAI,OAAO,KAAK,UAAU,CAAC,CAAC,EAAE;AAAQ,aAAO;AAG7C,UAAM,gBAAY,sBAAW,GAAG,EAAE,MAAM,GAAG;AAC3C,aAAS,IAAI,GAAG,KAAK,UAAU,QAAQ,KAAK;AAC1C,YAAM,UAAU,GAAG,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE,KAAK,GAAG;AAE/C,YAAMC,UAAS,wBAAAD,QAAe,KAAK;AAAA,QACjC,MAAM;AAAA,QACN,YAAY;AAAA,MACd,CAAC;AAED,UAAI,OAAO,KAAKC,WAAU,CAAC,CAAC,EAAE,QAAQ;AACpC,aAAK,cAAc,YAAAH,QAAK;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE,KAAK,GAAG;AAAA,QAC9B;AACA,eAAOG;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EACA,kBAAkB,MAAM;AACtB,UAAM,eAAe,YAAAH,QAAK,KAAK,KAAK,OAAO,GAAG,mBAAmB;AACjE,UAAM,oBAAgB,gCAAU,cAAc,KAAK;AAEnD,UAAM,kBAAkB,CAAC,UACvB,MAAM,IAAI,OAAK,qBAAqB,GAAG;AAEzC,QAAI,cAAc,KAAK,OAAK,EAAE,SAAS,OAAO,CAAC;AAC7C,aAAO,gBAAgB,cAAc,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,CAAC;AACvE,WAAO,gBAAgB,aAAa;AAAA,EACtC;AAAA,EACA,iBAAiB,CAAC,aAAa,uBAAuB;AACpD,UAAM,eAAe,KAAK,OAAO;AACjC,UAAM,mBAAmB,YAAAA,QAAK,KAAK,cAAc,UAAU;AAC3D,UAAM,mBAAe,+BAAS,gBAAgB;AAG9C,WAAO;AAAA,EACT;AACF;",
   "names": ["giturl", "path", "hostedGitInfo", "parseGitConfig", "config"]
 }
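The regenerated sourcemap embeds the updated GitHelper source, including gitConfig(), which parses .git/config in the working directory and, when nothing is found, retries with ../.git/config, ../../.git/config and so on, adjusting gitRepoPath once a non-empty config is parsed. A standalone sketch of that walk-up idea (simplified; not the package's exact code):

import parseGitConfig from 'parse-git-config';

// Try .git/config here, then in each parent directory, up to maxDepth levels
const findGitConfig = (maxDepth = 10) => {
  for (let i = 0; i <= maxDepth; i++) {
    const relPath = `${Array(i).fill('..').join('/')}${i ? '/' : ''}.git/config`;
    const config = parseGitConfig.sync({ path: relPath, expandKeys: true });
    if (Object.keys(config || {}).length) return { relPath, config };
  }
  return undefined;
};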