@mimik/be-project-builder 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +94 -0
- package/eslint.config.js +71 -0
- package/index.js +206 -0
- package/lib/buildfile.js +290 -0
- package/lib/common.js +13 -0
- package/lib/configFiles.js +90 -0
- package/lib/helpers.js +311 -0
- package/lib/readFileSync.js +69 -0
- package/lib/scaffold.js +108 -0
- package/lib/writeFileSync.js +70 -0
- package/package.json +37 -0
- package/scaffoldFiles/local/commitMsgCheck.txt +3 -0
- package/scaffoldFiles/local/dotFiles.txt +3 -0
- package/scaffoldFiles/local/jsdoc.json +6 -0
- package/scaffoldFiles/local/scripts.txt +3 -0
- package/scaffoldFiles/local/setup.txt +3 -0
- package/scaffoldFiles/local/start-example.json +37 -0
- package/scaffoldFiles/local/testSetup.txt +3 -0
- package/scaffoldFiles/local/unScripts.txt +3 -0
- package/scaffoldFiles/package.json +51 -0
- package/scaffoldFiles/src/common.txt +16 -0
- package/scaffoldFiles/src/dbValidateHelper.txt +4 -0
- package/scaffoldFiles/src/index.txt +34 -0
- package/scaffoldFiles/src/infoController.txt +34 -0
- package/scaffoldFiles/src/infoProcessor.txt +26 -0
- package/scaffoldFiles/src/mustacheConfig.txt +57 -0
- package/scaffoldFiles/src/mustacheController.txt +20 -0
- package/scaffoldFiles/test/mustacheCommon.txt +44 -0
- package/scaffoldFiles/test/mustacheHttp-test-detached.txt +34 -0
- package/scaffoldFiles/test/mustacheHttp-test-normal.txt +36 -0
- package/scaffoldFiles/test/set-env.txt +7 -0
- package/scaffoldFiles/test/systemEndpoints.txt +87 -0
- package/scaffoldFiles/test/util.txt +36 -0
- package/test.sh +4 -0
package/lib/configFiles.js
ADDED

@@ -0,0 +1,90 @@
/* eslint-disable no-magic-numbers */
import { networkInterfaces } from 'os';

let ip;

Object.keys(networkInterfaces()).forEach(group => networkInterfaces()[group].forEach((interf) => {
  if (interf.family === 'IPv4' && !interf.internal) [ip] = interf.cidr.split('/');
}));
const mST = [
  { key: 'basePath', env: 'CLI_MST_BASE_PATH', message: 'mST BasePath', initial: '/mST/v1' },
  { key: 'protocol', env: 'CLI_MST_PROTOCOL', message: 'mST Protocol', initial: 'http:' },
  { key: 'domainName', env: 'CLI_MST_DOMAIN_NAME', message: `mST DomainName (local IP address: ${ip})`, initial: ip || 'localhost' },
  { key: 'port', env: 'CLI_MST_PORT', message: 'MST Port', type: 'number', initial: 8025 },
  { key: 'passphrase', env: 'CLI_MST_PASSPHRASE', type: 'invisible', message: 'mST Passphrase (blank = null)', format: val => val === ' ' ? null : val },
  { key: 'admin.clientId', env: 'CLI_MST_ADMIN_CLIENT_ID', message: 'mST Admin Client ID', initial: '11111111-2222-3333-4444-555555555555' },
  { key: 'admin.clientSecret', env: 'CLI_MST_ADMIN_CLIENT_SECRET', type: 'invisible', message: 'mST Admin Client Secret (blank = null)', initial: 'a-secret-for-mST', format: val => val === ' ' ? null : val },
];

const mIT = [
  { key: 'basePath', env: 'CLI_MIT_BASE_PATH', message: 'mIT BasePath', initial: 'mIT/v1' },
  { key: 'protocol', env: 'CLI_MIT_PROTOCOL', message: 'mIT Protocol', initial: 'http:' },
  { key: 'domainName', env: 'CLI_MIT_DOMAIN_NAME', message: 'mIT DomainName', initial: 'localhost' },
  { key: 'port', env: 'CLI_MIT_PORT', message: 'mIT Port', type: 'number', initial: 8050 },
];

const mID = [
  { key: 'basePath', env: 'CLI_MID_BASE_PATH', message: 'mID BasePath', initial: 'mID/v1' },
  { key: 'protocol', env: 'CLI_MID_PROTOCOL', message: 'mID Protocol', initial: 'http:' },
  { key: 'domainName', env: 'CLI_MID_DOMAIN_NAME', message: 'mID DomainName', initial: 'localhost' },
  { key: 'port', env: 'CLI_MID_PORT', message: 'mID Port', type: 'number', initial: 8015 },
  { key: 'implicit.key', env: 'CLI_MID_IMPLICIT_KEY', type: 'invisible', message: 'mID Implicit Key', initial: 'a-secret-key-for-implicit' },
  { key: 'implicit.audience', env: 'CLI_MID_IMPLICIT_AUDIENCE', message: 'mID Implicit Audience', initial: 'https://mimik' },
  { key: 'admin.externalId', env: 'CLI_MID_ADMIN_EXT_ID', message: 'mID Admin External ID', initial: 'admin-test-local-mID' },
];

const location = [
  { key: 'url', env: 'CLI_LOCATION_URL', message: 'Location Provider URL', initial: 'http://pro.ip-api.com/json' },
  { key: 'key', env: 'CLI_LOCATION_KEY', type: 'invisible', message: 'Location Provider Key (blank = null)', format: val => val === ' ' ? null : val },
];

const keyBitbucket = [
  { key: 'bitbucket.username', env: 'CLI_BITBUCKET_USERNAME', message: 'Bitbucket username' },
  { key: 'bitbucket.password', env: 'CLI_BITBUCKET_PASSWORD', type: 'invisible', message: 'Bitbucket password (blank = null)', format: val => val === ' ' ? null : val },
];

const keySwagger = [
  { key: 'swaggerhub', env: 'CLI_SWAGGERHUB', type: 'invisible', message: 'Swaggerhub apikey (blank = null)', format: val => val === ' ' ? null : val },
];

const s3Log = [
  { key: 'bucketname', env: 'CLI_S3LOG_BUCKET_NAME', message: 'S3 bucket name', initial: 'bucket-tmp-mimikdev-com' },
  { key: 'region', env: 'CLI_S3LOG_REGION', message: 'Region of the S3 bucket', initial: 'us-west-2' },
  { key: 'accessKeyId', env: 'CLI_S3LOG_ACCESS_KEY_ID', type: 'invisible', message: 'Access key id of the S3 bucket (blank = null)', format: val => val === ' ' ? null : val },
  { key: 'secretAccessKey', env: 'CLI_S3LOG_SECRET_ACCESS_KEY', type: 'invisible', message: 'Secret access key of the S3 bucket (blank = null)', format: val => val === ' ' ? null : val },
  { key: 'maxEvents', env: 'CLI_S3LOG_MAX_EVENTS', message: 'Max logs accumulated before flushing', type: 'number', initial: 1000 },
  { key: 'timeout', env: 'CLI_S3LOG_TIMEOUT', message: 'Maximum time before flushing (in minute)', type: 'number', initial: 5 },
  { key: 'maxSize', env: 'CLI_S3LOG_MAX_SIZE', message: 'Maximum size of the accumulated logs before flushing (in MByte)', type: 'number', initial: 5 },
];

const kinesisLog = [
  { key: 'streamNameInfo', env: 'CLI_KINESIS_STREAM_NAME_INFO', message: 'Name of the Kinesis stream for info level logs', initial: 'kinesis-stream-tmp-mimikdev-com' },
  { key: 'streamNameError', env: 'CLI_KINESIS_STREAM_NAME_ERROR', message: 'Name of the Kinesis stream for error level logs', initial: 'kinesis-stream-tmp-mimikdev-com' },
  { key: 'streamNameOther', env: 'CLI_KINESIS_STREAM_NAME_OTHER', message: 'Name of the Kinesis stream for any other level logs', initial: 'kinesis-stream-tmp-mimikdev-com' },
  { key: 'region', env: 'CLI_KINESIS_REGION', message: 'Region of the Kinesis stream', initial: 'us-west-2' },
  { key: 'accessKeyId', env: 'CLI_KINESIS_ACCESS_KEY_ID', type: 'invisible', message: 'Access key id of the Kinesis stream (blank = null)', format: val => val === ' ' ? null : val },
  { key: 'secretAccessKey', env: 'CLI_KINESIS_SECRET_ACCESS_KEY', type: 'invisible', message: 'Secret access key of the Kinesis stream (blank = null)', format: val => val === ' ' ? null : val },
  { key: 'maxEvents', env: 'CLI_KINESIS_MAX_EVENTS', message: 'Maximum logs accumulated before flushing', type: 'number', initial: 1000 },
  { key: 'timeout', env: 'CLI_KINESIS_TIMEOUT', message: 'Maximum time before flushing (in minute)', type: 'number', initial: 5 },
  { key: 'maxSize', env: 'CLI_KINESIS_MAX_SIZE', message: 'Maximum size of the accumulated logs before flushing (in MByte)', type: 'number', initial: 5 },
  { key: 'maxRetries', env: 'CLI_KINESIS_MAX_RETRIES', message: 'Maximum retries to connect to Kinesis', type: 'number', initial: 4 },
  { key: 'httpOptions.socketTimeout', env: 'CLI_KINESIS_HTTP_OPTIONS_SOCKET_TIMEOUT', message: 'Socket timeout for the HTTP handler (in millisecond)', type: 'number', initial: 5000 },
  { key: 'httpOptions.connectionTimeout', env: 'CLI_KINESIS_HTTP_OPTIONS_CONNECTION_TIMEOUT', message: 'Connection timeout for the HTTP handler (in millisecond)', type: 'number', initial: 5000 },
];

const sumoLog = [
  { key: 'default.url', env: 'CLI_SUMOLOGIC_DEFAULT_URL', message: 'Sumologic default URL', initial: 'https://endpoint2.collection.us2.sumologic.com/receiver/v1/http/' },
  { key: 'default.code', env: 'CLI_SUMOLOGIC_DEFAULT_CODE', type: 'invisible', message: 'Sumologic default code (blank = null)', format: val => val === ' ' ? null : val },
];

export default {
  mST,
  mIT,
  mID,
  location,
  keyBitbucket,
  keySwagger,
  s3Log,
  kinesisLog,
  sumoLog,
};
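
Editor's note: the arrays above are plain descriptor lists; each entry names a config key, an environment-variable override, and prompt metadata (message, type, initial, format). Their consumer in this package is configureFile() in lib/buildfile.js, which is not reproduced in this diff, so the following is only a minimal sketch of how such a descriptor array could drive prompts; the askGroup helper and the env-override behaviour are assumptions, not the package's implementation.

// Hypothetical sketch, not part of the package.
import prompts from 'prompts';
import configFiles from './lib/configFiles.js';

const askGroup = async (descriptors) => {
  const answers = {};
  for (const d of descriptors) {
    // Assumed behaviour: an environment variable short-circuits the prompt.
    if (process.env[d.env] !== undefined) {
      answers[d.key] = d.type === 'number' ? Number(process.env[d.env]) : process.env[d.env];
      continue;
    }
    const { value } = await prompts({
      type: d.type || 'text',
      name: 'value',
      message: d.message,
      initial: d.initial,
      format: d.format,
    });
    answers[d.key] = value;
  }
  return answers;
};

const mstValues = await askGroup(configFiles.mST);
console.log(mstValues);
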
package/lib/helpers.js
ADDED
@@ -0,0 +1,311 @@
/* eslint default-case: 'off' */
import { access, stat } from 'fs/promises';
import { constants, statSync } from 'node:fs';
import { Base64 } from 'js-base64';
import Mustache from 'mustache';
import { PROJECT_DIR } from './common.js';
import SwaggerClient from 'swagger-client';
import color from 'ansi-colors';
import configFiles from './configFiles.js';
import { configureFile } from './buildfile.js';
import { getFileByNameSync } from './readFileSync.js';
import { join } from 'node:path';
import { load } from 'js-yaml';
import { parse } from 'comment-json';
import prompts from 'prompts';
import { rpRetry } from '@mimik/request-retry';

const API_PROVIDER_BITBUCKET = 'https://api.bitbucket.org/2.0/repositories';
const API_PROVIDER_SWAGGERHUB = 'https://api.swaggerhub.com/apis';
const API_SOURCE = '/src';

const { render } = Mustache;

const swaggerOptions = spec => ({
  spec,
  allowMetaPatches: false,
  skipNormalization: true,
  mode: 'strict',
});

export const isDirectorySync = (path) => {
  try {
    return statSync(path).isDirectory();
  }
  catch {
    return false;
  }
};

const opts = {
  method: 'GET',
  headers: {},
  retry: {
    logLevel: {
      response: 'info',
      request: 'info',
    },
  },
};

export const createAPI = async (homeDir, projectDir, service) => {
  const API = {};
  let name;
  let version;
  let account = 'mimiktech';
  let provider = 'bitbucket';
  let content;
  let destination;
  const { exist } = await prompts({ type: 'confirm', name: 'exist', message: 'API exists?' });

  API.exist = exist;
  if (!API.exist) return API;
  const { location } = await prompts({
    type: 'select',
    name: 'location',
    message: 'Location of the API',
    choices: [
      { title: 'File', value: 'filename' },
      { title: 'Bitbucket', value: 'bitbucket' },
      { title: 'Swaggerhub', value: 'swaggerhub' },
    ],
  });
  switch (location) {
    case 'filename': {
      ({ name } = await prompts({ type: 'text', name: 'name', message: 'API filename' }));
      content = getFileByNameSync(homeDir, name);
      ({ version } = content.info);
      destination = `${account}_swagger-${service.name}_${version}_swagger.json`;
      break;
    }
    case 'bitbucket': {
      name = `swagger-${service.name}`;
      ({ version } = await prompts({ type: 'text', name: 'version', message: 'API version', initial: null }));
      ({ account } = await prompts({ type: 'text', name: 'account', message: 'API account', initial: 'mimiktech' }));
      const key = await configureFile(join(projectDir, 'key.json'), configFiles.keyBitbucket, { nonInteractive: false, jsonParse: parse });
      const username = key.values['bitbucket.username'];
      const password = key.values['bitbucket.password'];

      if (!username || !password) throw new Error('missing values to log to bitbucket');
      opts.headers.Authorization = `Basic ${Base64.encode(`${username}:${password}`)}`;
      opts.url = `${API_PROVIDER_BITBUCKET}/${account}/${name}${API_SOURCE}/${version}/swagger.yml`;
      const result = await rpRetry(opts);
      const resultJSON = load(result);
      const apiDefinitionResult = await SwaggerClient.resolve(swaggerOptions(resultJSON));

      if (apiDefinitionResult.errors.length !== 0) throw new Error('errors while resolving definition');
      content = apiDefinitionResult.spec;
      destination = `${account}_${name}_${version}_swagger.json`;
      break;
    }
    case 'swaggerhub': {
      ({ version } = await prompts({ type: 'text', name: 'version', message: 'API version', initial: null }));
      ({ account } = await prompts({ type: 'text', name: 'account', message: 'API account', initial: 'mimik' }));
      const key = await configureFile(join(projectDir, 'key.json'), configFiles.keySwagger, { nonInteractive: false, jsonParse: parse });
      const apiKey = key.values.swaggerhub;

      opts.url = `${API_PROVIDER_SWAGGERHUB}/${account}/${service.type}/${version}?resolved=true`;
      if (apiKey) opts.headers.Authorization = apiKey;
      content = await rpRetry(opts);
      destination = `${account}_swagger-${service.name}_${version}_swagger.json`;
      provider = location;
      break;
    }
  }
  Object.assign(API, { location, name, version, account, provider, content, destination });
  return API;
};

export const createNewService = async (dir, homeDir, projectDir) => {
  const { serviceName } = await prompts({ type: 'text', name: 'serviceName', message: 'Name of the service' });
  if (!serviceName) return null;
  const service = { name: serviceName };

  if (isDirectorySync(join(projectDir, serviceName))) {
    service.newService = false;
    const pack = getFileByNameSync(join(projectDir, serviceName), 'package.json', { throwIfMissing: false });

    if (pack) {
      service.name = pack.name;
      service.description = pack.description;
      service.version = pack.version;
      service.type = pack.mimik?.type;
      service.APIVersion = pack.swaggerFile?.version;
      service.APIAccount = pack.swaggerFile?.account;
      service.APIProvider = pack.swaggerFile?.provider;
    }
    if (isDirectorySync(join(projectDir, serviceName, 'local'))) {
      const startExample = getFileByNameSync(join(projectDir, serviceName, 'local'), 'start-example.json', { throwIfMissing: false, jsonParse: parse });

      if (startExample) {
        service.port = startExample.SERVER_PORT;
        service.protocol = startExample.SERVER_PUBLIC_PROTOCOL;
        service.domainName = startExample.SERVER_PUBLIC_DOMAIN_NAME;
      }
    }
    return service;
  }
  service.newService = true;
  const { description } = await prompts({ type: 'text', name: 'description', message: 'Description of the service', initial: service.description || null });
  const { type } = await prompts({ type: 'text', name: 'type', message: 'Type of the service in mST', initial: service.type || null });
  const { port } = await prompts({ type: 'number', name: 'port', message: 'Service port', initial: service.port || null });
  const { protocol } = await prompts({ type: 'text', name: 'protocol', message: 'Service public protocol', initial: service.protocol || 'https:' });
  const { domainName } = await prompts({ type: 'text', name: 'domainName', message: 'Service public domain name (blank = null)', initial: service.domainName || null, format: val => val === ' ' ? null : val });

  Object.assign(service, { name: serviceName, description, type, port, domainName, protocol });
  return service;
};

export const createService = async (dir, homeDir, projectDir) => {
  const { isNewService } = await prompts({ type: 'confirm', name: 'isNewService', message: 'creating or updating a service ?' });
  const newService = { isNewService };
  if (isNewService) {
    const { name } = await prompts({ type: 'text', name: 'name', message: 'name of the service' });
    const service = {};

    if (isDirectorySync(join(projectDir, name))) {
      newService.isExisting = true;
      const pack = getFileByNameSync(join(projectDir, name), 'package.json', { throwIfMissing: false });

      if (pack) {
        service.name = pack.name;
        service.description = pack.description;
        service.version = pack.version;
        service.type = pack.mimik?.type;
        service.APIVersion = pack.swaggerFile?.version;
        service.APIAccount = pack.swaggerFile?.account;
      }
      if (isDirectorySync(join(projectDir, name, 'local'))) {
        const startExample = getFileByNameSync(join(projectDir, name, 'local'), 'start-example.json', { throwIfMissing: false, jsonParse: parse });

        if (startExample) {
          service.port = startExample.SERVER_PORT;
          service.protocol = startExample.SERVER_PUBLIC_PROTOCOL;
          service.domainName = startExample.SERVER_PUBLIC_DOMAIN_NAME;
        }
      }
    }
    const { description } = await prompts({ type: 'text', name: 'description', message: 'description of the service', initial: service.description || null });
    const { type } = await prompts({ type: 'text', name: 'type', message: 'type of the service in mST', initial: service.type || null });
    const { port } = await prompts({ type: 'number', name: 'port', message: 'service port', initial: service.port || null });
    const { protocol } = await prompts({ type: 'text', name: 'protocol', message: 'service public protocol', initial: service.protocol || 'https:' });
    const { domainName } = await prompts({ type: 'text', name: 'domainName', message: 'service public domain name (blank = null)', initial: service.domainName || null, format: val => val === ' ' ? null : val });
    Object.assign(newService, { name, description, type, port, domainName, protocol, old: service });
  }
  return newService;
};

export const createCustomerConfig = async (dir, homeDir, projectDir) => {
  const existingConfig = getFileByNameSync(projectDir, 'customerConfig.json', { throwIfMissing: false });
  const configName = join(dir, 'customerConfig.json');
  let configContent = existingConfig;
  let customerName;
  let account;
  let dirName;
  let confirm = true;

  if (existingConfig) {
    ({ confirm } = await prompts({ type: 'confirm', name: 'confirm', message: `Customer config for ${color.green.bold(existingConfig.name)} version ${color.green.bold(existingConfig.version)} already exist, overwriting?` }));
  }
  if (confirm) {
    const { location } = await prompts({
      type: 'select',
      name: 'location',
      message: 'Location of the customer config',
      choices: [
        { title: 'File', value: 'filename' },
        { title: 'Bitbucket', value: 'bitbucket' },
      ],
    });
    switch (location) {
      case 'filename': {
        ({ dirName } = await prompts({ type: 'text', name: 'dirName', message: 'Customer config directory', initial: PROJECT_DIR }));
        configContent = getFileByNameSync(homeDir, join(dirName, 'customerConfig.json'));
        break;
      }
      case 'bitbucket': {
        ({ customerName } = await prompts({ type: 'text', name: 'customerName', message: 'Customer name', initial: null }));
        ({ account } = await prompts({ type: 'text', name: 'account', message: 'API account', initial: 'mimiktech' }));
        const key = await configureFile(join(projectDir, 'key.json'), configFiles.keyBitbucket, { nonInteractive: false, jsonParse: parse });
        const username = key.values['bitbucket.username'];
        const password = key.values['bitbucket.password'];

        if (!username || !password) throw new Error('missing values to log to bitbucket');
        try {
          const requestOpts = {
            ...opts,
            headers: { ...opts.headers, Authorization: `Basic ${Base64.encode(`${username}:${password}`)}` },
            url: `${API_PROVIDER_BITBUCKET}/${account}/${customerName}-configs/${API_SOURCE}/develop/mst-config.json`,
          };
          configContent = await rpRetry(requestOpts);
        }
        catch {
          const requestOpts = {
            ...opts,
            headers: { ...opts.headers, Authorization: `Basic ${Base64.encode(`${username}:${password}`)}` },
            url: `${API_PROVIDER_BITBUCKET}/${account}/${customerName}-configs/${API_SOURCE}/develop/template-mst-config.json`,
          };
          configContent = await rpRetry(requestOpts);
          configContent.name = customerName;
        }
        break;
      }
    }
  };
  return { configName, configContent };
};

const RESERVED = new Set([
  'break', 'case', 'catch', 'class', 'const', 'continue', 'debugger', 'default', 'delete', 'do', 'else',
  'export', 'extends', 'finally', 'for', 'function', 'if', 'import', 'in', 'instanceof', 'new', 'return',
  'super', 'switch', 'this', 'throw', 'try', 'typeof', 'var', 'void', 'while', 'with', 'yield', 'enum',
  'await', 'implements', 'package', 'protected', 'static', 'interface', 'private', 'public', 'let',
]);
const isValidIdentifier = str => /^[A-Za-z_$][A-Za-z0-9_$]*$/u.test(str) && !RESERVED.has(str);

const toTemplateData = (functionNames) => {
  const cleaned = functionNames.map((na) => {
    const name = String(na).trim();
    if (!isValidIdentifier(name)) throw new Error(`Invalid function name: "${name}"`);
    return name;
  });
  return {
    functions: cleaned.map((name, i, arr) => ({ name, last: i === arr.length - 1 })),
  };
};

export const getControllers = (APIContent, template) => {
  const controllerNames = {};
  const controllers = {};
  const { paths } = APIContent;

  Object.keys(paths).forEach((path) => {
    Object.keys(paths[path]).forEach((op) => {
      const controllerFile = paths[path][op]['x-swagger-router-controller'];

      if (controllerFile) {
        if (controllerNames[controllerFile]) controllerNames[controllerFile].push(paths[path][op].operationId);
        else controllerNames[controllerFile] = [paths[path][op].operationId];
      }
    });
  });
  Object.keys(controllerNames).forEach((name) => {
    if (controllerNames[name]) {
      const data = toTemplateData(controllerNames[name]);

      controllers[`${name}.js`] = render(template, data);
    }
  });
  return controllers;
};

export const dirExists = async (dirPath) => {
  try {
    await access(dirPath, constants.F_OK);
    const status = await stat(dirPath);
    return status.isDirectory();
  }
  catch {
    return false;
  }
};
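
Editor's note: getControllers() above groups operationIds by their x-swagger-router-controller tag and renders one controller file per group through Mustache. A minimal sketch of that flow follows; it is not part of the package, and the inline template is an assumption (the package's real template ships as scaffoldFiles/src/mustacheController.txt, whose contents are not shown in this diff).

// Hypothetical sketch, not part of the package.
import { getControllers } from './lib/helpers.js';

const api = {
  paths: {
    '/info': {
      get: { 'x-swagger-router-controller': 'info', operationId: 'getInfo' },
    },
    '/health': {
      get: { 'x-swagger-router-controller': 'info', operationId: 'getHealth' },
    },
  },
};

// Template data is { functions: [{ name, last }] }, so {{^last}} can place separators.
const template = [
  "import { {{#functions}}{{name}}{{^last}}, {{/last}}{{/functions}} } from '../processors/infoProcessor.js';",
  '',
  '{{#functions}}',
  'export const {{name}}Controller = (req, res) => {{name}}(req, res);',
  '{{/functions}}',
].join('\n');

const controllers = getControllers(api, template);
console.log(Object.keys(controllers)); // [ 'info.js' ]
console.log(controllers['info.js']);   // rendered controller source
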
package/lib/readFileSync.js
ADDED

@@ -0,0 +1,69 @@
import { basename, isAbsolute, parse, relative, resolve } from 'node:path';
import { existsSync, readFileSync, statSync } from 'node:fs';

/**
 * Return the UTF-8 content of a file by name inside baseDir (SYNC).
 *
 * Resolution when `name` has no extension:
 * 1) <name> (no extension)
 * 2) <name>.txt
 * 3) <name>.json
 * 4) <name>.js
 *
 * If `name` has an extension, it must be one of .txt, .json, .js.
 *
 * @param {string} baseDir
 * @param {string} name
 * @param {object} [options]
 * @param {boolean} [options.throwIfMissing=true]
 * @param {function} [options.jsonParse=JSON.parse] - JSON parser used to handle .json files
 * @returns {string|null} - file content, or null if not found and throwIfMissing=false
 */
export const getFileByNameSync = (baseDir, name, options = {}) => {
  const { throwIfMissing = true, jsonParse = JSON.parse } = options;
  if (!name) throw new Error('name is required');
  const base = resolve(baseDir || '');

  // Prevent path traversal & absolute paths
  const sanitized = name.replace(/^[\\/]+/u, '');
  const target = resolve(base, sanitized);
  const rel = relative(base, target);

  if (rel.startsWith('..') || isAbsolute(rel)) {
    throw new Error(`Invalid name (path traversal): ${name}`);
  }

  const allowedExts = ['.txt', '.json', '.js'];
  const parsed = parse(target);

  // Build candidate list
  const candidates = [];
  let ext;
  if (parsed.ext) {
    ext = parsed.ext.toLowerCase();
    if (!allowedExts.includes(ext)) {
      throw new Error(`Unsupported extension '${ext}' (allowed: .txt, .json, .js)`);
    }
    candidates.push(target);
  }
  else {
    candidates.push(target); // no extension
    candidates.push(`${target}.txt`);
    candidates.push(`${target}.json`);
    candidates.push(`${target}.js`);
  }

  for (const candidate of candidates) {
    if (existsSync(candidate) && statSync(candidate).isFile()) {
      const content = readFileSync(candidate, 'utf8');
      if (ext === '.json') return jsonParse(content);
      return content;
    }
  }

  if (throwIfMissing) {
    const tried = candidates.map(candidate => basename(candidate)).join(', ');
    throw new Error(`File not found for "${name}" in ${base} (tried: ${tried})`);
  }
  return null;
};
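
Editor's note: a minimal usage sketch for getFileByNameSync(), not part of the package. It assumes the process runs from the package root so the scaffoldFiles paths resolve; the comment-json parse option mirrors how lib/helpers.js reads start-example.json.

// Hypothetical sketch, not part of the package.
import { getFileByNameSync } from './lib/readFileSync.js';
import { parse } from 'comment-json';

// "setup" is tried as setup, setup.txt, setup.json, then setup.js.
const setupText = getFileByNameSync('scaffoldFiles/local', 'setup');

// An explicit .json name is parsed with the supplied parser.
const startExample = getFileByNameSync('scaffoldFiles/local', 'start-example.json', {
  throwIfMissing: false,
  jsonParse: parse,
});

console.log(typeof setupText, startExample?.SERVER_PORT);
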
package/lib/scaffold.js
ADDED
@@ -0,0 +1,108 @@
import { PERM_DIR, PERM_FILE } from './common.js';
import { access, mkdir, rm, writeFile } from 'node:fs/promises';
import { join, resolve } from 'node:path';

const normalizeContent = (content) => {
  if (content === null) return '';
  return content;
};

/* eslint-disable no-await-in-loop */
const createNodes = async (base, nodes, ctx) => {
  for (const node of nodes) {
    if (typeof node === 'string') {
      const dirPath = join(base, node);
      await mkdir(dirPath, { recursive: true, mode: ctx.dirMode });
      ctx.createdDirs.push(dirPath);
    }
    else {
      if (!node || typeof node !== 'object' || !node.dir) {
        throw new Error(`Invalid plan entry: ${JSON.stringify(node)}`);
      }

      const dirPath = join(base, node.dir);
      await mkdir(dirPath, { recursive: true, mode: ctx.dirMode });
      ctx.createdDirs.push(dirPath);

      if (Array.isArray(node.files)) {
        for (const file of node.files) {
          if (!file || typeof file.name !== 'string' || !file.name) {
            throw new Error(`Invalid file entry under ${node.dir}: ${JSON.stringify(file)}`);
          }
          const filePath = join(dirPath, file.name);
          await writeFile(filePath, normalizeContent(file.content), { mode: ctx.fileMode, flag: 'wx' });
          ctx.createdFiles.push(filePath);
        }
      }

      if (Array.isArray(node.dirs) && node.dirs.length) {
        await createNodes(dirPath, node.dirs, ctx);
      }
    }
  }
};
/* eslint-enable no-await-in-loop */

const exists = async (node) => {
  try {
    await access(node);
    return true;
  }
  catch { return false; }
};

/**
 * Create a project directory with subdirectories and files.
 *
 * If the root directory already exists, throws an Error.
 *
 * @param {string} rootName - Name or path of the root directory to create.
 * @param {Array<Object|string>} plan - Structure to create under the root.
 *   - string: treated as a subdirectory name
 *   - object: { dir: string, files?: Array<{name:string, content?:string|Uint8Array}>, dirs?: Array<Object|string> }
 * @param {object} [options]
 * @param {number} [options.dirMode=0o755] - Mode for directories
 * @param {number} [options.fileMode=0o644] - Mode for files
 * @param {boolean} [options.cleanupOnError=true] - Remove root if creation fails mid-way
 * @returns {Promise<{root:string, createdDirs:string[], createdFiles:string[]}>}
 */
export const scaffold = async (rootName, plan, options = {}) => {
  const dirMode = options.dirMode ?? PERM_DIR;
  const fileMode = options.fileMode ?? PERM_FILE;
  const cleanupOnError = options.cleanupOnError ?? true;

  if (!rootName || typeof rootName !== 'string') {
    throw new Error('rootName must be a non-empty string');
  }
  const root = resolve(rootName);

  if (await exists(root)) {
    throw new Error(`Directory already exists: ${root}`);
  }

  const createdDirs = [];
  const createdFiles = [];

  try {
    // Create root
    await mkdir(root, { recursive: false, mode: dirMode });
    createdDirs.push(root);

    // Normalize plan to array
    const nodes = Array.isArray(plan) ? plan : [plan];
    await createNodes(root, nodes, { dirMode, fileMode, createdDirs, createdFiles });

    return { root, createdDirs, createdFiles };
  }
  catch (err) {
    if (cleanupOnError) {
      try {
        await rm(root, { recursive: true, force: true });
      }
      catch {
        // ignore cleanup errors
      }
    }
    throw err;
  }
};
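
Editor's note: a minimal usage sketch for scaffold(), not part of the package; the project layout in the plan is illustrative only. Bare strings become subdirectories, objects describe a directory with optional files and nested dirs, and the whole root is removed again if any step fails (cleanupOnError defaults to true).

// Hypothetical sketch, not part of the package.
import { scaffold } from './lib/scaffold.js';

const plan = [
  'test', // bare string => plain subdirectory
  {
    dir: 'src',
    files: [{ name: 'index.js', content: "console.log('hello');\n" }],
    dirs: ['controllers', 'processors'],
  },
  {
    dir: 'local',
    files: [{ name: 'start-example.json', content: '{}\n' }],
  },
];

try {
  const { root, createdDirs, createdFiles } = await scaffold('my-service', plan);
  console.log(root, createdDirs.length, createdFiles.length);
}
catch (err) {
  // Thrown when my-service already exists, or when any mkdir/writeFile step fails.
  console.error(err.message);
}
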
package/lib/writeFileSync.js
ADDED

@@ -0,0 +1,70 @@
import { PERM_DIR, PERM_FILE, TAB } from './common.js';
import { dirname, extname, isAbsolute, relative, resolve } from 'node:path';
import { mkdirSync, writeFileSync } from 'node:fs';

/**
 * Write a text file under baseDir.
 * - Allowed extensions: (none => txt), .txt, .json, .js
 * - Optionally throw if the file already exists.
 *
 * @param {string} baseDir
 * @param {string} name // e.g. "welcome", "config.json", "script.js"
 * @param {string|object} content // objects auto-JSON stringified for .json
 * @param {object} [options]
 * @param {boolean} [options.throwIfExists=false] - If true, error when file exists
 * @param {boolean} [options.createDirs=true] - Create parent dirs if missing
 * @param {number} [options.mode=0o644] - File permissions
 * @param {number} [options.jsonIndent=2] - Spaces for JSON pretty print
 * @param {boolean} [options.newlineAtEof=true] - Ensure trailing newline
 * @param {function} [options.jsonStringify=JSON.stringify] - JSON stringifier used to handle .json files
 * @returns {string} - absolute path written
 */
export const writeFileByNameSync = (baseDir, name, content, options = {}) => {
  const {
    throwIfExists = false,
    createDirs = true,
    mode = PERM_FILE,
    jsonIndent = TAB,
    newlineAtEof = true,
    jsonStringify = JSON.stringify,
  } = options;

  if (!baseDir || !name) throw new Error('baseDir and name are required');

  // Normalize & protect against path traversal
  const base = resolve(baseDir);
  const sanitized = name.replace(/^[\\/]+/u, '');
  const target = resolve(base, sanitized);
  const rel = relative(base, target);
  if (rel.startsWith('..') || isAbsolute(rel)) {
    throw new Error(`Invalid name (path traversal): ${name}`);
  }

  // Validate extension
  const ext = extname(target).toLowerCase();
  const allowed = ['', '.txt', '.json', '.js'];
  if (!allowed.includes(ext)) {
    throw new Error(
      `Unsupported extension '${ext || '(none)'}' (allowed: no ext, .txt, .json, .js)`,
    );
  }

  // Ensure parent directories
  if (createDirs) mkdirSync(dirname(target), { recursive: true, mode: PERM_DIR });

  // Prepare content
  let data;
  if (ext === '.json' && typeof content !== 'string') {
    data = jsonStringify(content, null, jsonIndent);
  }
  else {
    data = String(content ?? '');
  }
  if (newlineAtEof && !data.endsWith('\n')) data += '\n';

  // Write; 'wx' fails if file exists, 'w' overwrites
  const flag = throwIfExists ? 'wx' : 'w';
  writeFileSync(target, data, { encoding: 'utf8', mode, flag });

  return target;
};
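
Editor's note: a minimal usage sketch for writeFileByNameSync(), not part of the package; paths and contents are illustrative only. Non-string content written to a .json target goes through the configured stringifier, and throwIfExists switches the write flag from 'w' to 'wx'.

// Hypothetical sketch, not part of the package.
import { writeFileByNameSync } from './lib/writeFileSync.js';

// Object content + .json target => stringified with the configured indent.
const configPath = writeFileByNameSync('my-service/local', 'start-example.json', {
  SERVER_PORT: 8080,
  SERVER_PUBLIC_PROTOCOL: 'https:',
});

// Plain text, refusing to overwrite an existing file.
const notePath = writeFileByNameSync('my-service', 'NOTES.txt', 'Generated service\n', {
  throwIfExists: true,
});

console.log(configPath, notePath);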