@asyncapi/cli 3.0.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/oclif.manifest.json
CHANGED
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@asyncapi/cli",
|
|
3
3
|
"description": "All in one CLI for all AsyncAPI tools",
|
|
4
|
-
"version": "3.0.
|
|
4
|
+
"version": "3.0.1",
|
|
5
5
|
"author": "@asyncapi",
|
|
6
6
|
"bin": {
|
|
7
7
|
"asyncapi": "./bin/run_bin"
|
|
@@ -94,6 +94,7 @@
|
|
|
94
94
|
"/bin",
|
|
95
95
|
"/lib",
|
|
96
96
|
"/assets",
|
|
97
|
+
"/scripts",
|
|
97
98
|
"/npm-shrinkwrap.json",
|
|
98
99
|
"/oclif.manifest.json"
|
|
99
100
|
],
|
|
@@ -174,7 +175,7 @@
|
|
|
174
175
|
"createhook": "oclif generate hook myhook --event=command_not_found",
|
|
175
176
|
"createhookinit": "oclif generate hook inithook --event=init",
|
|
176
177
|
"action:docker:build": "docker build -f github-action/Dockerfile -t asyncapi/github-action-for-cli:latest .",
|
|
177
|
-
"action:test": "cd github-action && make test"
|
|
178
|
+
"action:test": "npm run build && cd github-action && make test"
|
|
178
179
|
},
|
|
179
180
|
"types": "lib/index.d.ts"
|
|
180
181
|
}
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-var-requires */
const { spawnSync } = require('child_process');
const os = require('os');
const fs = require('fs');
const path = require('path');

const allowedShells = ['zsh', 'bash'];

// Helper function to find the first existing file among a list of paths
// (relative to the user's home directory). Returns null if none exist.
function findExistingFile(possibleFiles) {
  for (const file of possibleFiles) {
    const fullPath = path.join(os.homedir(), file);
    if (fs.existsSync(fullPath)) {
      return fullPath;
    }
  }
  return null;
}

// Appends the generated autocomplete snippet to the rc file unless an
// identical snippet is already present, so repeated installs stay idempotent.
// Shared by every shell config (the zsh and bash behavior is identical).
function appendAutocompleteConfig(output, rcFile) {
  const configContent = fs.existsSync(rcFile) ? fs.readFileSync(rcFile, 'utf-8') : '';

  if (configContent.includes(output.trim())) {
    console.log(`✅ Autocomplete is already configured in ${rcFile}. Skipping addition.`);
  } else {
    fs.appendFileSync(rcFile, `\n# AsyncAPI CLI Autocomplete\n${output}\n`);
    console.log(`✅ Autocomplete configuration added to ${rcFile}.`);
  }
}

// Resolve the bash rc file exactly once so rcFile/detectFile/postMessage all
// agree (the original scanned the home directory twice and patched
// postMessage after construction).
const bashRcFile = findExistingFile(['.bashrc', '.bash_profile', '.profile']);

const shellConfigs = {
  zsh: {
    rcFile: path.join(os.homedir(), '.zshrc'),
    detectFile: path.join(os.homedir(), '.zshrc'),
    postMessage: 'Run: source ~/.zshrc',
    action: appendAutocompleteConfig,
  },
  bash: {
    rcFile: bashRcFile || path.join(os.homedir(), '.bashrc'),
    detectFile: bashRcFile,
    postMessage: bashRcFile ? `Run: source ${bashRcFile}` : 'Run: source ~/.bashrc',
    action: appendAutocompleteConfig,
  },
};

// Returns the config for `shell`, throwing for anything outside allowedShells.
function getShellConfig(shell) {
  if (!allowedShells.includes(shell)) {
    throw new Error(`Unsupported shell: ${shell}. Autocomplete only supports zsh and bash.`);
  }
  return shellConfigs[shell];
}

// Lists every supported shell whose detect file exists in the home directory.
function detectShell() {
  const detectedShells = [];
  for (const [shell, config] of Object.entries(shellConfigs)) {
    if (config.detectFile && fs.existsSync(config.detectFile)) {
      detectedShells.push(shell);
    }
  }
  return detectedShells;
}

// Resolves `potentialPath` to an executable: paths containing a separator are
// checked on disk; bare command names are resolved via `command -v` in a
// POSIX shell. Returns the resolved path or null.
// NOTE(review): potentialPath is interpolated into a shell command — safe
// here because callers only pass values from a fixed internal list.
function checkPotentialPath(potentialPath) {
  if (potentialPath.includes(path.sep)) {
    if (fs.existsSync(potentialPath)) {
      return potentialPath;
    }
  } else {
    const result = spawnSync('/bin/sh', ['-c', `command -v ${potentialPath}`], {
      encoding: 'utf-8',
      stdio: 'pipe',
    });
    if (result.status === 0 && result.stdout) {
      return result.stdout.trim().split('\n')[0];
    }
  }
  return null;
}

// Locates the AsyncAPI CLI executable, preferring local checkouts, then the
// project's node_modules, then anything on PATH. Throws if none is found.
function findCliExecutable() {
  const possiblePaths = [
    path.resolve('./bin/run'),
    path.resolve('../bin/run'),
    path.resolve('./node_modules/.bin/asyncapi'),
    'asyncapi',
  ];

  for (const potentialPath of possiblePaths) {
    try {
      const foundPath = checkPotentialPath(potentialPath);
      if (foundPath) {
        console.log(`Found CLI executable at: ${foundPath}`);
        return foundPath;
      }
    } catch (error) {
      // Best-effort probing: a failure on one candidate must not abort the scan.
      console.warn(`⚠️ Ignored error while checking path ${potentialPath}: ${error.message}`);
    }
  }

  throw new Error('CLI executable not found. Ensure AsyncAPI CLI is installed.');
}

// Runs `asyncapi autocomplete script <shell>` and returns its stdout.
// Throws with the captured stderr when the command fails or emits nothing.
function generateAutocompleteScript(shell) {
  const executablePath = findCliExecutable();
  const result = spawnSync(executablePath, ['autocomplete', 'script', shell], {
    encoding: 'utf-8',
    stdio: 'pipe',
  });
  if (result.status !== 0 || result.error) {
    throw new Error(
      `Autocomplete setup for ${shell} failed: ${result.stderr || result.error?.message || 'Unknown error'}`
    );
  }
  const output = result.stdout;
  if (!output || output.trim() === '') {
    throw new Error(`No autocomplete script generated for ${shell}.`);
  }
  return output;
}

// Generates and installs the autocomplete snippet for one shell.
// Errors are reported but never thrown: a failed autocomplete setup must not
// fail the surrounding npm lifecycle script.
function setupAutocomplete(shell) {
  if (!allowedShells.includes(shell)) {
    console.error(`❌ Autocomplete only supports zsh and bash. Skipping setup for ${shell}.`);
    return;
  }

  try {
    const config = getShellConfig(shell);
    console.log(`🔧 Generating autocomplete script for ${shell}...`);
    const output = generateAutocompleteScript(shell);
    config.action(output, config.rcFile);
    console.log(`✅ Autocomplete configured for ${shell}. ${config.postMessage}`);
  } catch (error) {
    console.error(`❌ Failed to setup autocomplete for ${shell}: ${error.message}`);
  }
}

// Start
const shells = detectShell();
if (shells.length) {
  for (const shell of shells) {
    setupAutocomplete(shell);
  }
} else {
  console.log('⚠️ Shell not detected or unsupported. Autocomplete setup skipped.');
}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-var-requires */

const fs = require('fs');
const unzipper = require('unzipper');
const path = require('path');

const { Parser } = require('@asyncapi/parser/cjs');
const { AvroSchemaParser } = require('@asyncapi/avro-schema-parser');
const { OpenAPISchemaParser } = require('@asyncapi/openapi-schema-parser');
const { RamlDTSchemaParser } = require('@asyncapi/raml-dt-schema-parser');
const { pipeline } = require('stream');
const { promisify } = require('util');

const streamPipeline = promisify(pipeline);

// Parser configured with every schema format the bundled examples may use.
const parser = new Parser({
  schemaParsers: [
    AvroSchemaParser(),
    OpenAPISchemaParser(),
    RamlDTSchemaParser(),
  ]
});

const SPEC_EXAMPLES_ZIP_URL = 'https://github.com/asyncapi/spec/archive/refs/heads/master.zip';
const EXAMPLE_DIRECTORY = path.join(__dirname, '../assets/examples');
const TEMP_ZIP_NAME = 'spec-examples.zip';

/**
 * Downloads the spec repository ZIP into TEMP_ZIP_NAME.
 *
 * Rewritten from the explicit-Promise form: the original wrapped fetch in a
 * `new Promise` inside a synchronous try/catch that could never catch the
 * async failure. Errors now simply propagate to the caller.
 */
const fetchAsyncAPIExamplesFromExternalURL = async () => {
  const res = await fetch(SPEC_EXAMPLES_ZIP_URL);
  if (res.status !== 200) {
    throw new Error(`Failed to fetch examples from ${SPEC_EXAMPLES_ZIP_URL}`);
  }

  const file = fs.createWriteStream(TEMP_ZIP_NAME);
  await streamPipeline(res.body, file);

  console.log('Fetched ZIP file');
};

/**
 * Extracts every `examples/*.yml` entry from the downloaded ZIP into
 * EXAMPLE_DIRECTORY; all other entries are drained and discarded.
 */
const unzipAsyncAPIExamples = async () => {
  return new Promise((resolve, reject) => {
    if (!fs.existsSync(EXAMPLE_DIRECTORY)) {
      // recursive: also creates missing parent directories instead of throwing.
      fs.mkdirSync(EXAMPLE_DIRECTORY, { recursive: true });
    }

    fs.createReadStream(TEMP_ZIP_NAME)
      .pipe(unzipper.Parse())
      // NOTE(review): the 'close' handler may fire while async 'entry'
      // handlers are still buffering; writes are synchronous once buffered,
      // but this ordering is assumed from unzipper's behavior — verify.
      .on('entry', async (entry) => {
        const fileName = entry.path;
        if (fileName.includes('examples/') && fileName.includes('.yml') && entry.type === 'File') {
          const fileContent = await entry.buffer();
          const fileNameWithExtension = fileName.split('examples/')[1];
          fs.writeFileSync(path.join(EXAMPLE_DIRECTORY, fileNameWithExtension), fileContent.toString());
        } else {
          entry.autodrain();
        }
      }).on('close', () => {
        console.log('Unzipped all examples from ZIP');
        resolve();
      }).on('error', (error) => {
        reject(new Error(`Error in unzipping from ZIP: ${error.message}`));
      });
  });
};

/**
 * Parses every extracted example and writes examples.json: a list of
 * `{ name, value }` entries (name includes server protocols when present),
 * sorted by display name. Examples that fail to parse are skipped.
 */
const buildCLIListFromExamples = async () => {
  const files = fs.readdirSync(EXAMPLE_DIRECTORY);
  const examples = files.filter(file => file.includes('.yml')).sort();

  const buildExampleList = examples.map(async example => {
    const examplePath = path.join(EXAMPLE_DIRECTORY, example);
    const exampleContent = fs.readFileSync(examplePath, { encoding: 'utf-8' });

    try {
      const { document } = await parser.parse(exampleContent);
      // Failed for some reason to parse this spec file (document is undefined), ignore for now
      if (!document) {
        return;
      }

      const title = document.info().title();
      const protocols = listAllProtocolsForFile(document);
      return {
        name: protocols ? `${title} - (protocols: ${protocols})` : title,
        value: example
      };
    } catch (error) {
      console.error(error);
    }
  });

  const exampleList = (await Promise.all(buildExampleList)).filter(item => !!item);
  const orderedExampleList = exampleList.sort((a, b) => a.name.localeCompare(b.name));

  fs.writeFileSync(path.join(EXAMPLE_DIRECTORY, 'examples.json'), JSON.stringify(orderedExampleList, null, 4));
};

/**
 * Returns a comma-separated list of the document's server protocols,
 * or '' when the document declares no servers.
 */
const listAllProtocolsForFile = (document) => {
  const servers = document.servers();
  if (servers.length === 0) {
    return '';
  }

  return servers.all().map(server => server.protocol()).join(',');
};

// Removes the temporary ZIP once extraction is complete.
const tidyUp = async () => {
  fs.unlinkSync(TEMP_ZIP_NAME);
};

(async () => {
  try {
    await fetchAsyncAPIExamplesFromExternalURL();
    await unzipAsyncAPIExamples();
    await buildCLIListFromExamples();
    await tidyUp();
  } catch (error) {
    // The original left this promise floating: any failure became an
    // unhandled rejection. Report it and fail the script explicitly.
    console.error(error);
    process.exitCode = 1;
  }
})();
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-var-requires */

const { rename, access, mkdir } = require('fs').promises;
const packageJson = require('../package.json');
const path = require('path');
const simpleGit = require('simple-git');
const git = simpleGit({baseDir: process.cwd()});

// Returns true when checkPath exists (fs access does not throw).
async function fileExists(checkPath) {
  try {
    await access(checkPath);
    return true;
  } catch (e) {
    return false;
  }
}

// Renames generatedPath to newPath, silently skipping artifacts that were
// not produced by this build (not every platform target is always built).
async function checkAndRenameFile(generatedPath, newPath) {
  if (await fileExists(generatedPath)) {
    await rename(generatedPath, newPath);
  }
}

// Ensures directoryPath exists. `recursive: true` is a no-op when the
// directory already exists and covers missing parents, removing the
// exists-then-create race the original check had.
async function createDirectory(directoryPath) {
  await mkdir(directoryPath, { recursive: true });
}

// Renames the generated .deb to the stable name dist/deb/asyncapi.deb.
async function renameDeb({version, name, sha}) {
  const dist = 'dist/deb';

  // deb package naming convention: https://github.com/oclif/oclif/blob/fb5da961f925fa0eba5c5b05c8cee0c9bd156c00/src/upload-util.ts#L51
  const generatedPath = path.resolve(dist, `${name}_${version}.${sha}-1_amd64.deb`);
  const newPath = path.resolve(dist, 'asyncapi.deb');
  await checkAndRenameFile(generatedPath, newPath);
}

// Moves the generated linux tarball into dist/tar/asyncapi.tar.gz.
async function renameTar({version, name, sha}) {
  const dist = 'dist';

  const generatedPath = path.resolve(dist, `${name}-v${version}-${sha}-linux-x64.tar.gz`);
  // for tarballs, the files are generated in `dist/` directory.
  // Creates a new `tar` directory(`dist/tar`), and moves the generated tarball inside that directory.
  const tarDirectory = path.resolve(dist, 'tar');
  await createDirectory(tarDirectory);
  const newPath = path.resolve(tarDirectory, 'asyncapi.tar.gz');
  await checkAndRenameFile(generatedPath, newPath);
}

// Renames the generated Windows installer to dist/win32/asyncapi.<arch>.exe.
async function renameWindows({version, name, sha, arch}) {
  const dist = 'dist/win32';

  const generatedPath = path.resolve(dist, `${name}-v${version}-${sha}-${arch}.exe`);
  const newPath = path.resolve(dist, `asyncapi.${arch}.exe`);
  await checkAndRenameFile(generatedPath, newPath);
}

// Renames the generated macOS installer to dist/macos/asyncapi.<arch>.pkg.
async function renamePkg({version, name, sha, arch}) {
  const dist = 'dist/macos';

  const generatedPath = path.resolve(dist, `${name}-v${version}-${sha}-${arch}.pkg`);
  const newPath = path.resolve(dist, `asyncapi.${arch}.pkg`);
  await checkAndRenameFile(generatedPath, newPath);
}

// Renames every platform artifact to its stable, versionless name.
// The deb version intentionally drops any prerelease suffix (split on '-').
async function renamePackages() {
  const version = packageJson.version;
  const name = 'asyncapi';
  const sha = await git.revparse(['--short', 'HEAD']);
  await renameDeb({version: version.split('-')[0], name, sha});
  await renamePkg({version, name, sha, arch: 'x64'});
  await renamePkg({version, name, sha, arch: 'arm64'});
  await renameWindows({version, name, sha, arch: 'x64'});
  await renameWindows({version, name, sha, arch: 'x86'});
  await renameTar({version, name, sha});
}

renamePackages().catch((error) => {
  // The original call was a floating promise: a git or fs failure surfaced
  // only as an unhandled rejection. Report it and fail the script.
  console.error(error);
  process.exitCode = 1;
});
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-var-requires */
const {writeFile, readFile} = require('fs').promises;

// Define the paths to the README and usage files
const README_PATH = './scripts/README.md'; // File path for the generated README file
const USAGE_PATH = './docs/usage.md'; // File path for the usage documentation file

// Polling parameters for waiting on oclif to populate the README.
// The cap prevents the original unbounded loop, which would hang the build
// forever if the commands section never appeared.
const MAX_ATTEMPTS = 20;
const RETRY_DELAY_MS = 3000;

const header = `---
title: 'Usage'
weight: 40
---

<!--

This file is automatically generated from updateUsageDocs.js script. In package.json in line 158-161 lines the following steps has been executed in order to run this script successfully -

* generate:readme:create: It creates the initial content for the README file by printing the usage and commands tags using printf and redirects the output to scripts/README.md file.
* generate:readme:commands: It changes the directory to the scripts folder and executes the oclif readme command. This command generates the usage and commands sections based on the CLI commands and updates the content in the scripts/README.md file.
* generate:assets: This script combines the two previously mentioned scripts (generate:readme:toc and generate:commands) to generate the necessary assets, such as the README file and usage documentation.
* generate:commands: This script executes the following steps:
- Runs the generate:readme:create script to create the initial content for the README file.
- Executes the generate:readme:commands script to generate the usage and commands sections based on the CLI commands.
- Runs the updateUsageDocs.js script using Node.js to update the usage documentation file with the contents of the generated README file.
- Deletes the scripts/README.md file using the rimraf command.

-->

The AsyncAPI CLI makes it easier to work with AsyncAPI documents.
`;

// Define an async function to write the header and the README contents to the usage documentation file
async function run() {
  try {
    await writeFile(USAGE_PATH, header);
    const readmeContents = await readContents();
    // Append the contents of the README file to the usage documentation file
    await writeFile(USAGE_PATH, readmeContents, { flag: 'a' });
  } catch (e) {
    console.error(e);
  }
}

run();

/**
 * Polls README_PATH until the section between the oclif markers
 * <!-- commands --> and <!-- commandsstop --> is non-empty, then returns the
 * full README contents. Throws after MAX_ATTEMPTS so the script cannot spin
 * forever; the retry delay only happens when another attempt will follow
 * (the original slept 3 s even after the content was found).
 */
async function readContents() {
  let readmeContents;
  let commandsContent = '';

  for (let attempt = 1; commandsContent.length === 0; attempt++) {
    readmeContents = await readFile(README_PATH, 'utf8');

    // Check if the content between <!-- commands --> and <!-- commandsstop --> is empty
    const commandsStartText = '<!-- commands -->';
    const commandStartIndex = readmeContents.indexOf(commandsStartText);
    const commandStopIndex = readmeContents.indexOf('<!-- commandsstop -->');
    //cutting the content between the above mentioned tags, removing white spaces and checking if there is some text as it will mean text was added by oclif
    commandsContent = readmeContents.slice(commandStartIndex + commandsStartText.length, commandStopIndex).trim();

    if (commandsContent.length === 0) {
      if (attempt >= MAX_ATTEMPTS) {
        throw new Error(`No content between <!-- commands --> and <!-- commandsstop --> after ${MAX_ATTEMPTS} attempts.`);
      }
      console.log('No content between <!-- commands --> and <!-- commandsstop -->. Trying again...');
      await delay(RETRY_DELAY_MS); // wait only when we are actually retrying
    } else {
      console.log('Content found!');
    }
  }

  return readmeContents;
}

// Returns a promise that resolves after ms milliseconds.
function delay(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}
|