@asyncapi/cli 3.0.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,16 @@
1
+ import Command from '../../core/base';
2
+ export default class PreviewStudio extends Command {
3
+ static readonly description = "starts a new local instance of Studio in minimal state bundling all the refs of the schema file and with no editing allowed.";
4
+ static readonly flags: {
5
+ help: import("@oclif/core/lib/interfaces").BooleanFlag<void>;
6
+ port: import("@oclif/core/lib/interfaces").OptionFlag<number | undefined, import("@oclif/core/lib/interfaces").CustomOptions>;
7
+ base: import("@oclif/core/lib/interfaces").OptionFlag<string | undefined, import("@oclif/core/lib/interfaces").CustomOptions>;
8
+ baseDir: import("@oclif/core/lib/interfaces").OptionFlag<string | undefined, import("@oclif/core/lib/interfaces").CustomOptions>;
9
+ xOrigin: import("@oclif/core/lib/interfaces").BooleanFlag<boolean>;
10
+ suppressLogs: import("@oclif/core/lib/interfaces").BooleanFlag<boolean>;
11
+ };
12
+ static readonly args: {
13
+ 'spec-file': import("@oclif/core/lib/interfaces").Arg<string, Record<string, unknown>>;
14
+ };
15
+ run(): Promise<void>;
16
+ }
@@ -0,0 +1,38 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const tslib_1 = require("tslib");
4
+ const core_1 = require("@oclif/core");
5
+ const base_1 = tslib_1.__importDefault(require("../../core/base"));
6
+ const preview_flags_1 = require("../../core/flags/start/preview.flags");
7
+ const SpecificationFile_1 = require("../../core/models/SpecificationFile");
8
+ const Preview_1 = require("../../core/models/Preview");
9
+ class PreviewStudio extends base_1.default {
10
+ run() {
11
+ return tslib_1.__awaiter(this, void 0, void 0, function* () {
12
+ var _a;
13
+ const { args, flags } = yield this.parse(PreviewStudio);
14
+ let filePath = (_a = args['spec-file']) !== null && _a !== void 0 ? _a : flags.file;
15
+ const previewPort = flags.port;
16
+ if (!filePath) {
17
+ filePath = ((yield (0, SpecificationFile_1.load)()).getFilePath());
18
+ this.log(`Loaded the specification from: ${filePath}`);
19
+ }
20
+ try {
21
+ this.specFile = yield (0, SpecificationFile_1.load)(filePath);
22
+ }
23
+ catch (error) {
24
+ if (filePath) {
25
+ this.error(error);
26
+ }
27
+ }
28
+ this.metricsMetadata.port = previewPort;
29
+ (0, Preview_1.startPreview)(filePath, flags.base, flags.baseDir, flags.xOrigin, flags.suppressLogs, previewPort);
30
+ });
31
+ }
32
+ }
33
+ PreviewStudio.description = 'starts a new local instance of Studio in minimal state bundling all the refs of the schema file and with no editing allowed.';
34
+ PreviewStudio.flags = (0, preview_flags_1.previewFlags)();
35
+ PreviewStudio.args = {
36
+ 'spec-file': core_1.Args.string({ description: 'the path to the file to be opened with studio or context name', required: true }),
37
+ };
38
+ exports.default = PreviewStudio;
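Taken together with the manifest entry added further down (command id `start:preview`, required `spec-file` argument), the class above surfaces as `asyncapi start preview <spec-file>` with the port/base/baseDir/xOrigin/suppressLogs flags defined in preview.flags.js below; this reading is inferred from the diff itself rather than from the package's documentation.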
@@ -0,0 +1,8 @@
1
+ export declare const previewFlags: () => {
2
+ help: import("@oclif/core/lib/interfaces").BooleanFlag<void>;
3
+ port: import("@oclif/core/lib/interfaces").OptionFlag<number | undefined, import("@oclif/core/lib/interfaces").CustomOptions>;
4
+ base: import("@oclif/core/lib/interfaces").OptionFlag<string | undefined, import("@oclif/core/lib/interfaces").CustomOptions>;
5
+ baseDir: import("@oclif/core/lib/interfaces").OptionFlag<string | undefined, import("@oclif/core/lib/interfaces").CustomOptions>;
6
+ xOrigin: import("@oclif/core/lib/interfaces").BooleanFlag<boolean>;
7
+ suppressLogs: import("@oclif/core/lib/interfaces").BooleanFlag<boolean>;
8
+ };
@@ -0,0 +1,15 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.previewFlags = void 0;
4
+ const core_1 = require("@oclif/core");
5
+ const previewFlags = () => {
6
+ return {
7
+ help: core_1.Flags.help({ char: 'h' }),
8
+ port: core_1.Flags.integer({ char: 'p', description: 'port in which to start Studio in the preview mode' }),
9
+ base: core_1.Flags.string({ char: 'b', description: 'Path to the file which will act as a base. This is required when some properties need to be overwritten while bundling with the file.' }),
10
+ baseDir: core_1.Flags.string({ char: 'd', description: 'One relative/absolute path to directory relative to which paths to AsyncAPI Documents that should be bundled will be resolved.' }),
11
+ xOrigin: core_1.Flags.boolean({ char: 'x', description: 'Pass this switch to generate properties "x-origin" that will contain historical values of dereferenced "$ref"s.' }),
12
+ suppressLogs: core_1.Flags.boolean({ char: 'l', description: 'Pass this to suppress the detailed error logs.', default: false })
13
+ };
14
+ };
15
+ exports.previewFlags = previewFlags;
@@ -0,0 +1,2 @@
1
+ export declare const DEFAULT_PORT = 3210;
2
+ export declare function startPreview(filePath: string, base: string | undefined, baseDirectory: string | undefined, xOrigin: boolean | undefined, suppressLogs: boolean | undefined, port?: number): void;
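The declaration above fixes the public surface of the new preview model. As a minimal sketch, assuming the compiled module is reachable at lib/core/models/Preview (which the require paths in the command above imply), it could be driven directly like this:

```js
// Sketch only: calling the preview model the same way the `start preview`
// command does after parsing its flags; error handling and flag plumbing omitted.
const { startPreview, DEFAULT_PORT } = require('@asyncapi/cli/lib/core/models/Preview');

// (filePath, base, baseDirectory, xOrigin, suppressLogs, port)
startPreview('./asyncapi.yaml', undefined, undefined, false, false, DEFAULT_PORT);
```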
@@ -0,0 +1,195 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DEFAULT_PORT = void 0;
4
+ exports.startPreview = startPreview;
5
+ const tslib_1 = require("tslib");
6
+ const specification_file_1 = require("../errors/specification-file");
7
+ const fs_1 = require("fs");
8
+ const bundler_1 = tslib_1.__importDefault(require("@asyncapi/bundler"));
9
+ const http_1 = require("http");
10
+ const ws_1 = require("ws");
11
+ const chokidar_1 = tslib_1.__importDefault(require("chokidar"));
12
+ const open_1 = tslib_1.__importDefault(require("open"));
13
+ const next_1 = tslib_1.__importDefault(require("next"));
14
+ const path_1 = tslib_1.__importDefault(require("path"));
15
+ const js_yaml_1 = tslib_1.__importDefault(require("js-yaml"));
16
+ const picocolors_1 = require("picocolors");
17
+ const package_json_1 = require("@asyncapi/studio/package.json");
18
+ const sockets = [];
19
+ const messageQueue = [];
20
+ const filePathsToWatch = new Set();
21
+ const defaultErrorMessage = 'error occurred while bundling files. use --detailedLog or -l flag to get more details.';
22
+ let bundleError = true;
23
+ exports.DEFAULT_PORT = 3210;
24
+ function isValidFilePath(filePath) {
25
+ return (0, fs_1.existsSync)(filePath);
26
+ }
27
+ // eslint-disable-next-line sonarjs/cognitive-complexity
28
+ function startPreview(filePath, base, baseDirectory, xOrigin, suppressLogs, port = exports.DEFAULT_PORT) {
29
+ if (filePath && !isValidFilePath(filePath)) {
30
+ throw new specification_file_1.SpecificationFileNotFound(filePath);
31
+ }
32
+ const baseDir = path_1.default.dirname(path_1.default.resolve(filePath));
33
+ (0, bundler_1.default)(filePath).then((doc) => {
34
+ if (doc) {
35
+ bundleError = false;
36
+ }
37
+ }).catch((err) => {
38
+ if (suppressLogs) {
39
+ console.log(defaultErrorMessage);
40
+ }
41
+ else {
42
+ console.log(err);
43
+ }
44
+ });
45
+ const studioPath = path_1.default.dirname(require.resolve('@asyncapi/studio/package.json'));
46
+ const app = (0, next_1.default)({
47
+ dev: false,
48
+ dir: studioPath,
49
+ conf: {
50
+ distDir: 'build',
51
+ },
52
+ });
53
+ const handle = app.getRequestHandler();
54
+ const wsServer = new ws_1.WebSocketServer({ noServer: true });
55
+ wsServer.on('connection', (socket) => {
56
+ sockets.push(socket);
57
+ sendQueuedMessages();
58
+ });
59
+ wsServer.on('close', (socket) => {
60
+ sockets.splice(sockets.findIndex(s => s === socket));
61
+ });
62
+ app.prepare().then(() => {
63
+ if (filePath && !bundleError) {
64
+ messageQueue.push(JSON.stringify({
65
+ type: 'preview:connected',
66
+ code: 'Preview server connected'
67
+ }));
68
+ sendQueuedMessages();
69
+ findPathsToWatchFromSchemaRef(filePath, baseDir);
70
+ filePathsToWatch.add(path_1.default.resolve(baseDir, filePath));
71
+ chokidar_1.default.watch([...filePathsToWatch]).on('all', (event) => {
72
+ switch (event) {
73
+ case 'add':
74
+ (0, bundler_1.default)([filePath], {
75
+ base,
76
+ baseDir: baseDirectory,
77
+ xOrigin,
78
+ }).then((initialDocument) => {
79
+ messageQueue.push(JSON.stringify({
80
+ type: 'preview:file:added',
81
+ code: (path_1.default.extname(filePath) === '.yaml' || path_1.default.extname(filePath) === '.yml') ?
82
+ initialDocument.yml() : initialDocument.string()
83
+ }));
84
+ sendQueuedMessages();
85
+ }).catch((e) => {
86
+ if (suppressLogs) {
87
+ console.log(defaultErrorMessage);
88
+ }
89
+ else {
90
+ console.log(e);
91
+ }
92
+ });
93
+ break;
94
+ case 'change':
95
+ (0, bundler_1.default)([filePath], {
96
+ base,
97
+ baseDir: baseDirectory,
98
+ xOrigin,
99
+ }).then((modifiedDocument) => {
100
+ messageQueue.push(JSON.stringify({
101
+ type: 'preview:file:changed',
102
+ code: (path_1.default.extname(filePath) === '.yaml' || path_1.default.extname(filePath) === '.yml') ?
103
+ modifiedDocument.yml() : modifiedDocument.string()
104
+ }));
105
+ sendQueuedMessages();
106
+ }).catch((error) => {
107
+ if (suppressLogs) {
108
+ console.log(defaultErrorMessage);
109
+ }
110
+ else {
111
+ console.log(error);
112
+ }
113
+ });
114
+ break;
115
+ case 'unlink':
116
+ messageQueue.push(JSON.stringify({
117
+ type: 'preview:file:deleted',
118
+ filePath,
119
+ }));
120
+ sendQueuedMessages();
121
+ break;
122
+ }
123
+ });
124
+ }
125
+ const server = (0, http_1.createServer)((req, res) => handle(req, res));
126
+ server.on('upgrade', (request, socket, head) => {
127
+ if (request.url === '/preview-server' && request.headers['origin'] === `http://localhost:${port}`) {
128
+ console.log('🔗 WebSocket connection established for the preview.');
129
+ wsServer.handleUpgrade(request, socket, head, (sock) => {
130
+ wsServer.emit('connection', sock, request);
131
+ });
132
+ }
133
+ else {
134
+ console.log('🔗 WebSocket connection not established.');
135
+ socket.destroy();
136
+ }
137
+ });
138
+ if (!bundleError) {
139
+ server.listen(port, () => {
140
+ const url = `http://localhost:${port}?previewServer=${port}&studio-version=${package_json_1.version}`;
141
+ console.log(`🎉 Connected to Preview Server running at ${(0, picocolors_1.blueBright)(url)}.`);
142
+ console.log(`🌐 Open this URL in your web browser: ${(0, picocolors_1.blueBright)(url)}`);
143
+ console.log(`🛑 If needed, press ${(0, picocolors_1.redBright)('Ctrl + C')} to stop the server.`);
144
+ if (filePath) {
145
+ for (const entry of filePathsToWatch) {
146
+ console.log(`👁️ Watching changes on file ${(0, picocolors_1.blueBright)(entry)}`);
147
+ }
148
+ }
149
+ else {
150
+ console.warn('Warning: No file was provided, and we couldn\'t find a default file (like "asyncapi.yaml" or "asyncapi.json") in the current folder. Starting Studio with a blank workspace.');
151
+ }
152
+ if (!bundleError) {
153
+ (0, open_1.default)(url);
154
+ }
155
+ }).on('error', (error) => {
156
+ console.error(`Failed to start server on port ${port}:`, error.message);
157
+ });
158
+ }
159
+ });
160
+ }
161
+ function sendQueuedMessages() {
162
+ while (messageQueue.length && sockets.length) {
163
+ const nextMessage = messageQueue.shift();
164
+ for (const socket of sockets) {
165
+ socket.send(nextMessage);
166
+ }
167
+ }
168
+ }
169
+ function isLocalRefAPath(key, value) {
170
+ return (typeof value === 'string' && key === '$ref' &&
171
+ (value.startsWith('.') || value.startsWith('./') ||
172
+ value.startsWith('../') || !value.startsWith('#')));
173
+ }
174
+ function findPathsToWatchFromSchemaRef(filePath, baseDir) {
175
+ if (filePath && !isValidFilePath(filePath)) {
176
+ throw new specification_file_1.SpecificationFileNotFound(filePath);
177
+ }
178
+ const document = js_yaml_1.default.load((0, fs_1.readFileSync)(filePath, 'utf-8'));
179
+ const stack = [document];
180
+ while (stack.length > 0) {
181
+ const current = stack.pop();
182
+ if (current === null || typeof current !== 'object') {
183
+ continue;
184
+ }
185
+ for (const [key, value] of Object.entries(current)) {
186
+ if (isLocalRefAPath(key, value)) {
187
+ const absolutePath = path_1.default.resolve(baseDir, value);
188
+ filePathsToWatch.add(absolutePath);
189
+ }
190
+ if (value !== null && typeof value === 'object') {
191
+ stack.push(value);
192
+ }
193
+ }
194
+ }
195
+ }
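For orientation, the server above only upgrades WebSocket requests to `/preview-server` whose origin is `http://localhost:<port>`, and it pushes JSON messages whose `type` is `preview:connected`, `preview:file:added`, `preview:file:changed`, or `preview:file:deleted`. A hypothetical client for that protocol, reusing the same `ws` dependency, might look like this (sketch only; Studio itself is the real consumer):

```js
// Hypothetical consumer of the preview server's WebSocket messages.
const WebSocket = require('ws');

const port = 3210; // DEFAULT_PORT above
const socket = new WebSocket(`ws://localhost:${port}/preview-server`, {
  headers: { origin: `http://localhost:${port}` }, // must match the upgrade check above
});

socket.on('message', (data) => {
  const message = JSON.parse(data.toString());
  if (message.type === 'preview:file:deleted') {
    console.log(`${message.type}: ${message.filePath}`);
  } else {
    // preview:connected carries a status string in message.code; the
    // file:added/changed messages carry the bundled document (YAML or JSON) there.
    console.log(message.type);
  }
});
```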
@@ -1451,6 +1451,78 @@
1451
1451
  "index.js"
1452
1452
  ]
1453
1453
  },
1454
+ "start:preview": {
1455
+ "aliases": [],
1456
+ "args": {
1457
+ "spec-file": {
1458
+ "description": "the path to the file to be opened with studio or context name",
1459
+ "name": "spec-file",
1460
+ "required": true
1461
+ }
1462
+ },
1463
+ "description": "starts a new local instance of Studio in minimal state bundling all the refs of the schema file and with no editing allowed.",
1464
+ "flags": {
1465
+ "help": {
1466
+ "char": "h",
1467
+ "description": "Show CLI help.",
1468
+ "name": "help",
1469
+ "allowNo": false,
1470
+ "type": "boolean"
1471
+ },
1472
+ "port": {
1473
+ "char": "p",
1474
+ "description": "port in which to start Studio in the preview mode",
1475
+ "name": "port",
1476
+ "hasDynamicHelp": false,
1477
+ "multiple": false,
1478
+ "type": "option"
1479
+ },
1480
+ "base": {
1481
+ "char": "b",
1482
+ "description": "Path to the file which will act as a base. This is required when some properties need to be overwritten while bundling with the file.",
1483
+ "name": "base",
1484
+ "hasDynamicHelp": false,
1485
+ "multiple": false,
1486
+ "type": "option"
1487
+ },
1488
+ "baseDir": {
1489
+ "char": "d",
1490
+ "description": "One relative/absolute path to directory relative to which paths to AsyncAPI Documents that should be bundled will be resolved.",
1491
+ "name": "baseDir",
1492
+ "hasDynamicHelp": false,
1493
+ "multiple": false,
1494
+ "type": "option"
1495
+ },
1496
+ "xOrigin": {
1497
+ "char": "x",
1498
+ "description": "Pass this switch to generate properties \"x-origin\" that will contain historical values of dereferenced \"$ref\"s.",
1499
+ "name": "xOrigin",
1500
+ "allowNo": false,
1501
+ "type": "boolean"
1502
+ },
1503
+ "suppressLogs": {
1504
+ "char": "l",
1505
+ "description": "Pass this to suppress the detiled error logs.",
1506
+ "name": "suppressLogs",
1507
+ "allowNo": false,
1508
+ "type": "boolean"
1509
+ }
1510
+ },
1511
+ "hasDynamicHelp": false,
1512
+ "hiddenAliases": [],
1513
+ "id": "start:preview",
1514
+ "pluginAlias": "@asyncapi/cli",
1515
+ "pluginName": "@asyncapi/cli",
1516
+ "pluginType": "core",
1517
+ "strict": true,
1518
+ "isESM": false,
1519
+ "relativePath": [
1520
+ "lib",
1521
+ "commands",
1522
+ "start",
1523
+ "preview.js"
1524
+ ]
1525
+ },
1454
1526
  "start:studio": {
1455
1527
  "aliases": [],
1456
1528
  "args": {
@@ -1776,5 +1848,5 @@
1776
1848
  ]
1777
1849
  }
1778
1850
  },
1779
- "version": "3.0.0"
1851
+ "version": "3.1.0"
1780
1852
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@asyncapi/cli",
3
3
  "description": "All in one CLI for all AsyncAPI tools",
4
- "version": "3.0.0",
4
+ "version": "3.1.0",
5
5
  "author": "@asyncapi",
6
6
  "bin": {
7
7
  "asyncapi": "./bin/run_bin"
@@ -19,7 +19,7 @@
19
19
  "@asyncapi/parser": "^3.3.0",
20
20
  "@asyncapi/protobuf-schema-parser": "^3.5.1",
21
21
  "@asyncapi/raml-dt-schema-parser": "^4.0.24",
22
- "@asyncapi/studio": "^0.23.1",
22
+ "@asyncapi/studio": "^0.24.2",
23
23
  "@changesets/changelog-git": "^0.2.0",
24
24
  "@clack/prompts": "^0.7.0",
25
25
  "@oclif/core": "^4.2.9",
@@ -94,6 +94,7 @@
94
94
  "/bin",
95
95
  "/lib",
96
96
  "/assets",
97
+ "/scripts",
97
98
  "/npm-shrinkwrap.json",
98
99
  "/oclif.manifest.json"
99
100
  ],
@@ -174,7 +175,7 @@
174
175
  "createhook": "oclif generate hook myhook --event=command_not_found",
175
176
  "createhookinit": "oclif generate hook inithook --event=init",
176
177
  "action:docker:build": "docker build -f github-action/Dockerfile -t asyncapi/github-action-for-cli:latest .",
177
- "action:test": "cd github-action && make test"
178
+ "action:test": "npm run build && cd github-action && make test"
178
179
  },
179
180
  "types": "lib/index.d.ts"
180
181
  }
@@ -0,0 +1,160 @@
1
+ /* eslint-disable @typescript-eslint/no-var-requires */
2
+ const { spawnSync } = require('child_process');
3
+ const os = require('os');
4
+ const fs = require('fs');
5
+ const path = require('path');
6
+
7
+ const allowedShells = ['zsh', 'bash'];
8
+
9
+ // Helper function to find the first existing file among a list of paths
10
+ function findExistingFile(possibleFiles) {
11
+ for (const file of possibleFiles) {
12
+ const fullPath = path.join(os.homedir(), file);
13
+ if (fs.existsSync(fullPath)) {
14
+ return fullPath;
15
+ }
16
+ }
17
+ return null;
18
+ }
19
+
20
+ const shellConfigs = {
21
+ zsh: {
22
+ rcFile: path.join(os.homedir(), '.zshrc'),
23
+ detectFile: path.join(os.homedir(), '.zshrc'),
24
+ postMessage: 'Run: source ~/.zshrc',
25
+ action: (output, rcFile) => {
26
+ const configContent = fs.existsSync(rcFile) ? fs.readFileSync(rcFile, 'utf-8') : '';
27
+
28
+ if (configContent.includes(output.trim())) {
29
+ console.log(`✅ Autocomplete is already configured in ${rcFile}. Skipping addition.`);
30
+ } else {
31
+ fs.appendFileSync(rcFile, `\n# AsyncAPI CLI Autocomplete\n${output}\n`);
32
+ console.log(`✅ Autocomplete configuration added to ${rcFile}.`);
33
+ }
34
+ },
35
+ },
36
+ bash: {
37
+ rcFile: findExistingFile(['.bashrc', '.bash_profile', '.profile']) || path.join(os.homedir(), '.bashrc'),
38
+ detectFile: findExistingFile(['.bashrc', '.bash_profile', '.profile']),
39
+ postMessage: '', // This will be set dynamically later
40
+ action: (output, rcFile) => {
41
+ const configContent = fs.existsSync(rcFile) ? fs.readFileSync(rcFile, 'utf-8') : '';
42
+
43
+ if (configContent.includes(output.trim())) {
44
+ console.log(`✅ Autocomplete is already configured in ${rcFile}. Skipping addition.`);
45
+ } else {
46
+ fs.appendFileSync(rcFile, `\n# AsyncAPI CLI Autocomplete\n${output}\n`);
47
+ console.log(`✅ Autocomplete configuration added to ${rcFile}.`);
48
+ }
49
+ },
50
+ },
51
+ };
52
+
53
+ // Set correct postMessage dynamically
54
+ if (shellConfigs.bash.detectFile) {
55
+ shellConfigs.bash.postMessage = `Run: source ${shellConfigs.bash.detectFile}`;
56
+ } else {
57
+ shellConfigs.bash.postMessage = 'Run: source ~/.bashrc';
58
+ }
59
+
60
+ function getShellConfig(shell) {
61
+ if (!allowedShells.includes(shell)) {
62
+ throw new Error(`Unsupported shell: ${shell}. Autocomplete only supports zsh and bash.`);
63
+ }
64
+ return shellConfigs[shell];
65
+ }
66
+
67
+ function detectShell() {
68
+ const detectedShells = [];
69
+ for (const [shell, config] of Object.entries(shellConfigs)) {
70
+ if (config.detectFile && fs.existsSync(config.detectFile)) {
71
+ detectedShells.push(shell);
72
+ }
73
+ }
74
+ return detectedShells;
75
+ }
76
+
77
+ function checkPotentialPath(potentialPath) {
78
+ if (potentialPath.includes(path.sep)) {
79
+ if (fs.existsSync(potentialPath)) {
80
+ return potentialPath;
81
+ }
82
+ } else {
83
+ const result = spawnSync('/bin/sh', ['-c', `command -v ${potentialPath}`], {
84
+ encoding: 'utf-8',
85
+ stdio: 'pipe',
86
+ });
87
+ if (result.status === 0 && result.stdout) {
88
+ return result.stdout.trim().split('\n')[0];
89
+ }
90
+ }
91
+ return null;
92
+ }
93
+
94
+ function findCliExecutable() {
95
+ const possiblePaths = [
96
+ path.resolve('./bin/run'),
97
+ path.resolve('../bin/run'),
98
+ path.resolve('./node_modules/.bin/asyncapi'),
99
+ 'asyncapi',
100
+ ];
101
+
102
+ for (const potentialPath of possiblePaths) {
103
+ try {
104
+ const foundPath = checkPotentialPath(potentialPath);
105
+ if (foundPath) {
106
+ console.log(`Found CLI executable at: ${foundPath}`);
107
+ return foundPath;
108
+ }
109
+ } catch (error) {
110
+ console.warn(`⚠️ Ignored error while checking path ${potentialPath}: ${error.message}`);
111
+ }
112
+ }
113
+
114
+ throw new Error('CLI executable not found. Ensure AsyncAPI CLI is installed.');
115
+ }
116
+
117
+ function generateAutocompleteScript(shell) {
118
+ const executablePath = findCliExecutable();
119
+ const result = spawnSync(executablePath, ['autocomplete', 'script', shell], {
120
+ encoding: 'utf-8',
121
+ stdio: 'pipe',
122
+ });
123
+ if (result.status !== 0 || result.error) {
124
+ throw new Error(
125
+ `Autocomplete setup for ${shell} failed: ${result.stderr || result.error?.message || 'Unknown error'}`
126
+ );
127
+ }
128
+ const output = result.stdout;
129
+ if (!output || output.trim() === '') {
130
+ throw new Error(`No autocomplete script generated for ${shell}.`);
131
+ }
132
+ return output;
133
+ }
134
+
135
+ function setupAutocomplete(shell) {
136
+ if (!allowedShells.includes(shell)) {
137
+ console.error(`❌ Autocomplete only supports zsh and bash. Skipping setup for ${shell}.`);
138
+ return;
139
+ }
140
+
141
+ try {
142
+ const config = getShellConfig(shell);
143
+ console.log(`🔧 Generating autocomplete script for ${shell}...`);
144
+ const output = generateAutocompleteScript(shell);
145
+ config.action(output, config.rcFile);
146
+ console.log(`✅ Autocomplete configured for ${shell}. ${config.postMessage}`);
147
+ } catch (error) {
148
+ console.error(`❌ Failed to setup autocomplete for ${shell}: ${error.message}`);
149
+ }
150
+ }
151
+
152
+ // Start
153
+ const shells = detectShell();
154
+ if (shells.length) {
155
+ for (const shell of shells) {
156
+ setupAutocomplete(shell);
157
+ }
158
+ } else {
159
+ console.log('⚠️ Shell not detected or unsupported. Autocomplete setup skipped.');
160
+ }
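In short, a successful run appends an idempotent block to the detected shell's rc file: a `# AsyncAPI CLI Autocomplete` marker followed by whatever `asyncapi autocomplete script <shell>` printed; if that content is already present, the append is skipped. The completion script itself is generated by the CLI at run time and is not part of this diff.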
@@ -0,0 +1,126 @@
1
+ /* eslint-disable @typescript-eslint/no-var-requires */
2
+
3
+ const fs = require('fs');
4
+ const unzipper = require('unzipper');
5
+ const path = require('path');
6
+
7
+ const { Parser } = require('@asyncapi/parser/cjs');
8
+ const { AvroSchemaParser } = require('@asyncapi/avro-schema-parser');
9
+ const { OpenAPISchemaParser } = require('@asyncapi/openapi-schema-parser');
10
+ const { RamlDTSchemaParser } = require('@asyncapi/raml-dt-schema-parser');
11
+ const { pipeline } = require('stream');
12
+ const { promisify } = require('util');
13
+
14
+ const streamPipeline = promisify(pipeline);
15
+
16
+ const parser = new Parser({
17
+ schemaParsers: [
18
+ AvroSchemaParser(),
19
+ OpenAPISchemaParser(),
20
+ RamlDTSchemaParser(),
21
+ ]
22
+ });
23
+
24
+ const SPEC_EXAMPLES_ZIP_URL = 'https://github.com/asyncapi/spec/archive/refs/heads/master.zip';
25
+ const EXAMPLE_DIRECTORY = path.join(__dirname, '../assets/examples');
26
+ const TEMP_ZIP_NAME = 'spec-examples.zip';
27
+
28
+ const fetchAsyncAPIExamplesFromExternalURL = () => {
29
+ try {
30
+ return new Promise((resolve, reject) => {
31
+ fetch(SPEC_EXAMPLES_ZIP_URL)
32
+ .then(async (res) => {
33
+ if (res.status !== 200) {
34
+ return reject(new Error(`Failed to fetch examples from ${SPEC_EXAMPLES_ZIP_URL}`));
35
+ }
36
+
37
+ const file = fs.createWriteStream(TEMP_ZIP_NAME);
38
+ await streamPipeline(res.body, file);
39
+
40
+ console.log('Fetched ZIP file');
41
+ resolve();
42
+ })
43
+ .catch(reject);
44
+ });
45
+ } catch (error) {
46
+ console.error(error);
47
+ }
48
+ };
49
+
50
+ const unzipAsyncAPIExamples = async () => {
51
+ return new Promise((resolve, reject) => {
52
+ if (!fs.existsSync(EXAMPLE_DIRECTORY)) {
53
+ fs.mkdirSync(EXAMPLE_DIRECTORY);
54
+ }
55
+
56
+ fs.createReadStream(TEMP_ZIP_NAME)
57
+ .pipe(unzipper.Parse())
58
+ .on('entry', async (entry) => {
59
+ const fileName = entry.path;
60
+ if (fileName.includes('examples/') && fileName.includes('.yml') && entry.type === 'File') {
61
+ const fileContent = await entry.buffer();
62
+ const fileNameWithExtension = fileName.split('examples/')[1];
63
+ fs.writeFileSync(path.join(EXAMPLE_DIRECTORY, fileNameWithExtension), fileContent.toString());
64
+ } else {
65
+ entry.autodrain();
66
+ }
67
+ }).on('close', () => {
68
+ console.log('Unzipped all examples from ZIP');
69
+ resolve();
70
+ }).on('error', (error) => {
71
+ reject(new Error(`Error in unzipping from ZIP: ${error.message}`));
72
+ });
73
+ });
74
+ };
75
+
76
+ const buildCLIListFromExamples = async () => {
77
+ const files = fs.readdirSync(EXAMPLE_DIRECTORY);
78
+ const examples = files.filter(file => file.includes('.yml')).sort();
79
+
80
+ const buildExampleList = examples.map(async example => {
81
+ const examplePath = path.join(EXAMPLE_DIRECTORY, example);
82
+ const exampleContent = fs.readFileSync(examplePath, { encoding: 'utf-8' });
83
+
84
+ try {
85
+ const { document } = await parser.parse(exampleContent);
86
+ // Failed for some reason to parse this spec file (document is undefined), ignore for now
87
+ if (!document) {
88
+ return;
89
+ }
90
+
91
+ const title = document.info().title();
92
+ const protocols = listAllProtocolsForFile(document);
93
+ return {
94
+ name: protocols ? `${title} - (protocols: ${protocols})` : title,
95
+ value: example
96
+ };
97
+ } catch (error) {
98
+ console.error(error);
99
+ }
100
+ });
101
+
102
+ const exampleList = (await Promise.all(buildExampleList)).filter(item => !!item);
103
+ const orderedExampleList = exampleList.sort((a, b) => a.name.localeCompare(b.name));
104
+
105
+ fs.writeFileSync(path.join(EXAMPLE_DIRECTORY, 'examples.json'), JSON.stringify(orderedExampleList, null, 4));
106
+ };
107
+
108
+ const listAllProtocolsForFile = (document) => {
109
+ const servers = document.servers();
110
+ if (servers.length === 0) {
111
+ return '';
112
+ }
113
+
114
+ return servers.all().map(server => server.protocol()).join(',');
115
+ };
116
+
117
+ const tidyUp = async () => {
118
+ fs.unlinkSync(TEMP_ZIP_NAME);
119
+ };
120
+
121
+ (async () => {
122
+ await fetchAsyncAPIExamplesFromExternalURL();
123
+ await unzipAsyncAPIExamples();
124
+ await buildCLIListFromExamples();
125
+ await tidyUp();
126
+ })();
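The end product of the pipeline above is assets/examples/examples.json, an array of name/value entries sorted by name. One entry would look roughly like this (the name format comes from the script; the concrete title and filename here are hypothetical):

```js
// Illustrative entry in assets/examples/examples.json; the name is built as
// `${title} - (protocols: ${protocols})` and the value is the example's filename.
const exampleEntry = {
  name: 'Streetlights Kafka API - (protocols: kafka)',
  value: 'streetlights-kafka.yml',
};
```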
@@ -0,0 +1,80 @@
1
+ /* eslint-disable @typescript-eslint/no-var-requires */
2
+
3
+ const { rename, access, mkdir } = require('fs').promises;
4
+ const packageJson = require('../package.json');
5
+ const path = require('path');
6
+ const simpleGit = require('simple-git');
7
+ const git = simpleGit({baseDir: process.cwd()});
8
+
9
+ async function fileExists(checkPath) {
10
+ try {
11
+ await access(checkPath);
12
+ return true;
13
+ } catch (e) {
14
+ return false;
15
+ }
16
+ }
17
+
18
+ async function checkAndRenameFile(generatedPath, newPath) {
19
+ if (await fileExists(generatedPath)) {
20
+ await rename(generatedPath, newPath);
21
+ }
22
+ }
23
+
24
+ async function createDirectory(directoryPath) {
25
+ const exists = await fileExists(directoryPath);
26
+ if (!exists) {
27
+ await mkdir(directoryPath);
28
+ }
29
+ }
30
+
31
+ async function renameDeb({version, name, sha}) {
32
+ const dist = 'dist/deb';
33
+
34
+ // deb package naming convention: https://github.com/oclif/oclif/blob/fb5da961f925fa0eba5c5b05c8cee0c9bd156c00/src/upload-util.ts#L51
35
+ const generatedPath = path.resolve(dist, `${name}_${version}.${sha}-1_amd64.deb`);
36
+ const newPath = path.resolve(dist, 'asyncapi.deb');
37
+ await checkAndRenameFile(generatedPath, newPath);
38
+ }
39
+
40
+ async function renameTar({version, name, sha}) {
41
+ const dist = 'dist';
42
+
43
+ const generatedPath = path.resolve(dist, `${name}-v${version}-${sha}-linux-x64.tar.gz`);
44
+ // for tarballs, the files are generated in the `dist/` directory.
45
+ // Creates a new `tar` directory (`dist/tar`) and moves the generated tarball inside that directory.
46
+ const tarDirectory = path.resolve(dist, 'tar');
47
+ await createDirectory(tarDirectory);
48
+ const newPath = path.resolve(tarDirectory, 'asyncapi.tar.gz');
49
+ await checkAndRenameFile(generatedPath, newPath);
50
+ }
51
+
52
+ async function renameWindows({version, name, sha, arch}) {
53
+ const dist = 'dist/win32';
54
+
55
+ const generatedPath = path.resolve(dist, `${name}-v${version}-${sha}-${arch}.exe`);
56
+ const newPath = path.resolve(dist, `asyncapi.${arch}.exe`);
57
+ await checkAndRenameFile(generatedPath, newPath);
58
+ }
59
+
60
+ async function renamePkg({version, name, sha, arch}) {
61
+ const dist = 'dist/macos';
62
+
63
+ const generatedPath = path.resolve(dist, `${name}-v${version}-${sha}-${arch}.pkg`);
64
+ const newPath = path.resolve(dist, `asyncapi.${arch}.pkg`);
65
+ await checkAndRenameFile(generatedPath, newPath);
66
+ }
67
+
68
+ async function renamePackages() {
69
+ const version = packageJson.version;
70
+ const name = 'asyncapi';
71
+ const sha = await git.revparse(['--short', 'HEAD']);
72
+ await renameDeb({version: version.split('-')[0], name, sha});
73
+ await renamePkg({version, name, sha, arch: 'x64'});
74
+ await renamePkg({version, name, sha, arch: 'arm64'});
75
+ await renameWindows({version, name, sha, arch: 'x64'});
76
+ await renameWindows({version, name, sha, arch: 'x86'});
77
+ await renameTar({version, name, sha});
78
+ }
79
+
80
+ renamePackages();
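Concretely, for a hypothetical version and short commit SHA, the renames performed above map the generated artifacts to stable names:

```js
// Hypothetical before/after, assuming version 3.1.0 and short SHA ab12cd3
// (the generated names depend on the actual oclif pack output):
//
//   dist/deb/asyncapi_3.1.0.ab12cd3-1_amd64.deb    -> dist/deb/asyncapi.deb
//   dist/asyncapi-v3.1.0-ab12cd3-linux-x64.tar.gz  -> dist/tar/asyncapi.tar.gz
//   dist/win32/asyncapi-v3.1.0-ab12cd3-x64.exe     -> dist/win32/asyncapi.x64.exe
//   dist/win32/asyncapi-v3.1.0-ab12cd3-x86.exe     -> dist/win32/asyncapi.x86.exe
//   dist/macos/asyncapi-v3.1.0-ab12cd3-x64.pkg     -> dist/macos/asyncapi.x64.pkg
//   dist/macos/asyncapi-v3.1.0-ab12cd3-arm64.pkg   -> dist/macos/asyncapi.arm64.pkg
```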
@@ -0,0 +1,73 @@
1
+ /* eslint-disable @typescript-eslint/no-var-requires */
2
+ const {writeFile, readFile} = require('fs').promises;
3
+
4
+ // Define the paths to the README and usage files
5
+ const README_PATH = './scripts/README.md'; // File path for the generated README file
6
+ const USAGE_PATH = './docs/usage.md'; // File path for the usage documentation file
7
+
8
+ const header = `---
9
+ title: 'Usage'
10
+ weight: 40
11
+ ---
12
+
13
+ <!--
14
+
15
+ This file is automatically generated from updateUsageDocs.js script. In package.json in line 158-161 lines the following steps has been executed in order to run this script successfully -
16
+
17
+ * generate:readme:create: It creates the initial content for the README file by printing the usage and commands tags using printf and redirects the output to scripts/README.md file.
18
+ * generate:readme:commands: It changes the directory to the scripts folder and executes the oclif readme command. This command generates the usage and commands sections based on the CLI commands and updates the content in the scripts/README.md file.
19
+ * generate:assets: This script combines the two previously mentioned scripts (generate:readme:toc and generate:commands) to generate the necessary assets, such as the README file and usage documentation.
20
+ * generate:commands: This script executes the following steps:
21
+ - Runs the generate:readme:create script to create the initial content for the README file.
22
+ - Executes the generate:readme:commands script to generate the usage and commands sections based on the CLI commands.
23
+ - Runs the updateUsageDocs.js script using Node.js to update the usage documentation file with the contents of the generated README file.
24
+ - Deletes the scripts/README.md file using the rimraf command.
25
+
26
+ -->
27
+
28
+ The AsyncAPI CLI makes it easier to work with AsyncAPI documents.
29
+ `;
30
+
31
+ // Define an async function to write the header and the README contents to the usage documentation file
32
+ async function run() {
33
+ try {
34
+ await writeFile(USAGE_PATH, header);
35
+ const readmeContents = await readContents();
36
+ // Append the contents of the README file to the usage documentation file
37
+ await writeFile(USAGE_PATH, readmeContents, { flag: 'a' });
38
+ } catch (e) {
39
+ console.error(e);
40
+ }
41
+ }
42
+
43
+ run();
44
+
45
+ async function readContents() {
46
+ let readmeContents;
47
+ let commandsContent = '';
48
+
49
+ while (commandsContent.length === 0) {
50
+ readmeContents = await readFile(README_PATH, 'utf8');
51
+
52
+ // Check if the content between <!-- commands --> and <!-- commandsstop --> is empty
53
+ const commandsStartText = '<!-- commands -->';
54
+ const commandStartIndex = readmeContents.indexOf(commandsStartText);
55
+ const commandStopIndex = readmeContents.indexOf('<!-- commandsstop -->');
56
+ // Cut the content between the tags above, trim whitespace, and check whether any text remains, which means oclif has added the commands section
57
+ commandsContent = readmeContents.slice(commandStartIndex + commandsStartText.length, commandStopIndex).trim();
58
+
59
+ if (commandsContent.length === 0) {
60
+ console.log('No content between <!-- commands --> and <!-- commandsstop -->. Trying again...');
61
+ } else {
62
+ console.log('Content found!');
63
+ }
64
+
65
+ await delay(3000); // 3-second delay
66
+ }
67
+
68
+ return readmeContents;
69
+ }
70
+
71
+ function delay(ms) {
72
+ return new Promise(resolve => setTimeout(resolve, ms));
73
+ }