@redpanda-data/docs-extensions-and-macros 4.3.0 → 4.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/bin/doc-tools.js +376 -0
  2. package/cli-utils/add-caret-external-links.py +68 -0
  3. package/cli-utils/beta-from-antora.js +27 -0
  4. package/cli-utils/generate-cluster-docs.sh +83 -0
  5. package/cli-utils/install-test-dependencies.sh +158 -0
  6. package/cli-utils/python-venv.sh +20 -0
  7. package/cli-utils/start-cluster.sh +53 -0
  8. package/docker-compose/bootstrap.yml +67 -0
  9. package/docker-compose/docker-compose.yml +414 -0
  10. package/docker-compose/generate-profiles.yaml +77 -0
  11. package/docker-compose/rpk-profile.yaml +24 -0
  12. package/docker-compose/transactions-schema.json +37 -0
  13. package/docker-compose/transactions.md +46 -0
  14. package/docker-compose/transform/README.adoc +73 -0
  15. package/docker-compose/transform/go.mod +5 -0
  16. package/docker-compose/transform/go.sum +2 -0
  17. package/docker-compose/transform/regex.wasm +0 -0
  18. package/docker-compose/transform/transform.go +122 -0
  19. package/docker-compose/transform/transform.yaml +33 -0
  20. package/extensions/replace-attributes-in-attachments.js +1 -1
  21. package/extensions/util/compute-out.js +38 -0
  22. package/extensions/util/create-asciidoc-file.js +15 -0
  23. package/macros/data-template.js +2 -2
  24. package/package.json +15 -3
  25. package/tools/docusaurus-to-antora-conversion-scripts/convert-docs.sh +114 -0
  26. package/tools/docusaurus-to-antora-conversion-scripts/get-file-changes.sh +9 -0
  27. package/tools/docusaurus-to-antora-conversion-scripts/post-process-asciidoc.js +63 -0
  28. package/tools/docusaurus-to-antora-conversion-scripts/pre-process-markdown.js +108 -0
  29. package/tools/fetch-from-github.js +63 -0
  30. package/tools/gen-rpk-ascii.py +477 -0
  31. package/tools/get-console-version.js +53 -0
  32. package/tools/get-redpanda-version.js +53 -0
  33. package/tools/metrics/metrics.py +199 -0
  34. package/tools/metrics/requirements.txt +1 -0
  35. package/tools/property-extractor/Makefile +99 -0
  36. package/tools/property-extractor/README.adoc +206 -0
  37. package/tools/property-extractor/definitions.json +245 -0
  38. package/tools/property-extractor/file_pair.py +7 -0
  39. package/tools/property-extractor/json-to-asciidoc/generate_docs.py +460 -0
  40. package/tools/property-extractor/parser.py +224 -0
  41. package/tools/property-extractor/property_bag.py +4 -0
  42. package/tools/property-extractor/property_extractor.py +243 -0
  43. package/tools/property-extractor/requirements.txt +2 -0
  44. package/tools/property-extractor/tests/transformers_test.py +376 -0
  45. package/tools/property-extractor/transformers.py +397 -0
@@ -0,0 +1,376 @@
1
+ #!/usr/bin/env node
2
+
3
+ const { execSync, spawnSync } = require('child_process');
4
+ const { Command } = require('commander');
5
+ const path = require('path');
6
+ const fs = require('fs');
7
+
8
// Walk upward from `start` until a directory containing a .git folder or a
// package.json is found; that directory is treated as the repository root.
// Exits the process with an error if the filesystem root is reached first.
function findRepoRoot(start = process.cwd()) {
  for (let current = start; current !== path.parse(current).root; current = path.dirname(current)) {
    // Either marker is accepted as a root indicator.
    const hasGit = fs.existsSync(path.join(current, '.git'));
    const hasPkg = fs.existsSync(path.join(current, 'package.json'));
    if (hasGit || hasPkg) return current;
  }
  console.error('❌ Could not find repo root (no .git or package.json in any parent)');
  process.exit(1);
}
21
+
22
+ // --------------------------------------------------------------------
23
+ // Dependency check functions
24
+ // --------------------------------------------------------------------
25
/**
 * Verify that an external tool responds to its version flag.
 * Prints installation guidance and exits the process when the probe fails.
 * @param {string} command - executable to probe
 * @param {string} versionArg - flag such as '--version'
 * @param {string} name - human-readable tool name for the error message
 * @param {string} helpURL - where installation instructions live
 */
function checkDependency(command, versionArg, name, helpURL) {
  try {
    execSync(`${command} ${versionArg}`, { stdio: 'ignore' });
  } catch {
    const message = `Error: ${name} is required but not found or not working properly.\nPlease install ${name} and try again.\nFor more info, see: ${helpURL}`;
    console.error(message);
    process.exit(1);
  }
}
35
+
36
/**
 * Return whether `command` resolves on the user's PATH.
 * @param {string} command - executable name to look up
 * @param {boolean} [quiet=false] - suppress the error message when missing.
 *   Useful when the caller probes alternatives (e.g. gcc vs clang) and a
 *   single miss is not an error. Defaults to the original noisy behavior.
 * @returns {boolean} true when the command exists
 */
function checkCommandExists(command, quiet = false) {
  // `which` does not exist on Windows; use `where` there.
  const locator = process.platform === 'win32' ? 'where' : 'which';
  try {
    execSync(`${locator} ${command}`, { stdio: 'ignore' });
    return true;
  } catch (error) {
    if (!quiet) {
      console.error(`Error: \`${command}\` is required but not found. Please install \`${command}\` and try again.`);
    }
    return false;
  }
}
45
+
46
// Abort with guidance unless `make` is available; the property-extractor
// automation is driven by a Makefile.
function checkMake() {
  const hasMake = checkCommandExists('make');
  if (hasMake) return;
  console.error('Error: `make` is required but not found. Please install `make` to use the automation Makefile. For help, see: https://www.google.com/search?q=how+to+install+make');
  process.exit(1);
}
52
+
53
/**
 * Ensure a Python interpreter >= 3.10 is reachable as `python3` or `python`.
 * Tries every candidate before giving up, so an outdated `python3` no longer
 * masks a new-enough `python` (the original exited on the first old version
 * it saw). Exits the process when no suitable interpreter is found.
 */
function checkPython() {
  const candidates = ['python3', 'python'];
  // Remember a "found but outdated" version so the error message can show it.
  let outdatedVersion = null;

  for (const cmd of candidates) {
    let versionOutput;
    try {
      versionOutput = execSync(`${cmd} --version`, {
        encoding: 'utf8',
        stdio: ['pipe', 'pipe', 'ignore']
      }).trim();
    } catch {
      continue; // this candidate didn’t exist or errored — try the next one
    }
    // versionOutput looks like "Python 3.x.y"
    const versionString = versionOutput.split(' ')[1] || '';
    const [major, minor] = versionString.split('.').map(Number);
    if (Number.isNaN(major) || Number.isNaN(minor)) {
      continue; // unparsable output — skip this candidate
    }
    if (major > 3 || (major === 3 && minor >= 10)) {
      return; // found a suitable interpreter
    }
    outdatedVersion = versionString;
  }

  if (outdatedVersion !== null) {
    console.error(`Error: Python 3.10 or higher is required. Detected version: ${outdatedVersion}`);
  } else {
    console.error('Error: Python 3.10 or higher is required but not found.\nPlease install Python and ensure `python3 --version` or `python --version` returns at least 3.10: https://www.geeksforgeeks.org/how-to-install-python-on-mac/');
  }
  process.exit(1);
}
82
+
83
// Ensure a C++ compiler is present; either gcc or clang satisfies the
// requirement. Exits the process when neither is found.
function checkCompiler() {
  const hasGcc = checkCommandExists('gcc');
  const hasClang = checkCommandExists('clang');
  if (hasGcc || hasClang) return;
  console.error('Error: A C++ compiler (such as gcc or clang) is required but not found. Please install one: https://osxdaily.com/2023/05/02/how-install-gcc-mac/');
  process.exit(1);
}
91
+
92
// Verify that the Docker CLI is installed and that the daemon is reachable.
function checkDocker() {
  checkDependency('docker', '--version', 'Docker', 'https://docs.docker.com/get-docker/');
  try {
    // `docker info` fails when the daemon isn't running even if the CLI exists.
    execSync('docker info', { stdio: 'ignore' });
  } catch {
    console.error('Error: Docker daemon appears to be not running. Please start Docker.');
    process.exit(1);
  }
}
101
+
102
// Preconditions for `generate property-docs`: make, Python >= 3.10, and a
// C/C++ compiler. Each check exits the process itself on failure.
function verifyPropertyDependencies() {
  checkMake();
  checkPython();
  checkCompiler();
}
107
+
108
// Preconditions for metrics/rpk docs generation: Python >= 3.10, curl, tar,
// and a running Docker daemon. Each check exits the process on failure.
function verifyMetricsDependencies() {
  checkPython();
  // `||` short-circuits, so tar is only probed when curl exists.
  if (!checkCommandExists('curl') || !checkCommandExists('tar')) {
    // `checkCommandExists` already prints a helpful message.
    process.exit(1);
  }
  checkDocker();
}
116
// --------------------------------------------------------------------
// Main CLI Definition
// --------------------------------------------------------------------
// Root commander instance; subcommands are registered below and the
// `generate` group is attached at the bottom of the file.
const programCli = new Command();

programCli
  .name('doc-tools')
  .description('Redpanda Document Automation CLI')
  .version('1.0.1');
125
+
126
// Top-level commands.
// Runs cli-utils/install-test-dependencies.sh and mirrors its exit status.
programCli
  .command('install-test-dependencies')
  .description('Install packages for doc test workflows')
  .action(() => {
    const scriptPath = path.join(__dirname, '../cli-utils/install-test-dependencies.sh');
    const result = spawnSync(scriptPath, { stdio: 'inherit', shell: true });
    if (result.error) {
      // spawn itself failed (e.g. script missing/not executable); in that
      // case result.status is null and the original exited 0 by accident.
      console.error(`Error: ${result.error.message}`);
      process.exit(1);
    }
    // status is null when the child was killed by a signal — treat as failure.
    process.exit(result.status ?? 1);
  });
135
+
136
// Prints the latest Redpanda release (or RC/beta) version. The heavy lifting
// lives in tools/get-redpanda-version.js, which is loaded lazily inside the
// action so unrelated commands don't pay its startup cost.
programCli
  .command('get-redpanda-version')
  .description('Print the latest Redpanda version')
  .option('--beta', 'Return the latest RC (beta) version if available')
  .option('--from-antora', 'Read prerelease flag from local antora.yml')
  .action(async (options) => {
    try {
      await require('../tools/get-redpanda-version.js')(options);
    } catch (err) {
      console.error(err);
      process.exit(1);
    }
  });
149
+
150
// Prints the latest Redpanda Console version; mirrors get-redpanda-version
// and lazily loads its helper module inside the action.
programCli
  .command('get-console-version')
  .description('Print the latest Console version')
  .option('--beta', 'Return the latest beta version if available')
  .option('--from-antora', 'Read prerelease flag from local antora.yml')
  .action(async (options) => {
    try {
      await require('../tools/get-console-version.js')(options);
    } catch (err) {
      console.error(err);
      process.exit(1);
    }
  });
163
+
164
// Create an "automation" subcommand group.
// NOTE: the group is registered under the name `generate`, so subcommands
// are invoked as `doc-tools generate <subcommand>`.
const automation = new Command('generate')
  .description('Run docs automations (properties, metrics, and rpk docs generation)');
167
+
168
// --------------------------------------------------------------------
// Automation Subcommands: Delegate to a unified Bash script internally.
// --------------------------------------------------------------------

// Common options for both automation tasks.
// Default values shared by the metrics-docs and rpk-docs subcommands below.
const commonOptions = {
  tag: 'latest',               // Redpanda image tag
  dockerRepo: 'redpanda',      // Redpanda Docker repository
  consoleTag: 'latest',        // Console image tag
  consoleDockerRepo: 'console' // Console Docker repository
};
179
+
180
/**
 * Invoke cli-utils/generate-cluster-docs.sh for the given mode and tags.
 * Exits the process when the script fails.
 * @param {string} mode - 'metrics' or 'rpk'
 * @param {string} tag - Redpanda tag to document
 * @param {{dockerRepo: string, consoleTag: string, consoleDockerRepo: string}} options
 */
function runClusterDocs(mode, tag, options) {
  const script = path.join(__dirname, '../cli-utils/generate-cluster-docs.sh');
  const args = [mode, tag, options.dockerRepo, options.consoleTag, options.consoleDockerRepo];
  console.log(`Running ${script} with arguments: ${args.join(' ')}`);
  // No `shell: true` here: combining an argument array with a shell lets the
  // shell re-interpret (and mangle) arguments containing spaces or
  // metacharacters. `bash` receives the argv verbatim instead.
  const r = spawnSync('bash', [script, ...args], { stdio: 'inherit' });
  if (r.error) {
    console.error(`Error: ${r.error.message}`);
    process.exit(1);
  }
  // status is null when killed by a signal — still a failure.
  if (r.status !== 0) process.exit(r.status ?? 1);
}
187
+
188
// Diff two autogenerated directories (autogenerated/<old>/<kind> vs
// autogenerated/<new>/<kind>) and write a unified patch under
// autogenerated/diffs/<kind>/<old>_to_<new>/changes.patch.
// Exits the process when either input directory is missing or diff errors.
function diffDirs(kind, oldTag, newTag) {
  const oldDir = path.join('autogenerated', oldTag, kind);
  const newDir = path.join('autogenerated', newTag, kind);
  const diffDir = path.join('autogenerated', 'diffs', kind, `${oldTag}_to_${newTag}`);
  const patch = path.join(diffDir, 'changes.patch');

  for (const dir of [oldDir, newDir]) {
    if (!fs.existsSync(dir)) {
      console.error(`❌ Cannot diff: missing ${dir}`);
      process.exit(1);
    }
  }

  fs.mkdirSync(diffDir, { recursive: true });

  // diff exits 0 (identical) or 1 (differences found) on success; >= 2 means
  // a genuine error. The original appended `|| true`, which also swallowed
  // real failures (e.g. unreadable files).
  const cmd = `diff -ru "${oldDir}" "${newDir}" > "${patch}"`;
  const res = spawnSync(cmd, { stdio: 'inherit', shell: true });

  if (res.error) {
    console.error(`❌ diff failed: ${res.error.message}`);
    process.exit(1);
  }
  if (res.status !== null && res.status > 1) {
    console.error(`❌ diff exited with status ${res.status}`);
    process.exit(1);
  }
  console.log(`✅ Wrote patch: ${patch}`);
}
215
+
216
// `doc-tools generate metrics-docs`: spins up a cluster via
// generate-cluster-docs.sh, extracts metrics docs, and optionally produces a
// patch between two tags.
automation
  .command('metrics-docs')
  .description('Extract Redpanda metrics and generate JSON/AsciiDoc docs')
  .option('--tag <tag>', 'Redpanda tag (default: latest)', commonOptions.tag)
  .option('--docker-repo <repo>', '...', commonOptions.dockerRepo)
  .option('--console-tag <tag>', '...', commonOptions.consoleTag)
  .option('--console-docker-repo <repo>', '...', commonOptions.consoleDockerRepo)
  .option('--diff <oldTag>', 'Also diff autogenerated metrics from <oldTag> → <tag>')
  .action((options) => {
    verifyMetricsDependencies();

    const newTag = options.tag;
    const oldTag = options.diff;

    // When diffing, make sure docs for the old tag exist first; generation is
    // skipped if a previous run already produced them.
    if (oldTag) {
      const oldDir = path.join('autogenerated', oldTag, 'metrics');
      if (!fs.existsSync(oldDir)) {
        console.log(`⏳ Generating metrics docs for old tag ${oldTag}…`);
        runClusterDocs('metrics', oldTag, options);
      }
    }

    console.log(`⏳ Generating metrics docs for new tag ${newTag}…`);
    runClusterDocs('metrics', newTag, options);

    if (oldTag) {
      diffDirs('metrics', oldTag, newTag);
    }

    process.exit(0);
  });
247
+
248
// `doc-tools generate property-docs`: builds property docs from Redpanda
// source via the property-extractor Makefile, optionally diffing two tags.
automation
  .command('property-docs')
  .description('Extract properties from Redpanda source')
  .option('--tag <tag>', 'Git tag or branch to extract from (default: dev)', 'dev')
  .option('--diff <oldTag>', 'Also diff autogenerated properties from <oldTag> → <tag>')
  .action((options) => {
    verifyPropertyDependencies();

    const newTag = options.tag;
    const oldTag = options.diff;
    // The extractor is driven by its own Makefile in tools/property-extractor.
    const cwd = path.resolve(__dirname, '../tools/property-extractor');
    // Run `make build TAG=<tag>` in that directory; exits the process on failure.
    const make = (tag) => {
      console.log(`⏳ Building property docs for ${tag}…`);
      const r = spawnSync('make', ['build', `TAG=${tag}`], { cwd, stdio: 'inherit' });
      if (r.error ) { console.error(r.error); process.exit(1); }
      if (r.status !== 0) process.exit(r.status);
    };

    // Build docs for the old tag only if they don't already exist.
    if (oldTag) {
      const oldDir = path.join('autogenerated', oldTag, 'properties');
      if (!fs.existsSync(oldDir)) make(oldTag);
    }

    make(newTag);

    if (oldTag) {
      diffDirs('properties', oldTag, newTag);
    }

    process.exit(0);
  });
279
+
280
// `doc-tools generate rpk-docs`: spins up a cluster and generates rpk command
// reference docs, optionally diffing two tags. Mirrors metrics-docs.
automation
  .command('rpk-docs')
  .description('Generate documentation for rpk commands')
  .option('--tag <tag>', 'Redpanda tag (default: latest)', commonOptions.tag)
  .option('--docker-repo <repo>', '...', commonOptions.dockerRepo)
  .option('--console-tag <tag>', '...', commonOptions.consoleTag)
  .option('--console-docker-repo <repo>', '...', commonOptions.consoleDockerRepo)
  .option('--diff <oldTag>', 'Also diff autogenerated rpk docs from <oldTag> → <tag>')
  .action((options) => {
    verifyMetricsDependencies();

    const newTag = options.tag;
    const oldTag = options.diff;

    // Generate old-tag docs first (if absent) so the diff has both sides.
    if (oldTag) {
      const oldDir = path.join('autogenerated', oldTag, 'rpk');
      if (!fs.existsSync(oldDir)) {
        console.log(`⏳ Generating rpk docs for old tag ${oldTag}…`);
        runClusterDocs('rpk', oldTag, options);
      }
    }

    console.log(`⏳ Generating rpk docs for new tag ${newTag}…`);
    runClusterDocs('rpk', newTag, options);

    if (oldTag) {
      diffDirs('rpk', oldTag, newTag);
    }

    process.exit(0);
  });
311
+
312
// `doc-tools link-readme <subdir> <targetFilename>`: symlinks
// <repoRoot>/<subdir>/README.adoc into docs/modules/<module>/pages/.
// Idempotent: re-running replaces a stale link instead of failing (the
// original aborted with EEXIST on the second run).
programCli
  .command('link-readme <subdir> <targetFilename>')
  .description('Symlink a README.adoc into docs/modules/<module>/pages/')
  .action((subdir, targetFilename) => {
    const repoRoot = findRepoRoot();
    // Strip trailing slashes so "foo/bar/" and "foo/bar" behave the same.
    const normalized = subdir.replace(/\/+$/, '');
    const moduleName = normalized.split('/')[0];

    const projectDir = path.join(repoRoot, normalized);
    const pagesDir = path.join(repoRoot, 'docs', 'modules', moduleName, 'pages');
    const sourceFile = path.join(repoRoot, normalized, 'README.adoc');
    const destLink = path.join(pagesDir, targetFilename);

    if (!fs.existsSync(projectDir)) {
      console.error(`❌ Project directory not found: ${projectDir}`);
      process.exit(1);
    }
    if (!fs.existsSync(sourceFile)) {
      console.error(`❌ README.adoc not found in ${normalized}`);
      process.exit(1);
    }

    fs.mkdirSync(pagesDir, { recursive: true });
    // Use a relative link target so the link survives moving the repo.
    const relPath = path.relative(pagesDir, sourceFile);

    try {
      // Remove any pre-existing entry at the destination first so the
      // command can be re-run safely.
      fs.rmSync(destLink, { force: true });
      fs.symlinkSync(relPath, destLink);
      console.log(`✔️ Linked ${relPath} → ${destLink}`);
    } catch (err) {
      console.error(`❌ Failed to create symlink: ${err.message}`);
      process.exit(1);
    }
  });
345
+
346
// `doc-tools fetch`: downloads a file or directory from a GitHub repo into a
// local directory via tools/fetch-from-github.js.
programCli
  .command('fetch')
  .description('Fetch a file or directory from GitHub and save locally')
  .requiredOption('-o, --owner <owner>', 'GitHub repo owner or org')
  .requiredOption('-r, --repo <repo>', 'GitHub repo name')
  .requiredOption('-p, --remote-path <path>', 'Path in the repo to fetch')
  .requiredOption('-d, --save-dir <dir>', 'Local directory to save into')
  .option('-f, --filename <name>', 'Custom filename to save as')
  .action(async (options) => {
    try {
      // Lazily load the helper; `await` also tolerates the module exporting
      // a promise.
      const fetchFromGithub = await require('../tools/fetch-from-github.js');
      // options.owner, options.repo, options.remotePath, options.saveDir, options.filename
      await fetchFromGithub(
        options.owner,
        options.repo,
        options.remotePath,
        options.saveDir,
        options.filename
      );
    } catch (err) {
      console.error('❌', err.message);
      process.exit(1);
    }
  });
370
+
371
+
372
+ // Attach the automation group to the main program.
373
+ programCli.addCommand(automation);
374
+
375
+ programCli.parse(process.argv);
376
+
@@ -0,0 +1,68 @@
1
+ import os
2
+ import re
3
+
4
# Match an AsciiDoc external link, e.g. https://example.com[Text], whose
# closing bracket is not already followed by a caret (the caret makes the
# link open in a new tab).
pattern = r'(https://[^\]]+)\[([^\]]+)\](?!\^)'

# Function to process a single file
def process_file(file_path):
    """Append a caret (^) to every external AsciiDoc link in ``file_path``.

    Reads the file, rewrites each ``https://...[text]`` link as
    ``https://...[text^]`` (links already carrying a caret are untouched),
    and writes the result back in place.

    Returns True when the file was rewritten successfully, False when the
    write failed (the error is printed rather than raised).
    """
    with open(file_path, 'r', encoding='utf-8') as file:
        content = file.read()

    def replace_link(match):
        link = match.group(1)
        text = match.group(2)
        if text.endswith('^'):
            return match.group(0)  # No modification if caret is already present
        else:
            return f"{link}[{text}^]"

    # Process line by line; re.sub only touches lines that actually match.
    lines = content.split('\n')
    updated_lines = []
    for line in lines:
        if re.search(pattern, line):
            line = re.sub(pattern, replace_link, line)
        updated_lines.append(line)

    # Write the updated content back to the file
    try:
        with open(file_path, 'w', encoding='utf-8') as file:
            file.write('\n'.join(updated_lines))
    except Exception as e:
        print(f"Error writing to {file_path}: {e}")
        return False
    return True
37
# Get the directory of the current script
script_directory = os.path.dirname(os.path.abspath(__file__))

# Construct the directory path for the 'modules' directory
# (the script lives next to the Antora modules tree).
directory_path = os.path.join(script_directory, '..', 'modules')

# List of excluded file paths (relative to the 'modules' directory).
# These are generated reference pages whose links must not be rewritten.
exclusion_list = [
    os.path.join('reference', 'pages', 'redpanda-operator', 'crd.adoc'),
    os.path.join('reference', 'pages', 'k-console-helm-spec.adoc'),
    os.path.join('reference', 'pages', 'crd.adoc'),
    os.path.join('reference', 'pages', 'k-redpanda-helm-spec.adoc'),
    os.path.join('reference', 'partials', 'bundle-contents-k8s.adoc'),
    os.path.join('reference', 'partials', 'bundle-contents-linux.adoc'),
]
53
+
54
# Function to process all .adoc files in a directory
def process_directory(directory_path):
    """Run ``process_file`` on every non-excluded .adoc file under ``directory_path``."""
    for root, _, files in os.walk(directory_path):
        adoc_names = (name for name in files if name.endswith('.adoc'))
        for name in adoc_names:
            file_path = os.path.join(root, name)
            relative_file_path = os.path.relpath(file_path, directory_path)
            # Skip generated files that must keep their links untouched.
            if relative_file_path in exclusion_list:
                continue
            if process_file(file_path):
                print(f"Processed: {file_path}")
            else:
                print(f"Failed to process: {file_path}")

# Call the function with the constructed directory path
process_directory(directory_path)
@@ -0,0 +1,27 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const yaml = require('js-yaml');
4
+
5
/**
 * Look for antora.yml in the current working directory
 * (the project's root), load it if present, and return
 * its `prerelease` value (boolean). If missing or on error,
 * returns false.
 */
function getPrereleaseFromAntora() {
  const antoraPath = path.join(process.cwd(), 'antora.yml');
  if (!fs.existsSync(antoraPath)) return false;

  try {
    const antoraConfig = yaml.load(fs.readFileSync(antoraPath, 'utf8'));
    // Only a literal `true` counts as prerelease; any other value is falsy.
    return antoraConfig.prerelease === true;
  } catch (error) {
    console.error('Error reading antora.yml:', error.message);
    return false;
  }
}
26
+
27
+ module.exports = { getPrereleaseFromAntora };
@@ -0,0 +1,83 @@
1
#!/usr/bin/env bash
# Spin up a Redpanda cluster via docker-compose, run the metrics or rpk doc
# extractor against it inside a Python venv, then tear the cluster down.
set -euo pipefail

# Check if Docker is installed and running
if ! command -v docker &> /dev/null; then
  echo "❌ Docker is not installed or not in PATH. Please install Docker to continue."
  exit 1
fi

# Check if Docker daemon is running
if ! docker info &> /dev/null; then
  echo "❌ Docker daemon is not running. Please start Docker to continue."
  exit 1
fi

# Remember where we started so we can always come back
ORIGINAL_PWD="$(pwd)"

# All "cli-utils…" calls should be relative to this script’s dir
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Positional arguments (all optional, with defaults):
#   $1 MODE          'metrics' or anything else (rpk docs)
#   $2 TAG           Redpanda image tag
#   $3 DOCKER_REPO   Redpanda Docker repository
#   $4 CONSOLE_TAG   Console image tag
#   $5 CONSOLE_REPO  Console Docker repository
MODE="${1:-metrics}"
TAG="${2:-latest}"
DOCKER_REPO="${3:-redpanda}"
CONSOLE_TAG="${4:-latest}"
CONSOLE_REPO="${5:-console}"

# if it's an RC tag, switch Docker repo
# (case-insensitive match on "rc<digits>" anywhere in the tag)
shopt -s nocasematch
if [[ "$TAG" =~ rc[0-9]+ ]]; then
  DOCKER_REPO="redpanda-unstable"
fi
shopt -u nocasematch

# Derive the major.minor series from the tag (e.g. v24.1.2 -> 24.1).
if [[ "$TAG" == "latest" ]]; then
  MAJOR_MINOR="latest"
else
  MAJOR_MINOR="$(echo "$TAG" | sed -E 's/^v?([0-9]+\.[0-9]+).*$/\1/')"
fi

# Exported so docker-compose picks the right images.
export REDPANDA_VERSION="$TAG"
export REDPANDA_DOCKER_REPO="$DOCKER_REPO"
export REDPANDA_CONSOLE_VERSION="$CONSOLE_TAG"
export REDPANDA_CONSOLE_DOCKER_REPO="$CONSOLE_REPO"

# Start up the cluster
"$SCRIPT_DIR"/start-cluster.sh "$TAG"

# Wait for it to settle
if [[ "$MODE" == "metrics" ]]; then
  echo "Waiting 300 seconds for metrics to be available…"
  sleep 300
else
  echo "Waiting 30 seconds for cluster to be ready…"
  sleep 30
fi

# Go back to where we were
cd "$ORIGINAL_PWD"

# Ensure Python venv (always create under cli-utils/venv)
"$SCRIPT_DIR"/python-venv.sh \
  "$SCRIPT_DIR"/venv \
  "$SCRIPT_DIR"/../tools/metrics/requirements.txt

# Run the extractor matching MODE inside the venv.
if [[ "$MODE" == "metrics" ]]; then
  "$SCRIPT_DIR"/venv/bin/python \
    "$SCRIPT_DIR"/../tools/metrics/metrics.py \
    "$TAG"
else
  "$SCRIPT_DIR"/venv/bin/python \
    "$SCRIPT_DIR"/../tools/gen-rpk-ascii.py \
    "$TAG"
fi

echo "✅ Redpanda cluster docs generated successfully!"

# Tear down the cluster
cd "$SCRIPT_DIR"/../docker-compose
docker compose down --volumes

# Return to the original directory
cd "$ORIGINAL_PWD"