@redpanda-data/docs-extensions-and-macros 4.3.0 → 4.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/doc-tools.js +328 -0
- package/cli-utils/add-caret-external-links.py +68 -0
- package/cli-utils/beta-from-antora.js +27 -0
- package/cli-utils/generate-cluster-docs.sh +83 -0
- package/cli-utils/install-test-dependencies.sh +158 -0
- package/cli-utils/python-venv.sh +20 -0
- package/cli-utils/start-cluster.sh +53 -0
- package/docker-compose/bootstrap.yml +67 -0
- package/docker-compose/docker-compose.yml +414 -0
- package/docker-compose/generate-profiles.yaml +77 -0
- package/docker-compose/rpk-profile.yaml +24 -0
- package/docker-compose/transactions-schema.json +37 -0
- package/docker-compose/transactions.md +46 -0
- package/docker-compose/transform/README.adoc +73 -0
- package/docker-compose/transform/go.mod +5 -0
- package/docker-compose/transform/go.sum +2 -0
- package/docker-compose/transform/regex.wasm +0 -0
- package/docker-compose/transform/transform.go +122 -0
- package/docker-compose/transform/transform.yaml +33 -0
- package/extension-utils/compute-out.js +38 -0
- package/extension-utils/create-asciidoc-file.js +15 -0
- package/macros/data-template.js +2 -2
- package/package.json +15 -3
- package/tools/docusaurus-to-antora-conversion-scripts/convert-docs.sh +114 -0
- package/tools/docusaurus-to-antora-conversion-scripts/get-file-changes.sh +9 -0
- package/tools/docusaurus-to-antora-conversion-scripts/post-process-asciidoc.js +63 -0
- package/tools/docusaurus-to-antora-conversion-scripts/pre-process-markdown.js +108 -0
- package/tools/fetch-from-github.js +63 -0
- package/tools/gen-rpk-ascii.py +477 -0
- package/tools/get-console-version.js +53 -0
- package/tools/get-redpanda-version.js +53 -0
- package/tools/metrics/metrics.py +199 -0
- package/tools/metrics/requirements.txt +1 -0
- package/tools/property-extractor/Makefile +99 -0
- package/tools/property-extractor/README.adoc +206 -0
- package/tools/property-extractor/definitions.json +245 -0
- package/tools/property-extractor/file_pair.py +7 -0
- package/tools/property-extractor/json-to-asciidoc/generate_docs.py +460 -0
- package/tools/property-extractor/parser.py +224 -0
- package/tools/property-extractor/property_bag.py +4 -0
- package/tools/property-extractor/property_extractor.py +243 -0
- package/tools/property-extractor/requirements.txt +2 -0
- package/tools/property-extractor/tests/transformers_test.py +376 -0
- package/tools/property-extractor/transformers.py +397 -0
#!/usr/bin/env node

const { execSync, spawnSync } = require('child_process');
const { Command } = require('commander');
const path = require('path');
const fs = require('fs');

// --------------------------------------------------------------------
// Dependency check functions
// --------------------------------------------------------------------

/**
 * Probe an external dependency by running `<command> <versionArg>`.
 * Exits the process with code 1 (printing install help) when it fails.
 *
 * @param {string} command - Executable to probe (e.g. 'docker').
 * @param {string} versionArg - Argument used to probe it (e.g. '--version').
 * @param {string} name - Human-readable dependency name for error output.
 * @param {string} helpURL - Link shown to the user on failure.
 */
function checkDependency(command, versionArg, name, helpURL) {
  try {
    execSync(`${command} ${versionArg}`, { stdio: 'ignore' });
  } catch (error) {
    console.error(`Error: ${name} is required but not found or not working properly.
Please install ${name} and try again.
For more info, see: ${helpURL}`);
    process.exit(1);
  }
}

/**
 * Return true when `command` resolves on PATH; otherwise print an error
 * and return false (callers decide whether to exit).
 */
function checkCommandExists(command) {
  try {
    execSync(`which ${command}`, { stdio: 'ignore' });
    return true;
  } catch (error) {
    console.error(`Error: \`${command}\` is required but not found. Please install \`${command}\` and try again.`);
    return false;
  }
}

// Exit unless `make` is available (needed by the property-extractor Makefile).
function checkMake() {
  if (!checkCommandExists('make')) {
    console.error('Error: `make` is required but not found. Please install `make` to use the automation Makefile. For help, see: https://www.google.com/search?q=how+to+install+make');
    process.exit(1);
  }
}

/**
 * Exit unless a Python >= 3.10 interpreter is reachable as `python3` or
 * `python`. Bug fix: the previous version exited on the first too-old
 * candidate; now every candidate is probed before giving up, so an old
 * `python3` no longer masks a new-enough `python`.
 */
function checkPython() {
  const candidates = ['python3', 'python'];
  let found = false;
  let detectedVersion = null; // last too-old version seen, for the error message

  for (const cmd of candidates) {
    try {
      const versionOutput = execSync(`${cmd} --version`, {
        encoding: 'utf8',
        stdio: ['pipe', 'pipe', 'ignore']
      }).trim();
      // versionOutput looks like "Python 3.x.y"
      const versionString = versionOutput.split(' ')[1];
      const [major, minor] = versionString.split('.').map(Number);
      if (major > 3 || (major === 3 && minor >= 10)) {
        found = true;
        break;
      }
      detectedVersion = versionString; // too old — try the next candidate
    } catch {
      // this candidate didn’t exist or errored—try the next one
    }
  }
  if (!found) {
    if (detectedVersion) {
      console.error(`Error: Python 3.10 or higher is required. Detected version: ${detectedVersion}`);
    } else {
      console.error('Error: Python 3.10 or higher is required but not found.\nPlease install Python and ensure `python3 --version` or `python --version` returns at least 3.10: https://www.geeksforgeeks.org/how-to-install-python-on-mac/');
    }
    process.exit(1);
  }
}

// Exit unless a C++ compiler (gcc or clang) is available.
function checkCompiler() {
  const gccInstalled = checkCommandExists('gcc');
  const clangInstalled = checkCommandExists('clang');
  if (!gccInstalled && !clangInstalled) {
    console.error('Error: A C++ compiler (such as gcc or clang) is required but not found. Please install one: https://osxdaily.com/2023/05/02/how-install-gcc-mac/');
    process.exit(1);
  }
}

// Exit unless Docker is installed AND its daemon is reachable.
function checkDocker() {
  checkDependency('docker', '--version', 'Docker', 'https://docs.docker.com/get-docker/');
  try {
    execSync('docker info', { stdio: 'ignore' });
  } catch (error) {
    console.error('Error: Docker daemon appears to be not running. Please start Docker.');
    process.exit(1);
  }
}

// Preconditions for `generate property-docs`.
function verifyPropertyDependencies() {
  checkMake();
  checkPython();
  checkCompiler();
}

// Preconditions for `generate metrics-docs` and `generate rpk-docs`.
function verifyMetricsDependencies() {
  checkPython();
  if (!checkCommandExists('curl') || !checkCommandExists('tar')) {
    // `checkCommandExists` already prints a helpful message.
    process.exit(1);
  }
  checkDocker();
}

// --------------------------------------------------------------------
// Main CLI Definition
// --------------------------------------------------------------------
const programCli = new Command();

programCli
  .name('doc-tools')
  .description('Redpanda Document Automation CLI')
  .version('1.0.0');

// Top-level commands.
programCli
  .command('install-test-dependencies')
  .description('Install packages for doc test workflows')
  .action(() => {
    const scriptPath = path.join(__dirname, '../cli-utils/install-test-dependencies.sh');
    // Invoke through bash explicitly instead of `shell: true` so the path is
    // passed as a single argument (robust against spaces in the install dir).
    const result = spawnSync('bash', [scriptPath], { stdio: 'inherit' });
    if (result.error) {
      console.error(`❌ ${result.error.message}`);
      process.exit(1);
    }
    // Bug fix: `status` is null when the child is killed by a signal, and
    // `process.exit(null)` reported success. Treat that case as failure.
    process.exit(result.status ?? 1);
  });

programCli
  .command('get-redpanda-version')
  .description('Print the latest Redpanda version')
  .option('--beta', 'Return the latest RC (beta) version if available')
  .option('--from-antora', 'Read prerelease flag from local antora.yml')
  .action(async (options) => {
    try {
      await require('../tools/get-redpanda-version.js')(options);
    } catch (err) {
      console.error(err);
      process.exit(1);
    }
  });

programCli
  .command('get-console-version')
  .description('Print the latest Console version')
  .option('--beta', 'Return the latest beta version if available')
  .option('--from-antora', 'Read prerelease flag from local antora.yml')
  .action(async (options) => {
    try {
      await require('../tools/get-console-version.js')(options);
    } catch (err) {
      console.error(err);
      process.exit(1);
    }
  });

// Create an "automation" subcommand group.
const automation = new Command('generate')
  .description('Run docs automations (properties, metrics, and rpk docs generation)');

// --------------------------------------------------------------------
// Automation Subcommands: Delegate to a unified Bash script internally.
// --------------------------------------------------------------------

// Common options for both automation tasks.
const commonOptions = {
  tag: 'latest',
  dockerRepo: 'redpanda',
  consoleTag: 'latest',
  consoleDockerRepo: 'console'
};

/**
 * Run generate-cluster-docs.sh for the given mode ('metrics' or 'rpk') and
 * image tags. Exits the process on any failure.
 */
function runClusterDocs(mode, tag, options) {
  const script = path.join(__dirname, '../cli-utils/generate-cluster-docs.sh');
  const args = [ mode, tag, options.dockerRepo, options.consoleTag, options.consoleDockerRepo ];
  console.log(`Running ${script} with arguments: ${args.join(' ')}`);
  // Bug fix: do not combine an args array with `shell: true` — the shell
  // re-interprets the arguments. Invoke bash directly instead.
  const r = spawnSync('bash', [ script, ...args ], { stdio: 'inherit' });
  if (r.error) {
    console.error(`❌ Failed to run ${script}: ${r.error.message}`);
    process.exit(1);
  }
  // `status` is null if killed by a signal — treat as failure, not success.
  if (r.status !== 0) process.exit(r.status ?? 1);
}

/**
 * Diff two autogenerated directories (`autogenerated/<tag>/<kind>`) and write
 * the result to `autogenerated/diffs/<kind>/<oldTag>_to_<newTag>/changes.patch`.
 * Exits the process when either input directory is missing or diff errors.
 */
function diffDirs(kind, oldTag, newTag) {
  const oldDir = path.join('autogenerated', oldTag, kind);
  const newDir = path.join('autogenerated', newTag, kind);
  const diffDir = path.join('autogenerated', 'diffs', kind, `${oldTag}_to_${newTag}`);
  const patch = path.join(diffDir, 'changes.patch');

  if (!fs.existsSync(oldDir)) {
    console.error(`❌ Cannot diff: missing ${oldDir}`);
    process.exit(1);
  }
  if (!fs.existsSync(newDir)) {
    console.error(`❌ Cannot diff: missing ${newDir}`);
    process.exit(1);
  }

  fs.mkdirSync(diffDir, { recursive: true });

  // `diff` exits 1 when files differ, which is expected — hence `|| true`.
  // A shell is required here for the output redirection.
  const cmd = `diff -ru "${oldDir}" "${newDir}" > "${patch}" || true`;
  const res = spawnSync(cmd, { stdio: 'inherit', shell: true });

  if (res.error) {
    console.error(`❌ diff failed: ${res.error.message}`);
    process.exit(1);
  }
  console.log(`✅ Wrote patch: ${patch}`);
}

automation
  .command('metrics-docs')
  .description('Extract Redpanda metrics and generate JSON/AsciiDoc docs')
  .option('--tag <tag>', 'Redpanda tag (default: latest)', commonOptions.tag)
  .option('--docker-repo <repo>', '...', commonOptions.dockerRepo)
  .option('--console-tag <tag>', '...', commonOptions.consoleTag)
  .option('--console-docker-repo <repo>', '...', commonOptions.consoleDockerRepo)
  .option('--diff <oldTag>', 'Also diff autogenerated metrics from <oldTag> → <tag>')
  .action((options) => {
    verifyMetricsDependencies();

    const newTag = options.tag;
    const oldTag = options.diff;

    // Generate docs for the old tag only when they are not already cached.
    if (oldTag) {
      const oldDir = path.join('autogenerated', oldTag, 'metrics');
      if (!fs.existsSync(oldDir)) {
        console.log(`⏳ Generating metrics docs for old tag ${oldTag}…`);
        runClusterDocs('metrics', oldTag, options);
      }
    }

    console.log(`⏳ Generating metrics docs for new tag ${newTag}…`);
    runClusterDocs('metrics', newTag, options);

    if (oldTag) {
      diffDirs('metrics', oldTag, newTag);
    }

    process.exit(0);
  });

automation
  .command('property-docs')
  .description('Extract properties from Redpanda source')
  .option('--tag <tag>', 'Git tag or branch to extract from (default: dev)', 'dev')
  .option('--diff <oldTag>', 'Also diff autogenerated properties from <oldTag> → <tag>')
  .action((options) => {
    verifyPropertyDependencies();

    const newTag = options.tag;
    const oldTag = options.diff;
    const cwd = path.resolve(__dirname, '../tools/property-extractor');

    // Run `make build TAG=<tag>` inside the property-extractor directory.
    const make = (tag) => {
      console.log(`⏳ Building property docs for ${tag}…`);
      const r = spawnSync('make', ['build', `TAG=${tag}`], { cwd, stdio: 'inherit' });
      if (r.error) { console.error(r.error); process.exit(1); }
      // `status` is null if killed by a signal — treat as failure.
      if (r.status !== 0) process.exit(r.status ?? 1);
    };

    if (oldTag) {
      const oldDir = path.join('autogenerated', oldTag, 'properties');
      if (!fs.existsSync(oldDir)) make(oldTag);
    }

    make(newTag);

    if (oldTag) {
      diffDirs('properties', oldTag, newTag);
    }

    process.exit(0);
  });

automation
  .command('rpk-docs')
  .description('Generate documentation for rpk commands')
  .option('--tag <tag>', 'Redpanda tag (default: latest)', commonOptions.tag)
  .option('--docker-repo <repo>', '...', commonOptions.dockerRepo)
  .option('--console-tag <tag>', '...', commonOptions.consoleTag)
  .option('--console-docker-repo <repo>', '...', commonOptions.consoleDockerRepo)
  .option('--diff <oldTag>', 'Also diff autogenerated rpk docs from <oldTag> → <tag>')
  .action((options) => {
    verifyMetricsDependencies();

    const newTag = options.tag;
    const oldTag = options.diff;

    if (oldTag) {
      const oldDir = path.join('autogenerated', oldTag, 'rpk');
      if (!fs.existsSync(oldDir)) {
        console.log(`⏳ Generating rpk docs for old tag ${oldTag}…`);
        runClusterDocs('rpk', oldTag, options);
      }
    }

    console.log(`⏳ Generating rpk docs for new tag ${newTag}…`);
    runClusterDocs('rpk', newTag, options);

    if (oldTag) {
      diffDirs('rpk', oldTag, newTag);
    }

    process.exit(0);
  });

programCli
  .command('fetch')
  .description('Fetch a file or directory from GitHub and save locally')
  .requiredOption('-o, --owner <owner>', 'GitHub repo owner or org')
  .requiredOption('-r, --repo <repo>', 'GitHub repo name')
  .requiredOption('-p, --remote-path <path>', 'Path in the repo to fetch')
  .requiredOption('-d, --save-dir <dir>', 'Local directory to save into')
  .option('-f, --filename <name>', 'Custom filename to save as')
  .action(async (options) => {
    try {
      const fetchFromGithub = await require('../tools/fetch-from-github.js');
      // options.owner, options.repo, options.remotePath, options.saveDir, options.filename
      await fetchFromGithub(
        options.owner,
        options.repo,
        options.remotePath,
        options.saveDir,
        options.filename
      );
    } catch (err) {
      console.error('❌', err.message);
      process.exit(1);
    }
  });

// Attach the automation group to the main program.
programCli.addCommand(automation);

programCli.parse(process.argv);
|
import os
import re

# Match AsciiDoc external links of the form `https://…[text]` whose closing
# bracket is not already followed by a caret (`^` makes links open in a new tab).
pattern = r'(https://[^\]]+)\[([^\]]+)\](?!\^)'


def replace_link(match):
    """Return the matched link with `^` appended to its text.

    Leaves the match untouched when the text already ends with a caret.
    """
    link = match.group(1)
    text = match.group(2)
    if text.endswith('^'):
        return match.group(0)  # No modification if caret is already present
    return f"{link}[{text}^]"


def add_carets(line):
    """Return `line` with a caret appended to every external link in it."""
    # `re.sub` is a no-op on non-matching lines, so no pre-check is needed
    # (the previous `re.search` guard was redundant).
    return re.sub(pattern, replace_link, line)


def process_file(file_path):
    """Rewrite `file_path` in place, appending carets to external links.

    Returns True on success, False when the file could not be written.
    """
    with open(file_path, 'r', encoding='utf-8') as file:
        content = file.read()

    updated_lines = [add_carets(line) for line in content.split('\n')]

    # Write the updated content back to the file
    try:
        with open(file_path, 'w', encoding='utf-8') as file:
            file.write('\n'.join(updated_lines))
    except Exception as e:
        print(f"Error writing to {file_path}: {e}")
        return False
    return True


# Get the directory of the current script
script_directory = os.path.dirname(os.path.abspath(__file__))

# Construct the directory path for the 'modules' directory
directory_path = os.path.join(script_directory, '..', 'modules')

# List of excluded file paths (relative paths)
exclusion_list = [
    os.path.join('reference', 'pages', 'redpanda-operator', 'crd.adoc'),
    os.path.join('reference', 'pages', 'k-console-helm-spec.adoc'),
    os.path.join('reference', 'pages', 'crd.adoc'),
    os.path.join('reference', 'pages', 'k-redpanda-helm-spec.adoc'),
    os.path.join('reference', 'partials', 'bundle-contents-k8s.adoc'),
    os.path.join('reference', 'partials', 'bundle-contents-linux.adoc'),
]


def process_directory(directory_path):
    """Apply `process_file` to every non-excluded .adoc file under
    `directory_path`, reporting success or failure for each file."""
    for root, _, files in os.walk(directory_path):
        for file in files:
            if file.endswith('.adoc'):
                file_path = os.path.join(root, file)
                relative_file_path = os.path.relpath(file_path, directory_path)
                if relative_file_path not in exclusion_list:
                    if process_file(file_path):
                        print(f"Processed: {file_path}")
                    else:
                        print(f"Failed to process: {file_path}")


# Call the function with the constructed directory path
process_directory(directory_path)
const fs = require('fs');
const path = require('path');
const yaml = require('js-yaml');

/**
 * Determine whether the Antora component in the current working directory is
 * flagged as a prerelease.
 *
 * Looks for antora.yml in `process.cwd()`, parses it, and returns true only
 * when its `prerelease` key is exactly boolean true. Returns false when the
 * file is absent or cannot be read/parsed (the error is logged).
 *
 * @returns {boolean} prerelease flag from antora.yml, or false.
 */
function getPrereleaseFromAntora() {
  const antoraPath = path.join(process.cwd(), 'antora.yml');

  if (!fs.existsSync(antoraPath)) return false;

  try {
    const antoraConfig = yaml.load(fs.readFileSync(antoraPath, 'utf8'));
    return antoraConfig.prerelease === true;
  } catch (error) {
    console.error('Error reading antora.yml:', error.message);
    return false;
  }
}

module.exports = { getPrereleaseFromAntora };
#!/usr/bin/env bash
#
# Spin up a Redpanda cluster via docker-compose, run the metrics or rpk docs
# generator against it, then tear the cluster down.
#
# Usage: generate-cluster-docs.sh [mode] [tag] [docker-repo] [console-tag] [console-repo]
#   mode: "metrics" (default) or "rpk"
set -euo pipefail

# Check if Docker is installed and running
if ! command -v docker &> /dev/null; then
  echo "❌ Docker is not installed or not in PATH. Please install Docker to continue."
  exit 1
fi

# Check if Docker daemon is running
if ! docker info &> /dev/null; then
  echo "❌ Docker daemon is not running. Please start Docker to continue."
  exit 1
fi

# Remember where we started so we can always come back
ORIGINAL_PWD="$(pwd)"

# All "cli-utils…" calls should be relative to this script’s dir
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

MODE="${1:-metrics}"
TAG="${2:-latest}"
DOCKER_REPO="${3:-redpanda}"
CONSOLE_TAG="${4:-latest}"
CONSOLE_REPO="${5:-console}"

# if it's an RC tag, switch Docker repo (RC images live in redpanda-unstable)
shopt -s nocasematch
if [[ "$TAG" =~ rc[0-9]+ ]]; then
  DOCKER_REPO="redpanda-unstable"
fi
shopt -u nocasematch

# NOTE(review): MAJOR_MINOR is computed but never referenced below — confirm
# whether downstream tooling still needs it before removing.
if [[ "$TAG" == "latest" ]]; then
  MAJOR_MINOR="latest"
else
  MAJOR_MINOR="$(echo "$TAG" | sed -E 's/^v?([0-9]+\.[0-9]+).*$/\1/')"
fi

# docker-compose.yml reads these to pick the images to run.
export REDPANDA_VERSION="$TAG"
export REDPANDA_DOCKER_REPO="$DOCKER_REPO"
export REDPANDA_CONSOLE_VERSION="$CONSOLE_TAG"
export REDPANDA_CONSOLE_DOCKER_REPO="$CONSOLE_REPO"

# Bug fix: with `set -e`, any failure after the cluster started used to skip
# the teardown and leave the cluster running. Tear it down on ANY exit.
cleanup() {
  cd "$SCRIPT_DIR"/../docker-compose
  docker compose down --volumes
  cd "$ORIGINAL_PWD"
}
trap cleanup EXIT

# Start up the cluster
"$SCRIPT_DIR"/start-cluster.sh "$TAG"

# Wait for it to settle
if [[ "$MODE" == "metrics" ]]; then
  echo "Waiting 300 seconds for metrics to be available…"
  sleep 300
else
  echo "Waiting 30 seconds for cluster to be ready…"
  sleep 30
fi

# Go back to where we were
cd "$ORIGINAL_PWD"

# Ensure Python venv (always create under cli-utils/venv)
"$SCRIPT_DIR"/python-venv.sh \
  "$SCRIPT_DIR"/venv \
  "$SCRIPT_DIR"/../tools/metrics/requirements.txt

if [[ "$MODE" == "metrics" ]]; then
  "$SCRIPT_DIR"/venv/bin/python \
    "$SCRIPT_DIR"/../tools/metrics/metrics.py \
    "$TAG"
else
  "$SCRIPT_DIR"/venv/bin/python \
    "$SCRIPT_DIR"/../tools/gen-rpk-ascii.py \
    "$TAG"
fi

echo "✅ Redpanda cluster docs generated successfully!"

# The EXIT trap tears down the cluster and returns to $ORIGINAL_PWD.
#!/bin/bash
#
# Install the toolchain needed by the docs test workflows:
# expect, jq, Node.js (via fnm), Rust (via rustup), and rpk.
set -e

# Install Node.js via fnm unless a `node` binary is already on PATH.
install_node() {
  if command -v node &>/dev/null; then
    echo "Node.js is already installed. Version: $(node -v)"
  else
    echo "Installing Node.js..."
    curl -fsSL https://fnm.vercel.app/install | bash || { echo "Failed to install fnm"; exit 1; }
    # Load fnm into the current shell
    export PATH="$HOME/.fnm:$PATH"
    eval "$(fnm env)" || { echo "Failed to load fnm environment"; exit 1; }
    fnm install --lts || { echo "Failed to install Node.js"; exit 1; }
    fnm use --lts || { echo "Failed to use Node.js"; exit 1; }
    echo "Node.js version: $(node -v)"
  fi
}

# Install Rust via rustup unless a `rustc` binary is already on PATH.
install_rust() {
  if command -v rustc &>/dev/null; then
    echo "Rust is already installed. Version: $(rustc --version)"
  else
    echo "Installing Rust..."
    curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y || { echo "Failed to install Rust"; exit 1; }
    source "$HOME/.cargo/env" || { echo "Failed to load Rust environment"; exit 1; }
    echo "Rust version: $(rustc --version)"
  fi
}

# Ensure expect and jq are installed (per-OS), then Node.js and Rust.
ensure_dependencies_installed() {

  if ! command -v expect &> /dev/null; then
    echo "Expect is not installed. Trying to install..."

    # Detect OS
    case "$(uname -s)" in
      Linux)
        echo "Detected Linux."
        sudo apt-get update && sudo apt-get install expect -y || sudo yum install expect -y || { echo "Failed to install expect"; exit 1; }
        ;;
      Darwin)
        echo "Detected macOS."
        # Requires Homebrew; fail fast when it is missing.
        if ! command -v brew &> /dev/null; then
          echo "Homebrew not found."
          exit 1
        fi
        brew install expect || { echo "Failed to install expect"; exit 1; }
        ;;
      *)
        echo "Unsupported operating system. Please install expect manually."
        exit 1
        ;;
    esac
  fi

  if ! command -v jq &> /dev/null; then
    echo "jq is not installed. Trying to install..."

    # Install jq based on OS
    case "$(uname -s)" in
      Linux)
        sudo apt-get install jq -y || sudo yum install jq -y || { echo "Failed to install jq"; exit 1; }
        ;;
      Darwin)
        brew install jq || { echo "Failed to install jq"; exit 1; }
        ;;
      *)
        echo "Unsupported operating system. Please install jq manually."
        exit 1
        ;;
    esac
  fi

  install_node
  install_rust
}

# Ensure expect and jq are installed
ensure_dependencies_installed

# Report whether rpk is already installed (prints version when present).
check_rpk_installed() {
  if command -v rpk &>/dev/null; then
    echo "rpk is already installed. Version information:"
    rpk --version
    return 0
  else
    return 1
  fi
}

# Determine OS and architecture
OS="$(uname -s)"
ARCH="$(uname -m)"

# Check if rpk is already installed
if check_rpk_installed; then
  exit 0
fi

# Check if running on macOS and use Homebrew to install rpk
if [ "${OS}" == "Darwin" ]; then
  echo "Detected macOS. Attempting to install rpk using Homebrew..."

  # Check if Homebrew is installed
  if ! command -v brew &>/dev/null; then
    echo "Homebrew not found."
    exit 1
  fi

  # Install rpk
  brew install redpanda-data/tap/redpanda || { echo "Failed to install rpk via Homebrew"; exit 1; }

  # Verify installation
  echo "rpk has been installed. Version information:"
  rpk --version
  exit 0
fi

# For Linux systems
if [ "${OS}" == "Linux" ]; then
  # Bug fix: the amd64 archive was hard-coded even on arm64 hosts.
  # Pick the release archive that matches the machine architecture.
  case "${ARCH}" in
    x86_64|amd64)  FILENAME="rpk-linux-amd64.zip" ;;
    aarch64|arm64) FILENAME="rpk-linux-arm64.zip" ;;
    *)
      echo "Unsupported architecture: ${ARCH}"
      exit 1
      ;;
  esac
  URL_BASE="https://github.com/redpanda-data/redpanda/releases"

  # Download latest version of rpk
  echo "Downloading ${FILENAME}..."
  curl -Lf --retry 3 -O "${URL_BASE}/latest/download/${FILENAME}" \
    || { echo "Failed to download rpk"; exit 1; }

  # Ensure the target directory exists
  mkdir -p "$HOME/.local/bin" || { echo "Failed to create directory"; exit 1; }

  # Unzip the rpk binary to the target directory
  unzip -o "${FILENAME}" -d "$HOME/.local/bin" || { echo "Failed to unzip rpk"; exit 1; }

  # Remove the downloaded archive
  rm "${FILENAME}" || { echo "Failed to remove downloaded archive"; exit 1; }

  # Add the target directory to PATH for the current session
  export PATH="$HOME/.local/bin:$PATH"

  # Add the target directory to PATH for future sessions
  echo 'export PATH=$HOME/.local/bin:$PATH' >> ~/.bashrc
  source ~/.bashrc

  # Verify installation
  echo "rpk has been installed. Version information:"
  rpk --version
  exit 0
fi

echo "Unsupported operating system: ${OS}"
exit 1
#!/usr/bin/env bash
#
# Create a new venv at $1 and install from $2

set -euo pipefail

VENV_DIR="${1:-venv}"
REQ_FILE="${2:-requirements.txt}"

# Always start from a clean virtual environment.
echo "Recreating Python venv at $VENV_DIR..."
rm -rf "$VENV_DIR"
python3 -m venv "$VENV_DIR"

PIP="$VENV_DIR/bin/pip"
"$PIP" install --upgrade pip --quiet

# Install pinned requirements when the file exists; otherwise warn and continue.
if [[ ! -f "$REQ_FILE" ]]; then
  echo "⚠️ Requirements file not found at $REQ_FILE"
else
  echo "Installing $REQ_FILE..."
  "$PIP" install -r "$REQ_FILE" --quiet
fi