@redpanda-data/docs-extensions-and-macros 4.2.5 → 4.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.adoc +184 -21
  2. package/bin/doc-tools.js +328 -0
  3. package/cli-utils/add-caret-external-links.py +68 -0
  4. package/cli-utils/beta-from-antora.js +27 -0
  5. package/cli-utils/generate-cluster-docs.sh +83 -0
  6. package/cli-utils/install-test-dependencies.sh +158 -0
  7. package/cli-utils/python-venv.sh +20 -0
  8. package/cli-utils/start-cluster.sh +53 -0
  9. package/docker-compose/bootstrap.yml +67 -0
  10. package/docker-compose/docker-compose.yml +414 -0
  11. package/docker-compose/generate-profiles.yaml +77 -0
  12. package/docker-compose/rpk-profile.yaml +24 -0
  13. package/docker-compose/transactions-schema.json +37 -0
  14. package/docker-compose/transactions.md +46 -0
  15. package/docker-compose/transform/README.adoc +73 -0
  16. package/docker-compose/transform/go.mod +5 -0
  17. package/docker-compose/transform/go.sum +2 -0
  18. package/docker-compose/transform/regex.wasm +0 -0
  19. package/docker-compose/transform/transform.go +122 -0
  20. package/docker-compose/transform/transform.yaml +33 -0
  21. package/extension-utils/compute-out.js +38 -0
  22. package/extension-utils/create-asciidoc-file.js +15 -0
  23. package/macros/data-template.js +591 -0
  24. package/package.json +21 -4
  25. package/tools/docusaurus-to-antora-conversion-scripts/convert-docs.sh +114 -0
  26. package/tools/docusaurus-to-antora-conversion-scripts/get-file-changes.sh +9 -0
  27. package/tools/docusaurus-to-antora-conversion-scripts/post-process-asciidoc.js +63 -0
  28. package/tools/docusaurus-to-antora-conversion-scripts/pre-process-markdown.js +108 -0
  29. package/tools/fetch-from-github.js +63 -0
  30. package/tools/gen-rpk-ascii.py +477 -0
  31. package/tools/get-console-version.js +53 -0
  32. package/tools/get-redpanda-version.js +53 -0
  33. package/tools/metrics/metrics.py +199 -0
  34. package/tools/metrics/requirements.txt +1 -0
  35. package/tools/property-extractor/Makefile +99 -0
  36. package/tools/property-extractor/README.adoc +206 -0
  37. package/tools/property-extractor/definitions.json +245 -0
  38. package/tools/property-extractor/file_pair.py +7 -0
  39. package/tools/property-extractor/json-to-asciidoc/generate_docs.py +460 -0
  40. package/tools/property-extractor/parser.py +224 -0
  41. package/tools/property-extractor/property_bag.py +4 -0
  42. package/tools/property-extractor/property_extractor.py +243 -0
  43. package/tools/property-extractor/requirements.txt +2 -0
  44. package/tools/property-extractor/tests/transformers_test.py +376 -0
  45. package/tools/property-extractor/transformers.py +397 -0
@@ -0,0 +1,114 @@
1
+ #!/bin/bash
2
+
3
+ if ! command -v pandoc &> /dev/null; then
4
+ echo "Error: Pandoc is not installed."
5
+ echo "Please visit https://pandoc.org/installing.html to install Pandoc."
6
+ exit 1
7
+ fi
8
+
9
+ if ! command -v kramdoc &> /dev/null; then
10
+ echo "Error: Kramdoc is not installed."
11
+ echo "Please install kramdoc using: gem install kramdown-asciidoc"
12
+ exit 1
13
+ fi
14
+
15
+ SOURCE_DIRECTORY="$1"
16
+
17
+ if [ -z "$SOURCE_DIRECTORY" ]; then
18
+ echo "Error: Source directory not provided."
19
+ echo "Usage: ./your_script.sh /path/to/your/source_directory"
20
+ exit 1
21
+ fi
22
+
23
+ OUTPUT_DIRECTORY="$(cd "$(dirname "$0")/../modules" && pwd)"
24
+
25
+ # Create the output and partials directories if they don't exist
26
+ mkdir -p "$OUTPUT_DIRECTORY"
27
+
28
+ function remove_leading_tabs() {
29
+ local mdx_file="$1"
30
+ local content="$(cat "$mdx_file")"
31
+
32
+ # Remove leading tabs in the <Tabs> elements
33
+ local updated_content="$(echo "$content" | perl -0777 -pe 's/(\s*)<TabItem([\s\S]*?)>([\s\S]*?)<\/TabItem>/sprintf("%s<TabItem%s>%s<\/TabItem>", $1, $2, $3 =~ s!^\t!!rmsg)/ge')"
34
+
35
+ # Write the updated content back to the file
36
+ echo "$updated_content" > "$mdx_file"
37
+ }
38
+
39
+ function preprocess_markdown() {
40
+ local markdown_file="$1"
41
+ node "$(dirname "$0")/pre-process-markdown.js" "$markdown_file"
42
+ }
43
+
44
+ # Convert a Markdown file to AsciiDoc and add the description
45
+ function convert_markdown_to_asciidoc() {
46
+ local markdown_file="$1"
47
+ local output_file="$2"
48
+ # Remove leading tabs from <Tab> elements
49
+ remove_leading_tabs "$markdown_file"
50
+
51
+ # Preprocess the markdown file
52
+ preprocess_markdown "$markdown_file"
53
+
54
+ local content="$(cat "$markdown_file")"
55
+
56
+ local output_file_dir="$(dirname "$output_file")"
57
+ mkdir -p "$output_file_dir"
58
+
59
+ # Extract the content of the meta description tag
60
+ local description="$(echo "$content" | sed -n 's/.*<meta name="description" content="\([^"]*\)".*/\1/p')"
61
+
62
+ # Remove the head element from the source Markdown file and save it
63
+ local cleaned_content="$(echo "$content" | sed '/<head>/,/<\/head>/d')"
64
+ # Remove the head element from the source Markdown file and save it
65
+ local cleaned_content
66
+ cleaned_content=$(echo "$content" | sed '/<head>/,/<\/head>/d')
67
+ local cleaned_file
68
+ cleaned_file=$(mktemp)
69
+ echo "$cleaned_content" > "$cleaned_file"
70
+
71
+ # Convert the cleaned Markdown file to AsciiDoc using Kramdoc
72
+ local asciidoc_content
73
+ asciidoc_content=$(kramdoc -o - "$cleaned_file")
74
+
75
+ # Clean up temporary file
76
+ rm -f "$cleaned_file"
77
+
78
+ # Insert the description attribute on the second line of the AsciiDoc content
79
+ asciidoc_content="$(echo "$asciidoc_content" | awk -v desc="$description" 'NR==1{print; print ":description: " desc ""; next} 1')"
80
+
81
+ # Write the updated AsciiDoc content to the output file
82
+ echo "$asciidoc_content" > "$output_file"
83
+
84
+ echo "Converted: $markdown_file -> $output_file"
85
+ }
86
+
87
+ # Convert all Markdown files in the source directory
88
+ # Initialize counters
89
+ success_count=0
90
+ failure_count=0
91
+
92
+ while IFS= read -r -d '' markdown_file; do
93
+ output_file="$(echo "$markdown_file" \
94
+ | sed "s|$SOURCE_DIRECTORY|$OUTPUT_DIRECTORY|" \
95
+ | sed 's|\.mdx$|.adoc|' \
96
+ | sed 's|\(.*\)/\(.*\)|\1/pages/\2|')"
97
+
98
+ if convert_markdown_to_asciidoc "$markdown_file" "$output_file"; then
99
+ # Run the Node.js script to process the output file
100
+ if node "$(dirname "$0")/post-process-asciidoc.js" "$output_file"; then
101
+ success_count=$((success_count + 1))
102
+ else
103
+ echo "Error: Failed to post-process ${output_file}"
104
+ failure_count=$((failure_count + 1))
105
+ fi
106
+ else
107
+ echo "Error: Failed to convert ${markdown_file}"
108
+ failure_count=$((failure_count + 1))
109
+ fi
110
+ done < <(find "$SOURCE_DIRECTORY" -name "*.mdx" -print0)
111
+
112
+ echo "Conversion complete. Success: ${success_count}, Failures: ${failure_count}"
113
+
114
+ echo "All Markdown files converted to AsciiDoc."
@@ -0,0 +1,9 @@
1
#!/bin/bash
# Prompts for two branch names and prints a summary of file changes between
# them, restricted to ./modules/.
set -euo pipefail
echo "Please enter the name of the first branch:"
# -r prevents read from mangling backslashes in branch names.
read -r branch1
echo "Please enter the name of the second branch:"
read -r branch2

git fetch
# Quote the range so branch names are never word-split or glob-expanded.
git diff --summary "$branch1..$branch2" -- ./modules/
@@ -0,0 +1,63 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
/**
 * Rewrites Docusaurus-style relative `link:../…` macros in one AsciiDoc file
 * into Antora `xref:` macros, writing the result back to the same file.
 * Read/write errors are logged and the file is left untouched.
 *
 * @param {string} file - Path of the .adoc file to rewrite in place.
 */
function processFile(file) {
  let content;
  try {
    content = fs.readFileSync(file, 'utf-8');
  } catch (err) {
    console.error(`Error reading file ${file}: ${err.message}`);
    return;
  }

  const newContent = content.replace(
    /link:(\.\.\/)+([\w/.-]+)(#?[\w/.-]*)(\[.+?\])/g,
    (match, dots, linkPath, anchor, linkText) => {
      // NOTE(review): a repeated capture group only retains its LAST
      // repetition, so `dots` is always a single "../" and depth is always 1
      // no matter how many "../" appear — confirm this is the intended
      // mapping of path components onto the Antora module:page form.
      const depth = dots.match(/\.\.\//g).length;
      const pathParts = linkPath.split('/');
      // Ensure we don't go beyond the available path parts
      const startIndex = Math.max(0, pathParts.length - depth);
      const newPath = pathParts.slice(0, startIndex).join(':');
      const page = `${pathParts[pathParts.length - 1]}.adoc${anchor || ''}${linkText}`;
      // When there is no module prefix, emit `xref:page` — the previous code
      // produced an invalid double-colon form `xref::page.adoc`.
      return newPath ? `xref:${newPath}:${page}` : `xref:${page}`;
    }
  );

  try {
    fs.writeFileSync(file, newContent, 'utf-8');
  } catch (err) {
    console.error(`Error writing file ${file}: ${err.message}`);
  }
}
31
+
32
/**
 * Recursively walks `directory`, rewriting links in every .adoc file found
 * via processFile. Non-.adoc files are ignored.
 *
 * @param {string} directory - Root directory to scan.
 */
function processDirectory(directory) {
  for (const entry of fs.readdirSync(directory)) {
    const entryPath = path.join(directory, entry);
    const entryStat = fs.statSync(entryPath);

    if (entryStat.isDirectory()) {
      processDirectory(entryPath);
    } else if (entryStat.isFile() && path.extname(entry) === '.adoc') {
      processFile(entryPath);
    }
  }
}
46
+
47
// CLI entry point: accepts one argument, the path of a file or directory to
// post-process.
const inputPath = process.argv[2];

if (!inputPath) {
  console.error('No input path provided');
  process.exit(1);
}

// NOTE(review): statSync throws an uncaught error if inputPath does not
// exist — consider a try/catch for a friendlier message.
const stat = fs.statSync(inputPath);

if (stat.isFile()) {
  processFile(inputPath);
} else if (stat.isDirectory()) {
  processDirectory(inputPath);
} else {
  console.error('Input path is neither a file nor a directory');
  process.exit(1);
}
@@ -0,0 +1,108 @@
1
// Pre-processes a Docusaurus Markdown file before kramdoc conversion:
// <Tabs>, <details>, and HTML tables are rewritten (see functions below).
const fs = require('fs');
const { execSync } = require('child_process');
const pandoc = require('node-pandoc');
// Fail fast if required CLIs are missing
// NOTE(review): `command -v` assumes a POSIX shell; confirm this check
// behaves as intended on Windows hosts.
['pandoc', 'kramdoc'].forEach(cmd => {
  try {
    execSync(`command -v ${cmd}`, { stdio: 'ignore' });
  } catch {
    console.error(`Required dependency "${cmd}" not found in PATH`);
    process.exit(1);
  }
});
const os = require('os');
const path = require('path');
15
+
16
/**
 * Converts an HTML <table> fragment to AsciiDoc via node-pandoc.
 * Never rejects: on conversion failure the error is logged and the original
 * HTML is returned unchanged so the caller always gets usable content.
 *
 * @param {string} htmlTable - The HTML table markup to convert.
 * @returns {Promise<string>} AsciiDoc table, or the input on failure.
 */
function convertHtmlTableToAsciiDoc(htmlTable) {
  return new Promise((resolve) => {
    const onDone = (err, converted) => {
      if (err) {
        console.error(`Error converting HTML table to AsciiDoc: ${err.message}`);
        resolve(htmlTable);
        return;
      }
      resolve(converted);
    };
    pandoc(htmlTable, '-f html -t asciidoc', onDone);
  });
}
28
+
29
/**
 * Converts a Markdown string to AsciiDoc by shelling out to kramdoc.
 * On failure the error is logged and the original markdown is returned.
 *
 * @param {string} markdown - Markdown source text.
 * @returns {string} AsciiDoc output, or the input on failure.
 */
function markdownToAsciidoc(markdown) {
  // Use a per-call temp directory so concurrent invocations cannot clobber
  // each other's scratch file (the previous fixed name in os.tmpdir() was a
  // race between processes).
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'kramdoc-'));
  const tempMarkdownPath = path.join(tempDir, 'temp_markdown.md');
  fs.writeFileSync(tempMarkdownPath, markdown, 'utf-8');

  let result;
  try {
    const command = `kramdoc -o - "${tempMarkdownPath}"`;
    result = execSync(command, { encoding: 'utf-8' });
  } catch (err) {
    console.error(`Error converting Markdown to AsciiDoc: ${err.message}`);
    result = markdown;
  } finally {
    // Always clean up the scratch directory, even when kramdoc fails.
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
  return result;
}
45
+
46
/**
 * Converts a Docusaurus <Tabs> block into an AsciiDoc [tabs] block, wrapped
 * in an HTML comment so kramdoc passes it through for later recovery.
 *
 * @param {string} match - The full <Tabs>…</Tabs> markup.
 * @returns {string} The commented-out AsciiDoc tabs block.
 */
function processTabs(match) {
  const tabItems = [...match.matchAll(/\s?<TabItem[^>]*value="([^"]+)"[^>]*label="([^"]+)"[^>]*>([\s\S]*?)<\/TabItem>/g)];

  const result = ['\n<!--\n[tabs]'];
  result.push('=====');
  for (const tabItem of tabItems) {
    // Group 1 (value) is captured but unused; only label and content matter.
    const [, , label, content] = tabItem;
    result.push(`${label}::`);
    result.push('+');
    result.push('--');
    // markdownToAsciidoc takes one argument; the stray '' arg was removed.
    const asciidocContent = markdownToAsciidoc(content.trim());
    result.push(asciidocContent);
    result.push('--');
  }

  result.push('=====');
  result.push('-->');
  return result.join('\n');
}
65
+
66
/**
 * Converts HTML <details>/<summary> blocks into AsciiDoc collapsible blocks,
 * wrapped in an HTML comment so kramdoc passes them through untouched.
 *
 * @param {string} match - Markup containing <details>…</details> blocks.
 * @returns {string} The input with each details block rewritten.
 */
function processDetails(match) {
  const detailsRegex = /<details>(?:\r?\n)<summary>([\s\S]*?)<\/summary>(?:\r?\n)([\s\S]*?)(?:\r?\n)<\/details>/g;

  return match.replace(detailsRegex, (m, title, content) => {
    const asciidocTitle = `.${title.trim()}`;
    // AsciiDoc spells the option [%collapsible]; the previous
    // "[%collapsible%]" trailing '%' produced invalid AsciiDoc.
    const asciidocBlock = `[%collapsible]\n====\n${content.trim()}\n====`;

    return `<!--\n${asciidocTitle}\n${asciidocBlock}\n-->`;
  });
}
76
+
77
/**
 * Pre-processes one Markdown file in place: <Tabs>, <details>, and HTML
 * tables are converted to AsciiDoc stashed inside HTML comments.
 *
 * @param {string} file - Path of the Markdown file to rewrite.
 */
async function convertFile(file) {
  const content = fs.readFileSync(file, 'utf-8');

  let newContent = content.replace(/<Tabs>([\s\S]*?)<\/Tabs>/g, processTabs);
  newContent = newContent.replace(/<details>([\s\S]*?)<\/details>/g, processDetails);

  const htmlTableMatches = newContent.match(/\s?(<table>((.|\n)*?)<\/table>)/g);
  if (htmlTableMatches) {
    for (const htmlTableMatch of htmlTableMatches) {
      const tableRegex = /(<table>((.|\n)*?)<\/table>)/;
      const tableMatch = htmlTableMatch.match(tableRegex);
      if (tableMatch) {
        const htmlTable = tableMatch[0];
        const asciidocTable = await convertHtmlTableToAsciiDoc(htmlTable);
        // Use a function replacer so any '$' sequences in the converted
        // table ($&, $', …) are inserted literally instead of being treated
        // as special replacement patterns by String.replace.
        newContent = newContent.replace(htmlTableMatch, () => `\n<!--\n${asciidocTable}\n-->`);
      }
    }
  }

  fs.writeFileSync(file, newContent, 'utf-8');
}
98
+
99
// CLI entry point: expects the path of the Markdown file to convert.
const inputFile = process.argv[2];
if (!inputFile) {
  console.error('No input file provided');
  process.exit(1);
}

// Rewrite the file in place; exit non-zero on failure so calling scripts
// can count it as a conversion failure.
convertFile(inputFile).catch((error) => {
  console.error(`Error processing file: ${error.message}`);
  process.exit(1);
});
@@ -0,0 +1,63 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
// Memoized Octokit client shared by all fetches in this module.
let octokitInstance = null;

/**
 * Lazily creates (and caches) an Octokit client. Uses VBOT_GITHUB_API_TOKEN
 * for auth when set; otherwise warns that rate limits will be restricted.
 *
 * @returns {Promise<object>} The shared Octokit instance.
 */
async function loadOctokit() {
  if (octokitInstance) {
    return octokitInstance;
  }

  const { Octokit } = await import('@octokit/rest');
  const token = process.env.VBOT_GITHUB_API_TOKEN;

  octokitInstance = token ? new Octokit({ auth: token }) : new Octokit();

  if (!token) {
    console.warn(
      'Warning: No GitHub token found (VBOT_GITHUB_API_TOKEN). API rate limits will be restricted.'
    );
  }

  return octokitInstance;
}
22
+
23
/**
 * Writes `content` to saveDir/filename, creating saveDir (and parents)
 * first, and logs the saved path.
 *
 * @param {string|Buffer} content - Data to write.
 * @param {string} saveDir - Destination directory (created if missing).
 * @param {string} filename - Name of the file inside saveDir.
 */
async function saveFile(content, saveDir, filename) {
  const { mkdir, writeFile } = fs.promises;
  await mkdir(saveDir, { recursive: true });
  const target = path.join(saveDir, filename);
  await writeFile(target, content);
  console.log(`Saved: ${target}`);
}
29
+
30
/**
 * Downloads a file or directory tree from a GitHub repository into saveDir,
 * preserving directory structure for nested directories.
 *
 * @param {string} owner - Repository owner.
 * @param {string} repo - Repository name.
 * @param {string} remotePath - Path of the file or directory in the repo.
 * @param {string} saveDir - Local directory to save into.
 * @param {string} [customFilename] - Optional name to save a SINGLE file as;
 *   ignored when remotePath resolves to a directory.
 * @throws {Error} On rate limiting, missing paths, or other API failures.
 */
async function fetchFromGithub(owner, repo, remotePath, saveDir, customFilename) {
  const octokit = await loadOctokit();

  try {
    const resp = await octokit.repos.getContent({ owner, repo, path: remotePath });
    if (Array.isArray(resp.data)) {
      // directory
      for (const item of resp.data) {
        if (item.type === 'file') {
          // Do NOT forward customFilename here: applying one custom name to
          // every file in a directory would make each download overwrite
          // the previous one.
          await fetchFromGithub(owner, repo, item.path, saveDir);
        } else if (item.type === 'dir') {
          // For directories, maintain the directory structure
          const nestedDir = path.join(saveDir, path.basename(item.path));
          await fetchFromGithub(owner, repo, item.path, nestedDir);
        }
      }
    } else {
      // single file
      const content = Buffer.from(resp.data.content, 'base64').toString();
      const filename = customFilename || path.basename(resp.data.path);
      await saveFile(content, saveDir, filename);
    }
  } catch (error) {
    if (error.status === 403 && error.message.includes('rate limit')) {
      throw new Error(`GitHub API rate limit exceeded. Consider using a token via VBOT_GITHUB_API_TOKEN environment variable.`);
    } else if (error.status === 404) {
      throw new Error(`Path not found: ${remotePath} in ${owner}/${repo}`);
    } else {
      throw new Error(`Failed to fetch from GitHub: ${error.message}`);
    }
  }
}

module.exports = fetchFromGithub;