@redpanda-data/docs-extensions-and-macros 4.2.5 → 4.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.adoc +184 -21
  2. package/bin/doc-tools.js +328 -0
  3. package/cli-utils/add-caret-external-links.py +68 -0
  4. package/cli-utils/beta-from-antora.js +27 -0
  5. package/cli-utils/generate-cluster-docs.sh +83 -0
  6. package/cli-utils/install-test-dependencies.sh +158 -0
  7. package/cli-utils/python-venv.sh +20 -0
  8. package/cli-utils/start-cluster.sh +53 -0
  9. package/docker-compose/bootstrap.yml +67 -0
  10. package/docker-compose/docker-compose.yml +414 -0
  11. package/docker-compose/generate-profiles.yaml +77 -0
  12. package/docker-compose/rpk-profile.yaml +24 -0
  13. package/docker-compose/transactions-schema.json +37 -0
  14. package/docker-compose/transactions.md +46 -0
  15. package/docker-compose/transform/README.adoc +73 -0
  16. package/docker-compose/transform/go.mod +5 -0
  17. package/docker-compose/transform/go.sum +2 -0
  18. package/docker-compose/transform/regex.wasm +0 -0
  19. package/docker-compose/transform/transform.go +122 -0
  20. package/docker-compose/transform/transform.yaml +33 -0
  21. package/extension-utils/compute-out.js +38 -0
  22. package/extension-utils/create-asciidoc-file.js +15 -0
  23. package/macros/data-template.js +591 -0
  24. package/package.json +21 -4
  25. package/tools/docusaurus-to-antora-conversion-scripts/convert-docs.sh +114 -0
  26. package/tools/docusaurus-to-antora-conversion-scripts/get-file-changes.sh +9 -0
  27. package/tools/docusaurus-to-antora-conversion-scripts/post-process-asciidoc.js +63 -0
  28. package/tools/docusaurus-to-antora-conversion-scripts/pre-process-markdown.js +108 -0
  29. package/tools/fetch-from-github.js +63 -0
  30. package/tools/gen-rpk-ascii.py +477 -0
  31. package/tools/get-console-version.js +53 -0
  32. package/tools/get-redpanda-version.js +53 -0
  33. package/tools/metrics/metrics.py +199 -0
  34. package/tools/metrics/requirements.txt +1 -0
  35. package/tools/property-extractor/Makefile +99 -0
  36. package/tools/property-extractor/README.adoc +206 -0
  37. package/tools/property-extractor/definitions.json +245 -0
  38. package/tools/property-extractor/file_pair.py +7 -0
  39. package/tools/property-extractor/json-to-asciidoc/generate_docs.py +460 -0
  40. package/tools/property-extractor/parser.py +224 -0
  41. package/tools/property-extractor/property_bag.py +4 -0
  42. package/tools/property-extractor/property_extractor.py +243 -0
  43. package/tools/property-extractor/requirements.txt +2 -0
  44. package/tools/property-extractor/tests/transformers_test.py +376 -0
  45. package/tools/property-extractor/transformers.py +397 -0
@@ -0,0 +1,68 @@
+ import os
+ import re
+
+ # Define the regular expression pattern to match the links
+ pattern = r'(https://[^\]]+)\[([^\]]+)\](?!\^)'
+
+ # Function to process a single file
+ def process_file(file_path):
+     with open(file_path, 'r', encoding='utf-8') as file:
+         content = file.read()
+
+     def replace_link(match):
+         link = match.group(1)
+         text = match.group(2)
+         if text.endswith('^'):
+             return match.group(0)  # No modification if caret is already present
+         else:
+             return f"{link}[{text}^]"
+
+     lines = content.split('\n')
+     updated_lines = []
+     for line in lines:
+         if re.search(pattern, line):
+             line = re.sub(pattern, replace_link, line)
+         updated_lines.append(line)
+
+     # Write the updated content back to the file
+     try:
+         with open(file_path, 'w', encoding='utf-8') as file:
+             file.write('\n'.join(updated_lines))
+     except Exception as e:
+         print(f"Error writing to {file_path}: {e}")
+         return False
+     return True
+
+ # Get the directory of the current script
+ script_directory = os.path.dirname(os.path.abspath(__file__))
+
+ # Construct the directory path for the 'modules' directory
+ directory_path = os.path.join(script_directory, '..', 'modules')
+
+ # List of excluded file paths (relative paths)
+ exclusion_list = [
+     os.path.join('reference', 'pages', 'redpanda-operator', 'crd.adoc'),
+     os.path.join('reference', 'pages', 'k-console-helm-spec.adoc'),
+     os.path.join('reference', 'pages', 'crd.adoc'),
+     os.path.join('reference', 'pages', 'k-redpanda-helm-spec.adoc'),
+     os.path.join('reference', 'partials', 'bundle-contents-k8s.adoc'),
+     os.path.join('reference', 'partials', 'bundle-contents-linux.adoc'),
+ ]
+
+ # Function to process all .adoc files in a directory
+ def process_directory(directory_path):
+     for root, _, files in os.walk(directory_path):
+         for file in files:
+             if file.endswith('.adoc'):
+                 file_path = os.path.join(root, file)
+                 relative_file_path = os.path.relpath(file_path, directory_path)
+                 if relative_file_path not in exclusion_list:
+                     if process_file(file_path):
+                         print(f"Processed: {file_path}")
+                     else:
+                         print(f"Failed to process: {file_path}")
+
+ # Call the function with the constructed directory path
+ process_directory(directory_path)
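As a rough illustration of what this script does when run in a docs checkout whose modules/ directory sits next to the script's own directory (that layout is an assumption, not shown in this diff), external AsciiDoc links are rewritten in place so they open in a new tab; the sample link is hypothetical:

  # assumes the script lives in cli-utils/ alongside a modules/ directory
  python3 cli-utils/add-caret-external-links.py
  # a line such as
  #   https://example.com[Example]
  # is rewritten with a trailing caret, the AsciiDoc convention for target=_blank:
  #   https://example.com[Example^]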
@@ -0,0 +1,27 @@
+ const fs = require('fs');
+ const path = require('path');
+ const yaml = require('js-yaml');
+
+ /**
+  * Look for antora.yml in the current working directory
+  * (the project's root), load it if present, and return
+  * its `prerelease` value (boolean). If missing or on error,
+  * returns false.
+  */
+ function getPrereleaseFromAntora() {
+   const antoraPath = path.join(process.cwd(), 'antora.yml');
+   if (!fs.existsSync(antoraPath)) {
+     return false;
+   }
+
+   try {
+     const fileContents = fs.readFileSync(antoraPath, 'utf8');
+     const antoraConfig = yaml.load(fileContents);
+     return antoraConfig.prerelease === true;
+   } catch (error) {
+     console.error('Error reading antora.yml:', error.message);
+     return false;
+   }
+ }
+
+ module.exports = { getPrereleaseFromAntora };
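A quick, illustrative way to exercise this helper is a one-off node call from a directory that contains an antora.yml (the require path below assumes the package layout shown in the file list):

  # prints true only if ./antora.yml exists and sets prerelease: true
  node -e 'console.log(require("./cli-utils/beta-from-antora.js").getPrereleaseFromAntora())'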
@@ -0,0 +1,83 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+
+ # Check if Docker is installed and running
+ if ! command -v docker &> /dev/null; then
+   echo "❌ Docker is not installed or not in PATH. Please install Docker to continue."
+   exit 1
+ fi
+
+ # Check if Docker daemon is running
+ if ! docker info &> /dev/null; then
+   echo "❌ Docker daemon is not running. Please start Docker to continue."
+   exit 1
+ fi
+
+ # Remember where we started so we can always come back
+ ORIGINAL_PWD="$(pwd)"
+
+ # All "cli-utils…" calls should be relative to this script’s dir
+ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+ MODE="${1:-metrics}"
+ TAG="${2:-latest}"
+ DOCKER_REPO="${3:-redpanda}"
+ CONSOLE_TAG="${4:-latest}"
+ CONSOLE_REPO="${5:-console}"
+
+ # if it's an RC tag, switch Docker repo
+ shopt -s nocasematch
+ if [[ "$TAG" =~ rc[0-9]+ ]]; then
+   DOCKER_REPO="redpanda-unstable"
+ fi
+ shopt -u nocasematch
+
+ if [[ "$TAG" == "latest" ]]; then
+   MAJOR_MINOR="latest"
+ else
+   MAJOR_MINOR="$(echo "$TAG" | sed -E 's/^v?([0-9]+\.[0-9]+).*$/\1/')"
+ fi
+
+ export REDPANDA_VERSION="$TAG"
+ export REDPANDA_DOCKER_REPO="$DOCKER_REPO"
+ export REDPANDA_CONSOLE_VERSION="$CONSOLE_TAG"
+ export REDPANDA_CONSOLE_DOCKER_REPO="$CONSOLE_REPO"
+
+ # Start up the cluster
+ "$SCRIPT_DIR"/start-cluster.sh "$TAG"
+
+ # Wait for it to settle
+ if [[ "$MODE" == "metrics" ]]; then
+   echo "Waiting 300 seconds for metrics to be available…"
+   sleep 300
+ else
+   echo "Waiting 30 seconds for cluster to be ready…"
+   sleep 30
+ fi
+
+ # Go back to where we were
+ cd "$ORIGINAL_PWD"
+
+ # Ensure Python venv (always create under cli-utils/venv)
+ "$SCRIPT_DIR"/python-venv.sh \
+   "$SCRIPT_DIR"/venv \
+   "$SCRIPT_DIR"/../tools/metrics/requirements.txt
+
+ if [[ "$MODE" == "metrics" ]]; then
+   "$SCRIPT_DIR"/venv/bin/python \
+     "$SCRIPT_DIR"/../tools/metrics/metrics.py \
+     "$TAG"
+ else
+   "$SCRIPT_DIR"/venv/bin/python \
+     "$SCRIPT_DIR"/../tools/gen-rpk-ascii.py \
+     "$TAG"
+ fi
+
+ echo "✅ Redpanda cluster docs generated successfully!"
+
+ # Tear down the cluster
+ cd "$SCRIPT_DIR"/../docker-compose
+ docker compose down --volumes
+
+ # Return to the original directory
+ cd "$ORIGINAL_PWD"
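The positional arguments map to MODE, TAG, DOCKER_REPO, CONSOLE_TAG, and CONSOLE_REPO, and any MODE other than metrics falls through to the rpk reference path, so hypothetical invocations (the version tags are illustrative) might look like:

  # generate metrics docs against the latest images
  ./cli-utils/generate-cluster-docs.sh metrics latest
  # generate rpk reference docs against a specific release candidate
  ./cli-utils/generate-cluster-docs.sh rpk v24.3.1-rc2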
@@ -0,0 +1,158 @@
+ #!/bin/bash
+ set -e
+
+ # Function to install Node.js
+ install_node() {
+   if command -v node &>/dev/null; then
+     echo "Node.js is already installed. Version: $(node -v)"
+   else
+     echo "Installing Node.js..."
+     curl -fsSL https://fnm.vercel.app/install | bash || { echo "Failed to install fnm"; exit 1; }
+     # Load fnm into the current shell
+     export PATH=$HOME/.fnm:$PATH
+     eval "$(fnm env)" || { echo "Failed to load fnm environment"; exit 1; }
+     fnm install --lts || { echo "Failed to install Node.js"; exit 1; }
+     fnm use --lts || { echo "Failed to use Node.js"; exit 1; }
+     echo "Node.js version: $(node -v)"
+   fi
+ }
+
+ # Function to install Rust
+ install_rust() {
+   if command -v rustc &>/dev/null; then
+     echo "Rust is already installed. Version: $(rustc --version)"
+   else
+     echo "Installing Rust..."
+     curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y || { echo "Failed to install Rust"; exit 1; }
+     source $HOME/.cargo/env || { echo "Failed to load Rust environment"; exit 1; }
+     echo "Rust version: $(rustc --version)"
+   fi
+ }
+
+ # Function to check if expect and jq are installed and install them if they're not
+ ensure_dependencies_installed() {
+
+   if ! command -v expect &> /dev/null; then
+     echo "Expect is not installed. Trying to install..."
+     missing_deps=1
+
+     # Detect OS
+     case "$(uname -s)" in
+       Linux)
+         echo "Detected Linux."
+         sudo apt-get update && sudo apt-get install expect -y || sudo yum install expect -y || { echo "Failed to install expect"; exit 1; }
+         ;;
+       Darwin)
+         echo "Detected macOS."
+         # Assumes Homebrew is installed. If not, it attempts to install Homebrew first.
+         if ! command -v brew &> /dev/null; then
+           echo "Homebrew not found."
+           exit 1
+         fi
+         brew install expect || { echo "Failed to install expect"; exit 1; }
+         ;;
+       *)
+         echo "Unsupported operating system. Please install expect manually."
+         exit 1
+         ;;
+     esac
+   fi
+
+   if ! command -v jq &> /dev/null; then
+     echo "jq is not installed. Trying to install..."
+
+     # Install jq based on OS
+     case "$(uname -s)" in
+       Linux)
+         sudo apt-get install jq -y || sudo yum install jq -y || { echo "Failed to install jq"; exit 1; }
+         ;;
+       Darwin)
+         brew install jq || { echo "Failed to install jq"; exit 1; }
+         ;;
+       *)
+         echo "Unsupported operating system. Please install jq manually."
+         exit 1
+         ;;
+     esac
+   fi
+
+   install_node
+   install_rust
+ }
+
+ # Ensure expect and jq are installed
+ ensure_dependencies_installed
+
+ # Function to check rpk installation and display its version
+ check_rpk_installed() {
+   if command -v rpk &>/dev/null; then
+     echo "rpk is already installed. Version information:"
+     rpk --version
+     return 0
+   else
+     return 1
+   fi
+ }
+
+ # Determine OS and architecture
+ OS="$(uname -s)"
+ ARCH="$(uname -m)"
+
+ # Check if rpk is already installed
+ if check_rpk_installed; then
+   exit 0
+ fi
+
+ # Check if running on macOS and use Homebrew to install rpk
+ if [ "${OS}" == "Darwin" ]; then
+   echo "Detected macOS. Attempting to install rpk using Homebrew..."
+
+   # Check if Homebrew is installed
+   if ! command -v brew &>/dev/null; then
+     echo "Homebrew not found."
+     exit 1
+   fi
+
+   # Install rpk
+   brew install redpanda-data/tap/redpanda || { echo "Failed to install rpk via Homebrew"; exit 1; }
+
+   # Verify installation
+   echo "rpk has been installed. Version information:"
+   rpk --version
+   exit 0
+ fi
+
+ # For Linux systems
+ if [ "${OS}" == "Linux" ]; then
+   FILENAME="rpk-linux-amd64.zip"
+   URL_BASE="https://github.com/redpanda-data/redpanda/releases"
+
+   # Download latest version of rpk
+   echo "Downloading ${FILENAME}..."
+   curl -Lf --retry 3 -O "${URL_BASE}/latest/download/${FILENAME}" \
+     || { echo "Failed to download rpk"; exit 1; }
+
+   # Ensure the target directory exists
+   mkdir -p $HOME/.local/bin || { echo "Failed to create directory"; exit 1; }
+
+   # Unzip the rpk binary to the target directory
+   unzip -o "${FILENAME}" -d $HOME/.local/bin || { echo "Failed to unzip rpk"; exit 1; }
+
+   # Remove the downloaded archive
+   rm "${FILENAME}" || { echo "Failed to remove downloaded archive"; exit 1; }
+
+   # Add the target directory to PATH for the current session
+   export PATH=$HOME/.local/bin:$PATH
+
+   # Add the target directory to PATH for future sessions
+   echo 'export PATH=$HOME/.local/bin:$PATH' >> ~/.bashrc
+   source ~/.bashrc
+
+   # Verify installation
+   echo "rpk has been installed. Version information:"
+   rpk --version
+   exit 0
+ fi
+
+ echo "Unsupported operating system: ${OS}"
+ exit 1
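The installer takes no arguments, so a hypothetical run followed by a quick check that the tools it provisions are on PATH could be:

  ./cli-utils/install-test-dependencies.sh
  rpk --version
  jq --version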
@@ -0,0 +1,20 @@
+ #!/usr/bin/env bash
+ #
+ # Create a new venv at $1 and install from $2
+
+ set -euo pipefail
+
+ VENV_DIR="${1:-venv}"
+ REQ_FILE="${2:-requirements.txt}"
+
+ echo "Recreating Python venv at $VENV_DIR..."
+ rm -rf "$VENV_DIR"
+ python3 -m venv "$VENV_DIR"
+ "$VENV_DIR/bin/pip" install --upgrade pip --quiet
+
+ if [[ -f "$REQ_FILE" ]]; then
+   echo "Installing $REQ_FILE..."
+   "$VENV_DIR/bin/pip" install -r "$REQ_FILE" --quiet
+ else
+   echo "⚠️ Requirements file not found at $REQ_FILE"
+ fi
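A hypothetical standalone invocation, recreating a venv under cli-utils and installing the metrics requirements shipped in this package, would be:

  ./cli-utils/python-venv.sh ./cli-utils/venv ./tools/metrics/requirements.txt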
@@ -0,0 +1,53 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+
+ # Usage: start-cluster.sh <tag>
+ TAG="${1:-latest}"
+
+ # Where this script lives (cli-utils)
+ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+ # One level up is the package root, where we expect docker-compose/
+ PACKAGE_ROOT="$(cd "$SCRIPT_DIR"/.. && pwd)"
+ QUICKSTART_DIR="$PACKAGE_ROOT/docker-compose"
+
+ # Remember where the user called us from
+ CALLER_PWD="$(pwd)"
+
+ # Default quickstart version
+ MAJOR_MINOR="latest"
+ if [[ "$TAG" != "latest" ]]; then
+   MAJOR_MINOR="$(echo "$TAG" | sed -E 's/^v?([0-9]+\.[0-9]+).*$/\1/')"
+ fi
+
+ # Fetch quickstart into package root if needed
+ if [[ ! -d "$QUICKSTART_DIR" ]]; then
+   echo "📥 Fetching Redpanda quickstart for ${MAJOR_MINOR}…"
+   if [[ "$TAG" == "latest" ]]; then
+     curl -sSLf --retry 3 https://docs.redpanda.com/redpanda-quickstart.tar.gz \
+       | tar -C "$PACKAGE_ROOT" -xzf -
+   else
+     curl -sSLf --retry 3 "https://docs.redpanda.com/${MAJOR_MINOR}-redpanda-quickstart.tar.gz" \
+       | tar -C "$PACKAGE_ROOT" -xzf -
+   fi
+
+   if [[ ! -d "$QUICKSTART_DIR" ]]; then
+     echo "❌ Expected '$QUICKSTART_DIR' but none was found after extraction."
+     exit 1
+   fi
+ fi
+
+ # Switch into the quickstart dir and (re)start the cluster
+ cd "$QUICKSTART_DIR"
+
+ if docker compose ps | grep -q Up; then
+   echo "🛑 Stopping existing cluster…"
+   docker compose down --volumes
+ fi
+
+ echo "▶️ Starting Redpanda cluster…"
+ docker compose up -d
+
+ # Return to original directory
+ cd "$CALLER_PWD"
+ echo "✅ Cluster is up (version: ${TAG})"
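Two hypothetical invocations (the version tag is illustrative): with no argument the script fetches the latest quickstart archive, while a full tag is reduced to its major.minor form to select the matching archive:

  ./cli-utils/start-cluster.sh            # downloads redpanda-quickstart.tar.gz
  ./cli-utils/start-cluster.sh v24.2.4    # downloads 24.2-redpanda-quickstart.tar.gz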
@@ -0,0 +1,67 @@
+ # =================================================================
+ # This file defines initial cluster properties for a Redpanda cluster.
+ # Some of these settings are intended for quickstart development and evaluation
+ # and are not suitable for production environments.
+ #
+ # For more information on bootstrap files, see:
+ # https://docs.redpanda.com/current/deploy/deployment-option/self-hosted/manual/production/production-deployment/#configure-a-bootstrap-file
+ # =================================================================
+
+ # Enable SASL authentication for the Kafka and Admin APIs.
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#admin_api_require_auth
+ admin_api_require_auth: true
+ # At least one superuser is required to be able to create other SASL users.
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#superusers
+ superusers:
+   - superuser
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#enable_sasl
+ enable_sasl: true
+ # Allow topics to be created on first access.
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#auto_create_topics_enabled
+ auto_create_topics_enabled: true
+ # Enable data transforms.
+ # https://docs.redpanda.com/current/develop/data-transforms/how-transforms-work/
+ data_transforms_enabled: true
+ # Enable audit logging (enterprise feature).
+ # https://docs.redpanda.com/current/manage/audit-logging/
+ audit_enabled: true
+ # Enable Tiered Storage (enterprise feature).
+ # https://docs.redpanda.com/current/manage/tiered-storage/
+ cloud_storage_enabled: true
+ cloud_storage_region: local
+ cloud_storage_access_key: minio
+ cloud_storage_secret_key: redpandaTieredStorage7
+ cloud_storage_api_endpoint: minio
+ cloud_storage_api_endpoint_port: 9000
+ cloud_storage_disable_tls: true
+ cloud_storage_bucket: redpanda
+ cloud_storage_enable_remote_write: true
+ cloud_storage_enable_remote_read: true
+ # Force segments to be uploaded to Tiered Storage faster for the purposes of the quickstart.
+ # https://docs.redpanda.com/current/reference/properties/object-storage-properties/#cloud_storage_segment_max_upload_interval_sec
+ cloud_storage_segment_max_upload_interval_sec: 60
+ # Continuous Data Balancing (enterprise feature) continuously monitors node and rack availability and disk usage.
+ # This enables self-healing clusters that dynamically balance partitions, ensuring smooth operations and optimal cluster performance.
+ # https://docs.redpanda.com/current/manage/cluster-maintenance/continuous-data-balancing/
+ partition_autobalancing_mode: continuous
+ # Enable Redpanda to collect consumer group metrics.
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#enable_consumer_group_metrics
+ enable_consumer_group_metrics:
+   - "group"
+   - "partition"
+   - "consumer_lag"
+ # Lower the interval for the autogeneration of consumer group metrics.
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#consumer_group_lag_collection_interval_sec
+ consumer_group_lag_collection_interval_sec: 60
+ # Enable Redpanda to collect host metrics.
+ # https://docs.redpanda.com/current/reference/properties/cluster-properties/#enable_host_metrics
+ enable_host_metrics: true
+ # Enable Iceberg so that Iceberg metrics are generated.
+ iceberg_enabled: true
+ # Set up the Iceberg REST catalog configuration.
+ iceberg_catalog_type: rest
+ iceberg_rest_catalog_endpoint: http://catalog:8181
+ # Credentials are required, but the catalog ignores them.
+ iceberg_rest_catalog_client_id: catalog
+ iceberg_rest_catalog_client_secret: catalog123
+ iceberg_catalog_commit_interval_ms: 5000
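Once the quickstart cluster is running, a quick sanity check that these bootstrap properties took effect is to read one back with rpk (this assumes rpk is already configured against the local cluster; the property name is taken from this file):

  rpk cluster config get enable_sasl
  # should print: true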