@redpanda-data/docs-extensions-and-macros 4.3.0 → 4.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/bin/doc-tools.js +376 -0
  2. package/cli-utils/add-caret-external-links.py +68 -0
  3. package/cli-utils/beta-from-antora.js +27 -0
  4. package/cli-utils/generate-cluster-docs.sh +83 -0
  5. package/cli-utils/install-test-dependencies.sh +158 -0
  6. package/cli-utils/python-venv.sh +20 -0
  7. package/cli-utils/start-cluster.sh +53 -0
  8. package/docker-compose/bootstrap.yml +67 -0
  9. package/docker-compose/docker-compose.yml +414 -0
  10. package/docker-compose/generate-profiles.yaml +77 -0
  11. package/docker-compose/rpk-profile.yaml +24 -0
  12. package/docker-compose/transactions-schema.json +37 -0
  13. package/docker-compose/transactions.md +46 -0
  14. package/docker-compose/transform/README.adoc +73 -0
  15. package/docker-compose/transform/go.mod +5 -0
  16. package/docker-compose/transform/go.sum +2 -0
  17. package/docker-compose/transform/regex.wasm +0 -0
  18. package/docker-compose/transform/transform.go +122 -0
  19. package/docker-compose/transform/transform.yaml +33 -0
  20. package/extensions/replace-attributes-in-attachments.js +1 -1
  21. package/extensions/util/compute-out.js +38 -0
  22. package/extensions/util/create-asciidoc-file.js +15 -0
  23. package/macros/data-template.js +2 -2
  24. package/package.json +15 -3
  25. package/tools/docusaurus-to-antora-conversion-scripts/convert-docs.sh +114 -0
  26. package/tools/docusaurus-to-antora-conversion-scripts/get-file-changes.sh +9 -0
  27. package/tools/docusaurus-to-antora-conversion-scripts/post-process-asciidoc.js +63 -0
  28. package/tools/docusaurus-to-antora-conversion-scripts/pre-process-markdown.js +108 -0
  29. package/tools/fetch-from-github.js +63 -0
  30. package/tools/gen-rpk-ascii.py +477 -0
  31. package/tools/get-console-version.js +53 -0
  32. package/tools/get-redpanda-version.js +53 -0
  33. package/tools/metrics/metrics.py +199 -0
  34. package/tools/metrics/requirements.txt +1 -0
  35. package/tools/property-extractor/Makefile +99 -0
  36. package/tools/property-extractor/README.adoc +206 -0
  37. package/tools/property-extractor/definitions.json +245 -0
  38. package/tools/property-extractor/file_pair.py +7 -0
  39. package/tools/property-extractor/json-to-asciidoc/generate_docs.py +460 -0
  40. package/tools/property-extractor/parser.py +224 -0
  41. package/tools/property-extractor/property_bag.py +4 -0
  42. package/tools/property-extractor/property_extractor.py +243 -0
  43. package/tools/property-extractor/requirements.txt +2 -0
  44. package/tools/property-extractor/tests/transformers_test.py +376 -0
  45. package/tools/property-extractor/transformers.py +397 -0
package/docker-compose/transform/transform.go ADDED
@@ -0,0 +1,122 @@
+ package main
+ 
+ // This data transform filters records based on a customizable regex pattern.
+ // If a record's key or value
+ // (determined by an environment variable) matches the specified regex,
+ // the record is forwarded to the output.
+ // Otherwise, it is dropped.
+ //
+ // Usage:
+ // 1. Provide the following environment variables in your Docker or configuration setup:
+ //    - PATTERN : (required) a regular expression that determines what you want to match.
+ //    - MATCH_VALUE : (optional) a boolean to decide whether to check the record value. If false,
+ //      the record key is checked. Default is false.
+ //
+ // Example environment variables:
+ //   PATTERN=".*\\.edu$"
+ //   MATCH_VALUE="true"
+ //
+ // Logs:
+ // This transform logs information about each record and whether it matched.
+ // The logs appear in the _redpanda.transform_logs topic, so you can debug how your records are being processed.
+ //
+ // Build instructions:
+ //   go mod tidy
+ //   rpk transform build
+ //
+ // For more details on building transforms with the Redpanda SDK, see:
+ // https://docs.redpanda.com/current/develop/data-transforms
+ //
+ 
+ import (
+     "log"
+     "os"
+     "regexp"
+     "strings"
+ 
+     "github.com/redpanda-data/redpanda/src/transform-sdk/go/transform"
+ )
+ 
+ var (
+     re         *regexp.Regexp
+     checkValue bool
+ )
+ 
+ func isTrueVar(v string) bool {
+     switch strings.ToLower(v) {
+     case "yes", "ok", "1", "true":
+         return true
+     default:
+         return false
+     }
+ }
+ 
+ // The main() function runs only once at startup. It performs all initialization steps:
+ // - Reads and compiles the regex pattern.
+ // - Determines whether to match on the key or value.
+ // - Registers the doRegexFilter() function to process records.
+ func main() {
+     // Set logging preferences, including timestamp and UTC time.
+     log.SetPrefix("[regex-transform] ")
+     log.SetFlags(log.Ldate | log.Ltime | log.LUTC | log.Lmicroseconds)
+ 
+     // Start logging the transformation process.
+     log.Println("Starting transform...")
+ 
+     // Read the PATTERN environment variable to get the regex pattern.
+     pattern, ok := os.LookupEnv("PATTERN")
+     if !ok {
+         log.Fatal("Missing PATTERN environment variable")
+     }
+     // Log the regex pattern being used.
+     log.Printf("Using PATTERN: %q\n", pattern)
+     // Compile the regex pattern for later use.
+     re = regexp.MustCompile(pattern)
+ 
+     // Read the MATCH_VALUE environment variable to determine whether to check the record's value.
+     mk, ok := os.LookupEnv("MATCH_VALUE")
+     checkValue = ok && isTrueVar(mk)
+     log.Printf("MATCH_VALUE set to: %t\n", checkValue)
+ 
+     log.Println("Initialization complete, waiting for records...")
+ 
+     // Listen for records to be written, calling doRegexFilter() for each record.
+     transform.OnRecordWritten(doRegexFilter)
+ }
+ 
+ // The doRegexFilter() function executes each time a new record is written.
+ // It checks whether the record's key or value (based on MATCH_VALUE) matches the compiled regex.
+ // If it matches, the record is forwarded; if not, it's dropped.
+ func doRegexFilter(e transform.WriteEvent, w transform.RecordWriter) error {
+     // This stores the data to be checked (either the key or value).
+     var dataToCheck []byte
+ 
+     // Depending on the MATCH_VALUE environment variable, decide whether to check the record's key or value.
+     if checkValue {
+         // Use the value of the record if MATCH_VALUE is true.
+         dataToCheck = e.Record().Value
+         log.Printf("Checking record value: %s\n", string(dataToCheck))
+     } else {
+         // Use the key of the record if MATCH_VALUE is false.
+         dataToCheck = e.Record().Key
+         log.Printf("Checking record key: %s\n", string(dataToCheck))
+     }
+ 
+     // If there is no key or value to check, log and skip the record.
+     if dataToCheck == nil {
+         log.Println("Record has no key/value to check, skipping.")
+         return nil
+     }
+ 
+     // Check if the data matches the regex pattern.
+     pass := re.Match(dataToCheck)
+     if pass {
+         // If the record matches the pattern, log and write the record to the output topic.
+         log.Printf("Record matched pattern, passing through. Key: %s, Value: %s\n", string(e.Record().Key), string(e.Record().Value))
+         return w.Write(e.Record())
+     } else {
+         // If the record does not match the pattern, log and drop the record.
+         log.Printf("Record did not match pattern, dropping. Key: %s, Value: %s\n", string(e.Record().Key), string(e.Record().Value))
+         // Do not write the record if it doesn't match the pattern.
+         return nil
+     }
+ }
package/docker-compose/transform/transform.yaml ADDED
@@ -0,0 +1,33 @@
+ # Transform metadata used by the rpk transform build command.
+ # This metadata file tells rpk:
+ # 1) The transform’s display name, which also becomes the base for the .wasm file name.
+ # 2) A brief description of what it does.
+ # 3) Defaults for environment variables.
+ # 4) Input and output topics (if you want to define them here rather than in the deploy command).
+ 
+ # Human-readable name of the transform. rpk transform build uses this for the generated .wasm file.
+ name: regex
+ 
+ description: |
+   Filters the input topic, passing through only records that match a regular expression.
+ 
+   Regular expressions are implemented using Go's regexp library, which uses RE2 syntax.
+   See the RE2 wiki for allowed syntax: https://github.com/google/re2/wiki/Syntax
+ 
+   Environment variables:
+   - PATTERN: The regular expression to match against records (required).
+   - MATCH_VALUE: By default, the regex matches keys. If set to "true", the regex matches values instead.
+ 
+ # By default, no input topic is set here. (You can set it in your deploy command if preferred.)
+ input-topic: ""
+ 
+ # By default, no output topic is set here. (You can set it in your deploy command if preferred.)
+ output-topic: ""
+ 
+ # Indicates the specific TinyGo environment used to compile your transform.
+ language: tinygo-no-goroutines
+ 
+ env:
+   # The PATTERN variable must be provided at deploy time.
+   # Example: --var=PATTERN=".*@example.com"
+   PATTERN: '<required>'
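
Taken together, the Go source and this metadata suggest a build-and-deploy flow along these lines. This is a sketch, not part of the diff: the topic names are placeholders, and only go mod tidy, rpk transform build, and the --var flag appear in the files above.

  # Minimal sketch; topic names are placeholders.
  cd package/docker-compose/transform
  go mod tidy
  rpk transform build
  rpk transform deploy \
    --input-topic=source-topic \
    --output-topic=matches-topic \
    --var=PATTERN='.*\.edu$' \
    --var=MATCH_VALUE=true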
package/extensions/replace-attributes-in-attachments.js CHANGED
@@ -182,7 +182,7 @@ function getDynamicReplacements(componentVersion, logger) {
    const versionNum = formatVersion(componentVersion.version || '', semver);
    const is24_3plus =
      versionNum && semver.gte(versionNum, '24.3.0') && componentVersion.title === 'Self-Managed';
-   const useTagAttributes = isPrerelease || is24_3plus;
+   const useTagAttributes = isPrerelease || is24_3plus || componentVersion.title === 'Labs';
 
    // Derive Redpanda / Console versions
    const redpandaVersion = isPrerelease
package/extensions/util/compute-out.js ADDED
@@ -0,0 +1,38 @@
+ 'use strict'
+ 
+ const { posix: path } = require('node:path')
+ 
+ // Compute the publishing metadata (output directory, basename, and relative
+ // root paths) for a source file, following Antora's component/version/module
+ // directory layout. Pages get an .html extension; other families are nested
+ // under a _<family>s directory.
+ function computeOut (src) {
+   const { component, version, module: module_, family, relative } = src
+   const outRelative = family === 'page' ? relative.replace(/\.adoc$/, '.html') : relative
+   const { dir: dirname, base: basename, ext: extname, name: stem } = path.parse(outRelative)
+   const componentVersion = this.getComponentVersion(component, version)
+   const versionSegment = componentVersion
+   const outDirSegments = []
+   const moduleRootPathSegments = []
+   if (component !== 'ROOT') outDirSegments.push(component)
+   if (versionSegment) outDirSegments.push(versionSegment)
+   if (module_ !== 'ROOT') outDirSegments.push(module_)
+   const outModuleDirSegments = outDirSegments.slice()
+   if (family !== 'page') {
+     outDirSegments.push(`_${family}s`)
+     moduleRootPathSegments.push('..')
+   }
+   if (dirname) {
+     outDirSegments.push(dirname)
+     // One '..' per directory level to climb back to the module root.
+     for (const _ of dirname.split('/')) moduleRootPathSegments.push('..')
+   }
+   const rootPathSegments = moduleRootPathSegments.slice()
+   for (const _ of outModuleDirSegments) rootPathSegments.push('..')
+   const outDirname = outDirSegments.join('/')
+   const result = {
+     dirname: outDirname,
+     basename,
+     path: outDirname + '/' + basename,
+     moduleRootPath: moduleRootPathSegments.length ? moduleRootPathSegments.join('/') : '.',
+     rootPath: rootPathSegments.length ? rootPathSegments.join('/') : '.',
+   }
+   return result
+ }
+ 
+ module.exports = computeOut
package/extensions/util/create-asciidoc-file.js ADDED
@@ -0,0 +1,15 @@
+ 'use strict'
+ 
+ const computeOut = require('./compute-out')
+ const { posix: path } = require('node:path')
+ 
+ // Mark a virtual file as AsciiDoc and compute its published path and URL.
+ // The file is re-added to the content catalog so the new coordinates take
+ // effect, then removed again; the caller keeps the returned file object.
+ function createAsciiDocFile (contentCatalog, file) {
+   file.mediaType = 'text/asciidoc'
+   const src = file.src
+   const out = computeOut.call(contentCatalog, src)
+   const pub = { url: '/' + out.path, moduleRootPath: out.moduleRootPath, rootPath: out.rootPath }
+   contentCatalog.removeFile((file = contentCatalog.addFile(Object.assign(file, { path: out.path, out: null, pub: pub }))))
+   return file
+ }
+ 
+ module.exports = createAsciiDocFile
package/macros/data-template.js CHANGED
@@ -22,8 +22,8 @@ const jsonpath = require('jsonpath-plus');
    const yaml = require('yaml');
    // For synchronous HTTP fetching.
    const request = require('sync-request');
-   const computeOut = require('../util/compute-out.js');
-   const createAsciiDocFile = require('../util/create-asciidoc-file.js');
+   const computeOut = require('../extensions/util/compute-out.js');
+   const createAsciiDocFile = require('../extensions/util/create-asciidoc-file.js');
 
    // In-memory cache for external resources (avoid repeated network calls)
    const externalCache = new Map();
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@redpanda-data/docs-extensions-and-macros",
-   "version": "4.3.0",
+   "version": "4.4.1",
    "description": "Antora extensions and macros developed for Redpanda documentation.",
    "keywords": [
      "antora",
@@ -12,7 +12,13 @@
    "author": {
      "name": "Redpanda Docs Team"
    },
+   "bin": {
+     "doc-tools": "./bin/doc-tools.js"
+   },
    "scripts": {
+     "install-test-dependencies": "doc-tools install-test-dependencies",
+     "get-redpanda-version": "doc-tools get-redpanda-version",
+     "get-console-version": "doc-tools get-console-version",
      "build": "antora --to-dir docs --fetch local-antora-playbook.yml",
      "serve": "wds --node-resolve --open preview/test/ --watch --root-dir docs"
    },
@@ -55,8 +61,13 @@
    },
    "files": [
      "extensions",
+     "extension-utils",
      "asciidoc-extensions",
-     "macros"
+     "macros",
+     "bin",
+     "cli-utils",
+     "tools",
+     "docker-compose"
    ],
    "license": "ISC",
    "repository": {
@@ -84,7 +95,8 @@
      "semver": "^7.6.0",
      "sync-request": "^6.1.0",
      "tar": "^7.4.3",
-     "yaml": "^2.7.0"
+     "tree-sitter": "^0.22.4",
+     "yaml": "^2.7.1"
    },
    "devDependencies": {
      "@antora/cli": "3.1.4",
package/tools/docusaurus-to-antora-conversion-scripts/convert-docs.sh ADDED
@@ -0,0 +1,114 @@
+ #!/bin/bash
+ 
+ if ! command -v pandoc &> /dev/null; then
+   echo "Error: Pandoc is not installed."
+   echo "Please visit https://pandoc.org/installing.html to install Pandoc."
+   exit 1
+ fi
+ 
+ if ! command -v kramdoc &> /dev/null; then
+   echo "Error: Kramdoc is not installed."
+   echo "Please install kramdoc using: gem install kramdown-asciidoc"
+   exit 1
+ fi
+ 
+ SOURCE_DIRECTORY="$1"
+ 
+ if [ -z "$SOURCE_DIRECTORY" ]; then
+   echo "Error: Source directory not provided."
+   echo "Usage: ./convert-docs.sh /path/to/your/source_directory"
+   exit 1
+ fi
+ 
+ OUTPUT_DIRECTORY="$(cd "$(dirname "$0")/../modules" && pwd)"
+ 
+ # Create the output directory if it doesn't exist
+ mkdir -p "$OUTPUT_DIRECTORY"
+ 
+ function remove_leading_tabs() {
+   local mdx_file="$1"
+   local content="$(cat "$mdx_file")"
+ 
+   # Remove leading tabs in the <TabItem> elements
+   local updated_content="$(echo "$content" | perl -0777 -pe 's/(\s*)<TabItem([\s\S]*?)>([\s\S]*?)<\/TabItem>/sprintf("%s<TabItem%s>%s<\/TabItem>", $1, $2, $3 =~ s!^\t!!rmsg)/ge')"
+ 
+   # Write the updated content back to the file
+   echo "$updated_content" > "$mdx_file"
+ }
+ 
+ function preprocess_markdown() {
+   local markdown_file="$1"
+   node "$(dirname "$0")/pre-process-markdown.js" "$markdown_file"
+ }
+ 
+ # Convert a Markdown file to AsciiDoc and add the description
+ function convert_markdown_to_asciidoc() {
+   local markdown_file="$1"
+   local output_file="$2"
+   # Remove leading tabs from <TabItem> elements
+   remove_leading_tabs "$markdown_file"
+ 
+   # Preprocess the Markdown file
+   preprocess_markdown "$markdown_file"
+ 
+   local content="$(cat "$markdown_file")"
+ 
+   local output_file_dir="$(dirname "$output_file")"
+   mkdir -p "$output_file_dir"
+ 
+   # Extract the content of the meta description tag
+   local description="$(echo "$content" | sed -n 's/.*<meta name="description" content="\([^"]*\)".*/\1/p')"
+ 
+   # Remove the head element from the source Markdown content and save it
+   local cleaned_content
+   cleaned_content=$(echo "$content" | sed '/<head>/,/<\/head>/d')
+   local cleaned_file
+   cleaned_file=$(mktemp)
+   echo "$cleaned_content" > "$cleaned_file"
+ 
+   # Convert the cleaned Markdown file to AsciiDoc using Kramdoc
+   local asciidoc_content
+   asciidoc_content=$(kramdoc -o - "$cleaned_file")
+ 
+   # Clean up temporary file
+   rm -f "$cleaned_file"
+ 
+   # Insert the description attribute on the second line of the AsciiDoc content
+   asciidoc_content="$(echo "$asciidoc_content" | awk -v desc="$description" 'NR==1{print; print ":description: " desc ""; next} 1')"
+ 
+   # Write the updated AsciiDoc content to the output file
+   echo "$asciidoc_content" > "$output_file"
+ 
+   echo "Converted: $markdown_file -> $output_file"
+ }
+ 
+ # Convert all Markdown files in the source directory
+ # Initialize counters
+ success_count=0
+ failure_count=0
+ 
+ while IFS= read -r -d '' markdown_file; do
+   output_file="$(echo "$markdown_file" \
+     | sed "s|$SOURCE_DIRECTORY|$OUTPUT_DIRECTORY|" \
+     | sed 's|\.mdx$|.adoc|' \
+     | sed 's|\(.*\)/\(.*\)|\1/pages/\2|')"
+ 
+   if convert_markdown_to_asciidoc "$markdown_file" "$output_file"; then
+     # Run the Node.js script to process the output file
+     if node "$(dirname "$0")/post-process-asciidoc.js" "$output_file"; then
+       success_count=$((success_count + 1))
+     else
+       echo "Error: Failed to post-process ${output_file}"
+       failure_count=$((failure_count + 1))
+     fi
+   else
+     echo "Error: Failed to convert ${markdown_file}"
+     failure_count=$((failure_count + 1))
+   fi
+ done < <(find "$SOURCE_DIRECTORY" -name "*.mdx" -print0)
+ 
+ echo "Conversion complete. Success: ${success_count}, Failures: ${failure_count}"
package/tools/docusaurus-to-antora-conversion-scripts/get-file-changes.sh ADDED
@@ -0,0 +1,9 @@
+ #!/bin/bash
+ set -euo pipefail
+ echo "Please enter the name of the first branch:"
+ read -r branch1
+ echo "Please enter the name of the second branch:"
+ read -r branch2
+ 
+ git fetch
+ git diff --summary "$branch1".."$branch2" -- ./modules/
package/tools/docusaurus-to-antora-conversion-scripts/post-process-asciidoc.js ADDED
@@ -0,0 +1,63 @@
+ const fs = require('fs');
+ const path = require('path');
+ 
+ // Rewrite relative link: macros left over from the Docusaurus conversion
+ // into Antora xref: macros, using the ../ depth to infer the target module.
+ function processFile(file) {
+   let content;
+   try {
+     content = fs.readFileSync(file, 'utf-8');
+   } catch (err) {
+     console.error(`Error reading file ${file}: ${err.message}`);
+     return;
+   }
+ 
+   const newContent = content.replace(
+     /link:(\.\.\/)+([\w/.-]+)(#?[\w/.-]*)(\[.+?\])/g,
+     (match, dots, linkPath, anchor, linkText) => {
+       const depth = dots.match(/\.\.\//g).length;
+       const pathParts = linkPath.split('/');
+       // Ensure we don't go beyond the available path parts
+       const startIndex = Math.max(0, pathParts.length - depth);
+       const newPath = pathParts.slice(0, startIndex).join(':');
+       return `xref:${newPath}:${pathParts[pathParts.length - 1]}.adoc${anchor || ''}${linkText}`;
+     }
+   );
+ 
+   try {
+     fs.writeFileSync(file, newContent, 'utf-8');
+   } catch (err) {
+     console.error(`Error writing file ${file}: ${err.message}`);
+   }
+ }
+ 
+ // Recursively process every .adoc file under a directory.
+ function processDirectory(directory) {
+   const files = fs.readdirSync(directory);
+ 
+   files.forEach((file) => {
+     const filePath = path.join(directory, file);
+     const stat = fs.statSync(filePath);
+ 
+     if (stat.isFile() && path.extname(file) === '.adoc') {
+       processFile(filePath);
+     } else if (stat.isDirectory()) {
+       processDirectory(filePath);
+     }
+   });
+ }
+ 
+ const inputPath = process.argv[2];
+ 
+ if (!inputPath) {
+   console.error('No input path provided');
+   process.exit(1);
+ }
+ 
+ const stat = fs.statSync(inputPath);
+ 
+ if (stat.isFile()) {
+   processFile(inputPath);
+ } else if (stat.isDirectory()) {
+   processDirectory(inputPath);
+ } else {
+   console.error('Input path is neither a file nor a directory');
+   process.exit(1);
+ }
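
The script accepts either a single file or a directory as its argument. Illustrative invocations (paths are placeholders):

  # Rewrite links in a single converted page...
  node post-process-asciidoc.js modules/get-started/pages/quickstart.adoc
  # ...or walk a directory tree, processing every .adoc file.
  node post-process-asciidoc.js modules/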
package/tools/docusaurus-to-antora-conversion-scripts/pre-process-markdown.js ADDED
@@ -0,0 +1,108 @@
+ const fs = require('fs');
+ const os = require('os');
+ const path = require('path');
+ const { execSync } = require('child_process');
+ const pandoc = require('node-pandoc');
+ 
+ // Fail fast if required CLIs are missing
+ ['pandoc', 'kramdoc'].forEach(cmd => {
+   try {
+     execSync(`command -v ${cmd}`, { stdio: 'ignore' });
+   } catch {
+     console.error(`Required dependency "${cmd}" not found in PATH`);
+     process.exit(1);
+   }
+ });
+ 
+ function convertHtmlTableToAsciiDoc(htmlTable) {
+   return new Promise((resolve) => {
+     pandoc(htmlTable, '-f html -t asciidoc', (err, result) => {
+       if (err) {
+         console.error(`Error converting HTML table to AsciiDoc: ${err.message}`);
+         // Fall back to the original HTML on failure
+         resolve(htmlTable);
+       } else {
+         resolve(result);
+       }
+     });
+   });
+ }
+ 
+ function markdownToAsciidoc(markdown) {
+   const tempMarkdownPath = path.join(os.tmpdir(), 'temp_markdown.md');
+   fs.writeFileSync(tempMarkdownPath, markdown, 'utf-8');
+ 
+   let result;
+   try {
+     const command = `kramdoc -o - "${tempMarkdownPath}"`;
+     result = execSync(command, { encoding: 'utf-8' });
+   } catch (err) {
+     console.error(`Error converting Markdown to AsciiDoc: ${err.message}`);
+     result = markdown;
+   } finally {
+     fs.unlinkSync(tempMarkdownPath);
+   }
+   return result;
+ }
+ 
+ // Convert a <Tabs> block to an AsciiDoc tabs block, wrapped in an HTML
+ // comment so it survives the kramdoc pass for later post-processing.
+ function processTabs(match) {
+   const tabItems = [...match.matchAll(/\s?<TabItem[^>]*value="([^"]+)"[^>]*label="([^"]+)"[^>]*>([\s\S]*?)<\/TabItem>/g)];
+ 
+   let result = ['\n<!--\n[tabs]'];
+   result.push('=====');
+   for (const tabItem of tabItems) {
+     const [_, value, label, content] = tabItem;
+     result.push(`${label}::`);
+     result.push('+');
+     result.push('--');
+     const asciidocContent = markdownToAsciidoc(content.trim());
+     result.push(asciidocContent);
+     result.push('--');
+   }
+ 
+   result.push('=====');
+   result.push('-->');
+   return result.join('\n');
+ }
+ 
+ // Convert a <details><summary> block to an AsciiDoc collapsible block.
+ function processDetails(match) {
+   const detailsRegex = /<details>(?:\r?\n)<summary>([\s\S]*?)<\/summary>(?:\r?\n)([\s\S]*?)(?:\r?\n)<\/details>/g;
+ 
+   return match.replace(detailsRegex, (match, title, content) => {
+     const asciidocTitle = `.${title.trim()}`;
+     const asciidocBlock = `[%collapsible]\n====\n${content.trim()}\n====`;
+ 
+     return `<!--\n${asciidocTitle}\n${asciidocBlock}\n-->`;
+   });
+ }
+ 
+ async function convertFile(file) {
+   const content = fs.readFileSync(file, 'utf-8');
+ 
+   let newContent = content.replace(/<Tabs>([\s\S]*?)<\/Tabs>/g, processTabs);
+   newContent = newContent.replace(/<details>([\s\S]*?)<\/details>/g, processDetails);
+ 
+   const htmlTableMatches = newContent.match(/\s?(<table>((.|\n)*?)<\/table>)/g);
+   if (htmlTableMatches) {
+     for (const htmlTableMatch of htmlTableMatches) {
+       const tableRegex = /(<table>((.|\n)*?)<\/table>)/;
+       const tableMatch = htmlTableMatch.match(tableRegex);
+       if (tableMatch) {
+         const htmlTable = tableMatch[0];
+         const asciidocTable = await convertHtmlTableToAsciiDoc(htmlTable);
+         newContent = newContent.replace(htmlTableMatch, `\n<!--\n${asciidocTable}\n-->`);
+       }
+     }
+   }
+ 
+   fs.writeFileSync(file, newContent, 'utf-8');
+ }
+ 
+ const inputFile = process.argv[2];
+ if (!inputFile) {
+   console.error('No input file provided');
+   process.exit(1);
+ }
+ 
+ convertFile(inputFile).catch((error) => {
+   console.error(`Error processing file: ${error.message}`);
+   process.exit(1);
+ });
package/tools/fetch-from-github.js ADDED
@@ -0,0 +1,63 @@
+ const fs = require('fs');
+ const path = require('path');
+ 
+ let octokitInstance = null;
+ async function loadOctokit() {
+   if (!octokitInstance) {
+     const { Octokit } = await import('@octokit/rest');
+     octokitInstance = process.env.VBOT_GITHUB_API_TOKEN
+       ? new Octokit({
+           auth: process.env.VBOT_GITHUB_API_TOKEN,
+         })
+       : new Octokit();
+ 
+     if (!process.env.VBOT_GITHUB_API_TOKEN) {
+       console.warn(
+         'Warning: No GitHub token found (VBOT_GITHUB_API_TOKEN). API rate limits will be restricted.'
+       );
+     }
+   }
+   return octokitInstance;
+ }
+ 
+ async function saveFile(content, saveDir, filename) {
+   await fs.promises.mkdir(saveDir, { recursive: true });
+   const target = path.join(saveDir, filename);
+   await fs.promises.writeFile(target, content);
+   console.log(`Saved: ${target}`);
+ }
+ 
+ async function fetchFromGithub(owner, repo, remotePath, saveDir, customFilename) {
+   const octokit = await loadOctokit();
+ 
+   try {
+     const resp = await octokit.repos.getContent({ owner, repo, path: remotePath });
+     if (Array.isArray(resp.data)) {
+       // directory
+       for (const item of resp.data) {
+         if (item.type === 'file') {
+           await fetchFromGithub(owner, repo, item.path, saveDir, customFilename);
+         } else if (item.type === 'dir') {
+           // For directories, maintain the directory structure
+           const nestedDir = path.join(saveDir, path.basename(item.path));
+           await fetchFromGithub(owner, repo, item.path, nestedDir);
+         }
+       }
+     } else {
+       // single file
+       const content = Buffer.from(resp.data.content, 'base64').toString();
+       const filename = customFilename || path.basename(resp.data.path);
+       await saveFile(content, saveDir, filename);
+     }
+   } catch (error) {
+     if (error.status === 403 && error.message.includes('rate limit')) {
+       throw new Error(`GitHub API rate limit exceeded. Consider using a token via the VBOT_GITHUB_API_TOKEN environment variable.`);
+     } else if (error.status === 404) {
+       throw new Error(`Path not found: ${remotePath} in ${owner}/${repo}`);
+     } else {
+       throw new Error(`Failed to fetch from GitHub: ${error.message}`);
+     }
+   }
+ }
+ 
+ module.exports = fetchFromGithub;
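
A usage sketch for the exported helper, run from the package root with an inline Node script. The owner, repo, and remote path arguments are placeholders, and the optional token comes from the VBOT_GITHUB_API_TOKEN variable the module reads:

  VBOT_GITHUB_API_TOKEN="$GITHUB_TOKEN" node -e '
    const fetchFromGithub = require("./tools/fetch-from-github.js");
    // Signature: (owner, repo, remotePath, saveDir[, customFilename]);
    // a file path fetches one file, a directory path fetches the whole tree.
    fetchFromGithub("redpanda-data", "docs", "README.adoc", "./downloads")
      .catch((err) => { console.error(err.message); process.exit(1); });
  '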