@redpanda-data/docs-extensions-and-macros 4.12.5 → 4.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/README.adoc +33 -1064
  2. package/bin/doc-tools-mcp.js +720 -0
  3. package/bin/doc-tools.js +1050 -50
  4. package/bin/mcp-tools/antora.js +153 -0
  5. package/bin/mcp-tools/cache.js +89 -0
  6. package/bin/mcp-tools/cloud-regions.js +127 -0
  7. package/bin/mcp-tools/content-review.js +196 -0
  8. package/bin/mcp-tools/crd-docs.js +153 -0
  9. package/bin/mcp-tools/frontmatter.js +138 -0
  10. package/bin/mcp-tools/generated-docs-review.js +887 -0
  11. package/bin/mcp-tools/helm-docs.js +152 -0
  12. package/bin/mcp-tools/index.js +245 -0
  13. package/bin/mcp-tools/job-queue.js +468 -0
  14. package/bin/mcp-tools/mcp-validation.js +266 -0
  15. package/bin/mcp-tools/metrics-docs.js +146 -0
  16. package/bin/mcp-tools/openapi.js +174 -0
  17. package/bin/mcp-tools/prompt-discovery.js +283 -0
  18. package/bin/mcp-tools/property-docs.js +157 -0
  19. package/bin/mcp-tools/rpcn-docs.js +113 -0
  20. package/bin/mcp-tools/rpk-docs.js +141 -0
  21. package/bin/mcp-tools/telemetry.js +211 -0
  22. package/bin/mcp-tools/utils.js +131 -0
  23. package/bin/mcp-tools/versions.js +168 -0
  24. package/cli-utils/convert-doc-links.js +1 -1
  25. package/cli-utils/github-token.js +58 -0
  26. package/cli-utils/self-managed-docs-branch.js +2 -2
  27. package/cli-utils/setup-mcp.js +313 -0
  28. package/docker-compose/25.1/transactions.md +1 -1
  29. package/docker-compose/transactions.md +1 -1
  30. package/extensions/DEVELOPMENT.adoc +464 -0
  31. package/extensions/README.adoc +124 -0
  32. package/extensions/REFERENCE.adoc +768 -0
  33. package/extensions/USER_GUIDE.adoc +339 -0
  34. package/extensions/generate-rp-connect-info.js +3 -4
  35. package/extensions/version-fetcher/get-latest-console-version.js +38 -27
  36. package/extensions/version-fetcher/get-latest-redpanda-helm-version-from-operator.js +1 -1
  37. package/extensions/version-fetcher/get-latest-redpanda-version.js +65 -54
  38. package/extensions/version-fetcher/retry-util.js +88 -0
  39. package/extensions/version-fetcher/set-latest-version.js +6 -3
  40. package/macros/DEVELOPMENT.adoc +377 -0
  41. package/macros/README.adoc +105 -0
  42. package/macros/REFERENCE.adoc +222 -0
  43. package/macros/USER_GUIDE.adoc +220 -0
  44. package/macros/rp-connect-components.js +6 -6
  45. package/package.json +12 -3
  46. package/tools/bundle-openapi.js +20 -10
  47. package/tools/cloud-regions/generate-cloud-regions.js +1 -1
  48. package/tools/fetch-from-github.js +18 -4
  49. package/tools/gen-rpk-ascii.py +3 -1
  50. package/tools/generate-cli-docs.js +325 -0
  51. package/tools/get-console-version.js +4 -2
  52. package/tools/get-redpanda-version.js +4 -2
  53. package/tools/metrics/metrics.py +19 -7
  54. package/tools/property-extractor/Makefile +7 -1
  55. package/tools/property-extractor/cloud_config.py +4 -4
  56. package/tools/property-extractor/constant_resolver.py +11 -11
  57. package/tools/property-extractor/property_extractor.py +18 -16
  58. package/tools/property-extractor/topic_property_extractor.py +2 -2
  59. package/tools/property-extractor/transformers.py +7 -7
  60. package/tools/property-extractor/type_definition_extractor.py +4 -4
  61. package/tools/redpanda-connect/README.adoc +1 -1
  62. package/tools/redpanda-connect/generate-rpcn-connector-docs.js +5 -3
@@ -0,0 +1,325 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Generate CLI Reference Documentation
5
+ *
6
+ * This script automatically generates AsciiDoc documentation for the doc-tools CLI
7
+ * by executing commands with --help and parsing the output, then enhancing it with
8
+ * JSDoc comments from the source code.
9
+ */
10
+
11
+ const { execSync } = require('child_process');
12
+ const fs = require('fs');
13
+ const path = require('path');
14
+
15
+ /**
16
+ * Execute a CLI command and capture help output
17
+ */
18
+ function getHelpOutput(command) {
19
+ try {
20
+ return execSync(`npx doc-tools ${command} --help`, {
21
+ encoding: 'utf8',
22
+ stdio: ['pipe', 'pipe', 'pipe']
23
+ });
24
+ } catch (error) {
25
+ // Commander exits with code 0 for help, but some shells treat it as error
26
+ return error.stdout || '';
27
+ }
28
+ }
29
+
30
+ /**
31
+ * Sanitize path values in option descriptions to remove user-specific absolute paths
32
+ */
33
+ function sanitizePathInDescription(description) {
34
+ // Get the repository root path for relativization
35
+ const repoRoot = path.resolve(__dirname, '..');
36
+
37
+ let sanitized = description;
38
+
39
+ // First, handle repository root paths specifically
40
+ const repoRootEscaped = repoRoot.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
41
+ const repoRootPattern = new RegExp(repoRootEscaped, 'g');
42
+
43
+ sanitized = sanitized.replace(repoRootPattern, '<repository-root>');
44
+
45
+ // Then handle any remaining long absolute paths that contain our repo structure
46
+ // This regex matches paths that look like they're part of our repository
47
+ const longPathPattern = /\/[^\/\s"')]*docs-extensions-and-macros[^\/\s"')]*(?:\/[^\/\s"')\]]+)*/g;
48
+
49
+ sanitized = sanitized.replace(longPathPattern, (match) => {
50
+ // If this path wasn't already replaced and looks like a subpath, make it relative
51
+ if (!match.includes('<repository-root>')) {
52
+ const relativePath = path.relative(repoRoot, match);
53
+ if (relativePath && !relativePath.startsWith('..') && relativePath !== match) {
54
+ return `./${relativePath}`;
55
+ }
56
+ return '<repository-root>';
57
+ }
58
+ return match;
59
+ });
60
+
61
+ // Finally, handle generic home directory patterns for any remaining absolute paths
62
+ const homePattern = /\/(?:Users|home)\/[^\/\s"')]+/g;
63
+ sanitized = sanitized.replace(homePattern, '~');
64
+
65
+ return sanitized;
66
+ }
67
+
68
+ /**
69
+ * Parse command help output into structured data
70
+ */
71
+ function parseHelp(helpText) {
72
+ const lines = helpText.split('\n');
73
+ const result = {
74
+ usage: '',
75
+ description: '',
76
+ options: [],
77
+ commands: []
78
+ };
79
+
80
+ let currentSection = null;
81
+ let currentItem = null;
82
+
83
+ for (const line of lines) {
84
+ // Usage line
85
+ if (line.startsWith('Usage:')) {
86
+ result.usage = line.replace('Usage:', '').trim();
87
+ currentSection = null;
88
+ }
89
+ // Options section
90
+ else if (line === 'Options:') {
91
+ currentSection = 'options';
92
+ }
93
+ // Commands section
94
+ else if (line === 'Commands:') {
95
+ currentSection = 'commands';
96
+ }
97
+ // Option or command line (starts with spaces and has content)
98
+ else if (line.match(/^ \S/) && currentSection) {
99
+ // Match option/command name (everything up to 2+ spaces) and description
100
+ const match = line.match(/^ (.+?)\s{2,}(.*)/);
101
+ if (match) {
102
+ currentItem = {
103
+ name: match[1].trim(),
104
+ description: match[2].trim()
105
+ };
106
+ result[currentSection].push(currentItem);
107
+ }
108
+ }
109
+ // Continuation of description (more indentation than option lines)
110
+ else if (line.match(/^\s{10,}/) && currentItem) {
111
+ currentItem.description += ' ' + line.trim();
112
+ }
113
+ // Description (first non-empty line that's not a section)
114
+ else if (line.trim() && !currentSection && !result.description && !line.startsWith('Usage:')) {
115
+ result.description = line.trim();
116
+ }
117
+ }
118
+
119
+ return result;
120
+ }
121
+
122
+ /**
123
+ * Parse JSDoc comments from source file
124
+ */
125
+ function parseJSDocComments(sourceFile) {
126
+ const content = fs.readFileSync(sourceFile, 'utf8');
127
+ const comments = {};
128
+
129
+ // Regex to match JSDoc comments followed by command definitions
130
+ // Matches both top-level commands and automation.command()
131
+ const pattern = /\/\*\*\s*([\s\S]*?)\s*\*\/\s*(?:programCli|automation)\s*\.command\(['"]([^'"]+)['"]\)/g;
132
+
133
+ let match;
134
+ while ((match = pattern.exec(content)) !== null) {
135
+ const [, commentText, commandName] = match;
136
+
137
+ // Parse the comment into sections
138
+ const parsed = {
139
+ description: '',
140
+ why: '',
141
+ example: '',
142
+ requirements: ''
143
+ };
144
+
145
+ // Extract sections
146
+ const descMatch = commentText.match(/@description\s*([\s\S]*?)(?=@\w+|$)/);
147
+ if (descMatch) {
148
+ parsed.description = descMatch[1]
149
+ .split('\n')
150
+ .map(line => line.replace(/^\s*\*\s*/, '').trim())
151
+ .filter(line => line)
152
+ .join(' ');
153
+ }
154
+
155
+ const whyMatch = commentText.match(/@why\s*([\s\S]*?)(?=@\w+|$)/);
156
+ if (whyMatch) {
157
+ parsed.why = whyMatch[1]
158
+ .split('\n')
159
+ .map(line => line.replace(/^\s*\*\s*/, '').trim())
160
+ .filter(line => line)
161
+ .join(' ');
162
+ }
163
+
164
+ const exampleMatch = commentText.match(/@example\s*([\s\S]*?)(?=@\w+|$)/);
165
+ if (exampleMatch) {
166
+ parsed.example = exampleMatch[1]
167
+ .split('\n')
168
+ .map(line => line.replace(/^\s*\*\s?/, ''))
169
+ .join('\n')
170
+ .trim();
171
+ }
172
+
173
+ const reqMatch = commentText.match(/@requirements\s*([\s\S]*?)(?=@\w+|$)/);
174
+ if (reqMatch) {
175
+ parsed.requirements = reqMatch[1]
176
+ .split('\n')
177
+ .map(line => line.replace(/^\s*\*\s?/, ''))
178
+ .join('\n')
179
+ .trim();
180
+ }
181
+
182
+ comments[commandName] = parsed;
183
+ }
184
+
185
+ return comments;
186
+ }
187
+
188
+ /**
189
+ * Generate AsciiDoc for a command
190
+ */
191
+ function generateCommandDoc(commandName, helpData, jsdoc, level = 2) {
192
+ const heading = '='.repeat(level);
193
+ let doc = `${heading} ${commandName || 'doc-tools'}\n\n`;
194
+
195
+ // Add extended description from JSDoc if available
196
+ if (jsdoc && jsdoc.description) {
197
+ doc += `${jsdoc.description}\n\n`;
198
+ } else if (helpData.description) {
199
+ doc += `${helpData.description}\n\n`;
200
+ }
201
+
202
+ // Add "Why use it" section if available
203
+ if (jsdoc && jsdoc.why) {
204
+ doc += `*Why use it:*\n\n${jsdoc.why}\n\n`;
205
+ }
206
+
207
+ if (helpData.usage) {
208
+ doc += `*Usage:*\n\n`;
209
+ doc += `[,bash]\n----\n${helpData.usage}\n----\n\n`;
210
+ }
211
+
212
+ if (helpData.options.length > 0) {
213
+ doc += `*Options:*\n\n`;
214
+ helpData.options.forEach(opt => {
215
+ const name = opt.name.replace(/\s+/g, ' ');
216
+ const sanitizedDescription = sanitizePathInDescription(opt.description);
217
+ doc += `\`${name}\`::\n${sanitizedDescription}\n\n`;
218
+ });
219
+ }
220
+
221
+ if (helpData.commands.length > 0) {
222
+ doc += `*Commands:*\n\n`;
223
+ helpData.commands.forEach(cmd => {
224
+ const cmdName = cmd.name.split(' ')[0];
225
+ doc += `* \`${cmdName}\` - ${cmd.description}\n`;
226
+ });
227
+ doc += `\n`;
228
+ }
229
+
230
+ // Add examples from JSDoc if available
231
+ if (jsdoc && jsdoc.example) {
232
+ doc += `*Examples:*\n\n[,bash]\n----\n${jsdoc.example}\n----\n\n`;
233
+ }
234
+
235
+ // Add requirements from JSDoc if available
236
+ if (jsdoc && jsdoc.requirements) {
237
+ doc += `*Requirements:*\n\n${jsdoc.requirements}\n\n`;
238
+ }
239
+
240
+ return doc;
241
+ }
242
+
243
+ /**
244
+ * Main generation function
245
+ */
246
+ function generateDocs() {
247
+ console.log('Generating CLI documentation...');
248
+
249
+ // Parse JSDoc comments from source
250
+ const sourceFile = path.join(__dirname, '..', 'bin', 'doc-tools.js');
251
+ console.log(' Parsing JSDoc comments from source...');
252
+ const jsdocs = parseJSDocComments(sourceFile);
253
+ console.log(` Found ${Object.keys(jsdocs).length} documented commands`);
254
+
255
+ let doc = `= Doc Tools CLI Reference
256
+ :toc:
257
+ :toclevels: 3
258
+
259
+ Auto-generated reference documentation for the \`doc-tools\` command-line interface.
260
+
261
+ IMPORTANT: This documentation is auto-generated. Do not edit manually. Run \`npm run generate:cli-docs\` to regenerate.
262
+
263
+ `;
264
+
265
+ // Get main help
266
+ const mainHelp = getHelpOutput('');
267
+ const mainData = parseHelp(mainHelp);
268
+ doc += generateCommandDoc('', mainData, null, 2);
269
+
270
+ // Top-level commands (excluding 'generate' which has subcommands)
271
+ const topLevelCommands = [
272
+ 'install-test-dependencies',
273
+ 'get-redpanda-version',
274
+ 'get-console-version',
275
+ 'link-readme',
276
+ 'fetch',
277
+ 'setup-mcp'
278
+ ];
279
+
280
+ topLevelCommands.forEach(cmd => {
281
+ console.log(` Generating docs for: ${cmd}`);
282
+ const help = getHelpOutput(cmd);
283
+ const data = parseHelp(help);
284
+ const jsdoc = jsdocs[cmd];
285
+ doc += generateCommandDoc(cmd, data, jsdoc, 2);
286
+ });
287
+
288
+ // Generate command and its subcommands
289
+ console.log(' Generating docs for: generate');
290
+ const generateHelp = getHelpOutput('generate');
291
+ const generateData = parseHelp(generateHelp);
292
+ doc += generateCommandDoc('generate', generateData, null, 2);
293
+
294
+ // Generate subcommands
295
+ const generateSubcommands = [
296
+ 'property-docs',
297
+ 'metrics-docs',
298
+ 'rpk-docs',
299
+ 'rpcn-connector-docs',
300
+ 'helm-spec',
301
+ 'cloud-regions',
302
+ 'crd-spec',
303
+ 'bundle-openapi'
304
+ ];
305
+
306
+ generateSubcommands.forEach(subcmd => {
307
+ console.log(` Generating docs for: generate ${subcmd}`);
308
+ const help = getHelpOutput(`generate ${subcmd}`);
309
+ const data = parseHelp(help);
310
+ const jsdoc = jsdocs[subcmd];
311
+ doc += generateCommandDoc(`generate ${subcmd}`, data, jsdoc, 3);
312
+ });
313
+
314
+ // Write to file
315
+ const outputPath = path.join(__dirname, '..', 'CLI_REFERENCE.adoc');
316
+ fs.writeFileSync(outputPath, doc);
317
+ console.log(`✓ Generated: ${outputPath}`);
318
+ }
319
+
320
+ // Run if executed directly
321
+ if (require.main === module) {
322
+ generateDocs();
323
+ }
324
+
325
+ module.exports = { generateDocs };
@@ -24,9 +24,11 @@ module.exports = async function getConsoleVersion({ beta = false, fromAntora = f
24
24
  }
25
25
 
26
26
  // Initialize GitHub client
27
+ const { getGitHubToken } = require('../cli-utils/github-token');
27
28
  const { Octokit } = await import('@octokit/rest');
28
- const octokit = process.env.REDPANDA_GITHUB_TOKEN
29
- ? new Octokit({ auth: process.env.REDPANDA_GITHUB_TOKEN })
29
+ const token = getGitHubToken();
30
+ const octokit = token
31
+ ? new Octokit({ auth: token })
30
32
  : new Octokit();
31
33
 
32
34
  // Fetch latest release info
@@ -20,9 +20,11 @@ module.exports = async function getRedpandaVersion({ beta = false, fromAntora =
20
20
  }
21
21
 
22
22
  // Load Octokit
23
+ const { getGitHubToken } = require('../cli-utils/github-token');
23
24
  const { Octokit } = await import('@octokit/rest');
24
- const octokit = process.env.REDPANDA_GITHUB_TOKEN
25
- ? new Octokit({ auth: process.env.REDPANDA_GITHUB_TOKEN })
25
+ const token = getGitHubToken();
26
+ const octokit = token
27
+ ? new Octokit({ auth: token })
26
28
  : new Octokit();
27
29
 
28
30
  // Fetch version data
@@ -155,14 +155,15 @@ if __name__ == "__main__":
155
155
 
156
156
  tag_modified = sys.argv[1].strip()
157
157
 
158
- # Resolve the base autogenerated folder at the repo root
158
+ # Resolve the repo root
159
159
  repo_root = os.getcwd()
160
- gen_path = os.path.join(repo_root, "autogenerated")
161
- if not os.path.isdir(gen_path):
162
- logging.error(f"autogenerated folder not found at: {gen_path}")
163
- sys.exit(1)
164
160
 
165
- # Build the output directory using the already provided tag_modified.
161
+ # Build the output directory in modules/reference/pages for docs build
162
+ reference_pages_dir = os.path.join(repo_root, "modules", "reference", "pages")
163
+ ensure_directory_exists(reference_pages_dir)
164
+
165
+ # Also create versioned output in autogenerated for diffing
166
+ gen_path = os.path.join(repo_root, "autogenerated")
166
167
  output_dir = os.path.join(gen_path, tag_modified, "metrics")
167
168
  ensure_directory_exists(output_dir)
168
169
 
@@ -189,11 +190,22 @@ if __name__ == "__main__":
189
190
  "internal": internal_metrics
190
191
  }
191
192
 
192
- # Define output file paths.
193
+ # Define output file paths in modules/reference/pages (primary location for docs build)
194
+ PUBLIC_METRICS_REFERENCE = os.path.join(reference_pages_dir, "public-metrics-reference.adoc")
195
+ INTERNAL_METRICS_REFERENCE = os.path.join(reference_pages_dir, "internal-metrics-reference.adoc")
196
+
197
+ # Also save versioned copies in autogenerated for diffing
193
198
  JSON_OUTPUT_FILE = os.path.join(output_dir, "metrics.json")
194
199
  ASCIIDOC_OUTPUT_FILE = os.path.join(output_dir, "metrics.adoc")
195
200
  INTERNAL_ASCIIDOC_OUTPUT_FILE = os.path.join(output_dir, "internal-metrics.adoc")
196
201
 
202
+ # Write to modules/reference/pages (primary location)
203
+ output_asciidoc(public_metrics, PUBLIC_METRICS_REFERENCE)
204
+ output_asciidoc(internal_metrics, INTERNAL_METRICS_REFERENCE)
205
+
206
+ # Write to autogenerated for versioning and diffing
197
207
  output_json(merged_metrics, JSON_OUTPUT_FILE)
198
208
  output_asciidoc(public_metrics, ASCIIDOC_OUTPUT_FILE)
199
209
  output_asciidoc(internal_metrics, INTERNAL_ASCIIDOC_OUTPUT_FILE)
210
+
211
+ logging.info(f"✅ Generated {len(public_metrics)} public metrics and {len(internal_metrics)} internal metrics")
@@ -78,7 +78,13 @@ redpanda-git:
78
78
  @if [ -d "$(REDPANDA_SRC)" ]; then \
79
79
  git -C "$(REDPANDA_SRC)" fetch --all --tags -q; \
80
80
  else \
81
- git clone -q https://github.com/redpanda-data/redpanda.git "$(REDPANDA_SRC)"; \
81
+ GH_TOKEN=$${REDPANDA_GITHUB_TOKEN:-$${GITHUB_TOKEN:-$${GH_TOKEN}}}; \
82
+ if [ -n "$$GH_TOKEN" ]; then \
83
+ echo "🔑 Using authenticated clone (token provided)"; \
84
+ git clone -q https://$$GH_TOKEN@github.com/redpanda-data/redpanda.git "$(REDPANDA_SRC)"; \
85
+ else \
86
+ git clone -q https://github.com/redpanda-data/redpanda.git "$(REDPANDA_SRC)"; \
87
+ fi; \
82
88
  fi; \
83
89
  if git -C "$(REDPANDA_SRC)" rev-parse --verify -q "$(TAG)" >/dev/null; then \
84
90
  echo "🔖 Checking out '$(TAG)'"; \
@@ -117,7 +117,7 @@ def fetch_cloud_config(github_token: Optional[str] = None) -> CloudConfig:
117
117
  GitHubAuthError: Authentication or access problems with the GitHub API (including 401/403 responses).
118
118
  NetworkError: Network connectivity or timeout failures when contacting the GitHub API.
119
119
  CloudConfigParsingError: Failure to parse or validate the repository YAML files or their expected structure.
120
- CloudConfigError: Generic configuration error (e.g., missing token) or unexpected internal failures.
120
+ CloudConfigError: Generic configuration error (for example, missing token) or unexpected internal failures.
121
121
  """
122
122
  if not github_token:
123
123
  github_token = os.environ.get('GITHUB_TOKEN') or os.environ.get('REDPANDA_GITHUB_TOKEN')
@@ -246,10 +246,10 @@ def fetch_cloud_config(github_token: Optional[str] = None) -> CloudConfig:
246
246
  logger.warning(f"Skipping file with missing name/url: {file}")
247
247
  continue
248
248
 
249
- # Look for version YAML files (e.g., "25.1.yml", "25.2.yml")
249
+ # Look for version YAML files (for example, "25.1.yml", "25.2.yml")
250
250
  if file_name.endswith('.yml'):
251
251
  version_part = file_name.replace('.yml', '')
252
- # Check if it looks like a version number (e.g., "25.1", "25.2.1")
252
+ # Check if it looks like a version number (for example, "25.1", "25.2.1")
253
253
  if version_part.replace('.', '').isdigit():
254
254
  version_files.append((version_part, download_url))
255
255
  logger.debug(f"Found version file: {file_name} -> {version_part}")
@@ -282,7 +282,7 @@ def fetch_cloud_config(github_token: Optional[str] = None) -> CloudConfig:
282
282
  "No valid version files found in cloudv2/install-pack directory.\n"
283
283
  f"Found {len(version_files)} files but none had valid version formats.\n"
284
284
  f"Available files: {[v[0] for v in version_files]}\n"
285
- "Expected version format: 'X.Y' or 'X.Y.Z' (e.g., '25.1', '25.2.1')\n"
285
+ "Expected version format: 'X.Y' or 'X.Y.Z' (for example, '25.1', '25.2.1')\n"
286
286
  "Contact cloud team to verify configuration file naming convention."
287
287
  )
288
288
  logger.error(error_msg)
@@ -2,7 +2,7 @@
2
2
  """
3
3
  Resolves C++ constant references to their actual values.
4
4
 
5
- For properties that use constants as default values (e.g., `ss::sstring{net::tls_v1_2_cipher_suites}`),
5
+ For properties that use constants as default values (for example, `ss::sstring{net::tls_v1_2_cipher_suites}`),
6
6
  this module looks up the constant definition and extracts the actual string value.
7
7
  """
8
8
 
@@ -30,7 +30,7 @@ class ConstantResolver:
30
30
  Resolve a C++ constant name to its actual string value.
31
31
 
32
32
  Args:
33
- constant_name: The constant name (e.g., "net::tls_v1_2_cipher_suites" or "tls_v1_2_cipher_suites")
33
+ constant_name: The constant name (for example, "net::tls_v1_2_cipher_suites" or "tls_v1_2_cipher_suites")
34
34
 
35
35
  Returns:
36
36
  The actual string value, or None if not found
@@ -72,8 +72,8 @@ class ConstantResolver:
72
72
  Search for a constant definition in files matching the given patterns.
73
73
 
74
74
  Args:
75
- patterns: List of file patterns to search (e.g., ['net/tls.cc'])
76
- identifier: The constant identifier (e.g., 'tls_v1_2_cipher_suites')
75
+ patterns: List of file patterns to search (for example, ['net/tls.cc'])
76
+ identifier: The constant identifier (for example, 'tls_v1_2_cipher_suites')
77
77
 
78
78
  Returns:
79
79
  The constant's string value, or None if not found
@@ -111,7 +111,7 @@ class ConstantResolver:
111
111
  - constexpr std::array<std::string_view, N> array_name = {val1, val2, val3};
112
112
 
113
113
  Args:
114
- array_name: The array constant name (e.g., "supported_sasl_mechanisms")
114
+ array_name: The array constant name (for example, "supported_sasl_mechanisms")
115
115
 
116
116
  Returns:
117
117
  List of string values from the array, or None if not found
@@ -230,7 +230,7 @@ class ConstantResolver:
230
230
  Finds the class definition and extracts the `static constexpr const char* name` value.
231
231
 
232
232
  Args:
233
- class_ref: Qualified class name (e.g., "security::scram_sha256_authenticator")
233
+ class_ref: Qualified class name (for example, "security::scram_sha256_authenticator")
234
234
 
235
235
  Returns:
236
236
  Dict with 'value' and 'is_enterprise' keys, or None if not found
@@ -420,12 +420,12 @@ def resolve_validator_enum_constraint(validator_name: str, resolver: ConstantRes
420
420
 
421
421
  For validators like validate_sasl_mechanisms, this function:
422
422
  1. Finds the validator function in validators.cc
423
- 2. Parses it to find what constant array it validates against (e.g., supported_sasl_mechanisms)
423
+ 2. Parses it to find what constant array it validates against (for example, supported_sasl_mechanisms)
424
424
  3. Resolves that array to get the actual enum values
425
- 4. Checks for enterprise values (e.g., enterprise_sasl_mechanisms)
425
+ 4. Checks for enterprise values (for example, enterprise_sasl_mechanisms)
426
426
 
427
427
  Args:
428
- validator_name: Name of the validator function (e.g., "validate_sasl_mechanisms")
428
+ validator_name: Name of the validator function (for example, "validate_sasl_mechanisms")
429
429
  resolver: ConstantResolver instance
430
430
 
431
431
  Returns:
@@ -521,8 +521,8 @@ def resolve_runtime_validation_enum_constraint(property_name: str, defined_in: s
521
521
  }
522
522
 
523
523
  Args:
524
- property_name: Name of the property (e.g., "sasl_mechanism")
525
- defined_in: Path where property is defined (e.g., "src/v/kafka/client/configuration.cc")
524
+ property_name: Name of the property (for example, "sasl_mechanism")
525
+ defined_in: Path where property is defined (for example, "src/v/kafka/client/configuration.cc")
526
526
  resolver: ConstantResolver instance
527
527
 
528
528
  Returns:
@@ -233,7 +233,7 @@ class ConstexprCache:
233
233
  position (int): Position in the file
234
234
 
235
235
  Returns:
236
- str: Namespace (e.g., "model" or "config::tls")
236
+ str: Namespace (for example, "model" or "config::tls")
237
237
  """
238
238
  # Look backwards from position to find namespace declaration
239
239
  preceding = content[:position]
@@ -403,7 +403,7 @@ def process_enterprise_value(enterprise_str):
403
403
  2. C++ scoped enum-like tokens (foo::bar::BAZ) → "BAZ".
404
404
  3. Lambda expressions (strings starting with "[](" and ending with "}") → a short
405
405
  human-readable hint such as "Enterprise feature enabled" or context-specific text.
406
- 4. Simple literal values (e.g., "true", "false", "OIDC", or quoted strings) → returned as-is.
406
+ 4. Simple literal values (for example, "true", "false", "OIDC", or quoted strings) → returned as-is.
407
407
 
408
408
  Parameters:
409
409
  enterprise_str (str): Raw C++ expression text to be converted.
@@ -496,7 +496,7 @@ def resolve_cpp_function_call(function_name):
496
496
  functions from source using general patterns. No hardcoded patterns needed.
497
497
 
498
498
  Parameters:
499
- function_name (str): Fully-qualified C++ function name to resolve (e.g., "model::kafka_audit_logging_topic")
499
+ function_name (str): Fully-qualified C++ function name to resolve (for example, "model::kafka_audit_logging_topic")
500
500
 
501
501
  Returns:
502
502
  str or None: The literal string returned by the C++ function, or None if not found in cache
@@ -509,7 +509,7 @@ def resolve_cpp_function_call(function_name):
509
509
  logger.debug(f"Resolved function '{function_name}' -> '{cached_result}' from cache")
510
510
  return cached_result
511
511
 
512
- # Also try without namespace qualifier (e.g., "kafka_audit_logging_topic")
512
+ # Also try without namespace qualifier (for example, "kafka_audit_logging_topic")
513
513
  if '::' in function_name:
514
514
  simple_name = function_name.split('::')[-1]
515
515
  cached_result = _constexpr_cache.lookup_function(simple_name)
@@ -529,7 +529,7 @@ def resolve_constexpr_identifier(identifier):
529
529
  Searches common Redpanda source locations for constexpr string or string_view definitions matching the given identifier and returns the literal if found.
530
530
 
531
531
  Parameters:
532
- identifier (str): The identifier name to resolve (e.g., "scram" or "net::tls_v1_2_cipher_suites").
532
+ identifier (str): The identifier name to resolve (for example, "scram" or "net::tls_v1_2_cipher_suites").
533
533
 
534
534
  Returns:
535
535
  str or None: The resolved literal string value if found, otherwise `None`.
@@ -546,7 +546,7 @@ def resolve_constexpr_identifier(identifier):
546
546
  logger.debug(f"Could not find Redpanda source directory to resolve identifier: {identifier}")
547
547
  return None
548
548
 
549
- # Strip namespace qualifier if present (e.g., "net::tls_v1_2_cipher_suites" -> "tls_v1_2_cipher_suites")
549
+ # Strip namespace qualifier if present (for example, "net::tls_v1_2_cipher_suites" -> "tls_v1_2_cipher_suites")
550
550
  search_identifier = identifier.split('::')[-1] if '::' in identifier else identifier
551
551
 
552
552
  # Pattern to match constexpr string_view definitions
@@ -1567,7 +1567,7 @@ def resolve_type_and_default(properties, definitions):
1567
1567
  └─────────────────────────────────────────────────────────────────────────
1568
1568
 
1569
1569
  HOW TO ADD NEW TYPE DEFINITIONS:
1570
- 1. Identify the C++ type that needs a definition (e.g., new_endpoint_type)
1570
+ 1. Identify the C++ type that needs a definition (for example, new_endpoint_type)
1571
1571
  2. Analyze the C++ struct/class to determine JSON schema structure
1572
1572
  3. Add entry to definitions.json with appropriate JSON Schema:
1573
1573
  {
@@ -1662,10 +1662,10 @@ def resolve_type_and_default(properties, definitions):
1662
1662
 
1663
1663
  This function recognises common C++ patterns produced by the extractor and maps them to values suitable for JSON schema defaults and examples. Handled cases include:
1664
1664
  - std::nullopt -> null
1665
- - zero-argument functions (e.g., model::kafka_audit_logging_topic()) resolved from source when possible
1666
- - enum tokens (e.g., fips_mode_flag::disabled -> "disabled")
1665
+ - zero-argument functions (for example, model::kafka_audit_logging_topic()) resolved from source when possible
1666
+ - enum tokens (for example, fips_mode_flag::disabled -> "disabled")
1667
1667
  - constexpr identifiers and simple string constructors resolved to their literal strings when available
1668
- - known default constructors and truncated type names mapped to sensible defaults (e.g., duration -> 0, path -> "")
1668
+ - known default constructors and truncated type names mapped to sensible defaults (for example, duration -> 0, path -> "")
1669
1669
  - simple heuristics for unknown constructors and concatenated expressions
1670
1670
 
1671
1671
  Returns:
@@ -1722,7 +1722,7 @@ def resolve_type_and_default(properties, definitions):
1722
1722
  base = base + "s"
1723
1723
  return f'"{num} {base}"'
1724
1724
 
1725
- # Evaluate arithmetic in duration constructors (e.g., "60 * 5" -> "300 seconds")
1725
+ # Evaluate arithmetic in duration constructors (for example, "60 * 5" -> "300 seconds")
1726
1726
  if "*" in value:
1727
1727
  try:
1728
1728
  result = safe_arithmetic_eval(value)
@@ -1854,12 +1854,12 @@ def resolve_type_and_default(properties, definitions):
1854
1854
  - Integer and boolean literals → Python int and bool.
1855
1855
  - Object constructors (Type(arg1, arg2) or Type{...}) → dict mapping constructor arguments to the object's properties when a corresponding type definition exists.
1856
1856
  - Nested constructors → nested dicts with their fields expanded.
1857
- - Array initializer lists (e.g., {Type(...), Type(...)}) → Python list with each element expanded.
1857
+ - Array initializer lists (for example, {Type(...), Type(...)}) → Python list with each element expanded.
1858
1858
  - Special-case mappings for known type patterns (for example, an address-type constructor expanded into {"address", "port"} when the target type expects that shape).
1859
1859
  If a default cannot be resolved or the type is an enum, the original input is returned unchanged; the string "null" is converted to None. If default_str is not a string, it is returned as-is.
1860
1860
 
1861
1861
  Parameters:
1862
- type_name (str): The resolved type name for the default value (e.g., "model::broker_endpoint" or a primitive type like "string").
1862
+ type_name (str): The resolved type name for the default value (for example, "model::broker_endpoint" or a primitive type like "string").
1863
1863
  default_str (str | any): The C++ default expression to expand, or a non-string value already decoded.
1864
1864
 
1865
1865
  Returns:
@@ -1881,7 +1881,7 @@ def resolve_type_and_default(properties, definitions):
1881
1881
  else:
1882
1882
  return processed
1883
1883
 
1884
- # Handle string type with constructor syntax (e.g., ss::sstring{scram})
1884
+ # Handle string type with constructor syntax (for example, ss::sstring{scram})
1885
1885
  if type_name == "string" and ("{" in default_str or "(" in default_str):
1886
1886
  tname, args = parse_constructor(default_str)
1887
1887
  if tname and args:
@@ -1897,7 +1897,7 @@ def resolve_type_and_default(properties, definitions):
1897
1897
  type_def = resolve_definition_type(resolve_type_with_namespace(type_name, definitions))
1898
1898
  if "enum" in type_def:
1899
1899
  # Strip C++ namespace qualifiers from enum values
1900
- # e.g., model::partition_autobalancing_mode::continuous → continuous
1900
+ # for example, model::partition_autobalancing_mode::continuous → continuous
1901
1901
  if isinstance(default_str, str) and '::' in default_str:
1902
1902
  return default_str.split('::')[-1]
1903
1903
  return default_str
@@ -2775,7 +2775,7 @@ def main():
2775
2775
 
2776
2776
  # Load overrides file (contains both property and definition overrides)
2777
2777
  overrides = None
2778
- if options.overrides:
2778
+ if options.overrides and os.path.exists(options.overrides):
2779
2779
  try:
2780
2780
  with open(options.overrides) as f:
2781
2781
  overrides = json.load(f)
@@ -2790,6 +2790,8 @@ def main():
2790
2790
  except Exception as e:
2791
2791
  logging.error(f"Failed to load overrides file: {e}")
2792
2792
  sys.exit(1)
2793
+ elif options.overrides:
2794
+ logger.info(f"Overrides file not found: {options.overrides} (skipping)")
2793
2795
 
2794
2796
  # DEPRECATED: Support legacy --definitions flag for backward compatibility
2795
2797
  # Users should migrate to putting definitions in overrides.json under "definitions" key
@@ -444,8 +444,8 @@ class TopicPropertyExtractor:
444
444
  if not re.match(r'^[a-zA-Z][a-zA-Z0-9._-]*$', prop_name):
445
445
  return False
446
446
 
447
- # Reject Java-style package names (e.g., "redpanda.core.admin.Service")
448
- # Topic properties use lowercase with dots (e.g., "cleanup.policy", "segment.ms")
447
+ # Reject Java-style package names (for example, "redpanda.core.admin.Service")
448
+ # Topic properties use lowercase with dots (for example, "cleanup.policy", "segment.ms")
449
449
  # Split by dots and check each segment - reject if any segment after first has uppercase
450
450
  segments = prop_name.split('.')
451
451
  for i, segment in enumerate(segments):