@redpanda-data/docs-extensions-and-macros 4.8.0 → 4.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/doc-tools.js +88 -53
- package/package.json +1 -1
- package/tools/property-extractor/Makefile +62 -34
- package/tools/property-extractor/generate-handlebars-docs.js +344 -0
- package/tools/property-extractor/helpers/and.js +10 -0
- package/tools/property-extractor/helpers/eq.js +9 -0
- package/tools/property-extractor/helpers/formatPropertyValue.js +128 -0
- package/tools/property-extractor/helpers/formatUnits.js +26 -0
- package/tools/property-extractor/helpers/index.js +13 -0
- package/tools/property-extractor/helpers/join.js +18 -0
- package/tools/property-extractor/helpers/ne.js +9 -0
- package/tools/property-extractor/helpers/not.js +8 -0
- package/tools/property-extractor/helpers/or.js +10 -0
- package/tools/property-extractor/helpers/renderPropertyExample.js +42 -0
- package/tools/property-extractor/package-lock.json +77 -0
- package/tools/property-extractor/package.json +6 -0
- package/tools/property-extractor/property_extractor.py +1163 -20
- package/tools/property-extractor/requirements.txt +1 -0
- package/tools/property-extractor/templates/deprecated-properties.hbs +25 -0
- package/tools/property-extractor/templates/deprecated-property.hbs +7 -0
- package/tools/property-extractor/templates/property-page.hbs +22 -0
- package/tools/property-extractor/templates/property.hbs +70 -0
- package/tools/property-extractor/templates/topic-property.hbs +59 -0
- package/tools/property-extractor/transformers.py +80 -4
- package/tools/property-extractor/json-to-asciidoc/generate_docs.py +0 -491
|
@@ -1,491 +0,0 @@
|
|
|
1
|
-
import json
|
|
2
|
-
import os
|
|
3
|
-
import re
|
|
4
|
-
import argparse
|
|
5
|
-
|
|
6
|
-
# --- Constants for Paths and Filenames ---
# Location of the extractor's JSON output that this script consumes.
INPUT_JSON_PATH = "gen/"
INPUT_JSON_FILE = "properties-output.json"

# Output layout: generated pages go under PAGE_FOLDER_NAME, error reports
# under ERROR_FOLDER_NAME (both relative to --output-dir).
OUTPUT_DIR_DEFAULT = "output"
PAGE_FOLDER_NAME = "pages"
ERROR_FOLDER_NAME = "error"

# One AsciiDoc page per property category.
OUTPUT_FILE_BROKER = "broker-properties.adoc"
OUTPUT_FILE_CLUSTER = "cluster-properties.adoc"
OUTPUT_FILE_CLOUD = "object-storage-properties.adoc"
OUTPUT_FILE_TOPIC = "topic-properties.adoc"
OUTPUT_FILE_DEPRECATED = os.path.join("deprecated", "partials", "deprecated-properties.adoc")
ALL_PROPERTIES_FILE = "all_properties.txt"

# Plain-text reports for properties with missing/inconsistent metadata.
ERROR_FILE_DESCRIPTION = "empty_description.txt"
ERROR_FILE_TYPE = "empty_type.txt"
ERROR_FILE_MAX_WITHOUT_MIN = "max_without_min.txt"
ERROR_FILE_MIN_WITHOUT_MAX = "min_without_max.txt"

# --- Static Documentation Strings ---
# Fixed AsciiDoc headers/intros that frame the generated property sections.
BROKER_PAGE_TITLE = (
    "= Broker Configuration Properties\n"
    ":page-aliases: reference:node-properties.adoc, reference:node-configuration-sample.adoc\n"
    ":description: Reference of broker configuration properties.\n\n"
)
BROKER_INTRO = (
    "Broker configuration properties are applied individually to each broker in a cluster. "
    "You can find and modify these properties in the `redpanda.yaml` configuration file.\n\n"
    "For information on how to edit broker properties, see xref:manage:cluster-maintenance/node-property-configuration.adoc[].\n\n"
    "NOTE: All broker properties require that you restart Redpanda for any update to take effect.\n\n"
)
BROKER_TITLE = "== Broker configuration\n\n"

SCHEMA_REGISTRY_TITLE = "== Schema Registry\n\n"
PANDAPROXY_TITLE = "== HTTP Proxy\n\n"
KAFKA_CLIENT_TITLE = "== HTTP Proxy Client\n\n"

SCHEMA_REGISTRY_INTRO = (
    "The Schema Registry provides configuration properties to help you enable producers and consumers "
    "to share information needed to serialize and deserialize producer and consumer messages.\n\n"
    "For information on how to edit broker properties for the Schema Registry, see xref:manage:cluster-maintenance/node-property-configuration.adoc[].\n\n"
)
PANDAPROXY_INTRO = (
    "Redpanda HTTP Proxy allows access to your data through a REST API. For example, you can list topics or brokers, "
    "get events, produce events, subscribe to events from topics using consumer groups, and commit offsets for a consumer.\n\n"
    "See xref:develop:http-proxy.adoc[]\n\n"
)
KAFKA_CLIENT_INTRO = "Configuration options for HTTP Proxy Client.\n\n"

CLUSTER_PAGE_TITLE = (
    "= Cluster Configuration Properties\n"
    ":page-aliases: reference:tunable-properties.adoc, reference:cluster-properties.adoc\n"
    ":description: Cluster configuration properties list.\n\n"
)
CLUSTER_CONFIG_INTRO = (
    "Cluster configuration properties are the same for all brokers in a cluster, and are set at the cluster level.\n\n"
    "For information on how to edit cluster properties, see xref:manage:cluster-maintenance/cluster-property-configuration.adoc[] "
    "or xref:manage:kubernetes/k-cluster-property-configuration.adoc[].\n\n"
    "NOTE: Some cluster properties require that you restart the cluster for any updates to take effect. "
    "See the specific property details to identify whether or not a restart is required.\n\n"
)
CLUSTER_CONFIG_TITLE = "== Cluster configuration\n\n"

TOPIC_PAGE_TITLE = (
    "= Topic Configuration Properties\n"
    ":page-aliases: reference:topic-properties.adoc\n"
    ":description: Reference of topic configuration properties.\n\n"
)

TOPIC_INTRO = (
    "A topic-level property sets a Redpanda or Kafka configuration for a particular topic.\n\n"
    "Many topic-level properties have corresponding xref:manage:cluster-maintenance/cluster-property-configuration.adoc[cluster properties] that set a default value for all topics of a cluster. To customize the value for a topic, you can set a topic-level property that overrides the value of the corresponding cluster property.\n\n"
    "NOTE: All topic properties take effect immediately after being set.\n\n"
)

TOPIC_CONFIG_TITLE = "== Topic configuration\n\n"

CLOUD_PAGE_TITLE = (
    "= Object Storage Properties\n"
    ":description: Reference of object storage properties.\n\n"
)
CLOUD_CONFIG_INTRO = (
    "Object storage properties are a type of cluster property. For information on how to edit cluster properties, "
    "see xref:manage:cluster-maintenance/cluster-property-configuration.adoc[].\n\n"
    "NOTE: Some object storage properties require that you restart the cluster for any updates to take effect. "
    "See the specific property details to identify whether or not a restart is required.\n\n"
)
CLOUD_CONFIG_TITLE = (
    "== Object storage configuration\n\n"
    "Object storage properties should only be set if you enable xref:manage:tiered-storage.adoc[Tiered Storage].\n\n"
)

DEPRECATED_PROPERTIES_TITLE = "\n== Configuration properties\n\n"
DEPRECATED_PROPERTIES_INTRO = "This is an exhaustive list of all the deprecated properties.\n\n"
DEPRECATED_BROKER_TITLE = "=== Broker properties\n\n"
DEPRECATED_CLUSTER_TITLE = "=== Cluster properties\n\n"

# --- Mapping Constants ---
# Maps the C++ source file a property was defined in to its documentation
# category (used to route each property to the right output page).
DEFINED_IN_MAPPING = {
    "src/v/config/node_config.cc": "broker",
    "src/v/pandaproxy/schema_registry/configuration.cc": "schema reg",
    "src/v/pandaproxy/rest/configuration.cc": "http proxy",
    "src/v/kafka/client/configuration.cc": "http client",
    "src/v/config/configuration.cc": "cluster",
    "src/v/kafka/server/handlers/topics/types.cc": "topic"
}

# Maps a property-name suffix (e.g. "retention_ms" -> "ms") to the unit
# string rendered in the docs.
SUFFIX_TO_UNIT = {
    "ms": "milliseconds",
    "sec": "seconds", # Code is not always consistent when using seconds.
    "seconds": "seconds",
    "bytes": "bytes",
    "buf": "bytes",
    "partitions": "number of partitions per topic",
    "percent": "percent",
    "bps": "bytes per second",
    "fraction": "fraction"
}
|
|
125
|
-
|
|
126
|
-
# --- Utility Functions ---
|
|
127
|
-
def parse_arguments():
    """Parse command-line options for the documentation generator.

    Returns:
        argparse.Namespace with a required ``output_dir`` attribute naming
        the directory that receives the generated documentation.
    """
    arg_parser = argparse.ArgumentParser(
        description="Generate documentation from properties JSON"
    )
    arg_parser.add_argument(
        "--output-dir",
        type=str,
        required=True,
        help="Directory to save the generated documentation",
    )
    return arg_parser.parse_args()
|
|
138
|
-
|
|
139
|
-
def ensure_directory_exists(directory):
    """Create *directory* (including any missing parents); no-op if it exists."""
    os.makedirs(directory, exist_ok=True)
|
|
141
|
-
|
|
142
|
-
def load_json(input_path, input_file):
    """Read and decode the JSON file at ``input_path/input_file``.

    Returns the decoded object, or an empty dict when the file is missing
    or contains invalid JSON (an error message is printed in either case).
    """
    full_path = os.path.join(input_path, input_file)
    try:
        with open(full_path, "r", encoding="utf-8") as handle:
            return json.load(handle)
    except FileNotFoundError:
        print(f"Error: The file '{input_file}' does not exist.")
    except json.JSONDecodeError as e:
        print(f"Error: Failed to parse JSON in '{input_file}': {str(e)}")
    # Both failure paths fall through to the same empty result.
    return {}
|
|
152
|
-
|
|
153
|
-
def process_defaults(input_string, suffix):
    """Normalize a C++ default-value expression for documentation output.

    Recognized forms, in order:
      * ``std::vector<net::unresolved_address>({{"ip", port}})`` -> ``["ip:port"]``
      * ``net::unresolved_address("ip", port)``                  -> ``"ip:port"``
      * compound durations such as ``24h*365``
      * plain durations such as ``100ms``, ``5s``, ``10min``, ``24h``
      * ``std::chrono::<unit>{N}`` / ``std::chrono::<unit>(N)``

    Durations are converted to the unit implied by *suffix* ("ms" or "sec",
    taken from the property-name suffix). Anything unrecognized is returned
    unchanged.
    """
    # ip:port inside a vector literal (group 2 = ip, group 3 = port;
    # group 1 is the whole quoted pair).
    vector_match = re.search(
        r'std::vector<net::unresolved_address>\(\{\{("([\d.]+)",\s*(\d+))\}\}\)', input_string
    )
    if vector_match:
        return [f"{vector_match.group(2)}:{vector_match.group(3)}"]

    # ip:port as a single address.
    broker_match = re.search(r'net::unresolved_address\("([\d.]+)",\s*(\d+)\)', input_string)
    if broker_match:
        return f"{broker_match.group(1)}:{broker_match.group(2)}"

    # Milliseconds contributed by one of each source unit.
    unit_to_ms = {"ms": 1, "s": 1000, "min": 60 * 1000, "h": 60 * 60 * 1000}

    def _convert(ms_total, source_unit):
        # Integer results everywhere except ms -> sec, matching the
        # original per-branch arithmetic exactly.
        if suffix == "ms":
            return ms_total
        if suffix == "sec":
            return ms_total / 1000 if source_unit == "ms" else ms_total // 1000
        return None

    # BUG FIX: compound expressions like '24h*365' must be checked BEFORE
    # plain '<value><unit>' tokens — the simple pattern also matches the
    # '24h' prefix of a compound expression, which previously discarded
    # the multiplier entirely.
    complex_match = re.search(r"(\d+)(h|min|s|ms)\s*\*\s*(\d+)", input_string)
    if complex_match:
        unit = complex_match.group(2)
        total = int(complex_match.group(1)) * int(complex_match.group(3))
        converted = _convert(total * unit_to_ms[unit], unit)
        if converted is not None:
            return converted

    # Handle single time units: milliseconds, seconds, minutes, hours.
    time_match = re.search(r"(\d+)(ms|s|min|h)", input_string)
    if time_match:
        unit = time_match.group(2)
        converted = _convert(int(time_match.group(1)) * unit_to_ms[unit], unit)
        if converted is not None:
            return converted

    # Handle std::chrono::<unit>{N} / std::chrono::<unit>(N) expressions.
    chrono_match = re.search(r"std::chrono::(\w+)[\{\(](\d+)[\)\}]", input_string)
    if chrono_match:
        chrono_conversion = {
            "milliseconds": 1,
            "seconds": 1000,
            "minutes": 60 * 1000,
            "hours": 60 * 60 * 1000,
            "days": 24 * 60 * 60 * 1000,
            "weeks": 7 * 24 * 60 * 60 * 1000,
        }
        ms_total = int(chrono_match.group(2)) * chrono_conversion.get(chrono_match.group(1), 1)
        if suffix == "ms":
            return ms_total
        elif suffix == "sec":
            # Always float division here, as the original did for chrono.
            return ms_total / 1000

    # Return the original string if no pattern matches.
    return input_string
|
|
243
|
-
|
|
244
|
-
def generate_property_doc(key, value):
    """Render a single property as an AsciiDoc section string.

    Returns the formatted section, or None when the property is missing a
    description or a type (such entries are reported via the error files).
    """
    description = value.get("description", "").strip()
    prop_type = value.get("type", "").strip()
    if not description or not prop_type:
        return None

    # Normalize the description: capitalize the first letter and make sure
    # it ends with a period.
    description = description[0].upper() + description[1:]
    if not description.endswith('.'):
        description += '.'

    section = [f"=== {value.get('name')}\n\n", f"{description}\n\n"]

    # The property-name suffix (e.g. "_ms") implies the unit, if any.
    name_suffix = value.get("name").split('_')[-1]
    unit = SUFFIX_TO_UNIT.get(name_suffix)
    if unit is not None:
        section.append(f"*Unit:* {unit}\n\n")

    # Broker properties (node_config.cc) always need a restart, so the
    # restart line is only emitted for everything else.
    if value.get("defined_in") != "src/v/config/node_config.cc":
        restart_label = "Yes" if value.get("needs_restart", False) else "No"
        section.append(f"*Requires restart:* {restart_label}\n\n")

    if "gets_restored" in value:
        restored_label = "Yes" if value.get("gets_restored", False) else "No"
        section.append(f"*Gets restored during cluster restore:* {restored_label}\n\n")

    section.append(f"*Visibility:* `{value.get('visibility') or 'user'}`\n\n")

    if prop_type in ("string", "array", "number", "boolean", "integer"):
        section.append(f"*Type:* {prop_type}\n\n")

    # List alternative names, when present.
    aliases = value.get("aliases")
    if aliases:
        section.append("*Aliases:* " + ", ".join(f"`{alias}`" for alias in aliases) + "\n\n")

    # Only emit a range when both bounds are known.
    if value.get("maximum") is not None and value.get("minimum") is not None:
        section.append(
            f"*Accepted values:* [`{value.get('minimum')}`, `{value.get('maximum')}`]\n\n"
        )

    default = value.get("default")
    if default is None or default == "":
        default_str = "null"
    elif isinstance(default, bool):
        default_str = "true" if default else "false"
    else:
        cleaned = str(default).replace("'", "").lower()
        default_str = process_defaults(cleaned, name_suffix)
    section.append(f"*Default:* `{default_str}`\n\n")
    section.append("---\n\n")
    return "".join(section)
|
|
302
|
-
|
|
303
|
-
def write_data_to_file(output_dir, filename, data):
    """Write *data* to ``output_dir/filename``, creating parent directories.

    Returns True on success, False on failure. Failures are printed rather
    than raised so one bad page does not abort the whole generation run.
    """
    file_path = os.path.join(output_dir, filename)
    # Inlined directory creation (identical to ensure_directory_exists).
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    try:
        with open(file_path, "w+", encoding="utf-8") as output:
            output.write(data)
        print(f"Data written to {file_path} successfully.")
        return True
    except Exception as e:
        # BUG FIX: the message previously printed the literal text
        # "(unknown)" instead of the path that failed to write.
        print(f"Error writing data to {file_path}: {str(e)}")
        return False
|
|
314
|
-
|
|
315
|
-
def write_error_file(output_dir, filename, error_content, total_properties):
    """Write one error-report file and print a summary line.

    Any stale report from a previous run is removed first; the file is only
    (re)written when *error_content* is non-empty. *total_properties* is
    used to report the error percentage.
    """
    file_path = os.path.join(output_dir, filename)
    # Inlined directory creation (identical to ensure_directory_exists).
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    try:
        # Remove stale output so an empty error list leaves no file behind.
        if os.path.exists(file_path):
            os.remove(file_path)
        if error_content:
            error_content = error_content.rstrip("\n")
            with open(file_path, "w+", encoding="utf-8") as output:
                output.write(error_content)
            error_count = len(error_content.split("\n"))
            if error_count > 0:
                empty_name = filename.replace("empty_", "").replace(".txt", "")
                error_type = (
                    "deprecated properties"
                    if empty_name == "deprecated_properties"
                    else f"properties with empty {empty_name}"
                )
                # BUG FIX: guard against division by zero when the input
                # JSON contained no properties at all.
                error_percentage = (
                    round((error_count / total_properties) * 100, 2)
                    if total_properties
                    else 0.0
                )
                # BUG FIX: report the actual path instead of the literal
                # "(unknown)" placeholder.
                print(
                    f"You have {error_count} {error_type}. Percentage of errors: {error_percentage}%. Data written in '{file_path}'."
                )
    except Exception as e:
        print(f"Error writing error data to '{file_path}': {str(e)}")
|
|
339
|
-
|
|
340
|
-
# --- Main Processing ---
def main():
    """Entry point: read the extracted properties JSON, bucket each property
    by category, and write one AsciiDoc page per category plus error reports.

    Side effects: creates directories and writes files under --output-dir;
    prints a per-file status line and a final summary to stdout.
    """
    args = parse_arguments()
    output_dir = args.output_dir
    page_folder = os.path.join(output_dir, PAGE_FOLDER_NAME)
    error_folder = os.path.join(output_dir, ERROR_FOLDER_NAME)

    data = load_json(INPUT_JSON_PATH, INPUT_JSON_FILE)
    properties = data.get("properties", {})
    total_properties = len(properties)

    # Accumulators for property documentation and error logs.
    broker_config_content = []
    schema_registry_content = []
    pandaproxy_content = []
    kafka_client_content = []
    cluster_config_content = []
    cloud_config_content = []
    topic_config_content = []
    deprecated_broker_content = []
    deprecated_cluster_content = []
    all_properties = []
    empty_description_errors = []
    empty_type_errors = []
    max_without_min_errors = []
    min_without_max_errors = []
    deprecated_properties_errors = []

    for key, value in properties.items():
        all_properties.append(key)
        group = None
        # "cloud_" prefix overrides the defined_in mapping; everything else
        # is routed by the C++ source file it was defined in.
        if key.startswith("cloud_"):
            group = "cloud"
        else:
            group = DEFINED_IN_MAPPING.get(value.get("defined_in"))

        # Handle deprecated properties.
        # Deprecated entries are listed on the deprecated page only and are
        # excluded from the regular pages and error checks below.
        if value.get("is_deprecated") is True:
            deprecated_properties_errors.append(key)
            if group == "broker":
                deprecated_broker_content.append(f"- {key}\n\n")
            elif group in ["cluster", "cloud"]:
                deprecated_cluster_content.append(f"- {key}\n\n")
            continue

        # Log errors for missing description or type.
        if not value.get("description", "").strip():
            empty_description_errors.append(key)
        if not value.get("type", "").strip():
            empty_type_errors.append(key)

        # Check for max/min inconsistencies.
        if value.get("maximum") is not None and value.get("minimum") is None:
            max_without_min_errors.append(key)
        if value.get("minimum") is not None and value.get("maximum") is None:
            min_without_max_errors.append(key)

        # None means the property lacked a description or type; it was
        # already recorded in the error lists above.
        property_doc = generate_property_doc(key, value)
        if property_doc is None:
            continue

        group_mapping = {
            "broker": broker_config_content,
            "schema reg": schema_registry_content,
            "http proxy": pandaproxy_content,
            "http client": kafka_client_content,
            "cluster": cluster_config_content,
            "cloud": cloud_config_content,
            "topic": topic_config_content,
        }
        if group in group_mapping:
            group_mapping[group].append(property_doc)

    # Construct final documentation pages.
    broker_page = (
        BROKER_PAGE_TITLE
        + BROKER_INTRO
        + BROKER_TITLE
        + "".join(broker_config_content)
        + "\n\n"
        + SCHEMA_REGISTRY_TITLE
        + SCHEMA_REGISTRY_INTRO
        + "".join(schema_registry_content)
        + "\n\n"
        + PANDAPROXY_TITLE
        + PANDAPROXY_INTRO
        + "".join(pandaproxy_content)
        + "\n\n"
        + KAFKA_CLIENT_TITLE
        + KAFKA_CLIENT_INTRO
        + "".join(kafka_client_content)
    )
    cluster_page = (
        CLUSTER_PAGE_TITLE
        + CLUSTER_CONFIG_INTRO
        + CLUSTER_CONFIG_TITLE
        + "".join(cluster_config_content)
    )
    cloud_page = (
        CLOUD_PAGE_TITLE
        + CLOUD_CONFIG_INTRO
        + CLOUD_CONFIG_TITLE
        + "".join(cloud_config_content)
    )
    topic_page = (
        TOPIC_PAGE_TITLE
        + TOPIC_INTRO
        + TOPIC_CONFIG_TITLE
        + "".join(topic_config_content)
    )
    deprecated_page = (
        DEPRECATED_PROPERTIES_TITLE
        + DEPRECATED_PROPERTIES_INTRO
        + DEPRECATED_BROKER_TITLE
        + "".join(deprecated_broker_content)
        + DEPRECATED_CLUSTER_TITLE
        + "".join(deprecated_cluster_content)
    )

    # Write output files.
    write_data_to_file(page_folder, OUTPUT_FILE_BROKER, broker_page)
    write_data_to_file(page_folder, OUTPUT_FILE_CLUSTER, cluster_page)
    write_data_to_file(page_folder, OUTPUT_FILE_CLOUD, cloud_page)
    write_data_to_file(page_folder, OUTPUT_FILE_TOPIC, topic_page)
    write_data_to_file(page_folder, OUTPUT_FILE_DEPRECATED, deprecated_page)
    write_data_to_file(output_dir, ALL_PROPERTIES_FILE, "\n".join(all_properties))

    # Write error files.
    write_error_file(
        error_folder, ERROR_FILE_DESCRIPTION, "\n".join(empty_description_errors), total_properties
    )
    write_error_file(
        error_folder, ERROR_FILE_TYPE, "\n".join(empty_type_errors), total_properties
    )
    write_error_file(
        error_folder, ERROR_FILE_MAX_WITHOUT_MIN, "\n".join(max_without_min_errors), total_properties
    )
    write_error_file(
        error_folder, ERROR_FILE_MIN_WITHOUT_MAX, "\n".join(min_without_max_errors), total_properties
    )
    write_error_file(
        error_folder, "deprecated_properties.txt", "\n".join(deprecated_properties_errors), total_properties
    )

    # Print summary.
    print(f"Total properties read: {total_properties}")
    print(f"Total Broker properties: {len(broker_config_content)}")
    print(f"Total Cluster properties: {len(cluster_config_content)}")
    print(f"Total Cloud properties: {len(cloud_config_content)}")

if __name__ == "__main__":
    main()
|