rolfedh-doc-utils 0.1.4__py3-none-any.whl → 0.1.41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- archive_unused_files.py +18 -5
- archive_unused_images.py +9 -2
- callout_lib/__init__.py +22 -0
- callout_lib/converter_bullets.py +103 -0
- callout_lib/converter_comments.py +295 -0
- callout_lib/converter_deflist.py +134 -0
- callout_lib/detector.py +364 -0
- callout_lib/table_parser.py +804 -0
- check_published_links.py +1083 -0
- check_scannability.py +6 -0
- check_source_directives.py +101 -0
- convert_callouts_interactive.py +567 -0
- convert_callouts_to_deflist.py +628 -0
- convert_freemarker_to_asciidoc.py +288 -0
- convert_tables_to_deflists.py +479 -0
- doc_utils/convert_freemarker_to_asciidoc.py +708 -0
- doc_utils/duplicate_content.py +409 -0
- doc_utils/duplicate_includes.py +347 -0
- doc_utils/extract_link_attributes.py +618 -0
- doc_utils/format_asciidoc_spacing.py +285 -0
- doc_utils/insert_abstract_role.py +220 -0
- doc_utils/inventory_conditionals.py +164 -0
- doc_utils/missing_source_directive.py +211 -0
- doc_utils/replace_link_attributes.py +187 -0
- doc_utils/spinner.py +119 -0
- doc_utils/unused_adoc.py +150 -22
- doc_utils/unused_attributes.py +218 -6
- doc_utils/unused_images.py +81 -9
- doc_utils/validate_links.py +576 -0
- doc_utils/version.py +8 -0
- doc_utils/version_check.py +243 -0
- doc_utils/warnings_report.py +237 -0
- doc_utils_cli.py +158 -0
- extract_link_attributes.py +120 -0
- find_duplicate_content.py +209 -0
- find_duplicate_includes.py +198 -0
- find_unused_attributes.py +84 -6
- format_asciidoc_spacing.py +134 -0
- insert_abstract_role.py +163 -0
- inventory_conditionals.py +53 -0
- replace_link_attributes.py +214 -0
- rolfedh_doc_utils-0.1.41.dist-info/METADATA +246 -0
- rolfedh_doc_utils-0.1.41.dist-info/RECORD +52 -0
- {rolfedh_doc_utils-0.1.4.dist-info → rolfedh_doc_utils-0.1.41.dist-info}/WHEEL +1 -1
- rolfedh_doc_utils-0.1.41.dist-info/entry_points.txt +20 -0
- rolfedh_doc_utils-0.1.41.dist-info/top_level.txt +21 -0
- validate_links.py +213 -0
- rolfedh_doc_utils-0.1.4.dist-info/METADATA +0 -285
- rolfedh_doc_utils-0.1.4.dist-info/RECORD +0 -17
- rolfedh_doc_utils-0.1.4.dist-info/entry_points.txt +0 -5
- rolfedh_doc_utils-0.1.4.dist-info/top_level.txt +0 -5
- {rolfedh_doc_utils-0.1.4.dist-info → rolfedh_doc_utils-0.1.41.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Version checking utility for doc-utils.
|
|
4
|
+
|
|
5
|
+
Checks PyPI for the latest version and notifies users if an update is available.
|
|
6
|
+
Includes caching to avoid excessive PyPI requests.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import os
|
|
11
|
+
import sys
|
|
12
|
+
import time
|
|
13
|
+
import urllib.request
|
|
14
|
+
import urllib.error
|
|
15
|
+
from datetime import datetime, timedelta
|
|
16
|
+
from importlib.metadata import version as get_installed_version
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Optional, Tuple
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def get_cache_dir() -> Path:
    """Return (creating it if necessary) the cache directory for version-check data."""
    # Honor the XDG Base Directory spec; fall back to ~/.cache otherwise.
    xdg_cache = os.environ.get('XDG_CACHE_HOME')
    base = Path(xdg_cache) if xdg_cache else Path.home() / '.cache'
    cache_dir = base / 'doc-utils'
    cache_dir.mkdir(parents=True, exist_ok=True)
    return cache_dir
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def get_cache_file() -> Path:
    """Return the path of the JSON file holding cached version-check results."""
    return get_cache_dir() / 'version_check.json'
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def read_cache() -> Optional[dict]:
    """Return cached version-check data, or None if absent, stale, or corrupt."""
    cache_file = get_cache_file()
    if not cache_file.exists():
        return None

    try:
        with open(cache_file, 'r') as handle:
            payload = json.load(handle)
        checked_at = datetime.fromisoformat(payload['last_check'])
    except (json.JSONDecodeError, KeyError, ValueError):
        # Unreadable or malformed cache behaves like no cache at all.
        return None

    # Entries are considered fresh for 24 hours.
    if datetime.now() - checked_at < timedelta(hours=24):
        return payload
    return None
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def write_cache(latest_version: str, current_version: str):
    """Persist the latest/current version pair with a timestamp (best-effort)."""
    payload = {
        'last_check': datetime.now().isoformat(),
        'latest_version': latest_version,
        'current_version': current_version,
    }

    try:
        with open(get_cache_file(), 'w') as handle:
            json.dump(payload, handle)
    except Exception:
        # Cache persistence must never disturb the caller.
        pass
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def fetch_latest_version() -> Optional[str]:
    """Return the newest release version published on PyPI, or None on failure."""
    url = 'https://pypi.org/pypi/rolfedh-doc-utils/json'
    try:
        # Short timeout so a CLI tool is never held up waiting on the network.
        with urllib.request.urlopen(url, timeout=2) as response:
            payload = json.loads(response.read())
        return payload['info']['version']
    except (urllib.error.URLError, json.JSONDecodeError, KeyError, TimeoutError):
        # Unreachable PyPI or an unexpected payload: silently report nothing.
        return None
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def parse_version(version_str: str) -> Tuple[int, ...]:
    """Parse a version string into a tuple of ints suitable for comparison.

    Build metadata (``+...``) and dash-separated suffixes are dropped first.
    A component with a trailing non-numeric suffix (e.g. the "3rc1" in
    "1.2.3rc1") contributes only its leading digits; previously such PEP 440
    pre-release strings raised ValueError and collapsed the whole version to
    ``(0,)``, which broke update comparisons.

    Args:
        version_str: Version text such as "0.1.41" or "1.2.3rc1".

    Returns:
        Tuple of integer components, or ``(0,)`` when the input is missing
        or contains no numeric content.
    """
    import re

    try:
        # Remove build metadata and dash-separated pre-release suffixes.
        version_str = version_str.split('+')[0].split('-')[0]
    except AttributeError:
        # Non-string input (e.g. None) parses as the minimal version.
        return (0,)

    parts = []
    for component in version_str.split('.'):
        digits = re.match(r'\d+', component)
        if not digits:
            # Stop at the first non-numeric component (e.g. "dev1").
            break
        parts.append(int(digits.group()))
    return tuple(parts) if parts else (0,)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def check_for_update(force_check: bool = False) -> Optional[str]:
    """
    Check whether a newer release than the installed one exists.

    Args:
        force_check: If True, bypass the 24h cache and query PyPI directly.

    Returns:
        The latest version string when it outranks the installed version,
        otherwise None.
    """
    try:
        current_version = get_installed_version('rolfedh-doc-utils')
    except Exception:
        # Installed version unknown: nothing sensible to compare against.
        return None

    def _newer(candidate: str) -> Optional[str]:
        # Report the candidate only when it beats the installed version.
        if parse_version(candidate) > parse_version(current_version):
            return candidate
        return None

    # Fresh cache short-circuits the network round trip.
    if not force_check:
        cached = read_cache()
        if cached:
            return _newer(cached['latest_version'])

    latest_version = fetch_latest_version()
    if not latest_version:
        return None

    write_cache(latest_version, current_version)
    return _newer(latest_version)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def detect_install_method() -> str:
    """
    Guess how the package was installed.

    Returns:
        'pipx' or 'pip'

    Note: Defaults to 'pipx' as the recommended installation method, so every
    code path here currently resolves to 'pipx'; the checks below exist to
    make the pipx determination explicit rather than accidental.
    """
    # Standard pipx installs embed "pipx" in the venv prefix.
    if 'pipx' in sys.prefix.lower():
        return 'pipx'

    # sys.prefix living under PIPX_HOME (or its default location) also means pipx.
    pipx_home = os.environ.get('PIPX_HOME') or os.path.join(Path.home(), '.local', 'pipx')
    if pipx_home and str(Path(sys.prefix)).startswith(str(Path(pipx_home))):
        return 'pipx'

    # An interpreter under ~/.local/pipx is another strong hint.
    try:
        if '.local/pipx' in str(Path(sys.executable)):
            return 'pipx'
    except Exception:
        pass

    # Default to pipx as the recommended method (per CLAUDE.md guidelines);
    # users then see the recommended upgrade command even for editable installs.
    return 'pipx'
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def show_update_notification(latest_version: str, current_version: str = None):
    """Print an upgrade notice and the matching upgrade command to stderr."""
    if not current_version:
        try:
            current_version = get_installed_version('rolfedh-doc-utils')
        except Exception:
            current_version = 'unknown'

    # Stderr keeps the notice out of any piped or parsed tool output.
    print(f"\n📦 Update available: {current_version} → {latest_version}", file=sys.stderr)

    # Recommend the upgrade command that matches how the tool was installed.
    if detect_install_method() == 'pipx':
        print(f" Run: pipx upgrade rolfedh-doc-utils", file=sys.stderr)
    else:
        print(f" Run: pip install --upgrade rolfedh-doc-utils", file=sys.stderr)

    print("", file=sys.stderr)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def check_version_on_startup():
    """
    Check for updates at tool startup and print a notice if one exists.

    Call this early in each CLI tool's main(); it is best-effort and will
    never raise or block tool execution.
    """
    # Respect opt-outs and non-interactive contexts.
    skip = (
        os.environ.get('DOC_UTILS_NO_VERSION_CHECK')  # user opt-out
        or os.environ.get('CI')                       # running in CI
        or not sys.stderr.isatty()                    # not attached to a terminal
    )
    if skip:
        return

    try:
        latest_version = check_for_update()
        if latest_version:
            show_update_notification(latest_version)
    except Exception:
        # Never let version checking break the tool.
        pass
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def disable_version_check():
    """
    Print instructions for disabling the version check.

    Users can disable by setting DOC_UTILS_NO_VERSION_CHECK environment variable.
    """
    for line in (
        "To disable version checking, set the environment variable:",
        " export DOC_UTILS_NO_VERSION_CHECK=1",
        "\nOr add it to your shell configuration file.",
    ):
        print(line)
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
if __name__ == "__main__":
    # Manual testing/debugging entry point for the version checker.
    import argparse

    parser = argparse.ArgumentParser(description="Check for doc-utils updates")
    parser.add_argument('--force', action='store_true', help='Force check (bypass cache)')
    parser.add_argument('--disable-instructions', action='store_true',
                        help='Show instructions for disabling version check')
    args = parser.parse_args()

    if args.disable_instructions:
        disable_version_check()
    else:
        latest = check_for_update(force_check=args.force)
        if latest:
            show_update_notification(latest)
        else:
            print("You are running the latest version!")
|
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Generate AsciiDoc warnings report for callout conversion issues.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from typing import List, Dict, Set
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class WarningInfo:
    """Structured details for a single callout-conversion warning."""

    def __init__(self, warning_type: str, file_name: str, line_info: str,
                 code_nums: List[int] = None, explanation_nums: List[int] = None):
        # 'mismatch' (code/explanation numbers differ) or 'missing'
        # (callouts present but no explanation list was found).
        self.warning_type = warning_type
        self.file_name = file_name
        # Line or line-range text, e.g. "211" or "55-72".
        self.line_info = line_info
        # Callout numbers seen in the code block / in the explanations.
        self.code_nums = code_nums or []
        self.explanation_nums = explanation_nums or []
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def parse_warning_message(warning_msg: str) -> WarningInfo:
    """
    Parse a warning message into a WarningInfo, or return None if it
    doesn't match a recognized format.

    Examples:
    - "WARNING: file.adoc lines 55-72: Callout mismatch: code has [1, 2], explanations have [1, 3]"
    - "WARNING: file.adoc line 211: Code block has callouts [1, 2, 3, 4] but no explanations found..."
    """
    import re

    def _nums(pattern: str) -> List[int]:
        # Extract a bracketed, comma-separated number list from the message.
        found = re.search(pattern, warning_msg)
        if not found:
            return []
        return [int(piece.strip()) for piece in found.group(1).split(',')]

    # Every recognized warning starts with the file name and a line spec.
    header = re.match(r'WARNING: (.+?) lines? ([\d-]+):', warning_msg)
    if not header:
        return None
    file_name, line_info = header.group(1), header.group(2)

    if 'Callout mismatch' in warning_msg:
        # e.g. "code has [1, 2], explanations have [1, 3]"
        return WarningInfo('mismatch', file_name, line_info,
                           _nums(r'code has \[([^\]]+)\]'),
                           _nums(r'explanations have \[([^\]]+)\]'))

    if 'but no explanations found' in warning_msg:
        # e.g. "Code block has callouts [1, 2, 3, 4] but no explanations found"
        return WarningInfo('missing', file_name, line_info,
                           _nums(r'has callouts \[([^\]]+)\]'), [])

    return None
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def analyze_mismatch(code_nums: List[int], exp_nums: List[int]) -> List[str]:
    """
    Analyze what's wrong with a callout mismatch.

    Detects duplicate explanation callouts, callouts missing from the
    explanations, extra callouts present only in the explanations, and a
    uniform shift ("off-by-one") between the two sequences.

    Args:
        code_nums: Callout numbers found in the code block.
        exp_nums: Callout numbers found in the explanation list.

    Returns:
        A list of human-readable issue descriptions (empty when the two
        sequences agree).
    """
    issues = []
    code_set = set(code_nums)
    exp_set = set(exp_nums)

    # Duplicate callout numbers within the explanations.
    exp_counts = {}
    for num in exp_nums:
        exp_counts[num] = exp_counts.get(num, 0) + 1
    duplicates = [num for num, count in exp_counts.items() if count > 1]
    for dup in duplicates:
        issues.append(f"Duplicate callout: {dup} (appears {exp_counts[dup]} times in explanations)")

    # Callouts in the code but absent from the explanations.
    missing_in_exp = code_set - exp_set
    for num in sorted(missing_in_exp):
        issues.append(f"Missing callout: {num} (in code but not in explanations)")

    # Callouts in the explanations but absent from the code.
    extra_in_exp = exp_set - code_set
    for num in sorted(extra_in_exp):
        issues.append(f"Extra callout: {num} (in explanations but not in code)")

    # Uniform shift between code and explanations. The previous guard
    # (starts differ AND no missing/extra/duplicates) was unreachable:
    # differing starts always imply a missing or extra element, so the
    # off-by-one hint was never emitted. Detect a constant offset between
    # the sorted sequences instead.
    if (code_nums and exp_nums and not duplicates
            and len(code_nums) == len(exp_nums)):
        offsets = {e - c for c, e in zip(sorted(code_nums), sorted(exp_nums))}
        if len(offsets) == 1 and offsets != {0}:
            issues.append(
                f"Off-by-one error (code starts at {min(code_nums)}, "
                f"explanations start at {min(exp_nums)})"
            )

    return issues
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def generate_warnings_report(warnings: List[str], output_path: Path = None) -> str:
    """
    Generate an AsciiDoc warnings report from warning messages.

    Args:
        warnings: List of warning message strings
        output_path: Path to write report file (if None, returns content only)

    Returns:
        The report content as a string (empty string when there is nothing
        to report)
    """
    if not warnings:
        return ""

    # Parse all warnings; messages that don't match a known format are skipped.
    parsed_warnings = []
    for warning in warnings:
        parsed = parse_warning_message(warning)
        if parsed:
            parsed_warnings.append(parsed)

    if not parsed_warnings:
        return ""

    # Group warnings by type so the report can present one section per kind.
    mismatch_warnings = [w for w in parsed_warnings if w.warning_type == 'mismatch']
    missing_warnings = [w for w in parsed_warnings if w.warning_type == 'missing']

    # Generate report content as a list of AsciiDoc lines, joined at the end.
    lines = []
    lines.append("= Callout Conversion Warnings Report")
    lines.append(":toc:")
    lines.append("")
    lines.append(f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    lines.append("")

    # Summary
    lines.append("== Summary")
    lines.append("")
    lines.append(f"Total warnings: {len(parsed_warnings)}")
    if mismatch_warnings:
        lines.append(f"- Callout mismatches: {len(mismatch_warnings)}")
    if missing_warnings:
        lines.append(f"- Missing explanations: {len(missing_warnings)}")
    lines.append("")
    lines.append("== Recommended Actions")
    lines.append("")
    lines.append("1. Review each warning below and fix callout issues where appropriate")
    lines.append("2. For callout mismatches: Ensure code callouts match explanation numbers")
    lines.append("3. For missing explanations: Check if explanations are shared with another block or missing")
    lines.append("4. After fixing issues, rerun the conversion command")
    lines.append("")
    lines.append("== Force Mode Option")
    lines.append("")
    lines.append("CAUTION: Use this option sparingly and only after reviewing all warnings.")
    lines.append("")
    lines.append("If you've reviewed all warnings and confirmed that remaining issues are acceptable,")
    lines.append("you can use the `--force` option to strip callouts from code blocks despite warnings:")
    lines.append("")
    lines.append("[source,bash]")
    lines.append("----")
    lines.append("convert-callouts-to-deflist --force modules/")
    lines.append("----")
    lines.append("")
    lines.append("Force mode will:")
    lines.append("")
    lines.append("- Strip callouts from blocks with missing explanations (without creating explanation lists)")
    lines.append("- Convert blocks with callout mismatches using available explanations")
    lines.append("- Require confirmation before proceeding (unless in dry-run mode)")
    lines.append("")
    lines.append("IMPORTANT: Always work in a git branch and review changes with `git diff` before committing.")
    lines.append("")

    # Callout Mismatch section: one "===" subsection per affected file/region,
    # including a per-warning diagnosis from analyze_mismatch().
    if mismatch_warnings:
        lines.append("== Callout Mismatch Warnings")
        lines.append("")
        lines.append("Callout numbers in code don't match explanation numbers.")
        lines.append("")

        for warning in mismatch_warnings:
            lines.append(f"=== {warning.file_name}")
            lines.append("")
            lines.append(f"*Lines {warning.line_info}*")
            lines.append("")
            lines.append(f"Code has:: {warning.code_nums}")
            lines.append(f"Explanations have:: {warning.explanation_nums}")
            lines.append("")

            issues = analyze_mismatch(warning.code_nums, warning.explanation_nums)
            if issues:
                lines.append("Issues detected::")
                for issue in issues:
                    lines.append(f"- {issue}")
                lines.append("")

    # Missing Explanations section: blocks whose callouts have no explanation
    # list; these always require a manual review.
    if missing_warnings:
        lines.append("== Missing Explanations Warnings")
        lines.append("")
        lines.append("Code blocks with callouts but no explanations found after them.")
        lines.append("")

        for warning in missing_warnings:
            lines.append(f"=== {warning.file_name}")
            lines.append("")
            lines.append(f"*Line {warning.line_info}*")
            lines.append("")
            lines.append(f"Callouts in code:: {warning.code_nums}")
            lines.append("")
            lines.append("Possible causes::")
            lines.append("- Explanations shared with another code block (e.g., in conditional sections)")
            lines.append("- Explanations in unexpected location")
            lines.append("- Documentation error (missing explanations)")
            lines.append("")
            lines.append("Action:: Review this block manually")
            lines.append("")

    content = '\n'.join(lines)

    # Write to file if path provided
    if output_path:
        with open(output_path, 'w', encoding='utf-8') as f:
            f.write(content)

    return content
|
doc_utils_cli.py
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
doc-utils - CLI tools for AsciiDoc documentation projects
|
|
4
|
+
|
|
5
|
+
Main entry point that provides a hub for all doc-utils tools.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import argparse
|
|
9
|
+
import sys
|
|
10
|
+
from doc_utils.version import __version__
|
|
11
|
+
from doc_utils.version_check import check_version_on_startup
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
# Tool definitions with descriptions.
# Each entry provides the console-script 'name', a one-line 'description'
# (optionally tagged "[EXPERIMENTAL]", which print_tools_list() surfaces
# next to the name), and a usage 'example' shown in the help output.
TOOLS = [
    {
        'name': 'validate-links',
        'description': 'Validates all links in documentation with URL transposition',
        'example': 'validate-links --transpose "https://prod--https://preview"'
    },
    {
        'name': 'extract-link-attributes',
        'description': 'Extracts link/xref macros with attributes into reusable definitions',
        'example': 'extract-link-attributes --dry-run'
    },
    {
        'name': 'replace-link-attributes',
        'description': 'Resolves Vale LinkAttribute issues by replacing attributes in link URLs',
        'example': 'replace-link-attributes --dry-run'
    },
    {
        'name': 'format-asciidoc-spacing',
        'description': 'Standardizes spacing after headings and around includes',
        'example': 'format-asciidoc-spacing --dry-run modules/'
    },
    {
        'name': 'check-scannability',
        'description': 'Analyzes document readability by checking sentence/paragraph length',
        'example': 'check-scannability --max-sentence-length 5'
    },
    {
        'name': 'archive-unused-files',
        'description': 'Finds and optionally archives unreferenced AsciiDoc files',
        'example': 'archive-unused-files # preview\narchive-unused-files --archive # execute'
    },
    {
        'name': 'archive-unused-images',
        'description': 'Finds and optionally archives unreferenced image files',
        'example': 'archive-unused-images # preview\narchive-unused-images --archive # execute'
    },
    {
        'name': 'find-unused-attributes',
        'description': 'Identifies unused attribute definitions in AsciiDoc files',
        'example': 'find-unused-attributes # auto-discovers attributes files'
    },
    {
        'name': 'convert-callouts-to-deflist',
        'description': 'Converts callouts to definition lists (batch mode)',
        'example': 'convert-callouts-to-deflist --dry-run modules/'
    },
    {
        'name': 'convert-callouts-interactive',
        'description': 'Interactively converts callouts with per-block format selection',
        'example': 'convert-callouts-interactive modules/'
    },
]
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def print_tools_list():
    """Print a formatted list of all available tools."""
    print("\n🛠️ Available Tools:\n")

    for tool in TOOLS:
        description = tool['description']
        # Surface the experimental tag beside the name rather than in the text.
        marker = " [EXPERIMENTAL]" if "[EXPERIMENTAL]" in description else ""
        summary = description.replace(" [EXPERIMENTAL]", "")
        print(f" {tool['name']}{marker}")
        print(f" {summary}")
        print()
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def print_help():
    """Print comprehensive help information."""
    print(f"doc-utils v{__version__}")
    print("\nCLI tools for maintaining clean, consistent AsciiDoc documentation repositories.")

    print_tools_list()

    # Static help text emitted one line at a time.
    for line in (
        "📚 Usage:",
        " doc-utils --version Show version information",
        " doc-utils --list List all available tools",
        " doc-utils --help Show this help message",
        " <tool-name> --help Show help for a specific tool",
        "",
        "📖 Documentation:",
        " https://rolfedh.github.io/doc-utils/",
        "",
        "💡 Examples:",
        " find-unused-attributes",
        " check-scannability --max-sentence-length 5",
        " format-asciidoc-spacing --dry-run modules/",
        "",
        "⚠️ Safety First:",
        " Always work in a git branch and review changes with 'git diff'",
        "",
    ):
        print(line)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def main():
    """Main entry point for doc-utils command."""
    # Best-effort update notice; never blocks or raises.
    check_version_on_startup()

    parser = argparse.ArgumentParser(
        description='doc-utils - CLI tools for AsciiDoc documentation projects',
        add_help=False  # custom --help handling below
    )
    parser.add_argument(
        '--version',
        action='version',
        version=f'doc-utils {__version__}'
    )
    parser.add_argument(
        '--list',
        action='store_true',
        help='List all available tools'
    )
    parser.add_argument(
        '--help', '-h',
        action='store_true',
        help='Show this help message'
    )

    # parse_known_args leaves room for future subcommand expansion.
    args, remaining = parser.parse_known_args()

    if args.list:
        print_tools_list()
        return 0

    # No arguments at all behaves like an explicit --help.
    if args.help or len(sys.argv) == 1:
        print_help()
        return 0

    # Anything left over is an unknown command or option.
    if remaining:
        print(f"doc-utils: unknown command or option: {' '.join(remaining)}")
        print("\nRun 'doc-utils --help' for usage information.")
        return 1

    return 0
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == '__main__':
    sys.exit(main())
|