rolfedh-doc-utils 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- doc_utils/extract_link_attributes.py +453 -0
- doc_utils/unused_attributes.py +94 -6
- doc_utils/validate_links.py +576 -0
- extract_link_attributes.py +93 -0
- find_unused_attributes.py +47 -6
- {rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/METADATA +5 -3
- {rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/RECORD +12 -8
- {rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/entry_points.txt +2 -0
- {rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/top_level.txt +2 -0
- validate_links.py +202 -0
- {rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/WHEEL +0 -0
- {rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/licenses/LICENSE +0 -0
find_unused_attributes.py
CHANGED
@@ -1,23 +1,61 @@
 """
 Find Unused AsciiDoc Attributes
 
-Scans
+Scans an attributes file for attribute definitions (e.g., :version: 1.1), then recursively scans all .adoc files in the current directory (ignoring symlinks) for usages of those attributes (e.g., {version}).
+
+If no attributes file is specified, the tool will auto-discover attributes files in the repository and let you choose one interactively.
 
 Any attribute defined but not used in any .adoc file is reported as NOT USED in both the command line output and a timestamped output file.
 """
 
 import argparse
 import os
+import sys
 from datetime import datetime
-from doc_utils.unused_attributes import find_unused_attributes
+from doc_utils.unused_attributes import find_unused_attributes, find_attributes_files, select_attributes_file
 
 def main():
     parser = argparse.ArgumentParser(description='Find unused AsciiDoc attributes.')
-    parser.add_argument(
+    parser.add_argument(
+        'attributes_file',
+        nargs='?',  # Make it optional
+        help='Path to the attributes file. If not specified, auto-discovers attributes files.'
+    )
     parser.add_argument('-o', '--output', action='store_true', help='Write results to a timestamped txt file in your home directory.')
     args = parser.parse_args()
 
-
+    # Determine which attributes file to use
+    if args.attributes_file:
+        # User specified a file
+        attr_file = args.attributes_file
+    else:
+        # Auto-discover attributes files
+        print("Searching for attributes files...")
+        attributes_files = find_attributes_files('.')
+
+        if not attributes_files:
+            print("No attributes files found in the repository.")
+            print("You can specify a file directly: find-unused-attributes <path-to-attributes-file>")
+            return 1
+
+        attr_file = select_attributes_file(attributes_files)
+        if not attr_file:
+            print("No attributes file selected.")
+            return 1
+
+    try:
+        unused = find_unused_attributes(attr_file, '.')
+    except FileNotFoundError as e:
+        print(f"Error: {e}")
+        print(f"\nPlease ensure the file '{attr_file}' exists.")
+        print("Usage: find-unused-attributes [<path-to-attributes-file>]")
+        return 1
+    except (ValueError, PermissionError) as e:
+        print(f"Error: {e}")
+        return 1
+    except Exception as e:
+        print(f"Unexpected error: {e}")
+        return 1
 
     lines = [f":{attr}: NOT USED" for attr in unused]
     output = '\n'.join(lines)
@@ -33,9 +71,12 @@ def main():
         home_dir = os.path.expanduser('~')
         filename = os.path.join(home_dir, f'unused_attributes_{timestamp}.txt')
         with open(filename, 'w', encoding='utf-8') as f:
-            f.write('Unused attributes in ' +
+            f.write('Unused attributes in ' + attr_file + '\n')
             f.write(output + '\n')
         print(f'Results written to: {filename}')
 
+    return 0
+
 if __name__ == '__main__':
-
+    import sys
+    sys.exit(main())
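The docstring above describes the matching scheme: definitions such as `:version: 1.1` in the attributes file, usages such as `{version}` in `.adoc` files. As a rough illustration, here is a minimal sketch of that scheme; it is hypothetical, not the packaged `doc_utils.unused_attributes` implementation, whose source this diff does not include:

```python
import os
import re

def sketch_find_unused(attr_file, root='.'):
    """Hypothetical sketch of the documented behavior: collect attribute
    definitions (:name: value), collect {name} usages, report the difference."""
    defined = set()
    with open(attr_file, encoding='utf-8') as f:
        for line in f:
            m = re.match(r':([\w-]+):', line)  # assumed definition pattern
            if m:
                defined.add(m.group(1))

    used = set()
    for dirpath, _dirnames, filenames in os.walk(root, followlinks=False):
        for name in filenames:
            path = os.path.join(dirpath, name)
            if not name.endswith('.adoc') or os.path.islink(path):
                continue  # per the docstring, symlinks are ignored
            with open(path, encoding='utf-8') as f:
                used.update(re.findall(r'\{([\w-]+)\}', f.read()))

    return sorted(defined - used)  # defined but never referenced
```

The real module also exports `find_attributes_files` and `select_attributes_file`, which back the interactive auto-discovery path added to the CLI above.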
{rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rolfedh-doc-utils
-Version: 0.1.9
+Version: 0.1.11
 Summary: CLI tools for AsciiDoc documentation projects
 Author: Rolfe Dlugy-Hegwer
 License: MIT License
@@ -79,8 +79,10 @@ pip install -e .
 
 | Tool | Description | Usage |
 |------|-------------|-------|
-| **`
-| **`
+| **`validate-links`** [EXPERIMENTAL] | Validates all links in documentation, with URL transposition for preview environments | `validate-links --transpose "https://prod--https://preview"` |
+| **`extract-link-attributes`** | Extracts link/xref macros with attributes into reusable definitions | `extract-link-attributes --dry-run` |
+| **`replace-link-attributes`** | Resolves Vale LinkAttribute issues by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
+| **`format-asciidoc-spacing`** [EXPERIMENTAL] | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
 | **`archive-unused-images`** | Finds and archives unreferenced images | `archive-unused-images` (preview)<br>`archive-unused-images --archive` (execute) |
{rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/RECORD
CHANGED
@@ -1,21 +1,25 @@
 archive_unused_files.py,sha256=KMC5a1WL3rZ5owoVnncvfpT1YeMKbVXq9giHvadDgbM,1936
 archive_unused_images.py,sha256=PG2o3haovYckgfhoPhl6KRG_a9czyZuqlLkzkupKTCY,1526
 check_scannability.py,sha256=gcM-vFXKHGP_yFBz7-V5xbXWhIMmtMzBYIGwP9CFbzI,5140
-
+extract_link_attributes.py,sha256=utDM1FE-VEr649HhIH5BreXvxDNLnnAJO9dB5rs5f9Q,2535
+find_unused_attributes.py,sha256=V8qI7O0u18ExbSho-hLfyBeRVqowLKGrFugY55JxZN0,3023
 format_asciidoc_spacing.py,sha256=ROp-cdMs2_hk8H4z5ljT0iDgGtsiECZ8TVjjcN_oOWE,3874
 replace_link_attributes.py,sha256=vg_aufw7dKXvh_epCKRNq_hEBMU_9crZ_JyJPpxSMNk,6454
+validate_links.py,sha256=DoSB0h3mmjzTY2f0oN6ybTP6jCNkzN7T3qM6oXc2AwE,5585
 doc_utils/__init__.py,sha256=qqZR3lohzkP63soymrEZPBGzzk6-nFzi4_tSffjmu_0,74
+doc_utils/extract_link_attributes.py,sha256=qBpJuTXNrhy15klpqC0iELZzcSLztEzMSmhEnKyQZT0,15574
 doc_utils/file_utils.py,sha256=fpTh3xx759sF8sNocdn_arsP3KAv8XA6cTQTAVIZiZg,4247
 doc_utils/format_asciidoc_spacing.py,sha256=XnVJekaj39aDzjV3xFKl58flM41AaJzejxNYJIIAMz0,10139
 doc_utils/replace_link_attributes.py,sha256=kBiePbxjQn3O2rzqmYY8Mqy_mJgZ6yw048vSZ5SSB5E,6587
 doc_utils/scannability.py,sha256=XwlmHqDs69p_V36X7DLjPTy0DUoLszSGqYjJ9wE-3hg,982
 doc_utils/topic_map_parser.py,sha256=tKcIO1m9r2K6dvPRGue58zqMr0O2zKU1gnZMzEE3U6o,4571
 doc_utils/unused_adoc.py,sha256=2cbqcYr1os2EhETUU928BlPRlsZVSdI00qaMhqjSIqQ,5263
-doc_utils/unused_attributes.py,sha256=
+doc_utils/unused_attributes.py,sha256=EjTtWIKW_aXsR1JOgw5RSDVAqitJ_NfRMVOXVGaiWTY,5282
 doc_utils/unused_images.py,sha256=nqn36Bbrmon2KlGlcaruNjJJvTQ8_9H0WU9GvCW7rW8,1456
-
-rolfedh_doc_utils-0.1.
-rolfedh_doc_utils-0.1.
-rolfedh_doc_utils-0.1.
-rolfedh_doc_utils-0.1.
-rolfedh_doc_utils-0.1.
+doc_utils/validate_links.py,sha256=iBGXnwdeLlgIT3fo3v01ApT5k0X2FtctsvkrE6E3VMk,19610
+rolfedh_doc_utils-0.1.11.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
+rolfedh_doc_utils-0.1.11.dist-info/METADATA,sha256=22seO4nEGTjlibUZ8tPRxTFyYpmLRsfY7sZssteQl1g,7386
+rolfedh_doc_utils-0.1.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+rolfedh_doc_utils-0.1.11.dist-info/entry_points.txt,sha256=2J4Ojc3kkuArpe2xcUOPc0LxSWCmnctvw8hy8zpnbO4,418
+rolfedh_doc_utils-0.1.11.dist-info/top_level.txt,sha256=1w0JWD7w7gnM5Sga2K4fJieNZ7CHPTAf0ozYk5iIlmo,182
+rolfedh_doc_utils-0.1.11.dist-info/RECORD,,
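Each RECORD row has the form `path,digest,size`; per the wheel spec, the digest is a SHA-256 hash encoded as URL-safe base64 with the `=` padding stripped (the `RECORD` file itself is listed without a digest, hence the trailing `,,`). A minimal sketch for recomputing one of the digests above:

```python
import base64
import hashlib

def record_digest(path):
    """Compute a wheel RECORD-style digest: sha256, urlsafe base64, unpadded."""
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')

# For an installed copy of doc_utils/unused_images.py this should print
# sha256=nqn36Bbrmon2KlGlcaruNjJJvTQ8_9H0WU9GvCW7rW8 (see the RECORD row above).
print(record_digest('doc_utils/unused_images.py'))
```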
{rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/entry_points.txt
CHANGED
@@ -2,6 +2,8 @@
 archive-unused-files = archive_unused_files:main
 archive-unused-images = archive_unused_images:main
 check-scannability = check_scannability:main
+extract-link-attributes = extract_link_attributes:main
 find-unused-attributes = find_unused_attributes:main
 format-asciidoc-spacing = format_asciidoc_spacing:main
 replace-link-attributes = replace_link_attributes:main
+validate-links = validate_links:main
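Each `[console_scripts]` line maps a command name to a `module:function` target, so installing this wheel gives pip enough information to generate a `validate-links` launcher that imports `validate_links` and calls its `main()`. The registered commands can be inspected at runtime; a small example using the Python 3.10+ `importlib.metadata` selection API:

```python
from importlib.metadata import entry_points

# After installing the wheel, this prints lines such as
# "validate-links -> validate_links:main" for each registered command.
for ep in entry_points(group='console_scripts'):
    print(f"{ep.name} -> {ep.value}")
```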
validate_links.py
ADDED
@@ -0,0 +1,202 @@
+#!/usr/bin/env python3
+"""
+Validate links in AsciiDoc documentation.
+
+This tool checks all links in AsciiDoc files for validity, including:
+- External HTTP/HTTPS links
+- Internal cross-references (xref)
+- Image paths
+"""
+
+import argparse
+import sys
+import json
+from doc_utils.validate_links import LinkValidator, parse_transpositions, format_results
+
+
+def main():
+    """Main entry point for the validate-links CLI tool."""
+    parser = argparse.ArgumentParser(
+        description='Validate links in AsciiDoc documentation',
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+  # Basic validation
+  validate-links
+
+  # Validate against preview environment
+  validate-links --transpose "https://docs.redhat.com--https://preview.docs.redhat.com"
+
+  # Multiple transpositions
+  validate-links \\
+    --transpose "https://docs.redhat.com--https://preview.docs.redhat.com" \\
+    --transpose "https://access.redhat.com--https://stage.access.redhat.com"
+
+  # With specific options
+  validate-links \\
+    --transpose "https://docs.example.com--https://preview.example.com" \\
+    --attributes-file common-attributes.adoc \\
+    --timeout 15 \\
+    --retry 3 \\
+    --parallel 20 \\
+    --exclude-domain localhost \\
+    --exclude-domain example.com
+
+  # Export results to JSON
+  validate-links --output report.json --format json
+"""
+    )
+
+    parser.add_argument(
+        '--transpose',
+        action='append',
+        help='Transpose URLs from production to preview/staging (format: from_url--to_url)'
+    )
+
+    parser.add_argument(
+        '--attributes-file',
+        help='Path to the AsciiDoc attributes file'
+    )
+
+    parser.add_argument(
+        '--scan-dir',
+        action='append',
+        help='Directory to scan for .adoc files (can be used multiple times, default: current directory)'
+    )
+
+    parser.add_argument(
+        '--timeout',
+        type=int,
+        default=10,
+        help='Timeout in seconds for each URL check (default: 10)'
+    )
+
+    parser.add_argument(
+        '--retry',
+        type=int,
+        default=3,
+        help='Number of retries for failed URLs (default: 3)'
+    )
+
+    parser.add_argument(
+        '--parallel',
+        type=int,
+        default=10,
+        help='Number of parallel URL checks (default: 10)'
+    )
+
+    parser.add_argument(
+        '--cache-duration',
+        type=int,
+        default=3600,
+        help='Cache duration in seconds (default: 3600)'
+    )
+
+    parser.add_argument(
+        '--exclude-domain',
+        action='append',
+        dest='exclude_domains',
+        help='Domain to exclude from validation (can be used multiple times)'
+    )
+
+    parser.add_argument(
+        '--no-cache',
+        action='store_true',
+        help='Disable caching of validation results'
+    )
+
+    parser.add_argument(
+        '--output',
+        help='Output file for results'
+    )
+
+    parser.add_argument(
+        '--format',
+        choices=['text', 'json', 'junit'],
+        default='text',
+        help='Output format (default: text)'
+    )
+
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help='Show verbose output including warnings'
+    )
+
+    parser.add_argument(
+        '--fail-on-broken',
+        action='store_true',
+        help='Exit with error code if broken links are found'
+    )
+
+    args = parser.parse_args()
+
+    # Parse transpositions
+    transpositions = parse_transpositions(args.transpose)
+
+    # Show configuration
+    print("Validating links in documentation...")
+    if args.attributes_file:
+        print(f"Loading attributes from {args.attributes_file}")
+    if transpositions:
+        print("\nURL Transposition Rules:")
+        for from_url, to_url in transpositions:
+            print(f"  {from_url} → {to_url}")
+        print()
+
+    # Create validator
+    validator = LinkValidator(
+        timeout=args.timeout,
+        retry=args.retry,
+        parallel=args.parallel,
+        cache_duration=args.cache_duration if not args.no_cache else 0,
+        transpositions=transpositions
+    )
+
+    try:
+        # Run validation
+        results = validator.validate_all(
+            scan_dirs=args.scan_dir,
+            attributes_file=args.attributes_file,
+            exclude_domains=args.exclude_domains
+        )
+
+        # Format output
+        if args.format == 'json':
+            output = json.dumps(results, indent=2)
+        elif args.format == 'junit':
+            # TODO: Implement JUnit XML format
+            output = format_results(results, verbose=args.verbose)
+        else:
+            output = format_results(results, verbose=args.verbose)
+
+        # Save or print output
+        if args.output:
+            with open(args.output, 'w', encoding='utf-8') as f:
+                f.write(output)
+            print(f"Results saved to {args.output}")
+            # Still print summary to console
+            if args.format != 'text':
+                summary = results['summary']
+                print(f"\nSummary: {summary['valid']} valid, {summary['broken']} broken, "
+                      f"{summary['warnings']} warnings")
+        else:
+            print(output)
+
+        # Exit code
+        if args.fail_on_broken and results['summary']['broken'] > 0:
+            sys.exit(1)
+
+    except KeyboardInterrupt:
+        print("\nValidation cancelled.")
+        sys.exit(1)
+    except Exception as e:
+        print(f"Error: {e}", file=sys.stderr)
+        if args.verbose:
+            import traceback
+            traceback.print_exc()
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
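The `--transpose` rules use a `from_url--to_url` syntax. The packaged `parse_transpositions` is not part of this diff, but given that syntax it plausibly behaves like the following sketch (hypothetical; it assumes `--` never appears inside the URLs themselves):

```python
def parse_transpositions(values):
    """Hypothetical sketch: turn ['from--to', ...] into [(from, to), ...]."""
    rules = []
    for value in values or []:  # --transpose is optional, so values may be None
        from_url, sep, to_url = value.partition('--')
        if not sep or not from_url or not to_url:
            raise ValueError(f"Expected from_url--to_url, got: {value!r}")
        rules.append((from_url, to_url))
    return rules

# Each checked URL that starts with from_url would then be rewritten to to_url:
rules = parse_transpositions(["https://docs.example.com--https://preview.example.com"])
url = "https://docs.example.com/guide/index.html"
for from_url, to_url in rules:
    if url.startswith(from_url):
        url = to_url + url[len(from_url):]
print(url)  # https://preview.example.com/guide/index.html
```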
{rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/WHEEL
File without changes
{rolfedh_doc_utils-0.1.9.dist-info → rolfedh_doc_utils-0.1.11.dist-info}/licenses/LICENSE
File without changes