rolfedh-doc-utils 0.1.8__tar.gz → 0.1.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rolfedh_doc_utils-0.1.8/rolfedh_doc_utils.egg-info → rolfedh_doc_utils-0.1.9}/PKG-INFO +2 -1
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/README.md +1 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9/doc_utils}/format_asciidoc_spacing.py +59 -155
- rolfedh_doc_utils-0.1.9/doc_utils/replace_link_attributes.py +168 -0
- rolfedh_doc_utils-0.1.9/format_asciidoc_spacing.py +128 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/pyproject.toml +3 -2
- rolfedh_doc_utils-0.1.9/replace_link_attributes.py +186 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9/rolfedh_doc_utils.egg-info}/PKG-INFO +2 -1
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/rolfedh_doc_utils.egg-info/SOURCES.txt +3 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/rolfedh_doc_utils.egg-info/entry_points.txt +1 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/rolfedh_doc_utils.egg-info/top_level.txt +1 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/LICENSE +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/__init__.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/scannability.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/unused_adoc.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/doc_utils/unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/find_unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/rolfedh_doc_utils.egg-info/dependency_links.txt +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/rolfedh_doc_utils.egg-info/requires.txt +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/setup.cfg +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/setup.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_auto_discovery.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_cli_entry_points.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_fixture_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_fixture_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_fixture_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_parse_exclude_list.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_symlink_handling.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/tests/test_unused_attributes.py +0 -0
{rolfedh_doc_utils-0.1.8/rolfedh_doc_utils.egg-info → rolfedh_doc_utils-0.1.9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rolfedh-doc-utils
-Version: 0.1.8
+Version: 0.1.9
 Summary: CLI tools for AsciiDoc documentation projects
 Author: Rolfe Dlugy-Hegwer
 License: MIT License
@@ -79,6 +79,7 @@ pip install -e .
 
 | Tool | Description | Usage |
 |------|-------------|-------|
+| **`replace-link-attributes`** | Resolves Vale LinkAttribute violations by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
 | **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
{rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/README.md

@@ -46,6 +46,7 @@ pip install -e .
 
 | Tool | Description | Usage |
 |------|-------------|-------|
+| **`replace-link-attributes`** | Resolves Vale LinkAttribute violations by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
 | **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
{rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9/doc_utils}/format_asciidoc_spacing.py

@@ -1,88 +1,75 @@
-
+"""
+Format AsciiDoc spacing - ensures blank lines after headings and around include directives.
 
-
+Core logic for formatting AsciiDoc files with proper spacing.
+"""
 
-import argparse
-import os
 import re
-import sys
 from pathlib import Path
 from typing import List, Tuple
 
 
-
-class Colors:
-    RED = '\033[0;31m'
-    GREEN = '\033[0;32m'
-    YELLOW = '\033[1;33m'
-    NC = '\033[0m'  # No Color
-
-
-def print_colored(message: str, color: str = Colors.NC) -> None:
-    """Print message with color"""
-    print(f"{color}{message}{Colors.NC}")
-
-
-def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False) -> bool:
+def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False) -> Tuple[bool, List[str]]:
     """
     Process a single AsciiDoc file to fix spacing issues.
-
+
     Args:
         file_path: Path to the file to process
        dry_run: If True, show what would be changed without modifying
        verbose: If True, show detailed output
-
+
     Returns:
-
+        Tuple of (changes_made, messages) where messages is a list of verbose output
     """
+    messages = []
+
    if verbose:
-
-
+        messages.append(f"Processing: {file_path}")
+
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            lines = f.readlines()
    except (IOError, UnicodeDecodeError) as e:
-
-
-
+        raise IOError(f"Error reading {file_path}: {e}")
+
    # Remove trailing newlines from lines for processing
    lines = [line.rstrip('\n\r') for line in lines]
-
+
    new_lines = []
    changes_made = False
    in_block = False  # Track if we're inside a block (admonition, listing, etc.)
    in_conditional = False  # Track if we're inside a conditional block
-
+
    for i, current_line in enumerate(lines):
        prev_line = lines[i-1] if i > 0 else ""
        next_line = lines[i+1] if i + 1 < len(lines) else ""
-
+
        # Check for conditional start (ifdef:: or ifndef::)
        if re.match(r'^(ifdef::|ifndef::)', current_line):
            in_conditional = True
            # Add blank line before conditional if needed
-            if (prev_line and
+            if (prev_line and
                not re.match(r'^\s*$', prev_line) and
                not re.match(r'^(ifdef::|ifndef::|endif::)', prev_line)):
                new_lines.append("")
                changes_made = True
                if verbose:
-
+                    messages.append("  Added blank line before conditional block")
            new_lines.append(current_line)
-
+
        # Check for conditional end (endif::)
        elif re.match(r'^endif::', current_line):
            new_lines.append(current_line)
            in_conditional = False
            # Add blank line after conditional if needed
-            if (next_line and
+            if (next_line and
                not re.match(r'^\s*$', next_line) and
                not re.match(r'^(ifdef::|ifndef::|endif::)', next_line)):
                new_lines.append("")
                changes_made = True
                if verbose:
-
-
+                    messages.append("  Added blank line after conditional block")
+
        # Check for block delimiters (====, ----, ...., ____)
        # These are used for admonitions, listing blocks, literal blocks, etc.
        elif re.match(r'^(====+|----+|\.\.\.\.+|____+)$', current_line):
@@ -91,17 +78,17 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
        # Check if current line is a heading (but not if we're in a block)
        elif not in_block and re.match(r'^=+\s+', current_line):
            new_lines.append(current_line)
-
+
            # Check if next line is not empty and not another heading
-            if (next_line and
-                not re.match(r'^=+\s+', next_line) and
+            if (next_line and
+                not re.match(r'^=+\s+', next_line) and
                not re.match(r'^\s*$', next_line)):
                new_lines.append("")
                changes_made = True
                if verbose:
                    truncated = current_line[:50] + "..." if len(current_line) > 50 else current_line
-
-
+                    messages.append(f"  Added blank line after heading: {truncated}")
+
        # Check if current line is a comment (AsciiDoc comments start with //)
        elif re.match(r'^//', current_line):
            # Skip special handling if we're inside a conditional block
@@ -111,16 +98,16 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
            # Check if next line is an include directive
            if next_line and re.match(r'^include::', next_line):
                # This comment belongs to the include, add blank line before comment if needed
-                if (prev_line and
-                    not re.match(r'^\s*$', prev_line) and
+                if (prev_line and
+                    not re.match(r'^\s*$', prev_line) and
                    not re.match(r'^//', prev_line) and
                    not re.match(r'^:', prev_line)):  # Don't add if previous is attribute
                    new_lines.append("")
                    changes_made = True
                    if verbose:
-
+                        messages.append("  Added blank line before comment above include")
            new_lines.append(current_line)
-
+
        # Check if current line is an attribute (starts with :)
        elif re.match(r'^:', current_line):
            # Skip special handling if we're inside a conditional block
@@ -130,16 +117,16 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
            # Check if next line is an include directive
            if next_line and re.match(r'^include::', next_line):
                # This attribute belongs to the include, add blank line before attribute if needed
-                if (prev_line and
-                    not re.match(r'^\s*$', prev_line) and
+                if (prev_line and
+                    not re.match(r'^\s*$', prev_line) and
                    not re.match(r'^//', prev_line) and
                    not re.match(r'^:', prev_line)):  # Don't add if previous is comment or attribute
                    new_lines.append("")
                    changes_made = True
                    if verbose:
-
+                        messages.append("  Added blank line before attribute above include")
            new_lines.append(current_line)
-
+
        # Check if current line is an include directive
        elif re.match(r'^include::', current_line):
            # Skip special handling if we're inside a conditional block
@@ -148,7 +135,7 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
            else:
                # Check if this is an attribute include (contains "attribute" in the path)
                is_attribute_include = 'attribute' in current_line.lower()
-
+
                # Check if this appears near the top of the file (within first 10 lines after H1)
                # Find the H1 heading position
                h1_position = -1
@@ -156,45 +143,45 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
                    if re.match(r'^=\s+', lines[j]):  # H1 heading starts with single =
                        h1_position = j
                        break
-
+
                # If this is an attribute include near the H1 heading, don't add surrounding blank lines
                is_near_h1 = h1_position >= 0 and (i - h1_position) <= 2
-
+
                # Check if previous line is a comment or attribute (which belongs to this include)
                has_comment_above = prev_line and re.match(r'^//', prev_line)
                has_attribute_above = prev_line and re.match(r'^:', prev_line)
-
+
                # If it's an attribute include near H1, only the heading's blank line is needed
                if not (is_attribute_include and is_near_h1):
                    # Don't add blank line if there's a comment or attribute above (it was handled by the comment/attribute logic)
                    if not has_comment_above and not has_attribute_above:
                        # Add blank line before include if previous line is not empty and not an include
-                        if (prev_line and
-                            not re.match(r'^\s*$', prev_line) and
+                        if (prev_line and
+                            not re.match(r'^\s*$', prev_line) and
                            not re.match(r'^include::', prev_line)):
                            new_lines.append("")
                            changes_made = True
                            if verbose:
                                truncated = current_line[:50] + "..." if len(current_line) > 50 else current_line
-
-
+                                messages.append(f"  Added blank line before include: {truncated}")
+
                new_lines.append(current_line)
-
+
                # If it's an attribute include near H1, don't add blank line after
                if not (is_attribute_include and is_near_h1):
                    # Add blank line after include if next line exists and is not empty and not an include
-                    if (next_line and
-                        not re.match(r'^\s*$', next_line) and
+                    if (next_line and
+                        not re.match(r'^\s*$', next_line) and
                        not re.match(r'^include::', next_line)):
                        new_lines.append("")
                        changes_made = True
                        if verbose:
                            truncated = current_line[:50] + "..." if len(current_line) > 50 else current_line
-
-
+                            messages.append(f"  Added blank line after include: {truncated}")
+
        else:
            new_lines.append(current_line)
-
+
    # Apply changes if any were made
    if changes_made:
        # Clean up any consecutive blank lines we may have added
@@ -207,112 +194,29 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
                # Skip this blank line as we already have one
                continue
            cleaned_lines.append(line)
-
-        if dry_run:
-            print_colored(f"Would modify: {file_path}", Colors.YELLOW)
-        else:
+
+        if not dry_run:
            try:
                with open(file_path, 'w', encoding='utf-8') as f:
                    for line in cleaned_lines:
                        f.write(line + '\n')
-                print_colored(f"Modified: {file_path}", Colors.GREEN)
            except IOError as e:
-
-                return False
+                raise IOError(f"Error writing {file_path}: {e}")
    else:
        if verbose:
-
-
-    return changes_made
+            messages.append("  No changes needed")
+
+    return changes_made, messages
 
 
 def find_adoc_files(path: Path) -> List[Path]:
    """Find all .adoc files in the given path"""
    adoc_files = []
-
+
    if path.is_file():
        if path.suffix == '.adoc':
            adoc_files.append(path)
-        else:
-            print_colored(f"Warning: {path} is not an AsciiDoc file (.adoc)", Colors.YELLOW)
    elif path.is_dir():
        adoc_files = list(path.rglob('*.adoc'))
-
-    return adoc_files
-
-
-def main():
-    """Main entry point"""
-    parser = argparse.ArgumentParser(
-        description="Format AsciiDoc files to ensure proper spacing",
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog="""
-Format AsciiDoc files to ensure proper spacing:
-- Blank line after headings (=, ==, ===, etc.)
-- Blank lines around include:: directives
-
-Examples:
-  %(prog)s                          # Process all .adoc files in current directory
-  %(prog)s modules/                 # Process all .adoc files in modules/
-  %(prog)s assemblies/my-guide.adoc # Process single file
-  %(prog)s --dry-run modules/       # Preview changes without modifying
-"""
-    )
-
-    parser.add_argument(
-        'path',
-        nargs='?',
-        default='.',
-        help='File or directory to process (default: current directory)'
-    )
-    parser.add_argument(
-        '-n', '--dry-run',
-        action='store_true',
-        help='Show what would be changed without modifying files'
-    )
-    parser.add_argument(
-        '-v', '--verbose',
-        action='store_true',
-        help='Show detailed output'
-    )
-
-    args = parser.parse_args()
-
-    # Convert path to Path object
-    target_path = Path(args.path)
-
-    # Check if path exists
-    if not target_path.exists():
-        print_colored(f"Error: Path does not exist: {target_path}", Colors.RED)
-        sys.exit(1)
-
-    # Display dry-run mode message
-    if args.dry_run:
-        print_colored("DRY RUN MODE - No files will be modified", Colors.YELLOW)
-
-    # Find all AsciiDoc files
-    adoc_files = find_adoc_files(target_path)
-
-    if not adoc_files:
-        print(f"Processed 0 AsciiDoc file(s)")
-        print("AsciiDoc spacing formatting complete!")
-        return
-
-    # Process each file
-    files_processed = 0
-    for file_path in adoc_files:
-        try:
-            process_file(file_path, args.dry_run, args.verbose)
-            files_processed += 1
-        except KeyboardInterrupt:
-            print_colored("\nOperation cancelled by user", Colors.YELLOW)
-            sys.exit(1)
-        except Exception as e:
-            print_colored(f"Unexpected error processing {file_path}: {e}", Colors.RED)
-
-    print(f"Processed {files_processed} AsciiDoc file(s)")
-    print("AsciiDoc spacing formatting complete!")
-
-
-if __name__ == "__main__":
-    main()
+
+    return adoc_files
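The substantive change in this module is that `process_file` no longer prints or colors its own output: it returns a `(changes_made, messages)` tuple and raises `IOError` on read/write failures, leaving presentation to the caller. A minimal sketch of how a caller might consume the new signature (the `modules/` path is illustrative, and this assumes the 0.1.9 package is importable):

```python
from pathlib import Path

from doc_utils.format_asciidoc_spacing import find_adoc_files, process_file

# Dry run over an illustrative directory; nothing is written back.
for adoc in find_adoc_files(Path("modules/")):
    try:
        changes_made, messages = process_file(adoc, dry_run=True, verbose=True)
    except IOError as err:  # the refactor raises instead of printing errors
        print(err)
        continue
    for msg in messages:  # verbose detail is returned, not printed, by the library
        print(msg)
    if changes_made:
        print(f"Would modify: {adoc}")
```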
rolfedh_doc_utils-0.1.9/doc_utils/replace_link_attributes.py

@@ -0,0 +1,168 @@
+"""
+Replace AsciiDoc attributes within link URLs with their actual values.
+
+This module finds and replaces attribute references (like {attribute-name}) that appear
+in the URL portion of AsciiDoc link macros (link: and xref:) with their resolved values
+from attributes.adoc. Link text is preserved unchanged.
+"""
+
+import re
+from pathlib import Path
+from typing import Dict, List, Tuple, Optional
+
+
+def find_attributes_files(root_dir: Path) -> List[Path]:
+    """Find all attributes.adoc files in the repository."""
+    attributes_files = []
+
+    for path in root_dir.rglob('**/attributes.adoc'):
+        # Skip hidden directories and common build directories
+        parts = path.parts
+        if any(part.startswith('.') or part in ['target', 'build', 'node_modules'] for part in parts):
+            continue
+        attributes_files.append(path)
+
+    return attributes_files
+
+
+def load_attributes(attributes_file: Path) -> Dict[str, str]:
+    """Load attribute definitions from an attributes.adoc file."""
+    attributes = {}
+
+    with open(attributes_file, 'r', encoding='utf-8') as f:
+        for line in f:
+            # Match attribute definitions
+            # Format: :attribute-name: value
+            match = re.match(r'^:([a-zA-Z0-9_-]+):\s*(.*)$', line)
+            if match:
+                attr_name = match.group(1)
+                attr_value = match.group(2).strip()
+                attributes[attr_name] = attr_value
+
+    return attributes
+
+
+def resolve_nested_attributes(attributes: Dict[str, str], max_iterations: int = 10) -> Dict[str, str]:
+    """Resolve nested attribute references within attribute values."""
+    for _ in range(max_iterations):
+        changes_made = False
+
+        for attr_name, attr_value in attributes.items():
+            # Find all attribute references in the value
+            refs = re.findall(r'\{([a-zA-Z0-9_-]+)\}', attr_value)
+
+            for ref in refs:
+                if ref in attributes:
+                    new_value = attr_value.replace(f'{{{ref}}}', attributes[ref])
+                    if new_value != attr_value:
+                        attributes[attr_name] = new_value
+                        changes_made = True
+                        attr_value = new_value
+
+        if not changes_made:
+            break
+
+    return attributes
+
+
+def replace_link_attributes_in_file(file_path: Path, attributes: Dict[str, str], dry_run: bool = False) -> int:
+    """Replace attribute references within link macros in a single file."""
+    with open(file_path, 'r', encoding='utf-8') as f:
+        content = f.read()
+
+    original_content = content
+    replacement_count = 0
+
+    # Find all link macros containing attributes in the URL portion only
+    # Match link: and xref: macros, capturing URL and text separately
+    link_patterns = [
+        # link:url[text] - replace only in URL portion
+        (r'link:([^[\]]*)\[([^\]]*)\]', 'link'),
+        # xref:target[text] - replace only in target portion
+        (r'xref:([^[\]]*)\[([^\]]*)\]', 'xref'),
+        # link:url[] or xref:target[] - replace in URL/target portion
+        (r'(link|xref):([^[\]]*)\[\]', 'empty_text')
+    ]
+
+    for pattern, link_type in link_patterns:
+        matches = list(re.finditer(pattern, content))
+
+        # Process matches in reverse order to maintain string positions
+        for match in reversed(matches):
+            if link_type == 'empty_text':
+                # For links with empty text []
+                macro_type = match.group(1)  # 'link' or 'xref'
+                url_part = match.group(2)
+                text_part = ''
+
+                # Check if URL contains attributes
+                if re.search(r'\{[a-zA-Z0-9_-]+\}', url_part):
+                    modified_url = url_part
+
+                    # Replace attributes only in URL
+                    attr_matches = re.findall(r'\{([a-zA-Z0-9_-]+)\}', url_part)
+                    for attr_name in attr_matches:
+                        if attr_name in attributes:
+                            attr_pattern = re.escape(f'{{{attr_name}}}')
+                            modified_url = re.sub(attr_pattern, attributes[attr_name], modified_url)
+                            replacement_count += 1
+
+                    if modified_url != url_part:
+                        # Reconstruct the link with modified URL
+                        modified = f'{macro_type}:{modified_url}[]'
+                        start = match.start()
+                        end = match.end()
+                        content = content[:start] + modified + content[end:]
+            else:
+                # For links with text
+                url_part = match.group(1)
+                text_part = match.group(2)
+
+                # Check if URL contains attributes
+                if re.search(r'\{[a-zA-Z0-9_-]+\}', url_part):
+                    modified_url = url_part
+
+                    # Replace attributes only in URL
+                    attr_matches = re.findall(r'\{([a-zA-Z0-9_-]+)\}', url_part)
+                    for attr_name in attr_matches:
+                        if attr_name in attributes:
+                            attr_pattern = re.escape(f'{{{attr_name}}}')
+                            modified_url = re.sub(attr_pattern, attributes[attr_name], modified_url)
+                            replacement_count += 1
+
+                    if modified_url != url_part:
+                        # Reconstruct the link with modified URL but original text
+                        if link_type == 'link':
+                            modified = f'link:{modified_url}[{text_part}]'
+                        else:  # xref
+                            modified = f'xref:{modified_url}[{text_part}]'
+
+                        start = match.start()
+                        end = match.end()
+                        content = content[:start] + modified + content[end:]
+
+    # Write changes if not in dry-run mode
+    if content != original_content:
+        if not dry_run:
+            with open(file_path, 'w', encoding='utf-8') as f:
+                f.write(content)
+
+        return replacement_count
+
+    return 0
+
+
+def find_adoc_files(root_dir: Path, exclude_dirs: Optional[set] = None) -> List[Path]:
+    """Find all *.adoc files in the repository."""
+    if exclude_dirs is None:
+        exclude_dirs = {'.git', 'target', 'build', 'node_modules'}
+
+    adoc_files = []
+
+    for path in root_dir.rglob('*.adoc'):
+        # Check if any part of the path is in exclude_dirs
+        parts = set(path.parts)
+        if not parts.intersection(exclude_dirs):
+            adoc_files.append(path)
+
+    return adoc_files
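Taken together, the helpers in this new module form a small pipeline: locate an attributes file, load and flatten its definitions, then rewrite attribute references inside `link:`/`xref:` URLs file by file. A rough sketch of that flow (the repository path and attribute names are hypothetical; dry run only):

```python
from pathlib import Path

from doc_utils.replace_link_attributes import (
    find_adoc_files,
    load_attributes,
    replace_link_attributes_in_file,
    resolve_nested_attributes,
)

repo = Path(".")  # hypothetical repository root
attrs = load_attributes(repo / "attributes.adoc")  # e.g. {'org': 'example', 'docs-url': 'https://{org}.io'}
attrs = resolve_nested_attributes(attrs)           # 'docs-url' now resolves to 'https://example.io'

total = 0
for adoc in find_adoc_files(repo):
    # dry_run=True counts the replacements that would be made without writing
    total += replace_link_attributes_in_file(adoc, attrs, dry_run=True)

print(f"Would make {total} replacements")
```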
rolfedh_doc_utils-0.1.9/format_asciidoc_spacing.py

@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+"""
+format-asciidoc-spacing - Format AsciiDoc spacing.
+
+Ensures blank lines after headings and around include directives.
+"""
+
+import argparse
+import sys
+from pathlib import Path
+
+from doc_utils.format_asciidoc_spacing import process_file, find_adoc_files
+
+
+# Colors for output
+class Colors:
+    RED = '\033[0;31m'
+    GREEN = '\033[0;32m'
+    YELLOW = '\033[1;33m'
+    NC = '\033[0m'  # No Color
+
+
+def print_colored(message: str, color: str = Colors.NC) -> None:
+    """Print message with color"""
+    print(f"{color}{message}{Colors.NC}")
+
+
+def main():
+    """Main entry point"""
+    parser = argparse.ArgumentParser(
+        description="Format AsciiDoc files to ensure proper spacing",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Format AsciiDoc files to ensure proper spacing:
+- Blank line after headings (=, ==, ===, etc.)
+- Blank lines around include:: directives
+
+Examples:
+  %(prog)s                          # Process all .adoc files in current directory
+  %(prog)s modules/                 # Process all .adoc files in modules/
+  %(prog)s assemblies/my-guide.adoc # Process single file
+  %(prog)s --dry-run modules/       # Preview changes without modifying
+"""
+    )
+
+    parser.add_argument(
+        'path',
+        nargs='?',
+        default='.',
+        help='File or directory to process (default: current directory)'
+    )
+    parser.add_argument(
+        '-n', '--dry-run',
+        action='store_true',
+        help='Show what would be changed without modifying files'
+    )
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help='Show detailed output'
+    )
+
+    args = parser.parse_args()
+
+    # Convert path to Path object
+    target_path = Path(args.path)
+
+    # Check if path exists
+    if not target_path.exists():
+        print_colored(f"Error: Path does not exist: {target_path}", Colors.RED)
+        sys.exit(1)
+
+    # Display dry-run mode message
+    if args.dry_run:
+        print_colored("DRY RUN MODE - No files will be modified", Colors.YELLOW)
+
+    # Find all AsciiDoc files
+    adoc_files = find_adoc_files(target_path)
+
+    if not adoc_files:
+        if target_path.is_file():
+            print_colored(f"Warning: {target_path} is not an AsciiDoc file (.adoc)", Colors.YELLOW)
+        print(f"Processed 0 AsciiDoc file(s)")
+        print("AsciiDoc spacing formatting complete!")
+        return
+
+    # Process each file
+    files_processed = 0
+    files_modified = 0
+
+    for file_path in adoc_files:
+        try:
+            changes_made, messages = process_file(file_path, args.dry_run, args.verbose)
+
+            # Print verbose messages
+            if args.verbose:
+                for msg in messages:
+                    print(msg)
+
+            if changes_made:
+                files_modified += 1
+                if args.dry_run:
+                    print_colored(f"Would modify: {file_path}", Colors.YELLOW)
+                else:
+                    print_colored(f"Modified: {file_path}", Colors.GREEN)
+            elif args.verbose:
+                print(f"  No changes needed for: {file_path}")
+
+            files_processed += 1
+
+        except KeyboardInterrupt:
+            print_colored("\nOperation cancelled by user", Colors.YELLOW)
+            sys.exit(1)
+        except IOError as e:
+            print_colored(f"{e}", Colors.RED)
+        except Exception as e:
+            print_colored(f"Unexpected error processing {file_path}: {e}", Colors.RED)
+
+    print(f"Processed {files_processed} AsciiDoc file(s)")
+    if args.dry_run and files_modified > 0:
+        print(f"Would modify {files_modified} file(s)")
+    elif files_modified > 0:
+        print(f"Modified {files_modified} file(s)")
+    print("AsciiDoc spacing formatting complete!")
+
+
+if __name__ == "__main__":
+    main()
{rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "rolfedh-doc-utils"
-version = "0.1.8"
+version = "0.1.9"
 description = "CLI tools for AsciiDoc documentation projects"
 readme = "README.md"
 requires-python = ">=3.8"
@@ -20,10 +20,11 @@ archive-unused-files = "archive_unused_files:main"
 archive-unused-images = "archive_unused_images:main"
 find-unused-attributes = "find_unused_attributes:main"
 format-asciidoc-spacing = "format_asciidoc_spacing:main"
+replace-link-attributes = "replace_link_attributes:main"
 
 [tool.setuptools.packages.find]
 where = ["."]
 include = ["doc_utils*"]
 
 [tool.setuptools]
-py-modules = ["find_unused_attributes", "check_scannability", "archive_unused_files", "archive_unused_images", "format_asciidoc_spacing"]
+py-modules = ["find_unused_attributes", "check_scannability", "archive_unused_files", "archive_unused_images", "format_asciidoc_spacing", "replace_link_attributes"]
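The packaging change registers the new console script (`replace-link-attributes = "replace_link_attributes:main"`) and adds the module to `py-modules`, mirroring how the existing tools are wired up. One way to confirm the registration after reinstalling, as a sketch (uses `importlib.metadata`; the keyword-argument form of `entry_points()` shown here requires Python 3.10+, while older versions return a dict keyed by group):

```python
from importlib.metadata import entry_points

# Look up the console script added in 0.1.9; expected value: "replace_link_attributes:main"
for ep in entry_points(group="console_scripts", name="replace-link-attributes"):
    print(ep.name, "->", ep.value)
```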
rolfedh_doc_utils-0.1.9/replace_link_attributes.py

@@ -0,0 +1,186 @@
+#!/usr/bin/env python3
+"""
+replace-link-attributes - Replace AsciiDoc attributes within link URLs with their actual values.
+
+This script finds and replaces attribute references (like {attribute-name}) that appear
+in the URL portion of AsciiDoc link macros (link: and xref:) with their resolved values
+from attributes.adoc. Link text is preserved unchanged.
+"""
+
+import argparse
+import sys
+from pathlib import Path
+from typing import Optional
+
+from doc_utils.replace_link_attributes import (
+    find_attributes_files,
+    load_attributes,
+    resolve_nested_attributes,
+    replace_link_attributes_in_file,
+    find_adoc_files
+)
+
+
+def prompt_for_attributes_file(attributes_files: list[Path]) -> Optional[Path]:
+    """Prompt user to select or specify attributes file."""
+    if not attributes_files:
+        print("No attributes.adoc files found in the repository.")
+        response = input("Enter the path to your attributes.adoc file (or 'q' to quit): ").strip()
+        if response.lower() == 'q':
+            return None
+        path = Path(response)
+        if path.exists() and path.is_file():
+            return path
+        else:
+            print(f"Error: File not found: {response}")
+            return None
+
+    if len(attributes_files) == 1:
+        file_path = attributes_files[0]
+        response = input(f"Found attributes file: {file_path}\nUse this file? (y/n/q): ").strip().lower()
+        if response == 'y':
+            return file_path
+        elif response == 'q':
+            return None
+        else:
+            response = input("Enter the path to your attributes.adoc file (or 'q' to quit): ").strip()
+            if response.lower() == 'q':
+                return None
+            path = Path(response)
+            if path.exists() and path.is_file():
+                return path
+            else:
+                print(f"Error: File not found: {response}")
+                return None
+
+    # Multiple files found
+    print("\nFound multiple attributes.adoc files:")
+    for i, file_path in enumerate(attributes_files, 1):
+        print(f"  {i}. {file_path}")
+    print(f"  {len(attributes_files) + 1}. Enter custom path")
+
+    while True:
+        response = input(f"\nSelect option (1-{len(attributes_files) + 1}) or 'q' to quit: ").strip()
+        if response.lower() == 'q':
+            return None
+
+        try:
+            choice = int(response)
+            if 1 <= choice <= len(attributes_files):
+                return attributes_files[choice - 1]
+            elif choice == len(attributes_files) + 1:
+                response = input("Enter the path to your attributes.adoc file: ").strip()
+                path = Path(response)
+                if path.exists() and path.is_file():
+                    return path
+                else:
+                    print(f"Error: File not found: {response}")
+            else:
+                print(f"Invalid choice. Please enter a number between 1 and {len(attributes_files) + 1}")
+        except ValueError:
+            print("Invalid input. Please enter a number.")
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='Replace AsciiDoc attributes within link macros with their actual values.'
+    )
+    parser.add_argument(
+        '--dry-run', '-n',
+        action='store_true',
+        help='Show what would be changed without making actual modifications'
+    )
+    parser.add_argument(
+        '--path', '-p',
+        type=str,
+        default='.',
+        help='Repository path to search (default: current directory)'
+    )
+    parser.add_argument(
+        '--attributes-file', '-a',
+        type=str,
+        help='Path to attributes.adoc file (skips interactive selection)'
+    )
+
+    args = parser.parse_args()
+
+    # Determine repository root
+    repo_root = Path(args.path).resolve()
+
+    if not repo_root.exists() or not repo_root.is_dir():
+        print(f"Error: Directory not found: {repo_root}")
+        sys.exit(1)
+
+    print(f"{'DRY RUN MODE - ' if args.dry_run else ''}Searching in: {repo_root}")
+
+    # Find or get attributes file
+    if args.attributes_file:
+        attributes_file = Path(args.attributes_file)
+        if not attributes_file.exists():
+            print(f"Error: Specified attributes file not found: {attributes_file}")
+            sys.exit(1)
+    else:
+        print("\nSearching for attributes.adoc files...")
+        attributes_files = find_attributes_files(repo_root)
+        attributes_file = prompt_for_attributes_file(attributes_files)
+
+        if not attributes_file:
+            print("Operation cancelled.")
+            sys.exit(0)
+
+    print(f"\nLoading attributes from: {attributes_file}")
+    attributes = load_attributes(attributes_file)
+
+    if not attributes:
+        print("No attributes found in the file.")
+        sys.exit(1)
+
+    print(f"Found {len(attributes)} attributes")
+
+    # Resolve nested references
+    print("Resolving nested attribute references...")
+    attributes = resolve_nested_attributes(attributes)
+
+    # Find all AsciiDoc files
+    print(f"\nSearching for *.adoc files in {repo_root}")
+    adoc_files = find_adoc_files(repo_root)
+
+    # Exclude the attributes file itself
+    adoc_files = [f for f in adoc_files if f != attributes_file]
+
+    print(f"Found {len(adoc_files)} AsciiDoc files to process")
+
+    if args.dry_run:
+        print("\n*** DRY RUN MODE - No files will be modified ***\n")
+
+    # Process each file
+    total_replacements = 0
+    files_modified = 0
+
+    for file_path in adoc_files:
+        replacements = replace_link_attributes_in_file(file_path, attributes, args.dry_run)
+        if replacements > 0:
+            rel_path = file_path.relative_to(repo_root)
+            prefix = "[DRY RUN] " if args.dry_run else ""
+            print(f"  {prefix}Modified {rel_path}: {replacements} replacements")
+            total_replacements += replacements
+            files_modified += 1
+
+    # Summary
+    print(f"\nSummary:")
+    if args.dry_run:
+        print(f"  Would modify {files_modified} files")
+        print(f"  Would make {total_replacements} replacements")
+        print("\nRun without --dry-run to apply changes.")
+    else:
+        print(f"  Total files modified: {files_modified}")
+        print(f"  Total replacements: {total_replacements}")
+
+    if total_replacements == 0:
+        print("\nNo attribute references found within link macros.")
+    else:
+        print("\nReplacement complete!")
+
+
+if __name__ == '__main__':
+    main()
{rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9/rolfedh_doc_utils.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rolfedh-doc-utils
-Version: 0.1.8
+Version: 0.1.9
 Summary: CLI tools for AsciiDoc documentation projects
 Author: Rolfe Dlugy-Hegwer
 License: MIT License
@@ -79,6 +79,7 @@ pip install -e .
 
 | Tool | Description | Usage |
 |------|-------------|-------|
+| **`replace-link-attributes`** | Resolves Vale LinkAttribute violations by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
 | **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
{rolfedh_doc_utils-0.1.8 → rolfedh_doc_utils-0.1.9}/rolfedh_doc_utils.egg-info/SOURCES.txt

@@ -6,9 +6,12 @@ check_scannability.py
 find_unused_attributes.py
 format_asciidoc_spacing.py
 pyproject.toml
+replace_link_attributes.py
 setup.py
 doc_utils/__init__.py
 doc_utils/file_utils.py
+doc_utils/format_asciidoc_spacing.py
+doc_utils/replace_link_attributes.py
 doc_utils/scannability.py
 doc_utils/topic_map_parser.py
 doc_utils/unused_adoc.py
The remaining files listed above (LICENSE, setup.cfg, setup.py, the other doc_utils modules, the egg-info metadata, and the test suite) carry over from rolfedh_doc_utils-0.1.8 to rolfedh_doc_utils-0.1.9 without content changes.