rolfedh-doc-utils 0.1.20__tar.gz → 0.1.22__tar.gz
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
- {rolfedh_doc_utils-0.1.20/rolfedh_doc_utils.egg-info → rolfedh_doc_utils-0.1.22}/PKG-INFO +4 -3
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/README.md +3 -2
- rolfedh_doc_utils-0.1.22/convert_callouts_to_deflist.py +593 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/format_asciidoc_spacing.py +7 -1
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/version.py +1 -1
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils_cli.py +7 -2
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/pyproject.toml +3 -2
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22/rolfedh_doc_utils.egg-info}/PKG-INFO +4 -3
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/rolfedh_doc_utils.egg-info/SOURCES.txt +1 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/rolfedh_doc_utils.egg-info/entry_points.txt +1 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/rolfedh_doc_utils.egg-info/top_level.txt +1 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/LICENSE +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/__init__.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/scannability.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/spinner.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/unused_adoc.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/validate_links.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/version_check.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/find_unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/format_asciidoc_spacing.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/rolfedh_doc_utils.egg-info/dependency_links.txt +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/rolfedh_doc_utils.egg-info/requires.txt +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/setup.cfg +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/setup.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_auto_discovery.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_cli_entry_points.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_fixture_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_fixture_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_fixture_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_parse_exclude_list.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_symlink_handling.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_validate_links.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/tests/test_version_check.py +0 -0
- {rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/validate_links.py +0 -0
{rolfedh_doc_utils-0.1.20/rolfedh_doc_utils.egg-info → rolfedh_doc_utils-0.1.22}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rolfedh-doc-utils
-Version: 0.1.20
+Version: 0.1.22
 Summary: CLI tools for AsciiDoc documentation projects
 Author: Rolfe Dlugy-Hegwer
 License: MIT License
@@ -94,14 +94,15 @@ doc-utils --version # Show version
 
 | Tool | Description | Usage |
 |------|-------------|-------|
-| **`validate-links`**
+| **`validate-links`** | Validates all links in documentation, with URL transposition for preview environments | `validate-links --transpose "https://prod--https://preview"` |
 | **`extract-link-attributes`** | Extracts link/xref macros with attributes into reusable definitions | `extract-link-attributes --dry-run` |
 | **`replace-link-attributes`** | Resolves Vale LinkAttribute issues by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
-| **`format-asciidoc-spacing`**
+| **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
 | **`archive-unused-images`** | Finds and archives unreferenced images | `archive-unused-images` (preview)<br>`archive-unused-images --archive` (execute) |
 | **`find-unused-attributes`** | Identifies unused attribute definitions | `find-unused-attributes attributes.adoc` |
+| **`convert-callouts-to-deflist`** | Converts callout-style annotations to definition list format | `convert-callouts-to-deflist --dry-run modules/` |
 
 ## 📖 Documentation
 
{rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/README.md

@@ -61,14 +61,15 @@ doc-utils --version # Show version
 
 | Tool | Description | Usage |
 |------|-------------|-------|
-| **`validate-links`**
+| **`validate-links`** | Validates all links in documentation, with URL transposition for preview environments | `validate-links --transpose "https://prod--https://preview"` |
 | **`extract-link-attributes`** | Extracts link/xref macros with attributes into reusable definitions | `extract-link-attributes --dry-run` |
 | **`replace-link-attributes`** | Resolves Vale LinkAttribute issues by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
-| **`format-asciidoc-spacing`**
+| **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
 | **`archive-unused-images`** | Finds and archives unreferenced images | `archive-unused-images` (preview)<br>`archive-unused-images --archive` (execute) |
 | **`find-unused-attributes`** | Identifies unused attribute definitions | `find-unused-attributes attributes.adoc` |
+| **`convert-callouts-to-deflist`** | Converts callout-style annotations to definition list format | `convert-callouts-to-deflist --dry-run modules/` |
 
 ## 📖 Documentation
 
rolfedh_doc_utils-0.1.22/convert_callouts_to_deflist.py

@@ -0,0 +1,593 @@
+#!/usr/bin/env python3
+"""
+convert-callouts-to-deflist - Convert AsciiDoc callouts to definition list format
+
+Converts code blocks with callout-style annotations (<1>, <2>, etc.) to cleaner
+definition list format with "where:" prefix.
+
+This tool automatically scans all .adoc files in the current directory (recursively)
+by default, or you can specify a specific file or directory.
+"""
+
+import re
+import sys
+import argparse
+from pathlib import Path
+from typing import List, Dict, Tuple, Optional
+from dataclasses import dataclass
+
+
+# Colors for output
+class Colors:
+    RED = '\033[0;31m'
+    GREEN = '\033[0;32m'
+    YELLOW = '\033[1;33m'
+    NC = '\033[0m'  # No Color
+
+
+def print_colored(message: str, color: str = Colors.NC) -> None:
+    """Print message with color"""
+    print(f"{color}{message}{Colors.NC}")
+
+
+@dataclass
+class Callout:
+    """Represents a callout with its number and explanation text."""
+    number: int
+    text: str
+    is_optional: bool = False
+
+
+@dataclass
+class CodeBlock:
+    """Represents a code block with its content and metadata."""
+    start_line: int
+    end_line: int
+    delimiter: str
+    content: List[str]
+    language: Optional[str] = None
+
+
+class CalloutConverter:
+    """Converts callout-style documentation to definition list format."""
+
+    # Pattern for code block start: [source,language] or [source]
+    CODE_BLOCK_START = re.compile(r'^\[source(?:,\s*(\w+))?\]')
+
+    # Pattern for callout number at end of line in code block
+    CALLOUT_IN_CODE = re.compile(r'<(\d+)>\s*$')
+
+    # Pattern for callout explanation line: <1> Explanation text
+    CALLOUT_EXPLANATION = re.compile(r'^<(\d+)>\s+(.+)$')
+
+    # Pattern to detect user-replaceable values in angle brackets
+    # Excludes heredoc syntax (<<) and comparison operators
+    USER_VALUE_PATTERN = re.compile(r'(?<!<)<([a-zA-Z][^>]*)>')
+
+    def __init__(self, dry_run: bool = False, verbose: bool = False):
+        self.dry_run = dry_run
+        self.verbose = verbose
+        self.changes_made = 0
+        self.warnings = []  # Collect warnings for summary
+
+    def log(self, message: str):
+        """Print message if verbose mode is enabled."""
+        if self.verbose:
+            print(f"[INFO] {message}")
+
+    def find_code_blocks(self, lines: List[str]) -> List[CodeBlock]:
+        """Find all code blocks in the document."""
+        blocks = []
+        i = 0
+
+        while i < len(lines):
+            match = self.CODE_BLOCK_START.match(lines[i])
+            if match:
+                language = match.group(1)
+                start = i
+                i += 1
+
+                # Find the delimiter line (---- or ....)
+                if i < len(lines) and lines[i].strip() in ['----', '....']:
+                    delimiter = lines[i].strip()
+                    i += 1
+                    content_start = i
+
+                    # Find the closing delimiter
+                    while i < len(lines):
+                        if lines[i].strip() == delimiter:
+                            content = lines[content_start:i]
+                            blocks.append(CodeBlock(
+                                start_line=start,
+                                end_line=i,
+                                delimiter=delimiter,
+                                content=content,
+                                language=language
+                            ))
+                            break
+                        i += 1
+            i += 1
+
+        return blocks
+
+    def extract_callouts_from_code(self, content: List[str]) -> Dict[int, Optional[str]]:
+        """
+        Extract callout numbers from code block content.
+        Returns dict mapping callout number to either:
+        - The user-replaceable value (if angle brackets found), or
+        - The actual code line (for callouts explaining code behavior)
+
+        For each callout, attempts to extract the most relevant user-replaceable value
+        by looking for angle-bracket enclosed values on the same line.
+        """
+        callouts = {}
+
+        for line in content:
+            # Look for callout number at end of line
+            callout_match = self.CALLOUT_IN_CODE.search(line)
+            if callout_match:
+                callout_num = int(callout_match.group(1))
+
+                # Extract the user-replaceable value (content in angle brackets)
+                # Remove the callout number first
+                line_without_callout = self.CALLOUT_IN_CODE.sub('', line).strip()
+
+                # Find all angle-bracket enclosed values
+                user_values = self.USER_VALUE_PATTERN.findall(line_without_callout)
+
+                if user_values:
+                    # Use the rightmost (closest to the callout) user value
+                    callouts[callout_num] = user_values[-1]
+                else:
+                    # No angle-bracket value found - store the actual code line
+                    # This will be used as the term in the definition list
+                    callouts[callout_num] = line_without_callout
+
+        return callouts
+
+    def extract_callout_explanations(self, lines: List[str], start_line: int) -> Tuple[Dict[int, Callout], int]:
+        """
+        Extract callout explanations following a code block.
+        Returns dict of callouts and the line number where explanations end.
+        """
+        explanations = {}
+        i = start_line + 1  # Start after the closing delimiter
+
+        # Skip blank lines
+        while i < len(lines) and not lines[i].strip():
+            i += 1
+
+        # Collect consecutive callout explanation lines
+        while i < len(lines):
+            match = self.CALLOUT_EXPLANATION.match(lines[i])
+            if match:
+                num = int(match.group(1))
+                text = match.group(2).strip()
+
+                # Check if marked as optional
+                is_optional = False
+                if text.lower().startswith('optional.') or text.lower().startswith('optional:'):
+                    is_optional = True
+                    text = text[9:].strip()  # Remove "Optional." or "Optional:"
+                elif '(Optional)' in text or '(optional)' in text:
+                    is_optional = True
+                    text = re.sub(r'\s*\(optional\)\s*', ' ', text, flags=re.IGNORECASE).strip()
+
+                explanations[num] = Callout(num, text, is_optional)
+                i += 1
+            else:
+                break
+
+        return explanations, i - 1
+
+    def validate_callouts(self, code_callouts: Dict[int, str], explanations: Dict[int, Callout],
+                          input_file: Path = None, block_start: int = None, block_end: int = None) -> bool:
+        """
+        Validate that callout numbers in code match explanation numbers.
+        Returns True if valid, False otherwise.
+        """
+        code_nums = set(code_callouts.keys())
+        explanation_nums = set(explanations.keys())
+
+        if code_nums != explanation_nums:
+            # Format warning message with file and line numbers
+            if input_file and block_start is not None and block_end is not None:
+                # Line numbers are 1-indexed for display
+                line_range = f"{block_start + 1}-{block_end + 1}"
+                warning_msg = (
+                    f"WARNING: {input_file.name} lines {line_range}: Callout mismatch: "
+                    f"code has {sorted(code_nums)}, explanations have {sorted(explanation_nums)}"
+                )
+                print_colored(warning_msg, Colors.YELLOW)
+                # Store warning for summary
+                self.warnings.append(warning_msg)
+            else:
+                self.log(f"Callout mismatch: code has {code_nums}, explanations have {explanation_nums}")
+            return False
+
+        return True
+
+    def remove_callouts_from_code(self, content: List[str]) -> List[str]:
+        """Remove callout numbers from code block content."""
+        cleaned = []
+        for line in content:
+            cleaned.append(self.CALLOUT_IN_CODE.sub('', line))
+        return cleaned
+
+    def create_definition_list(self, code_callouts: Dict[int, str], explanations: Dict[int, Callout]) -> List[str]:
+        """
+        Create definition list from callouts and explanations.
+
+        For callouts with user-replaceable values in angle brackets, uses those.
+        For callouts without values, uses the actual code line as the term.
+        """
+        lines = ['\nwhere:\n']
+
+        # Sort by callout number
+        for num in sorted(code_callouts.keys()):
+            value = code_callouts[num]
+            explanation = explanations[num]
+
+            # Check if this is a user-replaceable value (contains angle brackets but not heredoc)
+            # User values are single words/phrases in angle brackets like <my-value>
+            user_values = self.USER_VALUE_PATTERN.findall(value)
+
+            if user_values and len(user_values) == 1 and len(value) < 100:
+                # This looks like a user-replaceable value placeholder
+                # Format the value (ensure it has angle brackets)
+                user_value = user_values[0]
+                if not user_value.startswith('<'):
+                    user_value = f'<{user_value}>'
+                if not user_value.endswith('>'):
+                    user_value = f'{user_value}>'
+                term = f'`{user_value}`'
+            else:
+                # This is a code line - use it as-is in backticks
+                term = f'`{value}`'
+
+            # Prepend "Optional. " to the explanation text if marked as optional
+            explanation_text = explanation.text
+            if explanation.is_optional:
+                explanation_text = f'Optional. {explanation_text}'
+
+            lines.append(f'\n{term}::')
+            lines.append(f'{explanation_text}\n')
+
+        return lines
+
+    def convert_file(self, input_file: Path) -> Tuple[int, bool]:
+        """
+        Convert callouts in a file to definition list format.
+        Returns tuple of (number of conversions, whether file was modified).
+        """
+        # Read input file
+        try:
+            with open(input_file, 'r', encoding='utf-8') as f:
+                lines = [line.rstrip('\n') for line in f]
+        except Exception as e:
+            print_colored(f"Error reading {input_file}: {e}", Colors.RED)
+            return 0, False
+
+        self.log(f"Processing {input_file} ({len(lines)} lines)")
+
+        # Find all code blocks
+        blocks = self.find_code_blocks(lines)
+        self.log(f"Found {len(blocks)} code blocks")
+
+        if not blocks:
+            return 0, False
+
+        # Process blocks in reverse order to maintain line numbers
+        new_lines = lines.copy()
+        conversions = 0
+
+        for block in reversed(blocks):
+            # Extract callouts from code
+            code_callouts = self.extract_callouts_from_code(block.content)
+
+            if not code_callouts:
+                self.log(f"No callouts in block at line {block.start_line + 1}")
+                continue
+
+            self.log(f"Block at line {block.start_line + 1} has callouts: {list(code_callouts.keys())}")
+
+            # Extract explanations
+            explanations, explanation_end = self.extract_callout_explanations(new_lines, block.end_line)
+
+            if not explanations:
+                self.log(f"No explanations found after block at line {block.start_line + 1}")
+                continue
+
+            # Validate callouts match
+            if not self.validate_callouts(code_callouts, explanations, input_file, block.start_line, block.end_line):
+                continue
+
+            self.log(f"Converting block at line {block.start_line + 1}")
+
+            # Remove callouts from code
+            cleaned_content = self.remove_callouts_from_code(block.content)
+
+            # Create definition list
+            def_list = self.create_definition_list(code_callouts, explanations)
+
+            # Replace in document
+            # 1. Update code block content
+            content_start = block.start_line + 2  # After [source] and ----
+            content_end = block.end_line
+
+            # 2. Remove old callout explanations
+            explanation_start = block.end_line + 1
+            while explanation_start < len(new_lines) and not new_lines[explanation_start].strip():
+                explanation_start += 1
+
+            # Build the new section
+            new_section = (
+                new_lines[:content_start] +
+                cleaned_content +
+                [new_lines[content_end]] +  # Keep closing delimiter
+                def_list +
+                new_lines[explanation_end + 1:]
+            )
+
+            new_lines = new_section
+            conversions += 1
+            self.changes_made += 1
+
+        # Write output
+        if conversions > 0 and not self.dry_run:
+            try:
+                with open(input_file, 'w', encoding='utf-8') as f:
+                    f.write('\n'.join(new_lines) + '\n')
+                self.log(f"Wrote {input_file}")
+            except Exception as e:
+                print_colored(f"Error writing {input_file}: {e}", Colors.RED)
+                return 0, False
+
+        return conversions, conversions > 0
+
+
+def find_adoc_files(path: Path, exclude_dirs: List[str] = None, exclude_files: List[str] = None) -> List[Path]:
+    """
+    Find all .adoc files in the given path.
+
+    Args:
+        path: Path to search (file or directory)
+        exclude_dirs: List of directory patterns to exclude
+        exclude_files: List of file patterns to exclude
+
+    Returns:
+        List of Path objects for .adoc files
+    """
+    adoc_files = []
+    exclude_dirs = exclude_dirs or []
+    exclude_files = exclude_files or []
+
+    # Always exclude .vale directory by default (Vale linter fixtures)
+    if '.vale' not in exclude_dirs:
+        exclude_dirs.append('.vale')
+
+    if path.is_file():
+        if path.suffix == '.adoc':
+            # Check if file should be excluded
+            if not any(excl in str(path) for excl in exclude_files):
+                adoc_files.append(path)
+    elif path.is_dir():
+        # Recursively find all .adoc files
+        for adoc_file in path.rglob('*.adoc'):
+            # Check if in excluded directory
+            if any(excl in str(adoc_file) for excl in exclude_dirs):
+                continue
+            # Check if file should be excluded
+            if any(excl in str(adoc_file) for excl in exclude_files):
+                continue
+            adoc_files.append(adoc_file)
+
+    return sorted(adoc_files)
+
+
+def load_exclusion_list(exclusion_file: Path) -> Tuple[List[str], List[str]]:
+    """
+    Load exclusion list from file.
+    Returns tuple of (excluded_dirs, excluded_files).
+    """
+    excluded_dirs = []
+    excluded_files = []
+
+    try:
+        with open(exclusion_file, 'r') as f:
+            for line in f:
+                line = line.strip()
+                # Skip comments and empty lines
+                if not line or line.startswith('#'):
+                    continue
+
+                # If it ends with /, it's a directory
+                if line.endswith('/'):
+                    excluded_dirs.append(line.rstrip('/'))
+                else:
+                    # Could be file or directory - check if it has extension
+                    if '.' in Path(line).name:
+                        excluded_files.append(line)
+                    else:
+                        excluded_dirs.append(line)
+    except Exception as e:
+        print_colored(f"Warning: Could not read exclusion file {exclusion_file}: {e}", Colors.YELLOW)
+
+    return excluded_dirs, excluded_files
+
+
+def main():
+    """Main entry point"""
+    parser = argparse.ArgumentParser(
+        description='Convert AsciiDoc callouts to definition list format',
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Convert AsciiDoc callout-style documentation to definition list format.
+
+This script identifies code blocks with callout numbers (<1>, <2>, etc.) and their
+corresponding explanation lines, then converts them to a cleaner definition list format
+with "where:" prefix.
+
+Examples:
+  %(prog)s                                # Process all .adoc files in current directory
+  %(prog)s modules/                       # Process all .adoc files in modules/
+  %(prog)s assemblies/my-guide.adoc       # Process single file
+  %(prog)s --dry-run modules/             # Preview changes without modifying
+  %(prog)s --exclude-dir .vale modules/   # Exclude .vale directory
+
+Example transformation:
+  FROM:
+    [source,yaml]
+    ----
+    name: <my-secret> <1>
+    key: <my-key> <2>
+    ----
+    <1> Secret name
+    <2> Key value
+
+  TO:
+    [source,yaml]
+    ----
+    name: <my-secret>
+    key: <my-key>
+    ----
+
+    where:
+
+    `<my-secret>`::
+    Secret name
+
+    `<my-key>`::
+    Key value
+"""
+    )
+
+    parser.add_argument(
+        'path',
+        nargs='?',
+        default='.',
+        help='File or directory to process (default: current directory)'
+    )
+    parser.add_argument(
+        '-n', '--dry-run',
+        action='store_true',
+        help='Show what would be changed without modifying files'
+    )
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help='Enable verbose output'
+    )
+    parser.add_argument(
+        '--exclude-dir',
+        action='append',
+        dest='exclude_dirs',
+        default=[],
+        help='Directory to exclude (can be used multiple times)'
+    )
+    parser.add_argument(
+        '--exclude-file',
+        action='append',
+        dest='exclude_files',
+        default=[],
+        help='File to exclude (can be used multiple times)'
+    )
+    parser.add_argument(
+        '--exclude-list',
+        type=Path,
+        help='Path to file containing directories/files to exclude, one per line'
+    )
+
+    args = parser.parse_args()
+
+    # Load exclusion list if provided
+    if args.exclude_list:
+        if args.exclude_list.exists():
+            excluded_dirs, excluded_files = load_exclusion_list(args.exclude_list)
+            args.exclude_dirs.extend(excluded_dirs)
+            args.exclude_files.extend(excluded_files)
+        else:
+            print_colored(f"Warning: Exclusion list file not found: {args.exclude_list}", Colors.YELLOW)
+
+    # Convert path to Path object
+    target_path = Path(args.path)
+
+    # Check if path exists
+    if not target_path.exists():
+        print_colored(f"Error: Path does not exist: {target_path}", Colors.RED)
+        sys.exit(1)
+
+    # Display dry-run mode message
+    if args.dry_run:
+        print_colored("DRY RUN MODE - No files will be modified", Colors.YELLOW)
+
+    # Find all AsciiDoc files
+    adoc_files = find_adoc_files(target_path, args.exclude_dirs, args.exclude_files)
+
+    if not adoc_files:
+        if target_path.is_file():
+            print_colored(f"Warning: {target_path} is not an AsciiDoc file (.adoc)", Colors.YELLOW)
+        else:
+            print(f"No AsciiDoc files found in {target_path}")
+        print("Processed 0 AsciiDoc file(s)")
+        return
+
+    print(f"Found {len(adoc_files)} AsciiDoc file(s) to process")
+
+    # Create converter
+    converter = CalloutConverter(dry_run=args.dry_run, verbose=args.verbose)
+
+    # Process each file
+    files_processed = 0
+    files_modified = 0
+    total_conversions = 0
+
+    for file_path in adoc_files:
+        try:
+            conversions, modified = converter.convert_file(file_path)
+
+            if modified:
+                files_modified += 1
+                total_conversions += conversions
+                if args.dry_run:
+                    print_colored(f"Would modify: {file_path} ({conversions} code block(s))", Colors.YELLOW)
+                else:
+                    print_colored(f"Modified: {file_path} ({conversions} code block(s))", Colors.GREEN)
+            elif args.verbose:
+                print(f"  No callouts found in: {file_path}")
+
+            files_processed += 1
+
+        except KeyboardInterrupt:
+            print_colored("\nOperation cancelled by user", Colors.YELLOW)
+            sys.exit(1)
+        except Exception as e:
+            print_colored(f"Unexpected error processing {file_path}: {e}", Colors.RED)
+            if args.verbose:
+                import traceback
+                traceback.print_exc()
+
+    # Summary
+    print(f"\nProcessed {files_processed} AsciiDoc file(s)")
+    if args.dry_run and files_modified > 0:
+        print(f"Would modify {files_modified} file(s) with {total_conversions} code block conversion(s)")
+    elif files_modified > 0:
+        print_colored(f"Modified {files_modified} file(s) with {total_conversions} code block conversion(s)", Colors.GREEN)
+    else:
+        print("No files with callouts to convert")
+
+    # Display warning summary if any warnings were collected
+    if converter.warnings:
+        print_colored(f"\n⚠️  {len(converter.warnings)} Warning(s):", Colors.YELLOW)
+        for warning in converter.warnings:
+            print_colored(f"  {warning}", Colors.YELLOW)
+        print()
+
+    if args.dry_run and files_modified > 0:
+        print_colored("DRY RUN - No files were modified", Colors.YELLOW)
+
+    return 0 if files_processed >= 0 else 1
+
+
+if __name__ == '__main__':
+    sys.exit(main())
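For orientation, the sketch below exercises the new module's main building blocks on an in-memory snippet rather than a file on disk. It assumes convert_callouts_to_deflist.py is importable from the working directory; the sample lines and variable names are illustrative only.

# Minimal sketch: drive the converter's pieces directly instead of via convert_file().
from convert_callouts_to_deflist import CalloutConverter

code = [
    "name: <my-secret> <1>",
    "key: <my-key> <2>",
]
# Lines that follow the code block; index 0 stands in for the closing delimiter.
trailing = [
    "----",
    "",
    "<1> Secret name",
    "<2> Key value",
]

converter = CalloutConverter(dry_run=True)
code_callouts = converter.extract_callouts_from_code(code)        # {1: 'my-secret', 2: 'my-key'}
explanations, _end = converter.extract_callout_explanations(trailing, 0)
if converter.validate_callouts(code_callouts, explanations):
    cleaned = converter.remove_callouts_from_code(code)           # "<1>"/"<2>" markers stripped
    deflist = converter.create_definition_list(code_callouts, explanations)
    print("\n".join(cleaned + deflist))                           # cleaned code plus the "where:" list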
{rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils/format_asciidoc_spacing.py

@@ -175,8 +175,14 @@ def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False)
 
         # Check if current line is an include directive
         elif re.match(r'^include::', current_line):
-            #
+            # Handle includes inside conditional blocks
            if in_conditional:
+                # Add blank line between consecutive includes within conditional blocks
+                if prev_line and re.match(r'^include::', prev_line):
+                    new_lines.append("")
+                    changes_made = True
+                    if verbose:
+                        messages.append("  Added blank line between includes in conditional block")
                 new_lines.append(current_line)
             else:
                 # Check if this is an attribute include (contains "attribute" in the path)
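The behavior added here is easier to see in isolation. The following sketch is not the actual process_file function; it reimplements only the new rule, and the ifdef::/endif:: detection and helper name are assumptions made for the illustration.

# Simplified illustration of the new rule only: inside an ifdef::/endif:: block,
# consecutive include:: lines get a blank line inserted between them.
import re

def space_includes_in_conditionals(lines):
    new_lines = []
    prev_line = ""
    in_conditional = False
    for current_line in lines:
        if re.match(r'^(ifdef|ifndef)::', current_line):
            in_conditional = True
        elif re.match(r'^endif::', current_line):
            in_conditional = False
        elif re.match(r'^include::', current_line) and in_conditional:
            if prev_line and re.match(r'^include::', prev_line):
                new_lines.append("")  # blank line between consecutive includes
        new_lines.append(current_line)
        prev_line = current_line
    return new_lines

for line in space_includes_in_conditionals([
    "ifdef::context[]",
    "include::modules/one.adoc[leveloffset=+1]",
    "include::modules/two.adoc[leveloffset=+1]",
    "endif::[]",
]):
    print(line)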
{rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/doc_utils_cli.py

@@ -15,7 +15,7 @@ from doc_utils.version_check import check_version_on_startup
 TOOLS = [
     {
         'name': 'validate-links',
-        'description': 'Validates all links in documentation with URL transposition
+        'description': 'Validates all links in documentation with URL transposition',
         'example': 'validate-links --transpose "https://prod--https://preview"'
     },
     {
@@ -30,7 +30,7 @@ TOOLS = [
     },
     {
         'name': 'format-asciidoc-spacing',
-        'description': 'Standardizes spacing after headings and around includes
+        'description': 'Standardizes spacing after headings and around includes',
         'example': 'format-asciidoc-spacing --dry-run modules/'
     },
     {
@@ -53,6 +53,11 @@ TOOLS = [
         'description': 'Identifies unused attribute definitions in AsciiDoc files',
         'example': 'find-unused-attributes  # auto-discovers attributes files'
     },
+    {
+        'name': 'convert-callouts-to-deflist',
+        'description': 'Converts callout-style annotations to definition list format',
+        'example': 'convert-callouts-to-deflist --dry-run modules/'
+    },
 ]
 
 
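The TOOLS registry presumably backs the overview that the doc-utils command prints; the rendering code itself is not part of this diff, so the loop below is only a hypothetical sketch of how entries shaped like the one added here can be listed.

# Hypothetical listing loop over entries shaped like the TOOLS items above.
TOOLS = [
    {
        'name': 'convert-callouts-to-deflist',
        'description': 'Converts callout-style annotations to definition list format',
        'example': 'convert-callouts-to-deflist --dry-run modules/'
    },
]

for tool in TOOLS:
    print(tool['name'])
    print(f"    {tool['description']}")
    print(f"    e.g. {tool['example']}")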
{rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "rolfedh-doc-utils"
-version = "0.1.20"
+version = "0.1.22"
 description = "CLI tools for AsciiDoc documentation projects"
 readme = "README.md"
 requires-python = ">=3.8"
@@ -24,10 +24,11 @@ format-asciidoc-spacing = "format_asciidoc_spacing:main"
 replace-link-attributes = "replace_link_attributes:main"
 extract-link-attributes = "extract_link_attributes:main"
 validate-links = "validate_links:main"
+convert-callouts-to-deflist = "convert_callouts_to_deflist:main"
 
 [tool.setuptools.packages.find]
 where = ["."]
 include = ["doc_utils*"]
 
 [tool.setuptools]
-py-modules = ["doc_utils_cli", "find_unused_attributes", "check_scannability", "archive_unused_files", "archive_unused_images", "format_asciidoc_spacing", "replace_link_attributes", "extract_link_attributes", "validate_links"]
+py-modules = ["doc_utils_cli", "find_unused_attributes", "check_scannability", "archive_unused_files", "archive_unused_images", "format_asciidoc_spacing", "replace_link_attributes", "extract_link_attributes", "validate_links", "convert_callouts_to_deflist"]
{rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22/rolfedh_doc_utils.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rolfedh-doc-utils
-Version: 0.1.20
+Version: 0.1.22
 Summary: CLI tools for AsciiDoc documentation projects
 Author: Rolfe Dlugy-Hegwer
 License: MIT License
@@ -94,14 +94,15 @@ doc-utils --version # Show version
 
 | Tool | Description | Usage |
 |------|-------------|-------|
-| **`validate-links`**
+| **`validate-links`** | Validates all links in documentation, with URL transposition for preview environments | `validate-links --transpose "https://prod--https://preview"` |
 | **`extract-link-attributes`** | Extracts link/xref macros with attributes into reusable definitions | `extract-link-attributes --dry-run` |
 | **`replace-link-attributes`** | Resolves Vale LinkAttribute issues by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
-| **`format-asciidoc-spacing`**
+| **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
 | **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
 | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
 | **`archive-unused-images`** | Finds and archives unreferenced images | `archive-unused-images` (preview)<br>`archive-unused-images --archive` (execute) |
 | **`find-unused-attributes`** | Identifies unused attribute definitions | `find-unused-attributes attributes.adoc` |
+| **`convert-callouts-to-deflist`** | Converts callout-style annotations to definition list format | `convert-callouts-to-deflist --dry-run modules/` |
 
 ## 📖 Documentation
 
{rolfedh_doc_utils-0.1.20 → rolfedh_doc_utils-0.1.22}/rolfedh_doc_utils.egg-info/entry_points.txt RENAMED

@@ -2,6 +2,7 @@
 archive-unused-files = archive_unused_files:main
 archive-unused-images = archive_unused_images:main
 check-scannability = check_scannability:main
+convert-callouts-to-deflist = convert_callouts_to_deflist:main
 doc-utils = doc_utils_cli:main
 extract-link-attributes = extract_link_attributes:main
 find-unused-attributes = find_unused_attributes:main
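After installing 0.1.22, the new console script registration can be checked from Python. The snippet below is a verification sketch, not part of the package; it uses the standard-library importlib.metadata API, whose group keyword requires Python 3.10 or later.

# Verify that the new console script is registered (Python 3.10+ API shown).
from importlib.metadata import entry_points

eps = entry_points(group='console_scripts')
for ep in eps:
    if ep.name == 'convert-callouts-to-deflist':
        print(ep.name, '->', ep.value)  # expected: convert-callouts-to-deflist -> convert_callouts_to_deflist:main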