rolfedh-doc-utils 0.1.8__py3-none-any.whl → 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,453 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Extract link and xref macros containing attributes into attribute definitions.
4
+ """
5
+
6
+ import os
7
+ import re
8
+ from pathlib import Path
9
+ from typing import Dict, List, Set, Tuple, Optional
10
+ from collections import defaultdict
11
+ import unicodedata
12
+
13
+
14
def find_attribute_files(base_path: str = '.') -> List[str]:
    """Locate candidate AsciiDoc attribute files under *base_path*.

    Globs a fixed set of common attribute-file naming patterns and
    returns the matches (as strings, relative to *base_path*) sorted
    and de-duplicated.
    """
    patterns = (
        '**/common-attributes.adoc',
        '**/attributes.adoc',
        '**/*-attributes.adoc',
        '**/attributes-*.adoc',
        '**/common_attributes.adoc',
        '**/_common-attributes.adoc',
    )

    root = Path(base_path)
    found: List[str] = []
    for pattern in patterns:
        found.extend(
            str(candidate.relative_to(root))
            for candidate in root.glob(pattern)
            if candidate.is_file()
        )

    # Patterns overlap, so de-duplicate (dict preserves first occurrence)
    # before sorting for a stable, predictable ordering.
    return sorted(dict.fromkeys(found))
43
+
44
+
45
def select_attribute_file(attribute_files: List[str]) -> Optional[str]:
    """Prompt the user to pick one attribute file from *attribute_files*.

    Returns the chosen path, or None when the list is empty or the user
    cancels (EOF / Ctrl-C / non-numeric abort).
    """
    if not attribute_files:
        return None

    print("\nMultiple attribute files found. Please select one:")
    for i, file_path in enumerate(attribute_files, 1):
        print(f" {i}. {file_path}")

    # Loop until a valid 1-based index is entered or input is aborted.
    while True:
        try:
            choice = input(f"\nEnter your choice (1-{len(attribute_files)}): ").strip()
            index = int(choice) - 1  # convert 1-based user input to 0-based
            if 0 <= index < len(attribute_files):
                return attribute_files[index]
            else:
                print(f"Please enter a number between 1 and {len(attribute_files)}")
        except (ValueError, EOFError, KeyboardInterrupt):
            # Non-numeric input, closed stdin, or Ctrl-C: abort selection.
            print("\nOperation cancelled.")
            return None
65
+
66
+
67
def load_existing_attributes(file_path: str) -> Dict[str, str]:
    """Parse ``:name: value`` attribute definitions from *file_path*.

    A missing file yields an empty mapping.  When the same attribute is
    defined more than once, the last definition wins.
    """
    if not os.path.exists(file_path):
        return {}

    definition = re.compile(r'^:([^:]+):\s*(.+)$')
    result: Dict[str, str] = {}
    with open(file_path, 'r', encoding='utf-8') as handle:
        for raw_line in handle:
            parsed = definition.match(raw_line)
            if parsed is not None:
                result[parsed.group(1).strip()] = parsed.group(2).strip()
    return result
83
+
84
+
85
def find_link_macros(file_path: str) -> List[Tuple[str, str, str, int]]:
    """
    Find all link: and xref: macros containing attributes in their URLs.

    A macro is reported only when its URL part contains at least one
    AsciiDoc attribute reference ({name}).

    Returns list of tuples: (full_macro, url, link_text, line_number)
    """
    # Compile the patterns once, outside the per-line loop (the original
    # rebuilt the pattern list and re-looked-up the regexes every line).
    # group(1)=full macro, group(2)=url, group(3)=link text
    patterns = [
        re.compile(r'(link:([^[\]]*\{[^}]+\}[^[\]]*)\[([^\]]*)\])', re.IGNORECASE),
        re.compile(r'(xref:([^[\]]*\{[^}]+\}[^[\]]*)\[([^\]]*)\])', re.IGNORECASE),
    ]

    macros: List[Tuple[str, str, str, int]] = []
    with open(file_path, 'r', encoding='utf-8') as f:
        for line_num, line in enumerate(f, 1):
            for pattern in patterns:
                for match in pattern.finditer(line):
                    macros.append(
                        (match.group(1), match.group(2), match.group(3), line_num)
                    )

    return macros
110
+
111
+
112
def generate_attribute_name(url: str, existing_attrs: Set[str], counter: int) -> str:
    """Derive a unique, slug-like attribute name from *url*.

    The result is prefixed with ``link-``; numeric suffixes (-1, -2, ...)
    are appended until the name does not collide with *existing_attrs*.

    NOTE(review): *counter* is accepted for interface compatibility but
    is not used; uniqueness comes from the suffix loop.
    """
    # Drop the protocol (e.g. https://) when one is present.
    slug = re.sub(r'^[^:]+://', '', url) if '://' in url else url

    # Attribute references like {product-url} carry no naming value.
    slug = re.sub(r'\{[^}]+\}', '', slug)

    # Prefer "<domain>-<last-path-component>" when a path is present.
    if '/' in slug:
        pieces = slug.split('/')
        if len(pieces) > 1:
            domain = pieces[0].replace('.', '-')
            tail = pieces[-1].split('.')[0] if pieces[-1] else ''
            slug = f"{domain}-{tail}" if tail else domain

    # Normalise to a lowercase, dash-separated token.
    slug = re.sub(r'[^a-zA-Z0-9-]', '-', slug)
    slug = re.sub(r'-+', '-', slug)
    slug = slug.strip('-').lower()

    # Keep the name reasonably short.
    slug = slug[:30]

    # Append -1, -2, ... until the candidate is unique.
    candidate = f"link-{slug}"
    stem = candidate
    n = 1
    while candidate in existing_attrs:
        candidate = f"{stem}-{n}"
        n += 1

    return candidate
156
+
157
+
158
def group_macros_by_url(macros: List[Tuple[str, str, str, str, int]]) -> Dict[str, List[Tuple[str, str, str, int]]]:
    """
    Group macros by URL, collecting all link text variations.

    Input tuples are (file_path, full_macro, url, link_text, line_number);
    each entry is stored under its URL key reordered as
    (file_path, link_text, full_macro, line_number).

    Returns: Dict[url, List[(file_path, link_text, full_macro, line_number)]]
    """
    grouped: Dict[str, List[Tuple[str, str, str, int]]] = defaultdict(list)
    for path, macro, url, text, lineno in macros:
        grouped[url].append((path, text, macro, lineno))
    return grouped
170
+
171
+
172
def select_link_text(url: str, variations: List[Tuple[str, str, str, int]], interactive: bool = True) -> str:
    """
    Select link text for a URL with multiple variations.

    variations: List[(file_path, link_text, full_macro, line_number)]

    With a single unique text it is returned directly.  Otherwise the
    most frequently used text wins (non-interactive), or the user is
    prompted to choose or enter custom text (interactive).
    """
    # Map each distinct link text to the "file:line" locations using it.
    unique_texts = {}
    for file_path, link_text, _, line_num in variations:
        if link_text not in unique_texts:
            unique_texts[link_text] = []
        unique_texts[link_text].append(f"{file_path}:{line_num}")

    if len(unique_texts) == 1:
        # Only one variation, use it
        return list(unique_texts.keys())[0]

    if not interactive:
        # Use most common (appears in most locations)
        most_common = max(unique_texts.items(), key=lambda x: len(x[1]))
        return most_common[0]

    # Interactive selection
    print(f"\nMultiple link text variations found for URL: {url}")
    print("Please select the preferred text:")

    text_list = list(unique_texts.items())
    for i, (text, locations) in enumerate(text_list, 1):
        print(f"\n {i}. \"{text}\"")
        # Show at most three example locations per variation.
        print(f" Used in: {', '.join(locations[:3])}")
        if len(locations) > 3:
            print(f" ... and {len(locations) - 3} more locations")

    # Extra menu entry allowing arbitrary user-supplied text.
    print(f"\n {len(text_list) + 1}. Enter custom text")

    while True:
        try:
            choice = input(f"\nEnter your choice (1-{len(text_list) + 1}): ").strip()
            index = int(choice) - 1  # convert 1-based input to 0-based

            if 0 <= index < len(text_list):
                return text_list[index][0]
            elif index == len(text_list):
                # "Enter custom text" option was chosen.
                custom_text = input("Enter custom link text: ").strip()
                if custom_text:
                    return custom_text
                else:
                    print("Text cannot be empty. Please try again.")
            else:
                print(f"Please enter a number between 1 and {len(text_list) + 1}")
        except (ValueError, EOFError, KeyboardInterrupt):
            # Cancelled or invalid input: fall back to the most common text.
            print("\nUsing most common text variation.")
            most_common = max(unique_texts.items(), key=lambda x: len(x[1]))
            return most_common[0]
226
+
227
+
228
def collect_all_macros(scan_dirs: Optional[List[str]] = None) -> List[Tuple[str, str, str, str, int]]:
    """
    Collect all link/xref macros with attributes from all .adoc files.

    Hidden directories (dot-prefixed, e.g. .git, .archive) are pruned so
    os.walk never descends into them.  The previous substring check
    ('/.archive' in root or '/.' in root) was redundant (the first term
    implies the second), depended on '/' as the path separator, and still
    walked the hidden trees while only skipping their files.

    Args:
        scan_dirs: Directories to scan; defaults to the current directory.

    Returns: List[(file_path, full_macro, url, link_text, line_number)]
    """
    if scan_dirs is None:
        scan_dirs = ['.']

    all_macros = []

    for scan_dir in scan_dirs:
        for root, dirs, files in os.walk(scan_dir):
            # Prune hidden directories in place so they are not traversed.
            dirs[:] = [d for d in dirs if not d.startswith('.')]

            for file in files:
                if file.endswith('.adoc'):
                    file_path = os.path.join(root, file)
                    for full_macro, url, link_text, line_num in find_link_macros(file_path):
                        all_macros.append((file_path, full_macro, url, link_text, line_num))

    return all_macros
253
+
254
+
255
def create_attributes(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
                      existing_attrs: Dict[str, str],
                      interactive: bool = True) -> Dict[str, str]:
    """
    Create new attributes for each unique URL.

    URLs that already have an attribute definition are skipped.  The
    previous implementation used a substring test (``url in attr_value``)
    which could mis-match prefixes (e.g. URL ``example.com`` matching an
    attribute defined for ``example.com/page``); the URL is now parsed
    out of each attribute value and compared exactly, consistent with
    prepare_file_updates().

    Returns: Dict[attribute_name, attribute_value]
    """
    # Build an exact URL -> attribute-name index from the existing values.
    url_to_existing = {}
    for attr_name, attr_value in existing_attrs.items():
        match = re.match(r'(?:link|xref):([^\[]+)\[', attr_value)
        if match:
            url_to_existing[match.group(1)] = attr_name

    new_attributes: Dict[str, str] = {}
    existing_attr_names = set(existing_attrs.keys())
    counter = 1

    for url, variations in url_groups.items():
        # Check if this URL already has an attribute (exact match).
        existing_attr = url_to_existing.get(url)
        if existing_attr:
            print(f"URL already has attribute {{{existing_attr}}}: {url}")
            continue

        # Select link text (interactively when several variants exist).
        link_text = select_link_text(url, variations, interactive)

        # Generate a name unique against both old and newly created attrs.
        attr_name = generate_attribute_name(url, existing_attr_names | set(new_attributes.keys()), counter)
        counter += 1

        # Preserve the macro type (link or xref) of the first occurrence.
        first_macro = variations[0][2]  # full_macro from first variation
        macro_type = 'xref' if first_macro.startswith('xref:') else 'link'

        # Create attribute value
        attr_value = f"{macro_type}:{url}[{link_text}]"
        new_attributes[attr_name] = attr_value

        print(f"Created attribute: :{attr_name}: {attr_value}")

    return new_attributes
297
+
298
+
299
def update_attribute_file(file_path: str, new_attributes: Dict[str, str], dry_run: bool = False):
    """Append *new_attributes* as ``:name: value`` lines to *file_path*.

    In dry-run mode the planned additions are only printed.  Attributes
    are written sorted by name under a marker comment; the parent
    directory is created when missing.
    """
    if not new_attributes:
        print("No new attributes to add.")
        return

    if dry_run:
        print(f"\n[DRY RUN] Would add {len(new_attributes)} attributes to {file_path}:")
        for attr_name, attr_value in new_attributes.items():
            print(f" :{attr_name}: {attr_value}")
        return

    # Ensure the destination directory exists ('.' for bare filenames).
    parent = os.path.dirname(file_path) or '.'
    os.makedirs(parent, exist_ok=True)

    with open(file_path, 'a', encoding='utf-8') as f:
        # Separate the new section from any existing content.
        if os.path.getsize(file_path) > 0:
            f.write('\n')
        f.write('// Extracted link attributes\n')
        for attr_name in sorted(new_attributes):
            f.write(f":{attr_name}: {new_attributes[attr_name]}\n")

    print(f"Added {len(new_attributes)} attributes to {file_path}")
323
+
324
+
325
def replace_macros_with_attributes(file_updates: Dict[str, List[Tuple[str, str]]], dry_run: bool = False):
    """
    Replace link/xref macros with their attribute references.

    file_updates: Dict[file_path, List[(old_macro, attribute_ref)]]

    In dry-run mode a preview (first three replacements per file) is
    printed and no files are modified.
    """
    for file_path, replacements in file_updates.items():
        if dry_run:
            print(f"\n[DRY RUN] Would update {file_path}:")
            for old_macro, attr_ref in replacements[:3]:
                print(f" Replace: {old_macro}")
                print(f" With: {attr_ref}")
            if len(replacements) > 3:
                print(f" ... and {len(replacements) - 3} more replacements")
            continue

        # Load, substitute every macro occurrence, then write back.
        target = Path(file_path)
        content = target.read_text(encoding='utf-8')
        for old_macro, attr_ref in replacements:
            content = content.replace(old_macro, attr_ref)
        target.write_text(content, encoding='utf-8')

        print(f"Updated {file_path}: {len(replacements)} replacements")
354
+
355
+
356
def prepare_file_updates(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
                         attribute_mapping: Dict[str, str]) -> Dict[str, List[Tuple[str, str]]]:
    """
    Prepare file updates mapping macros to attribute references.

    Returns: Dict[file_path, List[(old_macro, attribute_ref)]]
    """
    # Invert the attribute mapping: extract the URL from each attribute
    # value and index the attribute name by it.
    macro_url = re.compile(r'(?:link|xref):([^\[]+)\[')
    url_to_attr = {}
    for name, value in attribute_mapping.items():
        found = macro_url.match(value)
        if found:
            url_to_attr[found.group(1)] = name

    # Map every macro occurrence to its {attribute} replacement.
    updates: Dict[str, List[Tuple[str, str]]] = defaultdict(list)
    for url, variations in url_groups.items():
        name = url_to_attr.get(url)
        if name is None:
            continue
        replacement = f"{{{name}}}"
        for path, _, macro, _ in variations:
            updates[path].append((macro, replacement))

    return dict(updates)
382
+
383
+
384
def extract_link_attributes(attributes_file: Optional[str] = None,
                            scan_dirs: Optional[List[str]] = None,
                            interactive: bool = True,
                            dry_run: bool = False) -> bool:
    """
    Main function to extract link attributes.

    Orchestrates the full workflow: locate/confirm the attribute file,
    scan .adoc files for link/xref macros whose URLs contain attribute
    references, create one attribute per unique URL, append the new
    definitions to the attribute file, and replace each macro occurrence
    with its attribute reference.

    Args:
        attributes_file: Target attribute file; auto-detected when None.
        scan_dirs: Directories to scan; defaults to the current directory.
        interactive: Prompt the user for ambiguous choices when True.
        dry_run: Report planned changes without modifying any file.

    Returns: True if successful, False otherwise
    """
    # Find or confirm attributes file
    if not attributes_file:
        found_files = find_attribute_files()

        if not found_files:
            print("No attribute files found.")
            response = input("Create common-attributes.adoc? (y/n): ").strip().lower()
            if response == 'y':
                attributes_file = 'common-attributes.adoc'
            else:
                print("Please specify an attribute file with --attributes-file")
                return False
        elif len(found_files) == 1:
            attributes_file = found_files[0]
            print(f"Using attribute file: {attributes_file}")
        else:
            # Several candidates: let the user pick (may return None).
            attributes_file = select_attribute_file(found_files)
            if not attributes_file:
                return False

    # Load existing attributes so duplicates are not re-created.
    existing_attrs = load_existing_attributes(attributes_file)
    print(f"Loaded {len(existing_attrs)} existing attributes")

    # Collect all macros
    print("\nScanning for link and xref macros with attributes...")
    all_macros = collect_all_macros(scan_dirs)

    if not all_macros:
        print("No link or xref macros with attributes found.")
        return True

    print(f"Found {len(all_macros)} link/xref macros with attributes")

    # Group by URL so each unique URL yields a single attribute.
    url_groups = group_macros_by_url(all_macros)
    print(f"Grouped into {len(url_groups)} unique URLs")

    # Create new attributes
    new_attributes = create_attributes(url_groups, existing_attrs, interactive)

    if not new_attributes:
        print("No new attributes to create.")
        return True

    # Update attribute file (honours dry_run internally).
    update_attribute_file(attributes_file, new_attributes, dry_run)

    # Prepare file updates using both pre-existing and new attributes.
    all_attributes = {**existing_attrs, **new_attributes}
    file_updates = prepare_file_updates(url_groups, all_attributes)

    # Replace macros
    replace_macros_with_attributes(file_updates, dry_run)

    if dry_run:
        print("\n[DRY RUN] No files were modified. Run without --dry-run to apply changes.")
    else:
        print(f"\nSuccessfully extracted {len(new_attributes)} link attributes")

    return True
@@ -0,0 +1,222 @@
1
+ """
2
+ Format AsciiDoc spacing - ensures blank lines after headings and around include directives.
3
+
4
+ Core logic for formatting AsciiDoc files with proper spacing.
5
+ """
6
+
7
+ import re
8
+ from pathlib import Path
9
+ from typing import List, Tuple
10
+
11
+
12
def process_file(file_path: Path, dry_run: bool = False, verbose: bool = False) -> Tuple[bool, List[str]]:
    """
    Process a single AsciiDoc file to fix spacing issues.

    Inserts blank lines after headings, around include directives, and
    around ifdef/ifndef/endif conditional blocks, while leaving delimited
    blocks (admonitions, listings, literals) and conditional interiors
    untouched.

    Args:
        file_path: Path to the file to process
        dry_run: If True, show what would be changed without modifying
        verbose: If True, show detailed output

    Returns:
        Tuple of (changes_made, messages) where messages is a list of verbose output

    Raises:
        IOError: if the file cannot be read or written.
    """
    messages = []

    if verbose:
        messages.append(f"Processing: {file_path}")

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            lines = f.readlines()
    except (IOError, UnicodeDecodeError) as e:
        raise IOError(f"Error reading {file_path}: {e}")

    # Remove trailing newlines from lines for processing
    lines = [line.rstrip('\n\r') for line in lines]

    new_lines = []
    changes_made = False
    in_block = False  # Track if we're inside a block (admonition, listing, etc.)
    in_conditional = False  # Track if we're inside a conditional block

    for i, current_line in enumerate(lines):
        prev_line = lines[i-1] if i > 0 else ""
        next_line = lines[i+1] if i + 1 < len(lines) else ""

        # Check for conditional start (ifdef:: or ifndef::)
        if re.match(r'^(ifdef::|ifndef::)', current_line):
            in_conditional = True
            # Add blank line before conditional if needed
            if (prev_line and
                    not re.match(r'^\s*$', prev_line) and
                    not re.match(r'^(ifdef::|ifndef::|endif::)', prev_line)):
                new_lines.append("")
                changes_made = True
                if verbose:
                    messages.append(" Added blank line before conditional block")
            new_lines.append(current_line)

        # Check for conditional end (endif::)
        elif re.match(r'^endif::', current_line):
            new_lines.append(current_line)
            in_conditional = False
            # Add blank line after conditional if needed
            if (next_line and
                    not re.match(r'^\s*$', next_line) and
                    not re.match(r'^(ifdef::|ifndef::|endif::)', next_line)):
                new_lines.append("")
                changes_made = True
                if verbose:
                    messages.append(" Added blank line after conditional block")

        # Check for block delimiters (====, ----, ...., ____)
        # These are used for admonitions, listing blocks, literal blocks, etc.
        # NOTE(review): the open/close state is a single toggle, so mixed
        # delimiter kinds (e.g. ---- inside ====) would confuse it — confirm
        # that is acceptable for the targeted documents.
        elif re.match(r'^(====+|----+|\.\.\.\.+|____+)$', current_line):
            in_block = not in_block  # Toggle block state
            new_lines.append(current_line)
        # Check if current line is a heading (but not if we're in a block)
        elif not in_block and re.match(r'^=+\s+', current_line):
            new_lines.append(current_line)

            # Check if next line is not empty and not another heading
            if (next_line and
                    not re.match(r'^=+\s+', next_line) and
                    not re.match(r'^\s*$', next_line)):
                new_lines.append("")
                changes_made = True
                if verbose:
                    truncated = current_line[:50] + "..." if len(current_line) > 50 else current_line
                    messages.append(f" Added blank line after heading: {truncated}")

        # Check if current line is a comment (AsciiDoc comments start with //)
        elif re.match(r'^//', current_line):
            # Skip special handling if we're inside a conditional block
            if in_conditional:
                new_lines.append(current_line)
            else:
                # Check if next line is an include directive
                if next_line and re.match(r'^include::', next_line):
                    # This comment belongs to the include, add blank line before comment if needed
                    if (prev_line and
                            not re.match(r'^\s*$', prev_line) and
                            not re.match(r'^//', prev_line) and
                            not re.match(r'^:', prev_line)):  # Don't add if previous is attribute
                        new_lines.append("")
                        changes_made = True
                        if verbose:
                            messages.append(" Added blank line before comment above include")
                new_lines.append(current_line)

        # Check if current line is an attribute (starts with :)
        elif re.match(r'^:', current_line):
            # Skip special handling if we're inside a conditional block
            if in_conditional:
                new_lines.append(current_line)
            else:
                # Check if next line is an include directive
                if next_line and re.match(r'^include::', next_line):
                    # This attribute belongs to the include, add blank line before attribute if needed
                    if (prev_line and
                            not re.match(r'^\s*$', prev_line) and
                            not re.match(r'^//', prev_line) and
                            not re.match(r'^:', prev_line)):  # Don't add if previous is comment or attribute
                        new_lines.append("")
                        changes_made = True
                        if verbose:
                            messages.append(" Added blank line before attribute above include")
                new_lines.append(current_line)

        # Check if current line is an include directive
        elif re.match(r'^include::', current_line):
            # Skip special handling if we're inside a conditional block
            if in_conditional:
                new_lines.append(current_line)
            else:
                # Check if this is an attribute include (contains "attribute" in the path)
                is_attribute_include = 'attribute' in current_line.lower()

                # Check if this appears near the top of the file (within first 10 lines after H1)
                # Find the H1 heading position
                h1_position = -1
                for j in range(min(i, 10)):  # Look back up to 10 lines or to current position
                    if re.match(r'^=\s+', lines[j]):  # H1 heading starts with single =
                        h1_position = j
                        break

                # If this is an attribute include near the H1 heading, don't add surrounding blank lines
                is_near_h1 = h1_position >= 0 and (i - h1_position) <= 2

                # Check if previous line is a comment or attribute (which belongs to this include)
                has_comment_above = prev_line and re.match(r'^//', prev_line)
                has_attribute_above = prev_line and re.match(r'^:', prev_line)

                # If it's an attribute include near H1, only the heading's blank line is needed
                if not (is_attribute_include and is_near_h1):
                    # Don't add blank line if there's a comment or attribute above (it was handled by the comment/attribute logic)
                    if not has_comment_above and not has_attribute_above:
                        # Add blank line before include if previous line is not empty and not an include
                        if (prev_line and
                                not re.match(r'^\s*$', prev_line) and
                                not re.match(r'^include::', prev_line)):
                            new_lines.append("")
                            changes_made = True
                            if verbose:
                                truncated = current_line[:50] + "..." if len(current_line) > 50 else current_line
                                messages.append(f" Added blank line before include: {truncated}")

                new_lines.append(current_line)

                # If it's an attribute include near H1, don't add blank line after
                if not (is_attribute_include and is_near_h1):
                    # Add blank line after include if next line exists and is not empty and not an include
                    if (next_line and
                            not re.match(r'^\s*$', next_line) and
                            not re.match(r'^include::', next_line)):
                        new_lines.append("")
                        changes_made = True
                        if verbose:
                            truncated = current_line[:50] + "..." if len(current_line) > 50 else current_line
                            messages.append(f" Added blank line after include: {truncated}")

        else:
            new_lines.append(current_line)

    # Apply changes if any were made
    if changes_made:
        # Clean up any consecutive blank lines we may have added
        cleaned_lines = []
        for i, line in enumerate(new_lines):
            # Check if this is a blank line we're about to add
            if line == "":
                # Check if the previous line is also a blank line
                if i > 0 and cleaned_lines and cleaned_lines[-1] == "":
                    # Skip this blank line as we already have one
                    continue
            cleaned_lines.append(line)

        if not dry_run:
            try:
                with open(file_path, 'w', encoding='utf-8') as f:
                    for line in cleaned_lines:
                        f.write(line + '\n')
            except IOError as e:
                raise IOError(f"Error writing {file_path}: {e}")
    else:
        if verbose:
            messages.append(" No changes needed")

    return changes_made, messages
210
+
211
+
212
def find_adoc_files(path: Path) -> List[Path]:
    """Return every .adoc file at *path* (a single file or a directory tree)."""
    if path.is_file():
        # A single file qualifies only if it has the .adoc suffix.
        return [path] if path.suffix == '.adoc' else []
    if path.is_dir():
        return list(path.rglob('*.adoc'))
    # Nonexistent path: nothing to report.
    return []