rolfedh-doc-utils 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- archive_unused_files.py +1 -0
- archive_unused_images.py +1 -0
- check_scannability.py +1 -0
- doc_utils/extract_link_attributes.py +140 -17
- doc_utils/spinner.py +119 -0
- extract_link_attributes.py +15 -1
- find_unused_attributes.py +7 -1
- format_asciidoc_spacing.py +1 -0
- replace_link_attributes.py +16 -9
- {rolfedh_doc_utils-0.1.11.dist-info → rolfedh_doc_utils-0.1.13.dist-info}/METADATA +1 -1
- rolfedh_doc_utils-0.1.13.dist-info/RECORD +26 -0
- validate_links.py +6 -0
- rolfedh_doc_utils-0.1.11.dist-info/RECORD +0 -25
- {rolfedh_doc_utils-0.1.11.dist-info → rolfedh_doc_utils-0.1.13.dist-info}/WHEEL +0 -0
- {rolfedh_doc_utils-0.1.11.dist-info → rolfedh_doc_utils-0.1.13.dist-info}/entry_points.txt +0 -0
- {rolfedh_doc_utils-0.1.11.dist-info → rolfedh_doc_utils-0.1.13.dist-info}/licenses/LICENSE +0 -0
- {rolfedh_doc_utils-0.1.11.dist-info → rolfedh_doc_utils-0.1.13.dist-info}/top_level.txt +0 -0
archive_unused_files.py
CHANGED
@@ -11,6 +11,7 @@ import argparse
 from doc_utils.unused_adoc import find_unused_adoc
 from doc_utils.file_utils import parse_exclude_list_file
 
+from doc_utils.spinner import Spinner
 def main():
     parser = argparse.ArgumentParser(
         description='Archive unused AsciiDoc files.',
archive_unused_images.py
CHANGED
@@ -10,6 +10,7 @@ import argparse
 from doc_utils.unused_images import find_unused_images
 from doc_utils.file_utils import parse_exclude_list_file
 
+from doc_utils.spinner import Spinner
 def main():
     parser = argparse.ArgumentParser(description='Archive unused image files.')
     parser.add_argument('--archive', action='store_true', help='Move the files to a dated zip in the archive directory.')
check_scannability.py
CHANGED
@@ -19,6 +19,7 @@ from datetime import datetime
 from doc_utils.scannability import check_scannability
 from doc_utils.file_utils import collect_files, parse_exclude_list_file
 
+from doc_utils.spinner import Spinner
 BASE_SENTENCE_WORD_LIMIT = 22
 BASE_PARAGRAPH_SENTENCE_LIMIT = 3
 
doc_utils/extract_link_attributes.py
CHANGED
@@ -10,6 +10,9 @@ from typing import Dict, List, Set, Tuple, Optional
 from collections import defaultdict
 import unicodedata
 
+from .spinner import Spinner
+from .validate_links import LinkValidator
+
 
 def find_attribute_files(base_path: str = '.') -> List[str]:
     """Find potential attribute files in the repository."""
@@ -254,13 +257,14 @@ def collect_all_macros(scan_dirs: List[str] = None) -> List[Tuple[str, str, str,
 
 def create_attributes(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
                       existing_attrs: Dict[str, str],
-                      interactive: bool = True) -> Dict[str, str]:
+                      interactive: bool = True) -> Tuple[Dict[str, str], Dict[str, str]]:
     """
-    Create new attributes for each unique URL.
+    Create new attributes for each unique URL and track existing ones.
 
-    Returns:
+    Returns: Tuple[new_attributes, existing_matching_attributes]
     """
     new_attributes = {}
+    existing_matching_attributes = {}
     existing_attr_names = set(existing_attrs.keys())
     counter = 1
 
@@ -270,6 +274,7 @@ def create_attributes(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
         for attr_name, attr_value in existing_attrs.items():
             if url in attr_value:
                 existing_attr = attr_name
+                existing_matching_attributes[attr_name] = attr_value
                 break
 
         if existing_attr:
@@ -293,7 +298,7 @@ def create_attributes(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
 
             print(f"Created attribute: :{attr_name}: {attr_value}")
 
-    return new_attributes
+    return new_attributes, existing_matching_attributes
 
 
 def update_attribute_file(file_path: str, new_attributes: Dict[str, str], dry_run: bool = False):
@@ -381,10 +386,73 @@ def prepare_file_updates(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
     return dict(file_updates)
 
 
+def validate_link_attributes(attributes_file: str, fail_on_broken: bool = False) -> bool:
+    """
+    Validate URLs in link-* attributes.
+
+    Returns: True if validation passes (no broken links or fail_on_broken is False), False otherwise
+    """
+    if not os.path.exists(attributes_file):
+        return True  # No file to validate yet
+
+    print(f"\nValidating links in {attributes_file}...")
+    spinner = Spinner("Validating link attributes")
+    spinner.start()
+
+    # Extract link attributes from file
+    link_attributes = {}
+    with open(attributes_file, 'r', encoding='utf-8') as f:
+        for line_num, line in enumerate(f, 1):
+            # Match :link-*: URL patterns
+            match = re.match(r'^:(link-[a-zA-Z0-9_-]+):\s*(https?://[^\s]+)', line)
+            if match:
+                attr_name = match.group(1)
+                url = match.group(2).strip()
+                link_attributes[attr_name] = (url, line_num)
+
+    if not link_attributes:
+        spinner.stop("No link attributes to validate")
+        return True
+
+    # Validate each URL
+    validator = LinkValidator(timeout=10, retry=2, parallel=5)
+    broken_links = []
+
+    for attr_name, (url, line_num) in link_attributes.items():
+        try:
+            is_valid = validator.validate_url(url)
+            if not is_valid:
+                broken_links.append((attr_name, url, line_num))
+        except Exception as e:
+            broken_links.append((attr_name, url, line_num))
+
+    # Report results
+    total = len(link_attributes)
+    broken = len(broken_links)
+    valid = total - broken
+
+    spinner.stop(f"Validated {total} link attributes: {valid} valid, {broken} broken")
+
+    if broken_links:
+        print("\n⚠️ Broken link attributes found:")
+        for attr_name, url, line_num in broken_links:
+            print(f"  Line {line_num}: :{attr_name}: {url}")
+
+        if fail_on_broken:
+            print("\nStopping extraction due to broken links (--fail-on-broken)")
+            return False
+        else:
+            print("\nContinuing with extraction despite broken links...")
+
+    return True
+
+
 def extract_link_attributes(attributes_file: str = None,
                             scan_dirs: List[str] = None,
                             interactive: bool = True,
-                            dry_run: bool = False
+                            dry_run: bool = False,
+                            validate_links: bool = False,
+                            fail_on_broken: bool = False) -> bool:
     """
     Main function to extract link attributes.
 
@@ -410,13 +478,22 @@ def extract_link_attributes(attributes_file: str = None,
     if not attributes_file:
         return False
 
+    # Validate existing link attributes if requested
+    if validate_links:
+        if not validate_link_attributes(attributes_file, fail_on_broken):
+            return False
+
     # Load existing attributes
+    spinner = Spinner("Loading existing attributes")
+    spinner.start()
     existing_attrs = load_existing_attributes(attributes_file)
-
+    spinner.stop(f"Loaded {len(existing_attrs)} existing attributes")
 
     # Collect all macros
-
+    spinner = Spinner("Scanning for link and xref macros with attributes")
+    spinner.start()
     all_macros = collect_all_macros(scan_dirs)
+    spinner.stop()
 
     if not all_macros:
         print("No link or xref macros with attributes found.")
@@ -425,29 +502,75 @@
     print(f"Found {len(all_macros)} link/xref macros with attributes")
 
     # Group by URL
+    spinner = Spinner("Grouping macros by URL")
+    spinner.start()
     url_groups = group_macros_by_url(all_macros)
-
+    spinner.stop(f"Grouped into {len(url_groups)} unique URLs")
 
-    # Create new attributes
-    new_attributes = create_attributes(url_groups, existing_attrs, interactive)
+    # Create new attributes and track existing ones
+    new_attributes, existing_matching_attributes = create_attributes(url_groups, existing_attrs, interactive)
 
-    if not new_attributes:
-        print("No new attributes to create.")
+    if not new_attributes and not existing_matching_attributes:
+        print("No new attributes to create and no existing attributes match found URLs.")
         return True
 
-    #
-
+    # Validate new attributes before writing if requested
+    if validate_links and not dry_run and new_attributes:
+        print("\nValidating new link attributes...")
+        spinner = Spinner("Validating new URLs")
+        spinner.start()
+
+        validator = LinkValidator(timeout=10, retry=2, parallel=5)
+        broken_new = []
+
+        for attr_name, attr_value in new_attributes.items():
+            # Extract URL from attribute value (could be link: or xref:)
+            url_match = re.search(r'(https?://[^\[]+)', attr_value)
+            if url_match:
+                url = url_match.group(1).strip()
+                try:
+                    if not validator.validate_url(url):
+                        broken_new.append((attr_name, url))
+                except Exception:
+                    broken_new.append((attr_name, url))
+
+        spinner.stop(f"Validated {len(new_attributes)} new attributes")
+
+        if broken_new:
+            print("\n⚠️ Broken URLs in new attributes:")
+            for attr_name, url in broken_new:
+                print(f"  :{attr_name}: {url}")
+
+            if fail_on_broken:
+                print("\nStopping due to broken URLs in new attributes (--fail-on-broken)")
+                return False
+
+    # Update attribute file (only if there are new attributes)
+    if new_attributes:
+        update_attribute_file(attributes_file, new_attributes, dry_run)
 
-    # Prepare file updates
+    # Prepare file updates (include both new and existing matching attributes)
     all_attributes = {**existing_attrs, **new_attributes}
     file_updates = prepare_file_updates(url_groups, all_attributes)
 
     # Replace macros
-
+    if file_updates:
+        spinner = Spinner(f"Updating {len(file_updates)} files")
+        spinner.start()
+        replace_macros_with_attributes(file_updates, dry_run)
+        spinner.stop(f"Updated {len(file_updates)} files")
 
     if dry_run:
         print("\n[DRY RUN] No files were modified. Run without --dry-run to apply changes.")
     else:
-
+        total_processed = len(new_attributes) + len(existing_matching_attributes)
+        if new_attributes and existing_matching_attributes:
+            print(f"\nSuccessfully processed {total_processed} link attributes:")
+            print(f"  - Created {len(new_attributes)} new attributes")
+            print(f"  - Replaced macros using {len(existing_matching_attributes)} existing attributes")
+        elif new_attributes:
+            print(f"\nSuccessfully extracted {len(new_attributes)} link attributes")
+        elif existing_matching_attributes:
+            print(f"\nSuccessfully replaced macros using {len(existing_matching_attributes)} existing link attributes")
 
     return True
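For reference, the :link-*: matching used by the new validate_link_attributes() can be exercised on its own. This is a standalone sketch reusing the exact regex from the hunk above; the sample attribute lines are invented for illustration:

import re

# Same pattern as in validate_link_attributes() above
LINK_ATTR = re.compile(r'^:(link-[a-zA-Z0-9_-]+):\s*(https?://[^\s]+)')

samples = [
    ':link-docs: https://example.com/docs',   # matches: a link-* name with an http(s) URL
    ':product-name: Example Product',         # no match: not a link-* attribute
]
for line in samples:
    m = LINK_ATTR.match(line)
    if m:
        print(f'{m.group(1)} -> {m.group(2)}')  # prints: link-docs -> https://example.com/docs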
doc_utils/spinner.py
ADDED
@@ -0,0 +1,119 @@
"""
Spinner utility for showing progress during long-running operations.

This module provides a simple spinner that can be used by all doc-utils tools
to indicate that processing is in progress.
"""

import sys
import time
import threading
from typing import Optional


class Spinner:
    """A simple spinner to show progress during long operations."""

    FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏']

    def __init__(self, message: str = "Processing"):
        """
        Initialize the spinner with a message.

        Args:
            message: The message to display alongside the spinner
        """
        self.message = message
        self.spinning = False
        self.thread: Optional[threading.Thread] = None
        self.frame_index = 0

    def _spin(self):
        """Internal method that runs in a separate thread to animate the spinner."""
        while self.spinning:
            frame = self.FRAMES[self.frame_index % len(self.FRAMES)]
            sys.stdout.write(f'\r{frame} {self.message}...')
            sys.stdout.flush()
            self.frame_index += 1
            time.sleep(0.1)

    def start(self):
        """Start the spinner animation."""
        if not self.spinning:
            self.spinning = True
            self.thread = threading.Thread(target=self._spin)
            self.thread.daemon = True
            self.thread.start()

    def stop(self, final_message: Optional[str] = None, success: bool = True):
        """
        Stop the spinner animation.

        Args:
            final_message: Optional message to display after stopping
            success: Whether the operation was successful (affects the symbol shown)
        """
        if self.spinning:
            self.spinning = False
            if self.thread:
                self.thread.join()

        # Clear the spinner line completely
        sys.stdout.write('\r' + ' ' * 80 + '\r')

        # Write final message if provided
        if final_message:
            symbol = '✓' if success else '✗'
            sys.stdout.write(f'{symbol} {final_message}\n')

        sys.stdout.flush()

    def __enter__(self):
        """Context manager entry - start the spinner."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit - stop the spinner."""
        success = exc_type is None
        self.stop(success=success)
        return False


def with_spinner(message: str = "Processing"):
    """
    Decorator to add a spinner to a function.

    Usage:
        @with_spinner("Loading data")
        def load_data():
            # ... long running operation
            return data
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            spinner = Spinner(message)
            spinner.start()
            try:
                result = func(*args, **kwargs)
                spinner.stop(success=True)
                return result
            except Exception as e:
                spinner.stop(success=False)
                raise e
        return wrapper
    return decorator


# Convenience functions for common operations
def show_progress(message: str = "Processing", total: Optional[int] = None):
    """
    Show progress with optional item count.

    Args:
        message: The base message to display
        total: Optional total number of items being processed
    """
    if total:
        return Spinner(f"{message} ({total} items)")
    return Spinner(message)
extract_link_attributes.py
CHANGED
@@ -65,6 +65,18 @@ Examples:
         help='Enable verbose output'
     )
 
+    parser.add_argument(
+        '--validate-links',
+        action='store_true',
+        help='Validate URLs in link-* attributes before extraction'
+    )
+
+    parser.add_argument(
+        '--fail-on-broken',
+        action='store_true',
+        help='Exit extraction if broken links are found in attributes (requires --validate-links)'
+    )
+
     args = parser.parse_args()
 
     try:
@@ -72,7 +84,9 @@ Examples:
             attributes_file=args.attributes_file,
             scan_dirs=args.scan_dir,
             interactive=not args.non_interactive,
-            dry_run=args.dry_run
+            dry_run=args.dry_run,
+            validate_links=args.validate_links,
+            fail_on_broken=args.fail_on_broken
         )
 
         if not success:
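The same options are reachable programmatically through the library function this CLI wraps. A sketch using only keyword arguments that appear in this diff; the particular combination of values is illustrative:

from doc_utils.extract_link_attributes import extract_link_attributes

# Same effect as passing --validate-links --fail-on-broken on the command line
success = extract_link_attributes(
    interactive=False,
    dry_run=True,          # preview only; no files are modified
    validate_links=True,   # check link-* URLs before extraction
    fail_on_broken=True,   # abort if any checked URL is broken
)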
find_unused_attributes.py
CHANGED
@@ -13,6 +13,7 @@ import os
 import sys
 from datetime import datetime
 from doc_utils.unused_attributes import find_unused_attributes, find_attributes_files, select_attributes_file
+from doc_utils.spinner import Spinner
 
 def main():
     parser = argparse.ArgumentParser(description='Find unused AsciiDoc attributes.')
@@ -30,8 +31,10 @@ def main():
         attr_file = args.attributes_file
     else:
         # Auto-discover attributes files
-
+        spinner = Spinner("Searching for attributes files")
+        spinner.start()
         attributes_files = find_attributes_files('.')
+        spinner.stop()
 
         if not attributes_files:
             print("No attributes files found in the repository.")
@@ -44,7 +47,10 @@ def main():
         return 1
 
     try:
+        spinner = Spinner(f"Analyzing attributes in {os.path.basename(attr_file)}")
+        spinner.start()
         unused = find_unused_attributes(attr_file, '.')
+        spinner.stop(f"Found {len(unused)} unused attributes")
     except FileNotFoundError as e:
         print(f"Error: {e}")
         print(f"\nPlease ensure the file '{attr_file}' exists.")
format_asciidoc_spacing.py
CHANGED
replace_link_attributes.py
CHANGED
@@ -19,6 +19,7 @@ from doc_utils.replace_link_attributes import (
     replace_link_attributes_in_file,
     find_adoc_files
 )
+from doc_utils.spinner import Spinner
 
 
 def prompt_for_attributes_file(attributes_files: list[Path]) -> Optional[Path]:
@@ -120,30 +121,33 @@ def main():
         print(f"Error: Specified attributes file not found: {attributes_file}")
         sys.exit(1)
     else:
-
+        spinner = Spinner("Searching for attributes.adoc files")
+        spinner.start()
         attributes_files = find_attributes_files(repo_root)
+        spinner.stop()
         attributes_file = prompt_for_attributes_file(attributes_files)
 
         if not attributes_file:
            print("Operation cancelled.")
            sys.exit(0)
 
-
+    spinner = Spinner(f"Loading attributes from {attributes_file.name}")
+    spinner.start()
     attributes = load_attributes(attributes_file)
 
     if not attributes:
-
+        spinner.stop("No attributes found in the file", success=False)
         sys.exit(1)
 
-    print(f"Found {len(attributes)} attributes")
-
     # Resolve nested references
-    print("Resolving nested attribute references...")
     attributes = resolve_nested_attributes(attributes)
+    spinner.stop(f"Loaded and resolved {len(attributes)} attributes")
 
     # Find all AsciiDoc files
-
+    spinner = Spinner(f"Searching for .adoc files in {repo_root}")
+    spinner.start()
     adoc_files = find_adoc_files(repo_root)
+    spinner.stop()
 
     # Exclude the attributes file itself
     adoc_files = [f for f in adoc_files if f != attributes_file]
@@ -157,15 +161,18 @@ def main():
     total_replacements = 0
     files_modified = 0
 
+    spinner = Spinner(f"Processing {len(adoc_files)} files")
+    spinner.start()
+
     for file_path in adoc_files:
         replacements = replace_link_attributes_in_file(file_path, attributes, args.dry_run)
         if replacements > 0:
             rel_path = file_path.relative_to(repo_root)
-            prefix = "[DRY RUN] " if args.dry_run else ""
-            print(f"  {prefix}Modified {rel_path}: {replacements} replacements")
             total_replacements += replacements
             files_modified += 1
 
+    spinner.stop(f"Processed {len(adoc_files)} files")
+
     # Summary
     print(f"\nSummary:")
     if args.dry_run:
rolfedh_doc_utils-0.1.13.dist-info/RECORD
ADDED
@@ -0,0 +1,26 @@
archive_unused_files.py,sha256=h7CRwSPBVCOQs0hn_ASD4EXz8QJFcAO2x3KX9FVhXNM,1974
archive_unused_images.py,sha256=4GSVPYkxqFoY-isy47P_1AhD1ziXgmajFiBGCtZ3olg,1564
check_scannability.py,sha256=MvGLW4UGGcx-jZLsVRYXpXNAIEQyJZZnsN99zJzbtyc,5178
extract_link_attributes.py,sha256=rp1yRYIWOEvU3l6lpN4b5rCBae5Q7bdBxEDQ9BNuFH8,2976
find_unused_attributes.py,sha256=IUJKJr_MzxBXqg9rafUs9Kwi8AbU0x-H0AVflc1dhCU,3288
format_asciidoc_spacing.py,sha256=_XpHqxYWm1AnZLUK_cDpfAJtsDCDF0b66m3opfYnIuU,3912
replace_link_attributes.py,sha256=ZkBqrrpIiYGccGMgRjDBrWQKgpfOzHIegURmcgTwaHg,6614
validate_links.py,sha256=409fTAyBGTUrp6iSWuJ9AXExcdz8dC_4QeA_RvCIhus,5845
doc_utils/__init__.py,sha256=qqZR3lohzkP63soymrEZPBGzzk6-nFzi4_tSffjmu_0,74
doc_utils/extract_link_attributes.py,sha256=IIEq2bQmACwDszmaCMeMnYnPKwxSOHWbu_spYOJezlE,20700
doc_utils/file_utils.py,sha256=fpTh3xx759sF8sNocdn_arsP3KAv8XA6cTQTAVIZiZg,4247
doc_utils/format_asciidoc_spacing.py,sha256=XnVJekaj39aDzjV3xFKl58flM41AaJzejxNYJIIAMz0,10139
doc_utils/replace_link_attributes.py,sha256=kBiePbxjQn3O2rzqmYY8Mqy_mJgZ6yw048vSZ5SSB5E,6587
doc_utils/scannability.py,sha256=XwlmHqDs69p_V36X7DLjPTy0DUoLszSGqYjJ9wE-3hg,982
doc_utils/spinner.py,sha256=lJg15qzODiKoR0G6uFIk2BdVNgn9jFexoTRUMrjiWvk,3554
doc_utils/topic_map_parser.py,sha256=tKcIO1m9r2K6dvPRGue58zqMr0O2zKU1gnZMzEE3U6o,4571
doc_utils/unused_adoc.py,sha256=2cbqcYr1os2EhETUU928BlPRlsZVSdI00qaMhqjSIqQ,5263
doc_utils/unused_attributes.py,sha256=EjTtWIKW_aXsR1JOgw5RSDVAqitJ_NfRMVOXVGaiWTY,5282
doc_utils/unused_images.py,sha256=nqn36Bbrmon2KlGlcaruNjJJvTQ8_9H0WU9GvCW7rW8,1456
doc_utils/validate_links.py,sha256=iBGXnwdeLlgIT3fo3v01ApT5k0X2FtctsvkrE6E3VMk,19610
rolfedh_doc_utils-0.1.13.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
rolfedh_doc_utils-0.1.13.dist-info/METADATA,sha256=BijPKBcklacOn-2U2VTfcMEJBkyOMoI84Ce5ItNK-vc,7386
rolfedh_doc_utils-0.1.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
rolfedh_doc_utils-0.1.13.dist-info/entry_points.txt,sha256=2J4Ojc3kkuArpe2xcUOPc0LxSWCmnctvw8hy8zpnbO4,418
rolfedh_doc_utils-0.1.13.dist-info/top_level.txt,sha256=1w0JWD7w7gnM5Sga2K4fJieNZ7CHPTAf0ozYk5iIlmo,182
rolfedh_doc_utils-0.1.13.dist-info/RECORD,,
validate_links.py
CHANGED
@@ -12,6 +12,7 @@ import argparse
 import sys
 import json
 from doc_utils.validate_links import LinkValidator, parse_transpositions, format_results
+from doc_utils.spinner import Spinner
 
 
 def main():
@@ -155,11 +156,16 @@ Examples:
 
     try:
         # Run validation
+        spinner = Spinner("Validating links")
+        spinner.start()
         results = validator.validate_all(
             scan_dirs=args.scan_dir,
             attributes_file=args.attributes_file,
             exclude_domains=args.exclude_domains
         )
+        total = results['summary']['total']
+        valid = results['summary']['valid']
+        spinner.stop(f"Validated {total} links: {valid} valid")
 
         # Format output
         if args.format == 'json':
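The hunk reads results['summary']['total'] and results['summary']['valid'], so validate_all() evidently returns a dict with a 'summary' section. A consuming sketch under that assumption; passing None for the optional inputs mirrors how the CLI forwards unset arguments, and any further summary keys are not shown in this diff:

from doc_utils.validate_links import LinkValidator

# Constructor arguments mirror those used in doc_utils/extract_link_attributes.py above
validator = LinkValidator(timeout=10, retry=2, parallel=5)
results = validator.validate_all(
    scan_dirs=['.'],
    attributes_file=None,
    exclude_domains=None,
)

summary = results['summary']  # 'total' and 'valid' keys are read by validate_links.py above
print(f"{summary['valid']} of {summary['total']} links are valid")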
rolfedh_doc_utils-0.1.11.dist-info/RECORD
REMOVED
@@ -1,25 +0,0 @@
archive_unused_files.py,sha256=KMC5a1WL3rZ5owoVnncvfpT1YeMKbVXq9giHvadDgbM,1936
archive_unused_images.py,sha256=PG2o3haovYckgfhoPhl6KRG_a9czyZuqlLkzkupKTCY,1526
check_scannability.py,sha256=gcM-vFXKHGP_yFBz7-V5xbXWhIMmtMzBYIGwP9CFbzI,5140
extract_link_attributes.py,sha256=utDM1FE-VEr649HhIH5BreXvxDNLnnAJO9dB5rs5f9Q,2535
find_unused_attributes.py,sha256=V8qI7O0u18ExbSho-hLfyBeRVqowLKGrFugY55JxZN0,3023
format_asciidoc_spacing.py,sha256=ROp-cdMs2_hk8H4z5ljT0iDgGtsiECZ8TVjjcN_oOWE,3874
replace_link_attributes.py,sha256=vg_aufw7dKXvh_epCKRNq_hEBMU_9crZ_JyJPpxSMNk,6454
validate_links.py,sha256=DoSB0h3mmjzTY2f0oN6ybTP6jCNkzN7T3qM6oXc2AwE,5585
doc_utils/__init__.py,sha256=qqZR3lohzkP63soymrEZPBGzzk6-nFzi4_tSffjmu_0,74
doc_utils/extract_link_attributes.py,sha256=qBpJuTXNrhy15klpqC0iELZzcSLztEzMSmhEnKyQZT0,15574
doc_utils/file_utils.py,sha256=fpTh3xx759sF8sNocdn_arsP3KAv8XA6cTQTAVIZiZg,4247
doc_utils/format_asciidoc_spacing.py,sha256=XnVJekaj39aDzjV3xFKl58flM41AaJzejxNYJIIAMz0,10139
doc_utils/replace_link_attributes.py,sha256=kBiePbxjQn3O2rzqmYY8Mqy_mJgZ6yw048vSZ5SSB5E,6587
doc_utils/scannability.py,sha256=XwlmHqDs69p_V36X7DLjPTy0DUoLszSGqYjJ9wE-3hg,982
doc_utils/topic_map_parser.py,sha256=tKcIO1m9r2K6dvPRGue58zqMr0O2zKU1gnZMzEE3U6o,4571
doc_utils/unused_adoc.py,sha256=2cbqcYr1os2EhETUU928BlPRlsZVSdI00qaMhqjSIqQ,5263
doc_utils/unused_attributes.py,sha256=EjTtWIKW_aXsR1JOgw5RSDVAqitJ_NfRMVOXVGaiWTY,5282
doc_utils/unused_images.py,sha256=nqn36Bbrmon2KlGlcaruNjJJvTQ8_9H0WU9GvCW7rW8,1456
doc_utils/validate_links.py,sha256=iBGXnwdeLlgIT3fo3v01ApT5k0X2FtctsvkrE6E3VMk,19610
rolfedh_doc_utils-0.1.11.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
rolfedh_doc_utils-0.1.11.dist-info/METADATA,sha256=22seO4nEGTjlibUZ8tPRxTFyYpmLRsfY7sZssteQl1g,7386
rolfedh_doc_utils-0.1.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
rolfedh_doc_utils-0.1.11.dist-info/entry_points.txt,sha256=2J4Ojc3kkuArpe2xcUOPc0LxSWCmnctvw8hy8zpnbO4,418
rolfedh_doc_utils-0.1.11.dist-info/top_level.txt,sha256=1w0JWD7w7gnM5Sga2K4fJieNZ7CHPTAf0ozYk5iIlmo,182
rolfedh_doc_utils-0.1.11.dist-info/RECORD,,
The four remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, and top_level.txt) contain no changes; they only move from the rolfedh_doc_utils-0.1.11.dist-info directory to rolfedh_doc_utils-0.1.13.dist-info.