rolfedh-doc-utils 0.1.11__tar.gz → 0.1.12__tar.gz
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- {rolfedh_doc_utils-0.1.11/rolfedh_doc_utils.egg-info → rolfedh_doc_utils-0.1.12}/PKG-INFO +1 -1
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/archive_unused_files.py +1 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/archive_unused_images.py +1 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/check_scannability.py +1 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/extract_link_attributes.py +117 -5
- rolfedh_doc_utils-0.1.12/doc_utils/spinner.py +119 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/extract_link_attributes.py +15 -1
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/find_unused_attributes.py +7 -1
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/format_asciidoc_spacing.py +1 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/pyproject.toml +1 -1
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/replace_link_attributes.py +16 -9
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12/rolfedh_doc_utils.egg-info}/PKG-INFO +1 -1
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/rolfedh_doc_utils.egg-info/SOURCES.txt +1 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/validate_links.py +6 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/LICENSE +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/README.md +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/__init__.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/format_asciidoc_spacing.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/scannability.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/unused_adoc.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/doc_utils/validate_links.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/rolfedh_doc_utils.egg-info/dependency_links.txt +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/rolfedh_doc_utils.egg-info/entry_points.txt +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/rolfedh_doc_utils.egg-info/requires.txt +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/rolfedh_doc_utils.egg-info/top_level.txt +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/setup.cfg +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/setup.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_auto_discovery.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_cli_entry_points.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_fixture_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_fixture_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_fixture_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_parse_exclude_list.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_symlink_handling.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.11 → rolfedh_doc_utils-0.1.12}/tests/test_validate_links.py +0 -0
archive_unused_files.py

@@ -11,6 +11,7 @@ import argparse
 from doc_utils.unused_adoc import find_unused_adoc
 from doc_utils.file_utils import parse_exclude_list_file
 
+from doc_utils.spinner import Spinner
 def main():
     parser = argparse.ArgumentParser(
         description='Archive unused AsciiDoc files.',
archive_unused_images.py

@@ -10,6 +10,7 @@ import argparse
 from doc_utils.unused_images import find_unused_images
 from doc_utils.file_utils import parse_exclude_list_file
 
+from doc_utils.spinner import Spinner
 def main():
     parser = argparse.ArgumentParser(description='Archive unused image files.')
     parser.add_argument('--archive', action='store_true', help='Move the files to a dated zip in the archive directory.')
check_scannability.py

@@ -19,6 +19,7 @@ from datetime import datetime
 from doc_utils.scannability import check_scannability
 from doc_utils.file_utils import collect_files, parse_exclude_list_file
 
+from doc_utils.spinner import Spinner
 BASE_SENTENCE_WORD_LIMIT = 22
 BASE_PARAGRAPH_SENTENCE_LIMIT = 3
 
doc_utils/extract_link_attributes.py

@@ -10,6 +10,9 @@ from typing import Dict, List, Set, Tuple, Optional
 from collections import defaultdict
 import unicodedata
 
+from .spinner import Spinner
+from .validate_links import LinkValidator
+
 
 def find_attribute_files(base_path: str = '.') -> List[str]:
     """Find potential attribute files in the repository."""
@@ -381,10 +384,73 @@ def prepare_file_updates(url_groups: Dict[str, List[Tuple[str, str, str, int]]],
     return dict(file_updates)
 
 
+def validate_link_attributes(attributes_file: str, fail_on_broken: bool = False) -> bool:
+    """
+    Validate URLs in link-* attributes.
+
+    Returns: True if validation passes (no broken links or fail_on_broken is False), False otherwise
+    """
+    if not os.path.exists(attributes_file):
+        return True  # No file to validate yet
+
+    print(f"\nValidating links in {attributes_file}...")
+    spinner = Spinner("Validating link attributes")
+    spinner.start()
+
+    # Extract link attributes from file
+    link_attributes = {}
+    with open(attributes_file, 'r', encoding='utf-8') as f:
+        for line_num, line in enumerate(f, 1):
+            # Match :link-*: URL patterns
+            match = re.match(r'^:(link-[a-zA-Z0-9_-]+):\s*(https?://[^\s]+)', line)
+            if match:
+                attr_name = match.group(1)
+                url = match.group(2).strip()
+                link_attributes[attr_name] = (url, line_num)
+
+    if not link_attributes:
+        spinner.stop("No link attributes to validate")
+        return True
+
+    # Validate each URL
+    validator = LinkValidator(timeout=10, retry=2, parallel=5)
+    broken_links = []
+
+    for attr_name, (url, line_num) in link_attributes.items():
+        try:
+            is_valid = validator.validate_url(url)
+            if not is_valid:
+                broken_links.append((attr_name, url, line_num))
+        except Exception as e:
+            broken_links.append((attr_name, url, line_num))
+
+    # Report results
+    total = len(link_attributes)
+    broken = len(broken_links)
+    valid = total - broken
+
+    spinner.stop(f"Validated {total} link attributes: {valid} valid, {broken} broken")
+
+    if broken_links:
+        print("\n⚠️  Broken link attributes found:")
+        for attr_name, url, line_num in broken_links:
+            print(f"  Line {line_num}: :{attr_name}: {url}")
+
+        if fail_on_broken:
+            print("\nStopping extraction due to broken links (--fail-on-broken)")
+            return False
+        else:
+            print("\nContinuing with extraction despite broken links...")
+
+    return True
+
+
 def extract_link_attributes(attributes_file: str = None,
                             scan_dirs: List[str] = None,
                             interactive: bool = True,
-                            dry_run: bool = False) -> bool:
+                            dry_run: bool = False,
+                            validate_links: bool = False,
+                            fail_on_broken: bool = False) -> bool:
     """
     Main function to extract link attributes.
 
@@ -410,13 +476,22 @@ def extract_link_attributes(attributes_file: str = None,
     if not attributes_file:
         return False
 
+    # Validate existing link attributes if requested
+    if validate_links:
+        if not validate_link_attributes(attributes_file, fail_on_broken):
+            return False
+
     # Load existing attributes
+    spinner = Spinner("Loading existing attributes")
+    spinner.start()
     existing_attrs = load_existing_attributes(attributes_file)
-
+    spinner.stop(f"Loaded {len(existing_attrs)} existing attributes")
 
     # Collect all macros
-
+    spinner = Spinner("Scanning for link and xref macros with attributes")
+    spinner.start()
     all_macros = collect_all_macros(scan_dirs)
+    spinner.stop()
 
     if not all_macros:
         print("No link or xref macros with attributes found.")
@@ -425,8 +500,10 @@ def extract_link_attributes(attributes_file: str = None,
     print(f"Found {len(all_macros)} link/xref macros with attributes")
 
     # Group by URL
+    spinner = Spinner("Grouping macros by URL")
+    spinner.start()
     url_groups = group_macros_by_url(all_macros)
-
+    spinner.stop(f"Grouped into {len(url_groups)} unique URLs")
 
     # Create new attributes
     new_attributes = create_attributes(url_groups, existing_attrs, interactive)
@@ -435,6 +512,37 @@ def extract_link_attributes(attributes_file: str = None,
         print("No new attributes to create.")
         return True
 
+    # Validate new attributes before writing if requested
+    if validate_links and not dry_run:
+        print("\nValidating new link attributes...")
+        spinner = Spinner("Validating new URLs")
+        spinner.start()
+
+        validator = LinkValidator(timeout=10, retry=2, parallel=5)
+        broken_new = []
+
+        for attr_name, attr_value in new_attributes.items():
+            # Extract URL from attribute value (could be link: or xref:)
+            url_match = re.search(r'(https?://[^\[]+)', attr_value)
+            if url_match:
+                url = url_match.group(1).strip()
+                try:
+                    if not validator.validate_url(url):
+                        broken_new.append((attr_name, url))
+                except Exception:
+                    broken_new.append((attr_name, url))
+
+        spinner.stop(f"Validated {len(new_attributes)} new attributes")
+
+        if broken_new:
+            print("\n⚠️  Broken URLs in new attributes:")
+            for attr_name, url in broken_new:
+                print(f"  :{attr_name}: {url}")
+
+            if fail_on_broken:
+                print("\nStopping due to broken URLs in new attributes (--fail-on-broken)")
+                return False
+
     # Update attribute file
     update_attribute_file(attributes_file, new_attributes, dry_run)
 
@@ -443,7 +551,11 @@ def extract_link_attributes(attributes_file: str = None,
     file_updates = prepare_file_updates(url_groups, all_attributes)
 
     # Replace macros
-
+    if file_updates:
+        spinner = Spinner(f"Updating {len(file_updates)} files")
+        spinner.start()
+        replace_macros_with_attributes(file_updates, dry_run)
+        spinner.stop(f"Updated {len(file_updates)} files")
 
     if dry_run:
         print("\n[DRY RUN] No files were modified. Run without --dry-run to apply changes.")
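For orientation, here is a minimal, hedged sketch of calling the new `validate_links` and `fail_on_broken` parameters from Python. The function and parameter names come from the hunks above; the file path, scan directory, sample `:link-*:` line, and the CLI invocation in the closing comment are assumptions for illustration.

```python
# Hedged sketch: exercises the parameters added to extract_link_attributes() in 0.1.12.
# Paths and the sample attribute line are hypothetical; only the function signature
# and the :link-*: pattern are taken from the diff above.
from doc_utils.extract_link_attributes import extract_link_attributes

# A line the :link-*: regex above would match, e.g. in attributes.adoc:
#   :link-user-guide: https://example.com/docs/user-guide

ok = extract_link_attributes(
    attributes_file="attributes.adoc",  # hypothetical path
    scan_dirs=["modules"],              # hypothetical scan directory
    interactive=False,
    dry_run=True,
    validate_links=True,                # new in 0.1.12: check existing :link-*: URLs first
    fail_on_broken=True,                # new in 0.1.12: abort if any URL fails validation
)
if not ok:
    print("Extraction aborted: broken link attributes were found.")

# Roughly equivalent CLI call (assumed direct script invocation; the installed
# command name may differ):
#   python extract_link_attributes.py --validate-links --fail-on-broken --dry-run --non-interactive
```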
doc_utils/spinner.py (new file)

@@ -0,0 +1,119 @@
+"""
+Spinner utility for showing progress during long-running operations.
+
+This module provides a simple spinner that can be used by all doc-utils tools
+to indicate that processing is in progress.
+"""
+
+import sys
+import time
+import threading
+from typing import Optional
+
+
+class Spinner:
+    """A simple spinner to show progress during long operations."""
+
+    FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏']
+
+    def __init__(self, message: str = "Processing"):
+        """
+        Initialize the spinner with a message.
+
+        Args:
+            message: The message to display alongside the spinner
+        """
+        self.message = message
+        self.spinning = False
+        self.thread: Optional[threading.Thread] = None
+        self.frame_index = 0
+
+    def _spin(self):
+        """Internal method that runs in a separate thread to animate the spinner."""
+        while self.spinning:
+            frame = self.FRAMES[self.frame_index % len(self.FRAMES)]
+            sys.stdout.write(f'\r{frame} {self.message}...')
+            sys.stdout.flush()
+            self.frame_index += 1
+            time.sleep(0.1)
+
+    def start(self):
+        """Start the spinner animation."""
+        if not self.spinning:
+            self.spinning = True
+            self.thread = threading.Thread(target=self._spin)
+            self.thread.daemon = True
+            self.thread.start()
+
+    def stop(self, final_message: Optional[str] = None, success: bool = True):
+        """
+        Stop the spinner animation.
+
+        Args:
+            final_message: Optional message to display after stopping
+            success: Whether the operation was successful (affects the symbol shown)
+        """
+        if self.spinning:
+            self.spinning = False
+            if self.thread:
+                self.thread.join()
+
+            # Clear the spinner line completely
+            sys.stdout.write('\r' + ' ' * 80 + '\r')
+
+            # Write final message if provided
+            if final_message:
+                symbol = '✓' if success else '✗'
+                sys.stdout.write(f'{symbol} {final_message}\n')
+
+            sys.stdout.flush()
+
+    def __enter__(self):
+        """Context manager entry - start the spinner."""
+        self.start()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit - stop the spinner."""
+        success = exc_type is None
+        self.stop(success=success)
+        return False
+
+
+def with_spinner(message: str = "Processing"):
+    """
+    Decorator to add a spinner to a function.
+
+    Usage:
+        @with_spinner("Loading data")
+        def load_data():
+            # ... long running operation
+            return data
+    """
+    def decorator(func):
+        def wrapper(*args, **kwargs):
+            spinner = Spinner(message)
+            spinner.start()
+            try:
+                result = func(*args, **kwargs)
+                spinner.stop(success=True)
+                return result
+            except Exception as e:
+                spinner.stop(success=False)
+                raise e
+        return wrapper
+    return decorator
+
+
+# Convenience functions for common operations
+def show_progress(message: str = "Processing", total: Optional[int] = None):
+    """
+    Show progress with optional item count.
+
+    Args:
+        message: The base message to display
+        total: Optional total number of items being processed
+    """
+    if total:
+        return Spinner(f"{message} ({total} items)")
+    return Spinner(message)
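Since `doc_utils/spinner.py` is new in this release, here is a brief, hedged usage sketch based only on the API shown in the hunk above (manual `start()`/`stop()`, the context-manager protocol, the `with_spinner` decorator, and `show_progress`); the `time.sleep()` calls merely stand in for real work.

```python
# Hedged sketch of the doc_utils.spinner API introduced in 0.1.12.
import time
from doc_utils.spinner import Spinner, with_spinner, show_progress

# Manual start/stop, the pattern the CLI scripts in this release use.
spinner = Spinner("Scanning files")
spinner.start()
time.sleep(1)                      # placeholder for real work
spinner.stop("Scanned 42 files")   # prints "✓ Scanned 42 files"

# Context-manager form; the spinner is stopped automatically on exit.
with Spinner("Loading attributes"):
    time.sleep(1)

# Decorator form.
@with_spinner("Crunching numbers")
def crunch():
    time.sleep(1)
    return 42

crunch()

# Convenience helper that appends an item count to the message.
with show_progress("Processing", total=10):
    time.sleep(1)
```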
extract_link_attributes.py

@@ -65,6 +65,18 @@ Examples:
         help='Enable verbose output'
     )
 
+    parser.add_argument(
+        '--validate-links',
+        action='store_true',
+        help='Validate URLs in link-* attributes before extraction'
+    )
+
+    parser.add_argument(
+        '--fail-on-broken',
+        action='store_true',
+        help='Exit extraction if broken links are found in attributes (requires --validate-links)'
+    )
+
     args = parser.parse_args()
 
     try:
@@ -72,7 +84,9 @@ Examples:
             attributes_file=args.attributes_file,
             scan_dirs=args.scan_dir,
             interactive=not args.non_interactive,
-            dry_run=args.dry_run
+            dry_run=args.dry_run,
+            validate_links=args.validate_links,
+            fail_on_broken=args.fail_on_broken
         )
 
         if not success:
find_unused_attributes.py

@@ -13,6 +13,7 @@ import os
 import sys
 from datetime import datetime
 from doc_utils.unused_attributes import find_unused_attributes, find_attributes_files, select_attributes_file
+from doc_utils.spinner import Spinner
 
 def main():
     parser = argparse.ArgumentParser(description='Find unused AsciiDoc attributes.')
@@ -30,8 +31,10 @@ def main():
         attr_file = args.attributes_file
     else:
         # Auto-discover attributes files
-
+        spinner = Spinner("Searching for attributes files")
+        spinner.start()
         attributes_files = find_attributes_files('.')
+        spinner.stop()
 
         if not attributes_files:
             print("No attributes files found in the repository.")
@@ -44,7 +47,10 @@ def main():
         return 1
 
     try:
+        spinner = Spinner(f"Analyzing attributes in {os.path.basename(attr_file)}")
+        spinner.start()
         unused = find_unused_attributes(attr_file, '.')
+        spinner.stop(f"Found {len(unused)} unused attributes")
     except FileNotFoundError as e:
         print(f"Error: {e}")
         print(f"\nPlease ensure the file '{attr_file}' exists.")
replace_link_attributes.py

@@ -19,6 +19,7 @@ from doc_utils.replace_link_attributes import (
     replace_link_attributes_in_file,
     find_adoc_files
 )
+from doc_utils.spinner import Spinner
 
 
 def prompt_for_attributes_file(attributes_files: list[Path]) -> Optional[Path]:
@@ -120,30 +121,33 @@ def main():
         print(f"Error: Specified attributes file not found: {attributes_file}")
         sys.exit(1)
     else:
-
+        spinner = Spinner("Searching for attributes.adoc files")
+        spinner.start()
         attributes_files = find_attributes_files(repo_root)
+        spinner.stop()
         attributes_file = prompt_for_attributes_file(attributes_files)
 
         if not attributes_file:
            print("Operation cancelled.")
            sys.exit(0)
 
-
+    spinner = Spinner(f"Loading attributes from {attributes_file.name}")
+    spinner.start()
    attributes = load_attributes(attributes_file)
 
    if not attributes:
-
+        spinner.stop("No attributes found in the file", success=False)
        sys.exit(1)
 
-    print(f"Found {len(attributes)} attributes")
-
    # Resolve nested references
-    print("Resolving nested attribute references...")
    attributes = resolve_nested_attributes(attributes)
+    spinner.stop(f"Loaded and resolved {len(attributes)} attributes")
 
    # Find all AsciiDoc files
-
+    spinner = Spinner(f"Searching for .adoc files in {repo_root}")
+    spinner.start()
    adoc_files = find_adoc_files(repo_root)
+    spinner.stop()
 
    # Exclude the attributes file itself
    adoc_files = [f for f in adoc_files if f != attributes_file]
@@ -157,15 +161,18 @@ def main():
    total_replacements = 0
    files_modified = 0
 
+    spinner = Spinner(f"Processing {len(adoc_files)} files")
+    spinner.start()
+
    for file_path in adoc_files:
        replacements = replace_link_attributes_in_file(file_path, attributes, args.dry_run)
        if replacements > 0:
            rel_path = file_path.relative_to(repo_root)
-            prefix = "[DRY RUN] " if args.dry_run else ""
-            print(f"  {prefix}Modified {rel_path}: {replacements} replacements")
            total_replacements += replacements
            files_modified += 1
 
+    spinner.stop(f"Processed {len(adoc_files)} files")
+
    # Summary
    print(f"\nSummary:")
    if args.dry_run:
validate_links.py

@@ -12,6 +12,7 @@ import argparse
 import sys
 import json
 from doc_utils.validate_links import LinkValidator, parse_transpositions, format_results
+from doc_utils.spinner import Spinner
 
 
 def main():
@@ -155,11 +156,16 @@ Examples:
 
     try:
         # Run validation
+        spinner = Spinner("Validating links")
+        spinner.start()
         results = validator.validate_all(
             scan_dirs=args.scan_dir,
             attributes_file=args.attributes_file,
             exclude_domains=args.exclude_domains
         )
+        total = results['summary']['total']
+        valid = results['summary']['valid']
+        spinner.stop(f"Validated {total} links: {valid} valid")
 
         # Format output
         if args.format == 'json':
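Finally, a small, hedged sketch of driving `LinkValidator` directly with a spinner, mirroring the pattern used in the hunks above. Only the constructor arguments and `validate_url()` are taken from this diff; the URLs are placeholders.

```python
# Hedged sketch: only LinkValidator(timeout=..., retry=..., parallel=...) and
# validate_url() are taken from the hunks above; the URLs are placeholders.
from doc_utils.spinner import Spinner
from doc_utils.validate_links import LinkValidator

urls = ["https://example.com/", "https://example.com/missing"]
validator = LinkValidator(timeout=10, retry=2, parallel=5)

spinner = Spinner(f"Checking {len(urls)} URLs")
spinner.start()
broken = []
for url in urls:
    try:
        if not validator.validate_url(url):
            broken.append(url)
    except Exception:
        broken.append(url)
spinner.stop(f"Checked {len(urls)} URLs: {len(urls) - len(broken)} valid",
             success=not broken)
```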
All remaining files listed above are unchanged between 0.1.11 and 0.1.12; they only move under the renamed version directory.