rolfedh-doc-utils 0.1.4__py3-none-any.whl → 0.1.41__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in that registry.
Files changed (52)
  1. archive_unused_files.py +18 -5
  2. archive_unused_images.py +9 -2
  3. callout_lib/__init__.py +22 -0
  4. callout_lib/converter_bullets.py +103 -0
  5. callout_lib/converter_comments.py +295 -0
  6. callout_lib/converter_deflist.py +134 -0
  7. callout_lib/detector.py +364 -0
  8. callout_lib/table_parser.py +804 -0
  9. check_published_links.py +1083 -0
  10. check_scannability.py +6 -0
  11. check_source_directives.py +101 -0
  12. convert_callouts_interactive.py +567 -0
  13. convert_callouts_to_deflist.py +628 -0
  14. convert_freemarker_to_asciidoc.py +288 -0
  15. convert_tables_to_deflists.py +479 -0
  16. doc_utils/convert_freemarker_to_asciidoc.py +708 -0
  17. doc_utils/duplicate_content.py +409 -0
  18. doc_utils/duplicate_includes.py +347 -0
  19. doc_utils/extract_link_attributes.py +618 -0
  20. doc_utils/format_asciidoc_spacing.py +285 -0
  21. doc_utils/insert_abstract_role.py +220 -0
  22. doc_utils/inventory_conditionals.py +164 -0
  23. doc_utils/missing_source_directive.py +211 -0
  24. doc_utils/replace_link_attributes.py +187 -0
  25. doc_utils/spinner.py +119 -0
  26. doc_utils/unused_adoc.py +150 -22
  27. doc_utils/unused_attributes.py +218 -6
  28. doc_utils/unused_images.py +81 -9
  29. doc_utils/validate_links.py +576 -0
  30. doc_utils/version.py +8 -0
  31. doc_utils/version_check.py +243 -0
  32. doc_utils/warnings_report.py +237 -0
  33. doc_utils_cli.py +158 -0
  34. extract_link_attributes.py +120 -0
  35. find_duplicate_content.py +209 -0
  36. find_duplicate_includes.py +198 -0
  37. find_unused_attributes.py +84 -6
  38. format_asciidoc_spacing.py +134 -0
  39. insert_abstract_role.py +163 -0
  40. inventory_conditionals.py +53 -0
  41. replace_link_attributes.py +214 -0
  42. rolfedh_doc_utils-0.1.41.dist-info/METADATA +246 -0
  43. rolfedh_doc_utils-0.1.41.dist-info/RECORD +52 -0
  44. {rolfedh_doc_utils-0.1.4.dist-info → rolfedh_doc_utils-0.1.41.dist-info}/WHEEL +1 -1
  45. rolfedh_doc_utils-0.1.41.dist-info/entry_points.txt +20 -0
  46. rolfedh_doc_utils-0.1.41.dist-info/top_level.txt +21 -0
  47. validate_links.py +213 -0
  48. rolfedh_doc_utils-0.1.4.dist-info/METADATA +0 -285
  49. rolfedh_doc_utils-0.1.4.dist-info/RECORD +0 -17
  50. rolfedh_doc_utils-0.1.4.dist-info/entry_points.txt +0 -5
  51. rolfedh_doc_utils-0.1.4.dist-info/top_level.txt +0 -5
  52. {rolfedh_doc_utils-0.1.4.dist-info → rolfedh_doc_utils-0.1.41.dist-info}/licenses/LICENSE +0 -0
extract_link_attributes.py ADDED
@@ -0,0 +1,120 @@
+ #!/usr/bin/env python3
+ """
+ Extract link and xref macros containing attributes into attribute definitions.
+
+ This tool finds all link: and xref: macros whose URLs contain attributes,
+ creates attribute definitions for them, and replaces the macros with
+ attribute references.
+ """
+
+ import argparse
+ import sys
+ from doc_utils.extract_link_attributes import extract_link_attributes
+ from doc_utils.version_check import check_version_on_startup
+ from doc_utils.version import __version__
+
+
+ def main():
+     # Check for updates (non-blocking, won't interfere with tool operation)
+     check_version_on_startup()
+     """Main entry point for the extract-link-attributes CLI tool."""
+     parser = argparse.ArgumentParser(
+         description='Extract link and xref macros containing attributes into attribute definitions',
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+   # Interactive mode with auto-discovery
+   extract-link-attributes
+
+   # Specify attribute file
+   extract-link-attributes --attributes-file common-attributes.adoc
+
+   # Non-interactive mode (uses most common link text)
+   extract-link-attributes --non-interactive
+
+   # Dry run to preview changes
+   extract-link-attributes --dry-run
+
+   # Scan specific directories
+   extract-link-attributes --scan-dir modules --scan-dir assemblies
+ """
+     )
+
+     parser.add_argument(
+         '--attributes-file',
+         help='Path to the attributes file to update (auto-discovered if not specified)'
+     )
+
+     parser.add_argument(
+         '--scan-dir',
+         action='append',
+         help='Directory to scan for .adoc files (can be used multiple times, default: current directory)'
+     )
+
+     parser.add_argument(
+         '--non-interactive',
+         action='store_true',
+         help='Non-interactive mode: automatically use most common link text for variations'
+     )
+
+     parser.add_argument(
+         '--dry-run',
+         action='store_true',
+         help='Preview changes without modifying files'
+     )
+
+     parser.add_argument(
+         '-v', '--verbose',
+         action='store_true',
+         help='Enable verbose output'
+     )
+
+     parser.add_argument(
+         '--validate-links',
+         action='store_true',
+         help='Validate URLs in link-* attributes before extraction'
+     )
+
+     parser.add_argument(
+         '--fail-on-broken',
+         action='store_true',
+         help='Exit extraction if broken links are found in attributes (requires --validate-links)'
+     )
+
+     parser.add_argument(
+         '--macro-type',
+         choices=['link', 'xref', 'both'],
+         default='both',
+         help='Type of macros to process: link, xref, or both (default: both)'
+     )
+     parser.add_argument('--version', action='version', version=f'%(prog)s {__version__}')
+
+     args = parser.parse_args()
+
+     try:
+         success = extract_link_attributes(
+             attributes_file=args.attributes_file,
+             scan_dirs=args.scan_dir,
+             interactive=not args.non_interactive,
+             dry_run=args.dry_run,
+             validate_links=args.validate_links,
+             fail_on_broken=args.fail_on_broken,
+             macro_type=args.macro_type
+         )
+
+         if not success:
+             sys.exit(1)
+
+     except KeyboardInterrupt:
+         print("\nOperation cancelled.")
+         sys.exit(1)
+     except Exception as e:
+         print(f"Error: {e}", file=sys.stderr)
+         if args.verbose:
+             import traceback
+             traceback.print_exc()
+         sys.exit(1)
+
+
+ if __name__ == '__main__':
+     main()
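
The CLI above delegates the actual rewrite to extract_link_attributes() in the doc_utils package, whose implementation is not part of this diff. A minimal sketch of the transform its docstring describes, assuming an illustrative macro regex and a hypothetical link-<slug> attribute naming scheme:

import re

# Matches link:/xref: macros whose target starts with an attribute reference,
# e.g. link:{base-url}/docs[Documentation]. Illustrative pattern only.
MACRO_RE = re.compile(r'(?:link|xref):(\{[\w-]+\}[^\[\s]*)\[([^\]]*)\]')

def slugify(text: str) -> str:
    # Hypothetical naming scheme: "Documentation" -> "link-documentation".
    return 'link-' + re.sub(r'[^a-z0-9]+', '-', text.lower()).strip('-')

def extract(adoc_text: str):
    """Return (rewritten text, {attribute name: full macro}) for matching macros."""
    definitions = {}

    def repl(match: re.Match) -> str:
        name = slugify(match.group(2) or match.group(1))
        definitions[name] = match.group(0)  # keep the whole macro as the value
        return '{' + name + '}'

    return MACRO_RE.sub(repl, adoc_text), definitions

text, defs = extract("See link:{base-url}/docs[Documentation] for details.")
# text -> "See {link-documentation} for details."
# defs -> {'link-documentation': 'link:{base-url}/docs[Documentation]'}

In the real tool, each entry in defs would become a definition such as :link-documentation: link:{base-url}/docs[Documentation] in the attributes file.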
find_duplicate_content.py ADDED
@@ -0,0 +1,209 @@
+ """
+ Find Duplicate Content in AsciiDoc Files
+
+ Scans AsciiDoc files for duplicate and similar content blocks including:
+ - Recurring notes (NOTE, TIP, WARNING, IMPORTANT, CAUTION)
+ - Tables
+ - Step sequences (ordered lists)
+ - Code blocks
+
+ This tool helps identify content that could be refactored into reusable components.
+ """
+
+ import argparse
+ import os
+ import sys
+ from datetime import datetime
+ from doc_utils.duplicate_content import (
+     find_duplicates,
+     format_report,
+     generate_csv_report
+ )
+ from doc_utils.spinner import Spinner
+ from doc_utils.version_check import check_version_on_startup
+ from doc_utils.version import __version__
+
+
+ def main():
+     # Check for updates (non-blocking, won't interfere with tool operation)
+     check_version_on_startup()
+
+     parser = argparse.ArgumentParser(
+         description='Find duplicate and similar content in AsciiDoc files.',
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+   find-duplicate-content                     # Scan current directory, write txt report
+   find-duplicate-content ./docs              # Scan specific directory
+   find-duplicate-content -t note -t table    # Find only notes and tables
+   find-duplicate-content -s 0.7              # Include 70%+ similar content
+   find-duplicate-content --format csv        # Write CSV report to ./reports/
+   find-duplicate-content --no-output         # Display results without saving report
+ """
+     )
+
+     parser.add_argument(
+         'directory',
+         nargs='?',
+         default='.',
+         help='Directory to scan (default: current directory)'
+     )
+
+     parser.add_argument(
+         '-t', '--type',
+         dest='block_types',
+         action='append',
+         choices=['note', 'tip', 'warning', 'important', 'caution', 'table', 'steps', 'code'],
+         help='Block types to search for (can be specified multiple times). Default: all types'
+     )
+
+     parser.add_argument(
+         '-s', '--similarity',
+         type=float,
+         default=0.8,
+         metavar='THRESHOLD',
+         help='Minimum similarity threshold (0.0-1.0). Default: 0.8'
+     )
+
+     parser.add_argument(
+         '-m', '--min-length',
+         type=int,
+         default=50,
+         metavar='CHARS',
+         help='Minimum content length to consider. Default: 50 characters'
+     )
+
+     parser.add_argument(
+         '--exact-only',
+         action='store_true',
+         help='Only find exact duplicates (sets similarity to 1.0)'
+     )
+
+     parser.add_argument(
+         '-e', '--exclude-dir',
+         dest='exclude_dirs',
+         action='append',
+         default=[],
+         metavar='DIR',
+         help='Directory to exclude (can be specified multiple times)'
+     )
+
+     parser.add_argument(
+         '--no-content',
+         action='store_true',
+         help='Hide content preview in output'
+     )
+
+     parser.add_argument(
+         '--no-output',
+         action='store_true',
+         help='Do not write report to ./reports/ directory (report is written by default)'
+     )
+
+     parser.add_argument(
+         '--format',
+         choices=['txt', 'csv', 'json', 'md'],
+         default='txt',
+         help='Output format (default: txt)'
+     )
+
+     parser.add_argument(
+         '--version',
+         action='version',
+         version=f'%(prog)s {__version__}'
+     )
+
+     args = parser.parse_args()
+
+     # Validate arguments
+     if not os.path.isdir(args.directory):
+         print(f"Error: '{args.directory}' is not a valid directory")
+         return 1
+
+     if args.similarity < 0 or args.similarity > 1:
+         print("Error: Similarity threshold must be between 0.0 and 1.0")
+         return 1
+
+     # Set up parameters
+     similarity = 1.0 if args.exact_only else args.similarity
+     exclude_dirs = ['.git', '.archive', 'target', 'build', 'node_modules'] + args.exclude_dirs
+
+     # Build command line options summary
+     cmd_options = ['find-duplicate-content']
+     if args.directory != '.':
+         cmd_options.append(args.directory)
+     if args.block_types:
+         for bt in args.block_types:
+             cmd_options.append(f'-t {bt}')
+     if args.exact_only:
+         cmd_options.append('--exact-only')
+     elif args.similarity != 0.8:
+         cmd_options.append(f'-s {args.similarity}')
+     if args.min_length != 50:
+         cmd_options.append(f'-m {args.min_length}')
+     for ed in args.exclude_dirs:
+         cmd_options.append(f'-e {ed}')
+     if args.no_content:
+         cmd_options.append('--no-content')
+     if args.no_output:
+         cmd_options.append('--no-output')
+     if args.format != 'txt':
+         cmd_options.append(f'--format {args.format}')
+     cmd_line = ' '.join(cmd_options)
+
+     # Run analysis
+     spinner = Spinner(f"Scanning AsciiDoc files in {args.directory}")
+     spinner.start()
+
+     try:
+         duplicate_groups = find_duplicates(
+             root_dir=args.directory,
+             min_similarity=similarity,
+             min_content_length=args.min_length,
+             exclude_dirs=exclude_dirs,
+             block_types=args.block_types
+         )
+     except Exception as e:
+         spinner.stop()
+         print(f"Error: {e}")
+         return 1
+
+     spinner.stop(f"Found {len(duplicate_groups)} groups of duplicate content")
+
+     # Print command line options used
+     print(f"\nCommand: {cmd_line}")
+     print(f"Directory: {os.path.abspath(args.directory)}\n")
+
+     # Generate report based on format
+     if args.format == 'csv':
+         report = generate_csv_report(duplicate_groups)
+     else:
+         report = format_report(
+             duplicate_groups,
+             show_content=not args.no_content
+         )
+
+     print(report)
+
+     # Write report file by default (unless --no-output)
+     if not args.no_output and duplicate_groups:
+         reports_dir = './reports'
+         os.makedirs(reports_dir, exist_ok=True)
+
+         timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+         filename = f'{reports_dir}/duplicate-content_{timestamp}.{args.format}'
+
+         with open(filename, 'w', encoding='utf-8') as f:
+             f.write(f"Duplicate content report\n")
+             f.write(f"Command: {cmd_line}\n")
+             f.write(f"Directory: {os.path.abspath(args.directory)}\n")
+             f.write(f"Generated: {datetime.now().isoformat()}\n\n")
+             f.write(report)
+
+         print(f"\nReport written to: {filename}")
+
+     return 0
+
+
+ if __name__ == '__main__':
+     sys.exit(main())
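
find_duplicates() and the report formatters live in doc_utils.duplicate_content and are not shown in this diff. As a sketch of how a 0.0-1.0 threshold like the -s option can be applied, assuming difflib's character-level ratio as the similarity measure:

from difflib import SequenceMatcher

def similarity(a: str, b: str) -> float:
    # Ratio of matching characters; 1.0 means the two blocks are identical.
    return SequenceMatcher(None, a, b).ratio()

blocks = [
    "NOTE: Restart the service after changing this value.",
    "NOTE: Restart the service after you change this value.",
]
if similarity(blocks[0], blocks[1]) >= 0.8:  # 0.8 is this CLI's default threshold
    print("near-duplicate pair")

--exact-only then simply corresponds to requiring a ratio of 1.0, which is why the script overrides the -s value in that case.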
find_duplicate_includes.py ADDED
@@ -0,0 +1,198 @@
+ #!/usr/bin/env python3
+ """
+ Find AsciiDoc files that are included more than once.
+
+ Scans AsciiDoc files for include:: macros and identifies files that are
+ included from multiple locations, helping identify opportunities for
+ content reuse or potential maintenance issues.
+
+ Usage:
+     find-duplicate-includes [directory] [options]
+ """
+
+ import argparse
+ import os
+ import sys
+ from datetime import datetime
+
+ from doc_utils.duplicate_includes import (
+     DEFAULT_COMMON_INCLUDES,
+     DEFAULT_EXCLUDE_DIRS,
+     find_duplicate_includes,
+     format_txt_report,
+     format_csv_report,
+     format_json_report,
+     format_md_report,
+ )
+
+
+ def build_cmd_line(args: argparse.Namespace) -> str:
+     """Reconstruct the command line for display."""
+     parts = ['find-duplicate-includes']
+
+     if args.directory != '.':
+         parts.append(args.directory)
+
+     if args.include_common:
+         parts.append('--include-common')
+
+     for d in (args.exclude_dir or []):
+         parts.append(f'-e {d}')
+
+     for f in (args.exclude_file or []):
+         parts.append(f'--exclude-file {f}')
+
+     if args.format != 'txt':
+         parts.append(f'--format {args.format}')
+
+     if args.no_output:
+         parts.append('--no-output')
+
+     return ' '.join(parts)
+
+
+ def main():
+     parser = argparse.ArgumentParser(
+         description='Find AsciiDoc files that are included more than once.',
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+   # Scan current directory
+   find-duplicate-includes
+
+   # Scan a specific directory
+   find-duplicate-includes ./docs
+
+   # Include common files (attributes.adoc, etc.) in results
+   find-duplicate-includes --include-common
+
+   # Exclude specific directories
+   find-duplicate-includes -e archive -e drafts
+
+   # Generate CSV report
+   find-duplicate-includes --format csv
+
+   # Display only, no report file
+   find-duplicate-includes --no-output
+ """
+     )
+
+     parser.add_argument(
+         'directory',
+         nargs='?',
+         default='.',
+         help='Directory to scan (default: current directory)'
+     )
+     parser.add_argument(
+         '--include-common',
+         action='store_true',
+         help='Include common files (attributes.adoc, etc.) in results'
+     )
+     parser.add_argument(
+         '-e', '--exclude-dir',
+         action='append',
+         metavar='DIR',
+         help='Directory to exclude (can be repeated)'
+     )
+     parser.add_argument(
+         '--exclude-file',
+         action='append',
+         metavar='FILE',
+         help='File to exclude (can be repeated)'
+     )
+     parser.add_argument(
+         '--no-output',
+         action='store_true',
+         help='Do not write report file (stdout only)'
+     )
+     parser.add_argument(
+         '--format',
+         choices=['txt', 'csv', 'json', 'md'],
+         default='txt',
+         help='Output format (default: txt)'
+     )
+
+     args = parser.parse_args()
+
+     # Validate directory
+     if not os.path.isdir(args.directory):
+         print(f"Error: '{args.directory}' is not a valid directory", file=sys.stderr)
+         sys.exit(1)
+
+     # Build exclusion sets
+     exclude_dirs = set(DEFAULT_EXCLUDE_DIRS)
+     if args.exclude_dir:
+         exclude_dirs.update(args.exclude_dir)
+
+     exclude_files = set()
+     if args.exclude_file:
+         exclude_files.update(args.exclude_file)
+
+     # Build command line for display
+     cmd_line = build_cmd_line(args)
+
+     # Find duplicates
+     duplicates, total_files, excluded_common = find_duplicate_includes(
+         directory=args.directory,
+         exclude_dirs=exclude_dirs,
+         exclude_files=exclude_files,
+         include_common=args.include_common,
+         common_includes=DEFAULT_COMMON_INCLUDES
+     )
+
+     # Format report
+     formatters = {
+         'txt': format_txt_report,
+         'csv': format_csv_report,
+         'json': format_json_report,
+         'md': format_md_report,
+     }
+
+     formatter = formatters[args.format]
+     report = formatter(duplicates, total_files, excluded_common, args.directory, cmd_line)
+
+     # Output summary to stdout
+     if duplicates:
+         print(f"\n\u2713 Found {len(duplicates)} files included more than once")
+     else:
+         if excluded_common:
+             print(f"\n\u2713 No unexpected duplicates found ({excluded_common} common files excluded)")
+         else:
+             print("\n\u2713 No files are included more than once")
+
+     print(f"\nCommand: {cmd_line}")
+     print(f"Directory: {os.path.abspath(args.directory)}")
+     print(f"Files scanned: {total_files}\n")
+
+     # Print report content
+     if args.format == 'txt':
+         # Skip header lines already printed
+         lines = report.split('\n')
+         # Find where the actual results start (after the header)
+         start = 0
+         for i, line in enumerate(lines):
+             if line.startswith('=') or line.startswith('No ') or line.startswith('Found '):
+                 start = i
+                 break
+         print('\n'.join(lines[start:]))
+     else:
+         print(report)
+
+     # Write report file
+     if not args.no_output and duplicates:
+         reports_dir = './reports'
+         os.makedirs(reports_dir, exist_ok=True)
+
+         timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+         filename = f'{reports_dir}/duplicate-includes_{timestamp}.{args.format}'
+
+         with open(filename, 'w', encoding='utf-8') as f:
+             f.write(report)
+
+         print(f"\nReport written to: {filename}")
+
+     return 1 if duplicates else 0
+
+
+ if __name__ == '__main__':
+     sys.exit(main())
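
Here too the scan itself is implemented in doc_utils.duplicate_includes, which this diff does not include. In outline, and under the assumption of a simple include:: regex, the duplicate-include check could look like:

import os
import re
from collections import defaultdict

# Matches lines such as include::modules/proc_installing.adoc[]; illustrative only.
INCLUDE_RE = re.compile(r'^include::([^\[]+)\[', re.MULTILINE)

def count_includes(root: str) -> dict[str, set[str]]:
    """Map each include target to the set of .adoc files that include it,
    keeping only targets included from more than one place."""
    sources = defaultdict(set)
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if not name.endswith('.adoc'):
                continue
            path = os.path.join(dirpath, name)
            with open(path, encoding='utf-8') as f:
                for target in INCLUDE_RE.findall(f.read()):
                    sources[target].add(path)
    return {t: s for t, s in sources.items() if len(s) > 1}

The real tool additionally filters out DEFAULT_COMMON_INCLUDES (attributes.adoc and the like) unless --include-common is passed, which is why the summary reports an excluded_common count.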
find_unused_attributes.py CHANGED
@@ -1,23 +1,74 @@
  """
  Find Unused AsciiDoc Attributes

- Scans a user-specified attributes file (e.g., attributes.adoc) for attribute definitions (e.g., :version: 1.1), then recursively scans all .adoc files in the current directory (ignoring symlinks) for usages of those attributes (e.g., {version}).
+ Scans an attributes file for attribute definitions (e.g., :version: 1.1), then recursively scans all .adoc files in the current directory (ignoring symlinks) for usages of those attributes (e.g., {version}).
+
+ If no attributes file is specified, the tool will auto-discover attributes files in the repository and let you choose one interactively.

  Any attribute defined but not used in any .adoc file is reported as NOT USED in both the command line output and a timestamped output file.
  """

  import argparse
  import os
+ import sys
  from datetime import datetime
- from doc_utils.unused_attributes import find_unused_attributes
+ from doc_utils.unused_attributes import find_unused_attributes, find_attributes_files, select_attributes_file, comment_out_unused_attributes, remove_unused_attributes
+ from doc_utils.spinner import Spinner
+ from doc_utils.version_check import check_version_on_startup
+ from doc_utils.version import __version__

  def main():
+     # Check for updates (non-blocking, won't interfere with tool operation)
+     check_version_on_startup()
      parser = argparse.ArgumentParser(description='Find unused AsciiDoc attributes.')
-     parser.add_argument('attributes_file', help='Path to the attributes.adoc file to scan for attribute definitions.')
+     parser.add_argument(
+         'attributes_file',
+         nargs='?',  # Make it optional
+         help='Path to the attributes file. If not specified, auto-discovers attributes files.'
+     )
      parser.add_argument('-o', '--output', action='store_true', help='Write results to a timestamped txt file in your home directory.')
+     parser.add_argument('-c', '--comment-out', action='store_true', help='Comment out unused attributes in the attributes file with "// Unused".')
+     parser.add_argument('-r', '--remove', action='store_true', help='Remove unused attributes from the attributes file. Also removes lines already marked with "// Unused".')
+     parser.add_argument('--version', action='version', version=f'%(prog)s {__version__}')
      args = parser.parse_args()

-     unused = find_unused_attributes(args.attributes_file, '.')
+     # Determine which attributes file to use
+     if args.attributes_file:
+         # User specified a file
+         attr_file = args.attributes_file
+     else:
+         # Auto-discover attributes files
+         spinner = Spinner("Searching for attributes files")
+         spinner.start()
+         attributes_files = find_attributes_files('.')
+         spinner.stop()
+
+         if not attributes_files:
+             print("No attributes files found in the repository.")
+             print("You can specify a file directly: find-unused-attributes <path-to-attributes-file>")
+             return 1
+
+         attr_file = select_attributes_file(attributes_files)
+         if not attr_file:
+             print("No attributes file selected.")
+             return 1
+
+     try:
+         spinner = Spinner(f"Analyzing attributes in {os.path.basename(attr_file)}")
+         spinner.start()
+         unused = find_unused_attributes(attr_file, '.')
+         spinner.stop(f"Found {len(unused)} unused attributes")
+     except FileNotFoundError as e:
+         print(f"Error: {e}")
+         print(f"\nPlease ensure the file '{attr_file}' exists.")
+         print("Usage: find-unused-attributes [<path-to-attributes-file>]")
+         return 1
+     except (ValueError, PermissionError) as e:
+         print(f"Error: {e}")
+         return 1
+     except Exception as e:
+         print(f"Unexpected error: {e}")
+         return 1

      lines = [f":{attr}: NOT USED" for attr in unused]
      output = '\n'.join(lines)
@@ -33,9 +84,36 @@ def main():
      home_dir = os.path.expanduser('~')
      filename = os.path.join(home_dir, f'unused_attributes_{timestamp}.txt')
      with open(filename, 'w', encoding='utf-8') as f:
-         f.write('Unused attributes in ' + args.attributes_file + '\n')
+         f.write('Unused attributes in ' + attr_file + '\n')
          f.write(output + '\n')
      print(f'Results written to: {filename}')

+     if args.comment_out and output:
+         # Ask for confirmation before modifying the file
+         print(f'\nThis will comment out {len(unused)} unused attributes in: {attr_file}')
+         response = input('Continue? (y/n): ').strip().lower()
+         if response == 'y':
+             commented_count = comment_out_unused_attributes(attr_file, unused)
+             print(f'Commented out {commented_count} unused attributes in: {attr_file}')
+         else:
+             print('Operation cancelled.')
+
+     if args.remove:
+         # Ask for confirmation before modifying the file
+         if output:
+             print(f'\nThis will remove {len(unused)} unused attributes from: {attr_file}')
+             print('(Also removes any lines already marked with "// Unused")')
+         else:
+             print(f'\nThis will remove lines marked with "// Unused" from: {attr_file}')
+         response = input('Continue? (y/n): ').strip().lower()
+         if response == 'y':
+             removed_count = remove_unused_attributes(attr_file, unused if output else None)
+             print(f'Removed {removed_count} lines from: {attr_file}')
+         else:
+             print('Operation cancelled.')
+
+     return 0
+
  if __name__ == '__main__':
-     main()
+     import sys
+     sys.exit(main())
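
For context, the check behind find_unused_attributes(attr_file, '.') can be sketched from the docstring alone; the packaged implementation in doc_utils.unused_attributes is not shown in this diff, so the regexes below are assumptions:

import os
import re

# Matches definition lines such as ":version: 1.1" at the start of a line.
DEF_RE = re.compile(r'^:([\w-]+):', re.MULTILINE)

def unused_attributes(attr_file: str, root: str) -> list[str]:
    """Return attribute names defined in attr_file but never referenced
    as {name} in any .adoc file under root."""
    with open(attr_file, encoding='utf-8') as f:
        defined = set(DEF_RE.findall(f.read()))
    used = set()
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.endswith('.adoc'):
                path = os.path.join(dirpath, name)
                if os.path.islink(path):  # the docstring says symlinks are ignored
                    continue
                with open(path, encoding='utf-8') as f:
                    # Any {attr} reference in the file counts as a usage.
                    used.update(re.findall(r'\{([\w-]+)\}', f.read()))
    return sorted(defined - used)

Each name returned here is what the CLI prints as ":name: NOT USED", and it is the set that -c comments out or -r removes after confirmation.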