rolfedh-doc-utils 0.1.38__py3-none-any.whl → 0.1.40__py3-none-any.whl

This diff shows the content changes between two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only.
find_duplicate_content.py ADDED
@@ -0,0 +1,209 @@
+ """
+ Find Duplicate Content in AsciiDoc Files
+
+ Scans AsciiDoc files for duplicate and similar content blocks including:
+ - Recurring notes (NOTE, TIP, WARNING, IMPORTANT, CAUTION)
+ - Tables
+ - Step sequences (ordered lists)
+ - Code blocks
+
+ This tool helps identify content that could be refactored into reusable components.
+ """
+
+ import argparse
+ import os
+ import sys
+ from datetime import datetime
+ from doc_utils.duplicate_content import (
+     find_duplicates,
+     format_report,
+     generate_csv_report
+ )
+ from doc_utils.spinner import Spinner
+ from doc_utils.version_check import check_version_on_startup
+ from doc_utils.version import __version__
+
+
+ def main():
+     # Check for updates (non-blocking, won't interfere with tool operation)
+     check_version_on_startup()
+
+     parser = argparse.ArgumentParser(
+         description='Find duplicate and similar content in AsciiDoc files.',
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+   find-duplicate-content                    # Scan current directory, write txt report
+   find-duplicate-content ./docs             # Scan specific directory
+   find-duplicate-content -t note -t table   # Find only notes and tables
+   find-duplicate-content -s 0.7             # Include 70%+ similar content
+   find-duplicate-content --format csv       # Write CSV report to ./reports/
+   find-duplicate-content --no-output        # Display results without saving report
+         """
+     )
+
+     parser.add_argument(
+         'directory',
+         nargs='?',
+         default='.',
+         help='Directory to scan (default: current directory)'
+     )
+
+     parser.add_argument(
+         '-t', '--type',
+         dest='block_types',
+         action='append',
+         choices=['note', 'tip', 'warning', 'important', 'caution', 'table', 'steps', 'code'],
+         help='Block types to search for (can be specified multiple times). Default: all types'
+     )
+
+     parser.add_argument(
+         '-s', '--similarity',
+         type=float,
+         default=0.8,
+         metavar='THRESHOLD',
+         help='Minimum similarity threshold (0.0-1.0). Default: 0.8'
+     )
+
+     parser.add_argument(
+         '-m', '--min-length',
+         type=int,
+         default=50,
+         metavar='CHARS',
+         help='Minimum content length to consider. Default: 50 characters'
+     )
+
+     parser.add_argument(
+         '--exact-only',
+         action='store_true',
+         help='Only find exact duplicates (sets similarity to 1.0)'
+     )
+
+     parser.add_argument(
+         '-e', '--exclude-dir',
+         dest='exclude_dirs',
+         action='append',
+         default=[],
+         metavar='DIR',
+         help='Directory to exclude (can be specified multiple times)'
+     )
+
+     parser.add_argument(
+         '--no-content',
+         action='store_true',
+         help='Hide content preview in output'
+     )
+
+     parser.add_argument(
+         '--no-output',
+         action='store_true',
+         help='Do not write report to ./reports/ directory (report is written by default)'
+     )
+
+     parser.add_argument(
+         '--format',
+         choices=['txt', 'csv', 'json', 'md'],
+         default='txt',
+         help='Output format (default: txt)'
+     )
+
+     parser.add_argument(
+         '--version',
+         action='version',
+         version=f'%(prog)s {__version__}'
+     )
+
+     args = parser.parse_args()
+
+     # Validate arguments
+     if not os.path.isdir(args.directory):
+         print(f"Error: '{args.directory}' is not a valid directory")
+         return 1
+
+     if args.similarity < 0 or args.similarity > 1:
+         print("Error: Similarity threshold must be between 0.0 and 1.0")
+         return 1
+
+     # Set up parameters
+     similarity = 1.0 if args.exact_only else args.similarity
+     exclude_dirs = ['.git', '.archive', 'target', 'build', 'node_modules'] + args.exclude_dirs
+
+     # Build command line options summary
+     cmd_options = ['find-duplicate-content']
+     if args.directory != '.':
+         cmd_options.append(args.directory)
+     if args.block_types:
+         for bt in args.block_types:
+             cmd_options.append(f'-t {bt}')
+     if args.exact_only:
+         cmd_options.append('--exact-only')
+     elif args.similarity != 0.8:
+         cmd_options.append(f'-s {args.similarity}')
+     if args.min_length != 50:
+         cmd_options.append(f'-m {args.min_length}')
+     for ed in args.exclude_dirs:
+         cmd_options.append(f'-e {ed}')
+     if args.no_content:
+         cmd_options.append('--no-content')
+     if args.no_output:
+         cmd_options.append('--no-output')
+     if args.format != 'txt':
+         cmd_options.append(f'--format {args.format}')
+     cmd_line = ' '.join(cmd_options)
+
+     # Run analysis
+     spinner = Spinner(f"Scanning AsciiDoc files in {args.directory}")
+     spinner.start()
+
+     try:
+         duplicate_groups = find_duplicates(
+             root_dir=args.directory,
+             min_similarity=similarity,
+             min_content_length=args.min_length,
+             exclude_dirs=exclude_dirs,
+             block_types=args.block_types
+         )
+     except Exception as e:
+         spinner.stop()
+         print(f"Error: {e}")
+         return 1
+
+     spinner.stop(f"Found {len(duplicate_groups)} groups of duplicate content")
+
+     # Print command line options used
+     print(f"\nCommand: {cmd_line}")
+     print(f"Directory: {os.path.abspath(args.directory)}\n")
+
+     # Generate report based on format
+     if args.format == 'csv':
+         report = generate_csv_report(duplicate_groups)
+     else:
+         report = format_report(
+             duplicate_groups,
+             show_content=not args.no_content
+         )
+
+     print(report)
+
+     # Write report file by default (unless --no-output)
+     if not args.no_output and duplicate_groups:
+         reports_dir = './reports'
+         os.makedirs(reports_dir, exist_ok=True)
+
+         timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+         filename = f'{reports_dir}/duplicate-content_{timestamp}.{args.format}'
+
+         with open(filename, 'w', encoding='utf-8') as f:
+             f.write(f"Duplicate content report\n")
+             f.write(f"Command: {cmd_line}\n")
+             f.write(f"Directory: {os.path.abspath(args.directory)}\n")
+             f.write(f"Generated: {datetime.now().isoformat()}\n\n")
+             f.write(report)
+
+         print(f"\nReport written to: {filename}")
+
+     return 0
+
+
+ if __name__ == '__main__':
+     sys.exit(main())
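For context, the new CLI above is a thin wrapper over `doc_utils.duplicate_content`. Below is a minimal sketch of calling that layer directly, assuming only the `find_duplicates` and `format_report` signatures visible in the diff; the `./docs` path and the `note`/`table` filter are illustrative.

    from doc_utils.duplicate_content import find_duplicates, format_report

    # Scan ./docs (illustrative path) for blocks that are at least 80% similar,
    # passing the same keyword arguments the CLI passes above.
    groups = find_duplicates(
        root_dir='./docs',
        min_similarity=0.8,
        min_content_length=50,
        exclude_dirs=['.git', '.archive', 'target', 'build', 'node_modules'],
        block_types=['note', 'table'],  # None would scan all supported types
    )
    print(format_report(groups, show_content=True))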
find_duplicate_includes.py ADDED
@@ -0,0 +1,198 @@
+ #!/usr/bin/env python3
+ """
+ Find AsciiDoc files that are included more than once.
+
+ Scans AsciiDoc files for include:: macros and identifies files that are
+ included from multiple locations, helping identify opportunities for
+ content reuse or potential maintenance issues.
+
+ Usage:
+     find-duplicate-includes [directory] [options]
+ """
+
+ import argparse
+ import os
+ import sys
+ from datetime import datetime
+
+ from doc_utils.duplicate_includes import (
+     DEFAULT_COMMON_INCLUDES,
+     DEFAULT_EXCLUDE_DIRS,
+     find_duplicate_includes,
+     format_txt_report,
+     format_csv_report,
+     format_json_report,
+     format_md_report,
+ )
+
+
+ def build_cmd_line(args: argparse.Namespace) -> str:
+     """Reconstruct the command line for display."""
+     parts = ['find-duplicate-includes']
+
+     if args.directory != '.':
+         parts.append(args.directory)
+
+     if args.include_common:
+         parts.append('--include-common')
+
+     for d in (args.exclude_dir or []):
+         parts.append(f'-e {d}')
+
+     for f in (args.exclude_file or []):
+         parts.append(f'--exclude-file {f}')
+
+     if args.format != 'txt':
+         parts.append(f'--format {args.format}')
+
+     if args.no_output:
+         parts.append('--no-output')
+
+     return ' '.join(parts)
+
+
+ def main():
+     parser = argparse.ArgumentParser(
+         description='Find AsciiDoc files that are included more than once.',
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+   # Scan current directory
+   find-duplicate-includes
+
+   # Scan a specific directory
+   find-duplicate-includes ./docs
+
+   # Include common files (attributes.adoc, etc.) in results
+   find-duplicate-includes --include-common
+
+   # Exclude specific directories
+   find-duplicate-includes -e archive -e drafts
+
+   # Generate CSV report
+   find-duplicate-includes --format csv
+
+   # Display only, no report file
+   find-duplicate-includes --no-output
+         """
+     )
+
+     parser.add_argument(
+         'directory',
+         nargs='?',
+         default='.',
+         help='Directory to scan (default: current directory)'
+     )
+     parser.add_argument(
+         '--include-common',
+         action='store_true',
+         help='Include common files (attributes.adoc, etc.) in results'
+     )
+     parser.add_argument(
+         '-e', '--exclude-dir',
+         action='append',
+         metavar='DIR',
+         help='Directory to exclude (can be repeated)'
+     )
+     parser.add_argument(
+         '--exclude-file',
+         action='append',
+         metavar='FILE',
+         help='File to exclude (can be repeated)'
+     )
+     parser.add_argument(
+         '--no-output',
+         action='store_true',
+         help='Do not write report file (stdout only)'
+     )
+     parser.add_argument(
+         '--format',
+         choices=['txt', 'csv', 'json', 'md'],
+         default='txt',
+         help='Output format (default: txt)'
+     )
+
+     args = parser.parse_args()
+
+     # Validate directory
+     if not os.path.isdir(args.directory):
+         print(f"Error: '{args.directory}' is not a valid directory", file=sys.stderr)
+         sys.exit(1)
+
+     # Build exclusion sets
+     exclude_dirs = set(DEFAULT_EXCLUDE_DIRS)
+     if args.exclude_dir:
+         exclude_dirs.update(args.exclude_dir)
+
+     exclude_files = set()
+     if args.exclude_file:
+         exclude_files.update(args.exclude_file)
+
+     # Build command line for display
+     cmd_line = build_cmd_line(args)
+
+     # Find duplicates
+     duplicates, total_files, excluded_common = find_duplicate_includes(
+         directory=args.directory,
+         exclude_dirs=exclude_dirs,
+         exclude_files=exclude_files,
+         include_common=args.include_common,
+         common_includes=DEFAULT_COMMON_INCLUDES
+     )
+
+     # Format report
+     formatters = {
+         'txt': format_txt_report,
+         'csv': format_csv_report,
+         'json': format_json_report,
+         'md': format_md_report,
+     }
+
+     formatter = formatters[args.format]
+     report = formatter(duplicates, total_files, excluded_common, args.directory, cmd_line)
+
+     # Output summary to stdout
+     if duplicates:
+         print(f"\n\u2713 Found {len(duplicates)} files included more than once")
+     else:
+         if excluded_common:
+             print(f"\n\u2713 No unexpected duplicates found ({excluded_common} common files excluded)")
+         else:
+             print("\n\u2713 No files are included more than once")
+
+     print(f"\nCommand: {cmd_line}")
+     print(f"Directory: {os.path.abspath(args.directory)}")
+     print(f"Files scanned: {total_files}\n")
+
+     # Print report content
+     if args.format == 'txt':
+         # Skip header lines already printed
+         lines = report.split('\n')
+         # Find where the actual results start (after the header)
+         start = 0
+         for i, line in enumerate(lines):
+             if line.startswith('=') or line.startswith('No ') or line.startswith('Found '):
+                 start = i
+                 break
+         print('\n'.join(lines[start:]))
+     else:
+         print(report)
+
+     # Write report file
+     if not args.no_output and duplicates:
+         reports_dir = './reports'
+         os.makedirs(reports_dir, exist_ok=True)
+
+         timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+         filename = f'{reports_dir}/duplicate-includes_{timestamp}.{args.format}'
+
+         with open(filename, 'w', encoding='utf-8') as f:
+             f.write(report)
+
+         print(f"\nReport written to: {filename}")
+
+     return 1 if duplicates else 0
+
+
+ if __name__ == '__main__':
+     sys.exit(main())
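Likewise, `find_duplicate_includes` and the four formatters can be driven without the CLI. A sketch assuming only the call signatures shown above; the directory and the command string passed to the formatter are illustrative.

    from doc_utils.duplicate_includes import (
        DEFAULT_COMMON_INCLUDES,
        DEFAULT_EXCLUDE_DIRS,
        find_duplicate_includes,
        format_json_report,
    )

    # Returns (duplicates, total_files, excluded_common), as unpacked in main() above.
    duplicates, total_files, excluded_common = find_duplicate_includes(
        directory='./docs',  # illustrative path
        exclude_dirs=set(DEFAULT_EXCLUDE_DIRS),
        exclude_files=set(),
        include_common=False,
        common_includes=DEFAULT_COMMON_INCLUDES,
    )
    print(format_json_report(duplicates, total_files, excluded_common,
                             './docs', 'find-duplicate-includes ./docs --format json'))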
find_unused_attributes.py CHANGED
@@ -12,7 +12,7 @@ import argparse
  import os
  import sys
  from datetime import datetime
- from doc_utils.unused_attributes import find_unused_attributes, find_attributes_files, select_attributes_file, comment_out_unused_attributes
+ from doc_utils.unused_attributes import find_unused_attributes, find_attributes_files, select_attributes_file, comment_out_unused_attributes, remove_unused_attributes
  from doc_utils.spinner import Spinner
  from doc_utils.version_check import check_version_on_startup
  from doc_utils.version import __version__
@@ -28,6 +28,7 @@ def main():
      )
      parser.add_argument('-o', '--output', action='store_true', help='Write results to a timestamped txt file in your home directory.')
      parser.add_argument('-c', '--comment-out', action='store_true', help='Comment out unused attributes in the attributes file with "// Unused".')
+     parser.add_argument('-r', '--remove', action='store_true', help='Remove unused attributes from the attributes file. Also removes lines already marked with "// Unused".')
      parser.add_argument('--version', action='version', version=f'%(prog)s {__version__}')
      args = parser.parse_args()

@@ -97,6 +98,20 @@ def main():
          else:
              print('Operation cancelled.')

+     if args.remove:
+         # Ask for confirmation before modifying the file
+         if output:
+             print(f'\nThis will remove {len(unused)} unused attributes from: {attr_file}')
+             print('(Also removes any lines already marked with "// Unused")')
+         else:
+             print(f'\nThis will remove lines marked with "// Unused" from: {attr_file}')
+         response = input('Continue? (y/n): ').strip().lower()
+         if response == 'y':
+             removed_count = remove_unused_attributes(attr_file, unused if output else None)
+             print(f'Removed {removed_count} lines from: {attr_file}')
+         else:
+             print('Operation cancelled.')
+
      return 0

  if __name__ == '__main__':
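The new `-r/--remove` option reduces to a single helper call. A non-interactive sketch, assuming the `remove_unused_attributes(path, unused_or_None)` signature and return value shown in the diff; the file path is illustrative.

    from doc_utils.unused_attributes import remove_unused_attributes

    attr_file = 'attributes.adoc'  # illustrative path

    # Passing a list removes those unused attributes (plus lines already marked
    # "// Unused"); passing None removes only the marked lines, mirroring the
    # `unused if output else None` call in main() above.
    removed_count = remove_unused_attributes(attr_file, None)
    print(f'Removed {removed_count} lines from: {attr_file}')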
inventory_conditionals.py ADDED
@@ -0,0 +1,53 @@
+ #!/usr/bin/env python3
+ """
+ CLI tool to create an inventory of AsciiDoc conditional directives.
+
+ Scans .adoc files for ifdef, ifndef, endif, and ifeval directives
+ and creates a timestamped inventory file.
+
+ Usage:
+     inventory-conditionals [directory] [-o OUTPUT_DIR]
+
+ If no directory is specified, the current working directory is used.
+ """
+
+ import argparse
+ from pathlib import Path
+
+ from doc_utils.inventory_conditionals import create_inventory
+
+
+ def main():
+     parser = argparse.ArgumentParser(
+         description='Create an inventory of AsciiDoc conditional directives.'
+     )
+     parser.add_argument(
+         'directory',
+         nargs='?',
+         default='.',
+         help='Directory to scan for .adoc files (default: current directory)'
+     )
+     parser.add_argument(
+         '-o', '--output-dir',
+         default=None,
+         help='Directory to write the inventory file (default: current directory)'
+     )
+
+     args = parser.parse_args()
+
+     directory = Path(args.directory).resolve()
+     if not directory.is_dir():
+         print(f"Error: {directory} is not a valid directory")
+         return 1
+
+     output_dir = Path(args.output_dir).resolve() if args.output_dir else Path.cwd()
+
+     print(f"Scanning for .adoc files in: {directory}")
+     output_file = create_inventory(directory, output_dir)
+     print(f"Inventory written to: {output_file}")
+
+     return 0
+
+
+ if __name__ == '__main__':
+     exit(main())
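The library entry point behind this CLI takes the scan directory and output directory as `Path` objects and returns the path of the inventory file it wrote. A minimal sketch with illustrative paths:

    from pathlib import Path

    from doc_utils.inventory_conditionals import create_inventory

    # Scan ~/docs for ifdef/ifndef/endif/ifeval directives and write a
    # timestamped inventory file into ~/reports (both paths illustrative).
    output_file = create_inventory(Path.home() / 'docs', Path.home() / 'reports')
    print(f"Inventory written to: {output_file}")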
METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: rolfedh-doc-utils
- Version: 0.1.38
+ Version: 0.1.40
  Summary: CLI tools for AsciiDoc documentation projects
  Author: Rolfe Dlugy-Hegwer
  License: MIT License
@@ -103,6 +103,7 @@ doc-utils --version # Show version
  | **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
  | **`archive-unused-images`** | Finds and archives unreferenced images | `archive-unused-images` (preview)<br>`archive-unused-images --archive` (execute) |
  | **`find-unused-attributes`** | Identifies unused attribute definitions | `find-unused-attributes attributes.adoc` |
+ | **`inventory-conditionals`** | Creates inventory of `ifdef`/`ifndef` conditionals | `inventory-conditionals ~/docs -o ~/reports/` |
  | **`convert-callouts-to-deflist`** | Converts callout-style annotations to definition list format | `convert-callouts-to-deflist --dry-run modules/` |

  ## 📖 Documentation
RECORD CHANGED
@@ -5,11 +5,15 @@ check_scannability.py,sha256=O6ROr-e624jVPvPpASpsWo0gTfuCFpA2mTSX61BjAEI,5478
  check_source_directives.py,sha256=JiIvn_ph9VKPMH4zg-aSsuIGQZcnI_imj7rZLLE04L8,3660
  convert_callouts_interactive.py,sha256=4PjiVIOWxNJiJLQuBHT3x6rE46-hgfFHSaoo5quYIs8,22889
  convert_callouts_to_deflist.py,sha256=BoqW5_GkQ-KqNzn4vmE6lsQosrPV0lkB-bfAx3dzyMw,25886
+ convert_freemarker_to_asciidoc.py,sha256=ki0bFDPWxl9aUHK_-xqffIKF4KJYMXA8S4XLG_mOA0U,10097
  convert_tables_to_deflists.py,sha256=PIP6xummuMqC3aSzahKKRBYahes_j5ZpHp_-k6BjurY,15599
  doc_utils_cli.py,sha256=J3CE7cTDDCRGkhAknYejNWHhk5t9YFGt27WDVfR98Xk,5111
  extract_link_attributes.py,sha256=wR2SmR2la-jR6DzDbas2PoNONgRZ4dZ6aqwzkwEv8Gs,3516
- find_unused_attributes.py,sha256=77CxFdm72wj6SO81w-auMdDjnvF83jWy_qaM7DsAtBw,4263
+ find_duplicate_content.py,sha256=iYWekmriItXWSd8nBnIQN_FoZkv6quPJNL0qjv6UxUA,6343
+ find_duplicate_includes.py,sha256=sQaVLOe4Ksc3t08_A_2GaLMwQCgKe9Nsr8c3ipp1Ph0,5456
+ find_unused_attributes.py,sha256=AQVJsvRRgGsDjOZClcvJRQ5i5H2YrClcR-1nRLVBzI8,5140
  format_asciidoc_spacing.py,sha256=nmWpw2dgwhd81LXyznq0rT8w6Z7cNRyGtPJGRyKFRdc,4212
+ inventory_conditionals.py,sha256=vLWEDTj9MbqUnA_iw4g-HEVX47fSG8tfd4KpSJKg6kA,1416
  replace_link_attributes.py,sha256=Cpc4E-j9j-4_y0LOstAKYOPl02Ln_2bGNIeqp3ZVCdA,7624
  validate_links.py,sha256=lWuK8sgfiFdfcUdSVAt_5U9JHVde_oa6peSUlBQtsac,6145
  callout_lib/__init__.py,sha256=8B82N_z4D1LaZVYgd5jZR53QAabtgPzADOyGlnvihj0,665
@@ -19,24 +23,28 @@ callout_lib/converter_deflist.py,sha256=Ocr3gutTo_Sl_MkzethZH1UO6mCDEcuExGMZF5Mf
  callout_lib/detector.py,sha256=S0vZDa4zhTSn6Kv0hWfG56W-5srGxUc-nvpLe_gIx-A,15971
  callout_lib/table_parser.py,sha256=ZucisADE8RDAk5HtIrttaPgBi6Hf8ZUpw7KzfbcmEjc,31450
  doc_utils/__init__.py,sha256=qqZR3lohzkP63soymrEZPBGzzk6-nFzi4_tSffjmu_0,74
+ doc_utils/convert_freemarker_to_asciidoc.py,sha256=UGQ7iS_9bkVdDMAWBORXbK0Q5mLPmDs1cDJqoR4LLH8,22491
+ doc_utils/duplicate_content.py,sha256=rFrIuiDE5CqWQyL7wTLL-GlrGVDNs1fsq36eQIsCMug,14580
+ doc_utils/duplicate_includes.py,sha256=8hpL7fq_pHcKMS0C50LTwTyzqth39nMQ9Lz67gie8b0,10654
  doc_utils/extract_link_attributes.py,sha256=U0EvPZReJQigNfbT-icBsVT6Li64hYki5W7MQz6qqbc,22743
  doc_utils/file_utils.py,sha256=fpTh3xx759sF8sNocdn_arsP3KAv8XA6cTQTAVIZiZg,4247
  doc_utils/format_asciidoc_spacing.py,sha256=RL2WU_dG_UfGL01LnevcyJfKsvYy_ogNyeoVX-Fyqks,13579
+ doc_utils/inventory_conditionals.py,sha256=PSrdmeBHbpayvXgaRryqvjUlLZYryPgU9js8IBYqB7g,5486
  doc_utils/missing_source_directive.py,sha256=X3Acn0QJTk6XjmBXhGus5JAjlIitCiicCRE3fslifyw,8048
  doc_utils/replace_link_attributes.py,sha256=gmAs68_njBqEz-Qni-UGgeYEDTMxlTWk_IOm76FONNE,7279
  doc_utils/scannability.py,sha256=XwlmHqDs69p_V36X7DLjPTy0DUoLszSGqYjJ9wE-3hg,982
  doc_utils/spinner.py,sha256=lJg15qzODiKoR0G6uFIk2BdVNgn9jFexoTRUMrjiWvk,3554
  doc_utils/topic_map_parser.py,sha256=tKcIO1m9r2K6dvPRGue58zqMr0O2zKU1gnZMzEE3U6o,4571
  doc_utils/unused_adoc.py,sha256=LPQWPGEOizXECxepk7E_5cjTVvKn6RXQYTWG97Ps5VQ,9077
- doc_utils/unused_attributes.py,sha256=OHyAdaBD7aNo357B0SLBN5NC_jNY5TWXMwgtfJNh3X8,7621
+ doc_utils/unused_attributes.py,sha256=2UmqdXd5ogaPtj9_teApM0IlkdCmzBZNRh7XXrVYJOk,9032
  doc_utils/unused_images.py,sha256=hL8Qrik9QCkVh54eBLuNczRS9tMnsqIEfavNamM1UeQ,5664
  doc_utils/validate_links.py,sha256=iBGXnwdeLlgIT3fo3v01ApT5k0X2FtctsvkrE6E3VMk,19610
- doc_utils/version.py,sha256=zVnktTYITGhLqPNoyXbSnWi5bQassZ3M9S4LgDCGD-E,203
+ doc_utils/version.py,sha256=SpBhN4XHbHDf1DgElG_ucDzHppRDj0mgi0TKjJ9LZcA,203
  doc_utils/version_check.py,sha256=-31Y6AN0KGi_CUCAVOOhf6bPO3r7SQIXPxxeffLAF0w,7535
  doc_utils/warnings_report.py,sha256=20yfwqBjOprfFhQwCujbcsvjJCbHHhmH84uAujm-y-o,8877
- rolfedh_doc_utils-0.1.38.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
- rolfedh_doc_utils-0.1.38.dist-info/METADATA,sha256=gkxP50nyURTB-KhxNcT9rlDP77xVOXaRts6F1J6Dvus,8520
- rolfedh_doc_utils-0.1.38.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- rolfedh_doc_utils-0.1.38.dist-info/entry_points.txt,sha256=SpOivexG_Bx1MWn4yC1UUXhttcXqZG851kejOAa3RFQ,744
- rolfedh_doc_utils-0.1.38.dist-info/top_level.txt,sha256=FdhF5liH3KqypcFiLU-3GJdXJTIGwJO_UBAcOnC1yTo,338
- rolfedh_doc_utils-0.1.38.dist-info/RECORD,,
+ rolfedh_doc_utils-0.1.40.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
+ rolfedh_doc_utils-0.1.40.dist-info/METADATA,sha256=ihbfWFiiKMkKJIIQiAshaIF2pvUICaSd3dTSnatqiKo,8654
+ rolfedh_doc_utils-0.1.40.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+ rolfedh_doc_utils-0.1.40.dist-info/entry_points.txt,sha256=IQlgrL_EV-sqMJyhFGJnVO6F2Ofk9dIxrUmsM8Gwfzk,974
+ rolfedh_doc_utils-0.1.40.dist-info/top_level.txt,sha256=T9C7VhOScdaqnOl4OASCar9Nh2EUnPmimj50sMY0LyA,439
+ rolfedh_doc_utils-0.1.40.dist-info/RECORD,,
WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.1)
  Root-Is-Purelib: true
  Tag: py3-none-any

entry_points.txt CHANGED
@@ -6,10 +6,14 @@ check-scannability = check_scannability:main
  check-source-directives = check_source_directives:main
  convert-callouts-interactive = convert_callouts_interactive:main
  convert-callouts-to-deflist = convert_callouts_to_deflist:main
+ convert-freemarker-to-asciidoc = convert_freemarker_to_asciidoc:main
  convert-tables-to-deflists = convert_tables_to_deflists:main
  doc-utils = doc_utils_cli:main
  extract-link-attributes = extract_link_attributes:main
+ find-duplicate-content = find_duplicate_content:main
+ find-duplicate-includes = find_duplicate_includes:main
  find-unused-attributes = find_unused_attributes:main
  format-asciidoc-spacing = format_asciidoc_spacing:main
+ inventory-conditionals = inventory_conditionals:main
  replace-link-attributes = replace_link_attributes:main
  validate-links = validate_links:main
top_level.txt CHANGED
@@ -6,11 +6,15 @@ check_scannability
  check_source_directives
  convert_callouts_interactive
  convert_callouts_to_deflist
+ convert_freemarker_to_asciidoc
  convert_tables_to_deflists
  doc_utils
  doc_utils_cli
  extract_link_attributes
+ find_duplicate_content
+ find_duplicate_includes
  find_unused_attributes
  format_asciidoc_spacing
+ inventory_conditionals
  replace_link_attributes
  validate_links