rolfedh-doc-utils 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- archive_unused_files.py +1 -0
- archive_unused_images.py +1 -0
- check_scannability.py +1 -0
- doc_utils/extract_link_attributes.py +117 -5
- doc_utils/spinner.py +119 -0
- doc_utils/unused_attributes.py +94 -6
- doc_utils/validate_links.py +576 -0
- extract_link_attributes.py +15 -1
- find_unused_attributes.py +53 -6
- format_asciidoc_spacing.py +1 -0
- replace_link_attributes.py +16 -9
- {rolfedh_doc_utils-0.1.10.dist-info → rolfedh_doc_utils-0.1.12.dist-info}/METADATA +3 -2
- rolfedh_doc_utils-0.1.12.dist-info/RECORD +26 -0
- {rolfedh_doc_utils-0.1.10.dist-info → rolfedh_doc_utils-0.1.12.dist-info}/entry_points.txt +1 -0
- {rolfedh_doc_utils-0.1.10.dist-info → rolfedh_doc_utils-0.1.12.dist-info}/top_level.txt +1 -0
- validate_links.py +208 -0
- rolfedh_doc_utils-0.1.10.dist-info/RECORD +0 -23
- {rolfedh_doc_utils-0.1.10.dist-info → rolfedh_doc_utils-0.1.12.dist-info}/WHEEL +0 -0
- {rolfedh_doc_utils-0.1.10.dist-info → rolfedh_doc_utils-0.1.12.dist-info}/licenses/LICENSE +0 -0
replace_link_attributes.py
CHANGED
|
@@ -19,6 +19,7 @@ from doc_utils.replace_link_attributes import (
|
|
|
19
19
|
replace_link_attributes_in_file,
|
|
20
20
|
find_adoc_files
|
|
21
21
|
)
|
|
22
|
+
from doc_utils.spinner import Spinner
|
|
22
23
|
|
|
23
24
|
|
|
24
25
|
def prompt_for_attributes_file(attributes_files: list[Path]) -> Optional[Path]:
|
|
@@ -120,30 +121,33 @@ def main():
|
|
|
120
121
|
print(f"Error: Specified attributes file not found: {attributes_file}")
|
|
121
122
|
sys.exit(1)
|
|
122
123
|
else:
|
|
123
|
-
|
|
124
|
+
spinner = Spinner("Searching for attributes.adoc files")
|
|
125
|
+
spinner.start()
|
|
124
126
|
attributes_files = find_attributes_files(repo_root)
|
|
127
|
+
spinner.stop()
|
|
125
128
|
attributes_file = prompt_for_attributes_file(attributes_files)
|
|
126
129
|
|
|
127
130
|
if not attributes_file:
|
|
128
131
|
print("Operation cancelled.")
|
|
129
132
|
sys.exit(0)
|
|
130
133
|
|
|
131
|
-
|
|
134
|
+
spinner = Spinner(f"Loading attributes from {attributes_file.name}")
|
|
135
|
+
spinner.start()
|
|
132
136
|
attributes = load_attributes(attributes_file)
|
|
133
137
|
|
|
134
138
|
if not attributes:
|
|
135
|
-
|
|
139
|
+
spinner.stop("No attributes found in the file", success=False)
|
|
136
140
|
sys.exit(1)
|
|
137
141
|
|
|
138
|
-
print(f"Found {len(attributes)} attributes")
|
|
139
|
-
|
|
140
142
|
# Resolve nested references
|
|
141
|
-
print("Resolving nested attribute references...")
|
|
142
143
|
attributes = resolve_nested_attributes(attributes)
|
|
144
|
+
spinner.stop(f"Loaded and resolved {len(attributes)} attributes")
|
|
143
145
|
|
|
144
146
|
# Find all AsciiDoc files
|
|
145
|
-
|
|
147
|
+
spinner = Spinner(f"Searching for .adoc files in {repo_root}")
|
|
148
|
+
spinner.start()
|
|
146
149
|
adoc_files = find_adoc_files(repo_root)
|
|
150
|
+
spinner.stop()
|
|
147
151
|
|
|
148
152
|
# Exclude the attributes file itself
|
|
149
153
|
adoc_files = [f for f in adoc_files if f != attributes_file]
|
|
@@ -157,15 +161,18 @@ def main():
|
|
|
157
161
|
total_replacements = 0
|
|
158
162
|
files_modified = 0
|
|
159
163
|
|
|
164
|
+
spinner = Spinner(f"Processing {len(adoc_files)} files")
|
|
165
|
+
spinner.start()
|
|
166
|
+
|
|
160
167
|
for file_path in adoc_files:
|
|
161
168
|
replacements = replace_link_attributes_in_file(file_path, attributes, args.dry_run)
|
|
162
169
|
if replacements > 0:
|
|
163
170
|
rel_path = file_path.relative_to(repo_root)
|
|
164
|
-
prefix = "[DRY RUN] " if args.dry_run else ""
|
|
165
|
-
print(f" {prefix}Modified {rel_path}: {replacements} replacements")
|
|
166
171
|
total_replacements += replacements
|
|
167
172
|
files_modified += 1
|
|
168
173
|
|
|
174
|
+
spinner.stop(f"Processed {len(adoc_files)} files")
|
|
175
|
+
|
|
169
176
|
# Summary
|
|
170
177
|
print(f"\nSummary:")
|
|
171
178
|
if args.dry_run:
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: rolfedh-doc-utils
|
|
3
|
-
Version: 0.1.10
|
|
3
|
+
Version: 0.1.12
|
|
4
4
|
Summary: CLI tools for AsciiDoc documentation projects
|
|
5
5
|
Author: Rolfe Dlugy-Hegwer
|
|
6
6
|
License: MIT License
|
|
@@ -79,9 +79,10 @@ pip install -e .
|
|
|
79
79
|
|
|
80
80
|
| Tool | Description | Usage |
|
|
81
81
|
|------|-------------|-------|
|
|
82
|
+
| **`validate-links`** [EXPERIMENTAL] | Validates all links in documentation, with URL transposition for preview environments | `validate-links --transpose "https://prod--https://preview"` |
|
|
82
83
|
| **`extract-link-attributes`** | Extracts link/xref macros with attributes into reusable definitions | `extract-link-attributes --dry-run` |
|
|
83
84
|
| **`replace-link-attributes`** | Resolves Vale LinkAttribute issues by replacing attributes in link URLs | `replace-link-attributes --dry-run` |
|
|
84
|
-
| **`format-asciidoc-spacing`** | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
|
|
85
|
+
| **`format-asciidoc-spacing`** [EXPERIMENTAL] | Standardizes spacing after headings and around includes | `format-asciidoc-spacing --dry-run modules/` |
|
|
85
86
|
| **`check-scannability`** | Analyzes readability (sentence/paragraph length) | `check-scannability --max-words 25` |
|
|
86
87
|
| **`archive-unused-files`** | Finds and archives unreferenced .adoc files | `archive-unused-files` (preview)<br>`archive-unused-files --archive` (execute) |
|
|
87
88
|
| **`archive-unused-images`** | Finds and archives unreferenced images | `archive-unused-images` (preview)<br>`archive-unused-images --archive` (execute) |
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
archive_unused_files.py,sha256=h7CRwSPBVCOQs0hn_ASD4EXz8QJFcAO2x3KX9FVhXNM,1974
|
|
2
|
+
archive_unused_images.py,sha256=4GSVPYkxqFoY-isy47P_1AhD1ziXgmajFiBGCtZ3olg,1564
|
|
3
|
+
check_scannability.py,sha256=MvGLW4UGGcx-jZLsVRYXpXNAIEQyJZZnsN99zJzbtyc,5178
|
|
4
|
+
extract_link_attributes.py,sha256=rp1yRYIWOEvU3l6lpN4b5rCBae5Q7bdBxEDQ9BNuFH8,2976
|
|
5
|
+
find_unused_attributes.py,sha256=IUJKJr_MzxBXqg9rafUs9Kwi8AbU0x-H0AVflc1dhCU,3288
|
|
6
|
+
format_asciidoc_spacing.py,sha256=_XpHqxYWm1AnZLUK_cDpfAJtsDCDF0b66m3opfYnIuU,3912
|
|
7
|
+
replace_link_attributes.py,sha256=ZkBqrrpIiYGccGMgRjDBrWQKgpfOzHIegURmcgTwaHg,6614
|
|
8
|
+
validate_links.py,sha256=409fTAyBGTUrp6iSWuJ9AXExcdz8dC_4QeA_RvCIhus,5845
|
|
9
|
+
doc_utils/__init__.py,sha256=qqZR3lohzkP63soymrEZPBGzzk6-nFzi4_tSffjmu_0,74
|
|
10
|
+
doc_utils/extract_link_attributes.py,sha256=onSe1AOlKHLH6t1nQ3T_DvDAhP9sbANhc6W05tAAgPg,19639
|
|
11
|
+
doc_utils/file_utils.py,sha256=fpTh3xx759sF8sNocdn_arsP3KAv8XA6cTQTAVIZiZg,4247
|
|
12
|
+
doc_utils/format_asciidoc_spacing.py,sha256=XnVJekaj39aDzjV3xFKl58flM41AaJzejxNYJIIAMz0,10139
|
|
13
|
+
doc_utils/replace_link_attributes.py,sha256=kBiePbxjQn3O2rzqmYY8Mqy_mJgZ6yw048vSZ5SSB5E,6587
|
|
14
|
+
doc_utils/scannability.py,sha256=XwlmHqDs69p_V36X7DLjPTy0DUoLszSGqYjJ9wE-3hg,982
|
|
15
|
+
doc_utils/spinner.py,sha256=lJg15qzODiKoR0G6uFIk2BdVNgn9jFexoTRUMrjiWvk,3554
|
|
16
|
+
doc_utils/topic_map_parser.py,sha256=tKcIO1m9r2K6dvPRGue58zqMr0O2zKU1gnZMzEE3U6o,4571
|
|
17
|
+
doc_utils/unused_adoc.py,sha256=2cbqcYr1os2EhETUU928BlPRlsZVSdI00qaMhqjSIqQ,5263
|
|
18
|
+
doc_utils/unused_attributes.py,sha256=EjTtWIKW_aXsR1JOgw5RSDVAqitJ_NfRMVOXVGaiWTY,5282
|
|
19
|
+
doc_utils/unused_images.py,sha256=nqn36Bbrmon2KlGlcaruNjJJvTQ8_9H0WU9GvCW7rW8,1456
|
|
20
|
+
doc_utils/validate_links.py,sha256=iBGXnwdeLlgIT3fo3v01ApT5k0X2FtctsvkrE6E3VMk,19610
|
|
21
|
+
rolfedh_doc_utils-0.1.12.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
|
|
22
|
+
rolfedh_doc_utils-0.1.12.dist-info/METADATA,sha256=Bzd64efcaXy4RZh9wkUxqlvr_Y1HmsuoocIbxWwaclE,7386
|
|
23
|
+
rolfedh_doc_utils-0.1.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
24
|
+
rolfedh_doc_utils-0.1.12.dist-info/entry_points.txt,sha256=2J4Ojc3kkuArpe2xcUOPc0LxSWCmnctvw8hy8zpnbO4,418
|
|
25
|
+
rolfedh_doc_utils-0.1.12.dist-info/top_level.txt,sha256=1w0JWD7w7gnM5Sga2K4fJieNZ7CHPTAf0ozYk5iIlmo,182
|
|
26
|
+
rolfedh_doc_utils-0.1.12.dist-info/RECORD,,
|
validate_links.py
ADDED
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Validate links in AsciiDoc documentation.
|
|
4
|
+
|
|
5
|
+
This tool checks all links in AsciiDoc files for validity, including:
|
|
6
|
+
- External HTTP/HTTPS links
|
|
7
|
+
- Internal cross-references (xref)
|
|
8
|
+
- Image paths
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import argparse
|
|
12
|
+
import sys
|
|
13
|
+
import json
|
|
14
|
+
from doc_utils.validate_links import LinkValidator, parse_transpositions, format_results
|
|
15
|
+
from doc_utils.spinner import Spinner
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def main():
|
|
19
|
+
"""Main entry point for the validate-links CLI tool."""
|
|
20
|
+
parser = argparse.ArgumentParser(
|
|
21
|
+
description='Validate links in AsciiDoc documentation',
|
|
22
|
+
formatter_class=argparse.RawDescriptionHelpFormatter,
|
|
23
|
+
epilog="""
|
|
24
|
+
Examples:
|
|
25
|
+
# Basic validation
|
|
26
|
+
validate-links
|
|
27
|
+
|
|
28
|
+
# Validate against preview environment
|
|
29
|
+
validate-links --transpose "https://docs.redhat.com--https://preview.docs.redhat.com"
|
|
30
|
+
|
|
31
|
+
# Multiple transpositions
|
|
32
|
+
validate-links \\
|
|
33
|
+
--transpose "https://docs.redhat.com--https://preview.docs.redhat.com" \\
|
|
34
|
+
--transpose "https://access.redhat.com--https://stage.access.redhat.com"
|
|
35
|
+
|
|
36
|
+
# With specific options
|
|
37
|
+
validate-links \\
|
|
38
|
+
--transpose "https://docs.example.com--https://preview.example.com" \\
|
|
39
|
+
--attributes-file common-attributes.adoc \\
|
|
40
|
+
--timeout 15 \\
|
|
41
|
+
--retry 3 \\
|
|
42
|
+
--parallel 20 \\
|
|
43
|
+
--exclude-domain localhost \\
|
|
44
|
+
--exclude-domain example.com
|
|
45
|
+
|
|
46
|
+
# Export results to JSON
|
|
47
|
+
validate-links --output report.json --format json
|
|
48
|
+
"""
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
parser.add_argument(
|
|
52
|
+
'--transpose',
|
|
53
|
+
action='append',
|
|
54
|
+
help='Transpose URLs from production to preview/staging (format: from_url--to_url)'
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
parser.add_argument(
|
|
58
|
+
'--attributes-file',
|
|
59
|
+
help='Path to the AsciiDoc attributes file'
|
|
60
|
+
)
|
|
61
|
+
|
|
62
|
+
parser.add_argument(
|
|
63
|
+
'--scan-dir',
|
|
64
|
+
action='append',
|
|
65
|
+
help='Directory to scan for .adoc files (can be used multiple times, default: current directory)'
|
|
66
|
+
)
|
|
67
|
+
|
|
68
|
+
parser.add_argument(
|
|
69
|
+
'--timeout',
|
|
70
|
+
type=int,
|
|
71
|
+
default=10,
|
|
72
|
+
help='Timeout in seconds for each URL check (default: 10)'
|
|
73
|
+
)
|
|
74
|
+
|
|
75
|
+
parser.add_argument(
|
|
76
|
+
'--retry',
|
|
77
|
+
type=int,
|
|
78
|
+
default=3,
|
|
79
|
+
help='Number of retries for failed URLs (default: 3)'
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
parser.add_argument(
|
|
83
|
+
'--parallel',
|
|
84
|
+
type=int,
|
|
85
|
+
default=10,
|
|
86
|
+
help='Number of parallel URL checks (default: 10)'
|
|
87
|
+
)
|
|
88
|
+
|
|
89
|
+
parser.add_argument(
|
|
90
|
+
'--cache-duration',
|
|
91
|
+
type=int,
|
|
92
|
+
default=3600,
|
|
93
|
+
help='Cache duration in seconds (default: 3600)'
|
|
94
|
+
)
|
|
95
|
+
|
|
96
|
+
parser.add_argument(
|
|
97
|
+
'--exclude-domain',
|
|
98
|
+
action='append',
|
|
99
|
+
dest='exclude_domains',
|
|
100
|
+
help='Domain to exclude from validation (can be used multiple times)'
|
|
101
|
+
)
|
|
102
|
+
|
|
103
|
+
parser.add_argument(
|
|
104
|
+
'--no-cache',
|
|
105
|
+
action='store_true',
|
|
106
|
+
help='Disable caching of validation results'
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
parser.add_argument(
|
|
110
|
+
'--output',
|
|
111
|
+
help='Output file for results'
|
|
112
|
+
)
|
|
113
|
+
|
|
114
|
+
parser.add_argument(
|
|
115
|
+
'--format',
|
|
116
|
+
choices=['text', 'json', 'junit'],
|
|
117
|
+
default='text',
|
|
118
|
+
help='Output format (default: text)'
|
|
119
|
+
)
|
|
120
|
+
|
|
121
|
+
parser.add_argument(
|
|
122
|
+
'-v', '--verbose',
|
|
123
|
+
action='store_true',
|
|
124
|
+
help='Show verbose output including warnings'
|
|
125
|
+
)
|
|
126
|
+
|
|
127
|
+
parser.add_argument(
|
|
128
|
+
'--fail-on-broken',
|
|
129
|
+
action='store_true',
|
|
130
|
+
help='Exit with error code if broken links are found'
|
|
131
|
+
)
|
|
132
|
+
|
|
133
|
+
args = parser.parse_args()
|
|
134
|
+
|
|
135
|
+
# Parse transpositions
|
|
136
|
+
transpositions = parse_transpositions(args.transpose)
|
|
137
|
+
|
|
138
|
+
# Show configuration
|
|
139
|
+
print("Validating links in documentation...")
|
|
140
|
+
if args.attributes_file:
|
|
141
|
+
print(f"Loading attributes from {args.attributes_file}")
|
|
142
|
+
if transpositions:
|
|
143
|
+
print("\nURL Transposition Rules:")
|
|
144
|
+
for from_url, to_url in transpositions:
|
|
145
|
+
print(f" {from_url} → {to_url}")
|
|
146
|
+
print()
|
|
147
|
+
|
|
148
|
+
# Create validator
|
|
149
|
+
validator = LinkValidator(
|
|
150
|
+
timeout=args.timeout,
|
|
151
|
+
retry=args.retry,
|
|
152
|
+
parallel=args.parallel,
|
|
153
|
+
cache_duration=args.cache_duration if not args.no_cache else 0,
|
|
154
|
+
transpositions=transpositions
|
|
155
|
+
)
|
|
156
|
+
|
|
157
|
+
try:
|
|
158
|
+
# Run validation
|
|
159
|
+
spinner = Spinner("Validating links")
|
|
160
|
+
spinner.start()
|
|
161
|
+
results = validator.validate_all(
|
|
162
|
+
scan_dirs=args.scan_dir,
|
|
163
|
+
attributes_file=args.attributes_file,
|
|
164
|
+
exclude_domains=args.exclude_domains
|
|
165
|
+
)
|
|
166
|
+
total = results['summary']['total']
|
|
167
|
+
valid = results['summary']['valid']
|
|
168
|
+
spinner.stop(f"Validated {total} links: {valid} valid")
|
|
169
|
+
|
|
170
|
+
# Format output
|
|
171
|
+
if args.format == 'json':
|
|
172
|
+
output = json.dumps(results, indent=2)
|
|
173
|
+
elif args.format == 'junit':
|
|
174
|
+
# TODO: Implement JUnit XML format
|
|
175
|
+
output = format_results(results, verbose=args.verbose)
|
|
176
|
+
else:
|
|
177
|
+
output = format_results(results, verbose=args.verbose)
|
|
178
|
+
|
|
179
|
+
# Save or print output
|
|
180
|
+
if args.output:
|
|
181
|
+
with open(args.output, 'w', encoding='utf-8') as f:
|
|
182
|
+
f.write(output)
|
|
183
|
+
print(f"Results saved to {args.output}")
|
|
184
|
+
# Still print summary to console
|
|
185
|
+
if args.format != 'text':
|
|
186
|
+
summary = results['summary']
|
|
187
|
+
print(f"\nSummary: {summary['valid']} valid, {summary['broken']} broken, "
|
|
188
|
+
f"{summary['warnings']} warnings")
|
|
189
|
+
else:
|
|
190
|
+
print(output)
|
|
191
|
+
|
|
192
|
+
# Exit code
|
|
193
|
+
if args.fail_on_broken and results['summary']['broken'] > 0:
|
|
194
|
+
sys.exit(1)
|
|
195
|
+
|
|
196
|
+
except KeyboardInterrupt:
|
|
197
|
+
print("\nValidation cancelled.")
|
|
198
|
+
sys.exit(1)
|
|
199
|
+
except Exception as e:
|
|
200
|
+
print(f"Error: {e}", file=sys.stderr)
|
|
201
|
+
if args.verbose:
|
|
202
|
+
import traceback
|
|
203
|
+
traceback.print_exc()
|
|
204
|
+
sys.exit(1)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
if __name__ == '__main__':
|
|
208
|
+
main()
|
|
@@ -1,23 +0,0 @@
|
|
|
1
|
-
archive_unused_files.py,sha256=KMC5a1WL3rZ5owoVnncvfpT1YeMKbVXq9giHvadDgbM,1936
|
|
2
|
-
archive_unused_images.py,sha256=PG2o3haovYckgfhoPhl6KRG_a9czyZuqlLkzkupKTCY,1526
|
|
3
|
-
check_scannability.py,sha256=gcM-vFXKHGP_yFBz7-V5xbXWhIMmtMzBYIGwP9CFbzI,5140
|
|
4
|
-
extract_link_attributes.py,sha256=utDM1FE-VEr649HhIH5BreXvxDNLnnAJO9dB5rs5f9Q,2535
|
|
5
|
-
find_unused_attributes.py,sha256=fk-K32eoCVHxoj7RiBNgSmX1arBLuwYfdSAOMc-wIx0,1677
|
|
6
|
-
format_asciidoc_spacing.py,sha256=ROp-cdMs2_hk8H4z5ljT0iDgGtsiECZ8TVjjcN_oOWE,3874
|
|
7
|
-
replace_link_attributes.py,sha256=vg_aufw7dKXvh_epCKRNq_hEBMU_9crZ_JyJPpxSMNk,6454
|
|
8
|
-
doc_utils/__init__.py,sha256=qqZR3lohzkP63soymrEZPBGzzk6-nFzi4_tSffjmu_0,74
|
|
9
|
-
doc_utils/extract_link_attributes.py,sha256=qBpJuTXNrhy15klpqC0iELZzcSLztEzMSmhEnKyQZT0,15574
|
|
10
|
-
doc_utils/file_utils.py,sha256=fpTh3xx759sF8sNocdn_arsP3KAv8XA6cTQTAVIZiZg,4247
|
|
11
|
-
doc_utils/format_asciidoc_spacing.py,sha256=XnVJekaj39aDzjV3xFKl58flM41AaJzejxNYJIIAMz0,10139
|
|
12
|
-
doc_utils/replace_link_attributes.py,sha256=kBiePbxjQn3O2rzqmYY8Mqy_mJgZ6yw048vSZ5SSB5E,6587
|
|
13
|
-
doc_utils/scannability.py,sha256=XwlmHqDs69p_V36X7DLjPTy0DUoLszSGqYjJ9wE-3hg,982
|
|
14
|
-
doc_utils/topic_map_parser.py,sha256=tKcIO1m9r2K6dvPRGue58zqMr0O2zKU1gnZMzEE3U6o,4571
|
|
15
|
-
doc_utils/unused_adoc.py,sha256=2cbqcYr1os2EhETUU928BlPRlsZVSdI00qaMhqjSIqQ,5263
|
|
16
|
-
doc_utils/unused_attributes.py,sha256=HBgmHelqearfWl3TTC2bZGiJytjLADIgiGQUNKqXXPg,1847
|
|
17
|
-
doc_utils/unused_images.py,sha256=nqn36Bbrmon2KlGlcaruNjJJvTQ8_9H0WU9GvCW7rW8,1456
|
|
18
|
-
rolfedh_doc_utils-0.1.10.dist-info/licenses/LICENSE,sha256=vLxtwMVOJA_hEy8b77niTkdmQI9kNJskXHq0dBS36e0,1075
|
|
19
|
-
rolfedh_doc_utils-0.1.10.dist-info/METADATA,sha256=Kk1Ur-SbE2XIP55NJ7Y5oVB-KNScnlADwmZyFSthTXo,7180
|
|
20
|
-
rolfedh_doc_utils-0.1.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
21
|
-
rolfedh_doc_utils-0.1.10.dist-info/entry_points.txt,sha256=aQtQRDwcdDN-VLBCnQBfmoozzQiaCUZ9dqcLLv8fCkM,381
|
|
22
|
-
rolfedh_doc_utils-0.1.10.dist-info/top_level.txt,sha256=ILTc2mA4sHdDp0GvKC8JXO1I_DBP7vvF5hn-PFkMcL8,167
|
|
23
|
-
rolfedh_doc_utils-0.1.10.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|