aiox-core 5.0.0 → 5.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.aiox-core/data/entity-registry.yaml +5297 -1814
- package/.aiox-core/data/registry-update-log.jsonl +2 -0
- package/.aiox-core/development/templates/service-template/README.md.hbs +158 -158
- package/.aiox-core/development/templates/service-template/__tests__/index.test.ts.hbs +237 -237
- package/.aiox-core/development/templates/service-template/client.ts.hbs +403 -403
- package/.aiox-core/development/templates/service-template/errors.ts.hbs +182 -182
- package/.aiox-core/development/templates/service-template/index.ts.hbs +120 -120
- package/.aiox-core/development/templates/service-template/package.json.hbs +87 -87
- package/.aiox-core/development/templates/service-template/types.ts.hbs +145 -145
- package/.aiox-core/development/templates/squad-template/LICENSE +21 -21
- package/.aiox-core/infrastructure/scripts/tool-resolver.js +4 -4
- package/.aiox-core/infrastructure/templates/aiox-sync.yaml.template +182 -182
- package/.aiox-core/infrastructure/templates/coderabbit.yaml.template +279 -279
- package/.aiox-core/infrastructure/templates/github-workflows/ci.yml.template +169 -169
- package/.aiox-core/infrastructure/templates/github-workflows/pr-automation.yml.template +330 -330
- package/.aiox-core/infrastructure/templates/github-workflows/release.yml.template +196 -196
- package/.aiox-core/infrastructure/templates/gitignore/gitignore-aiox-base.tmpl +63 -63
- package/.aiox-core/infrastructure/templates/gitignore/gitignore-brownfield-merge.tmpl +18 -18
- package/.aiox-core/infrastructure/templates/gitignore/gitignore-node.tmpl +85 -85
- package/.aiox-core/infrastructure/templates/gitignore/gitignore-python.tmpl +145 -145
- package/.aiox-core/install-manifest.yaml +58 -58
- package/.aiox-core/local-config.yaml.template +71 -71
- package/.aiox-core/monitor/hooks/lib/__init__.py +1 -1
- package/.aiox-core/monitor/hooks/lib/enrich.py +58 -58
- package/.aiox-core/monitor/hooks/lib/send_event.py +47 -47
- package/.aiox-core/monitor/hooks/notification.py +29 -29
- package/.aiox-core/monitor/hooks/post_tool_use.py +45 -45
- package/.aiox-core/monitor/hooks/pre_compact.py +29 -29
- package/.aiox-core/monitor/hooks/pre_tool_use.py +40 -40
- package/.aiox-core/monitor/hooks/stop.py +29 -29
- package/.aiox-core/monitor/hooks/subagent_stop.py +29 -29
- package/.aiox-core/monitor/hooks/user_prompt_submit.py +38 -38
- package/.aiox-core/product/templates/adr.hbs +125 -125
- package/.aiox-core/product/templates/dbdr.hbs +241 -241
- package/.aiox-core/product/templates/engine/elicitation.js +2 -3
- package/.aiox-core/product/templates/epic.hbs +212 -212
- package/.aiox-core/product/templates/pmdr.hbs +186 -186
- package/.aiox-core/product/templates/prd-v2.0.hbs +216 -216
- package/.aiox-core/product/templates/prd.hbs +201 -201
- package/.aiox-core/product/templates/story.hbs +263 -263
- package/.aiox-core/product/templates/task.hbs +170 -170
- package/.aiox-core/product/templates/tmpl-comment-on-examples.sql +158 -158
- package/.aiox-core/product/templates/tmpl-migration-script.sql +91 -91
- package/.aiox-core/product/templates/tmpl-rls-granular-policies.sql +104 -104
- package/.aiox-core/product/templates/tmpl-rls-kiss-policy.sql +10 -10
- package/.aiox-core/product/templates/tmpl-rls-roles.sql +135 -135
- package/.aiox-core/product/templates/tmpl-rls-simple.sql +77 -77
- package/.aiox-core/product/templates/tmpl-rls-tenant.sql +152 -152
- package/.aiox-core/product/templates/tmpl-rollback-script.sql +77 -77
- package/.aiox-core/product/templates/tmpl-seed-data.sql +140 -140
- package/.aiox-core/product/templates/tmpl-smoke-test.sql +16 -16
- package/.aiox-core/product/templates/tmpl-staging-copy-merge.sql +139 -139
- package/.aiox-core/product/templates/tmpl-stored-proc.sql +140 -140
- package/.aiox-core/product/templates/tmpl-trigger.sql +152 -152
- package/.aiox-core/product/templates/tmpl-view-materialized.sql +133 -133
- package/.aiox-core/product/templates/tmpl-view.sql +177 -177
- package/.aiox-core/scripts/pm.sh +0 -0
- package/.claude/hooks/code-intel-pretool.cjs +107 -0
- package/.claude/hooks/enforce-architecture-first.py +196 -196
- package/.claude/hooks/mind-clone-governance.py +192 -192
- package/.claude/hooks/read-protection.py +151 -151
- package/.claude/hooks/slug-validation.py +176 -176
- package/.claude/hooks/sql-governance.py +182 -182
- package/.claude/hooks/write-path-validation.py +194 -194
- package/LICENSE +33 -33
- package/bin/aiox-graph.js +0 -0
- package/bin/aiox-minimal.js +0 -0
- package/bin/aiox.js +0 -0
- package/docs/guides/aios-workflows/README.md +247 -0
- package/docs/guides/aios-workflows/bob-orchestrator-workflow.md +1536 -0
- package/package.json +1 -1
- package/packages/aiox-install/bin/aiox-install.js +0 -0
- package/packages/aiox-install/bin/edmcp.js +0 -0
- package/packages/aiox-pro-cli/bin/aiox-pro.js +0 -0
- package/packages/installer/src/wizard/pro-setup.js +210 -123
- package/pro/README.md +66 -0
- package/pro/license/degradation.js +220 -0
- package/pro/license/errors.js +450 -0
- package/pro/license/feature-gate.js +354 -0
- package/pro/license/index.js +181 -0
- package/pro/license/license-api.js +679 -0
- package/pro/license/license-cache.js +523 -0
- package/pro/license/license-crypto.js +303 -0
- package/scripts/check-markdown-links.py +352 -352
- package/scripts/dashboard-parallel-dev.sh +0 -0
- package/scripts/dashboard-parallel-phase3.sh +0 -0
- package/scripts/dashboard-parallel-phase4.sh +0 -0
- package/scripts/glue/README.md +355 -0
- package/scripts/glue/compose-agent-prompt.cjs +362 -0
- package/scripts/install-monitor-hooks.sh +0 -0
- package/.aiox-core/lib/build.json +0 -1
|
@@ -1,352 +1,352 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
"""
|
|
3
|
-
Markdown Link Checker for AIOX Documentation
|
|
4
|
-
|
|
5
|
-
Validates internal markdown links and tracks documentation status.
|
|
6
|
-
Inspired by Obsidian's broken link checker.
|
|
7
|
-
|
|
8
|
-
Usage:
|
|
9
|
-
python scripts/check-markdown-links.py # Default report
|
|
10
|
-
python scripts/check-markdown-links.py --json # JSON output for CI
|
|
11
|
-
python scripts/check-markdown-links.py --fix # Auto-fix broken links (add coming soon)
|
|
12
|
-
python scripts/check-markdown-links.py --summary # Quick summary only
|
|
13
|
-
|
|
14
|
-
Exit codes:
|
|
15
|
-
0 - All links valid (or only coming soon)
|
|
16
|
-
1 - Broken links found (needs attention)
|
|
17
|
-
2 - Incorrect markings found (exists but marked coming soon)
|
|
18
|
-
"""
|
|
19
|
-
|
|
20
|
-
import argparse
|
|
21
|
-
import json
|
|
22
|
-
import os
|
|
23
|
-
import re
|
|
24
|
-
import sys
|
|
25
|
-
from collections import defaultdict
|
|
26
|
-
from pathlib import Path
|
|
27
|
-
from typing import Optional
|
|
28
|
-
|
|
29
|
-
# Configuration
|
|
30
|
-
DOCS_DIR = "docs"
|
|
31
|
-
LINK_PATTERN = re.compile(r'\[([^\]]*)\]\(([^)]+)\)')
|
|
32
|
-
COMING_SOON_MARKER = " *(coming soon)*"
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
def normalize_path(source_file: str, link: str) -> Optional[str]:
|
|
36
|
-
"""Resolve a relative link to an absolute path."""
|
|
37
|
-
# Skip external links, mailto, and anchors
|
|
38
|
-
if link.startswith(('http://', 'https://', 'mailto:', '#')):
|
|
39
|
-
return None
|
|
40
|
-
|
|
41
|
-
# Remove anchor from link
|
|
42
|
-
link = link.split('#')[0]
|
|
43
|
-
if not link:
|
|
44
|
-
return None
|
|
45
|
-
|
|
46
|
-
# Handle URL encoding
|
|
47
|
-
link = link.replace('%20', ' ')
|
|
48
|
-
|
|
49
|
-
# Resolve relative path
|
|
50
|
-
source_dir = os.path.dirname(source_file)
|
|
51
|
-
return os.path.normpath(os.path.join(source_dir, link))
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
def is_coming_soon(line: str, link: str) -> bool:
|
|
55
|
-
"""Check if a link is marked as coming soon."""
|
|
56
|
-
link_escaped = re.escape(f"]({link})")
|
|
57
|
-
return bool(re.search(link_escaped + r'.*coming soon', line, re.IGNORECASE))
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
def scan_file(filepath: str) -> dict:
|
|
61
|
-
"""Scan a markdown file for link issues."""
|
|
62
|
-
results = {
|
|
63
|
-
'broken': [],
|
|
64
|
-
'coming_soon': [],
|
|
65
|
-
'incorrect_marking': [],
|
|
66
|
-
'valid': []
|
|
67
|
-
}
|
|
68
|
-
|
|
69
|
-
try:
|
|
70
|
-
with open(filepath, 'r', encoding='utf-8') as f:
|
|
71
|
-
lines = f.readlines()
|
|
72
|
-
|
|
73
|
-
for line_num, line in enumerate(lines, 1):
|
|
74
|
-
for match in LINK_PATTERN.finditer(line):
|
|
75
|
-
text, link = match.group(1), match.group(2)
|
|
76
|
-
resolved = normalize_path(filepath, link)
|
|
77
|
-
|
|
78
|
-
if resolved is None:
|
|
79
|
-
continue
|
|
80
|
-
|
|
81
|
-
exists = os.path.exists(resolved)
|
|
82
|
-
coming_soon = is_coming_soon(line, link)
|
|
83
|
-
|
|
84
|
-
info = {
|
|
85
|
-
'line': line_num,
|
|
86
|
-
'text': text,
|
|
87
|
-
'link': link,
|
|
88
|
-
'resolved': resolved,
|
|
89
|
-
'line_content': line.rstrip()
|
|
90
|
-
}
|
|
91
|
-
|
|
92
|
-
if exists and coming_soon:
|
|
93
|
-
results['incorrect_marking'].append(info)
|
|
94
|
-
elif not exists and coming_soon:
|
|
95
|
-
results['coming_soon'].append(info)
|
|
96
|
-
elif not exists:
|
|
97
|
-
results['broken'].append(info)
|
|
98
|
-
else:
|
|
99
|
-
results['valid'].append(info)
|
|
100
|
-
|
|
101
|
-
except Exception as e:
|
|
102
|
-
print(f"Error scanning {filepath}: {e}", file=sys.stderr)
|
|
103
|
-
|
|
104
|
-
return results
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
def fix_broken_link(filepath: str, line_num: int, link: str) -> bool:
|
|
108
|
-
"""Add 'coming soon' marker to a broken link."""
|
|
109
|
-
try:
|
|
110
|
-
with open(filepath, 'r', encoding='utf-8') as f:
|
|
111
|
-
lines = f.readlines()
|
|
112
|
-
|
|
113
|
-
line_idx = line_num - 1
|
|
114
|
-
line = lines[line_idx]
|
|
115
|
-
|
|
116
|
-
# Find the link and add marker after it
|
|
117
|
-
pattern = re.escape(f"]({link})")
|
|
118
|
-
if re.search(pattern + r'\s*\*\(coming soon\)\*', line, re.IGNORECASE):
|
|
119
|
-
return False # Already marked
|
|
120
|
-
|
|
121
|
-
new_line = re.sub(
|
|
122
|
-
pattern,
|
|
123
|
-
f"]({link}){COMING_SOON_MARKER}",
|
|
124
|
-
line
|
|
125
|
-
)
|
|
126
|
-
|
|
127
|
-
if new_line != line:
|
|
128
|
-
lines[line_idx] = new_line
|
|
129
|
-
with open(filepath, 'w', encoding='utf-8') as f:
|
|
130
|
-
f.writelines(lines)
|
|
131
|
-
return True
|
|
132
|
-
|
|
133
|
-
except Exception as e:
|
|
134
|
-
print(f"Error fixing {filepath}:{line_num}: {e}", file=sys.stderr)
|
|
135
|
-
|
|
136
|
-
return False
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
def fix_incorrect_marking(filepath: str, line_num: int, link: str) -> bool:
|
|
140
|
-
"""Remove 'coming soon' marker from a link to existing file."""
|
|
141
|
-
try:
|
|
142
|
-
with open(filepath, 'r', encoding='utf-8') as f:
|
|
143
|
-
lines = f.readlines()
|
|
144
|
-
|
|
145
|
-
line_idx = line_num - 1
|
|
146
|
-
line = lines[line_idx]
|
|
147
|
-
|
|
148
|
-
# Remove the coming soon marker after this specific link
|
|
149
|
-
pattern = re.escape(f"]({link})") + r'\s*\*\(coming soon\)\*'
|
|
150
|
-
new_line = re.sub(pattern, f"]({link})", line, flags=re.IGNORECASE)
|
|
151
|
-
|
|
152
|
-
if new_line != line:
|
|
153
|
-
lines[line_idx] = new_line
|
|
154
|
-
with open(filepath, 'w', encoding='utf-8') as f:
|
|
155
|
-
f.writelines(lines)
|
|
156
|
-
return True
|
|
157
|
-
|
|
158
|
-
except Exception as e:
|
|
159
|
-
print(f"Error fixing {filepath}:{line_num}: {e}", file=sys.stderr)
|
|
160
|
-
|
|
161
|
-
return False
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
def scan_docs(docs_dir: str = DOCS_DIR) -> dict:
|
|
165
|
-
"""Scan all markdown files in docs directory."""
|
|
166
|
-
all_results = {
|
|
167
|
-
'broken': [],
|
|
168
|
-
'coming_soon': [],
|
|
169
|
-
'incorrect_marking': [],
|
|
170
|
-
'valid': [],
|
|
171
|
-
'files_scanned': 0
|
|
172
|
-
}
|
|
173
|
-
|
|
174
|
-
for root, _, files in os.walk(docs_dir):
|
|
175
|
-
for file in files:
|
|
176
|
-
if file.endswith('.md'):
|
|
177
|
-
filepath = os.path.join(root, file)
|
|
178
|
-
results = scan_file(filepath)
|
|
179
|
-
|
|
180
|
-
all_results['files_scanned'] += 1
|
|
181
|
-
all_results['broken'].extend([(filepath, i) for i in results['broken']])
|
|
182
|
-
all_results['coming_soon'].extend([(filepath, i) for i in results['coming_soon']])
|
|
183
|
-
all_results['incorrect_marking'].extend([(filepath, i) for i in results['incorrect_marking']])
|
|
184
|
-
all_results['valid'].extend([(filepath, i) for i in results['valid']])
|
|
185
|
-
|
|
186
|
-
return all_results
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
def print_report(results: dict, verbose: bool = True):
|
|
190
|
-
"""Print a human-readable report."""
|
|
191
|
-
print("=" * 70)
|
|
192
|
-
print("MARKDOWN LINK VERIFICATION REPORT")
|
|
193
|
-
print("=" * 70)
|
|
194
|
-
print()
|
|
195
|
-
|
|
196
|
-
# Broken links
|
|
197
|
-
print(f"## 1. BROKEN LINKS (no 'coming soon' marker): {len(results['broken'])}")
|
|
198
|
-
print("-" * 60)
|
|
199
|
-
if verbose and results['broken']:
|
|
200
|
-
for fp, info in sorted(results['broken'], key=lambda x: x[0]):
|
|
201
|
-
print(f" {fp}:{info['line']} -> {info['link']}")
|
|
202
|
-
print()
|
|
203
|
-
|
|
204
|
-
# Incorrect markings
|
|
205
|
-
print(f"## 2. INCORRECT: File EXISTS but marked 'coming soon': {len(results['incorrect_marking'])}")
|
|
206
|
-
print("-" * 60)
|
|
207
|
-
if verbose and results['incorrect_marking']:
|
|
208
|
-
for fp, info in sorted(results['incorrect_marking'], key=lambda x: x[0]):
|
|
209
|
-
print(f" {fp}:{info['line']} -> {info['link']}")
|
|
210
|
-
print()
|
|
211
|
-
|
|
212
|
-
# Coming soon (planned content)
|
|
213
|
-
print(f"## 3. PLANNED CONTENT: Links marked 'coming soon': {len(results['coming_soon'])}")
|
|
214
|
-
print("-" * 60)
|
|
215
|
-
if verbose and results['coming_soon']:
|
|
216
|
-
by_dest = defaultdict(list)
|
|
217
|
-
for fp, info in results['coming_soon']:
|
|
218
|
-
by_dest[info['link']].append(fp)
|
|
219
|
-
for link, sources in sorted(by_dest.items()):
|
|
220
|
-
print(f" {link} ({len(sources)} refs)")
|
|
221
|
-
print()
|
|
222
|
-
|
|
223
|
-
# Summary
|
|
224
|
-
print("=" * 70)
|
|
225
|
-
print("SUMMARY")
|
|
226
|
-
print("=" * 70)
|
|
227
|
-
print(f" Files scanned: {results['files_scanned']}")
|
|
228
|
-
print(f" Valid links: {len(results['valid'])}")
|
|
229
|
-
print(f" Broken links (ACTION: mark coming soon): {len(results['broken'])}")
|
|
230
|
-
print(f" Incorrect markings (ACTION: remove coming soon): {len(results['incorrect_marking'])}")
|
|
231
|
-
print(f" Planned content (coming soon): {len(results['coming_soon'])}")
|
|
232
|
-
|
|
233
|
-
# Unique destinations to create
|
|
234
|
-
unique_dests = set(info['link'] for _, info in results['coming_soon'])
|
|
235
|
-
print(f" Unique destinations to create: {len(unique_dests)}")
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
def print_json(results: dict):
|
|
239
|
-
"""Print results as JSON for CI integration."""
|
|
240
|
-
output = {
|
|
241
|
-
'summary': {
|
|
242
|
-
'files_scanned': results['files_scanned'],
|
|
243
|
-
'valid_links': len(results['valid']),
|
|
244
|
-
'broken_links': len(results['broken']),
|
|
245
|
-
'incorrect_markings': len(results['incorrect_marking']),
|
|
246
|
-
'coming_soon_links': len(results['coming_soon']),
|
|
247
|
-
},
|
|
248
|
-
'broken': [
|
|
249
|
-
{'file': fp, **info}
|
|
250
|
-
for fp, info in results['broken']
|
|
251
|
-
],
|
|
252
|
-
'incorrect_marking': [
|
|
253
|
-
{'file': fp, **info}
|
|
254
|
-
for fp, info in results['incorrect_marking']
|
|
255
|
-
],
|
|
256
|
-
'coming_soon_destinations': list(set(
|
|
257
|
-
info['link'] for _, info in results['coming_soon']
|
|
258
|
-
))
|
|
259
|
-
}
|
|
260
|
-
print(json.dumps(output, indent=2))
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
def print_summary(results: dict):
|
|
264
|
-
"""Print a quick summary only."""
|
|
265
|
-
broken = len(results['broken'])
|
|
266
|
-
incorrect = len(results['incorrect_marking'])
|
|
267
|
-
coming_soon = len(results['coming_soon'])
|
|
268
|
-
|
|
269
|
-
status = "PASS" if broken == 0 and incorrect == 0 else "FAIL"
|
|
270
|
-
|
|
271
|
-
print(f"Link Check: {status}")
|
|
272
|
-
print(f" Broken: {broken} | Incorrect: {incorrect} | Coming Soon: {coming_soon}")
|
|
273
|
-
|
|
274
|
-
if broken > 0:
|
|
275
|
-
print(f" Run with --fix to auto-mark broken links as 'coming soon'")
|
|
276
|
-
if incorrect > 0:
|
|
277
|
-
print(f" Run with --fix to remove incorrect 'coming soon' markers")
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
def main():
|
|
281
|
-
parser = argparse.ArgumentParser(
|
|
282
|
-
description="Check markdown links in AIOX documentation",
|
|
283
|
-
formatter_class=argparse.RawDescriptionHelpFormatter,
|
|
284
|
-
epilog=__doc__
|
|
285
|
-
)
|
|
286
|
-
parser.add_argument(
|
|
287
|
-
'--json',
|
|
288
|
-
action='store_true',
|
|
289
|
-
help='Output results as JSON'
|
|
290
|
-
)
|
|
291
|
-
parser.add_argument(
|
|
292
|
-
'--fix',
|
|
293
|
-
action='store_true',
|
|
294
|
-
help='Auto-fix issues (add/remove coming soon markers)'
|
|
295
|
-
)
|
|
296
|
-
parser.add_argument(
|
|
297
|
-
'--summary',
|
|
298
|
-
action='store_true',
|
|
299
|
-
help='Show summary only'
|
|
300
|
-
)
|
|
301
|
-
parser.add_argument(
|
|
302
|
-
'--dir',
|
|
303
|
-
default=DOCS_DIR,
|
|
304
|
-
help=f'Directory to scan (default: {DOCS_DIR})'
|
|
305
|
-
)
|
|
306
|
-
|
|
307
|
-
args = parser.parse_args()
|
|
308
|
-
|
|
309
|
-
# Scan documentation
|
|
310
|
-
results = scan_docs(args.dir)
|
|
311
|
-
|
|
312
|
-
# Auto-fix if requested
|
|
313
|
-
if args.fix:
|
|
314
|
-
fixed_broken = 0
|
|
315
|
-
fixed_incorrect = 0
|
|
316
|
-
|
|
317
|
-
# Fix broken links (add coming soon)
|
|
318
|
-
for fp, info in results['broken']:
|
|
319
|
-
if fix_broken_link(fp, info['line'], info['link']):
|
|
320
|
-
fixed_broken += 1
|
|
321
|
-
|
|
322
|
-
# Fix incorrect markings (remove coming soon)
|
|
323
|
-
for fp, info in results['incorrect_marking']:
|
|
324
|
-
if fix_incorrect_marking(fp, info['line'], info['link']):
|
|
325
|
-
fixed_incorrect += 1
|
|
326
|
-
|
|
327
|
-
print(f"Fixed {fixed_broken} broken links (added 'coming soon')")
|
|
328
|
-
print(f"Fixed {fixed_incorrect} incorrect markings (removed 'coming soon')")
|
|
329
|
-
print()
|
|
330
|
-
|
|
331
|
-
# Re-scan after fixes
|
|
332
|
-
results = scan_docs(args.dir)
|
|
333
|
-
|
|
334
|
-
# Output results
|
|
335
|
-
if args.json:
|
|
336
|
-
print_json(results)
|
|
337
|
-
elif args.summary:
|
|
338
|
-
print_summary(results)
|
|
339
|
-
else:
|
|
340
|
-
print_report(results)
|
|
341
|
-
|
|
342
|
-
# Exit code
|
|
343
|
-
if len(results['broken']) > 0:
|
|
344
|
-
sys.exit(1)
|
|
345
|
-
elif len(results['incorrect_marking']) > 0:
|
|
346
|
-
sys.exit(2)
|
|
347
|
-
else:
|
|
348
|
-
sys.exit(0)
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
if __name__ == "__main__":
|
|
352
|
-
main()
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Markdown Link Checker for AIOX Documentation
|
|
4
|
+
|
|
5
|
+
Validates internal markdown links and tracks documentation status.
|
|
6
|
+
Inspired by Obsidian's broken link checker.
|
|
7
|
+
|
|
8
|
+
Usage:
|
|
9
|
+
python scripts/check-markdown-links.py # Default report
|
|
10
|
+
python scripts/check-markdown-links.py --json # JSON output for CI
|
|
11
|
+
python scripts/check-markdown-links.py --fix # Auto-fix broken links (add coming soon)
|
|
12
|
+
python scripts/check-markdown-links.py --summary # Quick summary only
|
|
13
|
+
|
|
14
|
+
Exit codes:
|
|
15
|
+
0 - All links valid (or only coming soon)
|
|
16
|
+
1 - Broken links found (needs attention)
|
|
17
|
+
2 - Incorrect markings found (exists but marked coming soon)
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
import argparse
|
|
21
|
+
import json
|
|
22
|
+
import os
|
|
23
|
+
import re
|
|
24
|
+
import sys
|
|
25
|
+
from collections import defaultdict
|
|
26
|
+
from pathlib import Path
|
|
27
|
+
from typing import Optional
|
|
28
|
+
|
|
29
|
+
# Configuration
|
|
30
|
+
DOCS_DIR = "docs"
|
|
31
|
+
LINK_PATTERN = re.compile(r'\[([^\]]*)\]\(([^)]+)\)')
|
|
32
|
+
COMING_SOON_MARKER = " *(coming soon)*"
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def normalize_path(source_file: str, link: str) -> Optional[str]:
    """Resolve a relative markdown link to a normalized filesystem path.

    Args:
        source_file: Path of the markdown file that contains the link.
        link: Raw link target exactly as written in the markdown.

    Returns:
        The normalized path the link points to, or None when the link is
        external (http/https/mailto), a pure in-page anchor, or empty once
        its anchor is stripped.
    """
    # Local import keeps the module's top-level dependencies unchanged.
    from urllib.parse import unquote

    # Skip external links, mailto, and pure in-page anchors
    if link.startswith(('http://', 'https://', 'mailto:', '#')):
        return None

    # Drop any '#section' anchor; an anchor-only link has nothing to check
    link = link.split('#')[0]
    if not link:
        return None

    # Decode ALL percent-escapes (%20, %2D, UTF-8 sequences, ...), not just
    # spaces, so any URL-encoded filename resolves to the real path.
    link = unquote(link)

    # Resolve relative to the directory of the file containing the link
    source_dir = os.path.dirname(source_file)
    return os.path.normpath(os.path.join(source_dir, link))
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def is_coming_soon(line: str, link: str) -> bool:
    """Return True when *line* carries a 'coming soon' note after *link*."""
    suffix_pattern = re.escape(f"]({link})") + r'.*coming soon'
    found = re.search(suffix_pattern, line, re.IGNORECASE)
    return found is not None
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def scan_file(filepath: str) -> dict:
    """Scan one markdown file and bucket every internal link it contains.

    Returns a dict with four buckets of link-info records:
        'broken'            - target missing, no 'coming soon' marker
        'coming_soon'       - target missing, marker present
        'incorrect_marking' - target exists but marker present
        'valid'             - target exists, no marker
    """
    buckets = {name: [] for name in ('broken', 'coming_soon', 'incorrect_marking', 'valid')}

    try:
        with open(filepath, 'r', encoding='utf-8') as handle:
            content = handle.readlines()

        for number, text_line in enumerate(content, 1):
            for found in LINK_PATTERN.finditer(text_line):
                label, target = found.group(1), found.group(2)
                resolved = normalize_path(filepath, target)

                # External link, mailto, or bare anchor: nothing to verify.
                if resolved is None:
                    continue

                target_exists = os.path.exists(resolved)
                marked = is_coming_soon(text_line, target)

                record = {
                    'line': number,
                    'text': label,
                    'link': target,
                    'resolved': resolved,
                    'line_content': text_line.rstrip(),
                }

                if target_exists:
                    destination = 'incorrect_marking' if marked else 'valid'
                else:
                    destination = 'coming_soon' if marked else 'broken'
                buckets[destination].append(record)

    except Exception as e:
        print(f"Error scanning {filepath}: {e}", file=sys.stderr)

    return buckets
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def fix_broken_link(filepath: str, line_num: int, link: str) -> bool:
    """Append the 'coming soon' marker after *link* on line *line_num*.

    Args:
        filepath: Markdown file to edit in place.
        line_num: 1-based line number where the link occurs.
        link: Exact link target to mark.

    Returns:
        True when the file was modified; False when the link was already
        marked, no occurrence matched, or an error occurred.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        line_idx = line_num - 1
        line = lines[line_idx]

        # Find the link and add marker after it
        pattern = re.escape(f"]({link})")
        if re.search(pattern + r'\s*\*\(coming soon\)\*', line, re.IGNORECASE):
            return False  # Already marked

        # Use a callable replacement so characters in the link are never
        # interpreted as regex group references (a plain replacement string
        # would treat e.g. a literal '\1' in the link specially).
        new_line = re.sub(pattern, lambda m: m.group(0) + COMING_SOON_MARKER, line)

        if new_line != line:
            lines[line_idx] = new_line
            with open(filepath, 'w', encoding='utf-8') as f:
                f.writelines(lines)
            return True

    except Exception as e:
        print(f"Error fixing {filepath}:{line_num}: {e}", file=sys.stderr)

    return False
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def fix_incorrect_marking(filepath: str, line_num: int, link: str) -> bool:
    """Remove the 'coming soon' marker that follows *link* on line *line_num*.

    Args:
        filepath: Markdown file to edit in place.
        line_num: 1-based line number where the link occurs.
        link: Exact link target whose marker should be stripped.

    Returns:
        True when the file was modified; False when nothing matched or an
        error occurred.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        line_idx = line_num - 1
        line = lines[line_idx]

        # Remove the coming soon marker after this specific link. A callable
        # replacement keeps re.sub from interpreting characters in the link
        # (e.g. a literal '\1') as group references in the replacement text.
        pattern = re.escape(f"]({link})") + r'\s*\*\(coming soon\)\*'
        new_line = re.sub(pattern, lambda m: f"]({link})", line, flags=re.IGNORECASE)

        if new_line != line:
            lines[line_idx] = new_line
            with open(filepath, 'w', encoding='utf-8') as f:
                f.writelines(lines)
            return True

    except Exception as e:
        print(f"Error fixing {filepath}:{line_num}: {e}", file=sys.stderr)

    return False
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def scan_docs(docs_dir: str = DOCS_DIR) -> dict:
    """Walk *docs_dir* and aggregate per-file link-scan results.

    Each aggregated finding is a (filepath, link_info) tuple so callers can
    report which file a finding came from.
    """
    aggregate = {
        'broken': [],
        'coming_soon': [],
        'incorrect_marking': [],
        'valid': [],
        'files_scanned': 0,
    }
    categories = ('broken', 'coming_soon', 'incorrect_marking', 'valid')

    for root, _, files in os.walk(docs_dir):
        for name in files:
            if not name.endswith('.md'):
                continue

            filepath = os.path.join(root, name)
            per_file = scan_file(filepath)

            aggregate['files_scanned'] += 1
            for category in categories:
                aggregate[category].extend(
                    (filepath, entry) for entry in per_file[category]
                )

    return aggregate
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def print_report(results: dict, verbose: bool = True):
    """Print the full human-readable link verification report to stdout."""
    heavy_rule = "=" * 70
    light_rule = "-" * 60

    def _list_findings(entries):
        # One "file:line -> link" row per finding, ordered by file path.
        for fp, info in sorted(entries, key=lambda item: item[0]):
            print(f" {fp}:{info['line']} -> {info['link']}")

    print(heavy_rule)
    print("MARKDOWN LINK VERIFICATION REPORT")
    print(heavy_rule)
    print()

    # Section 1: targets missing and not marked as planned content.
    print(f"## 1. BROKEN LINKS (no 'coming soon' marker): {len(results['broken'])}")
    print(light_rule)
    if verbose and results['broken']:
        _list_findings(results['broken'])
    print()

    # Section 2: targets that exist yet still carry the marker.
    print(f"## 2. INCORRECT: File EXISTS but marked 'coming soon': {len(results['incorrect_marking'])}")
    print(light_rule)
    if verbose and results['incorrect_marking']:
        _list_findings(results['incorrect_marking'])
    print()

    # Section 3: intentionally pending content, grouped by destination.
    print(f"## 3. PLANNED CONTENT: Links marked 'coming soon': {len(results['coming_soon'])}")
    print(light_rule)
    if verbose and results['coming_soon']:
        refs_by_target = defaultdict(list)
        for fp, info in results['coming_soon']:
            refs_by_target[info['link']].append(fp)
        for target, sources in sorted(refs_by_target.items()):
            print(f" {target} ({len(sources)} refs)")
    print()

    print(heavy_rule)
    print("SUMMARY")
    print(heavy_rule)
    print(f" Files scanned: {results['files_scanned']}")
    print(f" Valid links: {len(results['valid'])}")
    print(f" Broken links (ACTION: mark coming soon): {len(results['broken'])}")
    print(f" Incorrect markings (ACTION: remove coming soon): {len(results['incorrect_marking'])}")
    print(f" Planned content (coming soon): {len(results['coming_soon'])}")

    # Distinct link targets that still need to be written.
    unique_targets = {info['link'] for _, info in results['coming_soon']}
    print(f" Unique destinations to create: {len(unique_targets)}")
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def print_json(results: dict):
    """Emit the scan results as an indented JSON document for CI consumption."""
    summary = {
        'files_scanned': results['files_scanned'],
        'valid_links': len(results['valid']),
        'broken_links': len(results['broken']),
        'incorrect_markings': len(results['incorrect_marking']),
        'coming_soon_links': len(results['coming_soon']),
    }

    # Keep 'file' as the first key of each finding so the emitted JSON text
    # matches the established output shape.
    payload = {
        'summary': summary,
        'broken': [{'file': fp, **info} for fp, info in results['broken']],
        'incorrect_marking': [
            {'file': fp, **info} for fp, info in results['incorrect_marking']
        ],
        'coming_soon_destinations': list(
            {info['link'] for _, info in results['coming_soon']}
        ),
    }
    print(json.dumps(payload, indent=2))
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def print_summary(results: dict):
    """Print the one-glance pass/fail summary line and counts."""
    broken_count = len(results['broken'])
    incorrect_count = len(results['incorrect_marking'])
    pending_count = len(results['coming_soon'])

    # Only broken links and stale markers fail the check; planned content is fine.
    verdict = "FAIL" if (broken_count or incorrect_count) else "PASS"

    print(f"Link Check: {verdict}")
    print(f" Broken: {broken_count} | Incorrect: {incorrect_count} | Coming Soon: {pending_count}")

    if broken_count:
        print(" Run with --fix to auto-mark broken links as 'coming soon'")
    if incorrect_count:
        print(" Run with --fix to remove incorrect 'coming soon' markers")
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
def main():
    """CLI entry point: scan docs, optionally auto-fix, report, set exit code."""
    parser = argparse.ArgumentParser(
        description="Check markdown links in AIOX documentation",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    parser.add_argument('--json', action='store_true',
                        help='Output results as JSON')
    parser.add_argument('--fix', action='store_true',
                        help='Auto-fix issues (add/remove coming soon markers)')
    parser.add_argument('--summary', action='store_true',
                        help='Show summary only')
    parser.add_argument('--dir', default=DOCS_DIR,
                        help=f'Directory to scan (default: {DOCS_DIR})')
    args = parser.parse_args()

    # Scan documentation
    results = scan_docs(args.dir)

    if args.fix:
        # Add markers to broken links, strip stale markers, report counts.
        broken_fixed = sum(
            1 for fp, info in results['broken']
            if fix_broken_link(fp, info['line'], info['link'])
        )
        marking_fixed = sum(
            1 for fp, info in results['incorrect_marking']
            if fix_incorrect_marking(fp, info['line'], info['link'])
        )

        print(f"Fixed {broken_fixed} broken links (added 'coming soon')")
        print(f"Fixed {marking_fixed} incorrect markings (removed 'coming soon')")
        print()

        # Re-scan so the report reflects the files as they now exist.
        results = scan_docs(args.dir)

    # Choose the output format requested on the command line.
    if args.json:
        print_json(results)
    elif args.summary:
        print_summary(results)
    else:
        print_report(results)

    # Exit code contract: 1 = broken links, 2 = stale markers, 0 = clean.
    if len(results['broken']) > 0:
        sys.exit(1)
    elif len(results['incorrect_marking']) > 0:
        sys.exit(2)
    else:
        sys.exit(0)
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
if __name__ == "__main__":
|
|
352
|
+
main()
|