@codihaus/claude-skills 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/knowledge/domains/_index.md +105 -0
- package/knowledge/domains/ecommerce/_index.md +499 -0
- package/knowledge/domains/saas/_index.md +371 -0
- package/knowledge/stacks/_index.md +101 -0
- package/knowledge/stacks/directus/_index.md +349 -0
- package/knowledge/stacks/nextjs/_index.md +654 -0
- package/knowledge/stacks/nuxt/_index.md +469 -0
- package/package.json +3 -1
- package/project-scripts/graph.py +330 -0
- package/skills/_registry.md +61 -0
- package/skills/dev-coding/SKILL.md +16 -5
- package/skills/dev-coding-backend/SKILL.md +116 -251
- package/skills/dev-coding-frontend/SKILL.md +134 -388
- package/skills/dev-review/SKILL.md +13 -2
- package/skills/dev-scout/SKILL.md +180 -2
- package/skills/dev-scout/references/stack-patterns.md +371 -0
- package/skills/dev-specs/SKILL.md +74 -2
- package/src/commands/init.js +89 -12
- package/src/utils/project-setup.js +444 -0
- package/src/utils/skills.js +87 -1
- /package/{skills/dev-coding-frontend/references/nextjs.md → knowledge/stacks/nextjs/references/performance.md} +0 -0
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Knowledge Graph Generator for Plans
|
|
4
|
+
|
|
5
|
+
Scans markdown files in plans/ directory, extracts [[wikilinks]],
|
|
6
|
+
and generates a knowledge graph (JSON + Mermaid visualization).
|
|
7
|
+
|
|
8
|
+
Usage:
|
|
9
|
+
python scripts/graph.py # Full scan
|
|
10
|
+
python scripts/graph.py --check-path <path> # Only if path in plans/
|
|
11
|
+
python scripts/graph.py --json # Output JSON to stdout
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import argparse
|
|
15
|
+
import json
|
|
16
|
+
import os
|
|
17
|
+
import re
|
|
18
|
+
import sys
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from datetime import datetime
|
|
21
|
+
from typing import Dict, List, Set, Tuple
|
|
22
|
+
|
|
23
|
+
# Wikilink pattern: matches [[link]] and [[link|display]]; the single capture
# group is the link target (the text before the optional pipe).
WIKILINK_PATTERN = re.compile(r'\[\[([^\]|]+)(?:\|[^\]]+)?\]\]')

# Node type detection from path/filename. Keys are substrings searched for in
# the file path; insertion order matters because detect_node_type returns the
# first key that matches.
NODE_TYPES = {
    'brd/use-cases/': 'use-case',
    'brd/changes/': 'change-request',
    'brd/README': 'brd',
    'brd/context': 'context',
    'features/': 'feature',
    'specs/': 'spec',
    'scout': 'scout',
}
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def detect_node_type(filepath: str) -> str:
    """Classify a plans file by matching path fragments against NODE_TYPES.

    The first NODE_TYPES entry whose key occurs anywhere in *filepath* wins;
    anything unmatched falls back to the generic 'document' type.
    """
    return next(
        (kind for fragment, kind in NODE_TYPES.items() if fragment in filepath),
        'document',
    )
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def extract_node_id(filepath: str) -> str:
    """Derive a stable, human-friendly node ID from a markdown file path.

    Rules (checked in order):
      * Use-case files (``uc-*``): keep the first three dash-separated parts,
        e.g. ``UC-AUTH-001-login.md`` -> ``uc-auth-001``.
      * Change requests (``cr-*``): keep the first two parts,
        e.g. ``CR-001-rename.md`` -> ``cr-001``.
      * ``README.md`` inside a specs folder -> ``specs-{feature}`` (the
        grandparent folder name).
      * ``README.md`` inside a features folder -> ``feature-{folder}``.
      * Anything else: the lowercased filename stem.

    Bug fixes vs. the previous version:
      * Removed the dead local ``rel_path`` (computed, never used).
      * The ``/specs/`` branch is now checked before the ``/features/``
        README branch; previously ``features/{f}/specs/README.md`` matched
        the features branch first and every spec README collapsed onto the
        single colliding id ``feature-specs``.
    """
    path = Path(filepath)
    filename = path.stem.lower()

    # Use case files: UC-XXX-NNN-title -> uc-xxx-nnn
    if filename.startswith('uc-'):
        parts = filename.split('-')
        if len(parts) >= 3:
            return f"{parts[0]}-{parts[1]}-{parts[2]}"

    # Change request files: CR-NNN-title -> cr-nnn
    if filename.startswith('cr-'):
        parts = filename.split('-')
        if len(parts) >= 2:
            return f"{parts[0]}-{parts[1]}"

    # Spec README -> specs-{feature}; other spec files keep their stem.
    # NOTE: substring checks assume '/'-separated paths — TODO confirm this
    # script is only run on POSIX-style relative paths.
    if '/specs/' in str(path):
        if filename == 'readme':
            return f"specs-{path.parent.parent.name}"
        return filename

    # Feature README -> feature-{name}
    if '/features/' in str(path) and filename == 'readme':
        return f"feature-{path.parent.name}"

    # Default: lowercased filename stem
    return filename
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def extract_wikilinks(content: str) -> List[str]:
    """Return every [[wikilink]] target found in *content*, normalized.

    The display part of piped links ([[target|display]]) is discarded by the
    WIKILINK_PATTERN capture group; each target is stripped and lowercased.
    """
    return [match.strip().lower() for match in WIKILINK_PATTERN.findall(content)]
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def get_node_label(filepath: str, content: str) -> str:
    """Produce a human-readable label for a markdown file.

    Prefers the first H1 heading found within the first ten lines; any
    ``UC-XXX-NNN:``-style prefix before a colon is dropped for brevity.
    Falls back to a title-cased version of the filename stem.
    """
    first_lines = content.split('\n')[:10]
    heading = next((ln for ln in first_lines if ln.startswith('# ')), None)

    if heading is not None:
        title = heading[2:].strip()
        # Keep only the text after the first colon (strips "UC-...: " prefixes)
        _, sep, rest = title.partition(':')
        return rest.strip() if sep else title

    # No heading found: derive a label from the filename
    return Path(filepath).stem.replace('-', ' ').title()
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def scan_plans_directory(plans_dir: str) -> Tuple[Dict, List, List]:
    """Walk *plans_dir*, parse every markdown file, and assemble the graph.

    Fix: the return annotation previously read ``Tuple[Dict, Dict, List]``
    although ``edges`` is a list; corrected to ``Tuple[Dict, List, List]``.

    Returns:
        nodes: dict of node_id -> node metadata (id, type, label, path, mtime)
        edges: list of {'from', 'to', 'relation'} dicts, one per wikilink
        errors: human-readable messages for files that could not be read
    """
    nodes: Dict = {}
    edges: List = []
    errors: List = []

    root = Path(plans_dir)
    if not root.exists():
        return nodes, edges, [f"Plans directory not found: {plans_dir}"]

    for md_file in root.rglob('*.md'):
        # The generated graph document must never be part of the graph itself.
        if md_file.name == 'docs-graph.md':
            continue

        rel_path = str(md_file.relative_to(root.parent))

        try:
            content = md_file.read_text(encoding='utf-8')
        except Exception as exc:
            # Best-effort scan: record the failure and keep going.
            errors.append(f"Failed to read {rel_path}: {exc}")
            continue

        # NOTE(review): duplicate node ids (e.g. two files reducing to the
        # same stem) silently overwrite each other here — confirm intended.
        node_id = extract_node_id(rel_path)
        nodes[node_id] = {
            'id': node_id,
            'type': detect_node_type(rel_path),
            'label': get_node_label(rel_path, content),
            'path': rel_path,
            'mtime': md_file.stat().st_mtime,
        }

        # One edge per wikilink; targets are slugified to match node IDs.
        for link in extract_wikilinks(content):
            edges.append({
                'from': node_id,
                'to': link.lower().replace(' ', '-'),
                'relation': 'links_to',
            })

    return nodes, edges, errors
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def generate_mermaid(nodes: Dict, edges: List) -> str:
    """Generate a Mermaid flowchart (inside a ```mermaid fence) from the graph.

    Nodes are grouped into one subgraph per node type; edges are emitted only
    when both endpoints exist as nodes, so dangling wikilinks are dropped.

    Fixes vs. the previous version:
      * Subgraph ids are sanitized like node ids (``-`` -> ``_``); hyphenated
        ids such as ``use-case`` read as edge syntax to Mermaid and could
        break rendering, and node ids were already sanitized — this makes
        the two consistent.
      * Double quotes in labels are replaced with single quotes so the label
        remains one quoted Mermaid string.
    """
    lines = [
        "```mermaid",
        "flowchart TB",
    ]

    # Group nodes by type
    type_groups: Dict[str, List] = {}
    for node in nodes.values():
        type_groups.setdefault(node['type'], []).append(node)

    # Human-friendly subgraph titles
    type_names = {
        'brd': 'BRD',
        'use-case': 'Use Cases',
        'change-request': 'Changes',
        'feature': 'Features',
        'spec': 'Specs',
        'scout': 'Scout',
        'context': 'Context',
        'document': 'Documents',
    }

    # Mermaid node shapes by type (open/close bracket pairs)
    type_shapes = {
        'brd': ('[[', ']]'),             # Stadium
        'use-case': ('[', ']'),          # Rectangle
        'change-request': ('{{', '}}'),  # Hexagon
        'feature': ('([', '])'),         # Pill
        'spec': ('[/', '/]'),            # Parallelogram
        'scout': ('[(', ')]'),           # Cylinder
        'context': ('(', ')'),           # Rounded
        'document': ('[', ']'),          # Rectangle
    }

    # Generate subgraphs
    for node_type, type_nodes in type_groups.items():
        display_name = type_names.get(node_type, node_type.title())
        open_b, close_b = type_shapes.get(node_type, ('[', ']'))

        # Sanitize the subgraph id the same way as node ids.
        subgraph_id = node_type.replace('-', '_')
        lines.append(f"  subgraph {subgraph_id}[{display_name}]")
        for node in type_nodes:
            safe_id = node['id'].replace('-', '_')
            # Truncate long labels; swap double quotes for single quotes so
            # the label stays one quoted string.
            label = node['label'][:30].replace('"', "'")
            lines.append(f"    {safe_id}{open_b}\"{label}\"{close_b}")
        lines.append("  end")

    # Generate edges (only between nodes that both exist)
    node_ids = set(nodes.keys())
    for edge in edges:
        if edge['from'] in node_ids and edge['to'] in node_ids:
            from_id = edge['from'].replace('-', '_')
            to_id = edge['to'].replace('-', '_')
            lines.append(f"  {from_id} --> {to_id}")

    lines.append("```")
    return '\n'.join(lines)
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def save_graph(plans_dir: str, nodes: Dict, edges: List, errors: List):
    """Persist the graph as docs-graph.json and docs-graph.md inside plans_dir.

    Writes a machine-readable JSON snapshot plus a markdown document with a
    Mermaid visualization, a node index table, and any scan errors.

    Returns:
        (json_path, md_path): the two Path objects that were written.
    """
    plans_path = Path(plans_dir)

    # Build graph data
    graph_data = {
        'generated': datetime.now().isoformat(),
        'node_count': len(nodes),
        'edge_count': len(edges),
        'nodes': nodes,
        'edges': edges,
        'errors': errors,
    }

    # Save JSON
    json_path = plans_path / 'docs-graph.json'
    with open(json_path, 'w', encoding='utf-8') as f:
        json.dump(graph_data, f, indent=2)

    # Generate and save Mermaid
    mermaid = generate_mermaid(nodes, edges)
    md_content = f"""# Documentation Graph

> **Generated**: {datetime.now().strftime('%Y-%m-%d %H:%M')}
> **Nodes**: {len(nodes)} | **Edges**: {len(edges)}

## Graph

{mermaid}

## Node Index

| ID | Type | Label | Path |
|----|------|-------|------|
"""

    # One table row per node, sorted by id for stable diffs.
    # NOTE(review): .replace('plans/', '') strips the prefix anywhere in the
    # path, not only at the start — confirm paths never contain it elsewhere.
    for node_id, node in sorted(nodes.items()):
        md_content += f"| {node_id} | {node['type']} | {node['label']} | [{node['path']}](./{node['path'].replace('plans/', '')}) |\n"

    if errors:
        md_content += "\n## Errors\n\n"
        for error in errors:
            md_content += f"- {error}\n"

    md_path = plans_path / 'docs-graph.md'
    with open(md_path, 'w', encoding='utf-8') as f:
        f.write(md_content)

    return json_path, md_path
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
def main():
    """CLI entry point: scan plans/ and emit the graph as files or JSON.

    Exit codes: 0 on success (or when --check-path is outside plans/);
    1 when the plans directory cannot be found.
    """
    parser = argparse.ArgumentParser(description='Generate knowledge graph from plans/')
    parser.add_argument('--check-path', type=str, help='Only run if path is in plans/')
    parser.add_argument('--json', action='store_true', help='Output JSON to stdout')
    parser.add_argument('--plans-dir', type=str, default='plans', help='Plans directory path')
    args = parser.parse_args()

    # If check-path is provided, only run when the changed file lives in
    # plans/ — this lets hooks invoke the script unconditionally.
    if args.check_path:
        if not args.check_path.startswith('plans/') and '/plans/' not in args.check_path:
            # Not a plans file, exit silently
            sys.exit(0)

    # Resolve the plans directory: try cwd, then up to three parent
    # directories, so the script works from anywhere inside a checkout.
    plans_dir = args.plans_dir
    if not os.path.isabs(plans_dir):
        cwd = Path.cwd()
        for parent in [cwd] + list(cwd.parents)[:3]:
            candidate = parent / plans_dir
            if candidate.exists():
                plans_dir = str(candidate)
                break

    if not Path(plans_dir).exists():
        print(f"Plans directory not found: {plans_dir}", file=sys.stderr)
        sys.exit(1)

    # Scan and build graph
    nodes, edges, errors = scan_plans_directory(plans_dir)

    if args.json:
        # Output JSON to stdout (for tooling); nothing is written to disk.
        print(json.dumps({
            'nodes': nodes,
            'edges': edges,
            'errors': errors,
        }, indent=2))
    else:
        # Save to files
        json_path, md_path = save_graph(plans_dir, nodes, edges, errors)
        print(f"Graph updated: {len(nodes)} nodes, {len(edges)} edges")
        print(f"  → {json_path}")
        print(f"  → {md_path}")

        if errors:
            # Fix: errors are written to docs-graph.md (see save_graph), the
            # old hint pointed users at a nonexistent graph.md.
            print(f"  ⚠ {len(errors)} errors (see docs-graph.md)")


if __name__ == '__main__':
    main()
|
package/skills/_registry.md
CHANGED
|
@@ -104,12 +104,15 @@ During each skill, consider suggesting:
|
|
|
104
104
|
- After: Informs `/dev-specs` with decisions
|
|
105
105
|
|
|
106
106
|
### /dev-scout
|
|
107
|
+
- Outputs: `scout.md` (codebase) + `stack.md` (HOW project works)
|
|
108
|
+
- `stack.md` captures: API layer, SDK, patterns, validation, services
|
|
107
109
|
- After: "Ready for implementation? Use `/dev-specs` to plan"
|
|
108
110
|
- If patterns unclear: "Use `/utils/diagram` to visualize architecture"
|
|
109
111
|
|
|
110
112
|
### /dev-specs
|
|
111
113
|
- Before: Ensure `/debrief` completed (BRD exists)
|
|
112
114
|
- During: Calls `/dev-arch` for patterns and decisions
|
|
115
|
+
- **Critical**: Reads `stack.md` to use correct SDK, patterns, validation
|
|
113
116
|
- Reads: `_quality-attributes.md` for spec-level checklists
|
|
114
117
|
- After: "Use `/dev-coding` to implement"
|
|
115
118
|
|
|
@@ -168,6 +171,64 @@ Quality attributes covered:
|
|
|
168
171
|
- **Reliability** - doesn't break
|
|
169
172
|
- **Testability** - can verify
|
|
170
173
|
|
|
174
|
+
## Stack & Domain Knowledge
|
|
175
|
+
|
|
176
|
+
Skills reference platform and business knowledge for accurate output.
|
|
177
|
+
|
|
178
|
+
> **Location**: `knowledge/` folder (separate from `skills/`)
|
|
179
|
+
> - `skills/` = Invocable workflows (user runs `/dev-coding`)
|
|
180
|
+
> - `knowledge/` = Reference material (loaded by skills)
|
|
181
|
+
|
|
182
|
+
### Stack Knowledge (`knowledge/stacks/`)
|
|
183
|
+
|
|
184
|
+
Technical knowledge - HOW to build with specific tools:
|
|
185
|
+
|
|
186
|
+
| Stack | Folder | Type |
|
|
187
|
+
|-------|--------|------|
|
|
188
|
+
| Directus | `stacks/directus/` | Backend BaaS |
|
|
189
|
+
| Nuxt.js | `stacks/nuxt/` | Vue + SSR Framework |
|
|
190
|
+
| Next.js | `stacks/nextjs/` | React + SSR Framework |
|
|
191
|
+
|
|
192
|
+
**Folder structure:**
|
|
193
|
+
```
|
|
194
|
+
stacks/{name}/
|
|
195
|
+
├── _index.md # Main knowledge
|
|
196
|
+
├── references/ # Detailed docs
|
|
197
|
+
└── assets/ # Code templates
|
|
198
|
+
```
|
|
199
|
+
|
|
200
|
+
**How it works:**
|
|
201
|
+
1. `/dev-scout` detects stack → writes `stack.md`
|
|
202
|
+
2. `/dev-specs` reads `stack.md` → loads `stacks/{name}/_index.md`
|
|
203
|
+
3. Uses "For /dev-specs" section for correct patterns
|
|
204
|
+
4. Deep patterns in `references/`, templates in `assets/`
|
|
205
|
+
|
|
206
|
+
### Domain Knowledge (`knowledge/domains/`)
|
|
207
|
+
|
|
208
|
+
Business knowledge - WHAT to build:
|
|
209
|
+
|
|
210
|
+
| Domain | Folder | Description |
|
|
211
|
+
|--------|--------|-------------|
|
|
212
|
+
| SaaS | `domains/saas/` | Subscriptions, multi-tenancy, billing |
|
|
213
|
+
| E-commerce | `domains/ecommerce/` | Products, carts, orders, fulfillment |
|
|
214
|
+
| Insurance | Planned | Policies, claims |
|
|
215
|
+
|
|
216
|
+
**Folder structure:**
|
|
217
|
+
```
|
|
218
|
+
domains/{name}/
|
|
219
|
+
├── _index.md # Main knowledge
|
|
220
|
+
├── references/ # Detailed docs
|
|
221
|
+
└── assets/ # Code templates
|
|
222
|
+
```
|
|
223
|
+
|
|
224
|
+
Domain knowledge captures:
|
|
225
|
+
- Business terminology
|
|
226
|
+
- Common entities and workflows
|
|
227
|
+
- State machines and lifecycles
|
|
228
|
+
- Domain-specific validation rules
|
|
229
|
+
|
|
230
|
+
See `knowledge/stacks/_index.md` and `knowledge/domains/_index.md` for details.
|
|
231
|
+
|
|
171
232
|
## Skill Awareness Protocol
|
|
172
233
|
|
|
173
234
|
Every skill should:
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
name: dev-coding
|
|
3
3
|
description: Implement features from specs with backend and frontend skills
|
|
4
|
-
version: 1.
|
|
4
|
+
version: 1.3.0
|
|
5
5
|
---
|
|
6
6
|
|
|
7
7
|
# /dev-coding - Implementation Skill
|
|
@@ -59,17 +59,28 @@ Updates to specs:
|
|
|
59
59
|
2. Read scout for patterns
|
|
60
60
|
→ plans/features/{feature}/scout.md OR plans/scout/README.md
|
|
61
61
|
|
|
62
|
-
3. Read
|
|
62
|
+
3. Read stack.md and stack knowledge (CRITICAL)
|
|
63
|
+
→ plans/scout/stack.md
|
|
64
|
+
→ Check "Stack Knowledge References" section
|
|
65
|
+
→ Read each referenced knowledge/stacks/*.md file
|
|
66
|
+
→ Use "For /dev-coding" sections for implementation patterns
|
|
67
|
+
|
|
68
|
+
Examples:
|
|
69
|
+
- Directus project → Read knowledge/stacks/directus/_index.md
|
|
70
|
+
- Nuxt project → Read knowledge/stacks/nuxt/_index.md
|
|
71
|
+
- Next.js project → Read knowledge/stacks/nextjs/_index.md
|
|
72
|
+
|
|
73
|
+
4. Read architecture decisions
|
|
63
74
|
→ plans/features/{feature}/architecture.md
|
|
64
75
|
|
|
65
|
-
|
|
76
|
+
5. Read quality attributes (Implementation Level)
|
|
66
77
|
→ skills/_quality-attributes.md
|
|
67
78
|
→ Focus on: query efficiency, memory, concurrency, error handling
|
|
68
79
|
|
|
69
|
-
|
|
80
|
+
6. Read docs-graph for dependencies
|
|
70
81
|
→ plans/docs-graph.json
|
|
71
82
|
|
|
72
|
-
|
|
83
|
+
7. Check: Are dependencies complete?
|
|
73
84
|
→ If UC depends on another UC, verify it's done
|
|
74
85
|
→ Warn if not, let user decide to proceed
|
|
75
86
|
```
|