@codihaus/claude-skills 1.6.13 → 1.6.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@codihaus/claude-skills",
-  "version": "1.6.13",
+  "version": "1.6.14",
   "description": "Claude Code skills for software development workflow",
   "main": "src/index.js",
   "bin": {
@@ -2,8 +2,11 @@
 """
 Knowledge Graph Generator for Plans

-Scans markdown files in plans/ directory, extracts [[wikilinks]],
-and generates a knowledge graph (JSON + Mermaid visualization).
+Scans markdown files in plans/ directory, extracts relationships from:
+- [[wikilinks]] - Internal references
+- [text](path.md) - Markdown links to other docs
+
+Generates a knowledge graph (JSON + Mermaid visualization).

 Usage:
     python scripts/graph.py    # Full scan
@@ -23,6 +26,9 @@ from typing import Dict, List, Set, Tuple
 # Wikilink pattern: [[link]] or [[link|display]]
 WIKILINK_PATTERN = re.compile(r'\[\[([^\]|]+)(?:\|[^\]]+)?\]\]')

+# Markdown link pattern: [text](path.md)
+MARKDOWN_LINK_PATTERN = re.compile(r'\[([^\]]+)\]\(([^)]+\.md)\)')
+
 # Node type detection from path/filename
 NODE_TYPES = {
     'brd/use-cases/': 'use-case',
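
For context, the added MARKDOWN_LINK_PATTERN captures both the link text and the `.md` target, and simply ignores destinations that do not end in `.md`. A minimal standalone sketch of its behaviour (the sample text below is made up, not from the package):

```python
import re

# Same pattern as added above: group 1 = link text, group 2 = .md target path
MARKDOWN_LINK_PATTERN = re.compile(r'\[([^\]]+)\]\(([^)]+\.md)\)')

sample = (
    "See [UC-001](../brd/use-cases/uc-001.md) and "
    "[the guide](https://example.com/guide) for details."
)

# Only the first link matches; the second target does not end in .md
print(MARKDOWN_LINK_PATTERN.findall(sample))
# -> [('UC-001', '../brd/use-cases/uc-001.md')]
```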
@@ -87,6 +93,51 @@ def extract_wikilinks(content: str) -> List[str]:
     return [m.strip().lower() for m in matches]


+def extract_markdown_links(content: str, source_path: str) -> List[str]:
+    """
+    Extract markdown links and resolve them to node IDs.
+
+    Args:
+        content: Markdown content
+        source_path: Path to the source file (relative to repo root)
+
+    Returns:
+        List of target node IDs
+    """
+    matches = MARKDOWN_LINK_PATTERN.findall(content)
+    node_ids = []
+
+    for text, link_path in matches:
+        # Skip external links
+        if link_path.startswith('http://') or link_path.startswith('https://'):
+            continue
+
+        # Skip anchors
+        if '#' in link_path:
+            link_path = link_path.split('#')[0]
+            if not link_path:  # Pure anchor link
+                continue
+
+        # Resolve relative path
+        source_dir = Path(source_path).parent
+        target_path = (source_dir / link_path).resolve()
+
+        # Make relative to repo root
+        try:
+            repo_root = Path.cwd()
+            rel_target = target_path.relative_to(repo_root)
+
+            # Only process if it's in plans/
+            if str(rel_target).startswith('plans/'):
+                node_id = extract_node_id(str(rel_target))
+                node_ids.append(node_id)
+        except (ValueError, Exception):
+            # Path resolution failed, skip
+            continue
+
+    return node_ids
+
+
 def get_node_label(filepath: str, content: str) -> str:
     """Extract a human-readable label from file."""
     path = Path(filepath)
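
The resolution step in extract_markdown_links is plain pathlib arithmetic against the current working directory, so it assumes the script is run from the repo root. A rough standalone illustration of that step (file and link paths here are hypothetical, and the extract_node_id helper it calls is not shown in this diff):

```python
from pathlib import Path

source_path = "plans/features/auth/uc-auth-001.md"   # hypothetical source file
link_path = "../../brd/use-cases/uc-billing-003.md"  # hypothetical relative link

source_dir = Path(source_path).parent
target_path = (source_dir / link_path).resolve()     # absolute path under cwd
rel_target = target_path.relative_to(Path.cwd())     # back to repo-relative

# Starts with plans/, so it would be passed on to extract_node_id()
print(rel_target)  # plans/brd/use-cases/uc-billing-003.md
```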
@@ -148,14 +199,23 @@ def scan_plans_directory(plans_dir: str) -> Tuple[Dict, Dict, List]:
             'mtime': md_file.stat().st_mtime,
         }

-        # Extract links
-        links = extract_wikilinks(content)
-        for link in links:
+        # Extract wikilinks
+        wikilinks = extract_wikilinks(content)
+        for link in wikilinks:
             link_id = link.lower().replace(' ', '-')
             edges.append({
                 'from': node_id,
                 'to': link_id,
-                'relation': 'links_to',
+                'relation': 'wikilink',
+            })
+
+        # Extract markdown links
+        md_links = extract_markdown_links(content, rel_path)
+        for target_id in md_links:
+            edges.append({
+                'from': node_id,
+                'to': target_id,
+                'relation': 'markdown_link',
             })

     return nodes, edges, errors
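
The net effect in scan_plans_directory is that edges now record which syntax produced them, instead of the single `links_to` relation. Illustrative edge records (node IDs invented for the example):

```python
# Shape of edges after the change; 'wikilink' comes from [[...]],
# 'markdown_link' from [text](path.md)
edges = [
    {'from': 'uc-auth-001', 'to': 'tech-context', 'relation': 'wikilink'},
    {'from': 'uc-auth-001', 'to': 'uc-billing-003', 'relation': 'markdown_link'},
]
```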
@@ -1,7 +1,7 @@
 ---
 name: dev-specs
 description: Generate lean implementation specifications from BRD use cases
-version: 2.0.0
+version: 2.0.1
 ---

 # /dev-specs - Lean Implementation Specifications
@@ -83,18 +83,20 @@ plans/features/{feature}/
 ```

 **Each UC spec includes:**
-- Requirements (bulleted, testable)
-- Technical constraints (stack-aware from tech-context.md)
-- Acceptance criteria (testable outcomes)
-- Files to modify (where to work)
-- API contract (if applicable)
-- Dependencies (what must exist first)
-- Implementation notes (DO/DON'T)
+- **Requirements** - WHAT to achieve (testable outcomes)
+- **Acceptance Criteria** - HOW to verify it works
+- **Technical Constraints** - Stack patterns to use (reference tech-context.md)
+- **Work Area** - General location (e.g., "auth feature area", NOT specific files)
+- **API Contract** - If external interface
+- **Dependencies** - What must exist first

 **Each UC spec does NOT include:**
-- Pseudocode (gets stale)
-- Detailed implementation steps
-- Code examples (reference existing patterns instead)
+- Specific file names to create/modify
+- DO/DON'T implementation lists
+- Pseudocode or step-by-step instructions
+- ❌ "How to code" details
+
+**Engineer figures out:** File names, implementation approach, code structure

 ## Success Criteria

@@ -2,8 +2,8 @@

 ## Principle

-**Include:** Requirements (testable), technical constraints, acceptance criteria, file checklist
-**Exclude:** Pseudocode, detailed HOW, code examples (reference patterns instead)
+**Include:** Requirements (testable), technical constraints, acceptance criteria, work area guidance
+**Exclude:** Pseudocode, detailed HOW, specific file lists, DO/DON'T lists (reference patterns instead)

 **Length:** ~150 lines max per UC
 ---
@@ -47,10 +47,10 @@
 - Validation: {where schemas go}
 - Forms: {which library}

-**Code Location:**
-- Primary location: `{path}`
-- Related files: `{paths}`
-- Follow pattern: `{reference file}`
+**Work Area:**
+- General location: {feature area, e.g., "auth feature", "billing system"}
+- Pattern reference: See tech-context.md → {section}
+- Integration points: {what this connects to}

 **External Dependencies:**
 {if applicable}
@@ -69,17 +69,6 @@
 **Edge Cases:**
 - [ ] Given {edge}, When {action}, Then {behavior}

-## Files to Modify
-
-**New Files:**
-- [ ] `{path}` - {purpose}
-
-**Modified Files:**
-- [ ] `{path}` - {what changes}
-
-**Reference Pattern:**
-- See `{existing file}` for similar implementation
-
 ## API Contract (if applicable)

 **Endpoint:** `{METHOD} {path}`
@@ -123,24 +112,23 @@
 **Blocks:**
 - [ ] [[uc-{group}-{nnn}]] - {why}

-## Implementation Notes
-
-**DO:**
-- {guidance}
+## Notes

-**DON'T:**
-- {anti-pattern}
+**Context:**
+{any additional context that helps understand the requirements}

-**Watch Out For:**
-- {gotcha}
+**Edge Cases:**
+{known edge cases or gotchas to be aware of}
 ```

 ---

 ## Key Rules

-1. **No pseudocode** - requirements and constraints only
-2. **Reference patterns** - don't rewrite them
-3. **Testable criteria** - every requirement maps to a test
-4. **Stack-aware** - uses tech-context.md for correct approach
-5. **Scannable** - read in 2-3 minutes
+1. **WHAT, not HOW** - define outcomes, not implementation steps
+2. **No specific file lists** - general work area only, engineer determines files
+3. **No DO/DON'T lists** - reference patterns instead
+4. **No pseudocode** - requirements and constraints only
+5. **Testable criteria** - every requirement maps to a test
+6. **Stack-aware** - uses tech-context.md for correct approach
+7. **Scannable** - read in 2-3 minutes