@unlaxer/dve-toolkit 4.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/cli/dve-tool.ts +734 -0
- package/config.ts +64 -0
- package/context/bundle.ts +142 -0
- package/graph/builder.ts +254 -0
- package/graph/cluster.ts +105 -0
- package/graph/query.ts +82 -0
- package/graph/schema.ts +169 -0
- package/install.sh +78 -0
- package/package.json +29 -0
- package/parser/annotation-parser.ts +77 -0
- package/parser/decision-parser.ts +104 -0
- package/parser/drift-detector.ts +45 -0
- package/parser/git-linker.ts +62 -0
- package/parser/glossary-builder.ts +116 -0
- package/parser/session-parser.ts +213 -0
- package/parser/spec-parser.ts +65 -0
- package/parser/state-detector.ts +379 -0
- package/scripts/audit-duplicates.sh +101 -0
- package/scripts/discover-decisions.sh +129 -0
- package/scripts/recover-all.sh +150 -0
- package/scripts/recover-dialogues.sh +190 -0
- package/server/api.ts +297 -0
- package/server/slack.ts +217 -0
- package/skills/dve-annotate.md +26 -0
- package/skills/dve-build.md +15 -0
- package/skills/dve-context.md +22 -0
- package/skills/dve-serve.md +17 -0
- package/skills/dve-status.md +18 -0
- package/skills/dve-trace.md +16 -0
- package/tsconfig.json +15 -0
- package/update.sh +73 -0
- package/version.txt +1 -0
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
// Session parser — graduated extraction from dge/sessions/*.md
|
|
2
|
+
|
|
3
|
+
import { readFileSync } from "node:fs";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import type { Session, Gap, ParseResult } from "../graph/schema.js";
|
|
6
|
+
|
|
7
|
+
// Inline gap-discovery marker, e.g. "→ Gap 発見: <summary>" (ASCII or fullwidth colon).
const GAP_MARKER = /^.*→\s*Gap\s*発見[::]\s*(.+)/;
// 4-column markdown gap-table row: | index | summary | category | severity |
// (severity may be wrapped in ** for bold).
const GAP_TABLE_ROW = /\|\s*(\d+)\s*\|\s*(.+?)\s*\|\s*(.+?)\s*\|\s*\*{0,2}(Critical|High|Medium|Low)\*{0,2}\s*\|/;
// 5-column variant with a trailing status column: | index | summary | category | severity | status |
const GAP_TABLE_ROW_5COL = /\|\s*(\d+)\s*\|\s*(.+?)\s*\|\s*(.+?)\s*\|\s*(Critical|High|Medium|Low)\s*\|\s*(.+?)\s*\|/;
// Speaker line such as "**Name**:" — NOTE(review): not referenced in this chunk.
const CHAR_LINE = /^\*{0,2}([^\*]+?)\*{0,2}[::]/;
// Leading emoji run followed by a name — NOTE(review): not referenced in this chunk.
const ICON_NAME = /^([\p{Emoji_Presentation}\p{Emoji}\uFE0F]+)\s*(.+)/u;

// Result of parsing one session file: the session node plus every gap found in it.
interface SessionParseOutput {
  session: ParseResult<Session>;
  gaps: ParseResult<Gap>[];
}
|
|
17
|
+
|
|
18
|
+
function extractDateAndTheme(filename: string): { date: string; theme: string } | null {
|
|
19
|
+
const stem = path.basename(filename, ".md");
|
|
20
|
+
const match = stem.match(/^(\d{4}-\d{2}-\d{2})-(.+)$/);
|
|
21
|
+
if (match) return { date: match[1], theme: match[2] };
|
|
22
|
+
return null;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
function extractFrontmatter(lines: string[]): Record<string, string> {
|
|
26
|
+
const fm: Record<string, string> = {};
|
|
27
|
+
for (const line of lines) {
|
|
28
|
+
const m = line.match(/^-\s+\*{0,2}(\w[\w\s]*?)\*{0,2}[::]\s*(.+)/);
|
|
29
|
+
if (m) fm[m[1].trim().toLowerCase()] = m[2].trim();
|
|
30
|
+
if (line.startsWith("---") && Object.keys(fm).length > 0) break;
|
|
31
|
+
if (line.startsWith("## ") && Object.keys(fm).length > 0) break;
|
|
32
|
+
}
|
|
33
|
+
return fm;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
 * Parse one session file (dge/sessions/*.md) into a Session node plus the
 * Gap nodes discovered in it. Extraction is graduated:
 *  - Level 1: date/theme from the filename stem (YYYY-MM-DD-theme);
 *  - Level 2: inline "→ Gap 発見:" markers, one Gap per marker;
 *  - Level 3: a markdown gap table refines category/severity/status, or —
 *    when no inline markers exist — supplies the Gaps outright.
 * Confidence values encode how far that pipeline succeeded.
 */
export function parseSession(filePath: string): SessionParseOutput {
  const content = readFileSync(filePath, "utf-8");
  const lines = content.split("\n");
  const warnings: string[] = [];

  // Level 1: filename → date + id
  const fileInfo = extractDateAndTheme(filePath);
  const id = path.basename(filePath, ".md");
  const date = fileInfo?.date ?? "";
  const theme = fileInfo?.theme ?? "";

  if (!fileInfo) {
    warnings.push("Could not extract date/theme from filename");
  }

  // Frontmatter extraction
  const fm = extractFrontmatter(lines);

  // Characters from frontmatter; split on ASCII or Japanese comma.
  const charsRaw = fm["characters"] ?? "";
  const characters = charsRaw
    ? charsRaw.split(/[,、]/).map((c) => c.trim()).filter(Boolean)
    : [];

  // Strip a trailing parenthesised note from the flow value, e.g. "quick (default)".
  const flow = fm["flow"]?.replace(/\s*\(.+\)/, "") ?? "quick";
  const structure = fm["structure"] ?? "roundtable";
  // Theme may be declared under an English or Japanese key; fall back to filename theme.
  const sessionTheme = fm["theme"] ?? fm["テーマ"] ?? theme;

  const session: ParseResult<Session> = {
    node: {
      id,
      date,
      theme: sessionTheme || theme,
      flow,
      structure,
      characters,
      file_path: filePath,
      content,
    },
    // Filename parse failure halves confidence.
    confidence: fileInfo ? 0.9 : 0.5,
    warnings: [...warnings],
    source: { file: filePath },
  };

  // Level 2: Gap markers — one Gap per inline "→ Gap 発見:" line,
  // numbered sequentially as <session-id>#G-NNN.
  const gaps: ParseResult<Gap>[] = [];
  let gapIndex = 0;

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const gapMatch = line.match(GAP_MARKER);
    if (gapMatch) {
      gapIndex++;
      const gapId = `${id}#G-${String(gapIndex).padStart(3, "0")}`;
      gaps.push({
        node: {
          id: gapId,
          session_id: id,
          summary: gapMatch[1].trim(),
          // category/severity are filled in later from the gap table, if present.
          category: "",
          severity: "Unknown",
          status: "Active",
          line_ref: i + 1,
          discovered_by: [],
        },
        confidence: 0.9,
        warnings: [],
        source: { file: filePath, line: i + 1 },
      });
    }
  }

  // Level 3: Gap table → category + severity (supports 4-col and 5-col tables).
  // The 5-col pattern is tried first; `continue` prevents the looser 4-col
  // pattern from re-matching the same row.
  const tableGaps: { index: number; summary: string; category: string; severity: string; status?: string }[] = [];
  for (const line of lines) {
    const match5 = line.match(GAP_TABLE_ROW_5COL);
    if (match5) {
      tableGaps.push({
        index: parseInt(match5[1], 10),
        summary: match5[2].trim(),
        category: match5[3].trim(),
        severity: match5[4],
        status: match5[5].trim(),
      });
      continue;
    }
    const tableMatch = line.match(GAP_TABLE_ROW);
    if (tableMatch) {
      tableGaps.push({
        index: parseInt(tableMatch[1], 10),
        summary: tableMatch[2].trim(),
        category: tableMatch[3].trim(),
        severity: tableMatch[4],
      });
    }
  }

  // Collect all inline Gap markers with line numbers for cross-matching.
  const markerLines: { line: number; text: string }[] = [];
  for (let i = 0; i < lines.length; i++) {
    if (GAP_MARKER.test(lines[i])) {
      markerLines.push({ line: i + 1, text: lines[i] });
    }
  }

  // If no gap markers found but table exists, create gaps from table.
  // Try to match table gaps to inline markers by keyword similarity.
  if (gaps.length === 0 && tableGaps.length > 0) {
    for (const tg of tableGaps) {
      const gapId = `${id}#G-${String(tg.index).padStart(3, "0")}`;
      // NOTE(review): the second arm is redundant (both non-Resolved cases
      // yield "Active"); kept as written.
      const status = tg.status?.includes("Resolved") ? "Archived" as const
        : tg.status?.includes("Active") ? "Active" as const
        : "Active" as const;

      // Try to find matching inline marker by keyword overlap (keywords are
      // summary tokens of length >= 2, split on whitespace/punctuation).
      let bestLine = 0;
      let bestScore = 0;
      const keywords = tg.summary.split(/[\s、。,./]+/).filter(w => w.length >= 2);
      for (const marker of markerLines) {
        let score = 0;
        for (const kw of keywords) {
          if (marker.text.includes(kw)) score++;
        }
        if (score > bestScore) {
          bestScore = score;
          bestLine = marker.line;
        }
      }

      gaps.push({
        node: {
          id: gapId,
          session_id: id,
          summary: tg.summary,
          category: tg.category,
          severity: tg.severity as Gap["severity"],
          status,
          line_ref: bestLine, // 0 if no match found
          discovered_by: [],
        },
        // Slightly lower confidence when no inline marker corroborates the row.
        confidence: bestLine > 0 ? 0.85 : 0.8,
        warnings: bestLine > 0 ? [] : ["Gap extracted from table (no inline marker found)"],
        source: { file: filePath, line: bestLine || undefined },
      });
    }
  } else {
    // Match table rows to gaps by index (table row N refines the Nth marker gap).
    for (const tg of tableGaps) {
      const gap = gaps[tg.index - 1];
      if (gap?.node) {
        if (tg.category) gap.node.category = tg.category;
        if (tg.severity) {
          gap.node.severity = tg.severity as Gap["severity"];
          // Marker + table agreement is the highest-confidence outcome.
          gap.confidence = 0.95;
        }
        if (tg.status?.includes("Resolved")) gap.node.status = "Archived";
        // Prefer the longer of the two summaries.
        if (tg.summary.length > (gap.node.summary?.length ?? 0)) {
          gap.node.summary = tg.summary;
        }
      }
    }

    // Mark gaps without severity.
    // NOTE(review): this loop sits inside the else-branch; table-created gaps
    // always carry a parsed severity, so in practice it covers all paths.
    for (const gap of gaps) {
      if (gap.node.severity === "Unknown") {
        gap.warnings.push("severity not found (no gap table or pre-v3 format)");
        gap.confidence = Math.min(gap.confidence, 0.7);
      }
    }
  }

  if (gaps.length === 0) {
    session.warnings.push("No gap markers found");
    session.confidence = Math.min(session.confidence, 0.6);
  }

  return { session, gaps };
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
// Spec parser — extract specs from dge/specs/*.md
|
|
2
|
+
|
|
3
|
+
import { readFileSync } from "node:fs";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import type { Spec, ParseResult } from "../graph/schema.js";
|
|
6
|
+
|
|
7
|
+
// "- **Key**: value" bullet field (ASCII or fullwidth colon; key may contain "/").
const FIELD_RE = /^-\s+\*{0,2}(\w[\w\s/]*?)\*{0,2}[::]\s*(.+)/;
// Design-decision references such as "DD-012" anywhere in the document.
const DD_REF_RE = /DD-\d+/g;
// Markdown links into ../sessions/ — NOTE(review): not referenced in this chunk.
const SESSION_LINK_RE = /\[([^\]]+)\]\(\.\.\/sessions\/([^)]+)\)/g;
|
|
10
|
+
|
|
11
|
+
export function parseSpec(filePath: string): ParseResult<Spec> {
|
|
12
|
+
const content = readFileSync(filePath, "utf-8");
|
|
13
|
+
const lines = content.split("\n");
|
|
14
|
+
const warnings: string[] = [];
|
|
15
|
+
const stem = path.basename(filePath, ".md");
|
|
16
|
+
|
|
17
|
+
// Title from H1
|
|
18
|
+
const h1 = lines.find((l) => l.startsWith("# "));
|
|
19
|
+
const rawTitle = h1?.replace(/^#\s+/, "") ?? stem;
|
|
20
|
+
|
|
21
|
+
// Extract type from title prefix (e.g. "TECH Spec:", "UC Spec:")
|
|
22
|
+
let type: Spec["type"] = "TECH";
|
|
23
|
+
if (/^UC\b/i.test(rawTitle)) type = "UC";
|
|
24
|
+
else if (/^TECH\b/i.test(rawTitle)) type = "TECH";
|
|
25
|
+
else if (/^DD\b|^ADR\b/i.test(rawTitle)) type = "DD";
|
|
26
|
+
else if (/^DQ\b/i.test(rawTitle)) type = "DQ";
|
|
27
|
+
else if (/^ACT\b/i.test(rawTitle)) type = "ACT";
|
|
28
|
+
|
|
29
|
+
const title = rawTitle.replace(/^(UC|TECH|DD|ADR|DQ|ACT)\s+Spec[::]?\s*/i, "").trim();
|
|
30
|
+
|
|
31
|
+
// Fields
|
|
32
|
+
const fields: Record<string, string> = {};
|
|
33
|
+
for (const line of lines) {
|
|
34
|
+
const m = line.match(FIELD_RE);
|
|
35
|
+
if (m) fields[m[1].trim().toLowerCase()] = m[2].trim();
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
const status = (fields["status"] ?? "draft") as Spec["status"];
|
|
39
|
+
const migratedTo = fields["migrated_to"] ?? fields["migrated to"] ?? undefined;
|
|
40
|
+
|
|
41
|
+
// DD references
|
|
42
|
+
const decisionRefs: string[] = [];
|
|
43
|
+
const resolves = fields["resolves"] ?? "";
|
|
44
|
+
// Also scan session links for DD refs
|
|
45
|
+
const allText = content;
|
|
46
|
+
for (const match of allText.matchAll(DD_REF_RE)) {
|
|
47
|
+
if (!decisionRefs.includes(match[0])) decisionRefs.push(match[0]);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
return {
|
|
51
|
+
node: {
|
|
52
|
+
id: stem,
|
|
53
|
+
title,
|
|
54
|
+
type,
|
|
55
|
+
status,
|
|
56
|
+
decision_refs: decisionRefs,
|
|
57
|
+
migrated_to: migratedTo,
|
|
58
|
+
file_path: filePath,
|
|
59
|
+
content,
|
|
60
|
+
},
|
|
61
|
+
confidence: title ? 1.0 : 0.5,
|
|
62
|
+
warnings,
|
|
63
|
+
source: { file: filePath },
|
|
64
|
+
};
|
|
65
|
+
}
|
|
@@ -0,0 +1,379 @@
|
|
|
1
|
+
// State detector — detect DRE install state and project phase for each project
|
|
2
|
+
|
|
3
|
+
import { readFileSync, existsSync, readdirSync } from "node:fs";
|
|
4
|
+
import { execSync } from "node:child_process";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
|
|
7
|
+
// ─── DRE Install State ───

// Lifecycle of a project's .claude/ DRE installation.
export type DREInstallState = "FRESH" | "INSTALLED" | "CUSTOMIZED" | "OUTDATED" | "UNKNOWN";

// Snapshot of the DRE installation in one project.
export interface DREState {
  installState: DREInstallState;
  localVersion: string | null;   // content of .claude/.dre-version, if present
  kitVersion: string | null;     // version shipped with the kit, if locatable
  customizedFiles: string[];     // managed files whose local copy differs from the kit copy
  totalFiles: number;            // count of .md files across the managed .claude/ subdirs
}
|
|
18
|
+
|
|
19
|
+
/**
 * Detect the DRE install state of a project by inspecting .claude/ and
 * comparing managed files against the shipped kit.
 *
 * Resulting state:
 *  - FRESH: no .claude/ directory, or no .claude/.dre-version file;
 *  - OUTDATED: local version differs from a discoverable kit version;
 *  - CUSTOMIZED: some managed .md files differ from their kit copies;
 *  - INSTALLED: otherwise.
 */
export function detectDREState(projectPath: string): DREState {
  const claudeDir = path.join(projectPath, ".claude");
  const dreVersionFile = path.join(claudeDir, ".dre-version");

  // Check if .claude/ exists
  if (!existsSync(claudeDir)) {
    return { installState: "FRESH", localVersion: null, kitVersion: null, customizedFiles: [], totalFiles: 0 };
  }

  // Read local version
  const localVersion = existsSync(dreVersionFile)
    ? readFileSync(dreVersionFile, "utf-8").trim()
    : null;

  // No recorded version means the directory exists but DRE was never installed.
  if (!localVersion) {
    return { installState: "FRESH", localVersion: null, kitVersion: null, customizedFiles: [], totalFiles: 0 };
  }

  // Try to find kit version: vendored copy first, then node_modules.
  let kitVersion: string | null = null;
  const kitVersionPaths = [
    path.join(projectPath, "dre", "kit", "version.txt"),
    path.join(projectPath, "node_modules", "@unlaxer", "dre-toolkit", "version.txt"),
  ];
  for (const p of kitVersionPaths) {
    if (existsSync(p)) {
      kitVersion = readFileSync(p, "utf-8").trim();
      break;
    }
  }

  // Count files and detect customizations across the kit-managed subdirs.
  const dirs = ["rules", "skills", "agents", "commands", "profiles"];
  let totalFiles = 0;
  const customizedFiles: string[] = [];

  for (const dir of dirs) {
    const localDir = path.join(claudeDir, dir);
    if (!existsSync(localDir)) continue;

    const files = readdirSync(localDir).filter((f) => f.endsWith(".md"));
    totalFiles += files.length;

    // Check if any files differ from kit.
    // NOTE(review): kitVersionPaths[0] is a constant path, so the ternary is
    // always truthy; dirname(".../dre/kit/version.txt") + "../kit/<dir>"
    // resolves back to "<project>/dre/kit/<dir>". Only the vendored kit copy
    // is diffed — the node_modules fallback is not.
    const kitDir = kitVersionPaths[0]
      ? path.join(path.dirname(kitVersionPaths[0]), "..", "kit", dir)
      : null;

    if (kitDir && existsSync(kitDir)) {
      for (const file of files) {
        const localFile = path.join(localDir, file);
        const kitFile = path.join(kitDir, file);
        // Files the kit does not ship (purely local additions) are not
        // counted as customizations.
        if (existsSync(kitFile)) {
          try {
            const localContent = readFileSync(localFile, "utf-8");
            const kitContent = readFileSync(kitFile, "utf-8");
            if (localContent !== kitContent) {
              customizedFiles.push(`${dir}/${file}`);
            }
          } catch { /* ignore unreadable files — best-effort comparison */ }
        }
      }
    }
  }

  // Determine state: a version mismatch takes precedence over customizations.
  let installState: DREInstallState = "INSTALLED";
  if (kitVersion && localVersion !== kitVersion) {
    installState = "OUTDATED";
  } else if (customizedFiles.length > 0) {
    installState = "CUSTOMIZED";
  }

  return { installState, localVersion, kitVersion, customizedFiles, totalFiles };
}
|
|
94
|
+
|
|
95
|
+
// ─── Development Phase ───

// Coarse development phase of a project.
export type DevPhase = "spec" | "implementation" | "stabilization" | "maintenance" | "unknown";

// A detected phase plus the evidence it was derived from.
export interface PhaseState {
  phase: DevPhase;
  source: string; // where the phase was detected (file path or heuristic label)
}
|
|
103
|
+
|
|
104
|
+
export function detectPhase(projectPath: string): PhaseState {
|
|
105
|
+
// 1. Check CLAUDE.md for active_phase
|
|
106
|
+
const claudeMd = path.join(projectPath, ".claude", "CLAUDE.md");
|
|
107
|
+
if (existsSync(claudeMd)) {
|
|
108
|
+
const content = readFileSync(claudeMd, "utf-8");
|
|
109
|
+
const match = content.match(/active_phase[::]\s*(spec|implementation|stabilization|maintenance)/i);
|
|
110
|
+
if (match) {
|
|
111
|
+
return { phase: match[1].toLowerCase() as DevPhase, source: ".claude/CLAUDE.md" };
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// 2. Check CLAUDE.md at root
|
|
116
|
+
const rootClaude = path.join(projectPath, "CLAUDE.md");
|
|
117
|
+
if (existsSync(rootClaude)) {
|
|
118
|
+
const content = readFileSync(rootClaude, "utf-8");
|
|
119
|
+
const match = content.match(/active_phase[::]\s*(spec|implementation|stabilization|maintenance)/i);
|
|
120
|
+
if (match) {
|
|
121
|
+
return { phase: match[1].toLowerCase() as DevPhase, source: "CLAUDE.md" };
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
// 3. Heuristic: check git commit messages for phase prefixes
|
|
126
|
+
try {
|
|
127
|
+
const log = execSync("git log --oneline -20", {
|
|
128
|
+
cwd: projectPath, encoding: "utf-8", timeout: 5000,
|
|
129
|
+
}).trim();
|
|
130
|
+
|
|
131
|
+
const lines = log.split("\n");
|
|
132
|
+
const prefixes = lines.map((l) => {
|
|
133
|
+
const m = l.match(/^\w+ (hotfix|fix|feat|docs|spec|test|refactor)[:(!]/);
|
|
134
|
+
return m?.[1] ?? null;
|
|
135
|
+
}).filter(Boolean) as string[];
|
|
136
|
+
|
|
137
|
+
if (prefixes.length === 0) return { phase: "unknown", source: "no commits" };
|
|
138
|
+
|
|
139
|
+
// Count prefix types
|
|
140
|
+
const counts: Record<string, number> = {};
|
|
141
|
+
for (const p of prefixes) counts[p] = (counts[p] ?? 0) + 1;
|
|
142
|
+
|
|
143
|
+
if (counts["hotfix"]) return { phase: "maintenance", source: "git log (hotfix commits)" };
|
|
144
|
+
if ((counts["fix"] ?? 0) + (counts["test"] ?? 0) > (counts["feat"] ?? 0)) {
|
|
145
|
+
return { phase: "stabilization", source: "git log (fix/test > feat)" };
|
|
146
|
+
}
|
|
147
|
+
if ((counts["docs"] ?? 0) + (counts["spec"] ?? 0) > (counts["feat"] ?? 0)) {
|
|
148
|
+
return { phase: "spec", source: "git log (docs/spec dominant)" };
|
|
149
|
+
}
|
|
150
|
+
return { phase: "implementation", source: "git log (feat dominant)" };
|
|
151
|
+
} catch {
|
|
152
|
+
return { phase: "unknown", source: "git not available" };
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
// ─── DRE Workflow State Machine ───

// One phase in the workflow state machine.
export interface SMPhase {
  id: string;
  source: "base" | "plugin";  // base DRE phase, or contributed by a plugin
  plugin?: string; // "dge" | "dde" etc.
  active: boolean;            // true when this is the current phase
}

// One sub-state inside a plugin's state machine.
export interface SubState {
  id: string;
  description: string;
  active: boolean;
}

// A plugin's sub-state machine attached to one workflow phase.
export interface PluginSM {
  plugin: string;
  phaseId: string;            // workflow phase this SM belongs to
  states: SubState[];
  current: string | null;     // id of the current sub-state, if any
}

// Complete detected workflow state for one project.
export interface WorkflowState {
  phases: SMPhase[];
  currentPhase: string;
  currentSource: string;      // where the current phase was detected
  subState: string | null;
  stack: string[];            // runtime phase stack from .dre/context.json
  plugins: { id: string; version: string | null; phase: string; insertAfter: string }[];
  pluginSMs: PluginSM[];
}
|
|
187
|
+
|
|
188
|
+
// Default DRE workflow phases, in order, used when no state-machine.yaml exists.
const BASE_PHASES = ["backlog", "spec", "impl", "review", "release"];

// Known plugins: directory to probe for, plugin id, the phase it contributes,
// and which base phase that contribution is inserted after.
const PLUGIN_DEFS: { dir: string; id: string; phase: string; insertAfter: string }[] = [
  { dir: "dge", id: "dge", phase: "gap_extraction", insertAfter: "spec" },
  { dir: "dde", id: "dde", phase: "doc_deficit_check", insertAfter: "review" },
];
|
|
194
|
+
|
|
195
|
+
function readYamlSM(projectPath: string): { phases: string[]; pluginPhases: Record<string, string> } | null {
|
|
196
|
+
const smPath = path.join(projectPath, ".dre", "state-machine.yaml");
|
|
197
|
+
if (!existsSync(smPath)) return null;
|
|
198
|
+
// Simple line-based YAML parse (avoid dependency)
|
|
199
|
+
const content = readFileSync(smPath, "utf-8");
|
|
200
|
+
const phases: string[] = [];
|
|
201
|
+
const pluginPhases: Record<string, string> = {};
|
|
202
|
+
let inPhases = false;
|
|
203
|
+
for (const line of content.split("\n")) {
|
|
204
|
+
if (/^phases:/.test(line)) { inPhases = true; continue; }
|
|
205
|
+
if (inPhases) {
|
|
206
|
+
const idMatch = line.match(/^\s+-\s+id:\s*(\S+)/);
|
|
207
|
+
if (idMatch) phases.push(idMatch[1]);
|
|
208
|
+
const pluginMatch = line.match(/^\s+-\s+(\w+)\s*#\s*(\w+)/);
|
|
209
|
+
if (pluginMatch) pluginPhases[pluginMatch[2]] = pluginMatch[1];
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
return phases.length > 0 ? { phases, pluginPhases } : null;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
// Shape of the runtime workflow state persisted in .dre/context.json.
interface ContextData {
  stack: string[];              // phase stack, most recent entry last
  current_phase: string | null;
  sub_state: string | null;
  // Per-phase plugin state machines: phase id → plugin SM snapshot.
  plugins_sm: Record<string, { plugin: string; states: { id: string; next: string | null; description: string }[]; current: string | null }>;
}
|
|
221
|
+
|
|
222
|
+
function readContextJson(projectPath: string): ContextData | null {
|
|
223
|
+
const ctxPath = path.join(projectPath, ".dre", "context.json");
|
|
224
|
+
if (!existsSync(ctxPath)) return null;
|
|
225
|
+
try {
|
|
226
|
+
const data = JSON.parse(readFileSync(ctxPath, "utf-8"));
|
|
227
|
+
return {
|
|
228
|
+
stack: data.stack ?? [],
|
|
229
|
+
current_phase: data.current_phase ?? null,
|
|
230
|
+
sub_state: data.sub_state ?? null,
|
|
231
|
+
plugins_sm: data.plugins_sm ?? {},
|
|
232
|
+
};
|
|
233
|
+
} catch { return null; }
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
export function detectWorkflowState(projectPath: string): WorkflowState {
|
|
237
|
+
// 1. Try real state-machine.yaml
|
|
238
|
+
const yamlSM = readYamlSM(projectPath);
|
|
239
|
+
|
|
240
|
+
// 2. Detect installed plugins
|
|
241
|
+
const detectedPlugins: WorkflowState["plugins"] = [];
|
|
242
|
+
for (const pdef of PLUGIN_DEFS) {
|
|
243
|
+
const pluginDir = path.join(projectPath, pdef.dir);
|
|
244
|
+
if (existsSync(pluginDir)) {
|
|
245
|
+
let version: string | null = null;
|
|
246
|
+
const versionFiles = [
|
|
247
|
+
path.join(pluginDir, "kit", "version.txt"),
|
|
248
|
+
path.join(pluginDir, "version.txt"),
|
|
249
|
+
];
|
|
250
|
+
for (const vf of versionFiles) {
|
|
251
|
+
if (existsSync(vf)) { version = readFileSync(vf, "utf-8").trim(); break; }
|
|
252
|
+
}
|
|
253
|
+
detectedPlugins.push({ ...pdef, version });
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
// 3. Build phase list
|
|
258
|
+
let phases: SMPhase[];
|
|
259
|
+
if (yamlSM) {
|
|
260
|
+
// Use real SM definition
|
|
261
|
+
phases = yamlSM.phases.map((id) => {
|
|
262
|
+
const pluginId = Object.entries(yamlSM.pluginPhases).find(([, phase]) => phase === id)?.[0];
|
|
263
|
+
return {
|
|
264
|
+
id,
|
|
265
|
+
source: pluginId ? "plugin" as const : "base" as const,
|
|
266
|
+
plugin: pluginId,
|
|
267
|
+
active: false,
|
|
268
|
+
};
|
|
269
|
+
});
|
|
270
|
+
} else {
|
|
271
|
+
// Build from defaults + detected plugins
|
|
272
|
+
const phaseList: SMPhase[] = BASE_PHASES.map((id) => ({
|
|
273
|
+
id, source: "base" as const, active: false,
|
|
274
|
+
}));
|
|
275
|
+
|
|
276
|
+
// Insert plugin phases
|
|
277
|
+
for (const plugin of detectedPlugins) {
|
|
278
|
+
const insertIdx = phaseList.findIndex((p) => p.id === plugin.insertAfter);
|
|
279
|
+
if (insertIdx >= 0) {
|
|
280
|
+
phaseList.splice(insertIdx + 1, 0, {
|
|
281
|
+
id: plugin.phase,
|
|
282
|
+
source: "plugin",
|
|
283
|
+
plugin: plugin.id,
|
|
284
|
+
active: false,
|
|
285
|
+
});
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
phases = phaseList;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
// 4. Determine current phase + sub-state
|
|
292
|
+
let currentPhase = "unknown";
|
|
293
|
+
let currentSource = "not detected";
|
|
294
|
+
let subState: string | null = null;
|
|
295
|
+
let stack: string[] = [];
|
|
296
|
+
const pluginSMs: PluginSM[] = [];
|
|
297
|
+
|
|
298
|
+
// Priority 1: .dre/context.json (runtime state)
|
|
299
|
+
const ctx = readContextJson(projectPath);
|
|
300
|
+
if (ctx && ctx.stack.length > 0) {
|
|
301
|
+
currentPhase = ctx.current_phase ?? ctx.stack[ctx.stack.length - 1].toLowerCase();
|
|
302
|
+
currentSource = ".dre/context.json";
|
|
303
|
+
stack = ctx.stack;
|
|
304
|
+
subState = ctx.sub_state;
|
|
305
|
+
|
|
306
|
+
// Extract plugin sub-state machines
|
|
307
|
+
for (const [phaseId, psm] of Object.entries(ctx.plugins_sm ?? {})) {
|
|
308
|
+
pluginSMs.push({
|
|
309
|
+
plugin: psm.plugin,
|
|
310
|
+
phaseId,
|
|
311
|
+
states: (psm.states ?? []).map((s: any) => ({
|
|
312
|
+
id: s.id,
|
|
313
|
+
description: s.description ?? "",
|
|
314
|
+
active: s.id === psm.current,
|
|
315
|
+
})),
|
|
316
|
+
current: psm.current,
|
|
317
|
+
});
|
|
318
|
+
}
|
|
319
|
+
} else {
|
|
320
|
+
// Priority 2: CLAUDE.md active_phase
|
|
321
|
+
const phaseResult = detectPhase(projectPath);
|
|
322
|
+
if (phaseResult.phase !== "unknown") {
|
|
323
|
+
const phaseMap: Record<string, string> = {
|
|
324
|
+
spec: "spec", implementation: "impl",
|
|
325
|
+
stabilization: "review", maintenance: "release",
|
|
326
|
+
};
|
|
327
|
+
currentPhase = phaseMap[phaseResult.phase] ?? phaseResult.phase;
|
|
328
|
+
currentSource = phaseResult.source;
|
|
329
|
+
}
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
// Mark active phase
|
|
333
|
+
for (const phase of phases) {
|
|
334
|
+
phase.active = phase.id === currentPhase;
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
return { phases, currentPhase, currentSource, subState, stack, plugins: detectedPlugins, pluginSMs };
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
// ─── Combined Project State ───

// Everything detectable about one project, aggregated for display/serving.
export interface ProjectState {
  projectName: string;
  projectPath: string;
  dre: DREState;                  // install state of the DRE toolkit
  phase: PhaseState;              // coarse development phase
  workflow: WorkflowState;        // workflow state machine snapshot
  dgeSessionCount: number;        // number of session files in dge/sessions/
  ddCount: number;                // number of decision files in dge/decisions/
  lastSessionDate: string | null; // latest YYYY-MM-DD session-filename prefix
}
|
|
352
|
+
|
|
353
|
+
export function detectProjectState(projectName: string, projectPath: string): ProjectState {
|
|
354
|
+
const dre = detectDREState(projectPath);
|
|
355
|
+
const phase = detectPhase(projectPath);
|
|
356
|
+
const workflow = detectWorkflowState(projectPath);
|
|
357
|
+
|
|
358
|
+
const sessionsDir = path.join(projectPath, "dge", "sessions");
|
|
359
|
+
const decisionsDir = path.join(projectPath, "dge", "decisions");
|
|
360
|
+
|
|
361
|
+
let dgeSessionCount = 0;
|
|
362
|
+
let ddCount = 0;
|
|
363
|
+
let lastSessionDate: string | null = null;
|
|
364
|
+
|
|
365
|
+
if (existsSync(sessionsDir)) {
|
|
366
|
+
const files = readdirSync(sessionsDir).filter((f) => f.endsWith(".md") && f !== "index.md");
|
|
367
|
+
dgeSessionCount = files.length;
|
|
368
|
+
const dates = files
|
|
369
|
+
.map((f) => f.match(/^(\d{4}-\d{2}-\d{2})/)?.[1])
|
|
370
|
+
.filter(Boolean) as string[];
|
|
371
|
+
if (dates.length > 0) lastSessionDate = dates.sort().reverse()[0];
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
if (existsSync(decisionsDir)) {
|
|
375
|
+
ddCount = readdirSync(decisionsDir).filter((f) => f.endsWith(".md") && f !== "index.md").length;
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
return { projectName, projectPath, dre, phase, workflow, dgeSessionCount, ddCount, lastSessionDate };
|
|
379
|
+
}
|