@majeanson/lac 3.1.1 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -2
- package/bin/lac-lsp.js +1 -1
- package/dist/index.mjs +12979 -836
- package/dist/index.mjs.map +1 -1
- package/dist/lsp.mjs +36 -1
- package/dist/mcp.mjs +2032 -442
- package/package.json +1 -1
package/dist/mcp.mjs
CHANGED
|
@@ -4,9 +4,8 @@ import process$1 from "node:process";
|
|
|
4
4
|
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
5
5
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
6
6
|
import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprotocol/sdk/types.js";
|
|
7
|
-
import readline from "node:readline";
|
|
8
|
-
import Anthropic from "@anthropic-ai/sdk";
|
|
9
7
|
import { execSync } from "node:child_process";
|
|
8
|
+
import "@anthropic-ai/sdk";
|
|
10
9
|
|
|
11
10
|
//#region ../../node_modules/.bun/zod@4.3.6/node_modules/zod/v4/core/core.js
|
|
12
11
|
/** A special constant with type `never` */
|
|
@@ -3741,6 +3740,27 @@ const LineageSchema$1 = object$1({
|
|
|
3741
3740
|
children: array$1(string$2()).optional(),
|
|
3742
3741
|
spawnReason: string$2().nullable().optional()
|
|
3743
3742
|
});
|
|
3743
|
+
const StatusTransitionSchema$1 = object$1({
|
|
3744
|
+
from: FeatureStatusSchema$1,
|
|
3745
|
+
to: FeatureStatusSchema$1,
|
|
3746
|
+
date: string$2().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
3747
|
+
reason: string$2().optional()
|
|
3748
|
+
});
|
|
3749
|
+
const RevisionSchema$1 = object$1({
|
|
3750
|
+
date: string$2().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
3751
|
+
author: string$2().min(1),
|
|
3752
|
+
fields_changed: array$1(string$2()).min(1),
|
|
3753
|
+
reason: string$2().min(1)
|
|
3754
|
+
});
|
|
3755
|
+
const PublicInterfaceEntrySchema$1 = object$1({
|
|
3756
|
+
name: string$2().min(1),
|
|
3757
|
+
type: string$2().min(1),
|
|
3758
|
+
description: string$2().optional()
|
|
3759
|
+
});
|
|
3760
|
+
const CodeSnippetSchema$1 = object$1({
|
|
3761
|
+
label: string$2().min(1),
|
|
3762
|
+
snippet: string$2().min(1)
|
|
3763
|
+
});
|
|
3744
3764
|
const FeatureSchema$1 = object$1({
|
|
3745
3765
|
featureKey: string$2().regex(FEATURE_KEY_PATTERN$1, "featureKey must match pattern <domain>-YYYY-NNN (e.g. feat-2026-001, proc-2026-001)"),
|
|
3746
3766
|
title: string$2().min(1),
|
|
@@ -3756,12 +3776,26 @@ const FeatureSchema$1 = object$1({
|
|
|
3756
3776
|
annotations: array$1(AnnotationSchema$1).optional(),
|
|
3757
3777
|
lineage: LineageSchema$1.optional(),
|
|
3758
3778
|
successCriteria: string$2().optional(),
|
|
3759
|
-
domain: string$2().optional()
|
|
3779
|
+
domain: string$2().optional(),
|
|
3780
|
+
priority: number$2().int().min(1).max(5).optional(),
|
|
3781
|
+
statusHistory: array$1(StatusTransitionSchema$1).optional(),
|
|
3782
|
+
revisions: array$1(RevisionSchema$1).optional(),
|
|
3783
|
+
superseded_by: string$2().regex(FEATURE_KEY_PATTERN$1, "superseded_by must be a valid featureKey").optional(),
|
|
3784
|
+
superseded_from: array$1(string$2().regex(FEATURE_KEY_PATTERN$1, "each superseded_from entry must be a valid featureKey")).optional(),
|
|
3785
|
+
merged_into: string$2().regex(FEATURE_KEY_PATTERN$1, "merged_into must be a valid featureKey").optional(),
|
|
3786
|
+
merged_from: array$1(string$2().regex(FEATURE_KEY_PATTERN$1, "each merged_from entry must be a valid featureKey")).optional(),
|
|
3787
|
+
userGuide: string$2().optional(),
|
|
3788
|
+
componentFile: string$2().optional(),
|
|
3789
|
+
npmPackages: array$1(string$2()).optional(),
|
|
3790
|
+
publicInterface: array$1(PublicInterfaceEntrySchema$1).optional(),
|
|
3791
|
+
externalDependencies: array$1(string$2()).optional(),
|
|
3792
|
+
lastVerifiedDate: string$2().regex(/^\d{4}-\d{2}-\d{2}$/, "lastVerifiedDate must be YYYY-MM-DD").optional(),
|
|
3793
|
+
codeSnippets: array$1(CodeSnippetSchema$1).optional()
|
|
3760
3794
|
});
|
|
3761
3795
|
|
|
3762
3796
|
//#endregion
|
|
3763
3797
|
//#region ../feature-schema/dist/validate.mjs
|
|
3764
|
-
function validateFeature
|
|
3798
|
+
function validateFeature(data) {
|
|
3765
3799
|
const result = FeatureSchema$1.safeParse(data);
|
|
3766
3800
|
if (result.success) return {
|
|
3767
3801
|
success: true,
|
|
@@ -3775,6 +3809,563 @@ function validateFeature$1(data) {
|
|
|
3775
3809
|
};
|
|
3776
3810
|
}
|
|
3777
3811
|
|
|
3812
|
+
//#endregion
|
|
3813
|
+
//#region ../feature-schema/dist/keygen.mjs
|
|
3814
|
+
const LAC_DIR = ".lac";
|
|
3815
|
+
const COUNTER_FILE = "counter";
|
|
3816
|
+
const KEYS_FILE = "keys";
|
|
3817
|
+
/**
|
|
3818
|
+
* Returns the current year as a number.
|
|
3819
|
+
*/
|
|
3820
|
+
function getCurrentYear() {
|
|
3821
|
+
return (/* @__PURE__ */ new Date()).getFullYear();
|
|
3822
|
+
}
|
|
3823
|
+
/**
|
|
3824
|
+
* Pads a counter number to a zero-padded 3-digit string (e.g. 1 → "001").
|
|
3825
|
+
*/
|
|
3826
|
+
function padCounter(n) {
|
|
3827
|
+
return String(n).padStart(3, "0");
|
|
3828
|
+
}
|
|
3829
|
+
/**
|
|
3830
|
+
* Walks up the directory tree from `fromDir` to find the nearest `.lac/` directory.
|
|
3831
|
+
* Returns the path to the `.lac/` directory if found, otherwise null.
|
|
3832
|
+
*/
|
|
3833
|
+
function findLacDir(fromDir) {
|
|
3834
|
+
let current = path.resolve(fromDir);
|
|
3835
|
+
while (true) {
|
|
3836
|
+
const candidate = path.join(current, LAC_DIR);
|
|
3837
|
+
if (fs.existsSync(candidate) && fs.statSync(candidate).isDirectory()) return candidate;
|
|
3838
|
+
const parent = path.dirname(current);
|
|
3839
|
+
if (parent === current) return null;
|
|
3840
|
+
current = parent;
|
|
3841
|
+
}
|
|
3842
|
+
}
|
|
3843
|
+
/**
|
|
3844
|
+
* Reads or initialises the `.lac/counter` file and returns the next
|
|
3845
|
+
* featureKey string like "feat-2026-001".
|
|
3846
|
+
*
|
|
3847
|
+
* The counter file stores a single integer representing the last-used counter
|
|
3848
|
+
* for the current year. When the year changes the counter resets to 1.
|
|
3849
|
+
*
|
|
3850
|
+
* Format of the counter file (two lines):
|
|
3851
|
+
* <year>
|
|
3852
|
+
* <last-used-counter>
|
|
3853
|
+
*
|
|
3854
|
+
* If the file does not exist it is created, and the first key (NNN=001) is
|
|
3855
|
+
* returned. The `.lac/` directory must already exist in a parent of
|
|
3856
|
+
* `fromDir`; if it cannot be found this function throws an Error.
|
|
3857
|
+
*
|
|
3858
|
+
* Duplicate detection: after generating the key, the `.lac/keys` file is
|
|
3859
|
+
* consulted. If the generated key already exists there, the counter is
|
|
3860
|
+
* incremented until a unique key is found.
|
|
3861
|
+
*
|
|
3862
|
+
* @param prefix Domain prefix for the key (default: "feat"). Set via `domain`
|
|
3863
|
+
* in `lac.config.json` to get keys like "proc-2026-001".
|
|
3864
|
+
*/
|
|
3865
|
+
function generateFeatureKey(fromDir, prefix = "feat") {
|
|
3866
|
+
const lacDir = findLacDir(fromDir);
|
|
3867
|
+
if (!lacDir) throw new Error(`Could not find a .lac/ directory in "${fromDir}" or any of its parents. Run "lac workspace init" to initialise a life-as-code workspace.`);
|
|
3868
|
+
const counterPath = path.join(lacDir, COUNTER_FILE);
|
|
3869
|
+
const keysPath = path.join(lacDir, KEYS_FILE);
|
|
3870
|
+
const year = getCurrentYear();
|
|
3871
|
+
let counter = 1;
|
|
3872
|
+
if (fs.existsSync(counterPath)) try {
|
|
3873
|
+
const lines = fs.readFileSync(counterPath, "utf-8").trim().split("\n").map((l) => l.trim());
|
|
3874
|
+
const storedYear = parseInt(lines[0] ?? "", 10);
|
|
3875
|
+
const storedCounter = parseInt(lines[1] ?? "", 10);
|
|
3876
|
+
if (isNaN(storedYear) || isNaN(storedCounter)) {
|
|
3877
|
+
process.stderr.write("Warning: .lac/counter was corrupt — reset to 1\n");
|
|
3878
|
+
fs.writeFileSync(counterPath, `${year}\n1\n`, "utf-8");
|
|
3879
|
+
counter = 1;
|
|
3880
|
+
} else if (storedYear === year) counter = storedCounter + 1;
|
|
3881
|
+
} catch {
|
|
3882
|
+
process.stderr.write("Warning: .lac/counter was corrupt — reset to 1\n");
|
|
3883
|
+
fs.writeFileSync(counterPath, `${year}\n1\n`, "utf-8");
|
|
3884
|
+
counter = 1;
|
|
3885
|
+
}
|
|
3886
|
+
let existingKeys = /* @__PURE__ */ new Set();
|
|
3887
|
+
if (fs.existsSync(keysPath)) existingKeys = new Set(fs.readFileSync(keysPath, "utf-8").trim().split("\n").filter(Boolean));
|
|
3888
|
+
while (existingKeys.has(`${prefix}-${year}-${padCounter(counter)}`)) counter++;
|
|
3889
|
+
const featureKey = `${prefix}-${year}-${padCounter(counter)}`;
|
|
3890
|
+
existingKeys.add(featureKey);
|
|
3891
|
+
const counterTmp = counterPath + ".tmp";
|
|
3892
|
+
const keysTmp = keysPath + ".tmp";
|
|
3893
|
+
fs.writeFileSync(counterTmp, `${year}\n${counter}\n`, "utf-8");
|
|
3894
|
+
fs.writeFileSync(keysTmp, Array.from(existingKeys).join("\n") + "\n", "utf-8");
|
|
3895
|
+
fs.renameSync(counterTmp, counterPath);
|
|
3896
|
+
fs.renameSync(keysTmp, keysPath);
|
|
3897
|
+
return featureKey;
|
|
3898
|
+
}
|
|
3899
|
+
/**
|
|
3900
|
+
* Registers an externally-supplied featureKey in `.lac/keys` so that future
|
|
3901
|
+
* auto-generated keys never collide with it. Also advances the counter if
|
|
3902
|
+
* the key's sequence number is ≥ the current counter value for this year.
|
|
3903
|
+
*
|
|
3904
|
+
* If no `.lac/` directory is found the call is a no-op (workspace hasn't
|
|
3905
|
+
* been initialised yet; the key simply won't be tracked).
|
|
3906
|
+
*
|
|
3907
|
+
* Idempotent — safe to call multiple times with the same key.
|
|
3908
|
+
*/
|
|
3909
|
+
function registerFeatureKey(fromDir, key) {
|
|
3910
|
+
const lacDir = findLacDir(fromDir);
|
|
3911
|
+
if (!lacDir) return;
|
|
3912
|
+
const counterPath = path.join(lacDir, COUNTER_FILE);
|
|
3913
|
+
const keysPath = path.join(lacDir, KEYS_FILE);
|
|
3914
|
+
const year = getCurrentYear();
|
|
3915
|
+
let existingKeys = /* @__PURE__ */ new Set();
|
|
3916
|
+
if (fs.existsSync(keysPath)) existingKeys = new Set(fs.readFileSync(keysPath, "utf-8").trim().split("\n").filter(Boolean));
|
|
3917
|
+
if (existingKeys.has(key)) return;
|
|
3918
|
+
let currentCounter = 0;
|
|
3919
|
+
if (fs.existsSync(counterPath)) try {
|
|
3920
|
+
const lines = fs.readFileSync(counterPath, "utf-8").trim().split("\n").map((l) => l.trim());
|
|
3921
|
+
const storedYear = parseInt(lines[0] ?? "", 10);
|
|
3922
|
+
const storedCounter = parseInt(lines[1] ?? "", 10);
|
|
3923
|
+
if (!isNaN(storedYear) && !isNaN(storedCounter) && storedYear === year) currentCounter = storedCounter;
|
|
3924
|
+
} catch {}
|
|
3925
|
+
const match = /^[a-z][a-z0-9]*-(\d{4})-(\d{3})$/.exec(key);
|
|
3926
|
+
let newCounter = currentCounter;
|
|
3927
|
+
if (match) {
|
|
3928
|
+
const keyYear = parseInt(match[1], 10);
|
|
3929
|
+
const keyNum = parseInt(match[2], 10);
|
|
3930
|
+
if (keyYear === year && keyNum > currentCounter) newCounter = keyNum;
|
|
3931
|
+
}
|
|
3932
|
+
existingKeys.add(key);
|
|
3933
|
+
const counterTmp = counterPath + ".tmp";
|
|
3934
|
+
const keysTmp = keysPath + ".tmp";
|
|
3935
|
+
fs.writeFileSync(counterTmp, `${year}\n${newCounter}\n`, "utf-8");
|
|
3936
|
+
fs.writeFileSync(keysTmp, Array.from(existingKeys).join("\n") + "\n", "utf-8");
|
|
3937
|
+
fs.renameSync(counterTmp, counterPath);
|
|
3938
|
+
fs.renameSync(keysTmp, keysPath);
|
|
3939
|
+
}
|
|
3940
|
+
|
|
3941
|
+
//#endregion
|
|
3942
|
+
//#region ../lac-mcp/src/tools/analysis.ts
|
|
3943
|
+
function scanFeatures$1(dir) {
|
|
3944
|
+
const results = [];
|
|
3945
|
+
walk(dir, results);
|
|
3946
|
+
return results;
|
|
3947
|
+
}
|
|
3948
|
+
function walk(dir, results) {
|
|
3949
|
+
try {
|
|
3950
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
3951
|
+
for (const e of entries) {
|
|
3952
|
+
if ([
|
|
3953
|
+
"node_modules",
|
|
3954
|
+
".git",
|
|
3955
|
+
"dist"
|
|
3956
|
+
].includes(e.name)) continue;
|
|
3957
|
+
const full = path.join(dir, e.name);
|
|
3958
|
+
if (e.isDirectory()) walk(full, results);
|
|
3959
|
+
else if (e.name === "feature.json") try {
|
|
3960
|
+
const r = validateFeature(JSON.parse(fs.readFileSync(full, "utf-8")));
|
|
3961
|
+
if (r.success) results.push({
|
|
3962
|
+
feature: r.data,
|
|
3963
|
+
filePath: full
|
|
3964
|
+
});
|
|
3965
|
+
} catch {}
|
|
3966
|
+
}
|
|
3967
|
+
} catch {}
|
|
3968
|
+
}
|
|
3969
|
+
function resolve(p, root) {
|
|
3970
|
+
return path.isAbsolute(p) ? p : path.resolve(root, p);
|
|
3971
|
+
}
|
|
3972
|
+
const RISK_KEYWORDS = [
|
|
3973
|
+
"revisit",
|
|
3974
|
+
"temporary",
|
|
3975
|
+
"todo",
|
|
3976
|
+
"hack",
|
|
3977
|
+
"fixme",
|
|
3978
|
+
"workaround",
|
|
3979
|
+
"short-term",
|
|
3980
|
+
"quick fix"
|
|
3981
|
+
];
|
|
3982
|
+
function handleAuditDecisions(a, workspaceRoot$1) {
|
|
3983
|
+
const features = scanFeatures$1(a.path ? resolve(String(a.path), workspaceRoot$1) : workspaceRoot$1);
|
|
3984
|
+
const missingDecisions = [];
|
|
3985
|
+
const flaggedDecisions = [];
|
|
3986
|
+
const unaddressedReopens = [];
|
|
3987
|
+
const domainGroups = /* @__PURE__ */ new Map();
|
|
3988
|
+
let cleanCount = 0;
|
|
3989
|
+
for (const { feature } of features) {
|
|
3990
|
+
if (feature.status === "draft") continue;
|
|
3991
|
+
let hasIssue = false;
|
|
3992
|
+
if (!feature.decisions?.length) {
|
|
3993
|
+
missingDecisions.push(` ${feature.featureKey.padEnd(20)} ${feature.status}`);
|
|
3994
|
+
hasIssue = true;
|
|
3995
|
+
} else for (const d of feature.decisions) {
|
|
3996
|
+
const text = (d.decision + " " + d.rationale).toLowerCase();
|
|
3997
|
+
const found = RISK_KEYWORDS.find((k) => text.includes(k));
|
|
3998
|
+
if (found) {
|
|
3999
|
+
flaggedDecisions.push({
|
|
4000
|
+
key: feature.featureKey,
|
|
4001
|
+
decision: d.decision,
|
|
4002
|
+
keyword: found
|
|
4003
|
+
});
|
|
4004
|
+
hasIssue = true;
|
|
4005
|
+
}
|
|
4006
|
+
}
|
|
4007
|
+
const staleAnnotation = feature.annotations?.find((a$1) => a$1.type === "stale-review");
|
|
4008
|
+
if (staleAnnotation) {
|
|
4009
|
+
unaddressedReopens.push(` ${feature.featureKey.padEnd(20)} ${feature.status.padEnd(10)} — ${staleAnnotation.body}`);
|
|
4010
|
+
hasIssue = true;
|
|
4011
|
+
}
|
|
4012
|
+
if (feature.domain) {
|
|
4013
|
+
const group = domainGroups.get(feature.domain) ?? [];
|
|
4014
|
+
group.push(feature);
|
|
4015
|
+
domainGroups.set(feature.domain, group);
|
|
4016
|
+
}
|
|
4017
|
+
if (!hasIssue) cleanCount++;
|
|
4018
|
+
}
|
|
4019
|
+
const duplicateSuspects = [];
|
|
4020
|
+
for (const [domain, group] of domainGroups) for (let i = 0; i < group.length; i++) for (let j = i + 1; j < group.length; j++) {
|
|
4021
|
+
const fi = group[i];
|
|
4022
|
+
const fj = group[j];
|
|
4023
|
+
if (!fi || !fj) continue;
|
|
4024
|
+
const wordsA = new Set(fi.title.toLowerCase().split(/\W+/).filter((w) => w.length > 3));
|
|
4025
|
+
const wordsB = new Set(fj.title.toLowerCase().split(/\W+/).filter((w) => w.length > 3));
|
|
4026
|
+
if ([...wordsA].filter((w) => wordsB.has(w)).length >= 2) duplicateSuspects.push(` ${fi.featureKey} + ${fj.featureKey} (${domain}) — "${fi.title}" / "${fj.title}"`);
|
|
4027
|
+
}
|
|
4028
|
+
const sections = [];
|
|
4029
|
+
if (unaddressedReopens.length > 0) sections.push(`⚠ Unaddressed reopens — stale fields not yet reviewed (${unaddressedReopens.length}):\n${unaddressedReopens.join("\n")}`);
|
|
4030
|
+
if (missingDecisions.length > 0) sections.push(`⚠ Missing decisions (${missingDecisions.length}):\n${missingDecisions.join("\n")}`);
|
|
4031
|
+
if (flaggedDecisions.length > 0) {
|
|
4032
|
+
const lines = flaggedDecisions.map((f) => ` ${f.key.padEnd(20)} "${f.decision.slice(0, 60)}" — contains "${f.keyword}"`);
|
|
4033
|
+
sections.push(`⚠ Decisions flagged for review (${flaggedDecisions.length}):\n${lines.join("\n")}`);
|
|
4034
|
+
}
|
|
4035
|
+
if (duplicateSuspects.length > 0) sections.push(`⚠ Possible duplicates (${duplicateSuspects.length}):\n${duplicateSuspects.join("\n")}`);
|
|
4036
|
+
sections.push(`✓ ${cleanCount} feature(s) with clean decisions`);
|
|
4037
|
+
return { content: [{
|
|
4038
|
+
type: "text",
|
|
4039
|
+
text: sections.join("\n\n")
|
|
4040
|
+
}] };
|
|
4041
|
+
}
|
|
4042
|
+
function handleFeatureSimilarity(a, workspaceRoot$1) {
|
|
4043
|
+
if (!a.path) return {
|
|
4044
|
+
content: [{
|
|
4045
|
+
type: "text",
|
|
4046
|
+
text: "path is required"
|
|
4047
|
+
}],
|
|
4048
|
+
isError: true
|
|
4049
|
+
};
|
|
4050
|
+
const featureDir = resolve(String(a.path), workspaceRoot$1);
|
|
4051
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
4052
|
+
let raw;
|
|
4053
|
+
try {
|
|
4054
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
4055
|
+
} catch {
|
|
4056
|
+
return {
|
|
4057
|
+
content: [{
|
|
4058
|
+
type: "text",
|
|
4059
|
+
text: `No feature.json found at "${featurePath}"`
|
|
4060
|
+
}],
|
|
4061
|
+
isError: true
|
|
4062
|
+
};
|
|
4063
|
+
}
|
|
4064
|
+
const result = validateFeature(JSON.parse(raw));
|
|
4065
|
+
if (!result.success) return {
|
|
4066
|
+
content: [{
|
|
4067
|
+
type: "text",
|
|
4068
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
4069
|
+
}],
|
|
4070
|
+
isError: true
|
|
4071
|
+
};
|
|
4072
|
+
const target = result.data;
|
|
4073
|
+
const targetTags = new Set(target.tags ?? []);
|
|
4074
|
+
const targetWords = new Set((target.title + " " + target.problem).toLowerCase().split(/\W+/).filter((w) => w.length > 4));
|
|
4075
|
+
const allFeatures = scanFeatures$1(workspaceRoot$1);
|
|
4076
|
+
const matches = [];
|
|
4077
|
+
for (const { feature } of allFeatures) {
|
|
4078
|
+
if (feature.featureKey === target.featureKey) continue;
|
|
4079
|
+
if (feature.lineage?.parent === target.featureKey || target.lineage?.parent === feature.featureKey) continue;
|
|
4080
|
+
let score = 0;
|
|
4081
|
+
const reasons = [];
|
|
4082
|
+
if (target.domain && feature.domain === target.domain) {
|
|
4083
|
+
score += 3;
|
|
4084
|
+
reasons.push(`same domain: ${feature.domain}`);
|
|
4085
|
+
}
|
|
4086
|
+
const sharedTags = (feature.tags ?? []).filter((t) => targetTags.has(t));
|
|
4087
|
+
if (sharedTags.length > 0) {
|
|
4088
|
+
score += sharedTags.length * 2;
|
|
4089
|
+
reasons.push(`shared tags: ${sharedTags.join(", ")}`);
|
|
4090
|
+
}
|
|
4091
|
+
const featureWords = new Set((feature.title + " " + feature.problem).toLowerCase().split(/\W+/).filter((w) => w.length > 4));
|
|
4092
|
+
const wordOverlap = [...targetWords].filter((w) => featureWords.has(w)).length;
|
|
4093
|
+
if (wordOverlap >= 2) {
|
|
4094
|
+
score += wordOverlap;
|
|
4095
|
+
reasons.push(`${wordOverlap} shared keywords`);
|
|
4096
|
+
}
|
|
4097
|
+
if (score > 0) matches.push({
|
|
4098
|
+
feature,
|
|
4099
|
+
score,
|
|
4100
|
+
reasons
|
|
4101
|
+
});
|
|
4102
|
+
}
|
|
4103
|
+
matches.sort((a$1, b) => b.score - a$1.score);
|
|
4104
|
+
if (matches.length === 0) return { content: [{
|
|
4105
|
+
type: "text",
|
|
4106
|
+
text: `No similar features found for "${target.featureKey} — ${target.title}".`
|
|
4107
|
+
}] };
|
|
4108
|
+
const stars = (score) => score >= 6 ? "★★★" : score >= 4 ? "★★ " : "★ ";
|
|
4109
|
+
return { content: [{
|
|
4110
|
+
type: "text",
|
|
4111
|
+
text: [`Similar features to "${target.featureKey} — ${target.title}":\n`, ...matches.slice(0, 10).map((m) => `${stars(m.score)} ${m.feature.featureKey.padEnd(20)} "${m.feature.title}"\n ${m.reasons.join(" · ")}`)].join("\n")
|
|
4112
|
+
}] };
|
|
4113
|
+
}
|
|
4114
|
+
|
|
4115
|
+
//#endregion
|
|
4116
|
+
//#region ../lac-mcp/src/tools/git-tools.ts
|
|
4117
|
+
function handleTimeTravel(a, workspaceRoot$1) {
|
|
4118
|
+
if (!a.path) return {
|
|
4119
|
+
content: [{
|
|
4120
|
+
type: "text",
|
|
4121
|
+
text: "path is required"
|
|
4122
|
+
}],
|
|
4123
|
+
isError: true
|
|
4124
|
+
};
|
|
4125
|
+
const featureDir = path.isAbsolute(String(a.path)) ? String(a.path) : path.resolve(workspaceRoot$1, String(a.path));
|
|
4126
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
4127
|
+
let gitRoot;
|
|
4128
|
+
try {
|
|
4129
|
+
gitRoot = execSync("git rev-parse --show-toplevel", {
|
|
4130
|
+
cwd: featureDir,
|
|
4131
|
+
encoding: "utf-8",
|
|
4132
|
+
stdio: [
|
|
4133
|
+
"pipe",
|
|
4134
|
+
"pipe",
|
|
4135
|
+
"pipe"
|
|
4136
|
+
]
|
|
4137
|
+
}).trim();
|
|
4138
|
+
} catch {
|
|
4139
|
+
return {
|
|
4140
|
+
content: [{
|
|
4141
|
+
type: "text",
|
|
4142
|
+
text: "Not a git repository. time_travel requires git."
|
|
4143
|
+
}],
|
|
4144
|
+
isError: true
|
|
4145
|
+
};
|
|
4146
|
+
}
|
|
4147
|
+
const relPath = path.relative(gitRoot, featurePath).replace(/\\/g, "/");
|
|
4148
|
+
let logOutput;
|
|
4149
|
+
try {
|
|
4150
|
+
logOutput = execSync(`git log --format="%H %as %s" -- "${relPath}"`, {
|
|
4151
|
+
cwd: gitRoot,
|
|
4152
|
+
encoding: "utf-8",
|
|
4153
|
+
stdio: [
|
|
4154
|
+
"pipe",
|
|
4155
|
+
"pipe",
|
|
4156
|
+
"pipe"
|
|
4157
|
+
]
|
|
4158
|
+
}).trim();
|
|
4159
|
+
} catch {
|
|
4160
|
+
logOutput = "";
|
|
4161
|
+
}
|
|
4162
|
+
if (!logOutput) return { content: [{
|
|
4163
|
+
type: "text",
|
|
4164
|
+
text: `No git history found for "${relPath}". Has this file been committed?`
|
|
4165
|
+
}] };
|
|
4166
|
+
const commits = logOutput.split("\n").map((line) => {
|
|
4167
|
+
const parts = line.split(" ");
|
|
4168
|
+
return {
|
|
4169
|
+
sha: parts[0] ?? "",
|
|
4170
|
+
date: parts[1] ?? "",
|
|
4171
|
+
message: parts.slice(2).join(" ")
|
|
4172
|
+
};
|
|
4173
|
+
});
|
|
4174
|
+
if (!a.date && !a.commit) {
|
|
4175
|
+
const historyLines = commits.map((c) => ` ${c.date} ${c.sha.slice(0, 8)} ${c.message}`);
|
|
4176
|
+
return { content: [{
|
|
4177
|
+
type: "text",
|
|
4178
|
+
text: `Git history for "${relPath}" (${commits.length} commit(s)):\n\n${historyLines.join("\n")}\n\nCall time_travel again with date (YYYY-MM-DD) or commit (SHA) to view a specific version.`
|
|
4179
|
+
}] };
|
|
4180
|
+
}
|
|
4181
|
+
let targetSha;
|
|
4182
|
+
if (a.commit) targetSha = String(a.commit);
|
|
4183
|
+
else {
|
|
4184
|
+
const targetDate = String(a.date);
|
|
4185
|
+
const match = commits.find((c) => c.date <= targetDate);
|
|
4186
|
+
if (!match) return { content: [{
|
|
4187
|
+
type: "text",
|
|
4188
|
+
text: `No commits found at or before "${targetDate}".\n\nAvailable history:\n${commits.map((c) => ` ${c.date} ${c.sha.slice(0, 8)} ${c.message}`).join("\n")}`
|
|
4189
|
+
}] };
|
|
4190
|
+
targetSha = match.sha;
|
|
4191
|
+
}
|
|
4192
|
+
let historicalContent;
|
|
4193
|
+
try {
|
|
4194
|
+
historicalContent = execSync(`git show "${targetSha}:${relPath}"`, {
|
|
4195
|
+
cwd: gitRoot,
|
|
4196
|
+
encoding: "utf-8",
|
|
4197
|
+
stdio: [
|
|
4198
|
+
"pipe",
|
|
4199
|
+
"pipe",
|
|
4200
|
+
"pipe"
|
|
4201
|
+
]
|
|
4202
|
+
});
|
|
4203
|
+
} catch {
|
|
4204
|
+
return {
|
|
4205
|
+
content: [{
|
|
4206
|
+
type: "text",
|
|
4207
|
+
text: `Could not read "${relPath}" at commit ${targetSha.slice(0, 8)}.`
|
|
4208
|
+
}],
|
|
4209
|
+
isError: true
|
|
4210
|
+
};
|
|
4211
|
+
}
|
|
4212
|
+
const targetCommit = commits.find((c) => c.sha === targetSha || targetSha != null && c.sha.length >= 7 && targetSha.startsWith(c.sha.slice(0, 7)));
|
|
4213
|
+
const commitInfo = targetCommit ? `${targetCommit.date} ${targetSha.slice(0, 8)} ${targetCommit.message}` : targetSha.slice(0, 8);
|
|
4214
|
+
let displayContent;
|
|
4215
|
+
try {
|
|
4216
|
+
const validation = validateFeature(JSON.parse(historicalContent));
|
|
4217
|
+
displayContent = validation.success ? JSON.stringify(validation.data, null, 2) : historicalContent;
|
|
4218
|
+
} catch {
|
|
4219
|
+
displayContent = historicalContent;
|
|
4220
|
+
}
|
|
4221
|
+
const newerCommits = commits.filter((c) => targetCommit?.date != null ? c.date > targetCommit.date : false);
|
|
4222
|
+
return { content: [{
|
|
4223
|
+
type: "text",
|
|
4224
|
+
text: `feature.json at: ${commitInfo}${newerCommits.length > 0 ? `\n\n[${newerCommits.length} commit(s) made after this snapshot]` : ""}\n\n${displayContent}`
|
|
4225
|
+
}] };
|
|
4226
|
+
}
|
|
4227
|
+
|
|
4228
|
+
//#endregion
|
|
4229
|
+
//#region ../lac-mcp/src/tools/impact.ts
|
|
4230
|
+
const SOURCE_EXTENSIONS$1 = new Set([
|
|
4231
|
+
".ts",
|
|
4232
|
+
".tsx",
|
|
4233
|
+
".js",
|
|
4234
|
+
".jsx",
|
|
4235
|
+
".py",
|
|
4236
|
+
".go",
|
|
4237
|
+
".rs",
|
|
4238
|
+
".vue",
|
|
4239
|
+
".svelte"
|
|
4240
|
+
]);
|
|
4241
|
+
function scanFeatures(dir) {
|
|
4242
|
+
const results = [];
|
|
4243
|
+
walkFeatures(dir, results);
|
|
4244
|
+
return results;
|
|
4245
|
+
}
|
|
4246
|
+
function walkFeatures(dir, results) {
|
|
4247
|
+
try {
|
|
4248
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
4249
|
+
for (const e of entries) {
|
|
4250
|
+
if ([
|
|
4251
|
+
"node_modules",
|
|
4252
|
+
".git",
|
|
4253
|
+
"dist"
|
|
4254
|
+
].includes(e.name)) continue;
|
|
4255
|
+
const full = path.join(dir, e.name);
|
|
4256
|
+
if (e.isDirectory()) walkFeatures(full, results);
|
|
4257
|
+
else if (e.name === "feature.json") try {
|
|
4258
|
+
const r = validateFeature(JSON.parse(fs.readFileSync(full, "utf-8")));
|
|
4259
|
+
if (r.success) results.push({
|
|
4260
|
+
feature: r.data,
|
|
4261
|
+
filePath: full
|
|
4262
|
+
});
|
|
4263
|
+
} catch {}
|
|
4264
|
+
}
|
|
4265
|
+
} catch {}
|
|
4266
|
+
}
|
|
4267
|
+
function getSourceFiles(dir) {
|
|
4268
|
+
const files = [];
|
|
4269
|
+
try {
|
|
4270
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
4271
|
+
for (const e of entries) {
|
|
4272
|
+
if ([
|
|
4273
|
+
"node_modules",
|
|
4274
|
+
".git",
|
|
4275
|
+
"dist"
|
|
4276
|
+
].includes(e.name)) continue;
|
|
4277
|
+
const full = path.join(dir, e.name);
|
|
4278
|
+
if (e.isDirectory()) files.push(...getSourceFiles(full));
|
|
4279
|
+
else if (SOURCE_EXTENSIONS$1.has(path.extname(e.name))) files.push(full);
|
|
4280
|
+
}
|
|
4281
|
+
} catch {}
|
|
4282
|
+
return files;
|
|
4283
|
+
}
|
|
4284
|
+
function findOwningFeatureKey(filePath) {
|
|
4285
|
+
let current = path.dirname(filePath);
|
|
4286
|
+
while (true) {
|
|
4287
|
+
const candidate = path.join(current, "feature.json");
|
|
4288
|
+
if (fs.existsSync(candidate)) try {
|
|
4289
|
+
const r = validateFeature(JSON.parse(fs.readFileSync(candidate, "utf-8")));
|
|
4290
|
+
if (r.success) return r.data.featureKey;
|
|
4291
|
+
} catch {}
|
|
4292
|
+
const parent = path.dirname(current);
|
|
4293
|
+
if (parent === current) return null;
|
|
4294
|
+
current = parent;
|
|
4295
|
+
}
|
|
4296
|
+
}
|
|
4297
|
+
function handleCrossFeatureImpact(a, workspaceRoot$1) {
|
|
4298
|
+
if (!a.file) return {
|
|
4299
|
+
content: [{
|
|
4300
|
+
type: "text",
|
|
4301
|
+
text: "file parameter is required"
|
|
4302
|
+
}],
|
|
4303
|
+
isError: true
|
|
4304
|
+
};
|
|
4305
|
+
const targetFile = path.isAbsolute(String(a.file)) ? String(a.file) : path.resolve(workspaceRoot$1, String(a.file));
|
|
4306
|
+
if (!fs.existsSync(targetFile)) return {
|
|
4307
|
+
content: [{
|
|
4308
|
+
type: "text",
|
|
4309
|
+
text: `File not found: "${targetFile}"`
|
|
4310
|
+
}],
|
|
4311
|
+
isError: true
|
|
4312
|
+
};
|
|
4313
|
+
const targetBasename = path.basename(targetFile);
|
|
4314
|
+
const targetNoExt = path.basename(targetFile, path.extname(targetFile));
|
|
4315
|
+
const targetRelFromRoot = path.relative(workspaceRoot$1, targetFile).replace(/\\/g, "/");
|
|
4316
|
+
const patterns = [...new Set([
|
|
4317
|
+
targetBasename,
|
|
4318
|
+
targetNoExt,
|
|
4319
|
+
targetRelFromRoot
|
|
4320
|
+
])];
|
|
4321
|
+
const owningKey = findOwningFeatureKey(targetFile);
|
|
4322
|
+
const features = scanFeatures(workspaceRoot$1);
|
|
4323
|
+
const impacts = [];
|
|
4324
|
+
for (const { feature, filePath: featureJsonPath } of features) {
|
|
4325
|
+
if (feature.featureKey === owningKey) continue;
|
|
4326
|
+
const featureDir = path.dirname(featureJsonPath);
|
|
4327
|
+
const sourceFiles = getSourceFiles(featureDir);
|
|
4328
|
+
const matchedFiles = [];
|
|
4329
|
+
const matchedPatterns = [];
|
|
4330
|
+
for (const srcFile of sourceFiles) {
|
|
4331
|
+
if (srcFile === targetFile) continue;
|
|
4332
|
+
try {
|
|
4333
|
+
const content = fs.readFileSync(srcFile, "utf-8");
|
|
4334
|
+
const matched = patterns.filter((p) => content.includes(p));
|
|
4335
|
+
if (matched.length > 0) {
|
|
4336
|
+
matchedFiles.push(path.relative(featureDir, srcFile));
|
|
4337
|
+
matchedPatterns.push(...matched);
|
|
4338
|
+
}
|
|
4339
|
+
} catch {}
|
|
4340
|
+
}
|
|
4341
|
+
if (matchedFiles.length > 0) impacts.push({
|
|
4342
|
+
feature,
|
|
4343
|
+
matchedFiles,
|
|
4344
|
+
patterns: [...new Set(matchedPatterns)]
|
|
4345
|
+
});
|
|
4346
|
+
}
|
|
4347
|
+
const lines = [
|
|
4348
|
+
`Impact analysis: ${path.relative(workspaceRoot$1, targetFile)}`,
|
|
4349
|
+
"─".repeat(50),
|
|
4350
|
+
owningKey ? `Owned by : ${owningKey}` : "No owning feature found (untracked file)"
|
|
4351
|
+
];
|
|
4352
|
+
if (impacts.length === 0) lines.push("\nNo other features reference this file.");
|
|
4353
|
+
else {
|
|
4354
|
+
lines.push(`\n${impacts.length} feature(s) reference this file — changes may affect them:\n`);
|
|
4355
|
+
for (const imp of impacts) {
|
|
4356
|
+
lines.push(` ${imp.feature.featureKey.padEnd(20)} ${imp.feature.status.padEnd(10)} "${imp.feature.title}"`);
|
|
4357
|
+
const fileList = imp.matchedFiles.slice(0, 3).join(", ");
|
|
4358
|
+
const more = imp.matchedFiles.length > 3 ? ` +${imp.matchedFiles.length - 3} more` : "";
|
|
4359
|
+
lines.push(` referenced in: ${fileList}${more}`);
|
|
4360
|
+
}
|
|
4361
|
+
lines.push("\n⚠ Changes to this file may affect all features listed above.");
|
|
4362
|
+
}
|
|
4363
|
+
return { content: [{
|
|
4364
|
+
type: "text",
|
|
4365
|
+
text: lines.join("\n")
|
|
4366
|
+
}] };
|
|
4367
|
+
}
|
|
4368
|
+
|
|
3778
4369
|
//#endregion
|
|
3779
4370
|
//#region ../lac-claude/dist/index.mjs
|
|
3780
4371
|
Object.freeze({ status: "aborted" });
|
|
@@ -7418,6 +8009,27 @@ const LineageSchema = object({
|
|
|
7418
8009
|
children: array(string()).optional(),
|
|
7419
8010
|
spawnReason: string().nullable().optional()
|
|
7420
8011
|
});
|
|
8012
|
+
const StatusTransitionSchema = object({
|
|
8013
|
+
from: FeatureStatusSchema,
|
|
8014
|
+
to: FeatureStatusSchema,
|
|
8015
|
+
date: string().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
8016
|
+
reason: string().optional()
|
|
8017
|
+
});
|
|
8018
|
+
const RevisionSchema = object({
|
|
8019
|
+
date: string().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
8020
|
+
author: string().min(1),
|
|
8021
|
+
fields_changed: array(string()).min(1),
|
|
8022
|
+
reason: string().min(1)
|
|
8023
|
+
});
|
|
8024
|
+
const PublicInterfaceEntrySchema = object({
|
|
8025
|
+
name: string().min(1),
|
|
8026
|
+
type: string().min(1),
|
|
8027
|
+
description: string().optional()
|
|
8028
|
+
});
|
|
8029
|
+
const CodeSnippetSchema = object({
|
|
8030
|
+
label: string().min(1),
|
|
8031
|
+
snippet: string().min(1)
|
|
8032
|
+
});
|
|
7421
8033
|
const FeatureSchema = object({
|
|
7422
8034
|
featureKey: string().regex(FEATURE_KEY_PATTERN, "featureKey must match pattern <domain>-YYYY-NNN (e.g. feat-2026-001, proc-2026-001)"),
|
|
7423
8035
|
title: string().min(1),
|
|
@@ -7433,55 +8045,22 @@ const FeatureSchema = object({
|
|
|
7433
8045
|
annotations: array(AnnotationSchema).optional(),
|
|
7434
8046
|
lineage: LineageSchema.optional(),
|
|
7435
8047
|
successCriteria: string().optional(),
|
|
7436
|
-
domain: string().optional()
|
|
8048
|
+
domain: string().optional(),
|
|
8049
|
+
priority: number().int().min(1).max(5).optional(),
|
|
8050
|
+
statusHistory: array(StatusTransitionSchema).optional(),
|
|
8051
|
+
revisions: array(RevisionSchema).optional(),
|
|
8052
|
+
superseded_by: string().regex(FEATURE_KEY_PATTERN, "superseded_by must be a valid featureKey").optional(),
|
|
8053
|
+
superseded_from: array(string().regex(FEATURE_KEY_PATTERN, "each superseded_from entry must be a valid featureKey")).optional(),
|
|
8054
|
+
merged_into: string().regex(FEATURE_KEY_PATTERN, "merged_into must be a valid featureKey").optional(),
|
|
8055
|
+
merged_from: array(string().regex(FEATURE_KEY_PATTERN, "each merged_from entry must be a valid featureKey")).optional(),
|
|
8056
|
+
userGuide: string().optional(),
|
|
8057
|
+
componentFile: string().optional(),
|
|
8058
|
+
npmPackages: array(string()).optional(),
|
|
8059
|
+
publicInterface: array(PublicInterfaceEntrySchema).optional(),
|
|
8060
|
+
externalDependencies: array(string()).optional(),
|
|
8061
|
+
lastVerifiedDate: string().regex(/^\d{4}-\d{2}-\d{2}$/, "lastVerifiedDate must be YYYY-MM-DD").optional(),
|
|
8062
|
+
codeSnippets: array(CodeSnippetSchema).optional()
|
|
7437
8063
|
});
|
|
7438
|
-
function validateFeature(data) {
|
|
7439
|
-
const result = FeatureSchema.safeParse(data);
|
|
7440
|
-
if (result.success) return {
|
|
7441
|
-
success: true,
|
|
7442
|
-
data: result.data
|
|
7443
|
-
};
|
|
7444
|
-
return {
|
|
7445
|
-
success: false,
|
|
7446
|
-
errors: result.error.issues.map((issue$1$1) => {
|
|
7447
|
-
return `${issue$1$1.path.length > 0 ? `${issue$1$1.path.join(".")}: ` : ""}${issue$1$1.message}`;
|
|
7448
|
-
})
|
|
7449
|
-
};
|
|
7450
|
-
}
|
|
7451
|
-
function createClient() {
|
|
7452
|
-
let apiKey = process$1.env.ANTHROPIC_API_KEY;
|
|
7453
|
-
if (!apiKey) {
|
|
7454
|
-
const configPath = findLacConfig();
|
|
7455
|
-
if (configPath) try {
|
|
7456
|
-
apiKey = JSON.parse(fs.readFileSync(configPath, "utf-8"))?.ai?.apiKey;
|
|
7457
|
-
} catch {}
|
|
7458
|
-
}
|
|
7459
|
-
if (!apiKey) throw new Error("ANTHROPIC_API_KEY not set.\nSet it via:\n export ANTHROPIC_API_KEY=sk-ant-...\nOr add it to .lac/config.json:\n { \"ai\": { \"apiKey\": \"sk-ant-...\" } }\nGet a key at https://console.anthropic.com/settings/keys");
|
|
7460
|
-
return new Anthropic({ apiKey });
|
|
7461
|
-
}
|
|
7462
|
-
function findLacConfig() {
|
|
7463
|
-
let current = process$1.cwd();
|
|
7464
|
-
while (true) {
|
|
7465
|
-
const candidate = path.join(current, ".lac", "config.json");
|
|
7466
|
-
if (fs.existsSync(candidate)) return candidate;
|
|
7467
|
-
const parent = path.dirname(current);
|
|
7468
|
-
if (parent === current) return null;
|
|
7469
|
-
current = parent;
|
|
7470
|
-
}
|
|
7471
|
-
}
|
|
7472
|
-
async function generateText(client, systemPrompt, userMessage, model = "claude-sonnet-4-6") {
|
|
7473
|
-
const content = (await client.messages.create({
|
|
7474
|
-
model,
|
|
7475
|
-
max_tokens: 4096,
|
|
7476
|
-
system: systemPrompt,
|
|
7477
|
-
messages: [{
|
|
7478
|
-
role: "user",
|
|
7479
|
-
content: userMessage
|
|
7480
|
-
}]
|
|
7481
|
-
})).content[0];
|
|
7482
|
-
if (!content || content.type !== "text") throw new Error("Unexpected response type from Claude API");
|
|
7483
|
-
return content.text;
|
|
7484
|
-
}
|
|
7485
8064
|
const SOURCE_EXTENSIONS = new Set([
|
|
7486
8065
|
".ts",
|
|
7487
8066
|
".tsx",
|
|
@@ -7500,16 +8079,20 @@ const SOURCE_EXTENSIONS = new Set([
|
|
|
7500
8079
|
]);
|
|
7501
8080
|
const MAX_FILE_CHARS = 8e3;
|
|
7502
8081
|
const MAX_TOTAL_CHARS = 32e4;
|
|
7503
|
-
function buildContext(featureDir, feature) {
|
|
8082
|
+
function buildContext(featureDir, feature, opts = {}) {
|
|
8083
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
8084
|
+
const { files: sourceFiles, truncatedPaths } = gatherSourceFiles(featureDir, opts.maxFileChars);
|
|
7504
8085
|
return {
|
|
7505
8086
|
feature,
|
|
7506
|
-
featurePath
|
|
7507
|
-
sourceFiles
|
|
7508
|
-
gitLog: getGitLog(featureDir)
|
|
8087
|
+
featurePath,
|
|
8088
|
+
sourceFiles,
|
|
8089
|
+
gitLog: getGitLog(featureDir),
|
|
8090
|
+
truncatedFiles: truncatedPaths
|
|
7509
8091
|
};
|
|
7510
8092
|
}
|
|
7511
|
-
function gatherSourceFiles(dir) {
|
|
8093
|
+
function gatherSourceFiles(dir, maxFileChars = MAX_FILE_CHARS) {
|
|
7512
8094
|
const files = [];
|
|
8095
|
+
const truncatedPaths = [];
|
|
7513
8096
|
let totalChars = 0;
|
|
7514
8097
|
const priorityNames = [
|
|
7515
8098
|
"package.json",
|
|
@@ -7520,11 +8103,15 @@ function gatherSourceFiles(dir) {
|
|
|
7520
8103
|
for (const name of priorityNames) {
|
|
7521
8104
|
const p = path.join(dir, name);
|
|
7522
8105
|
if (fs.existsSync(p)) try {
|
|
7523
|
-
const
|
|
8106
|
+
const raw = fs.readFileSync(p, "utf-8");
|
|
8107
|
+
const wasTruncated = raw.length > 4e3;
|
|
8108
|
+
const content = truncate(raw, 4e3);
|
|
7524
8109
|
files.push({
|
|
7525
8110
|
relativePath: name,
|
|
7526
|
-
content
|
|
8111
|
+
content,
|
|
8112
|
+
truncated: wasTruncated || void 0
|
|
7527
8113
|
});
|
|
8114
|
+
if (wasTruncated) truncatedPaths.push(name);
|
|
7528
8115
|
totalChars += content.length;
|
|
7529
8116
|
} catch {}
|
|
7530
8117
|
}
|
|
@@ -7537,16 +8124,23 @@ function gatherSourceFiles(dir) {
|
|
|
7537
8124
|
if (totalChars >= MAX_TOTAL_CHARS) break;
|
|
7538
8125
|
if (priorityNames.includes(path.basename(filePath))) continue;
|
|
7539
8126
|
try {
|
|
7540
|
-
const
|
|
8127
|
+
const raw = fs.readFileSync(filePath, "utf-8");
|
|
8128
|
+
const wasTruncated = raw.length > maxFileChars;
|
|
8129
|
+
const content = truncate(raw, maxFileChars);
|
|
7541
8130
|
const relativePath = path.relative(dir, filePath);
|
|
7542
8131
|
files.push({
|
|
7543
8132
|
relativePath,
|
|
7544
|
-
content
|
|
8133
|
+
content,
|
|
8134
|
+
truncated: wasTruncated || void 0
|
|
7545
8135
|
});
|
|
8136
|
+
if (wasTruncated) truncatedPaths.push(relativePath);
|
|
7546
8137
|
totalChars += content.length;
|
|
7547
8138
|
} catch {}
|
|
7548
8139
|
}
|
|
7549
|
-
return
|
|
8140
|
+
return {
|
|
8141
|
+
files,
|
|
8142
|
+
truncatedPaths
|
|
8143
|
+
};
|
|
7550
8144
|
}
|
|
7551
8145
|
function walkDir(dir) {
|
|
7552
8146
|
const results = [];
|
|
@@ -7582,6 +8176,7 @@ function getGitLog(dir) {
|
|
|
7582
8176
|
}
|
|
7583
8177
|
function contextToString(ctx) {
|
|
7584
8178
|
const parts = [];
|
|
8179
|
+
if (ctx.truncatedFiles.length > 0) parts.push(`⚠ WARNING: ${ctx.truncatedFiles.length} file(s) were truncated — extraction may be incomplete:\n` + ctx.truncatedFiles.map((f) => ` - ${f}`).join("\n"));
|
|
7585
8180
|
parts.push("=== feature.json ===");
|
|
7586
8181
|
parts.push(JSON.stringify(ctx.feature, null, 2));
|
|
7587
8182
|
if (ctx.gitLog) {
|
|
@@ -7589,32 +8184,11 @@ function contextToString(ctx) {
|
|
|
7589
8184
|
parts.push(ctx.gitLog);
|
|
7590
8185
|
}
|
|
7591
8186
|
for (const file of ctx.sourceFiles) {
|
|
7592
|
-
parts.push(`\n=== ${file.relativePath} ===`);
|
|
8187
|
+
parts.push(`\n=== ${file.relativePath}${file.truncated ? " [truncated]" : ""} ===`);
|
|
7593
8188
|
parts.push(file.content);
|
|
7594
8189
|
}
|
|
7595
8190
|
return parts.join("\n");
|
|
7596
8191
|
}
|
|
7597
|
-
const RESET = "\x1B[0m";
|
|
7598
|
-
const BOLD = "\x1B[1m";
|
|
7599
|
-
const GREEN = "\x1B[32m";
|
|
7600
|
-
const CYAN = "\x1B[36m";
|
|
7601
|
-
const DIM = "\x1B[2m";
|
|
7602
|
-
function formatValue(value) {
|
|
7603
|
-
if (typeof value === "string") return value.length > 300 ? value.slice(0, 300) + "…" : value;
|
|
7604
|
-
return JSON.stringify(value, null, 2);
|
|
7605
|
-
}
|
|
7606
|
-
function printDiff(diffs) {
|
|
7607
|
-
const separator = "━".repeat(52);
|
|
7608
|
-
for (const diff of diffs) {
|
|
7609
|
-
const label = diff.wasEmpty ? "empty → generated" : "updated";
|
|
7610
|
-
process.stdout.write(`\n${BOLD}${CYAN}${separator}${RESET}\n`);
|
|
7611
|
-
process.stdout.write(`${BOLD} ${diff.field}${RESET} ${DIM}(${label})${RESET}\n`);
|
|
7612
|
-
process.stdout.write(`${CYAN}${separator}${RESET}\n`);
|
|
7613
|
-
const lines = formatValue(diff.proposed).split("\n");
|
|
7614
|
-
for (const line of lines) process.stdout.write(`${GREEN} ${line}${RESET}\n`);
|
|
7615
|
-
}
|
|
7616
|
-
process.stdout.write("\n");
|
|
7617
|
-
}
|
|
7618
8192
|
const FILL_PROMPTS = {
|
|
7619
8193
|
analysis: {
|
|
7620
8194
|
system: `You are a software engineering analyst. Given a feature.json and the feature's source code, write a clear analysis section. Cover: what the code does architecturally, key patterns used, and why they were likely chosen. Be specific — name actual functions, modules, and techniques visible in the code. Write in first-person technical prose, 150-300 words. Return only the analysis text, no JSON wrapper, no markdown heading.`,
|
|
@@ -7671,16 +8245,71 @@ Return ONLY a valid JSON array — no other text:
|
|
|
7671
8245
|
system: `You are a software engineering analyst. Write a plain-language success criteria statement for this feature — "how do we know it's done and working?" Be specific and testable. 1-3 sentences. Return only the text, no JSON wrapper, no heading.`,
|
|
7672
8246
|
userSuffix: "Write the success criteria for this feature."
|
|
7673
8247
|
},
|
|
8248
|
+
userGuide: {
|
|
8249
|
+
system: `You are a technical writer writing for end users — not developers. Given a feature.json, write a plain-language user guide for this feature. Explain what it does and how to use it in everyday language. Avoid technical terms, implementation details, and acceptance-criteria framing. Write from the user's perspective: what they will see, what they can do, and why it helps them. 2-5 sentences or a short bullet list. Return only the guide text, no JSON wrapper, no heading.`,
|
|
8250
|
+
userSuffix: "Write a plain-language user guide for this feature."
|
|
8251
|
+
},
|
|
7674
8252
|
domain: {
|
|
7675
8253
|
system: `You are a software engineering analyst. Identify the primary technical domain for this feature from its code and problem statement. Return a single lowercase word or short hyphenated phrase (e.g. "auth", "payments", "notifications", "data-pipeline"). Return only the domain value — nothing else.`,
|
|
7676
8254
|
userSuffix: "Identify the domain for this feature."
|
|
8255
|
+
},
|
|
8256
|
+
componentFile: {
|
|
8257
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, identify the single primary file that implements this feature. Return a relative path from the project root (e.g. "src/components/FeatureCard.tsx", "packages/lac-mcp/src/index.ts"). Return only the path — nothing else.`,
|
|
8258
|
+
userSuffix: "Identify the primary source file for this feature."
|
|
8259
|
+
},
|
|
8260
|
+
npmPackages: {
|
|
8261
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, list the npm packages this feature directly imports or depends on at runtime. Exclude dev-only tools (vitest, eslint, etc.). Exclude Node built-ins.
|
|
8262
|
+
|
|
8263
|
+
Return ONLY a valid JSON array of package name strings — no other text:
|
|
8264
|
+
["package-a", "package-b"]`,
|
|
8265
|
+
userSuffix: "List the npm packages this feature depends on."
|
|
8266
|
+
},
|
|
8267
|
+
publicInterface: {
|
|
8268
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, extract the public interface — exported props, function signatures, or API surface that consumers of this feature depend on.
|
|
8269
|
+
|
|
8270
|
+
Return ONLY a valid JSON array — no other text:
|
|
8271
|
+
[
|
|
8272
|
+
{
|
|
8273
|
+
"name": "string",
|
|
8274
|
+
"type": "string",
|
|
8275
|
+
"description": "string"
|
|
8276
|
+
}
|
|
8277
|
+
]`,
|
|
8278
|
+
userSuffix: "Extract the public interface for this feature."
|
|
8279
|
+
},
|
|
8280
|
+
externalDependencies: {
|
|
8281
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, identify runtime dependencies on other features or internal modules that are NOT captured by the lineage (parent/children). These are cross-feature implementation dependencies — e.g. a feature that calls into another feature's API at runtime, or imports a shared utility that belongs to a distinct feature.
|
|
8282
|
+
|
|
8283
|
+
Return ONLY a valid JSON array of featureKey strings or relative file paths — no other text:
|
|
8284
|
+
["feat-2026-003", "src/utils/shared.ts"]`,
|
|
8285
|
+
userSuffix: "List the external runtime dependencies for this feature."
|
|
8286
|
+
},
|
|
8287
|
+
lastVerifiedDate: {
|
|
8288
|
+
system: `You are a software engineering analyst. Return today's date in YYYY-MM-DD format as the lastVerifiedDate — marking that this feature.json was reviewed and confirmed accurate right now. Return only the date string — nothing else.`,
|
|
8289
|
+
userSuffix: `Return today's date as the lastVerifiedDate.`
|
|
8290
|
+
},
|
|
8291
|
+
codeSnippets: {
|
|
8292
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, extract 2-5 critical one-liners or short code blocks that are the most important to preserve verbatim — glob patterns, key API calls, non-obvious configuration, or architectural pivots. These are the snippets someone would need to reconstruct this feature accurately.
|
|
8293
|
+
|
|
8294
|
+
Return ONLY a valid JSON array — no other text:
|
|
8295
|
+
[
|
|
8296
|
+
{
|
|
8297
|
+
"label": "string",
|
|
8298
|
+
"snippet": "string"
|
|
8299
|
+
}
|
|
8300
|
+
]`,
|
|
8301
|
+
userSuffix: "Extract the critical code snippets for this feature."
|
|
7677
8302
|
}
|
|
7678
8303
|
};
|
|
7679
8304
|
const JSON_FIELDS = new Set([
|
|
7680
8305
|
"decisions",
|
|
7681
8306
|
"knownLimitations",
|
|
7682
8307
|
"tags",
|
|
7683
|
-
"annotations"
|
|
8308
|
+
"annotations",
|
|
8309
|
+
"npmPackages",
|
|
8310
|
+
"publicInterface",
|
|
8311
|
+
"externalDependencies",
|
|
8312
|
+
"codeSnippets"
|
|
7684
8313
|
]);
|
|
7685
8314
|
const ALL_FILLABLE_FIELDS = [
|
|
7686
8315
|
"analysis",
|
|
@@ -7689,189 +8318,31 @@ const ALL_FILLABLE_FIELDS = [
|
|
|
7689
8318
|
"knownLimitations",
|
|
7690
8319
|
"tags",
|
|
7691
8320
|
"successCriteria",
|
|
7692
|
-
"
|
|
8321
|
+
"userGuide",
|
|
8322
|
+
"domain",
|
|
8323
|
+
"componentFile",
|
|
8324
|
+
"npmPackages",
|
|
8325
|
+
"publicInterface",
|
|
8326
|
+
"externalDependencies",
|
|
8327
|
+
"lastVerifiedDate",
|
|
8328
|
+
"codeSnippets"
|
|
7693
8329
|
];
|
|
7694
8330
|
function getMissingFields(feature) {
|
|
7695
8331
|
return ALL_FILLABLE_FIELDS.filter((field) => {
|
|
7696
8332
|
const val = feature[field];
|
|
7697
8333
|
if (val === void 0 || val === null) return true;
|
|
7698
8334
|
if (typeof val === "string") return val.trim().length === 0;
|
|
7699
|
-
if (Array.isArray(val)) return
|
|
8335
|
+
if (Array.isArray(val)) return false;
|
|
7700
8336
|
return false;
|
|
7701
8337
|
});
|
|
7702
8338
|
}
|
|
7703
|
-
const
|
|
7704
|
-
|
|
7705
|
-
|
|
7706
|
-
|
|
7707
|
-
|
|
7708
|
-
|
|
7709
|
-
|
|
7710
|
-
userSuffix: "Generate a Vitest test suite for this feature."
|
|
7711
|
-
},
|
|
7712
|
-
migration: {
|
|
7713
|
-
system: `You are an expert database engineer. You will be given a feature.json. Generate a database migration scaffold for the data model this feature implies. Use SQL with clear comments. Include both up (CREATE) and down (DROP) sections. Return only the SQL, no explanation.`,
|
|
7714
|
-
userSuffix: "Generate a database migration for this feature."
|
|
7715
|
-
},
|
|
7716
|
-
docs: {
|
|
7717
|
-
system: `You are a technical writer. You will be given a feature.json. Generate user-facing documentation for this feature. Write it clearly enough that any end user can understand it (not developer-focused). Cover: what it does, how to use it, and known limitations. Use Markdown. Return only the documentation, no explanation.`,
|
|
7718
|
-
userSuffix: "Generate user-facing documentation for this feature."
|
|
7719
|
-
}
|
|
7720
|
-
};
|
|
7721
|
-
async function fillFeature(options) {
|
|
7722
|
-
const { featureDir, dryRun = false, skipConfirm = false, model = "claude-sonnet-4-6" } = options;
|
|
7723
|
-
const featurePath = path.join(featureDir, "feature.json");
|
|
7724
|
-
let raw;
|
|
7725
|
-
try {
|
|
7726
|
-
raw = fs.readFileSync(featurePath, "utf-8");
|
|
7727
|
-
} catch {
|
|
7728
|
-
throw new Error(`No feature.json found at "${featurePath}"`);
|
|
7729
|
-
}
|
|
7730
|
-
let parsed;
|
|
7731
|
-
try {
|
|
7732
|
-
parsed = JSON.parse(raw);
|
|
7733
|
-
} catch {
|
|
7734
|
-
throw new Error(`Invalid JSON in "${featurePath}"`);
|
|
7735
|
-
}
|
|
7736
|
-
const result = validateFeature(parsed);
|
|
7737
|
-
if (!result.success) throw new Error(`Invalid feature.json: ${result.errors.join(", ")}`);
|
|
7738
|
-
const feature = result.data;
|
|
7739
|
-
const client = createClient();
|
|
7740
|
-
const fieldsToFill = options.fields ? options.fields : getMissingFields(feature);
|
|
7741
|
-
if (fieldsToFill.length === 0) {
|
|
7742
|
-
process$1.stdout.write(` All fields already filled for ${feature.featureKey}.\n`);
|
|
7743
|
-
return {
|
|
7744
|
-
applied: false,
|
|
7745
|
-
fields: [],
|
|
7746
|
-
patch: {}
|
|
7747
|
-
};
|
|
7748
|
-
}
|
|
7749
|
-
process$1.stdout.write(`\nAnalyzing ${feature.featureKey} (${feature.title})...\n`);
|
|
7750
|
-
const ctx = buildContext(featureDir, feature);
|
|
7751
|
-
const contextStr = contextToString(ctx);
|
|
7752
|
-
process$1.stdout.write(`Reading ${ctx.sourceFiles.length} source file(s)...\n`);
|
|
7753
|
-
process$1.stdout.write(`Generating with ${model}...\n`);
|
|
7754
|
-
const patch = {};
|
|
7755
|
-
const diffs = [];
|
|
7756
|
-
for (const field of fieldsToFill) {
|
|
7757
|
-
const prompt = FILL_PROMPTS[field];
|
|
7758
|
-
if (!prompt) continue;
|
|
7759
|
-
process$1.stdout.write(` → ${field}...`);
|
|
7760
|
-
try {
|
|
7761
|
-
const rawValue = await generateText(client, prompt.system, `${contextStr}\n\n${prompt.userSuffix}`, model);
|
|
7762
|
-
let value = rawValue.trim();
|
|
7763
|
-
if (JSON_FIELDS.has(field)) try {
|
|
7764
|
-
const jsonStr = rawValue.match(/```(?:json)?\s*([\s\S]*?)```/)?.[1] ?? rawValue;
|
|
7765
|
-
value = JSON.parse(jsonStr.trim());
|
|
7766
|
-
} catch {
|
|
7767
|
-
process$1.stderr.write(`\n Warning: could not parse JSON for "${field}", storing as string\n`);
|
|
7768
|
-
}
|
|
7769
|
-
patch[field] = value;
|
|
7770
|
-
const existing = feature[field];
|
|
7771
|
-
const wasEmpty = existing === void 0 || existing === null || typeof existing === "string" && existing.trim().length === 0 || Array.isArray(existing) && existing.length === 0;
|
|
7772
|
-
diffs.push({
|
|
7773
|
-
field,
|
|
7774
|
-
wasEmpty,
|
|
7775
|
-
proposed: value
|
|
7776
|
-
});
|
|
7777
|
-
process$1.stdout.write(" done\n");
|
|
7778
|
-
} catch (err) {
|
|
7779
|
-
process$1.stdout.write(" failed\n");
|
|
7780
|
-
process$1.stderr.write(` Error generating "${field}": ${err instanceof Error ? err.message : String(err)}\n`);
|
|
7781
|
-
}
|
|
7782
|
-
}
|
|
7783
|
-
if (diffs.length === 0) return {
|
|
7784
|
-
applied: false,
|
|
7785
|
-
fields: [],
|
|
7786
|
-
patch: {}
|
|
7787
|
-
};
|
|
7788
|
-
printDiff(diffs);
|
|
7789
|
-
if (dryRun) {
|
|
7790
|
-
process$1.stdout.write(" [dry-run] No changes written.\n\n");
|
|
7791
|
-
return {
|
|
7792
|
-
applied: false,
|
|
7793
|
-
fields: Object.keys(patch),
|
|
7794
|
-
patch
|
|
7795
|
-
};
|
|
7796
|
-
}
|
|
7797
|
-
if (!skipConfirm) {
|
|
7798
|
-
const answer = await askUser("Apply? [Y]es / [n]o / [f]ield-by-field: ");
|
|
7799
|
-
if (answer.toLowerCase() === "n") {
|
|
7800
|
-
process$1.stdout.write(" Cancelled.\n");
|
|
7801
|
-
return {
|
|
7802
|
-
applied: false,
|
|
7803
|
-
fields: Object.keys(patch),
|
|
7804
|
-
patch
|
|
7805
|
-
};
|
|
7806
|
-
}
|
|
7807
|
-
if (answer.toLowerCase() === "f") {
|
|
7808
|
-
const approved = {};
|
|
7809
|
-
for (const [field, value] of Object.entries(patch)) if ((await askUser(` Apply "${field}"? [Y/n]: `)).toLowerCase() !== "n") approved[field] = value;
|
|
7810
|
-
for (const key of Object.keys(patch)) if (!(key in approved)) delete patch[key];
|
|
7811
|
-
Object.assign(patch, approved);
|
|
7812
|
-
}
|
|
7813
|
-
}
|
|
7814
|
-
const updated = {
|
|
7815
|
-
...parsed,
|
|
7816
|
-
...patch
|
|
7817
|
-
};
|
|
7818
|
-
fs.writeFileSync(featurePath, JSON.stringify(updated, null, 2) + "\n", "utf-8");
|
|
7819
|
-
const count = Object.keys(patch).length;
|
|
7820
|
-
process$1.stdout.write(`\n ✓ Updated ${feature.featureKey} — ${count} field${count === 1 ? "" : "s"} written.\n\n`);
|
|
7821
|
-
return {
|
|
7822
|
-
applied: true,
|
|
7823
|
-
fields: Object.keys(patch),
|
|
7824
|
-
patch
|
|
7825
|
-
};
|
|
7826
|
-
}
|
|
7827
|
-
async function genFromFeature(options) {
|
|
7828
|
-
const { featureDir, type, dryRun = false, model = "claude-sonnet-4-6" } = options;
|
|
7829
|
-
const featurePath = path.join(featureDir, "feature.json");
|
|
7830
|
-
let raw;
|
|
7831
|
-
try {
|
|
7832
|
-
raw = fs.readFileSync(featurePath, "utf-8");
|
|
7833
|
-
} catch {
|
|
7834
|
-
throw new Error(`No feature.json found at "${featurePath}"`);
|
|
7835
|
-
}
|
|
7836
|
-
const result = validateFeature(JSON.parse(raw));
|
|
7837
|
-
if (!result.success) throw new Error(`Invalid feature.json: ${result.errors.join(", ")}`);
|
|
7838
|
-
const feature = result.data;
|
|
7839
|
-
const promptConfig = GEN_PROMPTS[type];
|
|
7840
|
-
if (!promptConfig) throw new Error(`Unknown generation type: "${type}". Available: component, test, migration, docs`);
|
|
7841
|
-
const client = createClient();
|
|
7842
|
-
process$1.stdout.write(`\nGenerating ${type} for ${feature.featureKey} (${feature.title})...\n`);
|
|
7843
|
-
process$1.stdout.write(`Model: ${model}\n\n`);
|
|
7844
|
-
const contextStr = contextToString(buildContext(featureDir, feature));
|
|
7845
|
-
const generated = await generateText(client, promptConfig.system, `${contextStr}\n\n${promptConfig.userSuffix}`, model);
|
|
7846
|
-
if (dryRun) {
|
|
7847
|
-
process$1.stdout.write(generated);
|
|
7848
|
-
process$1.stdout.write("\n\n [dry-run] No file written.\n");
|
|
7849
|
-
return generated;
|
|
7850
|
-
}
|
|
7851
|
-
const outFile = options.outFile ?? path.join(featureDir, `${feature.featureKey}${typeToExt(type)}`);
|
|
7852
|
-
fs.writeFileSync(outFile, generated, "utf-8");
|
|
7853
|
-
process$1.stdout.write(` ✓ Written to ${outFile}\n\n`);
|
|
7854
|
-
return generated;
|
|
7855
|
-
}
|
|
7856
|
-
function typeToExt(type) {
|
|
7857
|
-
return {
|
|
7858
|
-
component: ".tsx",
|
|
7859
|
-
test: ".test.ts",
|
|
7860
|
-
migration: ".sql",
|
|
7861
|
-
docs: ".md"
|
|
7862
|
-
}[type] ?? ".txt";
|
|
7863
|
-
}
|
|
7864
|
-
function askUser(question) {
|
|
7865
|
-
return new Promise((resolve) => {
|
|
7866
|
-
const rl = readline.createInterface({
|
|
7867
|
-
input: process$1.stdin,
|
|
7868
|
-
output: process$1.stdout
|
|
7869
|
-
});
|
|
7870
|
-
rl.question(question, (answer) => {
|
|
7871
|
-
rl.close();
|
|
7872
|
-
resolve(answer.trim() || "y");
|
|
7873
|
-
});
|
|
7874
|
-
});
|
|
8339
|
+
const PROMPT_LOG_FILENAME = "prompt.log.jsonl";
|
|
8340
|
+
/** Append one or more entries to the feature's prompt.log.jsonl. Creates the file if absent. */
|
|
8341
|
+
function appendPromptLog(featureDir, entries) {
|
|
8342
|
+
if (entries.length === 0) return;
|
|
8343
|
+
const logPath = path.join(featureDir, PROMPT_LOG_FILENAME);
|
|
8344
|
+
const lines = entries.map((e) => JSON.stringify(e)).join("\n") + "\n";
|
|
8345
|
+
fs.appendFileSync(logPath, lines, "utf-8");
|
|
7875
8346
|
}
|
|
7876
8347
|
|
|
7877
8348
|
//#endregion
|
|
@@ -7882,65 +8353,6 @@ const server = new Server({
|
|
|
7882
8353
|
version: "1.0.0"
|
|
7883
8354
|
}, { capabilities: { tools: {} } });
|
|
7884
8355
|
server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
7885
|
-
{
|
|
7886
|
-
name: "fill_feature",
|
|
7887
|
-
description: "Fill missing fields in a feature.json using AI analysis of the code. Returns proposed changes and optionally applies them.",
|
|
7888
|
-
inputSchema: {
|
|
7889
|
-
type: "object",
|
|
7890
|
-
properties: {
|
|
7891
|
-
path: {
|
|
7892
|
-
type: "string",
|
|
7893
|
-
description: "Absolute or relative path to the feature folder (contains feature.json)"
|
|
7894
|
-
},
|
|
7895
|
-
fields: {
|
|
7896
|
-
type: "array",
|
|
7897
|
-
items: { type: "string" },
|
|
7898
|
-
description: "Specific fields to fill. Omit to fill all missing fields. Options: analysis, decisions, implementation, knownLimitations, tags, successCriteria, domain"
|
|
7899
|
-
},
|
|
7900
|
-
dryRun: {
|
|
7901
|
-
type: "boolean",
|
|
7902
|
-
description: "If true, returns proposed changes without writing to disk"
|
|
7903
|
-
},
|
|
7904
|
-
model: {
|
|
7905
|
-
type: "string",
|
|
7906
|
-
description: "Claude model to use (default: claude-sonnet-4-6)"
|
|
7907
|
-
}
|
|
7908
|
-
},
|
|
7909
|
-
required: ["path"]
|
|
7910
|
-
}
|
|
7911
|
-
},
|
|
7912
|
-
{
|
|
7913
|
-
name: "generate_from_feature",
|
|
7914
|
-
description: "Generate code artifacts from a feature.json — component, tests, migration, or docs.",
|
|
7915
|
-
inputSchema: {
|
|
7916
|
-
type: "object",
|
|
7917
|
-
properties: {
|
|
7918
|
-
path: {
|
|
7919
|
-
type: "string",
|
|
7920
|
-
description: "Absolute or relative path to the feature folder"
|
|
7921
|
-
},
|
|
7922
|
-
type: {
|
|
7923
|
-
type: "string",
|
|
7924
|
-
enum: [
|
|
7925
|
-
"component",
|
|
7926
|
-
"test",
|
|
7927
|
-
"migration",
|
|
7928
|
-
"docs"
|
|
7929
|
-
],
|
|
7930
|
-
description: "What to generate"
|
|
7931
|
-
},
|
|
7932
|
-
dryRun: {
|
|
7933
|
-
type: "boolean",
|
|
7934
|
-
description: "If true, returns generated content without writing to disk"
|
|
7935
|
-
},
|
|
7936
|
-
model: {
|
|
7937
|
-
type: "string",
|
|
7938
|
-
description: "Claude model to use (default: claude-sonnet-4-6)"
|
|
7939
|
-
}
|
|
7940
|
-
},
|
|
7941
|
-
required: ["path", "type"]
|
|
7942
|
-
}
|
|
7943
|
-
},
|
|
7944
8356
|
{
|
|
7945
8357
|
name: "blame_file",
|
|
7946
8358
|
description: "Show which feature owns a file — returns the feature summary.",
|
|
@@ -7955,7 +8367,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7955
8367
|
},
|
|
7956
8368
|
{
|
|
7957
8369
|
name: "search_features",
|
|
7958
|
-
description: "Search all features in the workspace by key, title, tags, or
|
|
8370
|
+
description: "Search all features in the workspace by key, title, tags, problem, analysis, implementation, or decisions text.",
|
|
7959
8371
|
inputSchema: {
|
|
7960
8372
|
type: "object",
|
|
7961
8373
|
properties: {
|
|
@@ -7972,6 +8384,14 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7972
8384
|
"deprecated"
|
|
7973
8385
|
],
|
|
7974
8386
|
description: "Filter by status (optional)"
|
|
8387
|
+
},
|
|
8388
|
+
domain: {
|
|
8389
|
+
type: "string",
|
|
8390
|
+
description: "Filter by domain (optional)"
|
|
8391
|
+
},
|
|
8392
|
+
path: {
|
|
8393
|
+
type: "string",
|
|
8394
|
+
description: "Directory to scan (default: workspace root)"
|
|
7975
8395
|
}
|
|
7976
8396
|
},
|
|
7977
8397
|
required: ["query"]
|
|
@@ -7979,7 +8399,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7979
8399
|
},
|
|
7980
8400
|
{
|
|
7981
8401
|
name: "create_feature",
|
|
7982
|
-
description: "Create a new feature.json in the specified directory.",
|
|
8402
|
+
description: "Create a new feature.json in the specified directory. After creating, immediately call read_feature_context on the same path to analyze surrounding code and fill all required fields before calling advance_feature.",
|
|
7983
8403
|
inputSchema: {
|
|
7984
8404
|
type: "object",
|
|
7985
8405
|
properties: {
|
|
@@ -7989,7 +8409,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7989
8409
|
},
|
|
7990
8410
|
featureKey: {
|
|
7991
8411
|
type: "string",
|
|
7992
|
-
description: "Feature key (e.g. feat-2026-042)"
|
|
8412
|
+
description: "Feature key (e.g. feat-2026-042). Omit to auto-generate the next key from the workspace counter — recommended to avoid duplicates."
|
|
7993
8413
|
},
|
|
7994
8414
|
title: {
|
|
7995
8415
|
type: "string",
|
|
@@ -8012,7 +8432,6 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8012
8432
|
},
|
|
8013
8433
|
required: [
|
|
8014
8434
|
"dir",
|
|
8015
|
-
"featureKey",
|
|
8016
8435
|
"title",
|
|
8017
8436
|
"problem"
|
|
8018
8437
|
]
|
|
@@ -8023,10 +8442,16 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8023
8442
|
description: "Show the parent/child lineage tree for a feature key.",
|
|
8024
8443
|
inputSchema: {
|
|
8025
8444
|
type: "object",
|
|
8026
|
-
properties: {
|
|
8027
|
-
|
|
8028
|
-
|
|
8029
|
-
|
|
8445
|
+
properties: {
|
|
8446
|
+
featureKey: {
|
|
8447
|
+
type: "string",
|
|
8448
|
+
description: "Feature key to look up"
|
|
8449
|
+
},
|
|
8450
|
+
path: {
|
|
8451
|
+
type: "string",
|
|
8452
|
+
description: "Directory to scan (default: workspace root)"
|
|
8453
|
+
}
|
|
8454
|
+
},
|
|
8030
8455
|
required: ["featureKey"]
|
|
8031
8456
|
}
|
|
8032
8457
|
},
|
|
@@ -8035,10 +8460,16 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8035
8460
|
description: "Check all features for completeness and required fields.",
|
|
8036
8461
|
inputSchema: {
|
|
8037
8462
|
type: "object",
|
|
8038
|
-
properties: {
|
|
8039
|
-
|
|
8040
|
-
|
|
8041
|
-
|
|
8463
|
+
properties: {
|
|
8464
|
+
path: {
|
|
8465
|
+
type: "string",
|
|
8466
|
+
description: "Directory to scan (default: workspace root)"
|
|
8467
|
+
},
|
|
8468
|
+
revisionWarnings: {
|
|
8469
|
+
type: "boolean",
|
|
8470
|
+
description: "Include warnings for features with no revision entries (default: true). Set false during migration of existing repos."
|
|
8471
|
+
}
|
|
8472
|
+
}
|
|
8042
8473
|
}
|
|
8043
8474
|
},
|
|
8044
8475
|
{
|
|
@@ -8055,7 +8486,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8055
8486
|
},
|
|
8056
8487
|
{
|
|
8057
8488
|
name: "write_feature_fields",
|
|
8058
|
-
description: "Patch a feature.json with new field values. Use this after read_feature_context — write the fields you generated back to disk.",
|
|
8489
|
+
description: "Patch a feature.json with new field values. Use this after read_feature_context — write the fields you generated back to disk. If you are changing intent-critical fields (problem, analysis, implementation, decisions, successCriteria), pass a revision object with author and reason. After writing, call advance_feature to check if the feature is ready to transition.",
|
|
8059
8490
|
inputSchema: {
|
|
8060
8491
|
type: "object",
|
|
8061
8492
|
properties: {
|
|
@@ -8066,10 +8497,263 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8066
8497
|
fields: {
|
|
8067
8498
|
type: "object",
|
|
8068
8499
|
description: "Key-value pairs to merge into feature.json. Values may be strings, arrays, or objects depending on the field."
|
|
8500
|
+
},
|
|
8501
|
+
revision: {
|
|
8502
|
+
type: "object",
|
|
8503
|
+
description: "Required when changing intent-critical fields (problem, analysis, implementation, decisions, successCriteria). Appended to the revisions array.",
|
|
8504
|
+
properties: {
|
|
8505
|
+
author: {
|
|
8506
|
+
type: "string",
|
|
8507
|
+
description: "Who is making the change."
|
|
8508
|
+
},
|
|
8509
|
+
reason: {
|
|
8510
|
+
type: "string",
|
|
8511
|
+
description: "Why these fields are being changed."
|
|
8512
|
+
}
|
|
8513
|
+
},
|
|
8514
|
+
required: ["author", "reason"]
|
|
8069
8515
|
}
|
|
8070
8516
|
},
|
|
8071
8517
|
required: ["path", "fields"]
|
|
8072
8518
|
}
|
|
8519
|
+
},
|
|
8520
|
+
{
|
|
8521
|
+
name: "advance_feature",
|
|
8522
|
+
description: "Validate and transition a feature to a new status. Call this after write_feature_fields — it checks that required fields are filled for the target status and writes the new status. If fields are missing it returns exactly which ones so you can ask the user or fill them first. Transitions: draft→active (requires analysis, implementation, decisions, successCriteria), active→frozen (requires all fields + tags + knownLimitations), frozen→active (reopen — requires a reason describing what changed), any→deprecated.",
|
|
8523
|
+
inputSchema: {
|
|
8524
|
+
type: "object",
|
|
8525
|
+
properties: {
|
|
8526
|
+
path: {
|
|
8527
|
+
type: "string",
|
|
8528
|
+
description: "Absolute or relative path to the feature folder"
|
|
8529
|
+
},
|
|
8530
|
+
to: {
|
|
8531
|
+
type: "string",
|
|
8532
|
+
enum: [
|
|
8533
|
+
"active",
|
|
8534
|
+
"frozen",
|
|
8535
|
+
"deprecated"
|
|
8536
|
+
],
|
|
8537
|
+
description: "Target status"
|
|
8538
|
+
},
|
|
8539
|
+
reason: {
|
|
8540
|
+
type: "string",
|
|
8541
|
+
description: "Required when reopening (frozen→active). Describe what changed."
|
|
8542
|
+
}
|
|
8543
|
+
},
|
|
8544
|
+
required: ["path", "to"]
|
|
8545
|
+
}
|
|
8546
|
+
},
|
|
8547
|
+
{
|
|
8548
|
+
name: "spawn_child_feature",
|
|
8549
|
+
description: "Spawn a child feature from a parent — use when a bug is found, a subtask is extracted, or scope is split. Creates the child feature.json with lineage.parent set and patches the parent's lineage.children. After spawning, call read_feature_context on the child path to begin its lifecycle.",
|
|
8550
|
+
inputSchema: {
|
|
8551
|
+
type: "object",
|
|
8552
|
+
properties: {
|
|
8553
|
+
parentPath: {
|
|
8554
|
+
type: "string",
|
|
8555
|
+
description: "Absolute or relative path to the parent feature folder"
|
|
8556
|
+
},
|
|
8557
|
+
dir: {
|
|
8558
|
+
type: "string",
|
|
8559
|
+
description: "Directory to create the child feature in"
|
|
8560
|
+
},
|
|
8561
|
+
title: {
|
|
8562
|
+
type: "string",
|
|
8563
|
+
description: "Child feature title"
|
|
8564
|
+
},
|
|
8565
|
+
problem: {
|
|
8566
|
+
type: "string",
|
|
8567
|
+
description: "Problem the child addresses"
|
|
8568
|
+
},
|
|
8569
|
+
spawnReason: {
|
|
8570
|
+
type: "string",
|
|
8571
|
+
description: "Why this child was spawned (e.g. \"bug: login fails on Safari\", \"scope split: extract payment flow\")"
|
|
8572
|
+
}
|
|
8573
|
+
},
|
|
8574
|
+
required: [
|
|
8575
|
+
"parentPath",
|
|
8576
|
+
"dir",
|
|
8577
|
+
"title",
|
|
8578
|
+
"problem",
|
|
8579
|
+
"spawnReason"
|
|
8580
|
+
]
|
|
8581
|
+
}
|
|
8582
|
+
},
|
|
8583
|
+
{
|
|
8584
|
+
name: "get_feature_status",
|
|
8585
|
+
description: "Lightweight orientation tool — returns the current lifecycle state of a feature: status, filled vs missing fields, stale fields flagged from reopens, valid next transitions, and the exact next tool to call. Use this whenever picking up a feature mid-session to know where it stands before taking action.",
|
|
8586
|
+
inputSchema: {
|
|
8587
|
+
type: "object",
|
|
8588
|
+
properties: { path: {
|
|
8589
|
+
type: "string",
|
|
8590
|
+
description: "Absolute or relative path to the feature folder"
|
|
8591
|
+
} },
|
|
8592
|
+
required: ["path"]
|
|
8593
|
+
}
|
|
8594
|
+
},
|
|
8595
|
+
{
|
|
8596
|
+
name: "extract_feature_from_code",
|
|
8597
|
+
description: "Inverse of create_feature — given a directory with existing code but NO feature.json, reads all source files and returns instructions for Claude to generate a complete feature.json proposal. Use this to onboard legacy code into LAC. After calling this tool, generate the fields, then call create_feature followed by write_feature_fields.",
|
|
8598
|
+
inputSchema: {
|
|
8599
|
+
type: "object",
|
|
8600
|
+
properties: {
|
|
8601
|
+
path: {
|
|
8602
|
+
type: "string",
|
|
8603
|
+
description: "Directory containing source code (must NOT already have a feature.json)"
|
|
8604
|
+
},
|
|
8605
|
+
maxFileSize: {
|
|
8606
|
+
type: "number",
|
|
8607
|
+
description: "Maximum characters to read per file before truncating (default: 8000). Increase for large files."
|
|
8608
|
+
}
|
|
8609
|
+
},
|
|
8610
|
+
required: ["path"]
|
|
8611
|
+
}
|
|
8612
|
+
},
|
|
8613
|
+
{
|
|
8614
|
+
name: "feature_changelog",
|
|
8615
|
+
description: "Generate a chronological changelog for a feature — shows status transitions (from statusHistory), reopens, and spawned children in timeline form. Use this to understand the full history of a feature.",
|
|
8616
|
+
inputSchema: {
|
|
8617
|
+
type: "object",
|
|
8618
|
+
properties: { path: {
|
|
8619
|
+
type: "string",
|
|
8620
|
+
description: "Absolute or relative path to the feature folder"
|
|
8621
|
+
} },
|
|
8622
|
+
required: ["path"]
|
|
8623
|
+
}
|
|
8624
|
+
},
|
|
8625
|
+
{
|
|
8626
|
+
name: "roadmap_view",
|
|
8627
|
+
description: "Return a structured overview of all features in the workspace grouped by status (active → draft → frozen → deprecated) and sorted by priority. Shows missing fields and child counts at a glance. Use this to orient before a session or plan what to work on.",
|
|
8628
|
+
inputSchema: {
|
|
8629
|
+
type: "object",
|
|
8630
|
+
properties: { path: {
|
|
8631
|
+
type: "string",
|
|
8632
|
+
description: "Directory to scan (default: workspace root)"
|
|
8633
|
+
} }
|
|
8634
|
+
}
|
|
8635
|
+
},
|
|
8636
|
+
{
|
|
8637
|
+
name: "suggest_split",
|
|
8638
|
+
description: "Analyze a feature and recommend whether it should be broken into child features. Reads source files, detects split signals (file count, mixed domains, \"and\" in problem statement), and returns context + instructions for Claude to propose a split and call spawn_child_feature.",
|
|
8639
|
+
inputSchema: {
|
|
8640
|
+
type: "object",
|
|
8641
|
+
properties: { path: {
|
|
8642
|
+
type: "string",
|
|
8643
|
+
description: "Absolute or relative path to the feature folder"
|
|
8644
|
+
} },
|
|
8645
|
+
required: ["path"]
|
|
8646
|
+
}
|
|
8647
|
+
},
|
|
8648
|
+
{
|
|
8649
|
+
name: "feature_summary_for_pr",
|
|
8650
|
+
description: "Generate a ready-to-paste pull request description from a feature.json — includes problem, what was built, key decisions, known limitations, success criteria, and lineage. Use this when opening a PR for a feature.",
|
|
8651
|
+
inputSchema: {
|
|
8652
|
+
type: "object",
|
|
8653
|
+
properties: { path: {
|
|
8654
|
+
type: "string",
|
|
8655
|
+
description: "Absolute or relative path to the feature folder"
|
|
8656
|
+
} },
|
|
8657
|
+
required: ["path"]
|
|
8658
|
+
}
|
|
8659
|
+
},
|
|
8660
|
+
{
|
|
8661
|
+
name: "audit_decisions",
|
|
8662
|
+
description: "Scan all features and surface technical debt in decisions: features missing decisions, decisions with risky language (revisit/temporary/hack/workaround), and features with suspiciously similar titles in the same domain that may be duplicates. Run this periodically to keep the workspace healthy.",
|
|
8663
|
+
inputSchema: {
|
|
8664
|
+
type: "object",
|
|
8665
|
+
properties: { path: {
|
|
8666
|
+
type: "string",
|
|
8667
|
+
description: "Directory to scan (default: workspace root)"
|
|
8668
|
+
} }
|
|
8669
|
+
}
|
|
8670
|
+
},
|
|
8671
|
+
{
|
|
8672
|
+
name: "feature_similarity",
|
|
8673
|
+
description: "Find features semantically similar to a given one — same domain, shared tags, or overlapping keywords in title/problem. Use this before create_feature to avoid duplication, or to discover related work.",
|
|
8674
|
+
inputSchema: {
|
|
8675
|
+
type: "object",
|
|
8676
|
+
properties: { path: {
|
|
8677
|
+
type: "string",
|
|
8678
|
+
description: "Absolute or relative path to the feature folder to compare against"
|
|
8679
|
+
} },
|
|
8680
|
+
required: ["path"]
|
|
8681
|
+
}
|
|
8682
|
+
},
|
|
8683
|
+
{
|
|
8684
|
+
name: "time_travel",
|
|
8685
|
+
description: "Show what a feature.json looked like at a specific point in git history. Call with just path to see the full commit history for the file. Call with path + date (YYYY-MM-DD) or commit (SHA) to view that specific version.",
|
|
8686
|
+
inputSchema: {
|
|
8687
|
+
type: "object",
|
|
8688
|
+
properties: {
|
|
8689
|
+
path: {
|
|
8690
|
+
type: "string",
|
|
8691
|
+
description: "Absolute or relative path to the feature folder"
|
|
8692
|
+
},
|
|
8693
|
+
date: {
|
|
8694
|
+
type: "string",
|
|
8695
|
+
description: "YYYY-MM-DD — show the most recent version at or before this date"
|
|
8696
|
+
},
|
|
8697
|
+
commit: {
|
|
8698
|
+
type: "string",
|
|
8699
|
+
description: "Git commit SHA to view (full or short)"
|
|
8700
|
+
}
|
|
8701
|
+
},
|
|
8702
|
+
required: ["path"]
|
|
8703
|
+
}
|
|
8704
|
+
},
|
|
8705
|
+
{
|
|
8706
|
+
name: "cross_feature_impact",
|
|
8707
|
+
description: "Given a source file, find all features whose code imports or references it. Use this before refactoring a shared utility, changing an interface, or deleting a file — shows the blast radius across all tracked features.",
|
|
8708
|
+
inputSchema: {
|
|
8709
|
+
type: "object",
|
|
8710
|
+
properties: { file: {
|
|
8711
|
+
type: "string",
|
|
8712
|
+
description: "Absolute or relative path to the source file to analyze"
|
|
8713
|
+
} },
|
|
8714
|
+
required: ["file"]
|
|
8715
|
+
}
|
|
8716
|
+
},
|
|
8717
|
+
{
|
|
8718
|
+
name: "summarize_workspace",
|
|
8719
|
+
description: "Summarize the entire codebase by reading only feature.json files and READMEs — no source code. Returns a structured overview: project purpose, features grouped by domain, key decisions, and stats. Ideal for fast orientation before a coding session or for injecting project context into an AI prompt.",
|
|
8720
|
+
inputSchema: {
|
|
8721
|
+
type: "object",
|
|
8722
|
+
properties: {
|
|
8723
|
+
path: {
|
|
8724
|
+
type: "string",
|
|
8725
|
+
description: "Directory to scan (default: workspace root)"
|
|
8726
|
+
},
|
|
8727
|
+
format: {
|
|
8728
|
+
type: "string",
|
|
8729
|
+
enum: ["markdown", "json"],
|
|
8730
|
+
description: "Output format: \"markdown\" (default) or \"json\""
|
|
8731
|
+
}
|
|
8732
|
+
},
|
|
8733
|
+
required: []
|
|
8734
|
+
}
|
|
8735
|
+
},
|
|
8736
|
+
{
|
|
8737
|
+
name: "extract_all_features",
|
|
8738
|
+
description: "Scan a repository and return a manifest of all directories that should have feature.json files but do not yet. Useful for onboarding a legacy or external repo into LAC. After calling this tool, iterate over the returned candidates and call extract_feature_from_code on each one.",
|
|
8739
|
+
inputSchema: {
|
|
8740
|
+
type: "object",
|
|
8741
|
+
properties: {
|
|
8742
|
+
path: {
|
|
8743
|
+
type: "string",
|
|
8744
|
+
description: "Root directory to scan (default: workspace root)"
|
|
8745
|
+
},
|
|
8746
|
+
strategy: {
|
|
8747
|
+
type: "string",
|
|
8748
|
+
enum: ["module", "directory"],
|
|
8749
|
+
description: "\"module\" (default) — directories containing package.json, go.mod, Cargo.toml, index.ts, etc. \"directory\" — every directory that contains source files."
|
|
8750
|
+
},
|
|
8751
|
+
depth: {
|
|
8752
|
+
type: "number",
|
|
8753
|
+
description: "Maximum directory depth to descend (default: 4 for module, 2 for directory)"
|
|
8754
|
+
}
|
|
8755
|
+
}
|
|
8756
|
+
}
|
|
8073
8757
|
}
|
|
8074
8758
|
] }));
|
|
8075
8759
|
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
@@ -8077,131 +8761,683 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8077
8761
|
const a = args ?? {};
|
|
8078
8762
|
try {
|
|
8079
8763
|
switch (name) {
|
|
8080
|
-
case "
|
|
8081
|
-
const
|
|
8082
|
-
|
|
8083
|
-
|
|
8084
|
-
|
|
8085
|
-
|
|
8086
|
-
|
|
8764
|
+
case "blame_file": {
|
|
8765
|
+
const filePath = resolvePath(String(a.file));
|
|
8766
|
+
const feature = findNearestFeature(path.dirname(filePath));
|
|
8767
|
+
if (!feature) return { content: [{
|
|
8768
|
+
type: "text",
|
|
8769
|
+
text: `No feature.json found for "${a.file}". The file may not be under any tracked feature directory.`
|
|
8770
|
+
}] };
|
|
8771
|
+
return { content: [{
|
|
8772
|
+
type: "text",
|
|
8773
|
+
text: `File : ${String(a.file)}\n${formatFeatureSummary(feature)}`
|
|
8774
|
+
}] };
|
|
8775
|
+
}
|
|
8776
|
+
case "search_features": {
|
|
8777
|
+
const query = String(a.query).toLowerCase();
|
|
8778
|
+
const statusFilter = a.status;
|
|
8779
|
+
const domainFilter = a.domain;
|
|
8780
|
+
const matches = scanAllFeatures(a.path ? resolvePath(String(a.path)) : workspaceRoot).filter(({ feature }) => {
|
|
8781
|
+
if (statusFilter && feature.status !== statusFilter) return false;
|
|
8782
|
+
if (domainFilter && feature.domain !== domainFilter) return false;
|
|
8783
|
+
const decisionsText = (feature.decisions ?? []).map((d) => d.decision + " " + d.rationale).join(" ");
|
|
8784
|
+
return [
|
|
8785
|
+
feature.featureKey,
|
|
8786
|
+
feature.title,
|
|
8787
|
+
feature.problem,
|
|
8788
|
+
feature.analysis ?? "",
|
|
8789
|
+
feature.implementation ?? "",
|
|
8790
|
+
decisionsText,
|
|
8791
|
+
...feature.tags ?? []
|
|
8792
|
+
].join(" ").toLowerCase().includes(query);
|
|
8793
|
+
});
|
|
8794
|
+
if (matches.length === 0) return { content: [{
|
|
8795
|
+
type: "text",
|
|
8796
|
+
text: `No features found matching "${a.query}".`
|
|
8797
|
+
}] };
|
|
8798
|
+
const lines = matches.map(({ feature }) => `${statusIcon(feature.status)} ${feature.featureKey.padEnd(18)} ${feature.status.padEnd(12)} ${feature.title}\n ${feature.problem.slice(0, 80)}`);
|
|
8799
|
+
return { content: [{
|
|
8800
|
+
type: "text",
|
|
8801
|
+
text: `Found ${matches.length} feature(s):\n\n${lines.join("\n\n")}`
|
|
8802
|
+
}] };
|
|
8803
|
+
}
|
|
8804
|
+
case "create_feature": {
|
|
8805
|
+
const dir = resolvePath(String(a.dir));
|
|
8806
|
+
const featurePath = path.join(dir, "feature.json");
|
|
8807
|
+
if (fs.existsSync(featurePath)) return { content: [{
|
|
8808
|
+
type: "text",
|
|
8809
|
+
text: `feature.json already exists at "${featurePath}".`
|
|
8810
|
+
}] };
|
|
8811
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
8812
|
+
let featureKey;
|
|
8813
|
+
if (a.featureKey) {
|
|
8814
|
+
featureKey = String(a.featureKey);
|
|
8815
|
+
registerFeatureKey(dir, featureKey);
|
|
8816
|
+
} else featureKey = generateFeatureKey(dir);
|
|
8817
|
+
const feature = {
|
|
8818
|
+
featureKey,
|
|
8819
|
+
title: String(a.title),
|
|
8820
|
+
status: String(a.status ?? "draft"),
|
|
8821
|
+
problem: String(a.problem),
|
|
8822
|
+
schemaVersion: 1
|
|
8823
|
+
};
|
|
8824
|
+
fs.writeFileSync(featurePath, JSON.stringify(feature, null, 2) + "\n", "utf-8");
|
|
8825
|
+
return { content: [{
|
|
8826
|
+
type: "text",
|
|
8827
|
+
text: `Created "${featureKey}" at "${featurePath}" (${feature.status}).\n\nNext: call read_feature_context on "${dir}" to analyze the code and fill missing fields, then advance_feature when ready.`
|
|
8828
|
+
}] };
|
|
8829
|
+
}
|
|
8830
|
+
case "get_lineage": {
|
|
8831
|
+
const featureKey = String(a.featureKey);
|
|
8832
|
+
const features = scanAllFeatures(a.path ? resolvePath(String(a.path)) : workspaceRoot);
|
|
8833
|
+
const featureMap = new Map(features.map(({ feature }) => [feature.featureKey, feature]));
|
|
8834
|
+
const root = featureMap.get(featureKey);
|
|
8835
|
+
if (!root) return { content: [{
|
|
8836
|
+
type: "text",
|
|
8837
|
+
text: `Feature "${featureKey}" not found.`
|
|
8838
|
+
}] };
|
|
8839
|
+
const childrenOf = /* @__PURE__ */ new Map();
|
|
8840
|
+
for (const { feature } of features) {
|
|
8841
|
+
const parent = feature.lineage?.parent;
|
|
8842
|
+
if (parent) {
|
|
8843
|
+
const existing = childrenOf.get(parent) ?? [];
|
|
8844
|
+
existing.push(feature.featureKey);
|
|
8845
|
+
childrenOf.set(parent, existing);
|
|
8846
|
+
}
|
|
8847
|
+
}
|
|
8848
|
+
return { content: [{
|
|
8849
|
+
type: "text",
|
|
8850
|
+
text: buildLineageTree(root, featureMap, childrenOf, 0)
|
|
8851
|
+
}] };
|
|
8852
|
+
}
|
|
8853
|
+
case "lint_workspace": {
|
|
8854
|
+
const scanDir = a.path ? resolvePath(String(a.path)) : workspaceRoot;
|
|
8855
|
+
const revisionWarnings = a.revisionWarnings !== false;
|
|
8856
|
+
const features = scanAllFeatures(scanDir);
|
|
8857
|
+
const featureKeys = new Set(features.map(({ feature }) => feature.featureKey));
|
|
8858
|
+
const INTENT_CRITICAL_LINT = [
|
|
8859
|
+
"problem",
|
|
8860
|
+
"analysis",
|
|
8861
|
+
"implementation",
|
|
8862
|
+
"decisions",
|
|
8863
|
+
"successCriteria"
|
|
8864
|
+
];
|
|
8865
|
+
const results = features.map(({ feature, filePath }) => {
|
|
8866
|
+
const issues = [];
|
|
8867
|
+
const warnings = [];
|
|
8868
|
+
const raw = feature;
|
|
8869
|
+
if (!feature.problem?.trim()) issues.push("missing problem");
|
|
8870
|
+
if (feature.status === "active") {
|
|
8871
|
+
if (!feature.analysis?.trim()) issues.push("missing analysis");
|
|
8872
|
+
if (!feature.implementation?.trim()) issues.push("missing implementation");
|
|
8873
|
+
if (!feature.decisions?.length) issues.push("no decisions recorded");
|
|
8874
|
+
}
|
|
8875
|
+
if (feature.lineage?.parent && !featureKeys.has(feature.lineage.parent)) issues.push(`orphaned: parent "${feature.lineage.parent}" not found`);
|
|
8876
|
+
for (const child of feature.lineage?.children ?? []) if (!featureKeys.has(child)) issues.push(`broken child ref: "${child}" not found`);
|
|
8877
|
+
if (raw.superseded_by && !featureKeys.has(String(raw.superseded_by))) issues.push(`broken superseded_by ref: "${raw.superseded_by}" not found`);
|
|
8878
|
+
if (raw.merged_into && !featureKeys.has(String(raw.merged_into))) issues.push(`broken merged_into ref: "${raw.merged_into}" not found`);
|
|
8879
|
+
for (const key of raw.merged_from ?? []) if (!featureKeys.has(key)) issues.push(`broken merged_from ref: "${key}" not found`);
|
|
8880
|
+
if (feature.status === "active" || feature.status === "draft") {
|
|
8881
|
+
const preFreeze = getMissingForTransition(feature, "frozen");
|
|
8882
|
+
if (preFreeze.length > 0) warnings.push(`will block freeze — missing: ${preFreeze.join(", ")}`);
|
|
8883
|
+
}
|
|
8884
|
+
if (raw.superseded_by && feature.status !== "deprecated") warnings.push(`superseded_by set but status is "${feature.status}" — consider deprecating`);
|
|
8885
|
+
if (raw.merged_into && feature.status !== "deprecated") warnings.push(`merged_into set but status is "${feature.status}" — consider deprecating`);
|
|
8886
|
+
const hasRevisions = Array.isArray(raw.revisions) && raw.revisions.length > 0;
|
|
8887
|
+
if (revisionWarnings && !hasRevisions) {
|
|
8888
|
+
const filledCritical = INTENT_CRITICAL_LINT.filter((f) => {
|
|
8889
|
+
const val = raw[f];
|
|
8890
|
+
if (val === void 0 || val === null) return false;
|
|
8891
|
+
if (typeof val === "string") return val.trim().length > 0;
|
|
8892
|
+
if (Array.isArray(val)) return val.length > 0;
|
|
8893
|
+
return false;
|
|
8894
|
+
});
|
|
8895
|
+
if (filledCritical.length > 0) warnings.push(`no revisions recorded for: ${filledCritical.join(", ")}`);
|
|
8896
|
+
}
|
|
8897
|
+
return {
|
|
8898
|
+
feature,
|
|
8899
|
+
filePath,
|
|
8900
|
+
issues,
|
|
8901
|
+
warnings
|
|
8902
|
+
};
|
|
8087
8903
|
});
|
|
8904
|
+
const featureByKey = new Map(features.map(({ feature }) => [feature.featureKey, feature]));
|
|
8905
|
+
for (const result of results) {
|
|
8906
|
+
const raw = featureByKey.get(result.feature.featureKey);
|
|
8907
|
+
if (!raw) continue;
|
|
8908
|
+
if (raw.merged_into) {
|
|
8909
|
+
const target = featureByKey.get(String(raw.merged_into));
|
|
8910
|
+
if (target) {
|
|
8911
|
+
if (!(target.merged_from ?? []).includes(result.feature.featureKey)) result.warnings.push(`merged_into "${raw.merged_into}" but that feature does not list this key in merged_from`);
|
|
8912
|
+
}
|
|
8913
|
+
}
|
|
8914
|
+
for (const sourceKey of raw.merged_from ?? []) {
|
|
8915
|
+
const source = featureByKey.get(sourceKey);
|
|
8916
|
+
if (source && source.merged_into !== result.feature.featureKey) result.warnings.push(`merged_from includes "${sourceKey}" but that feature does not point merged_into this key`);
|
|
8917
|
+
}
|
|
8918
|
+
if (raw.superseded_by) {
|
|
8919
|
+
const successor = featureByKey.get(String(raw.superseded_by));
|
|
8920
|
+
if (successor) {
|
|
8921
|
+
if (!(successor.superseded_from ?? []).includes(result.feature.featureKey)) result.warnings.push(`superseded_by "${raw.superseded_by}" but that feature does not list this key in superseded_from`);
|
|
8922
|
+
}
|
|
8923
|
+
}
|
|
8924
|
+
}
|
|
8925
|
+
const keyCount = /* @__PURE__ */ new Map();
|
|
8926
|
+
for (const { feature, filePath } of features) {
|
|
8927
|
+
const paths = keyCount.get(feature.featureKey) ?? [];
|
|
8928
|
+
paths.push(filePath);
|
|
8929
|
+
keyCount.set(feature.featureKey, paths);
|
|
8930
|
+
}
|
|
8931
|
+
const duplicateKeys = [...keyCount.entries()].filter(([, paths]) => paths.length > 1);
|
|
8932
|
+
const failures = results.filter((r) => r.issues.length > 0);
|
|
8933
|
+
const warned = results.filter((r) => r.warnings.length > 0);
|
|
8934
|
+
const passes = results.filter((r) => r.issues.length === 0);
|
|
8935
|
+
const lines = [...passes.map((r) => ` ✓ ${r.feature.featureKey.padEnd(18)} ${r.feature.status}${r.warnings.length > 0 ? ` ⚠ ${r.warnings.join("; ")}` : ""}`), ...failures.map((r) => ` ✗ ${r.feature.featureKey.padEnd(18)} ${r.feature.status}\n ${r.issues.join(", ")}${r.warnings.length > 0 ? `\n ⚠ ${r.warnings.join("; ")}` : ""}`)];
|
|
8936
|
+
if (duplicateKeys.length > 0) {
|
|
8937
|
+
lines.push("");
|
|
8938
|
+
lines.push(`⛔ Duplicate featureKeys detected (${duplicateKeys.length}):`);
|
|
8939
|
+
for (const [key, paths] of duplicateKeys) {
|
|
8940
|
+
lines.push(` ${key}`);
|
|
8941
|
+
for (const p of paths) lines.push(` ${path.relative(scanDir, p)}`);
|
|
8942
|
+
}
|
|
8943
|
+
}
|
|
8944
|
+
return { content: [{
|
|
8945
|
+
type: "text",
|
|
8946
|
+
text: `${passes.length} passed, ${failures.length} failed, ${warned.length} warned — ${results.length} features checked${duplicateKeys.length > 0 ? ` ⛔ ${duplicateKeys.length} duplicate key(s)` : ""}\n\n${lines.join("\n")}`
|
|
8947
|
+
}] };
|
|
8948
|
+
}
|
|
8949
|
+
case "read_feature_context": {
|
|
8950
|
+
const featureDir = resolvePath(String(a.path));
|
|
8951
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
8952
|
+
let raw;
|
|
8953
|
+
try {
|
|
8954
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
8955
|
+
} catch {
|
|
8956
|
+
return {
|
|
8957
|
+
content: [{
|
|
8958
|
+
type: "text",
|
|
8959
|
+
text: `No feature.json found at "${featurePath}"`
|
|
8960
|
+
}],
|
|
8961
|
+
isError: true
|
|
8962
|
+
};
|
|
8963
|
+
}
|
|
8964
|
+
const result = validateFeature(JSON.parse(raw));
|
|
8965
|
+
if (!result.success) return {
|
|
8966
|
+
content: [{
|
|
8967
|
+
type: "text",
|
|
8968
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
8969
|
+
}],
|
|
8970
|
+
isError: true
|
|
8971
|
+
};
|
|
8972
|
+
const feature = result.data;
|
|
8973
|
+
const contextStr = contextToString(buildContext(featureDir, feature));
|
|
8974
|
+
const missingFields = getMissingFields(feature);
|
|
8975
|
+
let componentFileWarning = "";
|
|
8976
|
+
if (feature.componentFile) {
|
|
8977
|
+
const notFound = feature.componentFile.split(",").map((s) => s.trim()).filter(Boolean).filter((p) => {
|
|
8978
|
+
return [path.resolve(featureDir, p), path.resolve(workspaceRoot, p)].every((c) => !fs.existsSync(c));
|
|
8979
|
+
});
|
|
8980
|
+
if (notFound.length > 0) componentFileWarning = `\n## ⚠ componentFile drift\nThese paths do not exist on disk — update componentFile to match actual source files:\n${notFound.map((p) => ` - ${p}`).join("\n")}\n`;
|
|
8981
|
+
}
|
|
8982
|
+
const fieldInstructions = missingFields.map((field) => {
|
|
8983
|
+
const prompt = FILL_PROMPTS[field];
|
|
8984
|
+
const isJson = JSON_FIELDS.has(field);
|
|
8985
|
+
return `### ${field}\n${prompt.system}\n${prompt.userSuffix}\n${isJson ? "(Return valid JSON for this field)" : "(Return plain text for this field)"}`;
|
|
8986
|
+
}).join("\n\n");
|
|
8987
|
+
const staleAnnotation = feature.annotations?.find((ann) => ann.type === "stale-review");
|
|
8988
|
+
const staleWarning = staleAnnotation ? `## ⚠ Stale fields (feature was reopened)\n${staleAnnotation.body}\nReview and rewrite these fields against the current code, then call write_feature_fields.\n\n` : "";
|
|
8989
|
+
const instructions = missingFields.length === 0 ? staleWarning || "All fillable fields are already populated. No generation needed." : `${staleWarning}## Missing fields to fill (${missingFields.join(", ")})\n\nGenerate each field described below, then call write_feature_fields with all values at once. Fill ALL missing fields before calling advance_feature.\n\n${fieldInstructions}`;
|
|
8990
|
+
return { content: [{
|
|
8991
|
+
type: "text",
|
|
8992
|
+
text: `${componentFileWarning}${instructions}\n\n## Context\n\n${contextStr}`
|
|
8993
|
+
}] };
|
|
8994
|
+
}
|
|
8995
|
+
case "write_feature_fields": {
|
|
8996
|
+
const featureDir = resolvePath(String(a.path));
|
|
8997
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
8998
|
+
let raw;
|
|
8999
|
+
try {
|
|
9000
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9001
|
+
} catch {
|
|
9002
|
+
return {
|
|
9003
|
+
content: [{
|
|
9004
|
+
type: "text",
|
|
9005
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9006
|
+
}],
|
|
9007
|
+
isError: true
|
|
9008
|
+
};
|
|
9009
|
+
}
|
|
9010
|
+
const existing = JSON.parse(raw);
|
|
9011
|
+
const fields = a.fields;
|
|
9012
|
+
if (!fields || typeof fields !== "object" || Array.isArray(fields)) return {
|
|
9013
|
+
content: [{
|
|
9014
|
+
type: "text",
|
|
9015
|
+
text: "fields must be a JSON object"
|
|
9016
|
+
}],
|
|
9017
|
+
isError: true
|
|
9018
|
+
};
|
|
9019
|
+
const INTENT_CRITICAL = new Set([
|
|
9020
|
+
"problem",
|
|
9021
|
+
"analysis",
|
|
9022
|
+
"implementation",
|
|
9023
|
+
"decisions",
|
|
9024
|
+
"successCriteria"
|
|
9025
|
+
]);
|
|
9026
|
+
const changingCritical = Object.keys(fields).filter((k) => INTENT_CRITICAL.has(k));
|
|
9027
|
+
const updated = {
|
|
9028
|
+
...existing,
|
|
9029
|
+
...fields
|
|
9030
|
+
};
|
|
9031
|
+
updated.lastVerifiedDate = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
|
|
9032
|
+
const revisionInput = a.revision;
|
|
9033
|
+
let revisionWarning = "";
|
|
9034
|
+
if (changingCritical.length > 0) if (revisionInput?.author && revisionInput?.reason) {
|
|
9035
|
+
const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
|
|
9036
|
+
updated.revisions = [...existing.revisions ?? [], {
|
|
9037
|
+
date: today,
|
|
9038
|
+
author: revisionInput.author,
|
|
9039
|
+
fields_changed: changingCritical,
|
|
9040
|
+
reason: revisionInput.reason
|
|
9041
|
+
}];
|
|
9042
|
+
updated.annotations = (existing.annotations ?? []).filter((ann) => ann.type !== "stale-review");
|
|
9043
|
+
} else revisionWarning = `\n\n⚠ Intent-critical fields changed (${changingCritical.join(", ")}) without a revision entry. Pass a "revision" object with author and reason to attribute this change.`;
|
|
9044
|
+
fs.writeFileSync(featurePath, JSON.stringify(updated, null, 2) + "\n", "utf-8");
|
|
9045
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
9046
|
+
appendPromptLog(featureDir, Object.keys(fields).map((field) => {
|
|
9047
|
+
const val = fields[field];
|
|
9048
|
+
return {
|
|
9049
|
+
date: now,
|
|
9050
|
+
field,
|
|
9051
|
+
source: "mcp",
|
|
9052
|
+
value_preview: (typeof val === "string" ? val : JSON.stringify(val)).slice(0, 120)
|
|
9053
|
+
};
|
|
9054
|
+
}));
|
|
9055
|
+
const writtenKeys = Object.keys(fields);
|
|
9056
|
+
const afterResult = validateFeature(JSON.parse(fs.readFileSync(featurePath, "utf-8")));
|
|
9057
|
+
const stillMissing = afterResult.success ? getMissingFields(afterResult.data) : [];
|
|
9058
|
+
const nextHint = stillMissing.length > 0 ? `${stillMissing.length} field(s) still missing: ${stillMissing.join(", ")}. Fill all remaining fields with write_feature_fields before calling advance_feature.` : `All AI fields filled. Call advance_feature to transition status when ready.`;
|
|
8088
9059
|
return { content: [{
|
|
8089
9060
|
type: "text",
|
|
8090
|
-
text:
|
|
9061
|
+
text: `✓ Wrote ${writtenKeys.length} field(s) to ${featurePath}: ${writtenKeys.join(", ")}\n\n${nextHint}${revisionWarning}`
|
|
8091
9062
|
}] };
|
|
8092
9063
|
}
|
|
8093
|
-
case "
|
|
8094
|
-
|
|
8095
|
-
|
|
8096
|
-
|
|
8097
|
-
|
|
8098
|
-
|
|
8099
|
-
|
|
8100
|
-
|
|
8101
|
-
|
|
8102
|
-
|
|
8103
|
-
|
|
8104
|
-
|
|
8105
|
-
|
|
9064
|
+
case "advance_feature": {
|
|
9065
|
+
const featureDir = resolvePath(String(a.path));
|
|
9066
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
9067
|
+
let raw;
|
|
9068
|
+
try {
|
|
9069
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9070
|
+
} catch {
|
|
9071
|
+
return {
|
|
9072
|
+
content: [{
|
|
9073
|
+
type: "text",
|
|
9074
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9075
|
+
}],
|
|
9076
|
+
isError: true
|
|
9077
|
+
};
|
|
9078
|
+
}
|
|
9079
|
+
const parsed = JSON.parse(raw);
|
|
9080
|
+
const result = validateFeature(parsed);
|
|
9081
|
+
if (!result.success) return {
|
|
9082
|
+
content: [{
|
|
9083
|
+
type: "text",
|
|
9084
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
9085
|
+
}],
|
|
9086
|
+
isError: true
|
|
9087
|
+
};
|
|
9088
|
+
const feature = result.data;
|
|
9089
|
+
const to = String(a.to);
|
|
9090
|
+
const from = feature.status;
|
|
9091
|
+
const reason = a.reason ? String(a.reason) : void 0;
|
|
9092
|
+
const illegal = checkIllegalTransition(from, to);
|
|
9093
|
+
if (illegal) return { content: [{
|
|
8106
9094
|
type: "text",
|
|
8107
|
-
text:
|
|
9095
|
+
text: illegal
|
|
8108
9096
|
}] };
|
|
8109
|
-
return { content: [{
|
|
9097
|
+
if (from === "frozen" && to === "active" && !reason) return { content: [{
|
|
8110
9098
|
type: "text",
|
|
8111
|
-
text:
|
|
9099
|
+
text: "Reopening a frozen feature requires a reason. Call advance_feature again with the reason parameter describing what changed."
|
|
8112
9100
|
}] };
|
|
8113
|
-
|
|
8114
|
-
|
|
8115
|
-
const query = String(a.query).toLowerCase();
|
|
8116
|
-
const statusFilter = a.status;
|
|
8117
|
-
const matches = scanAllFeatures(workspaceRoot).filter(({ feature }) => {
|
|
8118
|
-
if (statusFilter && feature.status !== statusFilter) return false;
|
|
8119
|
-
return [
|
|
8120
|
-
feature.featureKey,
|
|
8121
|
-
feature.title,
|
|
8122
|
-
feature.problem,
|
|
8123
|
-
...feature.tags ?? [],
|
|
8124
|
-
feature.analysis ?? ""
|
|
8125
|
-
].join(" ").toLowerCase().includes(query);
|
|
8126
|
-
});
|
|
8127
|
-
if (matches.length === 0) return { content: [{
|
|
9101
|
+
const missing = getMissingForTransition(feature, to);
|
|
9102
|
+
if (missing.length > 0) return { content: [{
|
|
8128
9103
|
type: "text",
|
|
8129
|
-
text: `
|
|
9104
|
+
text: `Cannot advance "${feature.featureKey}" to "${to}" — ${missing.length} required field(s) missing: ${missing.join(", ")}.\n\nCall read_feature_context on this path, fill the missing fields with write_feature_fields, then try advance_feature again.`
|
|
8130
9105
|
}] };
|
|
8131
|
-
|
|
9106
|
+
let deprecationHint = "";
|
|
9107
|
+
if (to === "deprecated") {
|
|
9108
|
+
const hasSuperseeded = !!parsed.superseded_by;
|
|
9109
|
+
const hasMerged = !!parsed.merged_into;
|
|
9110
|
+
if (!hasSuperseeded && !hasMerged) deprecationHint = "\n\n⚠ No lifecycle pointer set. Consider running `lac supersede` or `lac merge`, or call write_feature_fields with superseded_by or merged_into before deprecating so future readers know where this feature went.";
|
|
9111
|
+
}
|
|
9112
|
+
const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
|
|
9113
|
+
const updated = {
|
|
9114
|
+
...parsed,
|
|
9115
|
+
status: to
|
|
9116
|
+
};
|
|
9117
|
+
if (to === "frozen") updated.lastVerifiedDate = today;
|
|
9118
|
+
updated.statusHistory = [...updated.statusHistory ?? [], {
|
|
9119
|
+
from,
|
|
9120
|
+
to,
|
|
9121
|
+
date: today,
|
|
9122
|
+
...reason ? { reason } : {}
|
|
9123
|
+
}];
|
|
9124
|
+
if (from === "frozen" && to === "active" && reason) {
|
|
9125
|
+
const filledCritical = [
|
|
9126
|
+
"analysis",
|
|
9127
|
+
"implementation",
|
|
9128
|
+
"decisions",
|
|
9129
|
+
"successCriteria"
|
|
9130
|
+
].filter((f) => {
|
|
9131
|
+
const val = feature[f];
|
|
9132
|
+
if (val === void 0 || val === null) return false;
|
|
9133
|
+
if (typeof val === "string") return val.trim().length > 0;
|
|
9134
|
+
if (Array.isArray(val)) return val.length > 0;
|
|
9135
|
+
return false;
|
|
9136
|
+
});
|
|
9137
|
+
const staleBody = filledCritical.length > 0 ? `Fields that may need updating after reopen: ${filledCritical.join(", ")}` : "Review all intent-critical fields after reopen";
|
|
9138
|
+
updated.annotations = [
|
|
9139
|
+
...updated.annotations ?? [],
|
|
9140
|
+
{
|
|
9141
|
+
id: `reopen-${Date.now()}`,
|
|
9142
|
+
author: "lac advance",
|
|
9143
|
+
date: today,
|
|
9144
|
+
type: "reopen",
|
|
9145
|
+
body: reason
|
|
9146
|
+
},
|
|
9147
|
+
{
|
|
9148
|
+
id: `stale-${Date.now() + 1}`,
|
|
9149
|
+
author: "lac advance",
|
|
9150
|
+
date: today,
|
|
9151
|
+
type: "stale-review",
|
|
9152
|
+
body: staleBody
|
|
9153
|
+
}
|
|
9154
|
+
];
|
|
9155
|
+
}
|
|
9156
|
+
fs.writeFileSync(featurePath, JSON.stringify(updated, null, 2) + "\n", "utf-8");
|
|
9157
|
+
const nextStep = to === "active" ? from === "frozen" ? "Feature reopened. Call read_feature_context to review stale fields, update with write_feature_fields, then advance_feature to frozen when ready." : "Feature is active. Call read_feature_context to fill any missing fields, then advance_feature to frozen when complete." : to === "frozen" ? "Feature is frozen. If a bug is found or requirements change, call spawn_child_feature or advance_feature with to: \"active\" and a reason." : "Feature deprecated.";
|
|
8132
9158
|
return { content: [{
|
|
8133
9159
|
type: "text",
|
|
8134
|
-
text:
|
|
9160
|
+
text: `✓ "${feature.featureKey}" ${from} → ${to}.\n\n${nextStep}${deprecationHint}`
|
|
8135
9161
|
}] };
|
|
8136
9162
|
}
|
|
8137
|
-
case "
|
|
8138
|
-
const
|
|
8139
|
-
const
|
|
8140
|
-
|
|
9163
|
+
case "spawn_child_feature": {
|
|
9164
|
+
const parentDir = resolvePath(String(a.parentPath));
|
|
9165
|
+
const parentFeaturePath = path.join(parentDir, "feature.json");
|
|
9166
|
+
let parentRaw;
|
|
9167
|
+
try {
|
|
9168
|
+
parentRaw = fs.readFileSync(parentFeaturePath, "utf-8");
|
|
9169
|
+
} catch {
|
|
9170
|
+
return {
|
|
9171
|
+
content: [{
|
|
9172
|
+
type: "text",
|
|
9173
|
+
text: `No feature.json found at "${parentFeaturePath}"`
|
|
9174
|
+
}],
|
|
9175
|
+
isError: true
|
|
9176
|
+
};
|
|
9177
|
+
}
|
|
9178
|
+
const parentParsed = JSON.parse(parentRaw);
|
|
9179
|
+
const parentResult = validateFeature(parentParsed);
|
|
9180
|
+
if (!parentResult.success) return {
|
|
9181
|
+
content: [{
|
|
9182
|
+
type: "text",
|
|
9183
|
+
text: `Invalid parent feature.json: ${parentResult.errors.join(", ")}`
|
|
9184
|
+
}],
|
|
9185
|
+
isError: true
|
|
9186
|
+
};
|
|
9187
|
+
const parentFeature = parentResult.data;
|
|
9188
|
+
const childDir = resolvePath(String(a.dir));
|
|
9189
|
+
const childFeaturePath = path.join(childDir, "feature.json");
|
|
9190
|
+
if (fs.existsSync(childFeaturePath)) return { content: [{
|
|
8141
9191
|
type: "text",
|
|
8142
|
-
text: `feature.json already exists at "${
|
|
9192
|
+
text: `feature.json already exists at "${childFeaturePath}".`
|
|
8143
9193
|
}] };
|
|
8144
|
-
|
|
8145
|
-
|
|
9194
|
+
fs.mkdirSync(childDir, { recursive: true });
|
|
9195
|
+
const childKey = generateFeatureKey(childDir);
|
|
9196
|
+
const child = {
|
|
9197
|
+
featureKey: childKey,
|
|
8146
9198
|
title: String(a.title),
|
|
8147
|
-
status:
|
|
9199
|
+
status: "draft",
|
|
8148
9200
|
problem: String(a.problem),
|
|
8149
|
-
schemaVersion: 1
|
|
9201
|
+
schemaVersion: 1,
|
|
9202
|
+
...parentFeature.domain ? { domain: parentFeature.domain } : {},
|
|
9203
|
+
...parentFeature.tags?.length ? { tags: parentFeature.tags } : {},
|
|
9204
|
+
lineage: {
|
|
9205
|
+
parent: parentFeature.featureKey,
|
|
9206
|
+
spawnReason: String(a.spawnReason)
|
|
9207
|
+
}
|
|
8150
9208
|
};
|
|
8151
|
-
fs.
|
|
8152
|
-
|
|
9209
|
+
fs.writeFileSync(childFeaturePath, JSON.stringify(child, null, 2) + "\n", "utf-8");
|
|
9210
|
+
const existingChildren = parentParsed.lineage?.children ?? [];
|
|
9211
|
+
const updatedParent = {
|
|
9212
|
+
...parentParsed,
|
|
9213
|
+
lineage: {
|
|
9214
|
+
...parentParsed.lineage ?? {},
|
|
9215
|
+
children: [...existingChildren, childKey]
|
|
9216
|
+
}
|
|
9217
|
+
};
|
|
9218
|
+
fs.writeFileSync(parentFeaturePath, JSON.stringify(updatedParent, null, 2) + "\n", "utf-8");
|
|
8153
9219
|
return { content: [{
|
|
8154
9220
|
type: "text",
|
|
8155
|
-
text:
|
|
9221
|
+
text: `✓ Spawned "${childKey}" under "${parentFeature.featureKey}".\nReason: ${a.spawnReason}\nChild path: ${childDir}\n\nNext: call read_feature_context on "${childDir}" to begin the child feature's lifecycle.`
|
|
8156
9222
|
}] };
|
|
8157
9223
|
}
|
|
8158
|
-
case "
|
|
8159
|
-
const
|
|
8160
|
-
const
|
|
8161
|
-
|
|
8162
|
-
|
|
8163
|
-
|
|
9224
|
+
case "get_feature_status": {
|
|
9225
|
+
const featureDir = resolvePath(String(a.path));
|
|
9226
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
9227
|
+
let raw;
|
|
9228
|
+
try {
|
|
9229
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9230
|
+
} catch {
|
|
9231
|
+
return {
|
|
9232
|
+
content: [{
|
|
9233
|
+
type: "text",
|
|
9234
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9235
|
+
}],
|
|
9236
|
+
isError: true
|
|
9237
|
+
};
|
|
9238
|
+
}
|
|
9239
|
+
const result = validateFeature(JSON.parse(raw));
|
|
9240
|
+
if (!result.success) return {
|
|
9241
|
+
content: [{
|
|
9242
|
+
type: "text",
|
|
9243
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
9244
|
+
}],
|
|
9245
|
+
isError: true
|
|
9246
|
+
};
|
|
9247
|
+
const feature = result.data;
|
|
9248
|
+
const missingFields = getMissingFields(feature);
|
|
9249
|
+
const staleAnnotation = feature.annotations?.find((ann) => ann.type === "stale-review");
|
|
9250
|
+
const validTransitions = [];
|
|
9251
|
+
if (feature.status !== "deprecated") validTransitions.push("deprecated");
|
|
9252
|
+
if (feature.status === "draft") validTransitions.push("active");
|
|
9253
|
+
if (feature.status === "active") validTransitions.push("frozen");
|
|
9254
|
+
if (feature.status === "frozen") validTransitions.push("active (requires reason)");
|
|
9255
|
+
const nextAction = missingFields.length > 0 ? `call read_feature_context to fill: ${missingFields.join(", ")}` : staleAnnotation ? `call read_feature_context to review stale fields (reopened feature)` : feature.status === "draft" ? `call advance_feature with to: "active"` : feature.status === "active" ? `call advance_feature with to: "frozen" when complete` : feature.status === "frozen" ? `frozen — call spawn_child_feature for bugs, or advance_feature to reopen` : "deprecated — no action needed";
|
|
9256
|
+
const sinceDate = (feature.statusHistory ? [...feature.statusHistory].reverse().find((h) => h.to === feature.status) : void 0)?.date ?? null;
|
|
9257
|
+
return { content: [{
|
|
8164
9258
|
type: "text",
|
|
8165
|
-
text:
|
|
9259
|
+
text: [
|
|
9260
|
+
`Key : ${feature.featureKey}`,
|
|
9261
|
+
`Title : ${feature.title}`,
|
|
9262
|
+
`Status : ${statusIcon(feature.status)} ${feature.status}${sinceDate ? ` (since ${sinceDate})` : ""}`,
|
|
9263
|
+
`Missing : ${missingFields.length === 0 ? "none" : missingFields.join(", ")}`,
|
|
9264
|
+
`Stale : ${staleAnnotation ? staleAnnotation.body : "none"}`,
|
|
9265
|
+
`Transitions: ${validTransitions.join(", ")}`,
|
|
9266
|
+
`Parent : ${feature.lineage?.parent ?? "none"}`,
|
|
9267
|
+
`Children : ${feature.lineage?.children?.length ?? 0}`,
|
|
9268
|
+
``,
|
|
9269
|
+
`Next action: ${nextAction}`
|
|
9270
|
+
].join("\n")
|
|
8166
9271
|
}] };
|
|
8167
|
-
|
|
8168
|
-
|
|
8169
|
-
|
|
8170
|
-
|
|
8171
|
-
|
|
8172
|
-
|
|
8173
|
-
|
|
8174
|
-
|
|
9272
|
+
}
|
|
9273
|
+
case "extract_feature_from_code": {
|
|
9274
|
+
const dir = resolvePath(String(a.path));
|
|
9275
|
+
const featurePath = path.join(dir, "feature.json");
|
|
9276
|
+
if (fs.existsSync(featurePath)) return { content: [{
|
|
9277
|
+
type: "text",
|
|
9278
|
+
text: `feature.json already exists at "${featurePath}". Use read_feature_context instead.`
|
|
9279
|
+
}] };
|
|
9280
|
+
const placeholder = {
|
|
9281
|
+
featureKey: "extract-pending",
|
|
9282
|
+
title: "(pending)",
|
|
9283
|
+
status: "draft",
|
|
9284
|
+
problem: "(to be determined)"
|
|
9285
|
+
};
|
|
9286
|
+
const maxFileChars = a.maxFileSize ? Number(a.maxFileSize) : void 0;
|
|
9287
|
+
const ctx = buildContext(dir, placeholder, maxFileChars !== void 0 ? { maxFileChars } : {});
|
|
9288
|
+
if (ctx.sourceFiles.length === 0) return { content: [{
|
|
9289
|
+
type: "text",
|
|
9290
|
+
text: `No source files found in "${dir}". Is this the right directory?`
|
|
9291
|
+
}] };
|
|
9292
|
+
const parts = [];
|
|
9293
|
+
if (ctx.truncatedFiles.length > 0) {
|
|
9294
|
+
parts.push(`⚠ WARNING: ${ctx.truncatedFiles.length} file(s) were truncated at ${maxFileChars ?? 8e3} chars — extraction may be incomplete:`);
|
|
9295
|
+
for (const f of ctx.truncatedFiles) parts.push(` - ${f}`);
|
|
9296
|
+
parts.push(`Tip: re-call with maxFileSize set higher (e.g. 16000) to capture the full content.`);
|
|
9297
|
+
parts.push("");
|
|
8175
9298
|
}
|
|
9299
|
+
if (ctx.gitLog) {
|
|
9300
|
+
parts.push("=== git log (last 20 commits) ===");
|
|
9301
|
+
parts.push(ctx.gitLog);
|
|
9302
|
+
}
|
|
9303
|
+
for (const file of ctx.sourceFiles) {
|
|
9304
|
+
parts.push(`\n=== ${file.relativePath}${file.truncated ? " [truncated]" : ""} ===`);
|
|
9305
|
+
parts.push(file.content);
|
|
9306
|
+
}
|
|
9307
|
+
const rawContext = parts.join("\n");
|
|
8176
9308
|
return { content: [{
|
|
8177
9309
|
type: "text",
|
|
8178
|
-
text:
|
|
9310
|
+
text: `${`## Extract feature.json from existing code
|
|
9311
|
+
|
|
9312
|
+
No feature.json exists at "${dir}". Analyze the ${ctx.sourceFiles.length} source file(s) below and generate a complete feature.json proposal.
|
|
9313
|
+
|
|
9314
|
+
When done, execute in order:
|
|
9315
|
+
1. Call create_feature with: dir="${dir}", plus your generated title and problem
|
|
9316
|
+
2. Call write_feature_fields with: path="${dir}", fields containing analysis, decisions, implementation, knownLimitations, tags, successCriteria, domain
|
|
9317
|
+
3. Call advance_feature to transition when ready
|
|
9318
|
+
|
|
9319
|
+
### Fields to generate
|
|
9320
|
+
**title** — Short descriptive name (5-10 words)
|
|
9321
|
+
**problem** — What problem does this code solve? 1-2 sentences.
|
|
9322
|
+
**domain** — Single lowercase word or hyphenated phrase (e.g. "auth", "data-pipeline")
|
|
9323
|
+
**tags** — 3-6 lowercase tags as JSON array: ["tag1", "tag2"]
|
|
9324
|
+
**analysis** — Architectural overview, key patterns, why they were chosen. 150-300 words.
|
|
9325
|
+
**decisions** — 2-4 key technical decisions as JSON array: [{"decision":"...","rationale":"...","alternativesConsidered":["..."]}]
|
|
9326
|
+
**implementation** — Main components, data flow, non-obvious patterns. 100-200 words.
|
|
9327
|
+
**knownLimitations** — 2-4 limitations/TODOs as JSON array: ["..."]
|
|
9328
|
+
**successCriteria** — How do we know this works? 1-3 testable sentences.`}\n\n## Source files\n\n${rawContext}`
|
|
8179
9329
|
}] };
|
|
8180
9330
|
}
|
|
8181
|
-
case "
|
|
8182
|
-
const
|
|
8183
|
-
|
|
8184
|
-
|
|
8185
|
-
|
|
8186
|
-
|
|
8187
|
-
|
|
8188
|
-
if (!feature.decisions?.length) issues.push("no decisions recorded");
|
|
8189
|
-
}
|
|
9331
|
+
case "feature_changelog": {
|
|
9332
|
+
const featureDir = resolvePath(String(a.path));
|
|
9333
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
9334
|
+
let raw;
|
|
9335
|
+
try {
|
|
9336
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9337
|
+
} catch {
|
|
8190
9338
|
return {
|
|
8191
|
-
|
|
8192
|
-
|
|
8193
|
-
|
|
9339
|
+
content: [{
|
|
9340
|
+
type: "text",
|
|
9341
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9342
|
+
}],
|
|
9343
|
+
isError: true
|
|
8194
9344
|
};
|
|
9345
|
+
}
|
|
9346
|
+
const result = validateFeature(JSON.parse(raw));
|
|
9347
|
+
if (!result.success) return {
|
|
9348
|
+
content: [{
|
|
9349
|
+
type: "text",
|
|
9350
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
9351
|
+
}],
|
|
9352
|
+
isError: true
|
|
9353
|
+
};
|
|
9354
|
+
const feature = result.data;
|
|
9355
|
+
const events = [];
|
|
9356
|
+
if (feature.statusHistory?.length) for (const h of feature.statusHistory) events.push({
|
|
9357
|
+
date: h.date,
|
|
9358
|
+
label: `${statusIcon(h.to)} ${h.from} → ${h.to}${h.reason ? ` — "${h.reason}"` : ""}`
|
|
8195
9359
|
});
|
|
8196
|
-
|
|
8197
|
-
|
|
8198
|
-
|
|
9360
|
+
else events.push({
|
|
9361
|
+
date: "(unknown)",
|
|
9362
|
+
label: `${statusIcon(feature.status)} current: ${feature.status}`
|
|
9363
|
+
});
|
|
9364
|
+
for (const rev of feature.revisions ?? []) events.push({
|
|
9365
|
+
date: rev.date,
|
|
9366
|
+
label: `✎ revision by ${rev.author}: ${rev.fields_changed.join(", ")} — ${rev.reason}`
|
|
9367
|
+
});
|
|
9368
|
+
for (const ann of feature.annotations ?? []) if (ann.type === "spawn") events.push({
|
|
9369
|
+
date: ann.date,
|
|
9370
|
+
label: `↳ spawned child — ${ann.body}`
|
|
9371
|
+
});
|
|
9372
|
+
else if (ann.type === "reopen") events.push({
|
|
9373
|
+
date: ann.date,
|
|
9374
|
+
label: `↺ reopened — ${ann.body}`
|
|
9375
|
+
});
|
|
9376
|
+
else if (ann.type === "stale-review") events.push({
|
|
9377
|
+
date: ann.date,
|
|
9378
|
+
label: `⚠ stale-review — ${ann.body}`
|
|
9379
|
+
});
|
|
9380
|
+
else events.push({
|
|
9381
|
+
date: ann.date,
|
|
9382
|
+
label: `[${ann.type}] ${ann.body} (by ${ann.author})`
|
|
9383
|
+
});
|
|
9384
|
+
const annotatedChildren = new Set((feature.annotations ?? []).filter((a$1) => a$1.type === "spawn").map((a$1) => a$1.body));
|
|
9385
|
+
for (const child of feature.lineage?.children ?? []) if (!annotatedChildren.has(child)) events.push({
|
|
9386
|
+
date: "(unknown)",
|
|
9387
|
+
label: `↳ spawned child: ${child}`
|
|
9388
|
+
});
|
|
9389
|
+
events.sort((a$1, b) => a$1.date.localeCompare(b.date));
|
|
9390
|
+
const decisionsLine = feature.decisions?.length ? `\nDecisions: ${feature.decisions.map((d) => `"${d.decision}"`).join(" · ")}` : "";
|
|
8199
9391
|
return { content: [{
|
|
8200
9392
|
type: "text",
|
|
8201
|
-
text: `${
|
|
9393
|
+
text: `${`${feature.featureKey} — "${feature.title}"\n${"─".repeat(50)}`}\n${events.map((e) => `${e.date.padEnd(12)} ${e.label}`).join("\n")}${decisionsLine}`
|
|
8202
9394
|
}] };
|
|
8203
9395
|
}
|
|
8204
|
-
case "
|
|
9396
|
+
case "roadmap_view": {
|
|
9397
|
+
const features = scanAllFeatures(a.path ? resolvePath(String(a.path)) : workspaceRoot);
|
|
9398
|
+
const byStatus = {
|
|
9399
|
+
active: [],
|
|
9400
|
+
draft: [],
|
|
9401
|
+
frozen: [],
|
|
9402
|
+
deprecated: []
|
|
9403
|
+
};
|
|
9404
|
+
for (const f of features) {
|
|
9405
|
+
const group = byStatus[f.feature.status];
|
|
9406
|
+
if (group) group.push(f);
|
|
9407
|
+
}
|
|
9408
|
+
for (const group of Object.values(byStatus)) group.sort((a$1, b) => {
|
|
9409
|
+
const pa = a$1.feature.priority ?? 9999;
|
|
9410
|
+
const pb = b.feature.priority ?? 9999;
|
|
9411
|
+
return pa !== pb ? pa - pb : a$1.feature.featureKey.localeCompare(b.feature.featureKey);
|
|
9412
|
+
});
|
|
9413
|
+
const formatGroup = (status, items) => {
|
|
9414
|
+
if (items.length === 0) return "";
|
|
9415
|
+
const rows = items.map(({ feature }) => {
|
|
9416
|
+
const priority = feature.priority ? `P${feature.priority}` : " - ";
|
|
9417
|
+
const childCount = feature.lineage?.children?.length ?? 0;
|
|
9418
|
+
const childNote = childCount > 0 ? ` [${childCount}↳]` : "";
|
|
9419
|
+
const missing = getMissingFields(feature);
|
|
9420
|
+
const warn = missing.length > 0 ? ` ⚠ missing: ${missing.join(", ")}` : "";
|
|
9421
|
+
return ` ${priority.padEnd(3)} ${feature.featureKey.padEnd(18)} ${feature.title}${childNote}${warn}`;
|
|
9422
|
+
});
|
|
9423
|
+
return [`${statusIcon(status)} ${status.toUpperCase()} (${items.length})`, ...rows].join("\n");
|
|
9424
|
+
};
|
|
9425
|
+
const sections = [
|
|
9426
|
+
"active",
|
|
9427
|
+
"draft",
|
|
9428
|
+
"frozen",
|
|
9429
|
+
"deprecated"
|
|
9430
|
+
].map((s) => formatGroup(s, byStatus[s] ?? [])).filter(Boolean);
|
|
9431
|
+
if (sections.length === 0) return { content: [{
|
|
9432
|
+
type: "text",
|
|
9433
|
+
text: "No features found."
|
|
9434
|
+
}] };
|
|
9435
|
+
return { content: [{
|
|
9436
|
+
type: "text",
|
|
9437
|
+
text: sections.join("\n\n")
|
|
9438
|
+
}] };
|
|
9439
|
+
}
|
|
9440
|
+
case "suggest_split": {
|
|
8205
9441
|
const featureDir = resolvePath(String(a.path));
|
|
8206
9442
|
const featurePath = path.join(featureDir, "feature.json");
|
|
8207
9443
|
let raw;
|
|
@@ -8216,7 +9452,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8216
9452
|
isError: true
|
|
8217
9453
|
};
|
|
8218
9454
|
}
|
|
8219
|
-
const result = validateFeature
|
|
9455
|
+
const result = validateFeature(JSON.parse(raw));
|
|
8220
9456
|
if (!result.success) return {
|
|
8221
9457
|
content: [{
|
|
8222
9458
|
type: "text",
|
|
@@ -8225,19 +9461,34 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8225
9461
|
isError: true
|
|
8226
9462
|
};
|
|
8227
9463
|
const feature = result.data;
|
|
8228
|
-
const
|
|
8229
|
-
const
|
|
8230
|
-
const
|
|
8231
|
-
|
|
8232
|
-
|
|
8233
|
-
|
|
8234
|
-
|
|
9464
|
+
const ctx = buildContext(featureDir, feature);
|
|
9465
|
+
const contextStr = contextToString(ctx);
|
|
9466
|
+
const signals = [
|
|
9467
|
+
ctx.sourceFiles.length > 8 ? `⚠ ${ctx.sourceFiles.length} source files (large)` : null,
|
|
9468
|
+
(feature.decisions?.length ?? 0) > 4 ? `⚠ ${feature.decisions.length} decisions (broad scope)` : null,
|
|
9469
|
+
(feature.title + " " + feature.problem).toLowerCase().includes(" and ") ? "⚠ title/problem contains \"and\" (possible dual concern)" : null
|
|
9470
|
+
].filter(Boolean);
|
|
9471
|
+
const signalNote = signals.length > 0 ? `\n**Signals detected:**\n${signals.map((s) => `- ${s}`).join("\n")}\n` : "\n**No obvious split signals — evaluate from the code.**\n";
|
|
8235
9472
|
return { content: [{
|
|
8236
9473
|
type: "text",
|
|
8237
|
-
text: `${
|
|
9474
|
+
text: `${`## Suggest split for "${feature.featureKey}" — "${feature.title}"
|
|
9475
|
+
${signalNote}
|
|
9476
|
+
Analyze the source files and determine whether this feature should be broken into smaller child features.
|
|
9477
|
+
|
|
9478
|
+
**Split signals to look for:**
|
|
9479
|
+
- Source files with distinct concerns that don't depend on each other
|
|
9480
|
+
- Multiple technical domains in the same codebase
|
|
9481
|
+
- Decisions covering unrelated areas
|
|
9482
|
+
- Problem statement describes multiple independent things
|
|
9483
|
+
|
|
9484
|
+
**Your response:**
|
|
9485
|
+
1. Recommend: split or keep as-is, with 2-3 sentence justification
|
|
9486
|
+
2. If split: propose 2-4 child features each with title, problem, spawnReason, and which files belong to it
|
|
9487
|
+
Then call spawn_child_feature for each (parentPath="${featureDir}", dir=<new subfolder>)
|
|
9488
|
+
3. If keep: explain what makes this feature cohesive`}\n\n## Context\n\n${contextStr}`
|
|
8238
9489
|
}] };
|
|
8239
9490
|
}
|
|
8240
|
-
case "
|
|
9491
|
+
case "feature_summary_for_pr": {
|
|
8241
9492
|
const featureDir = resolvePath(String(a.path));
|
|
8242
9493
|
const featurePath = path.join(featureDir, "feature.json");
|
|
8243
9494
|
let raw;
|
|
@@ -8252,24 +9503,317 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8252
9503
|
isError: true
|
|
8253
9504
|
};
|
|
8254
9505
|
}
|
|
8255
|
-
const
|
|
8256
|
-
|
|
8257
|
-
if (!fields || typeof fields !== "object" || Array.isArray(fields)) return {
|
|
9506
|
+
const result = validateFeature(JSON.parse(raw));
|
|
9507
|
+
if (!result.success) return {
|
|
8258
9508
|
content: [{
|
|
8259
9509
|
type: "text",
|
|
8260
|
-
text:
|
|
9510
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
8261
9511
|
}],
|
|
8262
9512
|
isError: true
|
|
8263
9513
|
};
|
|
8264
|
-
const
|
|
8265
|
-
|
|
8266
|
-
|
|
9514
|
+
const feature = result.data;
|
|
9515
|
+
const lines = [
|
|
9516
|
+
`## ${feature.featureKey} — ${feature.title}`,
|
|
9517
|
+
"",
|
|
9518
|
+
`**Problem:** ${feature.problem}`
|
|
9519
|
+
];
|
|
9520
|
+
if (feature.implementation) lines.push("", "**What was built:**", feature.implementation);
|
|
9521
|
+
else if (feature.analysis) lines.push("", "**Overview:**", feature.analysis.slice(0, 300) + (feature.analysis.length > 300 ? "…" : ""));
|
|
9522
|
+
if (feature.decisions?.length) {
|
|
9523
|
+
lines.push("", "**Key decisions:**");
|
|
9524
|
+
for (const d of feature.decisions) lines.push(`- **${d.decision}** — ${d.rationale}`);
|
|
9525
|
+
}
|
|
9526
|
+
if (feature.knownLimitations?.length) {
|
|
9527
|
+
lines.push("", "**Known limitations:**");
|
|
9528
|
+
for (const l of feature.knownLimitations) lines.push(`- ${l}`);
|
|
9529
|
+
}
|
|
9530
|
+
if (feature.successCriteria) lines.push("", `**Success criteria:** ${feature.successCriteria}`);
|
|
9531
|
+
const lineageParts = [];
|
|
9532
|
+
if (feature.lineage?.parent) lineageParts.push(`child of \`${feature.lineage.parent}\``);
|
|
9533
|
+
if (feature.lineage?.children?.length) lineageParts.push(`spawned: ${feature.lineage.children.map((c) => `\`${c}\``).join(", ")}`);
|
|
9534
|
+
if (lineageParts.length) lines.push("", `**Lineage:** ${lineageParts.join(" · ")}`);
|
|
9535
|
+
if (feature.tags?.length) lines.push("", `**Tags:** ${feature.tags.map((t) => `\`${t}\``).join(", ")}`);
|
|
9536
|
+
lines.push("", "---", `*Generated from [\`${feature.featureKey}\`](feature.json) via LAC*`);
|
|
9537
|
+
return { content: [{
|
|
9538
|
+
type: "text",
|
|
9539
|
+
text: lines.join("\n")
|
|
9540
|
+
}] };
|
|
9541
|
+
}
|
|
9542
|
+
case "summarize_workspace": {
|
|
9543
|
+
const scanDir = a.path ? resolvePath(String(a.path)) : workspaceRoot;
|
|
9544
|
+
const format = a.format === "json" ? "json" : "markdown";
|
|
9545
|
+
function readReadmeSummary(dir) {
|
|
9546
|
+
for (const name$1 of [
|
|
9547
|
+
"README.md",
|
|
9548
|
+
"readme.md",
|
|
9549
|
+
"Readme.md"
|
|
9550
|
+
]) {
|
|
9551
|
+
const p = path.join(dir, name$1);
|
|
9552
|
+
if (!fs.existsSync(p)) continue;
|
|
9553
|
+
try {
|
|
9554
|
+
const lines$1 = fs.readFileSync(p, "utf-8").split("\n");
|
|
9555
|
+
const parts = [];
|
|
9556
|
+
let started = false;
|
|
9557
|
+
for (const line of lines$1) {
|
|
9558
|
+
if (!started && line.trim()) started = true;
|
|
9559
|
+
if (!started) continue;
|
|
9560
|
+
parts.push(line);
|
|
9561
|
+
if (parts.length >= 2 && line.trim() === "") break;
|
|
9562
|
+
if (parts.length >= 8) break;
|
|
9563
|
+
}
|
|
9564
|
+
const text = parts.join("\n").trim();
|
|
9565
|
+
return text.length > 300 ? text.slice(0, 297) + "…" : text;
|
|
9566
|
+
} catch {}
|
|
9567
|
+
}
|
|
9568
|
+
return null;
|
|
9569
|
+
}
|
|
9570
|
+
const allFeatures = scanAllFeatures(scanDir);
|
|
9571
|
+
if (allFeatures.length === 0) return { content: [{
|
|
9572
|
+
type: "text",
|
|
9573
|
+
text: "No features found."
|
|
9574
|
+
}] };
|
|
9575
|
+
const rootReadme = readReadmeSummary(scanDir);
|
|
9576
|
+
const readmeCache = /* @__PURE__ */ new Map();
|
|
9577
|
+
const cachedReadme = (dir) => {
|
|
9578
|
+
if (!readmeCache.has(dir)) readmeCache.set(dir, readReadmeSummary(dir));
|
|
9579
|
+
return readmeCache.get(dir) ?? null;
|
|
8267
9580
|
};
|
|
8268
|
-
|
|
8269
|
-
|
|
9581
|
+
const statusCounts = {
|
|
9582
|
+
active: 0,
|
|
9583
|
+
draft: 0,
|
|
9584
|
+
frozen: 0,
|
|
9585
|
+
deprecated: 0
|
|
9586
|
+
};
|
|
9587
|
+
let staleCount = 0;
|
|
9588
|
+
for (const { feature } of allFeatures) {
|
|
9589
|
+
const s = feature.status;
|
|
9590
|
+
if (s in statusCounts) statusCounts[s] = (statusCounts[s] ?? 0) + 1;
|
|
9591
|
+
if (feature.annotations?.some((a$1) => a$1.type === "stale-review")) staleCount++;
|
|
9592
|
+
}
|
|
9593
|
+
const byDomain = /* @__PURE__ */ new Map();
|
|
9594
|
+
const noDomain = [];
|
|
9595
|
+
for (const f of allFeatures) {
|
|
9596
|
+
const d = f.feature.domain;
|
|
9597
|
+
if (d) {
|
|
9598
|
+
const group = byDomain.get(d) ?? [];
|
|
9599
|
+
group.push(f);
|
|
9600
|
+
byDomain.set(d, group);
|
|
9601
|
+
} else noDomain.push(f);
|
|
9602
|
+
}
|
|
9603
|
+
if (format === "json") {
|
|
9604
|
+
const data = {
|
|
9605
|
+
rootReadme: rootReadme ?? null,
|
|
9606
|
+
stats: {
|
|
9607
|
+
total: allFeatures.length,
|
|
9608
|
+
byStatus: statusCounts,
|
|
9609
|
+
domains: [...byDomain.keys()]
|
|
9610
|
+
},
|
|
9611
|
+
domains: Object.fromEntries([...byDomain.entries()].map(([domain, features]) => [domain, features.map(({ feature, filePath }) => ({
|
|
9612
|
+
key: feature.featureKey,
|
|
9613
|
+
title: feature.title,
|
|
9614
|
+
status: feature.status,
|
|
9615
|
+
problem: feature.problem,
|
|
9616
|
+
tags: feature.tags ?? [],
|
|
9617
|
+
decisionsCount: feature.decisions?.length ?? 0,
|
|
9618
|
+
readme: readReadmeSummary(path.dirname(filePath)),
|
|
9619
|
+
path: filePath
|
|
9620
|
+
}))])),
|
|
9621
|
+
uncategorized: noDomain.map(({ feature, filePath }) => ({
|
|
9622
|
+
key: feature.featureKey,
|
|
9623
|
+
title: feature.title,
|
|
9624
|
+
status: feature.status,
|
|
9625
|
+
problem: feature.problem,
|
|
9626
|
+
tags: feature.tags ?? [],
|
|
9627
|
+
decisionsCount: feature.decisions?.length ?? 0,
|
|
9628
|
+
readme: readReadmeSummary(path.dirname(filePath)),
|
|
9629
|
+
path: filePath
|
|
9630
|
+
}))
|
|
9631
|
+
};
|
|
9632
|
+
return { content: [{
|
|
9633
|
+
type: "text",
|
|
9634
|
+
text: JSON.stringify(data, null, 2)
|
|
9635
|
+
}] };
|
|
9636
|
+
}
|
|
9637
|
+
const lines = [];
|
|
9638
|
+
if (rootReadme) lines.push("## Project", "", rootReadme, "");
|
|
9639
|
+
lines.push("## Stats", `${allFeatures.length} features — ${statusCounts.active} active · ${statusCounts.draft} draft · ${statusCounts.frozen} frozen · ${statusCounts.deprecated} deprecated${staleCount > 0 ? ` · ${staleCount} stale (needs review)` : ""}`, "");
|
|
9640
|
+
const formatRow = (feature, filePath) => {
|
|
9641
|
+
const readme = cachedReadme(path.dirname(filePath));
|
|
9642
|
+
const problem = feature.problem.length > 100 ? feature.problem.slice(0, 97) + "…" : feature.problem;
|
|
9643
|
+
const tags = feature.tags?.length ? ` [${feature.tags.join(", ")}]` : "";
|
|
9644
|
+
const dec = feature.decisions?.length ? ` ${feature.decisions.length} decisions` : "";
|
|
9645
|
+
const readmeLine = readme ? `\n ${(readme.split("\n")[0] ?? "").replace(/^#+\s*/, "").slice(0, 80)}` : "";
|
|
9646
|
+
return ` ${statusIcon(feature.status)} ${feature.featureKey.padEnd(18)} ${feature.title}${tags}${dec}\n ${problem}${readmeLine}`;
|
|
9647
|
+
};
|
|
9648
|
+
for (const [domain, features] of [...byDomain.entries()].sort(([a$1], [b]) => a$1.localeCompare(b))) {
|
|
9649
|
+
lines.push(`### ${domain}`);
|
|
9650
|
+
for (const { feature, filePath } of features) lines.push(formatRow(feature, filePath));
|
|
9651
|
+
lines.push("");
|
|
9652
|
+
}
|
|
9653
|
+
if (noDomain.length > 0) {
|
|
9654
|
+
lines.push("### (no domain)");
|
|
9655
|
+
for (const { feature, filePath } of noDomain) lines.push(formatRow(feature, filePath));
|
|
9656
|
+
lines.push("");
|
|
9657
|
+
}
|
|
9658
|
+
return { content: [{
|
|
9659
|
+
type: "text",
|
|
9660
|
+
text: lines.join("\n")
|
|
9661
|
+
}] };
|
|
9662
|
+
}
|
|
9663
|
+
case "audit_decisions": return { ...handleAuditDecisions(a, workspaceRoot) };
|
|
9664
|
+
case "feature_similarity": return { ...handleFeatureSimilarity(a, workspaceRoot) };
|
|
9665
|
+
case "time_travel": return { ...handleTimeTravel(a, workspaceRoot) };
|
|
9666
|
+
case "cross_feature_impact": return { ...handleCrossFeatureImpact(a, workspaceRoot) };
|
|
9667
|
+
case "extract_all_features": {
|
|
9668
|
+
const toUnix = (p) => p.replace(/\\/g, "/");
|
|
9669
|
+
const scanRoot = a.path ? resolvePath(String(a.path)) : workspaceRoot;
|
|
9670
|
+
const strategy = String(a.strategy ?? "module");
|
|
9671
|
+
const defaultDepth = strategy === "directory" ? 2 : 4;
|
|
9672
|
+
const maxDepth = a.depth ? Number(a.depth) : defaultDepth;
|
|
9673
|
+
const MODULE_SIGNALS = new Set([
|
|
9674
|
+
"package.json",
|
|
9675
|
+
"go.mod",
|
|
9676
|
+
"Cargo.toml",
|
|
9677
|
+
"pyproject.toml",
|
|
9678
|
+
"setup.py",
|
|
9679
|
+
"pom.xml",
|
|
9680
|
+
"build.gradle",
|
|
9681
|
+
"build.gradle.kts",
|
|
9682
|
+
"Gemfile",
|
|
9683
|
+
"composer.json",
|
|
9684
|
+
"index.ts",
|
|
9685
|
+
"index.js",
|
|
9686
|
+
"index.tsx",
|
|
9687
|
+
"mod.ts",
|
|
9688
|
+
"main.rs",
|
|
9689
|
+
"main.go",
|
|
9690
|
+
"main.ts",
|
|
9691
|
+
"main.js",
|
|
9692
|
+
"main.py",
|
|
9693
|
+
"__init__.py",
|
|
9694
|
+
"lib.rs"
|
|
9695
|
+
]);
|
|
9696
|
+
const SOURCE_EXTS = new Set([
|
|
9697
|
+
".ts",
|
|
9698
|
+
".tsx",
|
|
9699
|
+
".js",
|
|
9700
|
+
".jsx",
|
|
9701
|
+
".py",
|
|
9702
|
+
".go",
|
|
9703
|
+
".rs",
|
|
9704
|
+
".java",
|
|
9705
|
+
".kt",
|
|
9706
|
+
".cs",
|
|
9707
|
+
".rb",
|
|
9708
|
+
".php",
|
|
9709
|
+
".vue",
|
|
9710
|
+
".svelte",
|
|
9711
|
+
".sql",
|
|
9712
|
+
".c",
|
|
9713
|
+
".cpp",
|
|
9714
|
+
".swift"
|
|
9715
|
+
]);
|
|
9716
|
+
const SKIP = new Set([
|
|
9717
|
+
"node_modules",
|
|
9718
|
+
".git",
|
|
9719
|
+
"dist",
|
|
9720
|
+
"build",
|
|
9721
|
+
"out",
|
|
9722
|
+
"__pycache__",
|
|
9723
|
+
".turbo",
|
|
9724
|
+
"coverage",
|
|
9725
|
+
"vendor",
|
|
9726
|
+
"target",
|
|
9727
|
+
".next",
|
|
9728
|
+
".nuxt",
|
|
9729
|
+
".cache",
|
|
9730
|
+
".venv",
|
|
9731
|
+
"venv",
|
|
9732
|
+
"_archive",
|
|
9733
|
+
"tmp",
|
|
9734
|
+
"temp",
|
|
9735
|
+
"migrations",
|
|
9736
|
+
"fixtures",
|
|
9737
|
+
"mocks",
|
|
9738
|
+
"__mocks__"
|
|
9739
|
+
]);
|
|
9740
|
+
const candidates = [];
|
|
9741
|
+
const alreadyDocumented = [];
|
|
9742
|
+
// Depth-first scan for documentation candidates.
// Records directories that already hold a feature.json in `alreadyDocumented`,
// otherwise (below the root) pushes a candidate when it matches the strategy:
// "module" requires a manifest/entry-point signal; anything else just needs
// at least one source file. Then recurses into non-hidden, non-skipped dirs.
function mcpWalk(dir, depth) {
	// Never descend beyond the user-requested depth limit.
	if (depth > maxDepth) return;
	let dirEntries;
	try {
		dirEntries = fs.readdirSync(dir, { withFileTypes: true });
	} catch {
		return; // unreadable directory — best-effort scan, skip silently
	}
	const fileNames = new Set(dirEntries.filter((entry) => entry.isFile()).map((entry) => entry.name));
	if (fileNames.has("feature.json")) {
		// Already documented: record it (the scan root maps to ".") and stop candidating it.
		alreadyDocumented.push(toUnix(path.relative(scanRoot, dir)) || ".");
	} else if (depth > 0) {
		const signals = [];
		let sourceFileCount = 0;
		for (const fileName of fileNames) {
			if (MODULE_SIGNALS.has(fileName) || fileName.endsWith(".csproj")) signals.push(fileName);
			if (SOURCE_EXTS.has(path.extname(fileName))) sourceFileCount++;
		}
		const qualifies = strategy === "module" ? signals.length > 0 : sourceFileCount > 0;
		if (qualifies) {
			candidates.push({
				dir,
				relativePath: toUnix(path.relative(scanRoot, dir)),
				signals,
				sourceFileCount,
				alreadyHasFeature: false,
				parentDir: null
			});
		}
	}
	// Recurse into visible subdirectories that are not on the skip list.
	for (const entry of dirEntries) {
		if (entry.isDirectory() && !entry.name.startsWith(".") && !SKIP.has(entry.name)) {
			mcpWalk(path.join(dir, entry.name), depth + 1);
		}
	}
}
|
|
9773
|
+
mcpWalk(scanRoot, 0);
|
|
9774
|
+
candidates.sort((a$1, b) => a$1.dir.split(path.sep).length - b.dir.split(path.sep).length);
|
|
9775
|
+
const candidateDirs = new Set(candidates.map((c) => c.dir));
|
|
9776
|
+
for (const c of candidates) {
|
|
9777
|
+
let parent = path.dirname(c.dir);
|
|
9778
|
+
while (parent !== scanRoot && parent !== path.dirname(parent)) {
|
|
9779
|
+
if (candidateDirs.has(parent)) {
|
|
9780
|
+
c.parentDir = parent;
|
|
9781
|
+
break;
|
|
9782
|
+
}
|
|
9783
|
+
parent = path.dirname(parent);
|
|
9784
|
+
}
|
|
9785
|
+
}
|
|
9786
|
+
if (candidates.length === 0) return { content: [{
|
|
9787
|
+
type: "text",
|
|
9788
|
+
text: [
|
|
9789
|
+
`No undocumented modules found in "${scanRoot}".`,
|
|
9790
|
+
alreadyDocumented.length > 0 ? `${alreadyDocumented.length} director${alreadyDocumented.length === 1 ? "y is" : "ies are"} already documented:\n${alreadyDocumented.map((p) => ` - \`${p}\``).join("\n")}` : "",
|
|
9791
|
+
strategy === "module" ? "Tip: try strategy=\"directory\" to capture all directories with source files." : ""
|
|
9792
|
+
].filter(Boolean).join("\n")
|
|
9793
|
+
}] };
|
|
9794
|
+
const lines = [
|
|
9795
|
+
`## extract_all_features — ${scanRoot}`,
|
|
9796
|
+
"",
|
|
9797
|
+
`**Strategy:** ${strategy} **Depth:** ${maxDepth}`,
|
|
9798
|
+
`**Found:** ${candidates.length} undocumented module${candidates.length === 1 ? "" : "s"}`,
|
|
9799
|
+
alreadyDocumented.length > 0 ? `**Already documented:** ${alreadyDocumented.length} (skipped):\n${alreadyDocumented.map((p) => ` - \`${p}\``).join("\n")}` : null,
|
|
9800
|
+
"",
|
|
9801
|
+
"### Candidates",
|
|
9802
|
+
""
|
|
9803
|
+
].filter((s) => s !== null);
|
|
9804
|
+
for (const c of candidates) {
|
|
9805
|
+
const indent = c.parentDir ? " " : "";
|
|
9806
|
+
const sigStr = c.signals.length > 0 ? ` [${c.signals.slice(0, 3).join(", ")}]` : "";
|
|
9807
|
+
const parentNote = c.parentDir ? ` (child of ${toUnix(path.relative(scanRoot, c.parentDir))})` : "";
|
|
9808
|
+
lines.push(`${indent}- \`${c.relativePath}\` — ${c.sourceFileCount} src file${c.sourceFileCount === 1 ? "" : "s"}${sigStr}${parentNote}`);
|
|
9809
|
+
}
|
|
9810
|
+
lines.push("");
|
|
9811
|
+
lines.push("### Next steps");
|
|
9812
|
+
lines.push("");
|
|
9813
|
+
lines.push("Call `extract_feature_from_code` on each candidate path above, in order (parents before children).", "Then call `create_feature` + `write_feature_fields` for each.", "Finally, wire lineage: for each child feature, set `lineage.parent` in write_feature_fields.", "", `**First call:** \`extract_feature_from_code("${candidates[0]?.dir}")\``);
|
|
8270
9814
|
return { content: [{
|
|
8271
9815
|
type: "text",
|
|
8272
|
-
text:
|
|
9816
|
+
text: lines.join("\n")
|
|
8273
9817
|
}] };
|
|
8274
9818
|
}
|
|
8275
9819
|
default: return { content: [{
|
|
@@ -8295,7 +9839,7 @@ function findNearestFeature(startDir) {
|
|
|
8295
9839
|
while (true) {
|
|
8296
9840
|
const candidate = path.join(current, "feature.json");
|
|
8297
9841
|
if (fs.existsSync(candidate)) try {
|
|
8298
|
-
const result = validateFeature
|
|
9842
|
+
const result = validateFeature(JSON.parse(fs.readFileSync(candidate, "utf-8")));
|
|
8299
9843
|
if (result.success) return result.data;
|
|
8300
9844
|
} catch {}
|
|
8301
9845
|
const parent = path.dirname(current);
|
|
@@ -8312,11 +9856,11 @@ function walkForFeatures(dir, results) {
|
|
|
8312
9856
|
try {
|
|
8313
9857
|
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
8314
9858
|
for (const entry of entries) {
|
|
8315
|
-
if (entry.name === "node_modules" || entry.name === ".git" || entry.name === "dist") continue;
|
|
9859
|
+
if (entry.name === "node_modules" || entry.name === ".git" || entry.name === "dist" || entry.name === "_archive") continue;
|
|
8316
9860
|
const full = path.join(dir, entry.name);
|
|
8317
9861
|
if (entry.isDirectory()) walkForFeatures(full, results);
|
|
8318
9862
|
else if (entry.name === "feature.json") try {
|
|
8319
|
-
const result = validateFeature
|
|
9863
|
+
const result = validateFeature(JSON.parse(fs.readFileSync(full, "utf-8")));
|
|
8320
9864
|
if (result.success) results.push({
|
|
8321
9865
|
feature: result.data,
|
|
8322
9866
|
filePath: full
|
|
@@ -8335,13 +9879,18 @@ function formatFeatureSummary(feature) {
|
|
|
8335
9879
|
if (feature.analysis) lines.push(`Analysis : ${feature.analysis.slice(0, 200)}`);
|
|
8336
9880
|
if (feature.successCriteria) lines.push(`Success : ${feature.successCriteria}`);
|
|
8337
9881
|
if (feature.domain) lines.push(`Domain : ${feature.domain}`);
|
|
9882
|
+
if (feature.priority) lines.push(`Priority : P${feature.priority}/5`);
|
|
8338
9883
|
if (feature.decisions?.length) lines.push(`Decisions : ${feature.decisions.length} recorded`);
|
|
8339
9884
|
if (feature.lineage?.parent) lines.push(`Parent : ${feature.lineage.parent}`);
|
|
8340
9885
|
if (feature.lineage?.children?.length) lines.push(`Children : ${feature.lineage.children.join(", ")}`);
|
|
8341
9886
|
return lines.join("\n");
|
|
8342
9887
|
}
|
|
8343
9888
|
function buildLineageTree(feature, map, childrenOf, depth) {
|
|
8344
|
-
return [`${" ".repeat(depth)}${statusIcon(feature.status)} ${feature.featureKey} (${feature.status}) — ${feature.title}`, ...(childrenOf.get(feature.featureKey) ?? feature.lineage?.children ?? []).
|
|
9889
|
+
return [`${" ".repeat(depth)}${statusIcon(feature.status)} ${feature.featureKey} (${feature.status}) — ${feature.title}`, ...(childrenOf.get(feature.featureKey) ?? feature.lineage?.children ?? []).slice().sort((a, b) => {
|
|
9890
|
+
const pa = map.get(a)?.priority ?? 9999;
|
|
9891
|
+
const pb = map.get(b)?.priority ?? 9999;
|
|
9892
|
+
return pa !== pb ? pa - pb : a.localeCompare(b);
|
|
9893
|
+
}).flatMap((key) => {
|
|
8345
9894
|
const child = map.get(key);
|
|
8346
9895
|
return child ? [buildLineageTree(child, map, childrenOf, depth + 1)] : [];
|
|
8347
9896
|
})].join("\n");
|
|
@@ -8354,6 +9903,47 @@ function statusIcon(status) {
|
|
|
8354
9903
|
deprecated: "⊘"
|
|
8355
9904
|
}[status] ?? "?";
|
|
8356
9905
|
}
|
|
9906
|
+
// Fields that must be non-empty before a feature may become "active".
const REQUIRED_FOR_ACTIVE = ["analysis", "implementation", "successCriteria"];
// "frozen" demands everything "active" does, plus the documentation fields below.
const REQUIRED_FOR_FROZEN = [
	...REQUIRED_FOR_ACTIVE,
	"knownLimitations",
	"tags",
	"userGuide",
	"componentFile"
];
|
|
9920
|
+
// Returns the (deduplicated, in-order) list of fields that are still empty
// for a transition into status `to`. Only "active" and "frozen" have field
// requirements; both additionally require at least one recorded decision.
function getMissingForTransition(feature, to) {
	const requiredFields = to === "active" ? REQUIRED_FOR_ACTIVE : to === "frozen" ? REQUIRED_FOR_FROZEN : [];
	// A field counts as missing when absent, a blank string, or an empty array.
	const isBlank = (value) =>
		value === void 0 ||
		value === null ||
		(typeof value === "string" && value.trim().length === 0) ||
		(Array.isArray(value) && value.length === 0);
	const missing = requiredFields.filter((field) => isBlank(feature[field]));
	if ((to === "active" || to === "frozen") && (!feature.decisions || feature.decisions.length === 0)) missing.push("decisions");
	return [...new Set(missing)];
}
|
|
9941
|
+
// Returns a human-readable rejection message for a disallowed status
// transition, or null when the transition is permitted.
function checkIllegalTransition(from, to) {
	if (from === to) {
		return `Feature is already "${to}".`;
	}
	if (from === "deprecated") {
		return "Cannot transition from deprecated. Create a new feature instead.";
	}
	if (to === "draft") {
		return "Cannot transition back to draft. Use \"active\" to reopen.";
	}
	return null;
}
|
|
8357
9947
|
async function main() {
|
|
8358
9948
|
const transport = new StdioServerTransport();
|
|
8359
9949
|
await server.connect(transport);
|