@majeanson/lac 3.1.0 → 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -2
- package/bin/lac-lsp.js +1 -1
- package/dist/index.mjs +2351 -718
- package/dist/index.mjs.map +1 -1
- package/dist/lsp.mjs +35 -1
- package/dist/mcp.mjs +2006 -440
- package/package.json +1 -1
package/dist/mcp.mjs
CHANGED
|
@@ -4,9 +4,8 @@ import process$1 from "node:process";
|
|
|
4
4
|
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
5
5
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
6
6
|
import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprotocol/sdk/types.js";
|
|
7
|
-
import readline from "node:readline";
|
|
8
|
-
import Anthropic from "@anthropic-ai/sdk";
|
|
9
7
|
import { execSync } from "node:child_process";
|
|
8
|
+
import "@anthropic-ai/sdk";
|
|
10
9
|
|
|
11
10
|
//#region ../../node_modules/.bun/zod@4.3.6/node_modules/zod/v4/core/core.js
|
|
12
11
|
/** A special constant with type `never` */
|
|
@@ -3741,6 +3740,27 @@ const LineageSchema$1 = object$1({
|
|
|
3741
3740
|
children: array$1(string$2()).optional(),
|
|
3742
3741
|
spawnReason: string$2().nullable().optional()
|
|
3743
3742
|
});
|
|
3743
|
+
const StatusTransitionSchema$1 = object$1({
|
|
3744
|
+
from: FeatureStatusSchema$1,
|
|
3745
|
+
to: FeatureStatusSchema$1,
|
|
3746
|
+
date: string$2().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
3747
|
+
reason: string$2().optional()
|
|
3748
|
+
});
|
|
3749
|
+
const RevisionSchema$1 = object$1({
|
|
3750
|
+
date: string$2().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
3751
|
+
author: string$2().min(1),
|
|
3752
|
+
fields_changed: array$1(string$2()).min(1),
|
|
3753
|
+
reason: string$2().min(1)
|
|
3754
|
+
});
|
|
3755
|
+
const PublicInterfaceEntrySchema$1 = object$1({
|
|
3756
|
+
name: string$2().min(1),
|
|
3757
|
+
type: string$2().min(1),
|
|
3758
|
+
description: string$2().optional()
|
|
3759
|
+
});
|
|
3760
|
+
const CodeSnippetSchema$1 = object$1({
|
|
3761
|
+
label: string$2().min(1),
|
|
3762
|
+
snippet: string$2().min(1)
|
|
3763
|
+
});
|
|
3744
3764
|
const FeatureSchema$1 = object$1({
|
|
3745
3765
|
featureKey: string$2().regex(FEATURE_KEY_PATTERN$1, "featureKey must match pattern <domain>-YYYY-NNN (e.g. feat-2026-001, proc-2026-001)"),
|
|
3746
3766
|
title: string$2().min(1),
|
|
@@ -3756,12 +3776,25 @@ const FeatureSchema$1 = object$1({
|
|
|
3756
3776
|
annotations: array$1(AnnotationSchema$1).optional(),
|
|
3757
3777
|
lineage: LineageSchema$1.optional(),
|
|
3758
3778
|
successCriteria: string$2().optional(),
|
|
3759
|
-
domain: string$2().optional()
|
|
3779
|
+
domain: string$2().optional(),
|
|
3780
|
+
priority: number$2().int().min(1).max(5).optional(),
|
|
3781
|
+
statusHistory: array$1(StatusTransitionSchema$1).optional(),
|
|
3782
|
+
revisions: array$1(RevisionSchema$1).optional(),
|
|
3783
|
+
superseded_by: string$2().regex(FEATURE_KEY_PATTERN$1, "superseded_by must be a valid featureKey").optional(),
|
|
3784
|
+
superseded_from: array$1(string$2().regex(FEATURE_KEY_PATTERN$1, "each superseded_from entry must be a valid featureKey")).optional(),
|
|
3785
|
+
merged_into: string$2().regex(FEATURE_KEY_PATTERN$1, "merged_into must be a valid featureKey").optional(),
|
|
3786
|
+
merged_from: array$1(string$2().regex(FEATURE_KEY_PATTERN$1, "each merged_from entry must be a valid featureKey")).optional(),
|
|
3787
|
+
componentFile: string$2().optional(),
|
|
3788
|
+
npmPackages: array$1(string$2()).optional(),
|
|
3789
|
+
publicInterface: array$1(PublicInterfaceEntrySchema$1).optional(),
|
|
3790
|
+
externalDependencies: array$1(string$2()).optional(),
|
|
3791
|
+
lastVerifiedDate: string$2().regex(/^\d{4}-\d{2}-\d{2}$/, "lastVerifiedDate must be YYYY-MM-DD").optional(),
|
|
3792
|
+
codeSnippets: array$1(CodeSnippetSchema$1).optional()
|
|
3760
3793
|
});
|
|
3761
3794
|
|
|
3762
3795
|
//#endregion
|
|
3763
3796
|
//#region ../feature-schema/dist/validate.mjs
|
|
3764
|
-
function validateFeature
|
|
3797
|
+
function validateFeature(data) {
|
|
3765
3798
|
const result = FeatureSchema$1.safeParse(data);
|
|
3766
3799
|
if (result.success) return {
|
|
3767
3800
|
success: true,
|
|
@@ -3775,6 +3808,563 @@ function validateFeature$1(data) {
|
|
|
3775
3808
|
};
|
|
3776
3809
|
}
|
|
3777
3810
|
|
|
3811
|
+
//#endregion
|
|
3812
|
+
//#region ../feature-schema/dist/keygen.mjs
|
|
3813
|
+
const LAC_DIR = ".lac";
|
|
3814
|
+
const COUNTER_FILE = "counter";
|
|
3815
|
+
const KEYS_FILE = "keys";
|
|
3816
|
+
/**
|
|
3817
|
+
* Returns the current year as a number.
|
|
3818
|
+
*/
|
|
3819
|
+
function getCurrentYear() {
|
|
3820
|
+
return (/* @__PURE__ */ new Date()).getFullYear();
|
|
3821
|
+
}
|
|
3822
|
+
/**
|
|
3823
|
+
* Pads a counter number to a zero-padded 3-digit string (e.g. 1 → "001").
|
|
3824
|
+
*/
|
|
3825
|
+
function padCounter(n) {
|
|
3826
|
+
return String(n).padStart(3, "0");
|
|
3827
|
+
}
|
|
3828
|
+
/**
|
|
3829
|
+
* Walks up the directory tree from `fromDir` to find the nearest `.lac/` directory.
|
|
3830
|
+
* Returns the path to the `.lac/` directory if found, otherwise null.
|
|
3831
|
+
*/
|
|
3832
|
+
function findLacDir(fromDir) {
|
|
3833
|
+
let current = path.resolve(fromDir);
|
|
3834
|
+
while (true) {
|
|
3835
|
+
const candidate = path.join(current, LAC_DIR);
|
|
3836
|
+
if (fs.existsSync(candidate) && fs.statSync(candidate).isDirectory()) return candidate;
|
|
3837
|
+
const parent = path.dirname(current);
|
|
3838
|
+
if (parent === current) return null;
|
|
3839
|
+
current = parent;
|
|
3840
|
+
}
|
|
3841
|
+
}
|
|
3842
|
+
/**
|
|
3843
|
+
* Reads or initialises the `.lac/counter` file and returns the next
|
|
3844
|
+
* featureKey string like "feat-2026-001".
|
|
3845
|
+
*
|
|
3846
|
+
* The counter file stores a single integer representing the last-used counter
|
|
3847
|
+
* for the current year. When the year changes the counter resets to 1.
|
|
3848
|
+
*
|
|
3849
|
+
* Format of the counter file (two lines):
|
|
3850
|
+
* <year>
|
|
3851
|
+
* <last-used-counter>
|
|
3852
|
+
*
|
|
3853
|
+
* If the file does not exist it is created, and the first key (NNN=001) is
|
|
3854
|
+
* returned. The `.lac/` directory must already exist in a parent of
|
|
3855
|
+
* `fromDir`; if it cannot be found this function throws an Error.
|
|
3856
|
+
*
|
|
3857
|
+
* Duplicate detection: after generating the key, the `.lac/keys` file is
|
|
3858
|
+
* consulted. If the generated key already exists there, the counter is
|
|
3859
|
+
* incremented until a unique key is found.
|
|
3860
|
+
*
|
|
3861
|
+
* @param prefix Domain prefix for the key (default: "feat"). Set via `domain`
|
|
3862
|
+
* in `lac.config.json` to get keys like "proc-2026-001".
|
|
3863
|
+
*/
|
|
3864
|
+
function generateFeatureKey(fromDir, prefix = "feat") {
|
|
3865
|
+
const lacDir = findLacDir(fromDir);
|
|
3866
|
+
if (!lacDir) throw new Error(`Could not find a .lac/ directory in "${fromDir}" or any of its parents. Run "lac workspace init" to initialise a life-as-code workspace.`);
|
|
3867
|
+
const counterPath = path.join(lacDir, COUNTER_FILE);
|
|
3868
|
+
const keysPath = path.join(lacDir, KEYS_FILE);
|
|
3869
|
+
const year = getCurrentYear();
|
|
3870
|
+
let counter = 1;
|
|
3871
|
+
if (fs.existsSync(counterPath)) try {
|
|
3872
|
+
const lines = fs.readFileSync(counterPath, "utf-8").trim().split("\n").map((l) => l.trim());
|
|
3873
|
+
const storedYear = parseInt(lines[0] ?? "", 10);
|
|
3874
|
+
const storedCounter = parseInt(lines[1] ?? "", 10);
|
|
3875
|
+
if (isNaN(storedYear) || isNaN(storedCounter)) {
|
|
3876
|
+
process.stderr.write("Warning: .lac/counter was corrupt — reset to 1\n");
|
|
3877
|
+
fs.writeFileSync(counterPath, `${year}\n1\n`, "utf-8");
|
|
3878
|
+
counter = 1;
|
|
3879
|
+
} else if (storedYear === year) counter = storedCounter + 1;
|
|
3880
|
+
} catch {
|
|
3881
|
+
process.stderr.write("Warning: .lac/counter was corrupt — reset to 1\n");
|
|
3882
|
+
fs.writeFileSync(counterPath, `${year}\n1\n`, "utf-8");
|
|
3883
|
+
counter = 1;
|
|
3884
|
+
}
|
|
3885
|
+
let existingKeys = /* @__PURE__ */ new Set();
|
|
3886
|
+
if (fs.existsSync(keysPath)) existingKeys = new Set(fs.readFileSync(keysPath, "utf-8").trim().split("\n").filter(Boolean));
|
|
3887
|
+
while (existingKeys.has(`${prefix}-${year}-${padCounter(counter)}`)) counter++;
|
|
3888
|
+
const featureKey = `${prefix}-${year}-${padCounter(counter)}`;
|
|
3889
|
+
existingKeys.add(featureKey);
|
|
3890
|
+
const counterTmp = counterPath + ".tmp";
|
|
3891
|
+
const keysTmp = keysPath + ".tmp";
|
|
3892
|
+
fs.writeFileSync(counterTmp, `${year}\n${counter}\n`, "utf-8");
|
|
3893
|
+
fs.writeFileSync(keysTmp, Array.from(existingKeys).join("\n") + "\n", "utf-8");
|
|
3894
|
+
fs.renameSync(counterTmp, counterPath);
|
|
3895
|
+
fs.renameSync(keysTmp, keysPath);
|
|
3896
|
+
return featureKey;
|
|
3897
|
+
}
|
|
3898
|
+
/**
|
|
3899
|
+
* Registers an externally-supplied featureKey in `.lac/keys` so that future
|
|
3900
|
+
* auto-generated keys never collide with it. Also advances the counter if
|
|
3901
|
+
* the key's sequence number is ≥ the current counter value for this year.
|
|
3902
|
+
*
|
|
3903
|
+
* If no `.lac/` directory is found the call is a no-op (workspace hasn't
|
|
3904
|
+
* been initialised yet; the key simply won't be tracked).
|
|
3905
|
+
*
|
|
3906
|
+
* Idempotent — safe to call multiple times with the same key.
|
|
3907
|
+
*/
|
|
3908
|
+
function registerFeatureKey(fromDir, key) {
|
|
3909
|
+
const lacDir = findLacDir(fromDir);
|
|
3910
|
+
if (!lacDir) return;
|
|
3911
|
+
const counterPath = path.join(lacDir, COUNTER_FILE);
|
|
3912
|
+
const keysPath = path.join(lacDir, KEYS_FILE);
|
|
3913
|
+
const year = getCurrentYear();
|
|
3914
|
+
let existingKeys = /* @__PURE__ */ new Set();
|
|
3915
|
+
if (fs.existsSync(keysPath)) existingKeys = new Set(fs.readFileSync(keysPath, "utf-8").trim().split("\n").filter(Boolean));
|
|
3916
|
+
if (existingKeys.has(key)) return;
|
|
3917
|
+
let currentCounter = 0;
|
|
3918
|
+
if (fs.existsSync(counterPath)) try {
|
|
3919
|
+
const lines = fs.readFileSync(counterPath, "utf-8").trim().split("\n").map((l) => l.trim());
|
|
3920
|
+
const storedYear = parseInt(lines[0] ?? "", 10);
|
|
3921
|
+
const storedCounter = parseInt(lines[1] ?? "", 10);
|
|
3922
|
+
if (!isNaN(storedYear) && !isNaN(storedCounter) && storedYear === year) currentCounter = storedCounter;
|
|
3923
|
+
} catch {}
|
|
3924
|
+
const match = /^[a-z][a-z0-9]*-(\d{4})-(\d{3})$/.exec(key);
|
|
3925
|
+
let newCounter = currentCounter;
|
|
3926
|
+
if (match) {
|
|
3927
|
+
const keyYear = parseInt(match[1], 10);
|
|
3928
|
+
const keyNum = parseInt(match[2], 10);
|
|
3929
|
+
if (keyYear === year && keyNum > currentCounter) newCounter = keyNum;
|
|
3930
|
+
}
|
|
3931
|
+
existingKeys.add(key);
|
|
3932
|
+
const counterTmp = counterPath + ".tmp";
|
|
3933
|
+
const keysTmp = keysPath + ".tmp";
|
|
3934
|
+
fs.writeFileSync(counterTmp, `${year}\n${newCounter}\n`, "utf-8");
|
|
3935
|
+
fs.writeFileSync(keysTmp, Array.from(existingKeys).join("\n") + "\n", "utf-8");
|
|
3936
|
+
fs.renameSync(counterTmp, counterPath);
|
|
3937
|
+
fs.renameSync(keysTmp, keysPath);
|
|
3938
|
+
}
|
|
3939
|
+
|
|
3940
|
+
//#endregion
|
|
3941
|
+
//#region ../lac-mcp/src/tools/analysis.ts
|
|
3942
|
+
function scanFeatures$1(dir) {
|
|
3943
|
+
const results = [];
|
|
3944
|
+
walk(dir, results);
|
|
3945
|
+
return results;
|
|
3946
|
+
}
|
|
3947
|
+
function walk(dir, results) {
|
|
3948
|
+
try {
|
|
3949
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
3950
|
+
for (const e of entries) {
|
|
3951
|
+
if ([
|
|
3952
|
+
"node_modules",
|
|
3953
|
+
".git",
|
|
3954
|
+
"dist"
|
|
3955
|
+
].includes(e.name)) continue;
|
|
3956
|
+
const full = path.join(dir, e.name);
|
|
3957
|
+
if (e.isDirectory()) walk(full, results);
|
|
3958
|
+
else if (e.name === "feature.json") try {
|
|
3959
|
+
const r = validateFeature(JSON.parse(fs.readFileSync(full, "utf-8")));
|
|
3960
|
+
if (r.success) results.push({
|
|
3961
|
+
feature: r.data,
|
|
3962
|
+
filePath: full
|
|
3963
|
+
});
|
|
3964
|
+
} catch {}
|
|
3965
|
+
}
|
|
3966
|
+
} catch {}
|
|
3967
|
+
}
|
|
3968
|
+
function resolve(p, root) {
|
|
3969
|
+
return path.isAbsolute(p) ? p : path.resolve(root, p);
|
|
3970
|
+
}
|
|
3971
|
+
const RISK_KEYWORDS = [
|
|
3972
|
+
"revisit",
|
|
3973
|
+
"temporary",
|
|
3974
|
+
"todo",
|
|
3975
|
+
"hack",
|
|
3976
|
+
"fixme",
|
|
3977
|
+
"workaround",
|
|
3978
|
+
"short-term",
|
|
3979
|
+
"quick fix"
|
|
3980
|
+
];
|
|
3981
|
+
function handleAuditDecisions(a, workspaceRoot$1) {
|
|
3982
|
+
const features = scanFeatures$1(a.path ? resolve(String(a.path), workspaceRoot$1) : workspaceRoot$1);
|
|
3983
|
+
const missingDecisions = [];
|
|
3984
|
+
const flaggedDecisions = [];
|
|
3985
|
+
const unaddressedReopens = [];
|
|
3986
|
+
const domainGroups = /* @__PURE__ */ new Map();
|
|
3987
|
+
let cleanCount = 0;
|
|
3988
|
+
for (const { feature } of features) {
|
|
3989
|
+
if (feature.status === "draft") continue;
|
|
3990
|
+
let hasIssue = false;
|
|
3991
|
+
if (!feature.decisions?.length) {
|
|
3992
|
+
missingDecisions.push(` ${feature.featureKey.padEnd(20)} ${feature.status}`);
|
|
3993
|
+
hasIssue = true;
|
|
3994
|
+
} else for (const d of feature.decisions) {
|
|
3995
|
+
const text = (d.decision + " " + d.rationale).toLowerCase();
|
|
3996
|
+
const found = RISK_KEYWORDS.find((k) => text.includes(k));
|
|
3997
|
+
if (found) {
|
|
3998
|
+
flaggedDecisions.push({
|
|
3999
|
+
key: feature.featureKey,
|
|
4000
|
+
decision: d.decision,
|
|
4001
|
+
keyword: found
|
|
4002
|
+
});
|
|
4003
|
+
hasIssue = true;
|
|
4004
|
+
}
|
|
4005
|
+
}
|
|
4006
|
+
const staleAnnotation = feature.annotations?.find((a$1) => a$1.type === "stale-review");
|
|
4007
|
+
if (staleAnnotation) {
|
|
4008
|
+
unaddressedReopens.push(` ${feature.featureKey.padEnd(20)} ${feature.status.padEnd(10)} — ${staleAnnotation.body}`);
|
|
4009
|
+
hasIssue = true;
|
|
4010
|
+
}
|
|
4011
|
+
if (feature.domain) {
|
|
4012
|
+
const group = domainGroups.get(feature.domain) ?? [];
|
|
4013
|
+
group.push(feature);
|
|
4014
|
+
domainGroups.set(feature.domain, group);
|
|
4015
|
+
}
|
|
4016
|
+
if (!hasIssue) cleanCount++;
|
|
4017
|
+
}
|
|
4018
|
+
const duplicateSuspects = [];
|
|
4019
|
+
for (const [domain, group] of domainGroups) for (let i = 0; i < group.length; i++) for (let j = i + 1; j < group.length; j++) {
|
|
4020
|
+
const fi = group[i];
|
|
4021
|
+
const fj = group[j];
|
|
4022
|
+
if (!fi || !fj) continue;
|
|
4023
|
+
const wordsA = new Set(fi.title.toLowerCase().split(/\W+/).filter((w) => w.length > 3));
|
|
4024
|
+
const wordsB = new Set(fj.title.toLowerCase().split(/\W+/).filter((w) => w.length > 3));
|
|
4025
|
+
if ([...wordsA].filter((w) => wordsB.has(w)).length >= 2) duplicateSuspects.push(` ${fi.featureKey} + ${fj.featureKey} (${domain}) — "${fi.title}" / "${fj.title}"`);
|
|
4026
|
+
}
|
|
4027
|
+
const sections = [];
|
|
4028
|
+
if (unaddressedReopens.length > 0) sections.push(`⚠ Unaddressed reopens — stale fields not yet reviewed (${unaddressedReopens.length}):\n${unaddressedReopens.join("\n")}`);
|
|
4029
|
+
if (missingDecisions.length > 0) sections.push(`⚠ Missing decisions (${missingDecisions.length}):\n${missingDecisions.join("\n")}`);
|
|
4030
|
+
if (flaggedDecisions.length > 0) {
|
|
4031
|
+
const lines = flaggedDecisions.map((f) => ` ${f.key.padEnd(20)} "${f.decision.slice(0, 60)}" — contains "${f.keyword}"`);
|
|
4032
|
+
sections.push(`⚠ Decisions flagged for review (${flaggedDecisions.length}):\n${lines.join("\n")}`);
|
|
4033
|
+
}
|
|
4034
|
+
if (duplicateSuspects.length > 0) sections.push(`⚠ Possible duplicates (${duplicateSuspects.length}):\n${duplicateSuspects.join("\n")}`);
|
|
4035
|
+
sections.push(`✓ ${cleanCount} feature(s) with clean decisions`);
|
|
4036
|
+
return { content: [{
|
|
4037
|
+
type: "text",
|
|
4038
|
+
text: sections.join("\n\n")
|
|
4039
|
+
}] };
|
|
4040
|
+
}
|
|
4041
|
+
function handleFeatureSimilarity(a, workspaceRoot$1) {
|
|
4042
|
+
if (!a.path) return {
|
|
4043
|
+
content: [{
|
|
4044
|
+
type: "text",
|
|
4045
|
+
text: "path is required"
|
|
4046
|
+
}],
|
|
4047
|
+
isError: true
|
|
4048
|
+
};
|
|
4049
|
+
const featureDir = resolve(String(a.path), workspaceRoot$1);
|
|
4050
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
4051
|
+
let raw;
|
|
4052
|
+
try {
|
|
4053
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
4054
|
+
} catch {
|
|
4055
|
+
return {
|
|
4056
|
+
content: [{
|
|
4057
|
+
type: "text",
|
|
4058
|
+
text: `No feature.json found at "${featurePath}"`
|
|
4059
|
+
}],
|
|
4060
|
+
isError: true
|
|
4061
|
+
};
|
|
4062
|
+
}
|
|
4063
|
+
const result = validateFeature(JSON.parse(raw));
|
|
4064
|
+
if (!result.success) return {
|
|
4065
|
+
content: [{
|
|
4066
|
+
type: "text",
|
|
4067
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
4068
|
+
}],
|
|
4069
|
+
isError: true
|
|
4070
|
+
};
|
|
4071
|
+
const target = result.data;
|
|
4072
|
+
const targetTags = new Set(target.tags ?? []);
|
|
4073
|
+
const targetWords = new Set((target.title + " " + target.problem).toLowerCase().split(/\W+/).filter((w) => w.length > 4));
|
|
4074
|
+
const allFeatures = scanFeatures$1(workspaceRoot$1);
|
|
4075
|
+
const matches = [];
|
|
4076
|
+
for (const { feature } of allFeatures) {
|
|
4077
|
+
if (feature.featureKey === target.featureKey) continue;
|
|
4078
|
+
if (feature.lineage?.parent === target.featureKey || target.lineage?.parent === feature.featureKey) continue;
|
|
4079
|
+
let score = 0;
|
|
4080
|
+
const reasons = [];
|
|
4081
|
+
if (target.domain && feature.domain === target.domain) {
|
|
4082
|
+
score += 3;
|
|
4083
|
+
reasons.push(`same domain: ${feature.domain}`);
|
|
4084
|
+
}
|
|
4085
|
+
const sharedTags = (feature.tags ?? []).filter((t) => targetTags.has(t));
|
|
4086
|
+
if (sharedTags.length > 0) {
|
|
4087
|
+
score += sharedTags.length * 2;
|
|
4088
|
+
reasons.push(`shared tags: ${sharedTags.join(", ")}`);
|
|
4089
|
+
}
|
|
4090
|
+
const featureWords = new Set((feature.title + " " + feature.problem).toLowerCase().split(/\W+/).filter((w) => w.length > 4));
|
|
4091
|
+
const wordOverlap = [...targetWords].filter((w) => featureWords.has(w)).length;
|
|
4092
|
+
if (wordOverlap >= 2) {
|
|
4093
|
+
score += wordOverlap;
|
|
4094
|
+
reasons.push(`${wordOverlap} shared keywords`);
|
|
4095
|
+
}
|
|
4096
|
+
if (score > 0) matches.push({
|
|
4097
|
+
feature,
|
|
4098
|
+
score,
|
|
4099
|
+
reasons
|
|
4100
|
+
});
|
|
4101
|
+
}
|
|
4102
|
+
matches.sort((a$1, b) => b.score - a$1.score);
|
|
4103
|
+
if (matches.length === 0) return { content: [{
|
|
4104
|
+
type: "text",
|
|
4105
|
+
text: `No similar features found for "${target.featureKey} — ${target.title}".`
|
|
4106
|
+
}] };
|
|
4107
|
+
const stars = (score) => score >= 6 ? "★★★" : score >= 4 ? "★★ " : "★ ";
|
|
4108
|
+
return { content: [{
|
|
4109
|
+
type: "text",
|
|
4110
|
+
text: [`Similar features to "${target.featureKey} — ${target.title}":\n`, ...matches.slice(0, 10).map((m) => `${stars(m.score)} ${m.feature.featureKey.padEnd(20)} "${m.feature.title}"\n ${m.reasons.join(" · ")}`)].join("\n")
|
|
4111
|
+
}] };
|
|
4112
|
+
}
|
|
4113
|
+
|
|
4114
|
+
//#endregion
|
|
4115
|
+
//#region ../lac-mcp/src/tools/git-tools.ts
|
|
4116
|
+
function handleTimeTravel(a, workspaceRoot$1) {
|
|
4117
|
+
if (!a.path) return {
|
|
4118
|
+
content: [{
|
|
4119
|
+
type: "text",
|
|
4120
|
+
text: "path is required"
|
|
4121
|
+
}],
|
|
4122
|
+
isError: true
|
|
4123
|
+
};
|
|
4124
|
+
const featureDir = path.isAbsolute(String(a.path)) ? String(a.path) : path.resolve(workspaceRoot$1, String(a.path));
|
|
4125
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
4126
|
+
let gitRoot;
|
|
4127
|
+
try {
|
|
4128
|
+
gitRoot = execSync("git rev-parse --show-toplevel", {
|
|
4129
|
+
cwd: featureDir,
|
|
4130
|
+
encoding: "utf-8",
|
|
4131
|
+
stdio: [
|
|
4132
|
+
"pipe",
|
|
4133
|
+
"pipe",
|
|
4134
|
+
"pipe"
|
|
4135
|
+
]
|
|
4136
|
+
}).trim();
|
|
4137
|
+
} catch {
|
|
4138
|
+
return {
|
|
4139
|
+
content: [{
|
|
4140
|
+
type: "text",
|
|
4141
|
+
text: "Not a git repository. time_travel requires git."
|
|
4142
|
+
}],
|
|
4143
|
+
isError: true
|
|
4144
|
+
};
|
|
4145
|
+
}
|
|
4146
|
+
const relPath = path.relative(gitRoot, featurePath).replace(/\\/g, "/");
|
|
4147
|
+
let logOutput;
|
|
4148
|
+
try {
|
|
4149
|
+
logOutput = execSync(`git log --format="%H %as %s" -- "${relPath}"`, {
|
|
4150
|
+
cwd: gitRoot,
|
|
4151
|
+
encoding: "utf-8",
|
|
4152
|
+
stdio: [
|
|
4153
|
+
"pipe",
|
|
4154
|
+
"pipe",
|
|
4155
|
+
"pipe"
|
|
4156
|
+
]
|
|
4157
|
+
}).trim();
|
|
4158
|
+
} catch {
|
|
4159
|
+
logOutput = "";
|
|
4160
|
+
}
|
|
4161
|
+
if (!logOutput) return { content: [{
|
|
4162
|
+
type: "text",
|
|
4163
|
+
text: `No git history found for "${relPath}". Has this file been committed?`
|
|
4164
|
+
}] };
|
|
4165
|
+
const commits = logOutput.split("\n").map((line) => {
|
|
4166
|
+
const parts = line.split(" ");
|
|
4167
|
+
return {
|
|
4168
|
+
sha: parts[0] ?? "",
|
|
4169
|
+
date: parts[1] ?? "",
|
|
4170
|
+
message: parts.slice(2).join(" ")
|
|
4171
|
+
};
|
|
4172
|
+
});
|
|
4173
|
+
if (!a.date && !a.commit) {
|
|
4174
|
+
const historyLines = commits.map((c) => ` ${c.date} ${c.sha.slice(0, 8)} ${c.message}`);
|
|
4175
|
+
return { content: [{
|
|
4176
|
+
type: "text",
|
|
4177
|
+
text: `Git history for "${relPath}" (${commits.length} commit(s)):\n\n${historyLines.join("\n")}\n\nCall time_travel again with date (YYYY-MM-DD) or commit (SHA) to view a specific version.`
|
|
4178
|
+
}] };
|
|
4179
|
+
}
|
|
4180
|
+
let targetSha;
|
|
4181
|
+
if (a.commit) targetSha = String(a.commit);
|
|
4182
|
+
else {
|
|
4183
|
+
const targetDate = String(a.date);
|
|
4184
|
+
const match = commits.find((c) => c.date <= targetDate);
|
|
4185
|
+
if (!match) return { content: [{
|
|
4186
|
+
type: "text",
|
|
4187
|
+
text: `No commits found at or before "${targetDate}".\n\nAvailable history:\n${commits.map((c) => ` ${c.date} ${c.sha.slice(0, 8)} ${c.message}`).join("\n")}`
|
|
4188
|
+
}] };
|
|
4189
|
+
targetSha = match.sha;
|
|
4190
|
+
}
|
|
4191
|
+
let historicalContent;
|
|
4192
|
+
try {
|
|
4193
|
+
historicalContent = execSync(`git show "${targetSha}:${relPath}"`, {
|
|
4194
|
+
cwd: gitRoot,
|
|
4195
|
+
encoding: "utf-8",
|
|
4196
|
+
stdio: [
|
|
4197
|
+
"pipe",
|
|
4198
|
+
"pipe",
|
|
4199
|
+
"pipe"
|
|
4200
|
+
]
|
|
4201
|
+
});
|
|
4202
|
+
} catch {
|
|
4203
|
+
return {
|
|
4204
|
+
content: [{
|
|
4205
|
+
type: "text",
|
|
4206
|
+
text: `Could not read "${relPath}" at commit ${targetSha.slice(0, 8)}.`
|
|
4207
|
+
}],
|
|
4208
|
+
isError: true
|
|
4209
|
+
};
|
|
4210
|
+
}
|
|
4211
|
+
const targetCommit = commits.find((c) => c.sha === targetSha || targetSha != null && c.sha.length >= 7 && targetSha.startsWith(c.sha.slice(0, 7)));
|
|
4212
|
+
const commitInfo = targetCommit ? `${targetCommit.date} ${targetSha.slice(0, 8)} ${targetCommit.message}` : targetSha.slice(0, 8);
|
|
4213
|
+
let displayContent;
|
|
4214
|
+
try {
|
|
4215
|
+
const validation = validateFeature(JSON.parse(historicalContent));
|
|
4216
|
+
displayContent = validation.success ? JSON.stringify(validation.data, null, 2) : historicalContent;
|
|
4217
|
+
} catch {
|
|
4218
|
+
displayContent = historicalContent;
|
|
4219
|
+
}
|
|
4220
|
+
const newerCommits = commits.filter((c) => targetCommit?.date != null ? c.date > targetCommit.date : false);
|
|
4221
|
+
return { content: [{
|
|
4222
|
+
type: "text",
|
|
4223
|
+
text: `feature.json at: ${commitInfo}${newerCommits.length > 0 ? `\n\n[${newerCommits.length} commit(s) made after this snapshot]` : ""}\n\n${displayContent}`
|
|
4224
|
+
}] };
|
|
4225
|
+
}
|
|
4226
|
+
|
|
4227
|
+
//#endregion
|
|
4228
|
+
//#region ../lac-mcp/src/tools/impact.ts
|
|
4229
|
+
const SOURCE_EXTENSIONS$1 = new Set([
|
|
4230
|
+
".ts",
|
|
4231
|
+
".tsx",
|
|
4232
|
+
".js",
|
|
4233
|
+
".jsx",
|
|
4234
|
+
".py",
|
|
4235
|
+
".go",
|
|
4236
|
+
".rs",
|
|
4237
|
+
".vue",
|
|
4238
|
+
".svelte"
|
|
4239
|
+
]);
|
|
4240
|
+
function scanFeatures(dir) {
|
|
4241
|
+
const results = [];
|
|
4242
|
+
walkFeatures(dir, results);
|
|
4243
|
+
return results;
|
|
4244
|
+
}
|
|
4245
|
+
function walkFeatures(dir, results) {
|
|
4246
|
+
try {
|
|
4247
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
4248
|
+
for (const e of entries) {
|
|
4249
|
+
if ([
|
|
4250
|
+
"node_modules",
|
|
4251
|
+
".git",
|
|
4252
|
+
"dist"
|
|
4253
|
+
].includes(e.name)) continue;
|
|
4254
|
+
const full = path.join(dir, e.name);
|
|
4255
|
+
if (e.isDirectory()) walkFeatures(full, results);
|
|
4256
|
+
else if (e.name === "feature.json") try {
|
|
4257
|
+
const r = validateFeature(JSON.parse(fs.readFileSync(full, "utf-8")));
|
|
4258
|
+
if (r.success) results.push({
|
|
4259
|
+
feature: r.data,
|
|
4260
|
+
filePath: full
|
|
4261
|
+
});
|
|
4262
|
+
} catch {}
|
|
4263
|
+
}
|
|
4264
|
+
} catch {}
|
|
4265
|
+
}
|
|
4266
|
+
function getSourceFiles(dir) {
|
|
4267
|
+
const files = [];
|
|
4268
|
+
try {
|
|
4269
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
4270
|
+
for (const e of entries) {
|
|
4271
|
+
if ([
|
|
4272
|
+
"node_modules",
|
|
4273
|
+
".git",
|
|
4274
|
+
"dist"
|
|
4275
|
+
].includes(e.name)) continue;
|
|
4276
|
+
const full = path.join(dir, e.name);
|
|
4277
|
+
if (e.isDirectory()) files.push(...getSourceFiles(full));
|
|
4278
|
+
else if (SOURCE_EXTENSIONS$1.has(path.extname(e.name))) files.push(full);
|
|
4279
|
+
}
|
|
4280
|
+
} catch {}
|
|
4281
|
+
return files;
|
|
4282
|
+
}
|
|
4283
|
+
function findOwningFeatureKey(filePath) {
|
|
4284
|
+
let current = path.dirname(filePath);
|
|
4285
|
+
while (true) {
|
|
4286
|
+
const candidate = path.join(current, "feature.json");
|
|
4287
|
+
if (fs.existsSync(candidate)) try {
|
|
4288
|
+
const r = validateFeature(JSON.parse(fs.readFileSync(candidate, "utf-8")));
|
|
4289
|
+
if (r.success) return r.data.featureKey;
|
|
4290
|
+
} catch {}
|
|
4291
|
+
const parent = path.dirname(current);
|
|
4292
|
+
if (parent === current) return null;
|
|
4293
|
+
current = parent;
|
|
4294
|
+
}
|
|
4295
|
+
}
|
|
4296
|
+
function handleCrossFeatureImpact(a, workspaceRoot$1) {
|
|
4297
|
+
if (!a.file) return {
|
|
4298
|
+
content: [{
|
|
4299
|
+
type: "text",
|
|
4300
|
+
text: "file parameter is required"
|
|
4301
|
+
}],
|
|
4302
|
+
isError: true
|
|
4303
|
+
};
|
|
4304
|
+
const targetFile = path.isAbsolute(String(a.file)) ? String(a.file) : path.resolve(workspaceRoot$1, String(a.file));
|
|
4305
|
+
if (!fs.existsSync(targetFile)) return {
|
|
4306
|
+
content: [{
|
|
4307
|
+
type: "text",
|
|
4308
|
+
text: `File not found: "${targetFile}"`
|
|
4309
|
+
}],
|
|
4310
|
+
isError: true
|
|
4311
|
+
};
|
|
4312
|
+
const targetBasename = path.basename(targetFile);
|
|
4313
|
+
const targetNoExt = path.basename(targetFile, path.extname(targetFile));
|
|
4314
|
+
const targetRelFromRoot = path.relative(workspaceRoot$1, targetFile).replace(/\\/g, "/");
|
|
4315
|
+
const patterns = [...new Set([
|
|
4316
|
+
targetBasename,
|
|
4317
|
+
targetNoExt,
|
|
4318
|
+
targetRelFromRoot
|
|
4319
|
+
])];
|
|
4320
|
+
const owningKey = findOwningFeatureKey(targetFile);
|
|
4321
|
+
const features = scanFeatures(workspaceRoot$1);
|
|
4322
|
+
const impacts = [];
|
|
4323
|
+
for (const { feature, filePath: featureJsonPath } of features) {
|
|
4324
|
+
if (feature.featureKey === owningKey) continue;
|
|
4325
|
+
const featureDir = path.dirname(featureJsonPath);
|
|
4326
|
+
const sourceFiles = getSourceFiles(featureDir);
|
|
4327
|
+
const matchedFiles = [];
|
|
4328
|
+
const matchedPatterns = [];
|
|
4329
|
+
for (const srcFile of sourceFiles) {
|
|
4330
|
+
if (srcFile === targetFile) continue;
|
|
4331
|
+
try {
|
|
4332
|
+
const content = fs.readFileSync(srcFile, "utf-8");
|
|
4333
|
+
const matched = patterns.filter((p) => content.includes(p));
|
|
4334
|
+
if (matched.length > 0) {
|
|
4335
|
+
matchedFiles.push(path.relative(featureDir, srcFile));
|
|
4336
|
+
matchedPatterns.push(...matched);
|
|
4337
|
+
}
|
|
4338
|
+
} catch {}
|
|
4339
|
+
}
|
|
4340
|
+
if (matchedFiles.length > 0) impacts.push({
|
|
4341
|
+
feature,
|
|
4342
|
+
matchedFiles,
|
|
4343
|
+
patterns: [...new Set(matchedPatterns)]
|
|
4344
|
+
});
|
|
4345
|
+
}
|
|
4346
|
+
const lines = [
|
|
4347
|
+
`Impact analysis: ${path.relative(workspaceRoot$1, targetFile)}`,
|
|
4348
|
+
"─".repeat(50),
|
|
4349
|
+
owningKey ? `Owned by : ${owningKey}` : "No owning feature found (untracked file)"
|
|
4350
|
+
];
|
|
4351
|
+
if (impacts.length === 0) lines.push("\nNo other features reference this file.");
|
|
4352
|
+
else {
|
|
4353
|
+
lines.push(`\n${impacts.length} feature(s) reference this file — changes may affect them:\n`);
|
|
4354
|
+
for (const imp of impacts) {
|
|
4355
|
+
lines.push(` ${imp.feature.featureKey.padEnd(20)} ${imp.feature.status.padEnd(10)} "${imp.feature.title}"`);
|
|
4356
|
+
const fileList = imp.matchedFiles.slice(0, 3).join(", ");
|
|
4357
|
+
const more = imp.matchedFiles.length > 3 ? ` +${imp.matchedFiles.length - 3} more` : "";
|
|
4358
|
+
lines.push(` referenced in: ${fileList}${more}`);
|
|
4359
|
+
}
|
|
4360
|
+
lines.push("\n⚠ Changes to this file may affect all features listed above.");
|
|
4361
|
+
}
|
|
4362
|
+
return { content: [{
|
|
4363
|
+
type: "text",
|
|
4364
|
+
text: lines.join("\n")
|
|
4365
|
+
}] };
|
|
4366
|
+
}
|
|
4367
|
+
|
|
3778
4368
|
//#endregion
|
|
3779
4369
|
//#region ../lac-claude/dist/index.mjs
|
|
3780
4370
|
Object.freeze({ status: "aborted" });
|
|
@@ -7418,6 +8008,27 @@ const LineageSchema = object({
|
|
|
7418
8008
|
children: array(string()).optional(),
|
|
7419
8009
|
spawnReason: string().nullable().optional()
|
|
7420
8010
|
});
|
|
8011
|
+
const StatusTransitionSchema = object({
|
|
8012
|
+
from: FeatureStatusSchema,
|
|
8013
|
+
to: FeatureStatusSchema,
|
|
8014
|
+
date: string().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
8015
|
+
reason: string().optional()
|
|
8016
|
+
});
|
|
8017
|
+
const RevisionSchema = object({
|
|
8018
|
+
date: string().regex(/^\d{4}-\d{2}-\d{2}$/, "date must be YYYY-MM-DD"),
|
|
8019
|
+
author: string().min(1),
|
|
8020
|
+
fields_changed: array(string()).min(1),
|
|
8021
|
+
reason: string().min(1)
|
|
8022
|
+
});
|
|
8023
|
+
const PublicInterfaceEntrySchema = object({
|
|
8024
|
+
name: string().min(1),
|
|
8025
|
+
type: string().min(1),
|
|
8026
|
+
description: string().optional()
|
|
8027
|
+
});
|
|
8028
|
+
const CodeSnippetSchema = object({
|
|
8029
|
+
label: string().min(1),
|
|
8030
|
+
snippet: string().min(1)
|
|
8031
|
+
});
|
|
7421
8032
|
const FeatureSchema = object({
|
|
7422
8033
|
featureKey: string().regex(FEATURE_KEY_PATTERN, "featureKey must match pattern <domain>-YYYY-NNN (e.g. feat-2026-001, proc-2026-001)"),
|
|
7423
8034
|
title: string().min(1),
|
|
@@ -7433,55 +8044,21 @@ const FeatureSchema = object({
|
|
|
7433
8044
|
annotations: array(AnnotationSchema).optional(),
|
|
7434
8045
|
lineage: LineageSchema.optional(),
|
|
7435
8046
|
successCriteria: string().optional(),
|
|
7436
|
-
domain: string().optional()
|
|
8047
|
+
domain: string().optional(),
|
|
8048
|
+
priority: number().int().min(1).max(5).optional(),
|
|
8049
|
+
statusHistory: array(StatusTransitionSchema).optional(),
|
|
8050
|
+
revisions: array(RevisionSchema).optional(),
|
|
8051
|
+
superseded_by: string().regex(FEATURE_KEY_PATTERN, "superseded_by must be a valid featureKey").optional(),
|
|
8052
|
+
superseded_from: array(string().regex(FEATURE_KEY_PATTERN, "each superseded_from entry must be a valid featureKey")).optional(),
|
|
8053
|
+
merged_into: string().regex(FEATURE_KEY_PATTERN, "merged_into must be a valid featureKey").optional(),
|
|
8054
|
+
merged_from: array(string().regex(FEATURE_KEY_PATTERN, "each merged_from entry must be a valid featureKey")).optional(),
|
|
8055
|
+
componentFile: string().optional(),
|
|
8056
|
+
npmPackages: array(string()).optional(),
|
|
8057
|
+
publicInterface: array(PublicInterfaceEntrySchema).optional(),
|
|
8058
|
+
externalDependencies: array(string()).optional(),
|
|
8059
|
+
lastVerifiedDate: string().regex(/^\d{4}-\d{2}-\d{2}$/, "lastVerifiedDate must be YYYY-MM-DD").optional(),
|
|
8060
|
+
codeSnippets: array(CodeSnippetSchema).optional()
|
|
7437
8061
|
});
|
|
7438
|
-
function validateFeature(data) {
|
|
7439
|
-
const result = FeatureSchema.safeParse(data);
|
|
7440
|
-
if (result.success) return {
|
|
7441
|
-
success: true,
|
|
7442
|
-
data: result.data
|
|
7443
|
-
};
|
|
7444
|
-
return {
|
|
7445
|
-
success: false,
|
|
7446
|
-
errors: result.error.issues.map((issue$1$1) => {
|
|
7447
|
-
return `${issue$1$1.path.length > 0 ? `${issue$1$1.path.join(".")}: ` : ""}${issue$1$1.message}`;
|
|
7448
|
-
})
|
|
7449
|
-
};
|
|
7450
|
-
}
|
|
7451
|
-
function createClient() {
|
|
7452
|
-
let apiKey = process$1.env.ANTHROPIC_API_KEY;
|
|
7453
|
-
if (!apiKey) {
|
|
7454
|
-
const configPath = findLacConfig();
|
|
7455
|
-
if (configPath) try {
|
|
7456
|
-
apiKey = JSON.parse(fs.readFileSync(configPath, "utf-8"))?.ai?.apiKey;
|
|
7457
|
-
} catch {}
|
|
7458
|
-
}
|
|
7459
|
-
if (!apiKey) throw new Error("ANTHROPIC_API_KEY not set.\nSet it via:\n export ANTHROPIC_API_KEY=sk-ant-...\nOr add it to .lac/config.json:\n { \"ai\": { \"apiKey\": \"sk-ant-...\" } }\nGet a key at https://console.anthropic.com/settings/keys");
|
|
7460
|
-
return new Anthropic({ apiKey });
|
|
7461
|
-
}
|
|
7462
|
-
function findLacConfig() {
|
|
7463
|
-
let current = process$1.cwd();
|
|
7464
|
-
while (true) {
|
|
7465
|
-
const candidate = path.join(current, ".lac", "config.json");
|
|
7466
|
-
if (fs.existsSync(candidate)) return candidate;
|
|
7467
|
-
const parent = path.dirname(current);
|
|
7468
|
-
if (parent === current) return null;
|
|
7469
|
-
current = parent;
|
|
7470
|
-
}
|
|
7471
|
-
}
|
|
7472
|
-
async function generateText(client, systemPrompt, userMessage, model = "claude-sonnet-4-6") {
|
|
7473
|
-
const content = (await client.messages.create({
|
|
7474
|
-
model,
|
|
7475
|
-
max_tokens: 4096,
|
|
7476
|
-
system: systemPrompt,
|
|
7477
|
-
messages: [{
|
|
7478
|
-
role: "user",
|
|
7479
|
-
content: userMessage
|
|
7480
|
-
}]
|
|
7481
|
-
})).content[0];
|
|
7482
|
-
if (!content || content.type !== "text") throw new Error("Unexpected response type from Claude API");
|
|
7483
|
-
return content.text;
|
|
7484
|
-
}
|
|
7485
8062
|
const SOURCE_EXTENSIONS = new Set([
|
|
7486
8063
|
".ts",
|
|
7487
8064
|
".tsx",
|
|
@@ -7500,16 +8077,20 @@ const SOURCE_EXTENSIONS = new Set([
|
|
|
7500
8077
|
]);
|
|
7501
8078
|
const MAX_FILE_CHARS = 8e3;
|
|
7502
8079
|
const MAX_TOTAL_CHARS = 32e4;
|
|
7503
|
-
function buildContext(featureDir, feature) {
|
|
8080
|
+
function buildContext(featureDir, feature, opts = {}) {
|
|
8081
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
8082
|
+
const { files: sourceFiles, truncatedPaths } = gatherSourceFiles(featureDir, opts.maxFileChars);
|
|
7504
8083
|
return {
|
|
7505
8084
|
feature,
|
|
7506
|
-
featurePath
|
|
7507
|
-
sourceFiles
|
|
7508
|
-
gitLog: getGitLog(featureDir)
|
|
8085
|
+
featurePath,
|
|
8086
|
+
sourceFiles,
|
|
8087
|
+
gitLog: getGitLog(featureDir),
|
|
8088
|
+
truncatedFiles: truncatedPaths
|
|
7509
8089
|
};
|
|
7510
8090
|
}
|
|
7511
|
-
function gatherSourceFiles(dir) {
|
|
8091
|
+
function gatherSourceFiles(dir, maxFileChars = MAX_FILE_CHARS) {
|
|
7512
8092
|
const files = [];
|
|
8093
|
+
const truncatedPaths = [];
|
|
7513
8094
|
let totalChars = 0;
|
|
7514
8095
|
const priorityNames = [
|
|
7515
8096
|
"package.json",
|
|
@@ -7520,11 +8101,15 @@ function gatherSourceFiles(dir) {
|
|
|
7520
8101
|
for (const name of priorityNames) {
|
|
7521
8102
|
const p = path.join(dir, name);
|
|
7522
8103
|
if (fs.existsSync(p)) try {
|
|
7523
|
-
const
|
|
8104
|
+
const raw = fs.readFileSync(p, "utf-8");
|
|
8105
|
+
const wasTruncated = raw.length > 4e3;
|
|
8106
|
+
const content = truncate(raw, 4e3);
|
|
7524
8107
|
files.push({
|
|
7525
8108
|
relativePath: name,
|
|
7526
|
-
content
|
|
8109
|
+
content,
|
|
8110
|
+
truncated: wasTruncated || void 0
|
|
7527
8111
|
});
|
|
8112
|
+
if (wasTruncated) truncatedPaths.push(name);
|
|
7528
8113
|
totalChars += content.length;
|
|
7529
8114
|
} catch {}
|
|
7530
8115
|
}
|
|
@@ -7537,16 +8122,23 @@ function gatherSourceFiles(dir) {
|
|
|
7537
8122
|
if (totalChars >= MAX_TOTAL_CHARS) break;
|
|
7538
8123
|
if (priorityNames.includes(path.basename(filePath))) continue;
|
|
7539
8124
|
try {
|
|
7540
|
-
const
|
|
8125
|
+
const raw = fs.readFileSync(filePath, "utf-8");
|
|
8126
|
+
const wasTruncated = raw.length > maxFileChars;
|
|
8127
|
+
const content = truncate(raw, maxFileChars);
|
|
7541
8128
|
const relativePath = path.relative(dir, filePath);
|
|
7542
8129
|
files.push({
|
|
7543
8130
|
relativePath,
|
|
7544
|
-
content
|
|
8131
|
+
content,
|
|
8132
|
+
truncated: wasTruncated || void 0
|
|
7545
8133
|
});
|
|
8134
|
+
if (wasTruncated) truncatedPaths.push(relativePath);
|
|
7546
8135
|
totalChars += content.length;
|
|
7547
8136
|
} catch {}
|
|
7548
8137
|
}
|
|
7549
|
-
return
|
|
8138
|
+
return {
|
|
8139
|
+
files,
|
|
8140
|
+
truncatedPaths
|
|
8141
|
+
};
|
|
7550
8142
|
}
|
|
7551
8143
|
function walkDir(dir) {
|
|
7552
8144
|
const results = [];
|
|
@@ -7582,6 +8174,7 @@ function getGitLog(dir) {
|
|
|
7582
8174
|
}
|
|
7583
8175
|
function contextToString(ctx) {
|
|
7584
8176
|
const parts = [];
|
|
8177
|
+
if (ctx.truncatedFiles.length > 0) parts.push(`⚠ WARNING: ${ctx.truncatedFiles.length} file(s) were truncated — extraction may be incomplete:\n` + ctx.truncatedFiles.map((f) => ` - ${f}`).join("\n"));
|
|
7585
8178
|
parts.push("=== feature.json ===");
|
|
7586
8179
|
parts.push(JSON.stringify(ctx.feature, null, 2));
|
|
7587
8180
|
if (ctx.gitLog) {
|
|
@@ -7589,32 +8182,11 @@ function contextToString(ctx) {
|
|
|
7589
8182
|
parts.push(ctx.gitLog);
|
|
7590
8183
|
}
|
|
7591
8184
|
for (const file of ctx.sourceFiles) {
|
|
7592
|
-
parts.push(`\n=== ${file.relativePath} ===`);
|
|
8185
|
+
parts.push(`\n=== ${file.relativePath}${file.truncated ? " [truncated]" : ""} ===`);
|
|
7593
8186
|
parts.push(file.content);
|
|
7594
8187
|
}
|
|
7595
8188
|
return parts.join("\n");
|
|
7596
8189
|
}
|
|
7597
|
-
const RESET = "\x1B[0m";
|
|
7598
|
-
const BOLD = "\x1B[1m";
|
|
7599
|
-
const GREEN = "\x1B[32m";
|
|
7600
|
-
const CYAN = "\x1B[36m";
|
|
7601
|
-
const DIM = "\x1B[2m";
|
|
7602
|
-
function formatValue(value) {
|
|
7603
|
-
if (typeof value === "string") return value.length > 300 ? value.slice(0, 300) + "…" : value;
|
|
7604
|
-
return JSON.stringify(value, null, 2);
|
|
7605
|
-
}
|
|
7606
|
-
function printDiff(diffs) {
|
|
7607
|
-
const separator = "━".repeat(52);
|
|
7608
|
-
for (const diff of diffs) {
|
|
7609
|
-
const label = diff.wasEmpty ? "empty → generated" : "updated";
|
|
7610
|
-
process.stdout.write(`\n${BOLD}${CYAN}${separator}${RESET}\n`);
|
|
7611
|
-
process.stdout.write(`${BOLD} ${diff.field}${RESET} ${DIM}(${label})${RESET}\n`);
|
|
7612
|
-
process.stdout.write(`${CYAN}${separator}${RESET}\n`);
|
|
7613
|
-
const lines = formatValue(diff.proposed).split("\n");
|
|
7614
|
-
for (const line of lines) process.stdout.write(`${GREEN} ${line}${RESET}\n`);
|
|
7615
|
-
}
|
|
7616
|
-
process.stdout.write("\n");
|
|
7617
|
-
}
|
|
7618
8190
|
const FILL_PROMPTS = {
|
|
7619
8191
|
analysis: {
|
|
7620
8192
|
system: `You are a software engineering analyst. Given a feature.json and the feature's source code, write a clear analysis section. Cover: what the code does architecturally, key patterns used, and why they were likely chosen. Be specific — name actual functions, modules, and techniques visible in the code. Write in first-person technical prose, 150-300 words. Return only the analysis text, no JSON wrapper, no markdown heading.`,
|
|
@@ -7674,13 +8246,64 @@ Return ONLY a valid JSON array — no other text:
|
|
|
7674
8246
|
domain: {
|
|
7675
8247
|
system: `You are a software engineering analyst. Identify the primary technical domain for this feature from its code and problem statement. Return a single lowercase word or short hyphenated phrase (e.g. "auth", "payments", "notifications", "data-pipeline"). Return only the domain value — nothing else.`,
|
|
7676
8248
|
userSuffix: "Identify the domain for this feature."
|
|
8249
|
+
},
|
|
8250
|
+
componentFile: {
|
|
8251
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, identify the single primary file that implements this feature. Return a relative path from the project root (e.g. "src/components/FeatureCard.tsx", "packages/lac-mcp/src/index.ts"). Return only the path — nothing else.`,
|
|
8252
|
+
userSuffix: "Identify the primary source file for this feature."
|
|
8253
|
+
},
|
|
8254
|
+
npmPackages: {
|
|
8255
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, list the npm packages this feature directly imports or depends on at runtime. Exclude dev-only tools (vitest, eslint, etc.). Exclude Node built-ins.
|
|
8256
|
+
|
|
8257
|
+
Return ONLY a valid JSON array of package name strings — no other text:
|
|
8258
|
+
["package-a", "package-b"]`,
|
|
8259
|
+
userSuffix: "List the npm packages this feature depends on."
|
|
8260
|
+
},
|
|
8261
|
+
publicInterface: {
|
|
8262
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, extract the public interface — exported props, function signatures, or API surface that consumers of this feature depend on.
|
|
8263
|
+
|
|
8264
|
+
Return ONLY a valid JSON array — no other text:
|
|
8265
|
+
[
|
|
8266
|
+
{
|
|
8267
|
+
"name": "string",
|
|
8268
|
+
"type": "string",
|
|
8269
|
+
"description": "string"
|
|
8270
|
+
}
|
|
8271
|
+
]`,
|
|
8272
|
+
userSuffix: "Extract the public interface for this feature."
|
|
8273
|
+
},
|
|
8274
|
+
externalDependencies: {
|
|
8275
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, identify runtime dependencies on other features or internal modules that are NOT captured by the lineage (parent/children). These are cross-feature implementation dependencies — e.g. a feature that calls into another feature's API at runtime, or imports a shared utility that belongs to a distinct feature.
|
|
8276
|
+
|
|
8277
|
+
Return ONLY a valid JSON array of featureKey strings or relative file paths — no other text:
|
|
8278
|
+
["feat-2026-003", "src/utils/shared.ts"]`,
|
|
8279
|
+
userSuffix: "List the external runtime dependencies for this feature."
|
|
8280
|
+
},
|
|
8281
|
+
lastVerifiedDate: {
|
|
8282
|
+
system: `You are a software engineering analyst. Return today's date in YYYY-MM-DD format as the lastVerifiedDate — marking that this feature.json was reviewed and confirmed accurate right now. Return only the date string — nothing else.`,
|
|
8283
|
+
userSuffix: `Return today's date as the lastVerifiedDate.`
|
|
8284
|
+
},
|
|
8285
|
+
codeSnippets: {
|
|
8286
|
+
system: `You are a software engineering analyst. Given a feature.json and its source code, extract 2-5 critical one-liners or short code blocks that are the most important to preserve verbatim — glob patterns, key API calls, non-obvious configuration, or architectural pivots. These are the snippets someone would need to reconstruct this feature accurately.
|
|
8287
|
+
|
|
8288
|
+
Return ONLY a valid JSON array — no other text:
|
|
8289
|
+
[
|
|
8290
|
+
{
|
|
8291
|
+
"label": "string",
|
|
8292
|
+
"snippet": "string"
|
|
8293
|
+
}
|
|
8294
|
+
]`,
|
|
8295
|
+
userSuffix: "Extract the critical code snippets for this feature."
|
|
7677
8296
|
}
|
|
7678
8297
|
};
|
|
7679
8298
|
const JSON_FIELDS = new Set([
|
|
7680
8299
|
"decisions",
|
|
7681
8300
|
"knownLimitations",
|
|
7682
8301
|
"tags",
|
|
7683
|
-
"annotations"
|
|
8302
|
+
"annotations",
|
|
8303
|
+
"npmPackages",
|
|
8304
|
+
"publicInterface",
|
|
8305
|
+
"externalDependencies",
|
|
8306
|
+
"codeSnippets"
|
|
7684
8307
|
]);
|
|
7685
8308
|
const ALL_FILLABLE_FIELDS = [
|
|
7686
8309
|
"analysis",
|
|
@@ -7689,7 +8312,13 @@ const ALL_FILLABLE_FIELDS = [
|
|
|
7689
8312
|
"knownLimitations",
|
|
7690
8313
|
"tags",
|
|
7691
8314
|
"successCriteria",
|
|
7692
|
-
"domain"
|
|
8315
|
+
"domain",
|
|
8316
|
+
"componentFile",
|
|
8317
|
+
"npmPackages",
|
|
8318
|
+
"publicInterface",
|
|
8319
|
+
"externalDependencies",
|
|
8320
|
+
"lastVerifiedDate",
|
|
8321
|
+
"codeSnippets"
|
|
7693
8322
|
];
|
|
7694
8323
|
function getMissingFields(feature) {
|
|
7695
8324
|
return ALL_FILLABLE_FIELDS.filter((field) => {
|
|
@@ -7700,178 +8329,13 @@ function getMissingFields(feature) {
|
|
|
7700
8329
|
return false;
|
|
7701
8330
|
});
|
|
7702
8331
|
}
|
|
7703
|
-
const
|
|
7704
|
-
|
|
7705
|
-
|
|
7706
|
-
|
|
7707
|
-
|
|
7708
|
-
|
|
7709
|
-
|
|
7710
|
-
userSuffix: "Generate a Vitest test suite for this feature."
|
|
7711
|
-
},
|
|
7712
|
-
migration: {
|
|
7713
|
-
system: `You are an expert database engineer. You will be given a feature.json. Generate a database migration scaffold for the data model this feature implies. Use SQL with clear comments. Include both up (CREATE) and down (DROP) sections. Return only the SQL, no explanation.`,
|
|
7714
|
-
userSuffix: "Generate a database migration for this feature."
|
|
7715
|
-
},
|
|
7716
|
-
docs: {
|
|
7717
|
-
system: `You are a technical writer. You will be given a feature.json. Generate user-facing documentation for this feature. Write it clearly enough that any end user can understand it (not developer-focused). Cover: what it does, how to use it, and known limitations. Use Markdown. Return only the documentation, no explanation.`,
|
|
7718
|
-
userSuffix: "Generate user-facing documentation for this feature."
|
|
7719
|
-
}
|
|
7720
|
-
};
|
|
7721
|
-
async function fillFeature(options) {
|
|
7722
|
-
const { featureDir, dryRun = false, skipConfirm = false, model = "claude-sonnet-4-6" } = options;
|
|
7723
|
-
const featurePath = path.join(featureDir, "feature.json");
|
|
7724
|
-
let raw;
|
|
7725
|
-
try {
|
|
7726
|
-
raw = fs.readFileSync(featurePath, "utf-8");
|
|
7727
|
-
} catch {
|
|
7728
|
-
throw new Error(`No feature.json found at "${featurePath}"`);
|
|
7729
|
-
}
|
|
7730
|
-
let parsed;
|
|
7731
|
-
try {
|
|
7732
|
-
parsed = JSON.parse(raw);
|
|
7733
|
-
} catch {
|
|
7734
|
-
throw new Error(`Invalid JSON in "${featurePath}"`);
|
|
7735
|
-
}
|
|
7736
|
-
const result = validateFeature(parsed);
|
|
7737
|
-
if (!result.success) throw new Error(`Invalid feature.json: ${result.errors.join(", ")}`);
|
|
7738
|
-
const feature = result.data;
|
|
7739
|
-
const client = createClient();
|
|
7740
|
-
const fieldsToFill = options.fields ? options.fields : getMissingFields(feature);
|
|
7741
|
-
if (fieldsToFill.length === 0) {
|
|
7742
|
-
process$1.stdout.write(` All fields already filled for ${feature.featureKey}.\n`);
|
|
7743
|
-
return {
|
|
7744
|
-
applied: false,
|
|
7745
|
-
fields: [],
|
|
7746
|
-
patch: {}
|
|
7747
|
-
};
|
|
7748
|
-
}
|
|
7749
|
-
process$1.stdout.write(`\nAnalyzing ${feature.featureKey} (${feature.title})...\n`);
|
|
7750
|
-
const ctx = buildContext(featureDir, feature);
|
|
7751
|
-
const contextStr = contextToString(ctx);
|
|
7752
|
-
process$1.stdout.write(`Reading ${ctx.sourceFiles.length} source file(s)...\n`);
|
|
7753
|
-
process$1.stdout.write(`Generating with ${model}...\n`);
|
|
7754
|
-
const patch = {};
|
|
7755
|
-
const diffs = [];
|
|
7756
|
-
for (const field of fieldsToFill) {
|
|
7757
|
-
const prompt = FILL_PROMPTS[field];
|
|
7758
|
-
if (!prompt) continue;
|
|
7759
|
-
process$1.stdout.write(` → ${field}...`);
|
|
7760
|
-
try {
|
|
7761
|
-
const rawValue = await generateText(client, prompt.system, `${contextStr}\n\n${prompt.userSuffix}`, model);
|
|
7762
|
-
let value = rawValue.trim();
|
|
7763
|
-
if (JSON_FIELDS.has(field)) try {
|
|
7764
|
-
const jsonStr = rawValue.match(/```(?:json)?\s*([\s\S]*?)```/)?.[1] ?? rawValue;
|
|
7765
|
-
value = JSON.parse(jsonStr.trim());
|
|
7766
|
-
} catch {
|
|
7767
|
-
process$1.stderr.write(`\n Warning: could not parse JSON for "${field}", storing as string\n`);
|
|
7768
|
-
}
|
|
7769
|
-
patch[field] = value;
|
|
7770
|
-
const existing = feature[field];
|
|
7771
|
-
const wasEmpty = existing === void 0 || existing === null || typeof existing === "string" && existing.trim().length === 0 || Array.isArray(existing) && existing.length === 0;
|
|
7772
|
-
diffs.push({
|
|
7773
|
-
field,
|
|
7774
|
-
wasEmpty,
|
|
7775
|
-
proposed: value
|
|
7776
|
-
});
|
|
7777
|
-
process$1.stdout.write(" done\n");
|
|
7778
|
-
} catch (err) {
|
|
7779
|
-
process$1.stdout.write(" failed\n");
|
|
7780
|
-
process$1.stderr.write(` Error generating "${field}": ${err instanceof Error ? err.message : String(err)}\n`);
|
|
7781
|
-
}
|
|
7782
|
-
}
|
|
7783
|
-
if (diffs.length === 0) return {
|
|
7784
|
-
applied: false,
|
|
7785
|
-
fields: [],
|
|
7786
|
-
patch: {}
|
|
7787
|
-
};
|
|
7788
|
-
printDiff(diffs);
|
|
7789
|
-
if (dryRun) {
|
|
7790
|
-
process$1.stdout.write(" [dry-run] No changes written.\n\n");
|
|
7791
|
-
return {
|
|
7792
|
-
applied: false,
|
|
7793
|
-
fields: Object.keys(patch),
|
|
7794
|
-
patch
|
|
7795
|
-
};
|
|
7796
|
-
}
|
|
7797
|
-
if (!skipConfirm) {
|
|
7798
|
-
const answer = await askUser("Apply? [Y]es / [n]o / [f]ield-by-field: ");
|
|
7799
|
-
if (answer.toLowerCase() === "n") {
|
|
7800
|
-
process$1.stdout.write(" Cancelled.\n");
|
|
7801
|
-
return {
|
|
7802
|
-
applied: false,
|
|
7803
|
-
fields: Object.keys(patch),
|
|
7804
|
-
patch
|
|
7805
|
-
};
|
|
7806
|
-
}
|
|
7807
|
-
if (answer.toLowerCase() === "f") {
|
|
7808
|
-
const approved = {};
|
|
7809
|
-
for (const [field, value] of Object.entries(patch)) if ((await askUser(` Apply "${field}"? [Y/n]: `)).toLowerCase() !== "n") approved[field] = value;
|
|
7810
|
-
for (const key of Object.keys(patch)) if (!(key in approved)) delete patch[key];
|
|
7811
|
-
Object.assign(patch, approved);
|
|
7812
|
-
}
|
|
7813
|
-
}
|
|
7814
|
-
const updated = {
|
|
7815
|
-
...parsed,
|
|
7816
|
-
...patch
|
|
7817
|
-
};
|
|
7818
|
-
fs.writeFileSync(featurePath, JSON.stringify(updated, null, 2) + "\n", "utf-8");
|
|
7819
|
-
const count = Object.keys(patch).length;
|
|
7820
|
-
process$1.stdout.write(`\n ✓ Updated ${feature.featureKey} — ${count} field${count === 1 ? "" : "s"} written.\n\n`);
|
|
7821
|
-
return {
|
|
7822
|
-
applied: true,
|
|
7823
|
-
fields: Object.keys(patch),
|
|
7824
|
-
patch
|
|
7825
|
-
};
|
|
7826
|
-
}
|
|
7827
|
-
async function genFromFeature(options) {
|
|
7828
|
-
const { featureDir, type, dryRun = false, model = "claude-sonnet-4-6" } = options;
|
|
7829
|
-
const featurePath = path.join(featureDir, "feature.json");
|
|
7830
|
-
let raw;
|
|
7831
|
-
try {
|
|
7832
|
-
raw = fs.readFileSync(featurePath, "utf-8");
|
|
7833
|
-
} catch {
|
|
7834
|
-
throw new Error(`No feature.json found at "${featurePath}"`);
|
|
7835
|
-
}
|
|
7836
|
-
const result = validateFeature(JSON.parse(raw));
|
|
7837
|
-
if (!result.success) throw new Error(`Invalid feature.json: ${result.errors.join(", ")}`);
|
|
7838
|
-
const feature = result.data;
|
|
7839
|
-
const promptConfig = GEN_PROMPTS[type];
|
|
7840
|
-
if (!promptConfig) throw new Error(`Unknown generation type: "${type}". Available: component, test, migration, docs`);
|
|
7841
|
-
const client = createClient();
|
|
7842
|
-
process$1.stdout.write(`\nGenerating ${type} for ${feature.featureKey} (${feature.title})...\n`);
|
|
7843
|
-
process$1.stdout.write(`Model: ${model}\n\n`);
|
|
7844
|
-
const contextStr = contextToString(buildContext(featureDir, feature));
|
|
7845
|
-
const generated = await generateText(client, promptConfig.system, `${contextStr}\n\n${promptConfig.userSuffix}`, model);
|
|
7846
|
-
if (dryRun) {
|
|
7847
|
-
process$1.stdout.write(generated);
|
|
7848
|
-
process$1.stdout.write("\n\n [dry-run] No file written.\n");
|
|
7849
|
-
return generated;
|
|
7850
|
-
}
|
|
7851
|
-
const outFile = options.outFile ?? path.join(featureDir, `${feature.featureKey}${typeToExt(type)}`);
|
|
7852
|
-
fs.writeFileSync(outFile, generated, "utf-8");
|
|
7853
|
-
process$1.stdout.write(` ✓ Written to ${outFile}\n\n`);
|
|
7854
|
-
return generated;
|
|
7855
|
-
}
|
|
7856
|
-
function typeToExt(type) {
|
|
7857
|
-
return {
|
|
7858
|
-
component: ".tsx",
|
|
7859
|
-
test: ".test.ts",
|
|
7860
|
-
migration: ".sql",
|
|
7861
|
-
docs: ".md"
|
|
7862
|
-
}[type] ?? ".txt";
|
|
7863
|
-
}
|
|
7864
|
-
function askUser(question) {
|
|
7865
|
-
return new Promise((resolve) => {
|
|
7866
|
-
const rl = readline.createInterface({
|
|
7867
|
-
input: process$1.stdin,
|
|
7868
|
-
output: process$1.stdout
|
|
7869
|
-
});
|
|
7870
|
-
rl.question(question, (answer) => {
|
|
7871
|
-
rl.close();
|
|
7872
|
-
resolve(answer.trim() || "y");
|
|
7873
|
-
});
|
|
7874
|
-
});
|
|
8332
|
+
const PROMPT_LOG_FILENAME = "prompt.log.jsonl";
|
|
8333
|
+
/** Append one or more entries to the feature's prompt.log.jsonl. Creates the file if absent. */
|
|
8334
|
+
function appendPromptLog(featureDir, entries) {
|
|
8335
|
+
if (entries.length === 0) return;
|
|
8336
|
+
const logPath = path.join(featureDir, PROMPT_LOG_FILENAME);
|
|
8337
|
+
const lines = entries.map((e) => JSON.stringify(e)).join("\n") + "\n";
|
|
8338
|
+
fs.appendFileSync(logPath, lines, "utf-8");
|
|
7875
8339
|
}
|
|
7876
8340
|
|
|
7877
8341
|
//#endregion
|
|
@@ -7882,65 +8346,6 @@ const server = new Server({
|
|
|
7882
8346
|
version: "1.0.0"
|
|
7883
8347
|
}, { capabilities: { tools: {} } });
|
|
7884
8348
|
server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
7885
|
-
{
|
|
7886
|
-
name: "fill_feature",
|
|
7887
|
-
description: "Fill missing fields in a feature.json using AI analysis of the code. Returns proposed changes and optionally applies them.",
|
|
7888
|
-
inputSchema: {
|
|
7889
|
-
type: "object",
|
|
7890
|
-
properties: {
|
|
7891
|
-
path: {
|
|
7892
|
-
type: "string",
|
|
7893
|
-
description: "Absolute or relative path to the feature folder (contains feature.json)"
|
|
7894
|
-
},
|
|
7895
|
-
fields: {
|
|
7896
|
-
type: "array",
|
|
7897
|
-
items: { type: "string" },
|
|
7898
|
-
description: "Specific fields to fill. Omit to fill all missing fields. Options: analysis, decisions, implementation, knownLimitations, tags, successCriteria, domain"
|
|
7899
|
-
},
|
|
7900
|
-
dryRun: {
|
|
7901
|
-
type: "boolean",
|
|
7902
|
-
description: "If true, returns proposed changes without writing to disk"
|
|
7903
|
-
},
|
|
7904
|
-
model: {
|
|
7905
|
-
type: "string",
|
|
7906
|
-
description: "Claude model to use (default: claude-sonnet-4-6)"
|
|
7907
|
-
}
|
|
7908
|
-
},
|
|
7909
|
-
required: ["path"]
|
|
7910
|
-
}
|
|
7911
|
-
},
|
|
7912
|
-
{
|
|
7913
|
-
name: "generate_from_feature",
|
|
7914
|
-
description: "Generate code artifacts from a feature.json — component, tests, migration, or docs.",
|
|
7915
|
-
inputSchema: {
|
|
7916
|
-
type: "object",
|
|
7917
|
-
properties: {
|
|
7918
|
-
path: {
|
|
7919
|
-
type: "string",
|
|
7920
|
-
description: "Absolute or relative path to the feature folder"
|
|
7921
|
-
},
|
|
7922
|
-
type: {
|
|
7923
|
-
type: "string",
|
|
7924
|
-
enum: [
|
|
7925
|
-
"component",
|
|
7926
|
-
"test",
|
|
7927
|
-
"migration",
|
|
7928
|
-
"docs"
|
|
7929
|
-
],
|
|
7930
|
-
description: "What to generate"
|
|
7931
|
-
},
|
|
7932
|
-
dryRun: {
|
|
7933
|
-
type: "boolean",
|
|
7934
|
-
description: "If true, returns generated content without writing to disk"
|
|
7935
|
-
},
|
|
7936
|
-
model: {
|
|
7937
|
-
type: "string",
|
|
7938
|
-
description: "Claude model to use (default: claude-sonnet-4-6)"
|
|
7939
|
-
}
|
|
7940
|
-
},
|
|
7941
|
-
required: ["path", "type"]
|
|
7942
|
-
}
|
|
7943
|
-
},
|
|
7944
8349
|
{
|
|
7945
8350
|
name: "blame_file",
|
|
7946
8351
|
description: "Show which feature owns a file — returns the feature summary.",
|
|
@@ -7955,7 +8360,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7955
8360
|
},
|
|
7956
8361
|
{
|
|
7957
8362
|
name: "search_features",
|
|
7958
|
-
description: "Search all features in the workspace by key, title, tags, or
|
|
8363
|
+
description: "Search all features in the workspace by key, title, tags, problem, analysis, implementation, or decisions text.",
|
|
7959
8364
|
inputSchema: {
|
|
7960
8365
|
type: "object",
|
|
7961
8366
|
properties: {
|
|
@@ -7972,6 +8377,14 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7972
8377
|
"deprecated"
|
|
7973
8378
|
],
|
|
7974
8379
|
description: "Filter by status (optional)"
|
|
8380
|
+
},
|
|
8381
|
+
domain: {
|
|
8382
|
+
type: "string",
|
|
8383
|
+
description: "Filter by domain (optional)"
|
|
8384
|
+
},
|
|
8385
|
+
path: {
|
|
8386
|
+
type: "string",
|
|
8387
|
+
description: "Directory to scan (default: workspace root)"
|
|
7975
8388
|
}
|
|
7976
8389
|
},
|
|
7977
8390
|
required: ["query"]
|
|
@@ -7989,7 +8402,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
7989
8402
|
},
|
|
7990
8403
|
featureKey: {
|
|
7991
8404
|
type: "string",
|
|
7992
|
-
description: "Feature key (e.g. feat-2026-042)"
|
|
8405
|
+
description: "Feature key (e.g. feat-2026-042). Omit to auto-generate the next key from the workspace counter — recommended to avoid duplicates."
|
|
7993
8406
|
},
|
|
7994
8407
|
title: {
|
|
7995
8408
|
type: "string",
|
|
@@ -8012,7 +8425,6 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8012
8425
|
},
|
|
8013
8426
|
required: [
|
|
8014
8427
|
"dir",
|
|
8015
|
-
"featureKey",
|
|
8016
8428
|
"title",
|
|
8017
8429
|
"problem"
|
|
8018
8430
|
]
|
|
@@ -8023,10 +8435,16 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8023
8435
|
description: "Show the parent/child lineage tree for a feature key.",
|
|
8024
8436
|
inputSchema: {
|
|
8025
8437
|
type: "object",
|
|
8026
|
-
properties: {
|
|
8027
|
-
|
|
8028
|
-
|
|
8029
|
-
|
|
8438
|
+
properties: {
|
|
8439
|
+
featureKey: {
|
|
8440
|
+
type: "string",
|
|
8441
|
+
description: "Feature key to look up"
|
|
8442
|
+
},
|
|
8443
|
+
path: {
|
|
8444
|
+
type: "string",
|
|
8445
|
+
description: "Directory to scan (default: workspace root)"
|
|
8446
|
+
}
|
|
8447
|
+
},
|
|
8030
8448
|
required: ["featureKey"]
|
|
8031
8449
|
}
|
|
8032
8450
|
},
|
|
@@ -8035,10 +8453,16 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8035
8453
|
description: "Check all features for completeness and required fields.",
|
|
8036
8454
|
inputSchema: {
|
|
8037
8455
|
type: "object",
|
|
8038
|
-
properties: {
|
|
8039
|
-
|
|
8040
|
-
|
|
8041
|
-
|
|
8456
|
+
properties: {
|
|
8457
|
+
path: {
|
|
8458
|
+
type: "string",
|
|
8459
|
+
description: "Directory to scan (default: workspace root)"
|
|
8460
|
+
},
|
|
8461
|
+
revisionWarnings: {
|
|
8462
|
+
type: "boolean",
|
|
8463
|
+
description: "Include warnings for features with no revision entries (default: true). Set false during migration of existing repos."
|
|
8464
|
+
}
|
|
8465
|
+
}
|
|
8042
8466
|
}
|
|
8043
8467
|
},
|
|
8044
8468
|
{
|
|
@@ -8055,7 +8479,7 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8055
8479
|
},
|
|
8056
8480
|
{
|
|
8057
8481
|
name: "write_feature_fields",
|
|
8058
|
-
description: "Patch a feature.json with new field values. Use this after read_feature_context — write the fields you generated back to disk.",
|
|
8482
|
+
description: "Patch a feature.json with new field values. Use this after read_feature_context — write the fields you generated back to disk. If you are changing intent-critical fields (problem, analysis, implementation, decisions, successCriteria), pass a revision object with author and reason. After writing, call advance_feature to check if the feature is ready to transition.",
|
|
8059
8483
|
inputSchema: {
|
|
8060
8484
|
type: "object",
|
|
8061
8485
|
properties: {
|
|
@@ -8066,10 +8490,263 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [
|
|
|
8066
8490
|
fields: {
|
|
8067
8491
|
type: "object",
|
|
8068
8492
|
description: "Key-value pairs to merge into feature.json. Values may be strings, arrays, or objects depending on the field."
|
|
8493
|
+
},
|
|
8494
|
+
revision: {
|
|
8495
|
+
type: "object",
|
|
8496
|
+
description: "Required when changing intent-critical fields (problem, analysis, implementation, decisions, successCriteria). Appended to the revisions array.",
|
|
8497
|
+
properties: {
|
|
8498
|
+
author: {
|
|
8499
|
+
type: "string",
|
|
8500
|
+
description: "Who is making the change."
|
|
8501
|
+
},
|
|
8502
|
+
reason: {
|
|
8503
|
+
type: "string",
|
|
8504
|
+
description: "Why these fields are being changed."
|
|
8505
|
+
}
|
|
8506
|
+
},
|
|
8507
|
+
required: ["author", "reason"]
|
|
8069
8508
|
}
|
|
8070
8509
|
},
|
|
8071
8510
|
required: ["path", "fields"]
|
|
8072
8511
|
}
|
|
8512
|
+
},
|
|
8513
|
+
{
|
|
8514
|
+
name: "advance_feature",
|
|
8515
|
+
description: "Validate and transition a feature to a new status. Call this after write_feature_fields — it checks that required fields are filled for the target status and writes the new status. If fields are missing it returns exactly which ones so you can ask the user or fill them first. Transitions: draft→active (requires analysis, implementation, decisions, successCriteria), active→frozen (requires all fields + tags + knownLimitations), frozen→active (reopen — requires a reason describing what changed), any→deprecated.",
|
|
8516
|
+
inputSchema: {
|
|
8517
|
+
type: "object",
|
|
8518
|
+
properties: {
|
|
8519
|
+
path: {
|
|
8520
|
+
type: "string",
|
|
8521
|
+
description: "Absolute or relative path to the feature folder"
|
|
8522
|
+
},
|
|
8523
|
+
to: {
|
|
8524
|
+
type: "string",
|
|
8525
|
+
enum: [
|
|
8526
|
+
"active",
|
|
8527
|
+
"frozen",
|
|
8528
|
+
"deprecated"
|
|
8529
|
+
],
|
|
8530
|
+
description: "Target status"
|
|
8531
|
+
},
|
|
8532
|
+
reason: {
|
|
8533
|
+
type: "string",
|
|
8534
|
+
description: "Required when reopening (frozen→active). Describe what changed."
|
|
8535
|
+
}
|
|
8536
|
+
},
|
|
8537
|
+
required: ["path", "to"]
|
|
8538
|
+
}
|
|
8539
|
+
},
|
|
8540
|
+
{
|
|
8541
|
+
name: "spawn_child_feature",
|
|
8542
|
+
description: "Spawn a child feature from a parent — use when a bug is found, a subtask is extracted, or scope is split. Creates the child feature.json with lineage.parent set and patches the parent's lineage.children. After spawning, call read_feature_context on the child path to begin its lifecycle.",
|
|
8543
|
+
inputSchema: {
|
|
8544
|
+
type: "object",
|
|
8545
|
+
properties: {
|
|
8546
|
+
parentPath: {
|
|
8547
|
+
type: "string",
|
|
8548
|
+
description: "Absolute or relative path to the parent feature folder"
|
|
8549
|
+
},
|
|
8550
|
+
dir: {
|
|
8551
|
+
type: "string",
|
|
8552
|
+
description: "Directory to create the child feature in"
|
|
8553
|
+
},
|
|
8554
|
+
title: {
|
|
8555
|
+
type: "string",
|
|
8556
|
+
description: "Child feature title"
|
|
8557
|
+
},
|
|
8558
|
+
problem: {
|
|
8559
|
+
type: "string",
|
|
8560
|
+
description: "Problem the child addresses"
|
|
8561
|
+
},
|
|
8562
|
+
spawnReason: {
|
|
8563
|
+
type: "string",
|
|
8564
|
+
description: "Why this child was spawned (e.g. \"bug: login fails on Safari\", \"scope split: extract payment flow\")"
|
|
8565
|
+
}
|
|
8566
|
+
},
|
|
8567
|
+
required: [
|
|
8568
|
+
"parentPath",
|
|
8569
|
+
"dir",
|
|
8570
|
+
"title",
|
|
8571
|
+
"problem",
|
|
8572
|
+
"spawnReason"
|
|
8573
|
+
]
|
|
8574
|
+
}
|
|
8575
|
+
},
|
|
8576
|
+
{
|
|
8577
|
+
name: "get_feature_status",
|
|
8578
|
+
description: "Lightweight orientation tool — returns the current lifecycle state of a feature: status, filled vs missing fields, stale fields flagged from reopens, valid next transitions, and the exact next tool to call. Use this whenever picking up a feature mid-session to know where it stands before taking action.",
|
|
8579
|
+
inputSchema: {
|
|
8580
|
+
type: "object",
|
|
8581
|
+
properties: { path: {
|
|
8582
|
+
type: "string",
|
|
8583
|
+
description: "Absolute or relative path to the feature folder"
|
|
8584
|
+
} },
|
|
8585
|
+
required: ["path"]
|
|
8586
|
+
}
|
|
8587
|
+
},
|
|
8588
|
+
{
|
|
8589
|
+
name: "extract_feature_from_code",
|
|
8590
|
+
description: "Inverse of create_feature — given a directory with existing code but NO feature.json, reads all source files and returns instructions for Claude to generate a complete feature.json proposal. Use this to onboard legacy code into LAC. After calling this tool, generate the fields, then call create_feature followed by write_feature_fields.",
|
|
8591
|
+
inputSchema: {
|
|
8592
|
+
type: "object",
|
|
8593
|
+
properties: {
|
|
8594
|
+
path: {
|
|
8595
|
+
type: "string",
|
|
8596
|
+
description: "Directory containing source code (must NOT already have a feature.json)"
|
|
8597
|
+
},
|
|
8598
|
+
maxFileSize: {
|
|
8599
|
+
type: "number",
|
|
8600
|
+
description: "Maximum characters to read per file before truncating (default: 8000). Increase for large files."
|
|
8601
|
+
}
|
|
8602
|
+
},
|
|
8603
|
+
required: ["path"]
|
|
8604
|
+
}
|
|
8605
|
+
},
|
|
8606
|
+
{
|
|
8607
|
+
name: "feature_changelog",
|
|
8608
|
+
description: "Generate a chronological changelog for a feature — shows status transitions (from statusHistory), reopens, and spawned children in timeline form. Use this to understand the full history of a feature.",
|
|
8609
|
+
inputSchema: {
|
|
8610
|
+
type: "object",
|
|
8611
|
+
properties: { path: {
|
|
8612
|
+
type: "string",
|
|
8613
|
+
description: "Absolute or relative path to the feature folder"
|
|
8614
|
+
} },
|
|
8615
|
+
required: ["path"]
|
|
8616
|
+
}
|
|
8617
|
+
},
|
|
8618
|
+
{
|
|
8619
|
+
name: "roadmap_view",
|
|
8620
|
+
description: "Return a structured overview of all features in the workspace grouped by status (active → draft → frozen → deprecated) and sorted by priority. Shows missing fields and child counts at a glance. Use this to orient before a session or plan what to work on.",
|
|
8621
|
+
inputSchema: {
|
|
8622
|
+
type: "object",
|
|
8623
|
+
properties: { path: {
|
|
8624
|
+
type: "string",
|
|
8625
|
+
description: "Directory to scan (default: workspace root)"
|
|
8626
|
+
} }
|
|
8627
|
+
}
|
|
8628
|
+
},
|
|
8629
|
+
{
|
|
8630
|
+
name: "suggest_split",
|
|
8631
|
+
description: "Analyze a feature and recommend whether it should be broken into child features. Reads source files, detects split signals (file count, mixed domains, \"and\" in problem statement), and returns context + instructions for Claude to propose a split and call spawn_child_feature.",
|
|
8632
|
+
inputSchema: {
|
|
8633
|
+
type: "object",
|
|
8634
|
+
properties: { path: {
|
|
8635
|
+
type: "string",
|
|
8636
|
+
description: "Absolute or relative path to the feature folder"
|
|
8637
|
+
} },
|
|
8638
|
+
required: ["path"]
|
|
8639
|
+
}
|
|
8640
|
+
},
|
|
8641
|
+
{
|
|
8642
|
+
name: "feature_summary_for_pr",
|
|
8643
|
+
description: "Generate a ready-to-paste pull request description from a feature.json — includes problem, what was built, key decisions, known limitations, success criteria, and lineage. Use this when opening a PR for a feature.",
|
|
8644
|
+
inputSchema: {
|
|
8645
|
+
type: "object",
|
|
8646
|
+
properties: { path: {
|
|
8647
|
+
type: "string",
|
|
8648
|
+
description: "Absolute or relative path to the feature folder"
|
|
8649
|
+
} },
|
|
8650
|
+
required: ["path"]
|
|
8651
|
+
}
|
|
8652
|
+
},
|
|
8653
|
+
{
|
|
8654
|
+
name: "audit_decisions",
|
|
8655
|
+
description: "Scan all features and surface technical debt in decisions: features missing decisions, decisions with risky language (revisit/temporary/hack/workaround), and features with suspiciously similar titles in the same domain that may be duplicates. Run this periodically to keep the workspace healthy.",
|
|
8656
|
+
inputSchema: {
|
|
8657
|
+
type: "object",
|
|
8658
|
+
properties: { path: {
|
|
8659
|
+
type: "string",
|
|
8660
|
+
description: "Directory to scan (default: workspace root)"
|
|
8661
|
+
} }
|
|
8662
|
+
}
|
|
8663
|
+
},
|
|
8664
|
+
{
|
|
8665
|
+
name: "feature_similarity",
|
|
8666
|
+
description: "Find features semantically similar to a given one — same domain, shared tags, or overlapping keywords in title/problem. Use this before create_feature to avoid duplication, or to discover related work.",
|
|
8667
|
+
inputSchema: {
|
|
8668
|
+
type: "object",
|
|
8669
|
+
properties: { path: {
|
|
8670
|
+
type: "string",
|
|
8671
|
+
description: "Absolute or relative path to the feature folder to compare against"
|
|
8672
|
+
} },
|
|
8673
|
+
required: ["path"]
|
|
8674
|
+
}
|
|
8675
|
+
},
|
|
8676
|
+
{
|
|
8677
|
+
name: "time_travel",
|
|
8678
|
+
description: "Show what a feature.json looked like at a specific point in git history. Call with just path to see the full commit history for the file. Call with path + date (YYYY-MM-DD) or commit (SHA) to view that specific version.",
|
|
8679
|
+
inputSchema: {
|
|
8680
|
+
type: "object",
|
|
8681
|
+
properties: {
|
|
8682
|
+
path: {
|
|
8683
|
+
type: "string",
|
|
8684
|
+
description: "Absolute or relative path to the feature folder"
|
|
8685
|
+
},
|
|
8686
|
+
date: {
|
|
8687
|
+
type: "string",
|
|
8688
|
+
description: "YYYY-MM-DD — show the most recent version at or before this date"
|
|
8689
|
+
},
|
|
8690
|
+
commit: {
|
|
8691
|
+
type: "string",
|
|
8692
|
+
description: "Git commit SHA to view (full or short)"
|
|
8693
|
+
}
|
|
8694
|
+
},
|
|
8695
|
+
required: ["path"]
|
|
8696
|
+
}
|
|
8697
|
+
},
|
|
8698
|
+
{
|
|
8699
|
+
name: "cross_feature_impact",
|
|
8700
|
+
description: "Given a source file, find all features whose code imports or references it. Use this before refactoring a shared utility, changing an interface, or deleting a file — shows the blast radius across all tracked features.",
|
|
8701
|
+
inputSchema: {
|
|
8702
|
+
type: "object",
|
|
8703
|
+
properties: { file: {
|
|
8704
|
+
type: "string",
|
|
8705
|
+
description: "Absolute or relative path to the source file to analyze"
|
|
8706
|
+
} },
|
|
8707
|
+
required: ["file"]
|
|
8708
|
+
}
|
|
8709
|
+
},
|
|
8710
|
+
{
|
|
8711
|
+
name: "summarize_workspace",
|
|
8712
|
+
description: "Summarize the entire codebase by reading only feature.json files and READMEs — no source code. Returns a structured overview: project purpose, features grouped by domain, key decisions, and stats. Ideal for fast orientation before a coding session or for injecting project context into an AI prompt.",
|
|
8713
|
+
inputSchema: {
|
|
8714
|
+
type: "object",
|
|
8715
|
+
properties: {
|
|
8716
|
+
path: {
|
|
8717
|
+
type: "string",
|
|
8718
|
+
description: "Directory to scan (default: workspace root)"
|
|
8719
|
+
},
|
|
8720
|
+
format: {
|
|
8721
|
+
type: "string",
|
|
8722
|
+
enum: ["markdown", "json"],
|
|
8723
|
+
description: "Output format: \"markdown\" (default) or \"json\""
|
|
8724
|
+
}
|
|
8725
|
+
},
|
|
8726
|
+
required: []
|
|
8727
|
+
}
|
|
8728
|
+
},
|
|
8729
|
+
{
|
|
8730
|
+
name: "extract_all_features",
|
|
8731
|
+
description: "Scan a repository and return a manifest of all directories that should have feature.json files but do not yet. Useful for onboarding a legacy or external repo into LAC. After calling this tool, iterate over the returned candidates and call extract_feature_from_code on each one.",
|
|
8732
|
+
inputSchema: {
|
|
8733
|
+
type: "object",
|
|
8734
|
+
properties: {
|
|
8735
|
+
path: {
|
|
8736
|
+
type: "string",
|
|
8737
|
+
description: "Root directory to scan (default: workspace root)"
|
|
8738
|
+
},
|
|
8739
|
+
strategy: {
|
|
8740
|
+
type: "string",
|
|
8741
|
+
enum: ["module", "directory"],
|
|
8742
|
+
description: "\"module\" (default) — directories containing package.json, go.mod, Cargo.toml, index.ts, etc. \"directory\" — every directory that contains source files."
|
|
8743
|
+
},
|
|
8744
|
+
depth: {
|
|
8745
|
+
type: "number",
|
|
8746
|
+
description: "Maximum directory depth to descend (default: 4 for module, 2 for directory)"
|
|
8747
|
+
}
|
|
8748
|
+
}
|
|
8749
|
+
}
|
|
8073
8750
|
}
|
|
8074
8751
|
] }));
|
|
8075
8752
|
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
@@ -8077,131 +8754,668 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8077
8754
|
const a = args ?? {};
|
|
8078
8755
|
try {
|
|
8079
8756
|
switch (name) {
|
|
8080
|
-
case "
|
|
8081
|
-
const
|
|
8082
|
-
|
|
8083
|
-
|
|
8084
|
-
|
|
8085
|
-
|
|
8086
|
-
|
|
8757
|
+
case "blame_file": {
|
|
8758
|
+
const filePath = resolvePath(String(a.file));
|
|
8759
|
+
const feature = findNearestFeature(path.dirname(filePath));
|
|
8760
|
+
if (!feature) return { content: [{
|
|
8761
|
+
type: "text",
|
|
8762
|
+
text: `No feature.json found for "${a.file}". The file may not be under any tracked feature directory.`
|
|
8763
|
+
}] };
|
|
8764
|
+
return { content: [{
|
|
8765
|
+
type: "text",
|
|
8766
|
+
text: `File : ${String(a.file)}\n${formatFeatureSummary(feature)}`
|
|
8767
|
+
}] };
|
|
8768
|
+
}
|
|
8769
|
+
case "search_features": {
|
|
8770
|
+
const query = String(a.query).toLowerCase();
|
|
8771
|
+
const statusFilter = a.status;
|
|
8772
|
+
const domainFilter = a.domain;
|
|
8773
|
+
const matches = scanAllFeatures(a.path ? resolvePath(String(a.path)) : workspaceRoot).filter(({ feature }) => {
|
|
8774
|
+
if (statusFilter && feature.status !== statusFilter) return false;
|
|
8775
|
+
if (domainFilter && feature.domain !== domainFilter) return false;
|
|
8776
|
+
const decisionsText = (feature.decisions ?? []).map((d) => d.decision + " " + d.rationale).join(" ");
|
|
8777
|
+
return [
|
|
8778
|
+
feature.featureKey,
|
|
8779
|
+
feature.title,
|
|
8780
|
+
feature.problem,
|
|
8781
|
+
feature.analysis ?? "",
|
|
8782
|
+
feature.implementation ?? "",
|
|
8783
|
+
decisionsText,
|
|
8784
|
+
...feature.tags ?? []
|
|
8785
|
+
].join(" ").toLowerCase().includes(query);
|
|
8786
|
+
});
|
|
8787
|
+
if (matches.length === 0) return { content: [{
|
|
8788
|
+
type: "text",
|
|
8789
|
+
text: `No features found matching "${a.query}".`
|
|
8790
|
+
}] };
|
|
8791
|
+
const lines = matches.map(({ feature }) => `${statusIcon(feature.status)} ${feature.featureKey.padEnd(18)} ${feature.status.padEnd(12)} ${feature.title}\n ${feature.problem.slice(0, 80)}`);
|
|
8792
|
+
return { content: [{
|
|
8793
|
+
type: "text",
|
|
8794
|
+
text: `Found ${matches.length} feature(s):\n\n${lines.join("\n\n")}`
|
|
8795
|
+
}] };
|
|
8796
|
+
}
|
|
8797
|
+
case "create_feature": {
|
|
8798
|
+
const dir = resolvePath(String(a.dir));
|
|
8799
|
+
const featurePath = path.join(dir, "feature.json");
|
|
8800
|
+
if (fs.existsSync(featurePath)) return { content: [{
|
|
8801
|
+
type: "text",
|
|
8802
|
+
text: `feature.json already exists at "${featurePath}".`
|
|
8803
|
+
}] };
|
|
8804
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
8805
|
+
let featureKey;
|
|
8806
|
+
if (a.featureKey) {
|
|
8807
|
+
featureKey = String(a.featureKey);
|
|
8808
|
+
registerFeatureKey(dir, featureKey);
|
|
8809
|
+
} else featureKey = generateFeatureKey(dir);
|
|
8810
|
+
const feature = {
|
|
8811
|
+
featureKey,
|
|
8812
|
+
title: String(a.title),
|
|
8813
|
+
status: String(a.status ?? "draft"),
|
|
8814
|
+
problem: String(a.problem),
|
|
8815
|
+
schemaVersion: 1
|
|
8816
|
+
};
|
|
8817
|
+
fs.writeFileSync(featurePath, JSON.stringify(feature, null, 2) + "\n", "utf-8");
|
|
8818
|
+
return { content: [{
|
|
8819
|
+
type: "text",
|
|
8820
|
+
text: `Created "${featureKey}" at "${featurePath}" (${feature.status}).\n\nNext: call read_feature_context on "${dir}" to analyze the code and fill missing fields, then advance_feature when ready.`
|
|
8821
|
+
}] };
|
|
8822
|
+
}
|
|
8823
|
+
case "get_lineage": {
|
|
8824
|
+
const featureKey = String(a.featureKey);
|
|
8825
|
+
const features = scanAllFeatures(a.path ? resolvePath(String(a.path)) : workspaceRoot);
|
|
8826
|
+
const featureMap = new Map(features.map(({ feature }) => [feature.featureKey, feature]));
|
|
8827
|
+
const root = featureMap.get(featureKey);
|
|
8828
|
+
if (!root) return { content: [{
|
|
8829
|
+
type: "text",
|
|
8830
|
+
text: `Feature "${featureKey}" not found.`
|
|
8831
|
+
}] };
|
|
8832
|
+
const childrenOf = /* @__PURE__ */ new Map();
|
|
8833
|
+
for (const { feature } of features) {
|
|
8834
|
+
const parent = feature.lineage?.parent;
|
|
8835
|
+
if (parent) {
|
|
8836
|
+
const existing = childrenOf.get(parent) ?? [];
|
|
8837
|
+
existing.push(feature.featureKey);
|
|
8838
|
+
childrenOf.set(parent, existing);
|
|
8839
|
+
}
|
|
8840
|
+
}
|
|
8841
|
+
return { content: [{
|
|
8842
|
+
type: "text",
|
|
8843
|
+
text: buildLineageTree(root, featureMap, childrenOf, 0)
|
|
8844
|
+
}] };
|
|
8845
|
+
}
|
|
8846
|
+
case "lint_workspace": {
|
|
8847
|
+
const scanDir = a.path ? resolvePath(String(a.path)) : workspaceRoot;
|
|
8848
|
+
const revisionWarnings = a.revisionWarnings !== false;
|
|
8849
|
+
const features = scanAllFeatures(scanDir);
|
|
8850
|
+
const featureKeys = new Set(features.map(({ feature }) => feature.featureKey));
|
|
8851
|
+
const INTENT_CRITICAL_LINT = [
|
|
8852
|
+
"problem",
|
|
8853
|
+
"analysis",
|
|
8854
|
+
"implementation",
|
|
8855
|
+
"decisions",
|
|
8856
|
+
"successCriteria"
|
|
8857
|
+
];
|
|
8858
|
+
const results = features.map(({ feature, filePath }) => {
|
|
8859
|
+
const issues = [];
|
|
8860
|
+
const warnings = [];
|
|
8861
|
+
const raw = feature;
|
|
8862
|
+
if (!feature.problem?.trim()) issues.push("missing problem");
|
|
8863
|
+
if (feature.status === "active") {
|
|
8864
|
+
if (!feature.analysis?.trim()) issues.push("missing analysis");
|
|
8865
|
+
if (!feature.implementation?.trim()) issues.push("missing implementation");
|
|
8866
|
+
if (!feature.decisions?.length) issues.push("no decisions recorded");
|
|
8867
|
+
}
|
|
8868
|
+
if (feature.lineage?.parent && !featureKeys.has(feature.lineage.parent)) issues.push(`orphaned: parent "${feature.lineage.parent}" not found`);
|
|
8869
|
+
for (const child of feature.lineage?.children ?? []) if (!featureKeys.has(child)) issues.push(`broken child ref: "${child}" not found`);
|
|
8870
|
+
if (raw.superseded_by && !featureKeys.has(String(raw.superseded_by))) issues.push(`broken superseded_by ref: "${raw.superseded_by}" not found`);
|
|
8871
|
+
if (raw.merged_into && !featureKeys.has(String(raw.merged_into))) issues.push(`broken merged_into ref: "${raw.merged_into}" not found`);
|
|
8872
|
+
for (const key of raw.merged_from ?? []) if (!featureKeys.has(key)) issues.push(`broken merged_from ref: "${key}" not found`);
|
|
8873
|
+
if (raw.superseded_by && feature.status !== "deprecated") warnings.push(`superseded_by set but status is "${feature.status}" — consider deprecating`);
|
|
8874
|
+
if (raw.merged_into && feature.status !== "deprecated") warnings.push(`merged_into set but status is "${feature.status}" — consider deprecating`);
|
|
8875
|
+
const hasRevisions = Array.isArray(raw.revisions) && raw.revisions.length > 0;
|
|
8876
|
+
if (revisionWarnings && !hasRevisions) {
|
|
8877
|
+
const filledCritical = INTENT_CRITICAL_LINT.filter((f) => {
|
|
8878
|
+
const val = raw[f];
|
|
8879
|
+
if (val === void 0 || val === null) return false;
|
|
8880
|
+
if (typeof val === "string") return val.trim().length > 0;
|
|
8881
|
+
if (Array.isArray(val)) return val.length > 0;
|
|
8882
|
+
return false;
|
|
8883
|
+
});
|
|
8884
|
+
if (filledCritical.length > 0) warnings.push(`no revisions recorded for: ${filledCritical.join(", ")}`);
|
|
8885
|
+
}
|
|
8886
|
+
return {
|
|
8887
|
+
feature,
|
|
8888
|
+
filePath,
|
|
8889
|
+
issues,
|
|
8890
|
+
warnings
|
|
8891
|
+
};
|
|
8087
8892
|
});
|
|
8893
|
+
const featureByKey = new Map(features.map(({ feature }) => [feature.featureKey, feature]));
|
|
8894
|
+
for (const result of results) {
|
|
8895
|
+
const raw = featureByKey.get(result.feature.featureKey);
|
|
8896
|
+
if (!raw) continue;
|
|
8897
|
+
if (raw.merged_into) {
|
|
8898
|
+
const target = featureByKey.get(String(raw.merged_into));
|
|
8899
|
+
if (target) {
|
|
8900
|
+
if (!(target.merged_from ?? []).includes(result.feature.featureKey)) result.warnings.push(`merged_into "${raw.merged_into}" but that feature does not list this key in merged_from`);
|
|
8901
|
+
}
|
|
8902
|
+
}
|
|
8903
|
+
for (const sourceKey of raw.merged_from ?? []) {
|
|
8904
|
+
const source = featureByKey.get(sourceKey);
|
|
8905
|
+
if (source && source.merged_into !== result.feature.featureKey) result.warnings.push(`merged_from includes "${sourceKey}" but that feature does not point merged_into this key`);
|
|
8906
|
+
}
|
|
8907
|
+
if (raw.superseded_by) {
|
|
8908
|
+
const successor = featureByKey.get(String(raw.superseded_by));
|
|
8909
|
+
if (successor) {
|
|
8910
|
+
if (!(successor.superseded_from ?? []).includes(result.feature.featureKey)) result.warnings.push(`superseded_by "${raw.superseded_by}" but that feature does not list this key in superseded_from`);
|
|
8911
|
+
}
|
|
8912
|
+
}
|
|
8913
|
+
}
|
|
8914
|
+
const keyCount = /* @__PURE__ */ new Map();
|
|
8915
|
+
for (const { feature, filePath } of features) {
|
|
8916
|
+
const paths = keyCount.get(feature.featureKey) ?? [];
|
|
8917
|
+
paths.push(filePath);
|
|
8918
|
+
keyCount.set(feature.featureKey, paths);
|
|
8919
|
+
}
|
|
8920
|
+
const duplicateKeys = [...keyCount.entries()].filter(([, paths]) => paths.length > 1);
|
|
8921
|
+
const failures = results.filter((r) => r.issues.length > 0);
|
|
8922
|
+
const warned = results.filter((r) => r.warnings.length > 0);
|
|
8923
|
+
const passes = results.filter((r) => r.issues.length === 0);
|
|
8924
|
+
const lines = [...passes.map((r) => ` ✓ ${r.feature.featureKey.padEnd(18)} ${r.feature.status}${r.warnings.length > 0 ? ` ⚠ ${r.warnings.join("; ")}` : ""}`), ...failures.map((r) => ` ✗ ${r.feature.featureKey.padEnd(18)} ${r.feature.status}\n ${r.issues.join(", ")}${r.warnings.length > 0 ? `\n ⚠ ${r.warnings.join("; ")}` : ""}`)];
|
|
8925
|
+
if (duplicateKeys.length > 0) {
|
|
8926
|
+
lines.push("");
|
|
8927
|
+
lines.push(`⛔ Duplicate featureKeys detected (${duplicateKeys.length}):`);
|
|
8928
|
+
for (const [key, paths] of duplicateKeys) {
|
|
8929
|
+
lines.push(` ${key}`);
|
|
8930
|
+
for (const p of paths) lines.push(` ${path.relative(scanDir, p)}`);
|
|
8931
|
+
}
|
|
8932
|
+
}
|
|
8933
|
+
return { content: [{
|
|
8934
|
+
type: "text",
|
|
8935
|
+
text: `${passes.length} passed, ${failures.length} failed, ${warned.length} warned — ${results.length} features checked${duplicateKeys.length > 0 ? ` ⛔ ${duplicateKeys.length} duplicate key(s)` : ""}\n\n${lines.join("\n")}`
|
|
8936
|
+
}] };
|
|
8937
|
+
}
|
|
8938
|
+
case "read_feature_context": {
|
|
8939
|
+
const featureDir = resolvePath(String(a.path));
|
|
8940
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
8941
|
+
let raw;
|
|
8942
|
+
try {
|
|
8943
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
8944
|
+
} catch {
|
|
8945
|
+
return {
|
|
8946
|
+
content: [{
|
|
8947
|
+
type: "text",
|
|
8948
|
+
text: `No feature.json found at "${featurePath}"`
|
|
8949
|
+
}],
|
|
8950
|
+
isError: true
|
|
8951
|
+
};
|
|
8952
|
+
}
|
|
8953
|
+
const result = validateFeature(JSON.parse(raw));
|
|
8954
|
+
if (!result.success) return {
|
|
8955
|
+
content: [{
|
|
8956
|
+
type: "text",
|
|
8957
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
8958
|
+
}],
|
|
8959
|
+
isError: true
|
|
8960
|
+
};
|
|
8961
|
+
const feature = result.data;
|
|
8962
|
+
const contextStr = contextToString(buildContext(featureDir, feature));
|
|
8963
|
+
const missingFields = getMissingFields(feature);
|
|
8964
|
+
const fieldInstructions = missingFields.map((field) => {
|
|
8965
|
+
const prompt = FILL_PROMPTS[field];
|
|
8966
|
+
const isJson = JSON_FIELDS.has(field);
|
|
8967
|
+
return `### ${field}\n${prompt.system}\n${prompt.userSuffix}\n${isJson ? "(Return valid JSON for this field)" : "(Return plain text for this field)"}`;
|
|
8968
|
+
}).join("\n\n");
|
|
8969
|
+
const staleAnnotation = feature.annotations?.find((ann) => ann.type === "stale-review");
|
|
8970
|
+
const staleWarning = staleAnnotation ? `## ⚠ Stale fields (feature was reopened)\n${staleAnnotation.body}\nReview and rewrite these fields against the current code, then call write_feature_fields.\n\n` : "";
|
|
8971
|
+
return { content: [{
|
|
8972
|
+
type: "text",
|
|
8973
|
+
text: `${missingFields.length === 0 ? staleWarning || "All fillable fields are already populated. No generation needed." : `${staleWarning}## Missing fields to fill (${missingFields.join(", ")})\n\nGenerate each field described below, then call write_feature_fields with all values at once. After writing, call advance_feature to check if the feature is ready to transition.\n\n${fieldInstructions}`}\n\n## Context\n\n${contextStr}`
|
|
8974
|
+
}] };
|
|
8975
|
+
}
|
|
8976
|
+
case "write_feature_fields": {
|
|
8977
|
+
const featureDir = resolvePath(String(a.path));
|
|
8978
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
8979
|
+
let raw;
|
|
8980
|
+
try {
|
|
8981
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
8982
|
+
} catch {
|
|
8983
|
+
return {
|
|
8984
|
+
content: [{
|
|
8985
|
+
type: "text",
|
|
8986
|
+
text: `No feature.json found at "${featurePath}"`
|
|
8987
|
+
}],
|
|
8988
|
+
isError: true
|
|
8989
|
+
};
|
|
8990
|
+
}
|
|
8991
|
+
const existing = JSON.parse(raw);
|
|
8992
|
+
const fields = a.fields;
|
|
8993
|
+
if (!fields || typeof fields !== "object" || Array.isArray(fields)) return {
|
|
8994
|
+
content: [{
|
|
8995
|
+
type: "text",
|
|
8996
|
+
text: "fields must be a JSON object"
|
|
8997
|
+
}],
|
|
8998
|
+
isError: true
|
|
8999
|
+
};
|
|
9000
|
+
const INTENT_CRITICAL = new Set([
|
|
9001
|
+
"problem",
|
|
9002
|
+
"analysis",
|
|
9003
|
+
"implementation",
|
|
9004
|
+
"decisions",
|
|
9005
|
+
"successCriteria"
|
|
9006
|
+
]);
|
|
9007
|
+
const changingCritical = Object.keys(fields).filter((k) => INTENT_CRITICAL.has(k));
|
|
9008
|
+
const updated = {
|
|
9009
|
+
...existing,
|
|
9010
|
+
...fields
|
|
9011
|
+
};
|
|
9012
|
+
const revisionInput = a.revision;
|
|
9013
|
+
let revisionWarning = "";
|
|
9014
|
+
if (changingCritical.length > 0) if (revisionInput?.author && revisionInput?.reason) {
|
|
9015
|
+
const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
|
|
9016
|
+
updated.revisions = [...existing.revisions ?? [], {
|
|
9017
|
+
date: today,
|
|
9018
|
+
author: revisionInput.author,
|
|
9019
|
+
fields_changed: changingCritical,
|
|
9020
|
+
reason: revisionInput.reason
|
|
9021
|
+
}];
|
|
9022
|
+
} else revisionWarning = `\n\n⚠ Intent-critical fields changed (${changingCritical.join(", ")}) without a revision entry. Pass a "revision" object with author and reason to attribute this change.`;
|
|
9023
|
+
fs.writeFileSync(featurePath, JSON.stringify(updated, null, 2) + "\n", "utf-8");
|
|
9024
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
9025
|
+
appendPromptLog(featureDir, Object.keys(fields).map((field) => {
|
|
9026
|
+
const val = fields[field];
|
|
9027
|
+
return {
|
|
9028
|
+
date: now,
|
|
9029
|
+
field,
|
|
9030
|
+
source: "mcp",
|
|
9031
|
+
value_preview: (typeof val === "string" ? val : JSON.stringify(val)).slice(0, 120)
|
|
9032
|
+
};
|
|
9033
|
+
}));
|
|
9034
|
+
const writtenKeys = Object.keys(fields);
|
|
9035
|
+
const afterResult = validateFeature(JSON.parse(fs.readFileSync(featurePath, "utf-8")));
|
|
9036
|
+
const stillMissing = afterResult.success ? getMissingFields(afterResult.data) : [];
|
|
9037
|
+
const nextHint = stillMissing.length > 0 ? `${stillMissing.length} field(s) still missing: ${stillMissing.join(", ")}. Continue filling or call advance_feature to check if the current fields are sufficient to transition.` : `All AI fields filled. Call advance_feature to transition status when ready.`;
|
|
8088
9038
|
return { content: [{
|
|
8089
9039
|
type: "text",
|
|
8090
|
-
text:
|
|
9040
|
+
text: `✓ Wrote ${writtenKeys.length} field(s) to ${featurePath}: ${writtenKeys.join(", ")}\n\n${nextHint}${revisionWarning}`
|
|
8091
9041
|
}] };
|
|
8092
9042
|
}
|
|
8093
|
-
case "
|
|
8094
|
-
|
|
8095
|
-
|
|
8096
|
-
|
|
8097
|
-
|
|
8098
|
-
|
|
8099
|
-
|
|
8100
|
-
|
|
8101
|
-
|
|
8102
|
-
|
|
8103
|
-
|
|
8104
|
-
|
|
8105
|
-
|
|
9043
|
+
case "advance_feature": {
|
|
9044
|
+
const featureDir = resolvePath(String(a.path));
|
|
9045
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
9046
|
+
let raw;
|
|
9047
|
+
try {
|
|
9048
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9049
|
+
} catch {
|
|
9050
|
+
return {
|
|
9051
|
+
content: [{
|
|
9052
|
+
type: "text",
|
|
9053
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9054
|
+
}],
|
|
9055
|
+
isError: true
|
|
9056
|
+
};
|
|
9057
|
+
}
|
|
9058
|
+
const parsed = JSON.parse(raw);
|
|
9059
|
+
const result = validateFeature(parsed);
|
|
9060
|
+
if (!result.success) return {
|
|
9061
|
+
content: [{
|
|
9062
|
+
type: "text",
|
|
9063
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
9064
|
+
}],
|
|
9065
|
+
isError: true
|
|
9066
|
+
};
|
|
9067
|
+
const feature = result.data;
|
|
9068
|
+
const to = String(a.to);
|
|
9069
|
+
const from = feature.status;
|
|
9070
|
+
const reason = a.reason ? String(a.reason) : void 0;
|
|
9071
|
+
const illegal = checkIllegalTransition(from, to);
|
|
9072
|
+
if (illegal) return { content: [{
|
|
8106
9073
|
type: "text",
|
|
8107
|
-
text:
|
|
9074
|
+
text: illegal
|
|
8108
9075
|
}] };
|
|
8109
|
-
return { content: [{
|
|
9076
|
+
if (from === "frozen" && to === "active" && !reason) return { content: [{
|
|
8110
9077
|
type: "text",
|
|
8111
|
-
text:
|
|
9078
|
+
text: "Reopening a frozen feature requires a reason. Call advance_feature again with the reason parameter describing what changed."
|
|
8112
9079
|
}] };
|
|
8113
|
-
|
|
8114
|
-
|
|
8115
|
-
const query = String(a.query).toLowerCase();
|
|
8116
|
-
const statusFilter = a.status;
|
|
8117
|
-
const matches = scanAllFeatures(workspaceRoot).filter(({ feature }) => {
|
|
8118
|
-
if (statusFilter && feature.status !== statusFilter) return false;
|
|
8119
|
-
return [
|
|
8120
|
-
feature.featureKey,
|
|
8121
|
-
feature.title,
|
|
8122
|
-
feature.problem,
|
|
8123
|
-
...feature.tags ?? [],
|
|
8124
|
-
feature.analysis ?? ""
|
|
8125
|
-
].join(" ").toLowerCase().includes(query);
|
|
8126
|
-
});
|
|
8127
|
-
if (matches.length === 0) return { content: [{
|
|
9080
|
+
const missing = getMissingForTransition(feature, to);
|
|
9081
|
+
if (missing.length > 0) return { content: [{
|
|
8128
9082
|
type: "text",
|
|
8129
|
-
text: `
|
|
9083
|
+
text: `Cannot advance "${feature.featureKey}" to "${to}" — ${missing.length} required field(s) missing: ${missing.join(", ")}.\n\nCall read_feature_context on this path, fill the missing fields with write_feature_fields, then try advance_feature again.`
|
|
8130
9084
|
}] };
|
|
8131
|
-
|
|
9085
|
+
let deprecationHint = "";
|
|
9086
|
+
if (to === "deprecated") {
|
|
9087
|
+
const hasSuperseeded = !!parsed.superseded_by;
|
|
9088
|
+
const hasMerged = !!parsed.merged_into;
|
|
9089
|
+
if (!hasSuperseeded && !hasMerged) deprecationHint = "\n\n⚠ No lifecycle pointer set. Consider running `lac supersede` or `lac merge`, or call write_feature_fields with superseded_by or merged_into before deprecating so future readers know where this feature went.";
|
|
9090
|
+
}
|
|
9091
|
+
const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
|
|
9092
|
+
const updated = {
|
|
9093
|
+
...parsed,
|
|
9094
|
+
status: to
|
|
9095
|
+
};
|
|
9096
|
+
updated.statusHistory = [...updated.statusHistory ?? [], {
|
|
9097
|
+
from,
|
|
9098
|
+
to,
|
|
9099
|
+
date: today,
|
|
9100
|
+
...reason ? { reason } : {}
|
|
9101
|
+
}];
|
|
9102
|
+
if (from === "frozen" && to === "active" && reason) {
|
|
9103
|
+
const filledCritical = [
|
|
9104
|
+
"analysis",
|
|
9105
|
+
"implementation",
|
|
9106
|
+
"decisions",
|
|
9107
|
+
"successCriteria"
|
|
9108
|
+
].filter((f) => {
|
|
9109
|
+
const val = feature[f];
|
|
9110
|
+
if (val === void 0 || val === null) return false;
|
|
9111
|
+
if (typeof val === "string") return val.trim().length > 0;
|
|
9112
|
+
if (Array.isArray(val)) return val.length > 0;
|
|
9113
|
+
return false;
|
|
9114
|
+
});
|
|
9115
|
+
const staleBody = filledCritical.length > 0 ? `Fields that may need updating after reopen: ${filledCritical.join(", ")}` : "Review all intent-critical fields after reopen";
|
|
9116
|
+
updated.annotations = [
|
|
9117
|
+
...updated.annotations ?? [],
|
|
9118
|
+
{
|
|
9119
|
+
id: `reopen-${Date.now()}`,
|
|
9120
|
+
author: "lac advance",
|
|
9121
|
+
date: today,
|
|
9122
|
+
type: "reopen",
|
|
9123
|
+
body: reason
|
|
9124
|
+
},
|
|
9125
|
+
{
|
|
9126
|
+
id: `stale-${Date.now() + 1}`,
|
|
9127
|
+
author: "lac advance",
|
|
9128
|
+
date: today,
|
|
9129
|
+
type: "stale-review",
|
|
9130
|
+
body: staleBody
|
|
9131
|
+
}
|
|
9132
|
+
];
|
|
9133
|
+
}
|
|
9134
|
+
fs.writeFileSync(featurePath, JSON.stringify(updated, null, 2) + "\n", "utf-8");
|
|
9135
|
+
const nextStep = to === "active" ? from === "frozen" ? "Feature reopened. Call read_feature_context to review stale fields, update with write_feature_fields, then advance_feature to frozen when ready." : "Feature is active. Call read_feature_context to fill any missing fields, then advance_feature to frozen when complete." : to === "frozen" ? "Feature is frozen. If a bug is found or requirements change, call spawn_child_feature or advance_feature with to: \"active\" and a reason." : "Feature deprecated.";
|
|
8132
9136
|
return { content: [{
|
|
8133
9137
|
type: "text",
|
|
8134
|
-
text:
|
|
9138
|
+
text: `✓ "${feature.featureKey}" ${from} → ${to}.\n\n${nextStep}${deprecationHint}`
|
|
8135
9139
|
}] };
|
|
8136
9140
|
}
|
|
8137
|
-
case "
|
|
8138
|
-
const
|
|
8139
|
-
const
|
|
8140
|
-
|
|
9141
|
+
case "spawn_child_feature": {
|
|
9142
|
+
const parentDir = resolvePath(String(a.parentPath));
|
|
9143
|
+
const parentFeaturePath = path.join(parentDir, "feature.json");
|
|
9144
|
+
let parentRaw;
|
|
9145
|
+
try {
|
|
9146
|
+
parentRaw = fs.readFileSync(parentFeaturePath, "utf-8");
|
|
9147
|
+
} catch {
|
|
9148
|
+
return {
|
|
9149
|
+
content: [{
|
|
9150
|
+
type: "text",
|
|
9151
|
+
text: `No feature.json found at "${parentFeaturePath}"`
|
|
9152
|
+
}],
|
|
9153
|
+
isError: true
|
|
9154
|
+
};
|
|
9155
|
+
}
|
|
9156
|
+
const parentParsed = JSON.parse(parentRaw);
|
|
9157
|
+
const parentResult = validateFeature(parentParsed);
|
|
9158
|
+
if (!parentResult.success) return {
|
|
9159
|
+
content: [{
|
|
9160
|
+
type: "text",
|
|
9161
|
+
text: `Invalid parent feature.json: ${parentResult.errors.join(", ")}`
|
|
9162
|
+
}],
|
|
9163
|
+
isError: true
|
|
9164
|
+
};
|
|
9165
|
+
const parentFeature = parentResult.data;
|
|
9166
|
+
const childDir = resolvePath(String(a.dir));
|
|
9167
|
+
const childFeaturePath = path.join(childDir, "feature.json");
|
|
9168
|
+
if (fs.existsSync(childFeaturePath)) return { content: [{
|
|
8141
9169
|
type: "text",
|
|
8142
|
-
text: `feature.json already exists at "${
|
|
9170
|
+
text: `feature.json already exists at "${childFeaturePath}".`
|
|
8143
9171
|
}] };
|
|
8144
|
-
|
|
8145
|
-
|
|
9172
|
+
fs.mkdirSync(childDir, { recursive: true });
|
|
9173
|
+
const childKey = generateFeatureKey(childDir);
|
|
9174
|
+
const child = {
|
|
9175
|
+
featureKey: childKey,
|
|
8146
9176
|
title: String(a.title),
|
|
8147
|
-
status:
|
|
9177
|
+
status: "draft",
|
|
8148
9178
|
problem: String(a.problem),
|
|
8149
|
-
schemaVersion: 1
|
|
9179
|
+
schemaVersion: 1,
|
|
9180
|
+
...parentFeature.domain ? { domain: parentFeature.domain } : {},
|
|
9181
|
+
...parentFeature.tags?.length ? { tags: parentFeature.tags } : {},
|
|
9182
|
+
lineage: {
|
|
9183
|
+
parent: parentFeature.featureKey,
|
|
9184
|
+
spawnReason: String(a.spawnReason)
|
|
9185
|
+
}
|
|
8150
9186
|
};
|
|
8151
|
-
fs.
|
|
8152
|
-
|
|
9187
|
+
fs.writeFileSync(childFeaturePath, JSON.stringify(child, null, 2) + "\n", "utf-8");
|
|
9188
|
+
const existingChildren = parentParsed.lineage?.children ?? [];
|
|
9189
|
+
const updatedParent = {
|
|
9190
|
+
...parentParsed,
|
|
9191
|
+
lineage: {
|
|
9192
|
+
...parentParsed.lineage ?? {},
|
|
9193
|
+
children: [...existingChildren, childKey]
|
|
9194
|
+
}
|
|
9195
|
+
};
|
|
9196
|
+
fs.writeFileSync(parentFeaturePath, JSON.stringify(updatedParent, null, 2) + "\n", "utf-8");
|
|
8153
9197
|
return { content: [{
|
|
8154
9198
|
type: "text",
|
|
8155
|
-
text:
|
|
9199
|
+
text: `✓ Spawned "${childKey}" under "${parentFeature.featureKey}".\nReason: ${a.spawnReason}\nChild path: ${childDir}\n\nNext: call read_feature_context on "${childDir}" to begin the child feature's lifecycle.`
|
|
8156
9200
|
}] };
|
|
8157
9201
|
}
|
|
8158
|
-
case "
|
|
8159
|
-
const
|
|
8160
|
-
const
|
|
8161
|
-
|
|
8162
|
-
|
|
8163
|
-
|
|
9202
|
+
case "get_feature_status": {
|
|
9203
|
+
const featureDir = resolvePath(String(a.path));
|
|
9204
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
9205
|
+
let raw;
|
|
9206
|
+
try {
|
|
9207
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9208
|
+
} catch {
|
|
9209
|
+
return {
|
|
9210
|
+
content: [{
|
|
9211
|
+
type: "text",
|
|
9212
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9213
|
+
}],
|
|
9214
|
+
isError: true
|
|
9215
|
+
};
|
|
9216
|
+
}
|
|
9217
|
+
const result = validateFeature(JSON.parse(raw));
|
|
9218
|
+
if (!result.success) return {
|
|
9219
|
+
content: [{
|
|
9220
|
+
type: "text",
|
|
9221
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
9222
|
+
}],
|
|
9223
|
+
isError: true
|
|
9224
|
+
};
|
|
9225
|
+
const feature = result.data;
|
|
9226
|
+
const missingFields = getMissingFields(feature);
|
|
9227
|
+
const staleAnnotation = feature.annotations?.find((ann) => ann.type === "stale-review");
|
|
9228
|
+
const validTransitions = [];
|
|
9229
|
+
if (feature.status !== "deprecated") validTransitions.push("deprecated");
|
|
9230
|
+
if (feature.status === "draft") validTransitions.push("active");
|
|
9231
|
+
if (feature.status === "active") validTransitions.push("frozen");
|
|
9232
|
+
if (feature.status === "frozen") validTransitions.push("active (requires reason)");
|
|
9233
|
+
const nextAction = missingFields.length > 0 ? `call read_feature_context to fill: ${missingFields.join(", ")}` : staleAnnotation ? `call read_feature_context to review stale fields (reopened feature)` : feature.status === "draft" ? `call advance_feature with to: "active"` : feature.status === "active" ? `call advance_feature with to: "frozen" when complete` : feature.status === "frozen" ? `frozen — call spawn_child_feature for bugs, or advance_feature to reopen` : "deprecated — no action needed";
|
|
9234
|
+
const sinceDate = (feature.statusHistory ? [...feature.statusHistory].reverse().find((h) => h.to === feature.status) : void 0)?.date ?? null;
|
|
9235
|
+
return { content: [{
|
|
8164
9236
|
type: "text",
|
|
8165
|
-
text:
|
|
9237
|
+
text: [
|
|
9238
|
+
`Key : ${feature.featureKey}`,
|
|
9239
|
+
`Title : ${feature.title}`,
|
|
9240
|
+
`Status : ${statusIcon(feature.status)} ${feature.status}${sinceDate ? ` (since ${sinceDate})` : ""}`,
|
|
9241
|
+
`Missing : ${missingFields.length === 0 ? "none" : missingFields.join(", ")}`,
|
|
9242
|
+
`Stale : ${staleAnnotation ? staleAnnotation.body : "none"}`,
|
|
9243
|
+
`Transitions: ${validTransitions.join(", ")}`,
|
|
9244
|
+
`Parent : ${feature.lineage?.parent ?? "none"}`,
|
|
9245
|
+
`Children : ${feature.lineage?.children?.length ?? 0}`,
|
|
9246
|
+
``,
|
|
9247
|
+
`Next action: ${nextAction}`
|
|
9248
|
+
].join("\n")
|
|
8166
9249
|
}] };
|
|
8167
|
-
|
|
8168
|
-
|
|
8169
|
-
|
|
8170
|
-
|
|
8171
|
-
|
|
8172
|
-
|
|
8173
|
-
|
|
8174
|
-
|
|
9250
|
+
}
|
|
9251
|
+
case "extract_feature_from_code": {
|
|
9252
|
+
const dir = resolvePath(String(a.path));
|
|
9253
|
+
const featurePath = path.join(dir, "feature.json");
|
|
9254
|
+
if (fs.existsSync(featurePath)) return { content: [{
|
|
9255
|
+
type: "text",
|
|
9256
|
+
text: `feature.json already exists at "${featurePath}". Use read_feature_context instead.`
|
|
9257
|
+
}] };
|
|
9258
|
+
const placeholder = {
|
|
9259
|
+
featureKey: "extract-pending",
|
|
9260
|
+
title: "(pending)",
|
|
9261
|
+
status: "draft",
|
|
9262
|
+
problem: "(to be determined)"
|
|
9263
|
+
};
|
|
9264
|
+
const maxFileChars = a.maxFileSize ? Number(a.maxFileSize) : void 0;
|
|
9265
|
+
const ctx = buildContext(dir, placeholder, maxFileChars !== void 0 ? { maxFileChars } : {});
|
|
9266
|
+
if (ctx.sourceFiles.length === 0) return { content: [{
|
|
9267
|
+
type: "text",
|
|
9268
|
+
text: `No source files found in "${dir}". Is this the right directory?`
|
|
9269
|
+
}] };
|
|
9270
|
+
const parts = [];
|
|
9271
|
+
if (ctx.truncatedFiles.length > 0) {
|
|
9272
|
+
parts.push(`⚠ WARNING: ${ctx.truncatedFiles.length} file(s) were truncated at ${maxFileChars ?? 8e3} chars — extraction may be incomplete:`);
|
|
9273
|
+
for (const f of ctx.truncatedFiles) parts.push(` - ${f}`);
|
|
9274
|
+
parts.push(`Tip: re-call with maxFileSize set higher (e.g. 16000) to capture the full content.`);
|
|
9275
|
+
parts.push("");
|
|
9276
|
+
}
|
|
9277
|
+
if (ctx.gitLog) {
|
|
9278
|
+
parts.push("=== git log (last 20 commits) ===");
|
|
9279
|
+
parts.push(ctx.gitLog);
|
|
8175
9280
|
}
|
|
9281
|
+
for (const file of ctx.sourceFiles) {
|
|
9282
|
+
parts.push(`\n=== ${file.relativePath}${file.truncated ? " [truncated]" : ""} ===`);
|
|
9283
|
+
parts.push(file.content);
|
|
9284
|
+
}
|
|
9285
|
+
const rawContext = parts.join("\n");
|
|
8176
9286
|
return { content: [{
|
|
8177
9287
|
type: "text",
|
|
8178
|
-
text:
|
|
9288
|
+
text: `${`## Extract feature.json from existing code
|
|
9289
|
+
|
|
9290
|
+
No feature.json exists at "${dir}". Analyze the ${ctx.sourceFiles.length} source file(s) below and generate a complete feature.json proposal.
|
|
9291
|
+
|
|
9292
|
+
When done, execute in order:
|
|
9293
|
+
1. Call create_feature with: dir="${dir}", plus your generated title and problem
|
|
9294
|
+
2. Call write_feature_fields with: path="${dir}", fields containing analysis, decisions, implementation, knownLimitations, tags, successCriteria, domain
|
|
9295
|
+
3. Call advance_feature to transition when ready
|
|
9296
|
+
|
|
9297
|
+
### Fields to generate
|
|
9298
|
+
**title** — Short descriptive name (5-10 words)
|
|
9299
|
+
**problem** — What problem does this code solve? 1-2 sentences.
|
|
9300
|
+
**domain** — Single lowercase word or hyphenated phrase (e.g. "auth", "data-pipeline")
|
|
9301
|
+
**tags** — 3-6 lowercase tags as JSON array: ["tag1", "tag2"]
|
|
9302
|
+
**analysis** — Architectural overview, key patterns, why they were chosen. 150-300 words.
|
|
9303
|
+
**decisions** — 2-4 key technical decisions as JSON array: [{"decision":"...","rationale":"...","alternativesConsidered":["..."]}]
|
|
9304
|
+
**implementation** — Main components, data flow, non-obvious patterns. 100-200 words.
|
|
9305
|
+
**knownLimitations** — 2-4 limitations/TODOs as JSON array: ["..."]
|
|
9306
|
+
**successCriteria** — How do we know this works? 1-3 testable sentences.`}\n\n## Source files\n\n${rawContext}`
|
|
8179
9307
|
}] };
|
|
8180
9308
|
}
|
|
8181
|
-
case "
|
|
8182
|
-
const
|
|
8183
|
-
|
|
8184
|
-
|
|
8185
|
-
|
|
8186
|
-
|
|
8187
|
-
|
|
8188
|
-
if (!feature.decisions?.length) issues.push("no decisions recorded");
|
|
8189
|
-
}
|
|
9309
|
+
case "feature_changelog": {
|
|
9310
|
+
const featureDir = resolvePath(String(a.path));
|
|
9311
|
+
const featurePath = path.join(featureDir, "feature.json");
|
|
9312
|
+
let raw;
|
|
9313
|
+
try {
|
|
9314
|
+
raw = fs.readFileSync(featurePath, "utf-8");
|
|
9315
|
+
} catch {
|
|
8190
9316
|
return {
|
|
8191
|
-
|
|
8192
|
-
|
|
8193
|
-
|
|
9317
|
+
content: [{
|
|
9318
|
+
type: "text",
|
|
9319
|
+
text: `No feature.json found at "${featurePath}"`
|
|
9320
|
+
}],
|
|
9321
|
+
isError: true
|
|
8194
9322
|
};
|
|
9323
|
+
}
|
|
9324
|
+
const result = validateFeature(JSON.parse(raw));
|
|
9325
|
+
if (!result.success) return {
|
|
9326
|
+
content: [{
|
|
9327
|
+
type: "text",
|
|
9328
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
9329
|
+
}],
|
|
9330
|
+
isError: true
|
|
9331
|
+
};
|
|
9332
|
+
const feature = result.data;
|
|
9333
|
+
const events = [];
|
|
9334
|
+
if (feature.statusHistory?.length) for (const h of feature.statusHistory) events.push({
|
|
9335
|
+
date: h.date,
|
|
9336
|
+
label: `${statusIcon(h.to)} ${h.from} → ${h.to}${h.reason ? ` — "${h.reason}"` : ""}`
|
|
8195
9337
|
});
|
|
8196
|
-
|
|
8197
|
-
|
|
8198
|
-
|
|
9338
|
+
else events.push({
|
|
9339
|
+
date: "(unknown)",
|
|
9340
|
+
label: `${statusIcon(feature.status)} current: ${feature.status}`
|
|
9341
|
+
});
|
|
9342
|
+
for (const rev of feature.revisions ?? []) events.push({
|
|
9343
|
+
date: rev.date,
|
|
9344
|
+
label: `✎ revision by ${rev.author}: ${rev.fields_changed.join(", ")} — ${rev.reason}`
|
|
9345
|
+
});
|
|
9346
|
+
for (const ann of feature.annotations ?? []) if (ann.type === "spawn") events.push({
|
|
9347
|
+
date: ann.date,
|
|
9348
|
+
label: `↳ spawned child — ${ann.body}`
|
|
9349
|
+
});
|
|
9350
|
+
else if (ann.type === "reopen") events.push({
|
|
9351
|
+
date: ann.date,
|
|
9352
|
+
label: `↺ reopened — ${ann.body}`
|
|
9353
|
+
});
|
|
9354
|
+
else if (ann.type === "stale-review") events.push({
|
|
9355
|
+
date: ann.date,
|
|
9356
|
+
label: `⚠ stale-review — ${ann.body}`
|
|
9357
|
+
});
|
|
9358
|
+
else events.push({
|
|
9359
|
+
date: ann.date,
|
|
9360
|
+
label: `[${ann.type}] ${ann.body} (by ${ann.author})`
|
|
9361
|
+
});
|
|
9362
|
+
const annotatedChildren = new Set((feature.annotations ?? []).filter((a$1) => a$1.type === "spawn").map((a$1) => a$1.body));
|
|
9363
|
+
for (const child of feature.lineage?.children ?? []) if (!annotatedChildren.has(child)) events.push({
|
|
9364
|
+
date: "(unknown)",
|
|
9365
|
+
label: `↳ spawned child: ${child}`
|
|
9366
|
+
});
|
|
9367
|
+
events.sort((a$1, b) => a$1.date.localeCompare(b.date));
|
|
9368
|
+
const decisionsLine = feature.decisions?.length ? `\nDecisions: ${feature.decisions.map((d) => `"${d.decision}"`).join(" · ")}` : "";
|
|
8199
9369
|
return { content: [{
|
|
8200
9370
|
type: "text",
|
|
8201
|
-
text: `${
|
|
9371
|
+
text: `${`${feature.featureKey} — "${feature.title}"\n${"─".repeat(50)}`}\n${events.map((e) => `${e.date.padEnd(12)} ${e.label}`).join("\n")}${decisionsLine}`
|
|
8202
9372
|
}] };
|
|
8203
9373
|
}
|
|
8204
|
-
case "
|
|
9374
|
+
case "roadmap_view": {
|
|
9375
|
+
const features = scanAllFeatures(a.path ? resolvePath(String(a.path)) : workspaceRoot);
|
|
9376
|
+
const byStatus = {
|
|
9377
|
+
active: [],
|
|
9378
|
+
draft: [],
|
|
9379
|
+
frozen: [],
|
|
9380
|
+
deprecated: []
|
|
9381
|
+
};
|
|
9382
|
+
for (const f of features) {
|
|
9383
|
+
const group = byStatus[f.feature.status];
|
|
9384
|
+
if (group) group.push(f);
|
|
9385
|
+
}
|
|
9386
|
+
for (const group of Object.values(byStatus)) group.sort((a$1, b) => {
|
|
9387
|
+
const pa = a$1.feature.priority ?? 9999;
|
|
9388
|
+
const pb = b.feature.priority ?? 9999;
|
|
9389
|
+
return pa !== pb ? pa - pb : a$1.feature.featureKey.localeCompare(b.feature.featureKey);
|
|
9390
|
+
});
|
|
9391
|
+
const formatGroup = (status, items) => {
|
|
9392
|
+
if (items.length === 0) return "";
|
|
9393
|
+
const rows = items.map(({ feature }) => {
|
|
9394
|
+
const priority = feature.priority ? `P${feature.priority}` : " - ";
|
|
9395
|
+
const childCount = feature.lineage?.children?.length ?? 0;
|
|
9396
|
+
const childNote = childCount > 0 ? ` [${childCount}↳]` : "";
|
|
9397
|
+
const missing = getMissingFields(feature);
|
|
9398
|
+
const warn = missing.length > 0 ? ` ⚠ missing: ${missing.join(", ")}` : "";
|
|
9399
|
+
return ` ${priority.padEnd(3)} ${feature.featureKey.padEnd(18)} ${feature.title}${childNote}${warn}`;
|
|
9400
|
+
});
|
|
9401
|
+
return [`${statusIcon(status)} ${status.toUpperCase()} (${items.length})`, ...rows].join("\n");
|
|
9402
|
+
};
|
|
9403
|
+
const sections = [
|
|
9404
|
+
"active",
|
|
9405
|
+
"draft",
|
|
9406
|
+
"frozen",
|
|
9407
|
+
"deprecated"
|
|
9408
|
+
].map((s) => formatGroup(s, byStatus[s] ?? [])).filter(Boolean);
|
|
9409
|
+
if (sections.length === 0) return { content: [{
|
|
9410
|
+
type: "text",
|
|
9411
|
+
text: "No features found."
|
|
9412
|
+
}] };
|
|
9413
|
+
return { content: [{
|
|
9414
|
+
type: "text",
|
|
9415
|
+
text: sections.join("\n\n")
|
|
9416
|
+
}] };
|
|
9417
|
+
}
|
|
9418
|
+
case "suggest_split": {
|
|
8205
9419
|
const featureDir = resolvePath(String(a.path));
|
|
8206
9420
|
const featurePath = path.join(featureDir, "feature.json");
|
|
8207
9421
|
let raw;
|
|
@@ -8216,7 +9430,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8216
9430
|
isError: true
|
|
8217
9431
|
};
|
|
8218
9432
|
}
|
|
8219
|
-
const result = validateFeature
|
|
9433
|
+
const result = validateFeature(JSON.parse(raw));
|
|
8220
9434
|
if (!result.success) return {
|
|
8221
9435
|
content: [{
|
|
8222
9436
|
type: "text",
|
|
@@ -8225,19 +9439,34 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8225
9439
|
isError: true
|
|
8226
9440
|
};
|
|
8227
9441
|
const feature = result.data;
|
|
8228
|
-
const
|
|
8229
|
-
const
|
|
8230
|
-
const
|
|
8231
|
-
|
|
8232
|
-
|
|
8233
|
-
|
|
8234
|
-
|
|
9442
|
+
const ctx = buildContext(featureDir, feature);
|
|
9443
|
+
const contextStr = contextToString(ctx);
|
|
9444
|
+
const signals = [
|
|
9445
|
+
ctx.sourceFiles.length > 8 ? `⚠ ${ctx.sourceFiles.length} source files (large)` : null,
|
|
9446
|
+
(feature.decisions?.length ?? 0) > 4 ? `⚠ ${feature.decisions.length} decisions (broad scope)` : null,
|
|
9447
|
+
(feature.title + " " + feature.problem).toLowerCase().includes(" and ") ? "⚠ title/problem contains \"and\" (possible dual concern)" : null
|
|
9448
|
+
].filter(Boolean);
|
|
9449
|
+
const signalNote = signals.length > 0 ? `\n**Signals detected:**\n${signals.map((s) => `- ${s}`).join("\n")}\n` : "\n**No obvious split signals — evaluate from the code.**\n";
|
|
8235
9450
|
return { content: [{
|
|
8236
9451
|
type: "text",
|
|
8237
|
-
text: `${
|
|
9452
|
+
text: `${`## Suggest split for "${feature.featureKey}" — "${feature.title}"
|
|
9453
|
+
${signalNote}
|
|
9454
|
+
Analyze the source files and determine whether this feature should be broken into smaller child features.
|
|
9455
|
+
|
|
9456
|
+
**Split signals to look for:**
|
|
9457
|
+
- Source files with distinct concerns that don't depend on each other
|
|
9458
|
+
- Multiple technical domains in the same codebase
|
|
9459
|
+
- Decisions covering unrelated areas
|
|
9460
|
+
- Problem statement describes multiple independent things
|
|
9461
|
+
|
|
9462
|
+
**Your response:**
|
|
9463
|
+
1. Recommend: split or keep as-is, with 2-3 sentence justification
|
|
9464
|
+
2. If split: propose 2-4 child features each with title, problem, spawnReason, and which files belong to it
|
|
9465
|
+
Then call spawn_child_feature for each (parentPath="${featureDir}", dir=<new subfolder>)
|
|
9466
|
+
3. If keep: explain what makes this feature cohesive`}\n\n## Context\n\n${contextStr}`
|
|
8238
9467
|
}] };
|
|
8239
9468
|
}
|
|
8240
|
-
case "
|
|
9469
|
+
case "feature_summary_for_pr": {
|
|
8241
9470
|
const featureDir = resolvePath(String(a.path));
|
|
8242
9471
|
const featurePath = path.join(featureDir, "feature.json");
|
|
8243
9472
|
let raw;
|
|
@@ -8252,24 +9481,317 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
8252
9481
|
isError: true
|
|
8253
9482
|
};
|
|
8254
9483
|
}
|
|
8255
|
-
const
|
|
8256
|
-
|
|
8257
|
-
if (!fields || typeof fields !== "object" || Array.isArray(fields)) return {
|
|
9484
|
+
const result = validateFeature(JSON.parse(raw));
|
|
9485
|
+
if (!result.success) return {
|
|
8258
9486
|
content: [{
|
|
8259
9487
|
type: "text",
|
|
8260
|
-
text:
|
|
9488
|
+
text: `Invalid feature.json: ${result.errors.join(", ")}`
|
|
8261
9489
|
}],
|
|
8262
9490
|
isError: true
|
|
8263
9491
|
};
|
|
8264
|
-
const
|
|
8265
|
-
|
|
8266
|
-
|
|
9492
|
+
const feature = result.data;
|
|
9493
|
+
const lines = [
|
|
9494
|
+
`## ${feature.featureKey} — ${feature.title}`,
|
|
9495
|
+
"",
|
|
9496
|
+
`**Problem:** ${feature.problem}`
|
|
9497
|
+
];
|
|
9498
|
+
if (feature.implementation) lines.push("", "**What was built:**", feature.implementation);
|
|
9499
|
+
else if (feature.analysis) lines.push("", "**Overview:**", feature.analysis.slice(0, 300) + (feature.analysis.length > 300 ? "…" : ""));
|
|
9500
|
+
if (feature.decisions?.length) {
|
|
9501
|
+
lines.push("", "**Key decisions:**");
|
|
9502
|
+
for (const d of feature.decisions) lines.push(`- **${d.decision}** — ${d.rationale}`);
|
|
9503
|
+
}
|
|
9504
|
+
if (feature.knownLimitations?.length) {
|
|
9505
|
+
lines.push("", "**Known limitations:**");
|
|
9506
|
+
for (const l of feature.knownLimitations) lines.push(`- ${l}`);
|
|
9507
|
+
}
|
|
9508
|
+
if (feature.successCriteria) lines.push("", `**Success criteria:** ${feature.successCriteria}`);
|
|
9509
|
+
const lineageParts = [];
|
|
9510
|
+
if (feature.lineage?.parent) lineageParts.push(`child of \`${feature.lineage.parent}\``);
|
|
9511
|
+
if (feature.lineage?.children?.length) lineageParts.push(`spawned: ${feature.lineage.children.map((c) => `\`${c}\``).join(", ")}`);
|
|
9512
|
+
if (lineageParts.length) lines.push("", `**Lineage:** ${lineageParts.join(" · ")}`);
|
|
9513
|
+
if (feature.tags?.length) lines.push("", `**Tags:** ${feature.tags.map((t) => `\`${t}\``).join(", ")}`);
|
|
9514
|
+
lines.push("", "---", `*Generated from [\`${feature.featureKey}\`](feature.json) via LAC*`);
|
|
9515
|
+
return { content: [{
|
|
9516
|
+
type: "text",
|
|
9517
|
+
text: lines.join("\n")
|
|
9518
|
+
}] };
|
|
9519
|
+
}
|
|
9520
|
+
case "summarize_workspace": {
|
|
9521
|
+
const scanDir = a.path ? resolvePath(String(a.path)) : workspaceRoot;
|
|
9522
|
+
const format = a.format === "json" ? "json" : "markdown";
|
|
9523
|
+
function readReadmeSummary(dir) {
|
|
9524
|
+
for (const name$1 of [
|
|
9525
|
+
"README.md",
|
|
9526
|
+
"readme.md",
|
|
9527
|
+
"Readme.md"
|
|
9528
|
+
]) {
|
|
9529
|
+
const p = path.join(dir, name$1);
|
|
9530
|
+
if (!fs.existsSync(p)) continue;
|
|
9531
|
+
try {
|
|
9532
|
+
const lines$1 = fs.readFileSync(p, "utf-8").split("\n");
|
|
9533
|
+
const parts = [];
|
|
9534
|
+
let started = false;
|
|
9535
|
+
for (const line of lines$1) {
|
|
9536
|
+
if (!started && line.trim()) started = true;
|
|
9537
|
+
if (!started) continue;
|
|
9538
|
+
parts.push(line);
|
|
9539
|
+
if (parts.length >= 2 && line.trim() === "") break;
|
|
9540
|
+
if (parts.length >= 8) break;
|
|
9541
|
+
}
|
|
9542
|
+
const text = parts.join("\n").trim();
|
|
9543
|
+
return text.length > 300 ? text.slice(0, 297) + "…" : text;
|
|
9544
|
+
} catch {}
|
|
9545
|
+
}
|
|
9546
|
+
return null;
|
|
9547
|
+
}
|
|
9548
|
+
const allFeatures = scanAllFeatures(scanDir);
|
|
9549
|
+
if (allFeatures.length === 0) return { content: [{
|
|
9550
|
+
type: "text",
|
|
9551
|
+
text: "No features found."
|
|
9552
|
+
}] };
|
|
9553
|
+
const rootReadme = readReadmeSummary(scanDir);
|
|
9554
|
+
const readmeCache = /* @__PURE__ */ new Map();
|
|
9555
|
+
const cachedReadme = (dir) => {
|
|
9556
|
+
if (!readmeCache.has(dir)) readmeCache.set(dir, readReadmeSummary(dir));
|
|
9557
|
+
return readmeCache.get(dir) ?? null;
|
|
8267
9558
|
};
|
|
8268
|
-
|
|
8269
|
-
|
|
9559
|
+
const statusCounts = {
|
|
9560
|
+
active: 0,
|
|
9561
|
+
draft: 0,
|
|
9562
|
+
frozen: 0,
|
|
9563
|
+
deprecated: 0
|
|
9564
|
+
};
|
|
9565
|
+
let staleCount = 0;
|
|
9566
|
+
for (const { feature } of allFeatures) {
|
|
9567
|
+
const s = feature.status;
|
|
9568
|
+
if (s in statusCounts) statusCounts[s] = (statusCounts[s] ?? 0) + 1;
|
|
9569
|
+
if (feature.annotations?.some((a$1) => a$1.type === "stale-review")) staleCount++;
|
|
9570
|
+
}
|
|
9571
|
+
const byDomain = /* @__PURE__ */ new Map();
|
|
9572
|
+
const noDomain = [];
|
|
9573
|
+
for (const f of allFeatures) {
|
|
9574
|
+
const d = f.feature.domain;
|
|
9575
|
+
if (d) {
|
|
9576
|
+
const group = byDomain.get(d) ?? [];
|
|
9577
|
+
group.push(f);
|
|
9578
|
+
byDomain.set(d, group);
|
|
9579
|
+
} else noDomain.push(f);
|
|
9580
|
+
}
|
|
9581
|
+
if (format === "json") {
|
|
9582
|
+
const data = {
|
|
9583
|
+
rootReadme: rootReadme ?? null,
|
|
9584
|
+
stats: {
|
|
9585
|
+
total: allFeatures.length,
|
|
9586
|
+
byStatus: statusCounts,
|
|
9587
|
+
domains: [...byDomain.keys()]
|
|
9588
|
+
},
|
|
9589
|
+
domains: Object.fromEntries([...byDomain.entries()].map(([domain, features]) => [domain, features.map(({ feature, filePath }) => ({
|
|
9590
|
+
key: feature.featureKey,
|
|
9591
|
+
title: feature.title,
|
|
9592
|
+
status: feature.status,
|
|
9593
|
+
problem: feature.problem,
|
|
9594
|
+
tags: feature.tags ?? [],
|
|
9595
|
+
decisionsCount: feature.decisions?.length ?? 0,
|
|
9596
|
+
readme: readReadmeSummary(path.dirname(filePath)),
|
|
9597
|
+
path: filePath
|
|
9598
|
+
}))])),
|
|
9599
|
+
uncategorized: noDomain.map(({ feature, filePath }) => ({
|
|
9600
|
+
key: feature.featureKey,
|
|
9601
|
+
title: feature.title,
|
|
9602
|
+
status: feature.status,
|
|
9603
|
+
problem: feature.problem,
|
|
9604
|
+
tags: feature.tags ?? [],
|
|
9605
|
+
decisionsCount: feature.decisions?.length ?? 0,
|
|
9606
|
+
readme: readReadmeSummary(path.dirname(filePath)),
|
|
9607
|
+
path: filePath
|
|
9608
|
+
}))
|
|
9609
|
+
};
|
|
9610
|
+
return { content: [{
|
|
9611
|
+
type: "text",
|
|
9612
|
+
text: JSON.stringify(data, null, 2)
|
|
9613
|
+
}] };
|
|
9614
|
+
}
|
|
9615
|
+
const lines = [];
|
|
9616
|
+
if (rootReadme) lines.push("## Project", "", rootReadme, "");
|
|
9617
|
+
lines.push("## Stats", `${allFeatures.length} features — ${statusCounts.active} active · ${statusCounts.draft} draft · ${statusCounts.frozen} frozen · ${statusCounts.deprecated} deprecated${staleCount > 0 ? ` · ${staleCount} stale (needs review)` : ""}`, "");
|
|
9618
|
+
const formatRow = (feature, filePath) => {
|
|
9619
|
+
const readme = cachedReadme(path.dirname(filePath));
|
|
9620
|
+
const problem = feature.problem.length > 100 ? feature.problem.slice(0, 97) + "…" : feature.problem;
|
|
9621
|
+
const tags = feature.tags?.length ? ` [${feature.tags.join(", ")}]` : "";
|
|
9622
|
+
const dec = feature.decisions?.length ? ` ${feature.decisions.length} decisions` : "";
|
|
9623
|
+
const readmeLine = readme ? `\n ${(readme.split("\n")[0] ?? "").replace(/^#+\s*/, "").slice(0, 80)}` : "";
|
|
9624
|
+
return ` ${statusIcon(feature.status)} ${feature.featureKey.padEnd(18)} ${feature.title}${tags}${dec}\n ${problem}${readmeLine}`;
|
|
9625
|
+
};
|
|
9626
|
+
for (const [domain, features] of [...byDomain.entries()].sort(([a$1], [b]) => a$1.localeCompare(b))) {
|
|
9627
|
+
lines.push(`### ${domain}`);
|
|
9628
|
+
for (const { feature, filePath } of features) lines.push(formatRow(feature, filePath));
|
|
9629
|
+
lines.push("");
|
|
9630
|
+
}
|
|
9631
|
+
if (noDomain.length > 0) {
|
|
9632
|
+
lines.push("### (no domain)");
|
|
9633
|
+
for (const { feature, filePath } of noDomain) lines.push(formatRow(feature, filePath));
|
|
9634
|
+
lines.push("");
|
|
9635
|
+
}
|
|
9636
|
+
return { content: [{
|
|
9637
|
+
type: "text",
|
|
9638
|
+
text: lines.join("\n")
|
|
9639
|
+
}] };
|
|
9640
|
+
}
|
|
9641
|
+
case "audit_decisions": return { ...handleAuditDecisions(a, workspaceRoot) };
|
|
9642
|
+
case "feature_similarity": return { ...handleFeatureSimilarity(a, workspaceRoot) };
|
|
9643
|
+
case "time_travel": return { ...handleTimeTravel(a, workspaceRoot) };
|
|
9644
|
+
case "cross_feature_impact": return { ...handleCrossFeatureImpact(a, workspaceRoot) };
|
|
9645
|
+
case "extract_all_features": {
|
|
9646
|
+
const toUnix = (p) => p.replace(/\\/g, "/");
|
|
9647
|
+
const scanRoot = a.path ? resolvePath(String(a.path)) : workspaceRoot;
|
|
9648
|
+
const strategy = String(a.strategy ?? "module");
|
|
9649
|
+
const defaultDepth = strategy === "directory" ? 2 : 4;
|
|
9650
|
+
const maxDepth = a.depth ? Number(a.depth) : defaultDepth;
|
|
9651
|
+
const MODULE_SIGNALS = new Set([
|
|
9652
|
+
"package.json",
|
|
9653
|
+
"go.mod",
|
|
9654
|
+
"Cargo.toml",
|
|
9655
|
+
"pyproject.toml",
|
|
9656
|
+
"setup.py",
|
|
9657
|
+
"pom.xml",
|
|
9658
|
+
"build.gradle",
|
|
9659
|
+
"build.gradle.kts",
|
|
9660
|
+
"Gemfile",
|
|
9661
|
+
"composer.json",
|
|
9662
|
+
"index.ts",
|
|
9663
|
+
"index.js",
|
|
9664
|
+
"index.tsx",
|
|
9665
|
+
"mod.ts",
|
|
9666
|
+
"main.rs",
|
|
9667
|
+
"main.go",
|
|
9668
|
+
"main.ts",
|
|
9669
|
+
"main.js",
|
|
9670
|
+
"main.py",
|
|
9671
|
+
"__init__.py",
|
|
9672
|
+
"lib.rs"
|
|
9673
|
+
]);
|
|
9674
|
+
const SOURCE_EXTS = new Set([
|
|
9675
|
+
".ts",
|
|
9676
|
+
".tsx",
|
|
9677
|
+
".js",
|
|
9678
|
+
".jsx",
|
|
9679
|
+
".py",
|
|
9680
|
+
".go",
|
|
9681
|
+
".rs",
|
|
9682
|
+
".java",
|
|
9683
|
+
".kt",
|
|
9684
|
+
".cs",
|
|
9685
|
+
".rb",
|
|
9686
|
+
".php",
|
|
9687
|
+
".vue",
|
|
9688
|
+
".svelte",
|
|
9689
|
+
".sql",
|
|
9690
|
+
".c",
|
|
9691
|
+
".cpp",
|
|
9692
|
+
".swift"
|
|
9693
|
+
]);
|
|
9694
|
+
const SKIP = new Set([
|
|
9695
|
+
"node_modules",
|
|
9696
|
+
".git",
|
|
9697
|
+
"dist",
|
|
9698
|
+
"build",
|
|
9699
|
+
"out",
|
|
9700
|
+
"__pycache__",
|
|
9701
|
+
".turbo",
|
|
9702
|
+
"coverage",
|
|
9703
|
+
"vendor",
|
|
9704
|
+
"target",
|
|
9705
|
+
".next",
|
|
9706
|
+
".nuxt",
|
|
9707
|
+
".cache",
|
|
9708
|
+
".venv",
|
|
9709
|
+
"venv",
|
|
9710
|
+
"_archive",
|
|
9711
|
+
"tmp",
|
|
9712
|
+
"temp",
|
|
9713
|
+
"migrations",
|
|
9714
|
+
"fixtures",
|
|
9715
|
+
"mocks",
|
|
9716
|
+
"__mocks__"
|
|
9717
|
+
]);
|
|
9718
|
+
const candidates = [];
|
|
9719
|
+
const alreadyDocumented = [];
|
|
9720
|
+
function mcpWalk(dir, depth) {
|
|
9721
|
+
if (depth > maxDepth) return;
|
|
9722
|
+
let entries;
|
|
9723
|
+
try {
|
|
9724
|
+
entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
9725
|
+
} catch {
|
|
9726
|
+
return;
|
|
9727
|
+
}
|
|
9728
|
+
const names = new Set(entries.filter((e) => e.isFile()).map((e) => e.name));
|
|
9729
|
+
if (names.has("feature.json")) alreadyDocumented.push(toUnix(path.relative(scanRoot, dir)) || ".");
|
|
9730
|
+
else if (depth > 0) {
|
|
9731
|
+
const signals = [];
|
|
9732
|
+
let sourceFileCount = 0;
|
|
9733
|
+
for (const name$1 of names) {
|
|
9734
|
+
if (MODULE_SIGNALS.has(name$1) || name$1.endsWith(".csproj")) signals.push(name$1);
|
|
9735
|
+
if (SOURCE_EXTS.has(path.extname(name$1))) sourceFileCount++;
|
|
9736
|
+
}
|
|
9737
|
+
if (strategy === "module" ? signals.length > 0 : sourceFileCount > 0) candidates.push({
|
|
9738
|
+
dir,
|
|
9739
|
+
relativePath: toUnix(path.relative(scanRoot, dir)),
|
|
9740
|
+
signals,
|
|
9741
|
+
sourceFileCount,
|
|
9742
|
+
alreadyHasFeature: false,
|
|
9743
|
+
parentDir: null
|
|
9744
|
+
});
|
|
9745
|
+
}
|
|
9746
|
+
for (const e of entries) {
|
|
9747
|
+
if (!e.isDirectory() || e.name.startsWith(".") || SKIP.has(e.name)) continue;
|
|
9748
|
+
mcpWalk(path.join(dir, e.name), depth + 1);
|
|
9749
|
+
}
|
|
9750
|
+
}
|
|
9751
|
+
mcpWalk(scanRoot, 0);
|
|
9752
|
+
candidates.sort((a$1, b) => a$1.dir.split(path.sep).length - b.dir.split(path.sep).length);
|
|
9753
|
+
const candidateDirs = new Set(candidates.map((c) => c.dir));
|
|
9754
|
+
for (const c of candidates) {
|
|
9755
|
+
let parent = path.dirname(c.dir);
|
|
9756
|
+
while (parent !== scanRoot && parent !== path.dirname(parent)) {
|
|
9757
|
+
if (candidateDirs.has(parent)) {
|
|
9758
|
+
c.parentDir = parent;
|
|
9759
|
+
break;
|
|
9760
|
+
}
|
|
9761
|
+
parent = path.dirname(parent);
|
|
9762
|
+
}
|
|
9763
|
+
}
|
|
9764
|
+
if (candidates.length === 0) return { content: [{
|
|
9765
|
+
type: "text",
|
|
9766
|
+
text: [
|
|
9767
|
+
`No undocumented modules found in "${scanRoot}".`,
|
|
9768
|
+
alreadyDocumented.length > 0 ? `${alreadyDocumented.length} director${alreadyDocumented.length === 1 ? "y is" : "ies are"} already documented:\n${alreadyDocumented.map((p) => ` - \`${p}\``).join("\n")}` : "",
|
|
9769
|
+
strategy === "module" ? "Tip: try strategy=\"directory\" to capture all directories with source files." : ""
|
|
9770
|
+
].filter(Boolean).join("\n")
|
|
9771
|
+
}] };
|
|
9772
|
+
const lines = [
|
|
9773
|
+
`## extract_all_features — ${scanRoot}`,
|
|
9774
|
+
"",
|
|
9775
|
+
`**Strategy:** ${strategy} **Depth:** ${maxDepth}`,
|
|
9776
|
+
`**Found:** ${candidates.length} undocumented module${candidates.length === 1 ? "" : "s"}`,
|
|
9777
|
+
alreadyDocumented.length > 0 ? `**Already documented:** ${alreadyDocumented.length} (skipped):\n${alreadyDocumented.map((p) => ` - \`${p}\``).join("\n")}` : null,
|
|
9778
|
+
"",
|
|
9779
|
+
"### Candidates",
|
|
9780
|
+
""
|
|
9781
|
+
].filter((s) => s !== null);
|
|
9782
|
+
for (const c of candidates) {
|
|
9783
|
+
const indent = c.parentDir ? " " : "";
|
|
9784
|
+
const sigStr = c.signals.length > 0 ? ` [${c.signals.slice(0, 3).join(", ")}]` : "";
|
|
9785
|
+
const parentNote = c.parentDir ? ` (child of ${toUnix(path.relative(scanRoot, c.parentDir))})` : "";
|
|
9786
|
+
lines.push(`${indent}- \`${c.relativePath}\` — ${c.sourceFileCount} src file${c.sourceFileCount === 1 ? "" : "s"}${sigStr}${parentNote}`);
|
|
9787
|
+
}
|
|
9788
|
+
lines.push("");
|
|
9789
|
+
lines.push("### Next steps");
|
|
9790
|
+
lines.push("");
|
|
9791
|
+
lines.push("Call `extract_feature_from_code` on each candidate path above, in order (parents before children).", "Then call `create_feature` + `write_feature_fields` for each.", "Finally, wire lineage: for each child feature, set `lineage.parent` in write_feature_fields.", "", `**First call:** \`extract_feature_from_code("${candidates[0]?.dir}")\``);
|
|
8270
9792
|
return { content: [{
|
|
8271
9793
|
type: "text",
|
|
8272
|
-
text:
|
|
9794
|
+
text: lines.join("\n")
|
|
8273
9795
|
}] };
|
|
8274
9796
|
}
|
|
8275
9797
|
default: return { content: [{
|
|
@@ -8295,7 +9817,7 @@ function findNearestFeature(startDir) {
|
|
|
8295
9817
|
while (true) {
|
|
8296
9818
|
const candidate = path.join(current, "feature.json");
|
|
8297
9819
|
if (fs.existsSync(candidate)) try {
|
|
8298
|
-
const result = validateFeature
|
|
9820
|
+
const result = validateFeature(JSON.parse(fs.readFileSync(candidate, "utf-8")));
|
|
8299
9821
|
if (result.success) return result.data;
|
|
8300
9822
|
} catch {}
|
|
8301
9823
|
const parent = path.dirname(current);
|
|
@@ -8312,11 +9834,11 @@ function walkForFeatures(dir, results) {
|
|
|
8312
9834
|
try {
|
|
8313
9835
|
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
8314
9836
|
for (const entry of entries) {
|
|
8315
|
-
if (entry.name === "node_modules" || entry.name === ".git" || entry.name === "dist") continue;
|
|
9837
|
+
if (entry.name === "node_modules" || entry.name === ".git" || entry.name === "dist" || entry.name === "_archive") continue;
|
|
8316
9838
|
const full = path.join(dir, entry.name);
|
|
8317
9839
|
if (entry.isDirectory()) walkForFeatures(full, results);
|
|
8318
9840
|
else if (entry.name === "feature.json") try {
|
|
8319
|
-
const result = validateFeature
|
|
9841
|
+
const result = validateFeature(JSON.parse(fs.readFileSync(full, "utf-8")));
|
|
8320
9842
|
if (result.success) results.push({
|
|
8321
9843
|
feature: result.data,
|
|
8322
9844
|
filePath: full
|
|
@@ -8335,13 +9857,18 @@ function formatFeatureSummary(feature) {
|
|
|
8335
9857
|
if (feature.analysis) lines.push(`Analysis : ${feature.analysis.slice(0, 200)}`);
|
|
8336
9858
|
if (feature.successCriteria) lines.push(`Success : ${feature.successCriteria}`);
|
|
8337
9859
|
if (feature.domain) lines.push(`Domain : ${feature.domain}`);
|
|
9860
|
+
if (feature.priority) lines.push(`Priority : P${feature.priority}/5`);
|
|
8338
9861
|
if (feature.decisions?.length) lines.push(`Decisions : ${feature.decisions.length} recorded`);
|
|
8339
9862
|
if (feature.lineage?.parent) lines.push(`Parent : ${feature.lineage.parent}`);
|
|
8340
9863
|
if (feature.lineage?.children?.length) lines.push(`Children : ${feature.lineage.children.join(", ")}`);
|
|
8341
9864
|
return lines.join("\n");
|
|
8342
9865
|
}
|
|
8343
9866
|
function buildLineageTree(feature, map, childrenOf, depth) {
|
|
8344
|
-
return [`${" ".repeat(depth)}${statusIcon(feature.status)} ${feature.featureKey} (${feature.status}) — ${feature.title}`, ...(childrenOf.get(feature.featureKey) ?? feature.lineage?.children ?? []).
|
|
9867
|
+
return [`${" ".repeat(depth)}${statusIcon(feature.status)} ${feature.featureKey} (${feature.status}) — ${feature.title}`, ...(childrenOf.get(feature.featureKey) ?? feature.lineage?.children ?? []).slice().sort((a, b) => {
|
|
9868
|
+
const pa = map.get(a)?.priority ?? 9999;
|
|
9869
|
+
const pb = map.get(b)?.priority ?? 9999;
|
|
9870
|
+
return pa !== pb ? pa - pb : a.localeCompare(b);
|
|
9871
|
+
}).flatMap((key) => {
|
|
8345
9872
|
const child = map.get(key);
|
|
8346
9873
|
return child ? [buildLineageTree(child, map, childrenOf, depth + 1)] : [];
|
|
8347
9874
|
})].join("\n");
|
|
@@ -8354,6 +9881,45 @@ function statusIcon(status) {
|
|
|
8354
9881
|
deprecated: "⊘"
|
|
8355
9882
|
}[status] ?? "?";
|
|
8356
9883
|
}
|
|
9884
|
+
const REQUIRED_FOR_ACTIVE = [
|
|
9885
|
+
"analysis",
|
|
9886
|
+
"implementation",
|
|
9887
|
+
"successCriteria"
|
|
9888
|
+
];
|
|
9889
|
+
const REQUIRED_FOR_FROZEN = [
|
|
9890
|
+
"analysis",
|
|
9891
|
+
"implementation",
|
|
9892
|
+
"successCriteria",
|
|
9893
|
+
"knownLimitations",
|
|
9894
|
+
"tags"
|
|
9895
|
+
];
|
|
9896
|
+
function getMissingForTransition(feature, to) {
|
|
9897
|
+
const required$2 = to === "active" ? REQUIRED_FOR_ACTIVE : to === "frozen" ? REQUIRED_FOR_FROZEN : [];
|
|
9898
|
+
const missing = [];
|
|
9899
|
+
for (const field of required$2) {
|
|
9900
|
+
const val = feature[field];
|
|
9901
|
+
if (val === void 0 || val === null) {
|
|
9902
|
+
missing.push(field);
|
|
9903
|
+
continue;
|
|
9904
|
+
}
|
|
9905
|
+
if (typeof val === "string" && val.trim().length === 0) {
|
|
9906
|
+
missing.push(field);
|
|
9907
|
+
continue;
|
|
9908
|
+
}
|
|
9909
|
+
if (Array.isArray(val) && val.length === 0) {
|
|
9910
|
+
missing.push(field);
|
|
9911
|
+
continue;
|
|
9912
|
+
}
|
|
9913
|
+
}
|
|
9914
|
+
if ((to === "active" || to === "frozen") && (!feature.decisions || feature.decisions.length === 0)) missing.push("decisions");
|
|
9915
|
+
return [...new Set(missing)];
|
|
9916
|
+
}
|
|
9917
|
+
/**
 * Validate a status transition, returning an error message or null.
 *
 * Rules, evaluated in order:
 *  1. same-state transitions are rejected as no-ops;
 *  2. "deprecated" is terminal — nothing may leave it;
 *  3. no status may go back to "draft".
 *
 * @param {string} from - Current feature status.
 * @param {string} to - Requested feature status.
 * @returns {string|null} Human-readable rejection reason, or null if legal.
 */
function checkIllegalTransition(from, to) {
	const rules = [
		[() => from === to, () => `Feature is already "${to}".`],
		[() => from === "deprecated", () => "Cannot transition from deprecated. Create a new feature instead."],
		[() => to === "draft", () => "Cannot transition back to draft. Use \"active\" to reopen."]
	];
	for (const [applies, message] of rules) if (applies()) return message();
	return null;
}
|
|
8357
9923
|
async function main() {
|
|
8358
9924
|
const transport = new StdioServerTransport();
|
|
8359
9925
|
await server.connect(transport);
|