skill-flow 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +190 -0
- package/README.md +108 -0
- package/README.zh.md +108 -0
- package/dist/adapters/channel-adapters.d.ts +8 -0
- package/dist/adapters/channel-adapters.js +56 -0
- package/dist/adapters/channel-adapters.js.map +1 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +118 -0
- package/dist/cli.js.map +1 -0
- package/dist/domain/types.d.ts +133 -0
- package/dist/domain/types.js +2 -0
- package/dist/domain/types.js.map +1 -0
- package/dist/services/deployment-applier.d.ts +6 -0
- package/dist/services/deployment-applier.js +54 -0
- package/dist/services/deployment-applier.js.map +1 -0
- package/dist/services/deployment-planner.d.ts +11 -0
- package/dist/services/deployment-planner.js +179 -0
- package/dist/services/deployment-planner.js.map +1 -0
- package/dist/services/doctor-service.d.ts +5 -0
- package/dist/services/doctor-service.js +129 -0
- package/dist/services/doctor-service.js.map +1 -0
- package/dist/services/inventory-service.d.ts +14 -0
- package/dist/services/inventory-service.js +186 -0
- package/dist/services/inventory-service.js.map +1 -0
- package/dist/services/skill-flow.d.ts +60 -0
- package/dist/services/skill-flow.js +260 -0
- package/dist/services/skill-flow.js.map +1 -0
- package/dist/services/source-service.d.ts +35 -0
- package/dist/services/source-service.js +270 -0
- package/dist/services/source-service.js.map +1 -0
- package/dist/services/workflow-service.d.ts +5 -0
- package/dist/services/workflow-service.js +32 -0
- package/dist/services/workflow-service.js.map +1 -0
- package/dist/state/store.d.ts +14 -0
- package/dist/state/store.js +59 -0
- package/dist/state/store.js.map +1 -0
- package/dist/tests/skill-flow.test.d.ts +1 -0
- package/dist/tests/skill-flow.test.js +926 -0
- package/dist/tests/skill-flow.test.js.map +1 -0
- package/dist/tui/config-app.d.ts +47 -0
- package/dist/tui/config-app.js +732 -0
- package/dist/tui/config-app.js.map +1 -0
- package/dist/tui/selection-state.d.ts +8 -0
- package/dist/tui/selection-state.js +32 -0
- package/dist/tui/selection-state.js.map +1 -0
- package/dist/utils/constants.d.ts +19 -0
- package/dist/utils/constants.js +164 -0
- package/dist/utils/constants.js.map +1 -0
- package/dist/utils/format.d.ts +6 -0
- package/dist/utils/format.js +45 -0
- package/dist/utils/format.js.map +1 -0
- package/dist/utils/fs.d.ts +10 -0
- package/dist/utils/fs.js +89 -0
- package/dist/utils/fs.js.map +1 -0
- package/dist/utils/git.d.ts +3 -0
- package/dist/utils/git.js +12 -0
- package/dist/utils/git.js.map +1 -0
- package/dist/utils/result.d.ts +4 -0
- package/dist/utils/result.js +15 -0
- package/dist/utils/result.js.map +1 -0
- package/dist/utils/source-id.d.ts +2 -0
- package/dist/utils/source-id.js +16 -0
- package/dist/utils/source-id.js.map +1 -0
- package/img/img-1.jpg +0 -0
- package/package.json +39 -0
- package/src/adapters/channel-adapters.ts +75 -0
- package/src/cli.tsx +147 -0
- package/src/domain/types.ts +175 -0
- package/src/services/deployment-applier.ts +81 -0
- package/src/services/deployment-planner.ts +259 -0
- package/src/services/doctor-service.ts +156 -0
- package/src/services/inventory-service.ts +251 -0
- package/src/services/skill-flow.ts +381 -0
- package/src/services/source-service.ts +427 -0
- package/src/services/workflow-service.ts +56 -0
- package/src/state/store.ts +68 -0
- package/src/tests/skill-flow.test.ts +1184 -0
- package/src/tui/config-app.tsx +1094 -0
- package/src/tui/selection-state.ts +45 -0
- package/src/utils/constants.ts +201 -0
- package/src/utils/format.ts +59 -0
- package/src/utils/fs.ts +102 -0
- package/src/utils/git.ts +16 -0
- package/src/utils/result.ts +23 -0
- package/src/utils/source-id.ts +19 -0
- package/tsconfig.json +22 -0
- package/vitest.config.ts +8 -0
|
@@ -0,0 +1,1184 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { execFileSync } from "node:child_process";
|
|
5
|
+
import { afterEach, beforeEach, describe, expect, test } from "vitest";
|
|
6
|
+
import { SkillFlowApp } from "../services/skill-flow.js";
|
|
7
|
+
import {
|
|
8
|
+
buildProjectionWarningMap,
|
|
9
|
+
buildCommandBar,
|
|
10
|
+
buildContextBar,
|
|
11
|
+
buildSaveLabel,
|
|
12
|
+
draftsEqual,
|
|
13
|
+
getPaneWidths,
|
|
14
|
+
getPaneViewportCount,
|
|
15
|
+
getSaveDisplayPhase,
|
|
16
|
+
} from "../tui/config-app.js";
|
|
17
|
+
import {
|
|
18
|
+
TARGET_COMPAT_READ_CANDIDATES,
|
|
19
|
+
TARGET_DEFINITIONS,
|
|
20
|
+
TARGET_PATH_CANDIDATES,
|
|
21
|
+
} from "../utils/constants.js";
|
|
22
|
+
import {
|
|
23
|
+
getParentSelectionState,
|
|
24
|
+
toggleChild,
|
|
25
|
+
toggleParent,
|
|
26
|
+
type TreeSelectionState,
|
|
27
|
+
} from "../tui/selection-state.js";
|
|
28
|
+
|
|
29
|
+
describe.sequential("skill-flow", () => {
|
|
30
|
+
let sandboxRoot: string;
|
|
31
|
+
let stateRoot: string;
|
|
32
|
+
let targetsRoot: string;
|
|
33
|
+
|
|
34
|
+
beforeEach(async () => {
|
|
35
|
+
sandboxRoot = await fs.mkdtemp(path.join(os.tmpdir(), "skill-flow-test-"));
|
|
36
|
+
stateRoot = path.join(sandboxRoot, "state");
|
|
37
|
+
targetsRoot = path.join(sandboxRoot, "targets");
|
|
38
|
+
await fs.mkdir(targetsRoot, { recursive: true });
|
|
39
|
+
|
|
40
|
+
process.env.SKILL_FLOW_STATE_ROOT = stateRoot;
|
|
41
|
+
process.env.SKILL_FLOW_TARGET_CLAUDE_CODE = path.join(targetsRoot, "claude");
|
|
42
|
+
process.env.SKILL_FLOW_TARGET_CODEX = path.join(targetsRoot, "codex");
|
|
43
|
+
process.env.SKILL_FLOW_TARGET_CURSOR = path.join(targetsRoot, "cursor");
|
|
44
|
+
process.env.SKILL_FLOW_TARGET_GITHUB_COPILOT = path.join(targetsRoot, "github-copilot");
|
|
45
|
+
process.env.SKILL_FLOW_TARGET_GEMINI_CLI = path.join(targetsRoot, "gemini-cli");
|
|
46
|
+
process.env.SKILL_FLOW_TARGET_OPENCODE = path.join(targetsRoot, "opencode");
|
|
47
|
+
process.env.SKILL_FLOW_TARGET_OPENCLAW = path.join(targetsRoot, "openclaw");
|
|
48
|
+
process.env.SKILL_FLOW_TARGET_PI = path.join(targetsRoot, "pi");
|
|
49
|
+
process.env.SKILL_FLOW_TARGET_WINDSURF = path.join(targetsRoot, "windsurf");
|
|
50
|
+
process.env.SKILL_FLOW_TARGET_ROO_CODE = path.join(targetsRoot, "roo-code");
|
|
51
|
+
process.env.SKILL_FLOW_TARGET_CLINE = path.join(targetsRoot, "cline");
|
|
52
|
+
process.env.SKILL_FLOW_TARGET_AMP = path.join(targetsRoot, "amp");
|
|
53
|
+
process.env.SKILL_FLOW_TARGET_KIRO = path.join(targetsRoot, "kiro");
|
|
54
|
+
|
|
55
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE, { recursive: true });
|
|
56
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_CODEX, { recursive: true });
|
|
57
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_CURSOR, { recursive: true });
|
|
58
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_GITHUB_COPILOT, { recursive: true });
|
|
59
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_GEMINI_CLI, { recursive: true });
|
|
60
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_OPENCODE, { recursive: true });
|
|
61
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_OPENCLAW, { recursive: true });
|
|
62
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_PI, { recursive: true });
|
|
63
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_WINDSURF, { recursive: true });
|
|
64
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_ROO_CODE, { recursive: true });
|
|
65
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_CLINE, { recursive: true });
|
|
66
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_AMP, { recursive: true });
|
|
67
|
+
await fs.mkdir(process.env.SKILL_FLOW_TARGET_KIRO, { recursive: true });
|
|
68
|
+
});
|
|
69
|
+
|
|
70
|
+
afterEach(async () => {
|
|
71
|
+
delete process.env.SKILL_FLOW_STATE_ROOT;
|
|
72
|
+
delete process.env.SKILL_FLOW_TARGET_CLAUDE_CODE;
|
|
73
|
+
delete process.env.SKILL_FLOW_TARGET_CODEX;
|
|
74
|
+
delete process.env.SKILL_FLOW_TARGET_CURSOR;
|
|
75
|
+
delete process.env.SKILL_FLOW_TARGET_GITHUB_COPILOT;
|
|
76
|
+
delete process.env.SKILL_FLOW_TARGET_GEMINI_CLI;
|
|
77
|
+
delete process.env.SKILL_FLOW_TARGET_OPENCODE;
|
|
78
|
+
delete process.env.SKILL_FLOW_TARGET_OPENCLAW;
|
|
79
|
+
delete process.env.SKILL_FLOW_TARGET_PI;
|
|
80
|
+
delete process.env.SKILL_FLOW_TARGET_WINDSURF;
|
|
81
|
+
delete process.env.SKILL_FLOW_TARGET_ROO_CODE;
|
|
82
|
+
delete process.env.SKILL_FLOW_TARGET_CLINE;
|
|
83
|
+
delete process.env.SKILL_FLOW_TARGET_AMP;
|
|
84
|
+
delete process.env.SKILL_FLOW_TARGET_KIRO;
|
|
85
|
+
await fs.rm(sandboxRoot, { recursive: true, force: true });
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
test("adds a git source and discovers valid skills", async () => {
|
|
89
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
90
|
+
"frontend/SKILL.md": skillDoc("frontend", "Build frontend flows."),
|
|
91
|
+
"ops/SKILL.md": skillDoc("ops", "Run operator workflows."),
|
|
92
|
+
});
|
|
93
|
+
const app = new SkillFlowApp();
|
|
94
|
+
|
|
95
|
+
const result = await app.addSource(repoPath);
|
|
96
|
+
|
|
97
|
+
expect(result.ok).toBe(true);
|
|
98
|
+
if (!result.ok) {
|
|
99
|
+
return;
|
|
100
|
+
}
|
|
101
|
+
expect(result.data.leafCount).toBe(2);
|
|
102
|
+
expect(result.warnings).toHaveLength(0);
|
|
103
|
+
});
|
|
104
|
+
|
|
105
|
+
test("returns a clear error when git fetch fails", async () => {
|
|
106
|
+
const app = new SkillFlowApp();
|
|
107
|
+
|
|
108
|
+
const result = await app.addSource(path.join(sandboxRoot, "missing-repo"));
|
|
109
|
+
|
|
110
|
+
expect(result.ok).toBe(false);
|
|
111
|
+
if (result.ok) {
|
|
112
|
+
return;
|
|
113
|
+
}
|
|
114
|
+
expect(result.errors[0]?.code).toBe("GIT_CLONE_FAILED");
|
|
115
|
+
});
|
|
116
|
+
|
|
117
|
+
test("rejects uninstall for an unknown workflow group", async () => {
|
|
118
|
+
const app = new SkillFlowApp();
|
|
119
|
+
|
|
120
|
+
const result = await app.uninstall(["missing-source"]);
|
|
121
|
+
|
|
122
|
+
expect(result.ok).toBe(false);
|
|
123
|
+
if (result.ok) {
|
|
124
|
+
return;
|
|
125
|
+
}
|
|
126
|
+
expect(result.errors[0]?.code).toBe("SOURCE_NOT_FOUND");
|
|
127
|
+
});
|
|
128
|
+
|
|
129
|
+
test("rejects a source with zero valid skills", async () => {
|
|
130
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
131
|
+
"broken/SKILL.md": "No heading here",
|
|
132
|
+
});
|
|
133
|
+
const app = new SkillFlowApp();
|
|
134
|
+
|
|
135
|
+
const result = await app.addSource(repoPath);
|
|
136
|
+
|
|
137
|
+
expect(result.ok).toBe(false);
|
|
138
|
+
if (result.ok) {
|
|
139
|
+
return;
|
|
140
|
+
}
|
|
141
|
+
expect(result.errors[0]?.code).toBe("NO_VALID_LEAFS");
|
|
142
|
+
});
|
|
143
|
+
|
|
144
|
+
test("keeps valid skills and warns about invalid ones", async () => {
|
|
145
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
146
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
147
|
+
"bad/SKILL.md": "Broken file",
|
|
148
|
+
});
|
|
149
|
+
const app = new SkillFlowApp();
|
|
150
|
+
|
|
151
|
+
const result = await app.addSource(repoPath);
|
|
152
|
+
|
|
153
|
+
expect(result.ok).toBe(true);
|
|
154
|
+
if (!result.ok) {
|
|
155
|
+
return;
|
|
156
|
+
}
|
|
157
|
+
expect(result.data.leafCount).toBe(1);
|
|
158
|
+
expect(result.warnings).toHaveLength(1);
|
|
159
|
+
});
|
|
160
|
+
|
|
161
|
+
test("accepts skills that use YAML frontmatter metadata", async () => {
|
|
162
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
163
|
+
"browse/SKILL.md": `---
|
|
164
|
+
name: browse
|
|
165
|
+
version: 1.1.0
|
|
166
|
+
description: |
|
|
167
|
+
Fast headless browser for QA testing and site dogfooding.
|
|
168
|
+
Opens pages and validates flows.
|
|
169
|
+
---
|
|
170
|
+
<!-- generated -->
|
|
171
|
+
|
|
172
|
+
## Preamble
|
|
173
|
+
`,
|
|
174
|
+
});
|
|
175
|
+
const app = new SkillFlowApp();
|
|
176
|
+
|
|
177
|
+
const result = await app.addSource(repoPath);
|
|
178
|
+
|
|
179
|
+
expect(result.ok).toBe(true);
|
|
180
|
+
if (!result.ok) {
|
|
181
|
+
return;
|
|
182
|
+
}
|
|
183
|
+
expect(result.data.leafCount).toBe(1);
|
|
184
|
+
|
|
185
|
+
const listResult = await app.listWorkflows();
|
|
186
|
+
expect(listResult.ok).toBe(true);
|
|
187
|
+
if (!listResult.ok) {
|
|
188
|
+
return;
|
|
189
|
+
}
|
|
190
|
+
expect(listResult.data.summaries[0]?.leafs[0]?.title).toBe("browse");
|
|
191
|
+
expect(listResult.data.summaries[0]?.leafs[0]?.description).toContain(
|
|
192
|
+
"Fast headless browser",
|
|
193
|
+
);
|
|
194
|
+
expect(listResult.data.summaries[0]?.leafs[0]?.name).toBe("browse");
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
test("blocks apply preview when foreign content already exists at target path", async () => {
|
|
198
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
199
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
200
|
+
});
|
|
201
|
+
const app = new SkillFlowApp();
|
|
202
|
+
const added = await app.addSource(repoPath);
|
|
203
|
+
expect(added.ok).toBe(true);
|
|
204
|
+
if (!added.ok) {
|
|
205
|
+
return;
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
const sourceId = added.data.manifest.id;
|
|
209
|
+
const leafId = `${sourceId}:good`;
|
|
210
|
+
await fs.mkdir(path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, "good"), {
|
|
211
|
+
recursive: true,
|
|
212
|
+
});
|
|
213
|
+
|
|
214
|
+
const preview = await app.previewDraft(sourceId, {
|
|
215
|
+
enabledTargets: ["claude-code"],
|
|
216
|
+
selectedLeafIds: [leafId],
|
|
217
|
+
});
|
|
218
|
+
|
|
219
|
+
expect(preview.ok).toBe(true);
|
|
220
|
+
if (!preview.ok) {
|
|
221
|
+
return;
|
|
222
|
+
}
|
|
223
|
+
expect(preview.data.plan.blocked).toHaveLength(1);
|
|
224
|
+
expect(preview.data.plan.blocked[0]?.reason).toContain("Foreign content");
|
|
225
|
+
});
|
|
226
|
+
|
|
227
|
+
test("doctor detects broken symlinks", async () => {
|
|
228
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
229
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
230
|
+
});
|
|
231
|
+
const app = new SkillFlowApp();
|
|
232
|
+
const added = await app.addSource(repoPath);
|
|
233
|
+
expect(added.ok).toBe(true);
|
|
234
|
+
if (!added.ok) {
|
|
235
|
+
return;
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
const sourceId = added.data.manifest.id;
|
|
239
|
+
const leafId = `${sourceId}:good`;
|
|
240
|
+
const applied = await app.applyDraft(sourceId, {
|
|
241
|
+
enabledTargets: ["claude-code"],
|
|
242
|
+
selectedLeafIds: [leafId],
|
|
243
|
+
});
|
|
244
|
+
expect(applied.ok).toBe(true);
|
|
245
|
+
|
|
246
|
+
await fs.rm(path.join(stateRoot, "source", "git", sourceId, "good"), {
|
|
247
|
+
recursive: true,
|
|
248
|
+
force: true,
|
|
249
|
+
});
|
|
250
|
+
|
|
251
|
+
const doctor = await app.doctor();
|
|
252
|
+
expect(doctor.ok).toBe(true);
|
|
253
|
+
if (!doctor.ok) {
|
|
254
|
+
return;
|
|
255
|
+
}
|
|
256
|
+
expect(doctor.data.issues.some((issue) => issue.code === "BROKEN_SYMLINK")).toBe(true);
|
|
257
|
+
});
|
|
258
|
+
|
|
259
|
+
test("scans host directories too, but keeps the first discovered duplicate only", async () => {
|
|
260
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
261
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
262
|
+
".agents/skills/gstack-browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
263
|
+
});
|
|
264
|
+
const app = new SkillFlowApp();
|
|
265
|
+
|
|
266
|
+
const result = await app.addSource(repoPath);
|
|
267
|
+
|
|
268
|
+
expect(result.ok).toBe(true);
|
|
269
|
+
if (!result.ok) {
|
|
270
|
+
return;
|
|
271
|
+
}
|
|
272
|
+
expect(result.data.leafCount).toBe(1);
|
|
273
|
+
expect(
|
|
274
|
+
result.warnings.some((warning) =>
|
|
275
|
+
warning.message.includes("Duplicate skill content"),
|
|
276
|
+
),
|
|
277
|
+
).toBe(true);
|
|
278
|
+
const list = await app.listWorkflows();
|
|
279
|
+
expect(list.ok).toBe(true);
|
|
280
|
+
if (!list.ok) {
|
|
281
|
+
return;
|
|
282
|
+
}
|
|
283
|
+
expect(list.data.summaries[0]?.leafs.map((leaf) => leaf.relativePath)).toEqual([
|
|
284
|
+
"browse",
|
|
285
|
+
]);
|
|
286
|
+
});
|
|
287
|
+
|
|
288
|
+
test("discovers a unique skill from a host directory when no earlier duplicate exists", async () => {
|
|
289
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
290
|
+
".agents/skills/gstack-browse/SKILL.md": skillDoc("gstack-browse", "Host directory skill."),
|
|
291
|
+
});
|
|
292
|
+
const app = new SkillFlowApp();
|
|
293
|
+
|
|
294
|
+
const result = await app.addSource(repoPath);
|
|
295
|
+
|
|
296
|
+
expect(result.ok).toBe(true);
|
|
297
|
+
if (!result.ok) {
|
|
298
|
+
return;
|
|
299
|
+
}
|
|
300
|
+
expect(result.data.leafCount).toBe(1);
|
|
301
|
+
const list = await app.listWorkflows();
|
|
302
|
+
expect(list.ok).toBe(true);
|
|
303
|
+
if (!list.ok) {
|
|
304
|
+
return;
|
|
305
|
+
}
|
|
306
|
+
expect(list.data.summaries[0]?.leafs[0]?.relativePath).toBe(".agents/skills/gstack-browse");
|
|
307
|
+
});
|
|
308
|
+
|
|
309
|
+
test("prefers visible second-level skill directories before hidden second-level directories", async () => {
|
|
310
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
311
|
+
"catalog/browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
312
|
+
"catalog/.generated/browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
313
|
+
});
|
|
314
|
+
const app = new SkillFlowApp();
|
|
315
|
+
|
|
316
|
+
const result = await app.addSource(repoPath);
|
|
317
|
+
|
|
318
|
+
expect(result.ok).toBe(true);
|
|
319
|
+
if (!result.ok) {
|
|
320
|
+
return;
|
|
321
|
+
}
|
|
322
|
+
expect(result.data.leafCount).toBe(1);
|
|
323
|
+
const list = await app.listWorkflows();
|
|
324
|
+
expect(list.ok).toBe(true);
|
|
325
|
+
if (!list.ok) {
|
|
326
|
+
return;
|
|
327
|
+
}
|
|
328
|
+
expect(list.data.summaries[0]?.leafs[0]?.relativePath).toBe("catalog/browse");
|
|
329
|
+
expect(
|
|
330
|
+
result.warnings.some((warning) =>
|
|
331
|
+
warning.message.includes("catalog/.generated/browse"),
|
|
332
|
+
),
|
|
333
|
+
).toBe(true);
|
|
334
|
+
});
|
|
335
|
+
|
|
336
|
+
test("dedupes skills by metadata name and description", async () => {
|
|
337
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
338
|
+
"browse/SKILL.md": `---
|
|
339
|
+
name: browse
|
|
340
|
+
description: |
|
|
341
|
+
Canonical browse skill.
|
|
342
|
+
---
|
|
343
|
+
## Body
|
|
344
|
+
`,
|
|
345
|
+
"copy-of-browse/SKILL.md": `---
|
|
346
|
+
name: browse
|
|
347
|
+
description: |
|
|
348
|
+
Canonical browse skill.
|
|
349
|
+
---
|
|
350
|
+
## Body
|
|
351
|
+
`,
|
|
352
|
+
});
|
|
353
|
+
const app = new SkillFlowApp();
|
|
354
|
+
|
|
355
|
+
const result = await app.addSource(repoPath);
|
|
356
|
+
|
|
357
|
+
expect(result.ok).toBe(true);
|
|
358
|
+
if (!result.ok) {
|
|
359
|
+
return;
|
|
360
|
+
}
|
|
361
|
+
expect(result.data.leafCount).toBe(1);
|
|
362
|
+
expect(
|
|
363
|
+
result.warnings.some((warning) =>
|
|
364
|
+
warning.message.includes("Duplicate skill content"),
|
|
365
|
+
),
|
|
366
|
+
).toBe(true);
|
|
367
|
+
});
|
|
368
|
+
|
|
369
|
+
test("keeps same-name skills when descriptions differ", async () => {
|
|
370
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
371
|
+
"browse/SKILL.md": skillDoc("browse", "Canonical browse skill."),
|
|
372
|
+
"copy-of-browse/SKILL.md": skillDoc("browse", "Different browse skill."),
|
|
373
|
+
});
|
|
374
|
+
const app = new SkillFlowApp();
|
|
375
|
+
|
|
376
|
+
const result = await app.addSource(repoPath);
|
|
377
|
+
|
|
378
|
+
expect(result.ok).toBe(true);
|
|
379
|
+
if (!result.ok) {
|
|
380
|
+
return;
|
|
381
|
+
}
|
|
382
|
+
expect(result.data.leafCount).toBe(2);
|
|
383
|
+
});
|
|
384
|
+
|
|
385
|
+
test("apply uses natural skill names and removes legacy prefixed paths", async () => {
|
|
386
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
387
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
388
|
+
});
|
|
389
|
+
const app = new SkillFlowApp();
|
|
390
|
+
const added = await app.addSource(repoPath);
|
|
391
|
+
expect(added.ok).toBe(true);
|
|
392
|
+
if (!added.ok) {
|
|
393
|
+
return;
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
const sourceId = added.data.manifest.id;
|
|
397
|
+
const leafId = `${sourceId}:browse`;
|
|
398
|
+
const legacyPath = path.join(
|
|
399
|
+
process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!,
|
|
400
|
+
`${sourceId}--browse`,
|
|
401
|
+
);
|
|
402
|
+
|
|
403
|
+
await fs.symlink(
|
|
404
|
+
path.join(stateRoot, "source", "git", sourceId, "browse"),
|
|
405
|
+
legacyPath,
|
|
406
|
+
"junction",
|
|
407
|
+
);
|
|
408
|
+
|
|
409
|
+
const lockPath = path.join(stateRoot, "lock.json");
|
|
410
|
+
const lockFile = JSON.parse(await fs.readFile(lockPath, "utf8")) as {
|
|
411
|
+
deployments: Array<Record<string, string>>;
|
|
412
|
+
};
|
|
413
|
+
lockFile.deployments.push({
|
|
414
|
+
sourceId,
|
|
415
|
+
leafId,
|
|
416
|
+
target: "claude-code",
|
|
417
|
+
targetPath: legacyPath,
|
|
418
|
+
strategy: "symlink",
|
|
419
|
+
status: "active",
|
|
420
|
+
contentHash: "legacy",
|
|
421
|
+
appliedAt: new Date().toISOString(),
|
|
422
|
+
});
|
|
423
|
+
await fs.writeFile(lockPath, `${JSON.stringify(lockFile, null, 2)}\n`, "utf8");
|
|
424
|
+
|
|
425
|
+
const applied = await app.applyDraft(sourceId, {
|
|
426
|
+
enabledTargets: ["claude-code"],
|
|
427
|
+
selectedLeafIds: [leafId],
|
|
428
|
+
});
|
|
429
|
+
|
|
430
|
+
expect(applied.ok).toBe(true);
|
|
431
|
+
expect(await pathExists(legacyPath)).toBe(false);
|
|
432
|
+
expect(
|
|
433
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, "browse")),
|
|
434
|
+
).toBe(true);
|
|
435
|
+
});
|
|
436
|
+
|
|
437
|
+
test("keeps the earlier selected cross-group duplicate when linkName name and description all match", async () => {
|
|
438
|
+
const repoA = await createRepo(sandboxRoot, {
|
|
439
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
440
|
+
});
|
|
441
|
+
const repoB = await createRepo(sandboxRoot, {
|
|
442
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
443
|
+
});
|
|
444
|
+
const app = new SkillFlowApp();
|
|
445
|
+
const addedA = await app.addSource(repoA);
|
|
446
|
+
const addedB = await app.addSource(repoB);
|
|
447
|
+
expect(addedA.ok).toBe(true);
|
|
448
|
+
expect(addedB.ok).toBe(true);
|
|
449
|
+
if (!addedA.ok || !addedB.ok) {
|
|
450
|
+
return;
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
const sourceA = addedA.data.manifest.id;
|
|
454
|
+
const sourceB = addedB.data.manifest.id;
|
|
455
|
+
const leafA = `${sourceA}:browse`;
|
|
456
|
+
const leafB = `${sourceB}:browse`;
|
|
457
|
+
|
|
458
|
+
const firstApply = await app.applyDraft(sourceA, {
|
|
459
|
+
enabledTargets: ["claude-code"],
|
|
460
|
+
selectedLeafIds: [leafA],
|
|
461
|
+
});
|
|
462
|
+
expect(firstApply.ok).toBe(true);
|
|
463
|
+
|
|
464
|
+
const secondApply = await app.applyDraft(sourceB, {
|
|
465
|
+
enabledTargets: ["claude-code"],
|
|
466
|
+
selectedLeafIds: [leafB],
|
|
467
|
+
});
|
|
468
|
+
expect(secondApply.ok).toBe(true);
|
|
469
|
+
if (!secondApply.ok) {
|
|
470
|
+
return;
|
|
471
|
+
}
|
|
472
|
+
|
|
473
|
+
expect(secondApply.data.draft.selectedLeafIds).toEqual([]);
|
|
474
|
+
expect(
|
|
475
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, "browse")),
|
|
476
|
+
).toBe(true);
|
|
477
|
+
|
|
478
|
+
const lockPath = path.join(stateRoot, "lock.json");
|
|
479
|
+
const lock = JSON.parse(await fs.readFile(lockPath, "utf8")) as {
|
|
480
|
+
deployments: Array<{ sourceId: string; targetPath: string }>;
|
|
481
|
+
};
|
|
482
|
+
expect(
|
|
483
|
+
lock.deployments.filter((deployment) =>
|
|
484
|
+
deployment.targetPath.endsWith(path.join("claude", "browse")),
|
|
485
|
+
),
|
|
486
|
+
).toHaveLength(1);
|
|
487
|
+
expect(lock.deployments[0]?.sourceId).toBe(sourceA);
|
|
488
|
+
});
|
|
489
|
+
|
|
490
|
+
test("renames cross-group projections when linkName matches but content differs", async () => {
|
|
491
|
+
const repoA = await createRepo(sandboxRoot, {
|
|
492
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow from A."),
|
|
493
|
+
});
|
|
494
|
+
const repoB = await createRepo(sandboxRoot, {
|
|
495
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow from B."),
|
|
496
|
+
});
|
|
497
|
+
const app = new SkillFlowApp();
|
|
498
|
+
const addedA = await app.addSource(repoA);
|
|
499
|
+
const addedB = await app.addSource(repoB);
|
|
500
|
+
expect(addedA.ok).toBe(true);
|
|
501
|
+
expect(addedB.ok).toBe(true);
|
|
502
|
+
if (!addedA.ok || !addedB.ok) {
|
|
503
|
+
return;
|
|
504
|
+
}
|
|
505
|
+
|
|
506
|
+
const sourceA = addedA.data.manifest.id;
|
|
507
|
+
const sourceB = addedB.data.manifest.id;
|
|
508
|
+
const leafA = `${sourceA}:browse`;
|
|
509
|
+
const leafB = `${sourceB}:browse`;
|
|
510
|
+
|
|
511
|
+
const firstApply = await app.applyDraft(sourceA, {
|
|
512
|
+
enabledTargets: ["claude-code"],
|
|
513
|
+
selectedLeafIds: [leafA],
|
|
514
|
+
});
|
|
515
|
+
expect(firstApply.ok).toBe(true);
|
|
516
|
+
expect(
|
|
517
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, "browse")),
|
|
518
|
+
).toBe(true);
|
|
519
|
+
|
|
520
|
+
const secondApply = await app.applyDraft(sourceB, {
|
|
521
|
+
enabledTargets: ["claude-code"],
|
|
522
|
+
selectedLeafIds: [leafB],
|
|
523
|
+
});
|
|
524
|
+
expect(secondApply.ok).toBe(true);
|
|
525
|
+
|
|
526
|
+
expect(
|
|
527
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, "browse")),
|
|
528
|
+
).toBe(false);
|
|
529
|
+
expect(
|
|
530
|
+
await pathExists(
|
|
531
|
+
path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, `${sourceA}-browse`),
|
|
532
|
+
),
|
|
533
|
+
).toBe(true);
|
|
534
|
+
expect(
|
|
535
|
+
await pathExists(
|
|
536
|
+
path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, `${sourceB}-browse`),
|
|
537
|
+
),
|
|
538
|
+
).toBe(true);
|
|
539
|
+
});
|
|
540
|
+
|
|
541
|
+
test("doctor reports unavailable target paths", async () => {
|
|
542
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
543
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
544
|
+
});
|
|
545
|
+
const app = new SkillFlowApp();
|
|
546
|
+
const added = await app.addSource(repoPath);
|
|
547
|
+
expect(added.ok).toBe(true);
|
|
548
|
+
if (!added.ok) {
|
|
549
|
+
return;
|
|
550
|
+
}
|
|
551
|
+
|
|
552
|
+
await fs.rm(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, {
|
|
553
|
+
recursive: true,
|
|
554
|
+
force: true,
|
|
555
|
+
});
|
|
556
|
+
|
|
557
|
+
const doctor = await app.previewDraft(added.data.manifest.id, {
|
|
558
|
+
enabledTargets: ["claude-code"],
|
|
559
|
+
selectedLeafIds: [`${added.data.manifest.id}:good`],
|
|
560
|
+
});
|
|
561
|
+
|
|
562
|
+
expect(doctor.ok).toBe(true);
|
|
563
|
+
if (!doctor.ok) {
|
|
564
|
+
return;
|
|
565
|
+
}
|
|
566
|
+
expect(doctor.data.plan.blocked[0]?.reason).toContain("Target directory not found");
|
|
567
|
+
});
|
|
568
|
+
|
|
569
|
+
test("update detects added skills", async () => {
|
|
570
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
571
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
572
|
+
});
|
|
573
|
+
const app = new SkillFlowApp();
|
|
574
|
+
const added = await app.addSource(repoPath);
|
|
575
|
+
expect(added.ok).toBe(true);
|
|
576
|
+
|
|
577
|
+
await writeRepoFiles(repoPath, {
|
|
578
|
+
"extra/SKILL.md": skillDoc("extra", "Extra description."),
|
|
579
|
+
});
|
|
580
|
+
git(repoPath, ["add", "."]);
|
|
581
|
+
git(repoPath, ["commit", "-m", "add extra"]);
|
|
582
|
+
|
|
583
|
+
const updated = await app.updateSources([added.ok ? added.data.manifest.id : ""]);
|
|
584
|
+
expect(updated.ok).toBe(true);
|
|
585
|
+
if (!updated.ok) {
|
|
586
|
+
return;
|
|
587
|
+
}
|
|
588
|
+
expect(updated.data.updated[0]?.addedLeafIds).toHaveLength(1);
|
|
589
|
+
});
|
|
590
|
+
|
|
591
|
+
test("update removes projections for deleted skills", async () => {
|
|
592
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
593
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
594
|
+
});
|
|
595
|
+
const app = new SkillFlowApp();
|
|
596
|
+
const added = await app.addSource(repoPath);
|
|
597
|
+
expect(added.ok).toBe(true);
|
|
598
|
+
if (!added.ok) {
|
|
599
|
+
return;
|
|
600
|
+
}
|
|
601
|
+
|
|
602
|
+
const sourceId = added.data.manifest.id;
|
|
603
|
+
const leafId = `${sourceId}:good`;
|
|
604
|
+
await app.applyDraft(sourceId, {
|
|
605
|
+
enabledTargets: ["claude-code"],
|
|
606
|
+
selectedLeafIds: [leafId],
|
|
607
|
+
});
|
|
608
|
+
|
|
609
|
+
await fs.rm(path.join(repoPath, "good"), { recursive: true, force: true });
|
|
610
|
+
git(repoPath, ["add", "."]);
|
|
611
|
+
git(repoPath, ["commit", "-m", "remove good"]);
|
|
612
|
+
|
|
613
|
+
const updated = await app.updateSources([sourceId]);
|
|
614
|
+
expect(updated.ok).toBe(true);
|
|
615
|
+
if (!updated.ok) {
|
|
616
|
+
return;
|
|
617
|
+
}
|
|
618
|
+
expect(updated.data.updated[0]?.removedLeafIds).toEqual([leafId]);
|
|
619
|
+
expect(
|
|
620
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CLAUDE_CODE!, "good")),
|
|
621
|
+
).toBe(false);
|
|
622
|
+
});
|
|
623
|
+
|
|
624
|
+
test("update surfaces invalidated skills", async () => {
|
|
625
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
626
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
627
|
+
});
|
|
628
|
+
const app = new SkillFlowApp();
|
|
629
|
+
const added = await app.addSource(repoPath);
|
|
630
|
+
expect(added.ok).toBe(true);
|
|
631
|
+
if (!added.ok) {
|
|
632
|
+
return;
|
|
633
|
+
}
|
|
634
|
+
|
|
635
|
+
await writeRepoFiles(repoPath, {
|
|
636
|
+
"good/SKILL.md": "Broken now",
|
|
637
|
+
});
|
|
638
|
+
git(repoPath, ["add", "."]);
|
|
639
|
+
git(repoPath, ["commit", "-m", "invalidate"]);
|
|
640
|
+
|
|
641
|
+
const updated = await app.updateSources([added.data.manifest.id]);
|
|
642
|
+
expect(updated.ok).toBe(true);
|
|
643
|
+
if (!updated.ok) {
|
|
644
|
+
return;
|
|
645
|
+
}
|
|
646
|
+
expect(updated.data.updated[0]?.invalidatedLeafIds).toHaveLength(1);
|
|
647
|
+
});
|
|
648
|
+
|
|
649
|
+
test("selection state machine handles parent child partial transitions", () => {
|
|
650
|
+
let state: TreeSelectionState = {
|
|
651
|
+
allLeafIds: ["a", "b"],
|
|
652
|
+
selectedLeafIds: [],
|
|
653
|
+
};
|
|
654
|
+
|
|
655
|
+
expect(getParentSelectionState(state)).toBe("empty");
|
|
656
|
+
state = toggleChild(state, "a");
|
|
657
|
+
expect(getParentSelectionState(state)).toBe("partial");
|
|
658
|
+
state = toggleParent(state);
|
|
659
|
+
expect(getParentSelectionState(state)).toBe("full");
|
|
660
|
+
state = toggleChild(state, "b");
|
|
661
|
+
expect(getParentSelectionState(state)).toBe("partial");
|
|
662
|
+
});
|
|
663
|
+
|
|
664
|
+
test("doctor detects drift in copied projections", async () => {
|
|
665
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
666
|
+
"good/SKILL.md": skillDoc("good", "Good description."),
|
|
667
|
+
});
|
|
668
|
+
const app = new SkillFlowApp();
|
|
669
|
+
const added = await app.addSource(repoPath);
|
|
670
|
+
expect(added.ok).toBe(true);
|
|
671
|
+
if (!added.ok) {
|
|
672
|
+
return;
|
|
673
|
+
}
|
|
674
|
+
|
|
675
|
+
const sourceId = added.data.manifest.id;
|
|
676
|
+
const leafId = `${sourceId}:good`;
|
|
677
|
+
await app.applyDraft(sourceId, {
|
|
678
|
+
enabledTargets: ["openclaw"],
|
|
679
|
+
selectedLeafIds: [leafId],
|
|
680
|
+
});
|
|
681
|
+
|
|
682
|
+
await writeRepoFiles(process.env.SKILL_FLOW_TARGET_OPENCLAW!, {
|
|
683
|
+
["good/SKILL.md"]: "# Good\nMutated copy.",
|
|
684
|
+
});
|
|
685
|
+
|
|
686
|
+
const doctor = await app.doctor();
|
|
687
|
+
expect(doctor.ok).toBe(true);
|
|
688
|
+
if (!doctor.ok) {
|
|
689
|
+
return;
|
|
690
|
+
}
|
|
691
|
+
expect(doctor.data.issues.some((issue) => issue.code === "DRIFT_COPY")).toBe(true);
|
|
692
|
+
});
|
|
693
|
+
|
|
694
|
+
test("keeps metadata warnings on valid skills", async () => {
|
|
695
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
696
|
+
"folder-name/SKILL.md": skillDoc("bad--name", "x".repeat(1025)),
|
|
697
|
+
});
|
|
698
|
+
const app = new SkillFlowApp();
|
|
699
|
+
|
|
700
|
+
const result = await app.addSource(repoPath);
|
|
701
|
+
|
|
702
|
+
expect(result.ok).toBe(true);
|
|
703
|
+
if (!result.ok) {
|
|
704
|
+
return;
|
|
705
|
+
}
|
|
706
|
+
const list = await app.listWorkflows();
|
|
707
|
+
expect(list.ok).toBe(true);
|
|
708
|
+
if (!list.ok) {
|
|
709
|
+
return;
|
|
710
|
+
}
|
|
711
|
+
expect(list.data.summaries[0]?.leafs[0]?.metadataWarnings.length).toBeGreaterThan(0);
|
|
712
|
+
});
|
|
713
|
+
|
|
714
|
+
test("reads old lock entries without metadata fields", async () => {
|
|
715
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
716
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
717
|
+
});
|
|
718
|
+
const app = new SkillFlowApp();
|
|
719
|
+
const added = await app.addSource(repoPath);
|
|
720
|
+
expect(added.ok).toBe(true);
|
|
721
|
+
if (!added.ok) {
|
|
722
|
+
return;
|
|
723
|
+
}
|
|
724
|
+
|
|
725
|
+
const lockPath = path.join(stateRoot, "lock.json");
|
|
726
|
+
const lock = JSON.parse(await fs.readFile(lockPath, "utf8")) as {
|
|
727
|
+
leafInventory: Array<Record<string, unknown>>;
|
|
728
|
+
};
|
|
729
|
+
lock.leafInventory = lock.leafInventory.map((leaf) => {
|
|
730
|
+
const next = { ...leaf };
|
|
731
|
+
delete next.metadataWarnings;
|
|
732
|
+
delete next.linkName;
|
|
733
|
+
return next;
|
|
734
|
+
});
|
|
735
|
+
await fs.writeFile(lockPath, `${JSON.stringify(lock, null, 2)}\n`, "utf8");
|
|
736
|
+
|
|
737
|
+
const list = await app.listWorkflows();
|
|
738
|
+
expect(list.ok).toBe(true);
|
|
739
|
+
if (!list.ok) {
|
|
740
|
+
return;
|
|
741
|
+
}
|
|
742
|
+
expect(list.data.summaries[0]?.leafs[0]?.metadataWarnings).toEqual([]);
|
|
743
|
+
expect(list.data.summaries[0]?.leafs[0]?.linkName).toBe("browse");
|
|
744
|
+
});
|
|
745
|
+
|
|
746
|
+
// Verifies previewDraft performs no writes: after hand-mutating the lock with
// a phantom leaf, the lock file must remain byte-identical once previewed.
test("previewDraft is read-only and does not reconcile inventory on its own", async () => {
  const repoPath = await createRepo(sandboxRoot, {
    "browse/SKILL.md": skillDoc("browse", "Browser flow."),
  });
  const app = new SkillFlowApp();
  const added = await app.addSource(repoPath);
  expect(added.ok).toBe(true);
  if (!added.ok) {
    return;
  }

  const sourceId = added.data.manifest.id;
  // Load the lock so we can inject an inventory entry with no backing
  // directory on disk — the kind of stale entry reconciliation would remove.
  const lockPath = path.join(stateRoot, "lock.json");
  const lock = JSON.parse(await fs.readFile(lockPath, "utf8")) as {
    sources: Array<{ id: string; leafIds: string[] }>;
    leafInventory: Array<Record<string, unknown>>;
  };
  const existingLeaf = lock.leafInventory[0] as {
    id: string;
    absolutePath: string;
    linkName: string;
    name: string;
    relativePath: string;
    skillFilePath: string;
    sourceId: string;
    title: string;
  };
  // Clone the real leaf but point every path at a non-existent location.
  const generatedLeafId = `${sourceId}:.agents/skills/generated`;
  lock.sources[0]!.leafIds.push(generatedLeafId);
  lock.leafInventory.push({
    ...existingLeaf,
    id: generatedLeafId,
    relativePath: ".agents/skills/generated",
    absolutePath: path.join(stateRoot, "source", "git", sourceId, ".agents/skills/generated"),
    skillFilePath: path.join(
      stateRoot,
      "source",
      "git",
      sourceId,
      ".agents/skills/generated/SKILL.md",
    ),
    linkName: "generated",
    name: "generated",
    title: "generated",
  });
  const mutatedLock = `${JSON.stringify(lock, null, 2)}\n`;
  await fs.writeFile(lockPath, mutatedLock, "utf8");

  const preview = await app.previewDraft(sourceId, {
    enabledTargets: ["claude-code"],
    selectedLeafIds: [`${sourceId}:browse`],
  });

  expect(preview.ok).toBe(true);
  // The hand-mutated lock must be untouched: preview performed no writes.
  expect(await fs.readFile(lockPath, "utf8")).toBe(mutatedLock);
});
|
|
802
|
+
|
|
803
|
+
test("config helpers derive save, command, and context states", () => {
|
|
804
|
+
expect(draftsEqual(
|
|
805
|
+
{
|
|
806
|
+
enabledTargets: ["codex", "claude-code"],
|
|
807
|
+
selectedLeafIds: ["b", "a"],
|
|
808
|
+
},
|
|
809
|
+
{
|
|
810
|
+
enabledTargets: ["claude-code", "codex"],
|
|
811
|
+
selectedLeafIds: ["a", "b"],
|
|
812
|
+
},
|
|
813
|
+
)).toBe(true);
|
|
814
|
+
expect(getSaveDisplayPhase("idle", true)).toBe("dirty");
|
|
815
|
+
expect(buildSaveLabel("dirty", 3)).toContain("DIRTY");
|
|
816
|
+
expect(getPaneViewportCount(16, 1)).toBe(10);
|
|
817
|
+
expect(getPaneWidths(100).reduce((sum, width) => sum + width, 0)).toBeLessThanOrEqual(98);
|
|
818
|
+
expect(
|
|
819
|
+
buildCommandBar({
|
|
820
|
+
changeCount: 3,
|
|
821
|
+
focus: "groups",
|
|
822
|
+
saveFocused: false,
|
|
823
|
+
savePhase: "dirty",
|
|
824
|
+
}),
|
|
825
|
+
).toContain("inspect skills");
|
|
826
|
+
expect(
|
|
827
|
+
buildContextBar({
|
|
828
|
+
blockedCount: 0,
|
|
829
|
+
changeCount: 3,
|
|
830
|
+
previewError: undefined,
|
|
831
|
+
previewLoading: false,
|
|
832
|
+
savePhase: "clean",
|
|
833
|
+
saveMessage: undefined,
|
|
834
|
+
selectedLeafName: "gstack",
|
|
835
|
+
selectedLeafWarnings: ["description should be at most 1024 characters"],
|
|
836
|
+
skippedLeafs: 21,
|
|
837
|
+
sourceId: "gstack",
|
|
838
|
+
}),
|
|
839
|
+
).toContain("warning:");
|
|
840
|
+
});
|
|
841
|
+
|
|
842
|
+
test("projection warning helper marks identical cross-group skills as skipped", () => {
|
|
843
|
+
const warnings = buildProjectionWarningMap({
|
|
844
|
+
drafts: {
|
|
845
|
+
alpha: { enabledTargets: ["claude-code"], selectedLeafIds: ["alpha:browse"] },
|
|
846
|
+
beta: { enabledTargets: ["claude-code"], selectedLeafIds: ["beta:browse"] },
|
|
847
|
+
},
|
|
848
|
+
summaries: [
|
|
849
|
+
{
|
|
850
|
+
source: {
|
|
851
|
+
id: "alpha",
|
|
852
|
+
locator: "alpha",
|
|
853
|
+
kind: "git",
|
|
854
|
+
displayName: "alpha",
|
|
855
|
+
addedAt: "",
|
|
856
|
+
},
|
|
857
|
+
lock: undefined,
|
|
858
|
+
bindings: { targets: {} },
|
|
859
|
+
activeTargetCount: 0,
|
|
860
|
+
health: "ACTIVE",
|
|
861
|
+
leafs: [
|
|
862
|
+
{
|
|
863
|
+
id: "alpha:browse",
|
|
864
|
+
sourceId: "alpha",
|
|
865
|
+
name: "browse",
|
|
866
|
+
linkName: "browse",
|
|
867
|
+
title: "browse",
|
|
868
|
+
description: "Browser flow.",
|
|
869
|
+
relativePath: "browse",
|
|
870
|
+
absolutePath: "/tmp/alpha/browse",
|
|
871
|
+
skillFilePath: "/tmp/alpha/browse/SKILL.md",
|
|
872
|
+
contentHash: "a",
|
|
873
|
+
metadataWarnings: [],
|
|
874
|
+
valid: true,
|
|
875
|
+
},
|
|
876
|
+
],
|
|
877
|
+
},
|
|
878
|
+
{
|
|
879
|
+
source: {
|
|
880
|
+
id: "beta",
|
|
881
|
+
locator: "beta",
|
|
882
|
+
kind: "git",
|
|
883
|
+
displayName: "beta",
|
|
884
|
+
addedAt: "",
|
|
885
|
+
},
|
|
886
|
+
lock: undefined,
|
|
887
|
+
bindings: { targets: {} },
|
|
888
|
+
activeTargetCount: 0,
|
|
889
|
+
health: "ACTIVE",
|
|
890
|
+
leafs: [
|
|
891
|
+
{
|
|
892
|
+
id: "beta:browse",
|
|
893
|
+
sourceId: "beta",
|
|
894
|
+
name: "browse",
|
|
895
|
+
linkName: "browse",
|
|
896
|
+
title: "browse",
|
|
897
|
+
description: "Browser flow.",
|
|
898
|
+
relativePath: "browse",
|
|
899
|
+
absolutePath: "/tmp/beta/browse",
|
|
900
|
+
skillFilePath: "/tmp/beta/browse/SKILL.md",
|
|
901
|
+
contentHash: "b",
|
|
902
|
+
metadataWarnings: [],
|
|
903
|
+
valid: true,
|
|
904
|
+
},
|
|
905
|
+
],
|
|
906
|
+
},
|
|
907
|
+
],
|
|
908
|
+
sourceId: "beta",
|
|
909
|
+
});
|
|
910
|
+
|
|
911
|
+
expect(warnings["beta:browse"]?.[0]).toContain("will be skipped");
|
|
912
|
+
});
|
|
913
|
+
|
|
914
|
+
test("projection warning helper marks cross-group name collisions as renamed", () => {
|
|
915
|
+
const warnings = buildProjectionWarningMap({
|
|
916
|
+
drafts: {
|
|
917
|
+
alpha: { enabledTargets: ["claude-code"], selectedLeafIds: ["alpha:browse"] },
|
|
918
|
+
beta: { enabledTargets: ["claude-code"], selectedLeafIds: ["beta:browse"] },
|
|
919
|
+
},
|
|
920
|
+
summaries: [
|
|
921
|
+
{
|
|
922
|
+
source: {
|
|
923
|
+
id: "alpha",
|
|
924
|
+
locator: "alpha",
|
|
925
|
+
kind: "git",
|
|
926
|
+
displayName: "alpha",
|
|
927
|
+
addedAt: "",
|
|
928
|
+
},
|
|
929
|
+
lock: undefined,
|
|
930
|
+
bindings: { targets: {} },
|
|
931
|
+
activeTargetCount: 0,
|
|
932
|
+
health: "ACTIVE",
|
|
933
|
+
leafs: [
|
|
934
|
+
{
|
|
935
|
+
id: "alpha:browse",
|
|
936
|
+
sourceId: "alpha",
|
|
937
|
+
name: "browse",
|
|
938
|
+
linkName: "browse",
|
|
939
|
+
title: "browse",
|
|
940
|
+
description: "Browser flow A.",
|
|
941
|
+
relativePath: "browse",
|
|
942
|
+
absolutePath: "/tmp/alpha/browse",
|
|
943
|
+
skillFilePath: "/tmp/alpha/browse/SKILL.md",
|
|
944
|
+
contentHash: "a",
|
|
945
|
+
metadataWarnings: [],
|
|
946
|
+
valid: true,
|
|
947
|
+
},
|
|
948
|
+
],
|
|
949
|
+
},
|
|
950
|
+
{
|
|
951
|
+
source: {
|
|
952
|
+
id: "beta",
|
|
953
|
+
locator: "beta",
|
|
954
|
+
kind: "git",
|
|
955
|
+
displayName: "beta",
|
|
956
|
+
addedAt: "",
|
|
957
|
+
},
|
|
958
|
+
lock: undefined,
|
|
959
|
+
bindings: { targets: {} },
|
|
960
|
+
activeTargetCount: 0,
|
|
961
|
+
health: "ACTIVE",
|
|
962
|
+
leafs: [
|
|
963
|
+
{
|
|
964
|
+
id: "beta:browse",
|
|
965
|
+
sourceId: "beta",
|
|
966
|
+
name: "browse",
|
|
967
|
+
linkName: "browse",
|
|
968
|
+
title: "browse",
|
|
969
|
+
description: "Browser flow B.",
|
|
970
|
+
relativePath: "browse",
|
|
971
|
+
absolutePath: "/tmp/beta/browse",
|
|
972
|
+
skillFilePath: "/tmp/beta/browse/SKILL.md",
|
|
973
|
+
contentHash: "b",
|
|
974
|
+
metadataWarnings: [],
|
|
975
|
+
valid: true,
|
|
976
|
+
},
|
|
977
|
+
],
|
|
978
|
+
},
|
|
979
|
+
],
|
|
980
|
+
sourceId: "beta",
|
|
981
|
+
});
|
|
982
|
+
|
|
983
|
+
expect(warnings["beta:browse"]?.[0]).toContain("will deploy as beta-browse");
|
|
984
|
+
});
|
|
985
|
+
|
|
986
|
+
test("supports cursor and pi target projections", async () => {
|
|
987
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
988
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
989
|
+
});
|
|
990
|
+
const app = new SkillFlowApp();
|
|
991
|
+
const added = await app.addSource(repoPath);
|
|
992
|
+
expect(added.ok).toBe(true);
|
|
993
|
+
if (!added.ok) {
|
|
994
|
+
return;
|
|
995
|
+
}
|
|
996
|
+
|
|
997
|
+
const sourceId = added.data.manifest.id;
|
|
998
|
+
const leafId = `${sourceId}:browse`;
|
|
999
|
+
const applied = await app.applyDraft(sourceId, {
|
|
1000
|
+
enabledTargets: ["cursor", "pi"],
|
|
1001
|
+
selectedLeafIds: [leafId],
|
|
1002
|
+
});
|
|
1003
|
+
|
|
1004
|
+
expect(applied.ok).toBe(true);
|
|
1005
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CURSOR!, "browse"))).toBe(
|
|
1006
|
+
true,
|
|
1007
|
+
);
|
|
1008
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_PI!, "browse"))).toBe(
|
|
1009
|
+
true,
|
|
1010
|
+
);
|
|
1011
|
+
});
|
|
1012
|
+
|
|
1013
|
+
test("supports additional global agent target projections", async () => {
|
|
1014
|
+
const repoPath = await createRepo(sandboxRoot, {
|
|
1015
|
+
"browse/SKILL.md": skillDoc("browse", "Browser flow."),
|
|
1016
|
+
});
|
|
1017
|
+
const app = new SkillFlowApp();
|
|
1018
|
+
const added = await app.addSource(repoPath);
|
|
1019
|
+
expect(added.ok).toBe(true);
|
|
1020
|
+
if (!added.ok) {
|
|
1021
|
+
return;
|
|
1022
|
+
}
|
|
1023
|
+
|
|
1024
|
+
const sourceId = added.data.manifest.id;
|
|
1025
|
+
const leafId = `${sourceId}:browse`;
|
|
1026
|
+
const applied = await app.applyDraft(sourceId, {
|
|
1027
|
+
enabledTargets: [
|
|
1028
|
+
"github-copilot",
|
|
1029
|
+
"gemini-cli",
|
|
1030
|
+
"windsurf",
|
|
1031
|
+
"roo-code",
|
|
1032
|
+
"cline",
|
|
1033
|
+
"amp",
|
|
1034
|
+
"kiro",
|
|
1035
|
+
],
|
|
1036
|
+
selectedLeafIds: [leafId],
|
|
1037
|
+
});
|
|
1038
|
+
|
|
1039
|
+
expect(applied.ok).toBe(true);
|
|
1040
|
+
expect(
|
|
1041
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_GITHUB_COPILOT!, "browse")),
|
|
1042
|
+
).toBe(true);
|
|
1043
|
+
expect(
|
|
1044
|
+
await pathExists(path.join(process.env.SKILL_FLOW_TARGET_GEMINI_CLI!, "browse")),
|
|
1045
|
+
).toBe(true);
|
|
1046
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_WINDSURF!, "browse"))).toBe(
|
|
1047
|
+
true,
|
|
1048
|
+
);
|
|
1049
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_ROO_CODE!, "browse"))).toBe(
|
|
1050
|
+
true,
|
|
1051
|
+
);
|
|
1052
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_CLINE!, "browse"))).toBe(
|
|
1053
|
+
true,
|
|
1054
|
+
);
|
|
1055
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_AMP!, "browse"))).toBe(
|
|
1056
|
+
true,
|
|
1057
|
+
);
|
|
1058
|
+
expect(await pathExists(path.join(process.env.SKILL_FLOW_TARGET_KIRO!, "browse"))).toBe(
|
|
1059
|
+
true,
|
|
1060
|
+
);
|
|
1061
|
+
});
|
|
1062
|
+
|
|
1063
|
+
test("discovers all configured global targets with isolated roots", async () => {
|
|
1064
|
+
const app = new SkillFlowApp();
|
|
1065
|
+
|
|
1066
|
+
const targets = await app.getAvailableTargets();
|
|
1067
|
+
|
|
1068
|
+
expect(targets).toEqual([
|
|
1069
|
+
"claude-code",
|
|
1070
|
+
"codex",
|
|
1071
|
+
"cursor",
|
|
1072
|
+
"github-copilot",
|
|
1073
|
+
"gemini-cli",
|
|
1074
|
+
"opencode",
|
|
1075
|
+
"openclaw",
|
|
1076
|
+
"pi",
|
|
1077
|
+
"windsurf",
|
|
1078
|
+
"roo-code",
|
|
1079
|
+
"cline",
|
|
1080
|
+
"amp",
|
|
1081
|
+
"kiro",
|
|
1082
|
+
]);
|
|
1083
|
+
});
|
|
1084
|
+
|
|
1085
|
+
test("includes config-based OpenCode skills directory in default detection paths", () => {
|
|
1086
|
+
expect(TARGET_PATH_CANDIDATES.opencode).toContain(
|
|
1087
|
+
path.join(os.homedir(), ".config", "opencode", "skills"),
|
|
1088
|
+
);
|
|
1089
|
+
expect(TARGET_PATH_CANDIDATES["github-copilot"]).toContain(
|
|
1090
|
+
path.join(os.homedir(), ".copilot", "skills"),
|
|
1091
|
+
);
|
|
1092
|
+
expect(TARGET_PATH_CANDIDATES["gemini-cli"]).toContain(
|
|
1093
|
+
path.join(os.homedir(), ".gemini", "skills"),
|
|
1094
|
+
);
|
|
1095
|
+
expect(TARGET_PATH_CANDIDATES.windsurf).toContain(
|
|
1096
|
+
path.join(os.homedir(), ".codeium", "windsurf", "skills"),
|
|
1097
|
+
);
|
|
1098
|
+
expect(TARGET_PATH_CANDIDATES["roo-code"]).toContain(
|
|
1099
|
+
path.join(os.homedir(), ".roo", "skills"),
|
|
1100
|
+
);
|
|
1101
|
+
expect(TARGET_PATH_CANDIDATES.cline).toContain(
|
|
1102
|
+
path.join(os.homedir(), ".cline", "skills"),
|
|
1103
|
+
);
|
|
1104
|
+
expect(TARGET_PATH_CANDIDATES.amp).toContain(
|
|
1105
|
+
path.join(os.homedir(), ".config", "agents", "skills"),
|
|
1106
|
+
);
|
|
1107
|
+
expect(TARGET_PATH_CANDIDATES.kiro).toContain(
|
|
1108
|
+
path.join(os.homedir(), ".kiro", "skills"),
|
|
1109
|
+
);
|
|
1110
|
+
});
|
|
1111
|
+
|
|
1112
|
+
test("classifies shared global roots as compatibility reads instead of write roots", () => {
|
|
1113
|
+
expect(TARGET_DEFINITIONS.codex.writerKey).toBe("agents-skills");
|
|
1114
|
+
expect(TARGET_PATH_CANDIDATES.codex).toContain(
|
|
1115
|
+
path.join(os.homedir(), ".agents", "skills"),
|
|
1116
|
+
);
|
|
1117
|
+
expect(TARGET_COMPAT_READ_CANDIDATES["gemini-cli"]).toContain(
|
|
1118
|
+
path.join(os.homedir(), ".agents", "skills"),
|
|
1119
|
+
);
|
|
1120
|
+
expect(TARGET_COMPAT_READ_CANDIDATES["github-copilot"]).toContain(
|
|
1121
|
+
path.join(os.homedir(), ".agents", "skills"),
|
|
1122
|
+
);
|
|
1123
|
+
expect(TARGET_COMPAT_READ_CANDIDATES.cursor).toContain(
|
|
1124
|
+
path.join(os.homedir(), ".claude", "skills"),
|
|
1125
|
+
);
|
|
1126
|
+
expect(TARGET_COMPAT_READ_CANDIDATES.pi).toContain(
|
|
1127
|
+
path.join(os.homedir(), ".claude", "skills"),
|
|
1128
|
+
);
|
|
1129
|
+
expect(TARGET_COMPAT_READ_CANDIDATES.amp).toContain(
|
|
1130
|
+
path.join(os.homedir(), ".claude", "skills"),
|
|
1131
|
+
);
|
|
1132
|
+
expect(TARGET_PATH_CANDIDATES["gemini-cli"]).not.toContain(
|
|
1133
|
+
path.join(os.homedir(), ".agents", "skills"),
|
|
1134
|
+
);
|
|
1135
|
+
expect(TARGET_PATH_CANDIDATES["github-copilot"]).not.toContain(
|
|
1136
|
+
path.join(os.homedir(), ".claude", "skills"),
|
|
1137
|
+
);
|
|
1138
|
+
});
|
|
1139
|
+
});
|
|
1140
|
+
|
|
1141
|
+
async function createRepo(
|
|
1142
|
+
root: string,
|
|
1143
|
+
files: Record<string, string>,
|
|
1144
|
+
): Promise<string> {
|
|
1145
|
+
const repoPath = await fs.mkdtemp(path.join(root, "repo-"));
|
|
1146
|
+
git(repoPath, ["init"]);
|
|
1147
|
+
git(repoPath, ["config", "user.email", "test@example.com"]);
|
|
1148
|
+
git(repoPath, ["config", "user.name", "Skill Flow Test"]);
|
|
1149
|
+
await writeRepoFiles(repoPath, files);
|
|
1150
|
+
git(repoPath, ["add", "."]);
|
|
1151
|
+
git(repoPath, ["commit", "-m", "initial"]);
|
|
1152
|
+
return repoPath;
|
|
1153
|
+
}
|
|
1154
|
+
|
|
1155
|
+
async function writeRepoFiles(root: string, files: Record<string, string>) {
|
|
1156
|
+
for (const [relativePath, content] of Object.entries(files)) {
|
|
1157
|
+
const absolutePath = path.join(root, relativePath);
|
|
1158
|
+
await fs.mkdir(path.dirname(absolutePath), { recursive: true });
|
|
1159
|
+
await fs.writeFile(absolutePath, content, "utf8");
|
|
1160
|
+
}
|
|
1161
|
+
}
|
|
1162
|
+
|
|
1163
|
+
// Build a minimal SKILL.md document: YAML frontmatter carrying `name` and a
// block-scalar `description`, optionally followed by a markdown heading.
// NOTE(review): the template literal's internal whitespace is part of the
// output — the description line's indentation relative to `description: |`
// determines whether the YAML block scalar parses; confirm against the
// frontmatter parser the app uses.
function skillDoc(name: string, description: string, heading?: string) {
  return `---
name: ${name}
description: |
  ${description}
---
${heading ? `\n# ${heading}\n` : ""}
`;
}
|
|
1172
|
+
|
|
1173
|
+
function git(cwd: string, args: string[]) {
|
|
1174
|
+
execFileSync("git", args, { cwd, stdio: "pipe" });
|
|
1175
|
+
}
|
|
1176
|
+
|
|
1177
|
+
async function pathExists(targetPath: string) {
|
|
1178
|
+
try {
|
|
1179
|
+
await fs.lstat(targetPath);
|
|
1180
|
+
return true;
|
|
1181
|
+
} catch {
|
|
1182
|
+
return false;
|
|
1183
|
+
}
|
|
1184
|
+
}
|