opencode-conductor-cdd-plugin 1.0.0-beta.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +202 -0
- package/README.md +163 -0
- package/README.test.md +51 -0
- package/dist/commands/implement.d.ts +1 -0
- package/dist/commands/implement.js +30 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +108 -0
- package/dist/index.test.d.ts +1 -0
- package/dist/index.test.js +122 -0
- package/dist/prompts/agent/cdd.md +41 -0
- package/dist/prompts/agent/implementer.md +22 -0
- package/dist/prompts/agent.md +23 -0
- package/dist/prompts/cdd/implement.json +4 -0
- package/dist/prompts/cdd/newTrack.json +4 -0
- package/dist/prompts/cdd/revert.json +4 -0
- package/dist/prompts/cdd/setup.json +4 -0
- package/dist/prompts/cdd/setup.test.d.ts +1 -0
- package/dist/prompts/cdd/setup.test.js +132 -0
- package/dist/prompts/cdd/setup.test.ts +168 -0
- package/dist/prompts/cdd/status.json +4 -0
- package/dist/prompts/strategies/delegate.md +11 -0
- package/dist/prompts/strategies/manual.md +9 -0
- package/dist/templates/code_styleguides/c.md +28 -0
- package/dist/templates/code_styleguides/cpp.md +46 -0
- package/dist/templates/code_styleguides/csharp.md +115 -0
- package/dist/templates/code_styleguides/dart.md +238 -0
- package/dist/templates/code_styleguides/general.md +23 -0
- package/dist/templates/code_styleguides/go.md +48 -0
- package/dist/templates/code_styleguides/html-css.md +49 -0
- package/dist/templates/code_styleguides/java.md +39 -0
- package/dist/templates/code_styleguides/javascript.md +51 -0
- package/dist/templates/code_styleguides/julia.md +27 -0
- package/dist/templates/code_styleguides/kotlin.md +41 -0
- package/dist/templates/code_styleguides/php.md +37 -0
- package/dist/templates/code_styleguides/python.md +37 -0
- package/dist/templates/code_styleguides/react.md +37 -0
- package/dist/templates/code_styleguides/ruby.md +39 -0
- package/dist/templates/code_styleguides/rust.md +44 -0
- package/dist/templates/code_styleguides/shell.md +35 -0
- package/dist/templates/code_styleguides/solidity.md +60 -0
- package/dist/templates/code_styleguides/sql.md +39 -0
- package/dist/templates/code_styleguides/swift.md +36 -0
- package/dist/templates/code_styleguides/typescript.md +43 -0
- package/dist/templates/code_styleguides/vue.md +38 -0
- package/dist/templates/code_styleguides/zig.md +27 -0
- package/dist/templates/workflow.md +336 -0
- package/dist/tools/background.d.ts +54 -0
- package/dist/tools/background.js +198 -0
- package/dist/tools/commands.d.ts +11 -0
- package/dist/tools/commands.js +80 -0
- package/dist/tools/commands.test.d.ts +1 -0
- package/dist/tools/commands.test.js +142 -0
- package/dist/tools/delegate.d.ts +3 -0
- package/dist/tools/delegate.js +45 -0
- package/dist/utils/autogenerateFlow.d.ts +65 -0
- package/dist/utils/autogenerateFlow.js +391 -0
- package/dist/utils/autogenerateFlow.test.d.ts +1 -0
- package/dist/utils/autogenerateFlow.test.js +610 -0
- package/dist/utils/bootstrap.d.ts +1 -0
- package/dist/utils/bootstrap.js +46 -0
- package/dist/utils/commandFactory.d.ts +11 -0
- package/dist/utils/commandFactory.js +69 -0
- package/dist/utils/commitMessages.d.ts +35 -0
- package/dist/utils/commitMessages.js +33 -0
- package/dist/utils/commitMessages.test.d.ts +1 -0
- package/dist/utils/commitMessages.test.js +79 -0
- package/dist/utils/configDetection.d.ts +7 -0
- package/dist/utils/configDetection.js +49 -0
- package/dist/utils/configDetection.test.d.ts +1 -0
- package/dist/utils/configDetection.test.js +119 -0
- package/dist/utils/contentGeneration.d.ts +10 -0
- package/dist/utils/contentGeneration.js +141 -0
- package/dist/utils/contentGeneration.test.d.ts +1 -0
- package/dist/utils/contentGeneration.test.js +147 -0
- package/dist/utils/contextAnalysis.d.ts +100 -0
- package/dist/utils/contextAnalysis.js +308 -0
- package/dist/utils/contextAnalysis.test.d.ts +1 -0
- package/dist/utils/contextAnalysis.test.js +307 -0
- package/dist/utils/gitNotes.d.ts +23 -0
- package/dist/utils/gitNotes.js +53 -0
- package/dist/utils/gitNotes.test.d.ts +1 -0
- package/dist/utils/gitNotes.test.js +105 -0
- package/dist/utils/ignoreMatcher.d.ts +9 -0
- package/dist/utils/ignoreMatcher.js +77 -0
- package/dist/utils/ignoreMatcher.test.d.ts +1 -0
- package/dist/utils/ignoreMatcher.test.js +126 -0
- package/dist/utils/stateManager.d.ts +10 -0
- package/dist/utils/stateManager.js +30 -0
- package/package.json +90 -0
- package/scripts/convert-legacy.cjs +17 -0
- package/scripts/postinstall.cjs +38 -0
|
@@ -0,0 +1,307 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach } from "vitest";
|
|
2
|
+
import { readFile, readdir, stat } from "fs/promises";
|
|
3
|
+
import { execSync } from "child_process";
|
|
4
|
+
import { analyzeProjectContext, parseManifests, analyzeDocs, analyzeGitHistory, analyzeCodeStructure, parseIgnoreFiles, parseCICDConfigs, } from "./contextAnalysis.js";
|
|
5
|
+
vi.mock("fs/promises", () => ({
|
|
6
|
+
readFile: vi.fn(),
|
|
7
|
+
readdir: vi.fn(),
|
|
8
|
+
stat: vi.fn(),
|
|
9
|
+
}));
|
|
10
|
+
vi.mock("child_process", () => ({
|
|
11
|
+
execSync: vi.fn(),
|
|
12
|
+
}));
|
|
13
|
+
describe("Context Analysis Module", () => {
|
|
14
|
+
beforeEach(() => {
|
|
15
|
+
vi.clearAllMocks();
|
|
16
|
+
});
|
|
17
|
+
describe("parseManifests", () => {
|
|
18
|
+
it("should parse package.json and extract dependencies", async () => {
|
|
19
|
+
const mockPackageJson = JSON.stringify({
|
|
20
|
+
name: "test-project",
|
|
21
|
+
version: "1.0.0",
|
|
22
|
+
dependencies: {
|
|
23
|
+
react: "^18.0.0",
|
|
24
|
+
typescript: "^5.0.0",
|
|
25
|
+
},
|
|
26
|
+
devDependencies: {
|
|
27
|
+
vitest: "^4.0.0",
|
|
28
|
+
},
|
|
29
|
+
scripts: {
|
|
30
|
+
test: "vitest",
|
|
31
|
+
build: "tsc",
|
|
32
|
+
},
|
|
33
|
+
});
|
|
34
|
+
vi.mocked(readFile).mockResolvedValue(mockPackageJson);
|
|
35
|
+
vi.mocked(readdir).mockResolvedValue([
|
|
36
|
+
"package.json",
|
|
37
|
+
]);
|
|
38
|
+
const result = await parseManifests("/test/project");
|
|
39
|
+
expect(result.manifests).toHaveLength(1);
|
|
40
|
+
expect(result.manifests[0].type).toBe("package.json");
|
|
41
|
+
expect(result.manifests[0].dependencies).toContain("react");
|
|
42
|
+
expect(result.manifests[0].dependencies).toContain("typescript");
|
|
43
|
+
expect(result.manifests[0].devDependencies).toContain("vitest");
|
|
44
|
+
expect(result.manifests[0].scripts).toEqual({
|
|
45
|
+
test: "vitest",
|
|
46
|
+
build: "tsc",
|
|
47
|
+
});
|
|
48
|
+
});
|
|
49
|
+
it("should parse requirements.txt and extract dependencies", async () => {
|
|
50
|
+
const mockRequirements = `
|
|
51
|
+
flask==2.0.0
|
|
52
|
+
django>=3.2.0
|
|
53
|
+
pytest
|
|
54
|
+
requests==2.28.0
|
|
55
|
+
`;
|
|
56
|
+
vi.mocked(readFile).mockResolvedValue(mockRequirements);
|
|
57
|
+
vi.mocked(readdir).mockResolvedValue([
|
|
58
|
+
"requirements.txt",
|
|
59
|
+
]);
|
|
60
|
+
const result = await parseManifests("/test/project");
|
|
61
|
+
expect(result.manifests).toHaveLength(1);
|
|
62
|
+
expect(result.manifests[0].type).toBe("requirements.txt");
|
|
63
|
+
expect(result.manifests[0].dependencies).toContain("flask");
|
|
64
|
+
expect(result.manifests[0].dependencies).toContain("django");
|
|
65
|
+
expect(result.manifests[0].dependencies).toContain("pytest");
|
|
66
|
+
});
|
|
67
|
+
it("should parse Cargo.toml and extract dependencies", async () => {
|
|
68
|
+
const mockCargoToml = `
|
|
69
|
+
[package]
|
|
70
|
+
name = "test-project"
|
|
71
|
+
version = "0.1.0"
|
|
72
|
+
|
|
73
|
+
[dependencies]
|
|
74
|
+
tokio = "1.0"
|
|
75
|
+
serde = { version = "1.0", features = ["derive"] }
|
|
76
|
+
`;
|
|
77
|
+
vi.mocked(readFile).mockResolvedValue(mockCargoToml);
|
|
78
|
+
vi.mocked(readdir).mockResolvedValue([
|
|
79
|
+
"Cargo.toml",
|
|
80
|
+
]);
|
|
81
|
+
const result = await parseManifests("/test/project");
|
|
82
|
+
expect(result.manifests).toHaveLength(1);
|
|
83
|
+
expect(result.manifests[0].type).toBe("Cargo.toml");
|
|
84
|
+
expect(result.manifests[0].dependencies).toContain("tokio");
|
|
85
|
+
expect(result.manifests[0].dependencies).toContain("serde");
|
|
86
|
+
});
|
|
87
|
+
it("should handle missing manifest files gracefully", async () => {
|
|
88
|
+
vi.mocked(readdir).mockResolvedValue([]);
|
|
89
|
+
const result = await parseManifests("/test/project");
|
|
90
|
+
expect(result.manifests).toHaveLength(0);
|
|
91
|
+
expect(result.warnings).toContain("No manifest files found");
|
|
92
|
+
});
|
|
93
|
+
it("should handle file read errors gracefully", async () => {
|
|
94
|
+
vi.mocked(readdir).mockResolvedValue(["package.json"]);
|
|
95
|
+
vi.mocked(readFile).mockRejectedValue(new Error("Permission denied"));
|
|
96
|
+
const result = await parseManifests("/test/project");
|
|
97
|
+
expect(result.manifests).toHaveLength(0);
|
|
98
|
+
expect(result.warnings.some((w) => w.includes("Failed to read package.json"))).toBe(true);
|
|
99
|
+
});
|
|
100
|
+
});
|
|
101
|
+
describe("analyzeDocs", () => {
|
|
102
|
+
it("should analyze README.md and extract key information", async () => {
|
|
103
|
+
const mockReadme = `
|
|
104
|
+
# Test Project
|
|
105
|
+
|
|
106
|
+
A TypeScript library for testing.
|
|
107
|
+
|
|
108
|
+
## Features
|
|
109
|
+
- Feature A
|
|
110
|
+
- Feature B
|
|
111
|
+
|
|
112
|
+
## Installation
|
|
113
|
+
npm install test-project
|
|
114
|
+
|
|
115
|
+
## Usage
|
|
116
|
+
import { foo } from 'test-project'
|
|
117
|
+
`;
|
|
118
|
+
vi.mocked(readFile).mockResolvedValue(mockReadme);
|
|
119
|
+
vi.mocked(readdir).mockResolvedValue(["README.md"]);
|
|
120
|
+
const result = await analyzeDocs("/test/project");
|
|
121
|
+
expect(result.docs).toHaveLength(1);
|
|
122
|
+
expect(result.docs[0].filename).toBe("README.md");
|
|
123
|
+
expect(result.docs[0].content).toContain("TypeScript library");
|
|
124
|
+
expect(result.docs[0].content).toContain("Feature A");
|
|
125
|
+
});
|
|
126
|
+
it("should sample large files (> 10KB)", async () => {
|
|
127
|
+
const largeLine = "x".repeat(100);
|
|
128
|
+
const largeContent = Array(250).fill(largeLine).join("\n") + "\n\nIMPORTANT INFO";
|
|
129
|
+
vi.mocked(readFile).mockResolvedValue(largeContent);
|
|
130
|
+
vi.mocked(readdir).mockResolvedValue(["README.md"]);
|
|
131
|
+
const result = await analyzeDocs("/test/project");
|
|
132
|
+
expect(result.docs).toHaveLength(1);
|
|
133
|
+
expect(result.docs[0].content.length).toBeLessThan(largeContent.length);
|
|
134
|
+
expect(result.warnings).toContain("Large file detected, using sampling");
|
|
135
|
+
});
|
|
136
|
+
it("should handle missing README gracefully", async () => {
|
|
137
|
+
vi.mocked(readdir).mockResolvedValue([]);
|
|
138
|
+
const result = await analyzeDocs("/test/project");
|
|
139
|
+
expect(result.docs).toHaveLength(0);
|
|
140
|
+
expect(result.warnings).toContain("No README found");
|
|
141
|
+
});
|
|
142
|
+
});
|
|
143
|
+
describe("analyzeGitHistory", () => {
|
|
144
|
+
it("should analyze git commit patterns", async () => {
|
|
145
|
+
vi.mocked(execSync).mockImplementation(() => `abc1234 feat: add new feature
|
|
146
|
+
def5678 fix: bug fix
|
|
147
|
+
abc9101 chore: update deps
|
|
148
|
+
def1121 test: add tests
|
|
149
|
+
abc3141 feat(auth): implement login
|
|
150
|
+
`);
|
|
151
|
+
const result = await analyzeGitHistory("/test/project");
|
|
152
|
+
expect(result.commitCount).toBe(5);
|
|
153
|
+
expect(result.conventionalCommits).toBeGreaterThan(0);
|
|
154
|
+
expect(result.patterns).toContain("conventional");
|
|
155
|
+
});
|
|
156
|
+
it("should detect non-conventional commit patterns", async () => {
|
|
157
|
+
vi.mocked(execSync).mockImplementation(() => `abc1234 Add new feature
|
|
158
|
+
def5678 Fix bug
|
|
159
|
+
abc9101 Update README
|
|
160
|
+
def1121 Random commit message
|
|
161
|
+
`);
|
|
162
|
+
const result = await analyzeGitHistory("/test/project");
|
|
163
|
+
expect(result.commitCount).toBe(4);
|
|
164
|
+
expect(result.conventionalCommits).toBe(0);
|
|
165
|
+
expect(result.patterns).not.toContain("conventional");
|
|
166
|
+
});
|
|
167
|
+
it("should handle git command failures gracefully", async () => {
|
|
168
|
+
vi.mocked(execSync).mockImplementation(() => {
|
|
169
|
+
throw new Error("Not a git repository");
|
|
170
|
+
});
|
|
171
|
+
const result = await analyzeGitHistory("/test/project");
|
|
172
|
+
expect(result.commitCount).toBe(0);
|
|
173
|
+
expect(result.warnings.some((w) => w.includes("Not a git repository"))).toBe(true);
|
|
174
|
+
});
|
|
175
|
+
});
|
|
176
|
+
describe("analyzeCodeStructure", () => {
|
|
177
|
+
it("should analyze directory structure", async () => {
|
|
178
|
+
vi.mocked(readdir).mockImplementation(async (path) => {
|
|
179
|
+
if (path === "/test/project") {
|
|
180
|
+
return ["src", "tests", "package.json", "README.md", "index.ts", "config.ts"];
|
|
181
|
+
}
|
|
182
|
+
if (path === "/test/project/src") {
|
|
183
|
+
return ["utils.ts", "components"];
|
|
184
|
+
}
|
|
185
|
+
return [];
|
|
186
|
+
});
|
|
187
|
+
vi.mocked(stat).mockImplementation(async (path) => {
|
|
188
|
+
const pathStr = String(path);
|
|
189
|
+
if (pathStr.endsWith(".ts") || pathStr.endsWith(".md") || pathStr.endsWith(".json")) {
|
|
190
|
+
return { isDirectory: () => false, isFile: () => true };
|
|
191
|
+
}
|
|
192
|
+
return { isDirectory: () => true, isFile: () => false };
|
|
193
|
+
});
|
|
194
|
+
const result = await analyzeCodeStructure("/test/project");
|
|
195
|
+
expect(result.structure).toContain("src/");
|
|
196
|
+
expect(result.structure).toContain("tests/");
|
|
197
|
+
expect(result.fileExtensions).toContain(".ts");
|
|
198
|
+
});
|
|
199
|
+
it("should respect max depth of 5", async () => {
|
|
200
|
+
// This test ensures we don't traverse too deeply
|
|
201
|
+
let depth = 0;
|
|
202
|
+
vi.mocked(readdir).mockImplementation(async () => {
|
|
203
|
+
depth++;
|
|
204
|
+
if (depth > 5) {
|
|
205
|
+
throw new Error("Max depth exceeded");
|
|
206
|
+
}
|
|
207
|
+
return depth < 5 ? ["nested"] : [];
|
|
208
|
+
});
|
|
209
|
+
const result = await analyzeCodeStructure("/test/project");
|
|
210
|
+
expect(result.warnings).not.toContain("Max depth exceeded");
|
|
211
|
+
});
|
|
212
|
+
});
|
|
213
|
+
describe("parseIgnoreFiles", () => {
|
|
214
|
+
it("should parse .gitignore patterns", async () => {
|
|
215
|
+
const mockGitignore = `
|
|
216
|
+
node_modules/
|
|
217
|
+
dist/
|
|
218
|
+
*.log
|
|
219
|
+
.env
|
|
220
|
+
`;
|
|
221
|
+
vi.mocked(readFile).mockResolvedValue(mockGitignore);
|
|
222
|
+
vi.mocked(readdir).mockResolvedValue([".gitignore"]);
|
|
223
|
+
const result = await parseIgnoreFiles("/test/project");
|
|
224
|
+
expect(result.patterns).toContain("node_modules/");
|
|
225
|
+
expect(result.patterns).toContain("dist/");
|
|
226
|
+
expect(result.patterns).toContain("*.log");
|
|
227
|
+
expect(result.patterns).toContain(".env");
|
|
228
|
+
});
|
|
229
|
+
});
|
|
230
|
+
describe("parseCICDConfigs", () => {
|
|
231
|
+
it("should parse GitHub Actions workflow", async () => {
|
|
232
|
+
const mockWorkflow = `
|
|
233
|
+
name: CI
|
|
234
|
+
on: [push]
|
|
235
|
+
jobs:
|
|
236
|
+
test:
|
|
237
|
+
runs-on: ubuntu-latest
|
|
238
|
+
steps:
|
|
239
|
+
- run: npm test
|
|
240
|
+
- run: npm run build
|
|
241
|
+
`;
|
|
242
|
+
vi.mocked(readdir).mockImplementation(async (path) => {
|
|
243
|
+
const pathStr = String(path);
|
|
244
|
+
if (pathStr.endsWith("workflows")) {
|
|
245
|
+
return ["ci.yml"];
|
|
246
|
+
}
|
|
247
|
+
return [".github"];
|
|
248
|
+
});
|
|
249
|
+
vi.mocked(readFile).mockResolvedValue(mockWorkflow);
|
|
250
|
+
const result = await parseCICDConfigs("/test/project");
|
|
251
|
+
expect(result.configs).toHaveLength(1);
|
|
252
|
+
expect(result.configs[0].type).toBe("github-actions");
|
|
253
|
+
expect(result.configs[0].commands).toContain("npm test");
|
|
254
|
+
expect(result.configs[0].commands).toContain("npm run build");
|
|
255
|
+
});
|
|
256
|
+
});
|
|
257
|
+
describe("analyzeProjectContext (Integration)", () => {
|
|
258
|
+
it("should analyze complete project context with all sources", async () => {
|
|
259
|
+
// Setup mocks for all analysis functions
|
|
260
|
+
const mockPackageJson = JSON.stringify({
|
|
261
|
+
dependencies: { react: "^18.0.0" },
|
|
262
|
+
scripts: { test: "vitest" },
|
|
263
|
+
});
|
|
264
|
+
vi.mocked(readFile).mockImplementation(async (path) => {
|
|
265
|
+
if (typeof path === "string" && path.endsWith("package.json")) {
|
|
266
|
+
return mockPackageJson;
|
|
267
|
+
}
|
|
268
|
+
if (typeof path === "string" && path.endsWith("README.md")) {
|
|
269
|
+
return "# Test Project\nA React application";
|
|
270
|
+
}
|
|
271
|
+
return "";
|
|
272
|
+
});
|
|
273
|
+
vi.mocked(readdir).mockResolvedValue([
|
|
274
|
+
"package.json",
|
|
275
|
+
"README.md",
|
|
276
|
+
"src",
|
|
277
|
+
]);
|
|
278
|
+
vi.mocked(execSync).mockReturnValue(Buffer.from("feat: initial commit"));
|
|
279
|
+
const result = await analyzeProjectContext("/test/project");
|
|
280
|
+
expect(result.raw.manifests).not.toHaveLength(0);
|
|
281
|
+
expect(result.raw.docs).not.toHaveLength(0);
|
|
282
|
+
expect(result.insights.techStack.frameworks).toContainEqual(expect.objectContaining({ name: "react" }));
|
|
283
|
+
expect(result.meta.filesAnalyzed).toBeGreaterThan(0);
|
|
284
|
+
expect(result.meta.analysisTimeMs).toBeGreaterThanOrEqual(0);
|
|
285
|
+
});
|
|
286
|
+
it("should calculate confidence scores for tech stack", async () => {
|
|
287
|
+
const mockPackageJson = JSON.stringify({
|
|
288
|
+
dependencies: {
|
|
289
|
+
react: "^18.0.0",
|
|
290
|
+
"react-dom": "^18.0.0",
|
|
291
|
+
},
|
|
292
|
+
});
|
|
293
|
+
vi.mocked(readFile).mockResolvedValue(mockPackageJson);
|
|
294
|
+
vi.mocked(readdir).mockResolvedValue(["package.json"]);
|
|
295
|
+
const result = await analyzeProjectContext("/test/project");
|
|
296
|
+
const reactFramework = result.insights.techStack.frameworks.find((f) => f.name === "react");
|
|
297
|
+
expect(reactFramework).toBeDefined();
|
|
298
|
+
expect(reactFramework.confidence).toBeGreaterThanOrEqual(0.7);
|
|
299
|
+
});
|
|
300
|
+
it("should fallback when no context available", async () => {
|
|
301
|
+
vi.mocked(readdir).mockResolvedValue([]);
|
|
302
|
+
const result = await analyzeProjectContext("/test/project");
|
|
303
|
+
expect(result.meta.warnings).toContain("No manifest files found");
|
|
304
|
+
expect(result.meta.warnings).toContain("No README found");
|
|
305
|
+
});
|
|
306
|
+
});
|
|
307
|
+
});
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
export interface TaskNoteMetadata {
|
|
2
|
+
trackId: string;
|
|
3
|
+
taskName: string;
|
|
4
|
+
summary: string;
|
|
5
|
+
filesChanged: string[];
|
|
6
|
+
reason: string;
|
|
7
|
+
}
|
|
8
|
+
/**
|
|
9
|
+
* Formats task metadata into a structured note format
|
|
10
|
+
*/
|
|
11
|
+
export declare function formatTaskNote(metadata: TaskNoteMetadata): string;
|
|
12
|
+
/**
|
|
13
|
+
* Attaches a task note to a specific commit using git notes
|
|
14
|
+
* @param commitHash - Full or short commit hash
|
|
15
|
+
* @param metadata - Task metadata to attach
|
|
16
|
+
*/
|
|
17
|
+
export declare function attachTaskNote(commitHash: string, metadata: TaskNoteMetadata): Promise<void>;
|
|
18
|
+
/**
|
|
19
|
+
* Retrieves the task note attached to a specific commit
|
|
20
|
+
* @param commitHash - Full or short commit hash
|
|
21
|
+
* @returns The note content, or empty string if no note exists
|
|
22
|
+
*/
|
|
23
|
+
export declare function getTaskNote(commitHash: string): Promise<string>;
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import { exec } from "child_process";
|
|
2
|
+
import { promisify } from "util";
|
|
3
|
+
const execAsync = promisify(exec);
|
|
4
|
+
/**
|
|
5
|
+
* Formats task metadata into a structured note format
|
|
6
|
+
*/
|
|
7
|
+
export function formatTaskNote(metadata) {
|
|
8
|
+
const filesSection = metadata.filesChanged.length > 0
|
|
9
|
+
? metadata.filesChanged.map(f => `- ${f}`).join("\n")
|
|
10
|
+
: "(none)";
|
|
11
|
+
return `Track ID: ${metadata.trackId}
|
|
12
|
+
Task: ${metadata.taskName}
|
|
13
|
+
Summary: ${metadata.summary}
|
|
14
|
+
|
|
15
|
+
Files Changed:
|
|
16
|
+
${filesSection}
|
|
17
|
+
|
|
18
|
+
Reason: ${metadata.reason}`;
|
|
19
|
+
}
|
|
20
|
+
/**
|
|
21
|
+
* Attaches a task note to a specific commit using git notes
|
|
22
|
+
* @param commitHash - Full or short commit hash
|
|
23
|
+
* @param metadata - Task metadata to attach
|
|
24
|
+
*/
|
|
25
|
+
export async function attachTaskNote(commitHash, metadata) {
|
|
26
|
+
const noteContent = formatTaskNote(metadata);
|
|
27
|
+
try {
|
|
28
|
+
// Use git notes add to attach the note
|
|
29
|
+
// The -f flag forces overwriting if a note already exists
|
|
30
|
+
await execAsync(`git notes add -f -m "${noteContent.replace(/"/g, '\\"')}" ${commitHash}`);
|
|
31
|
+
}
|
|
32
|
+
catch (error) {
|
|
33
|
+
throw new Error(`Failed to attach note to commit ${commitHash}: ${error}`);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
/**
|
|
37
|
+
* Retrieves the task note attached to a specific commit
|
|
38
|
+
* @param commitHash - Full or short commit hash
|
|
39
|
+
* @returns The note content, or empty string if no note exists
|
|
40
|
+
*/
|
|
41
|
+
export async function getTaskNote(commitHash) {
|
|
42
|
+
try {
|
|
43
|
+
const { stdout } = await execAsync(`git notes show ${commitHash}`);
|
|
44
|
+
return stdout.trim();
|
|
45
|
+
}
|
|
46
|
+
catch (error) {
|
|
47
|
+
// git notes show returns error if no note exists
|
|
48
|
+
if (error.message?.includes("no note found")) {
|
|
49
|
+
return "";
|
|
50
|
+
}
|
|
51
|
+
throw new Error(`Failed to retrieve note for commit ${commitHash}: ${error}`);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { execSync } from "child_process";
|
|
3
|
+
import { mkdirSync, rmSync, writeFileSync } from "fs";
|
|
4
|
+
import { join } from "path";
|
|
5
|
+
import { attachTaskNote, getTaskNote, formatTaskNote } from "./gitNotes.js";
|
|
6
|
+
describe("gitNotes", () => {
|
|
7
|
+
const testDir = join(process.cwd(), "test-git-repo");
|
|
8
|
+
let testCommitHash;
|
|
9
|
+
beforeEach(() => {
|
|
10
|
+
// Create a temporary git repository for testing
|
|
11
|
+
mkdirSync(testDir, { recursive: true });
|
|
12
|
+
process.chdir(testDir);
|
|
13
|
+
execSync("git init", { stdio: "pipe" });
|
|
14
|
+
execSync('git config user.email "test@example.com"', { stdio: "pipe" });
|
|
15
|
+
execSync('git config user.name "Test User"', { stdio: "pipe" });
|
|
16
|
+
// Create a test commit
|
|
17
|
+
writeFileSync(join(testDir, "test.txt"), "test content");
|
|
18
|
+
execSync("git add test.txt", { stdio: "pipe" });
|
|
19
|
+
execSync('git commit -m "test commit"', { stdio: "pipe" });
|
|
20
|
+
testCommitHash = execSync("git log -1 --format=%H", { encoding: "utf-8" }).trim();
|
|
21
|
+
});
|
|
22
|
+
afterEach(() => {
|
|
23
|
+
// Clean up
|
|
24
|
+
process.chdir(join(testDir, ".."));
|
|
25
|
+
rmSync(testDir, { recursive: true, force: true });
|
|
26
|
+
});
|
|
27
|
+
describe("formatTaskNote", () => {
|
|
28
|
+
it("should format a task note with all metadata", () => {
|
|
29
|
+
const note = formatTaskNote({
|
|
30
|
+
trackId: "track_123",
|
|
31
|
+
taskName: "Implement user authentication",
|
|
32
|
+
summary: "Added JWT-based auth",
|
|
33
|
+
filesChanged: ["src/auth.ts", "src/middleware/auth.ts"],
|
|
34
|
+
reason: "Required for secure user sessions"
|
|
35
|
+
});
|
|
36
|
+
expect(note).toContain("Track ID: track_123");
|
|
37
|
+
expect(note).toContain("Task: Implement user authentication");
|
|
38
|
+
expect(note).toContain("Summary: Added JWT-based auth");
|
|
39
|
+
expect(note).toContain("Files Changed:");
|
|
40
|
+
expect(note).toContain("- src/auth.ts");
|
|
41
|
+
expect(note).toContain("- src/middleware/auth.ts");
|
|
42
|
+
expect(note).toContain("Reason: Required for secure user sessions");
|
|
43
|
+
});
|
|
44
|
+
it("should handle empty files array", () => {
|
|
45
|
+
const note = formatTaskNote({
|
|
46
|
+
trackId: "track_123",
|
|
47
|
+
taskName: "Update docs",
|
|
48
|
+
summary: "Fixed typos",
|
|
49
|
+
filesChanged: [],
|
|
50
|
+
reason: "Documentation maintenance"
|
|
51
|
+
});
|
|
52
|
+
expect(note).toContain("Files Changed:");
|
|
53
|
+
expect(note).toContain("(none)");
|
|
54
|
+
});
|
|
55
|
+
});
|
|
56
|
+
describe("attachTaskNote", () => {
|
|
57
|
+
it("should attach a note to a commit", async () => {
|
|
58
|
+
const metadata = {
|
|
59
|
+
trackId: "track_123",
|
|
60
|
+
taskName: "Test task",
|
|
61
|
+
summary: "Test summary",
|
|
62
|
+
filesChanged: ["test.txt"],
|
|
63
|
+
reason: "Test reason"
|
|
64
|
+
};
|
|
65
|
+
await attachTaskNote(testCommitHash, metadata);
|
|
66
|
+
const note = await getTaskNote(testCommitHash);
|
|
67
|
+
expect(note).toContain("Track ID: track_123");
|
|
68
|
+
expect(note).toContain("Task: Test task");
|
|
69
|
+
});
|
|
70
|
+
it("should throw error for invalid commit hash", async () => {
|
|
71
|
+
const metadata = {
|
|
72
|
+
trackId: "track_123",
|
|
73
|
+
taskName: "Test task",
|
|
74
|
+
summary: "Test summary",
|
|
75
|
+
filesChanged: [],
|
|
76
|
+
reason: "Test reason"
|
|
77
|
+
};
|
|
78
|
+
await expect(attachTaskNote("invalid_hash", metadata)).rejects.toThrow();
|
|
79
|
+
});
|
|
80
|
+
});
|
|
81
|
+
describe("getTaskNote", () => {
|
|
82
|
+
it("should retrieve an attached note", async () => {
|
|
83
|
+
const metadata = {
|
|
84
|
+
trackId: "track_456",
|
|
85
|
+
taskName: "Another test",
|
|
86
|
+
summary: "Another summary",
|
|
87
|
+
filesChanged: ["file1.ts", "file2.ts"],
|
|
88
|
+
reason: "Testing retrieval"
|
|
89
|
+
};
|
|
90
|
+
await attachTaskNote(testCommitHash, metadata);
|
|
91
|
+
const note = await getTaskNote(testCommitHash);
|
|
92
|
+
expect(note).toContain("Track ID: track_456");
|
|
93
|
+
expect(note).toContain("Task: Another test");
|
|
94
|
+
expect(note).toContain("file1.ts");
|
|
95
|
+
expect(note).toContain("file2.ts");
|
|
96
|
+
});
|
|
97
|
+
it("should return empty string when no note exists", async () => {
|
|
98
|
+
const note = await getTaskNote(testCommitHash);
|
|
99
|
+
expect(note).toBe("");
|
|
100
|
+
});
|
|
101
|
+
it("should throw error for invalid commit hash", async () => {
|
|
102
|
+
await expect(getTaskNote("invalid_hash")).rejects.toThrow();
|
|
103
|
+
});
|
|
104
|
+
});
|
|
105
|
+
});
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
export interface IgnorePatterns {
|
|
2
|
+
gitignore: string[];
|
|
3
|
+
ignore: string[];
|
|
4
|
+
geminiignore: string[];
|
|
5
|
+
}
|
|
6
|
+
export declare const buildIgnoreMatcher: (_rootDir: string, patterns: IgnorePatterns) => {
|
|
7
|
+
ignores: (relativePath: string) => boolean;
|
|
8
|
+
shouldTraverse: (relativePath: string) => boolean;
|
|
9
|
+
};
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
const safetyBlacklist = [".git", "node_modules", ".DS_Store"];
|
|
2
|
+
const normalizePath = (relativePath) => relativePath.replace(/\\/g, "/").replace(/\/$/, "");
|
|
3
|
+
const normalizePatterns = (rawPatterns) => rawPatterns
|
|
4
|
+
.map((pattern) => pattern.trim())
|
|
5
|
+
.filter((pattern) => pattern.length > 0 && !pattern.startsWith("#"));
|
|
6
|
+
const matchesPattern = (normalizedPath, pattern) => {
|
|
7
|
+
if (pattern === "*")
|
|
8
|
+
return true;
|
|
9
|
+
if (pattern.startsWith("*.")) {
|
|
10
|
+
const ext = pattern.slice(1);
|
|
11
|
+
return normalizedPath.endsWith(ext);
|
|
12
|
+
}
|
|
13
|
+
if (pattern.includes("**")) {
|
|
14
|
+
let regexPattern = pattern;
|
|
15
|
+
if (pattern.startsWith("**/")) {
|
|
16
|
+
const remainder = pattern.slice(3);
|
|
17
|
+
regexPattern = `(^|.*/)${remainder.replace(/[.+?^${}()|[\]\\]/g, "\\$&")}`;
|
|
18
|
+
}
|
|
19
|
+
else if (pattern.endsWith("/**/*")) {
|
|
20
|
+
const prefix = pattern.slice(0, -5);
|
|
21
|
+
regexPattern = `${prefix.replace(/[.+?^${}()|[\]\\]/g, "\\$&")}/.*`;
|
|
22
|
+
}
|
|
23
|
+
else {
|
|
24
|
+
const segments = pattern.split("/");
|
|
25
|
+
const regexSegments = segments.map((segment, idx) => {
|
|
26
|
+
if (segment === "**") {
|
|
27
|
+
if (idx === 0)
|
|
28
|
+
return "(.*/)?";
|
|
29
|
+
if (idx === segments.length - 1)
|
|
30
|
+
return "(/.*)?";
|
|
31
|
+
return "(/.*/|/)";
|
|
32
|
+
}
|
|
33
|
+
if (segment === "*")
|
|
34
|
+
return "[^/]*";
|
|
35
|
+
return segment.replace(/[.+?^${}()|[\]\\]/g, "\\$&");
|
|
36
|
+
});
|
|
37
|
+
regexPattern = regexSegments.join("");
|
|
38
|
+
}
|
|
39
|
+
return new RegExp(`^${regexPattern}$`).test(normalizedPath);
|
|
40
|
+
}
|
|
41
|
+
if (normalizedPath === pattern)
|
|
42
|
+
return true;
|
|
43
|
+
return normalizedPath.startsWith(`${pattern}/`);
|
|
44
|
+
};
|
|
45
|
+
const hasAllowedDescendant = (normalizedPath, allowlist) => allowlist.some((allow) => allow === normalizedPath || allow.startsWith(`${normalizedPath}/`));
|
|
46
|
+
export const buildIgnoreMatcher = (_rootDir, patterns) => {
|
|
47
|
+
const allPatterns = normalizePatterns([
|
|
48
|
+
...patterns.gitignore,
|
|
49
|
+
...patterns.ignore,
|
|
50
|
+
...patterns.geminiignore,
|
|
51
|
+
]);
|
|
52
|
+
const allowlist = allPatterns.filter((pattern) => pattern.startsWith("!")).map((pattern) => {
|
|
53
|
+
return normalizePath(pattern.slice(1));
|
|
54
|
+
});
|
|
55
|
+
const ignores = allPatterns
|
|
56
|
+
.filter((pattern) => !pattern.startsWith("!"))
|
|
57
|
+
.map((pattern) => normalizePath(pattern));
|
|
58
|
+
const isIgnored = (relativePath) => {
|
|
59
|
+
const normalized = normalizePath(relativePath);
|
|
60
|
+
const parts = normalized.split("/").filter(Boolean);
|
|
61
|
+
if (parts.some((part) => safetyBlacklist.includes(part))) {
|
|
62
|
+
return !allowlist.some((allow) => matchesPattern(normalized, allow));
|
|
63
|
+
}
|
|
64
|
+
if (allowlist.some((allow) => matchesPattern(normalized, allow)))
|
|
65
|
+
return false;
|
|
66
|
+
return ignores.some((ignore) => matchesPattern(normalized, ignore));
|
|
67
|
+
};
|
|
68
|
+
const shouldTraverse = (relativePath) => {
|
|
69
|
+
if (relativePath === "." || relativePath === "")
|
|
70
|
+
return true;
|
|
71
|
+
const normalized = normalizePath(relativePath);
|
|
72
|
+
if (!isIgnored(normalized))
|
|
73
|
+
return true;
|
|
74
|
+
return hasAllowedDescendant(normalized, allowlist);
|
|
75
|
+
};
|
|
76
|
+
return { ignores: isIgnored, shouldTraverse };
|
|
77
|
+
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|