@draht/coding-agent 2026.3.4 → 2026.3.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +36 -0
- package/agents/architect.md +1 -0
- package/agents/debugger.md +1 -0
- package/agents/git-committer.md +1 -0
- package/agents/implementer.md +1 -0
- package/agents/reviewer.md +1 -0
- package/agents/security-auditor.md +1 -0
- package/agents/verifier.md +1 -0
- package/dist/agents/architect.md +1 -0
- package/dist/agents/debugger.md +1 -0
- package/dist/agents/git-committer.md +1 -0
- package/dist/agents/implementer.md +1 -0
- package/dist/agents/reviewer.md +1 -0
- package/dist/agents/security-auditor.md +1 -0
- package/dist/agents/verifier.md +1 -0
- package/dist/config.d.ts +7 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +15 -0
- package/dist/config.js.map +1 -1
- package/dist/core/package-manager.d.ts.map +1 -1
- package/dist/core/package-manager.js +7 -7
- package/dist/core/package-manager.js.map +1 -1
- package/dist/core/prompt-templates.d.ts.map +1 -1
- package/dist/core/prompt-templates.js +8 -6
- package/dist/core/prompt-templates.js.map +1 -1
- package/dist/core/resource-loader.d.ts.map +1 -1
- package/dist/core/resource-loader.js +9 -8
- package/dist/core/resource-loader.js.map +1 -1
- package/dist/core/settings-manager.d.ts.map +1 -1
- package/dist/core/settings-manager.js +2 -2
- package/dist/core/settings-manager.js.map +1 -1
- package/dist/core/skills.d.ts.map +1 -1
- package/dist/core/skills.js +3 -3
- package/dist/core/skills.js.map +1 -1
- package/dist/gsd/domain-validator.d.ts +18 -0
- package/dist/gsd/domain-validator.d.ts.map +1 -0
- package/dist/gsd/domain-validator.js +61 -0
- package/dist/gsd/domain-validator.js.map +1 -0
- package/dist/gsd/domain.d.ts +12 -0
- package/dist/gsd/domain.d.ts.map +1 -0
- package/dist/gsd/domain.js +113 -0
- package/dist/gsd/domain.js.map +1 -0
- package/dist/gsd/git.d.ts +20 -0
- package/dist/gsd/git.d.ts.map +1 -0
- package/dist/gsd/git.js +59 -0
- package/dist/gsd/git.js.map +1 -0
- package/dist/gsd/hook-utils.d.ts +22 -0
- package/dist/gsd/hook-utils.d.ts.map +1 -0
- package/dist/gsd/hook-utils.js +100 -0
- package/dist/gsd/hook-utils.js.map +1 -0
- package/dist/gsd/index.d.ts +9 -0
- package/dist/gsd/index.d.ts.map +1 -0
- package/dist/gsd/index.js +8 -0
- package/dist/gsd/index.js.map +1 -0
- package/dist/gsd/planning.d.ts +20 -0
- package/dist/gsd/planning.d.ts.map +1 -0
- package/dist/gsd/planning.js +167 -0
- package/dist/gsd/planning.js.map +1 -0
- package/dist/hooks/gsd/draht-post-task.js +44 -11
- package/dist/hooks/gsd/draht-quality-gate.js +99 -57
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/migrations.d.ts.map +1 -1
- package/dist/migrations.js +2 -2
- package/dist/migrations.js.map +1 -1
- package/dist/modes/interactive/components/config-selector.d.ts.map +1 -1
- package/dist/modes/interactive/components/config-selector.js +3 -3
- package/dist/modes/interactive/components/config-selector.js.map +1 -1
- package/dist/modes/interactive/interactive-mode.d.ts.map +1 -1
- package/dist/modes/interactive/interactive-mode.js +2 -2
- package/dist/modes/interactive/interactive-mode.js.map +1 -1
- package/dist/prompts/agents/build.md +5 -1
- package/dist/prompts/agents/plan.md +5 -1
- package/dist/prompts/agents/verify.md +5 -1
- package/dist/prompts/commands/atomic-commit.md +8 -16
- package/dist/prompts/commands/discuss-phase.md +19 -3
- package/dist/prompts/commands/execute-phase.md +59 -35
- package/dist/prompts/commands/fix.md +14 -6
- package/dist/prompts/commands/init-project.md +21 -3
- package/dist/prompts/commands/map-codebase.md +24 -19
- package/dist/prompts/commands/new-project.md +20 -2
- package/dist/prompts/commands/next-milestone.md +8 -2
- package/dist/prompts/commands/pause-work.md +4 -0
- package/dist/prompts/commands/plan-phase.md +35 -7
- package/dist/prompts/commands/progress.md +4 -0
- package/dist/prompts/commands/quick.md +20 -7
- package/dist/prompts/commands/resume-work.md +4 -0
- package/dist/prompts/commands/review.md +16 -10
- package/dist/prompts/commands/verify-work.md +37 -17
- package/hooks/gsd/draht-post-task.js +44 -11
- package/hooks/gsd/draht-quality-gate.js +99 -57
- package/package.json +5 -5
- package/prompts/agents/build.md +5 -1
- package/prompts/agents/plan.md +5 -1
- package/prompts/agents/verify.md +5 -1
- package/prompts/commands/atomic-commit.md +8 -16
- package/prompts/commands/discuss-phase.md +19 -3
- package/prompts/commands/execute-phase.md +59 -35
- package/prompts/commands/fix.md +14 -6
- package/prompts/commands/init-project.md +21 -3
- package/prompts/commands/map-codebase.md +24 -19
- package/prompts/commands/new-project.md +20 -2
- package/prompts/commands/next-milestone.md +8 -2
- package/prompts/commands/pause-work.md +4 -0
- package/prompts/commands/plan-phase.md +35 -7
- package/prompts/commands/progress.md +4 -0
- package/prompts/commands/quick.md +20 -7
- package/prompts/commands/resume-work.md +4 -0
- package/prompts/commands/review.md +16 -10
- package/prompts/commands/verify-work.md +37 -17
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generate a DOMAIN-MODEL.md scaffold from PROJECT.md.
|
|
3
|
+
* Requires .planning/PROJECT.md to exist.
|
|
4
|
+
* Returns the path to the created file.
|
|
5
|
+
*/
|
|
6
|
+
export declare function createDomainModel(cwd: string): string;
|
|
7
|
+
/**
|
|
8
|
+
* Scan the codebase and write .planning/codebase/ analysis files.
|
|
9
|
+
* Returns array of created file paths.
|
|
10
|
+
*/
|
|
11
|
+
export declare function mapCodebase(cwd: string): string[];
|
|
12
|
+
//# sourceMappingURL=domain.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"domain.d.ts","sourceRoot":"","sources":["../../src/gsd/domain.ts"],"names":[],"mappings":"AAsBA;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAqCrD;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,EAAE,CAoFjD","sourcesContent":["// GSD Domain module — domain model and codebase mapping operations.\n// Part of the draht GSD (Get Shit Done) methodology.\n// Exported via src/gsd/index.ts and @draht/coding-agent.\n\nimport { execSync } from \"node:child_process\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\n\nconst PLANNING = \".planning\";\n\nfunction planningPath(cwd: string, ...segments: string[]): string {\n\treturn path.join(cwd, PLANNING, ...segments);\n}\n\nfunction ensureDir(dir: string): void {\n\tif (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });\n}\n\nfunction timestamp(): string {\n\treturn new Date().toISOString().replace(\"T\", \" \").slice(0, 19);\n}\n\n/**\n * Generate a DOMAIN-MODEL.md scaffold from PROJECT.md.\n * Requires .planning/PROJECT.md to exist.\n * Returns the path to the created file.\n */\nexport function createDomainModel(cwd: string): string {\n\tconst projectPath = planningPath(cwd, \"PROJECT.md\");\n\tif (!fs.existsSync(projectPath)) {\n\t\tthrow new Error(\"No PROJECT.md found — run create-project first\");\n\t}\n\n\tconst outPath = planningPath(cwd, \"DOMAIN-MODEL.md\");\n\tconst tmpl = `# Domain Model\n\n## Bounded Contexts\n[Extract from PROJECT.md — identify distinct areas of responsibility]\n\n## Context Map\n[How bounded contexts interact — upstream/downstream, shared kernel, etc.]\n\n## Entities\n[Core domain objects with identity]\n\n## Value Objects\n[Immutable objects defined by attributes]\n\n## Aggregates\n[Cluster of entities with a root — transactional boundary]\n\n## Domain Events\n[Things that happen in the domain]\n\n## Ubiquitous Language Glossary\n| Term | Context | Definition |\n|------|---------|------------|\n| [term] | [context] 
| [definition] |\n\n---\nGenerated from PROJECT.md: ${timestamp()}\n`;\n\tfs.writeFileSync(outPath, tmpl, \"utf-8\");\n\treturn outPath;\n}\n\n/**\n * Scan the codebase and write .planning/codebase/ analysis files.\n * Returns array of created file paths.\n */\nexport function mapCodebase(cwd: string): string[] {\n\tconst outDir = planningPath(cwd, \"codebase\");\n\tensureDir(outDir);\n\n\t// Gather file tree\n\tlet tree = \"\";\n\ttry {\n\t\ttree = execSync(\n\t\t\t`find . -type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.planning/*' | head -200`,\n\t\t\t{ cwd, encoding: \"utf-8\" },\n\t\t);\n\t} catch {\n\t\ttree = \"(unable to list files)\";\n\t}\n\n\t// Gather package info\n\tlet pkgJson: {\n\t\tname?: string;\n\t\tdependencies?: Record<string, string>;\n\t\tdevDependencies?: Record<string, string>;\n\t} | null = null;\n\ttry {\n\t\tpkgJson = JSON.parse(fs.readFileSync(path.join(cwd, \"package.json\"), \"utf-8\"));\n\t} catch {\n\t\t// not a Node.js project\n\t}\n\n\tconst stackPath = path.join(outDir, \"STACK.md\");\n\tfs.writeFileSync(\n\t\tstackPath,\n\t\t`# Technology Stack\\n\\nGenerated: ${timestamp()}\\n\\n## File Tree (first 200 files)\\n\\`\\`\\`\\n${tree}\\`\\`\\`\\n\\n## Package Info\\n\\`\\`\\`json\\n${pkgJson ? 
JSON.stringify({ name: pkgJson.name, dependencies: pkgJson.dependencies, devDependencies: pkgJson.devDependencies }, null, 2) : \"No package.json found\"}\\n\\`\\`\\`\\n\\n## TODO\\n- [ ] Fill in languages, versions, frameworks\\n- [ ] Document build tools and runtime\\n`,\n\t\t\"utf-8\",\n\t);\n\n\tconst archPath = path.join(outDir, \"ARCHITECTURE.md\");\n\tfs.writeFileSync(\n\t\tarchPath,\n\t\t`# Architecture\\n\\nGenerated: ${timestamp()}\\n\\n## TODO\\n- [ ] Document file/directory patterns\\n- [ ] Map module boundaries\\n- [ ] Describe data flow\\n`,\n\t\t\"utf-8\",\n\t);\n\n\tconst convPath = path.join(outDir, \"CONVENTIONS.md\");\n\tfs.writeFileSync(\n\t\tconvPath,\n\t\t`# Conventions\\n\\nGenerated: ${timestamp()}\\n\\n## TODO\\n- [ ] Document code style patterns\\n- [ ] Document testing patterns\\n- [ ] Document error handling approach\\n`,\n\t\t\"utf-8\",\n\t);\n\n\tconst concernsPath = path.join(outDir, \"CONCERNS.md\");\n\tfs.writeFileSync(\n\t\tconcernsPath,\n\t\t`# Concerns\\n\\nGenerated: ${timestamp()}\\n\\n## TODO\\n- [ ] Identify technical debt\\n- [ ] Flag security concerns\\n- [ ] Note missing tests\\n`,\n\t\t\"utf-8\",\n\t);\n\n\t// Domain model extraction\n\tlet domainHints = \"\";\n\ttry {\n\t\tconst types = execSync(\n\t\t\t`grep -rn 'export\\\\s\\\\+\\\\(interface\\\\|type\\\\|class\\\\)' --include='*.ts' --include='*.go' . 2>/dev/null | grep -v node_modules | grep -v dist | head -50`,\n\t\t\t{ cwd, encoding: \"utf-8\" },\n\t\t).trim();\n\t\tif (types) domainHints += `## Types/Interfaces (potential entities)\\n\\`\\`\\`\\n${types}\\n\\`\\`\\`\\n\\n`;\n\t} catch {\n\t\t// no ts/go files\n\t}\n\ttry {\n\t\tconst dirs = execSync(\n\t\t\t`find . 
-type d -maxdepth 3 -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' | sort`,\n\t\t\t{ cwd, encoding: \"utf-8\" },\n\t\t).trim();\n\t\tif (dirs) domainHints += `## Directory Structure (potential bounded contexts)\\n\\`\\`\\`\\n${dirs}\\n\\`\\`\\`\\n`;\n\t} catch {\n\t\t// ignore\n\t}\n\n\tconst hintsPath = path.join(outDir, \"DOMAIN-HINTS.md\");\n\tfs.writeFileSync(\n\t\thintsPath,\n\t\t`# Domain Model Hints\\n\\nGenerated: ${timestamp()}\\n\\nExtracted from codebase to help identify domain model.\\n\\n${domainHints}\\n## TODO\\n- [ ] Identify entities vs value objects\\n- [ ] Map bounded contexts from directory structure\\n- [ ] Define ubiquitous language glossary\\n`,\n\t\t\"utf-8\",\n\t);\n\n\treturn [stackPath, archPath, convPath, concernsPath, hintsPath];\n}\n"]}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
// GSD Domain module — domain model and codebase mapping operations.
|
|
2
|
+
// Part of the draht GSD (Get Shit Done) methodology.
|
|
3
|
+
// Exported via src/gsd/index.ts and @draht/coding-agent.
|
|
4
|
+
import { execSync } from "node:child_process";
|
|
5
|
+
import * as fs from "node:fs";
|
|
6
|
+
import * as path from "node:path";
|
|
7
|
+
const PLANNING = ".planning";
|
|
8
|
+
function planningPath(cwd, ...segments) {
|
|
9
|
+
return path.join(cwd, PLANNING, ...segments);
|
|
10
|
+
}
|
|
11
|
+
function ensureDir(dir) {
|
|
12
|
+
if (!fs.existsSync(dir))
|
|
13
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
14
|
+
}
|
|
15
|
+
function timestamp() {
|
|
16
|
+
return new Date().toISOString().replace("T", " ").slice(0, 19);
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Generate a DOMAIN-MODEL.md scaffold from PROJECT.md.
|
|
20
|
+
* Requires .planning/PROJECT.md to exist.
|
|
21
|
+
* Returns the path to the created file.
|
|
22
|
+
*/
|
|
23
|
+
export function createDomainModel(cwd) {
|
|
24
|
+
const projectPath = planningPath(cwd, "PROJECT.md");
|
|
25
|
+
if (!fs.existsSync(projectPath)) {
|
|
26
|
+
throw new Error("No PROJECT.md found — run create-project first");
|
|
27
|
+
}
|
|
28
|
+
const outPath = planningPath(cwd, "DOMAIN-MODEL.md");
|
|
29
|
+
const tmpl = `# Domain Model
|
|
30
|
+
|
|
31
|
+
## Bounded Contexts
|
|
32
|
+
[Extract from PROJECT.md — identify distinct areas of responsibility]
|
|
33
|
+
|
|
34
|
+
## Context Map
|
|
35
|
+
[How bounded contexts interact — upstream/downstream, shared kernel, etc.]
|
|
36
|
+
|
|
37
|
+
## Entities
|
|
38
|
+
[Core domain objects with identity]
|
|
39
|
+
|
|
40
|
+
## Value Objects
|
|
41
|
+
[Immutable objects defined by attributes]
|
|
42
|
+
|
|
43
|
+
## Aggregates
|
|
44
|
+
[Cluster of entities with a root — transactional boundary]
|
|
45
|
+
|
|
46
|
+
## Domain Events
|
|
47
|
+
[Things that happen in the domain]
|
|
48
|
+
|
|
49
|
+
## Ubiquitous Language Glossary
|
|
50
|
+
| Term | Context | Definition |
|
|
51
|
+
|------|---------|------------|
|
|
52
|
+
| [term] | [context] | [definition] |
|
|
53
|
+
|
|
54
|
+
---
|
|
55
|
+
Generated from PROJECT.md: ${timestamp()}
|
|
56
|
+
`;
|
|
57
|
+
fs.writeFileSync(outPath, tmpl, "utf-8");
|
|
58
|
+
return outPath;
|
|
59
|
+
}
|
|
60
|
+
/**
|
|
61
|
+
* Scan the codebase and write .planning/codebase/ analysis files.
|
|
62
|
+
* Returns array of created file paths.
|
|
63
|
+
*/
|
|
64
|
+
export function mapCodebase(cwd) {
|
|
65
|
+
const outDir = planningPath(cwd, "codebase");
|
|
66
|
+
ensureDir(outDir);
|
|
67
|
+
// Gather file tree
|
|
68
|
+
let tree = "";
|
|
69
|
+
try {
|
|
70
|
+
tree = execSync(`find . -type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.planning/*' | head -200`, { cwd, encoding: "utf-8" });
|
|
71
|
+
}
|
|
72
|
+
catch {
|
|
73
|
+
tree = "(unable to list files)";
|
|
74
|
+
}
|
|
75
|
+
// Gather package info
|
|
76
|
+
let pkgJson = null;
|
|
77
|
+
try {
|
|
78
|
+
pkgJson = JSON.parse(fs.readFileSync(path.join(cwd, "package.json"), "utf-8"));
|
|
79
|
+
}
|
|
80
|
+
catch {
|
|
81
|
+
// not a Node.js project
|
|
82
|
+
}
|
|
83
|
+
const stackPath = path.join(outDir, "STACK.md");
|
|
84
|
+
fs.writeFileSync(stackPath, `# Technology Stack\n\nGenerated: ${timestamp()}\n\n## File Tree (first 200 files)\n\`\`\`\n${tree}\`\`\`\n\n## Package Info\n\`\`\`json\n${pkgJson ? JSON.stringify({ name: pkgJson.name, dependencies: pkgJson.dependencies, devDependencies: pkgJson.devDependencies }, null, 2) : "No package.json found"}\n\`\`\`\n\n## TODO\n- [ ] Fill in languages, versions, frameworks\n- [ ] Document build tools and runtime\n`, "utf-8");
|
|
85
|
+
const archPath = path.join(outDir, "ARCHITECTURE.md");
|
|
86
|
+
fs.writeFileSync(archPath, `# Architecture\n\nGenerated: ${timestamp()}\n\n## TODO\n- [ ] Document file/directory patterns\n- [ ] Map module boundaries\n- [ ] Describe data flow\n`, "utf-8");
|
|
87
|
+
const convPath = path.join(outDir, "CONVENTIONS.md");
|
|
88
|
+
fs.writeFileSync(convPath, `# Conventions\n\nGenerated: ${timestamp()}\n\n## TODO\n- [ ] Document code style patterns\n- [ ] Document testing patterns\n- [ ] Document error handling approach\n`, "utf-8");
|
|
89
|
+
const concernsPath = path.join(outDir, "CONCERNS.md");
|
|
90
|
+
fs.writeFileSync(concernsPath, `# Concerns\n\nGenerated: ${timestamp()}\n\n## TODO\n- [ ] Identify technical debt\n- [ ] Flag security concerns\n- [ ] Note missing tests\n`, "utf-8");
|
|
91
|
+
// Domain model extraction
|
|
92
|
+
let domainHints = "";
|
|
93
|
+
try {
|
|
94
|
+
const types = execSync(`grep -rn 'export\\s\\+\\(interface\\|type\\|class\\)' --include='*.ts' --include='*.go' . 2>/dev/null | grep -v node_modules | grep -v dist | head -50`, { cwd, encoding: "utf-8" }).trim();
|
|
95
|
+
if (types)
|
|
96
|
+
domainHints += `## Types/Interfaces (potential entities)\n\`\`\`\n${types}\n\`\`\`\n\n`;
|
|
97
|
+
}
|
|
98
|
+
catch {
|
|
99
|
+
// no ts/go files
|
|
100
|
+
}
|
|
101
|
+
try {
|
|
102
|
+
const dirs = execSync(`find . -type d -maxdepth 3 -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' | sort`, { cwd, encoding: "utf-8" }).trim();
|
|
103
|
+
if (dirs)
|
|
104
|
+
domainHints += `## Directory Structure (potential bounded contexts)\n\`\`\`\n${dirs}\n\`\`\`\n`;
|
|
105
|
+
}
|
|
106
|
+
catch {
|
|
107
|
+
// ignore
|
|
108
|
+
}
|
|
109
|
+
const hintsPath = path.join(outDir, "DOMAIN-HINTS.md");
|
|
110
|
+
fs.writeFileSync(hintsPath, `# Domain Model Hints\n\nGenerated: ${timestamp()}\n\nExtracted from codebase to help identify domain model.\n\n${domainHints}\n## TODO\n- [ ] Identify entities vs value objects\n- [ ] Map bounded contexts from directory structure\n- [ ] Define ubiquitous language glossary\n`, "utf-8");
|
|
111
|
+
return [stackPath, archPath, convPath, concernsPath, hintsPath];
|
|
112
|
+
}
|
|
113
|
+
//# sourceMappingURL=domain.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"domain.js","sourceRoot":"","sources":["../../src/gsd/domain.ts"],"names":[],"mappings":"AAAA,sEAAoE;AACpE,qDAAqD;AACrD,yDAAyD;AAEzD,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAC9C,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAC9B,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAElC,MAAM,QAAQ,GAAG,WAAW,CAAC;AAE7B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAG,QAAkB,EAAU;IACjE,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,EAAE,GAAG,QAAQ,CAAC,CAAC;AAAA,CAC7C;AAED,SAAS,SAAS,CAAC,GAAW,EAAQ;IACrC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;QAAE,EAAE,CAAC,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;AAAA,CAChE;AAED,SAAS,SAAS,GAAW;IAC5B,OAAO,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AAAA,CAC/D;AAED;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAAC,GAAW,EAAU;IACtD,MAAM,WAAW,GAAG,YAAY,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IACpD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC,EAAE,CAAC;QACjC,MAAM,IAAI,KAAK,CAAC,kDAAgD,CAAC,CAAC;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;IACrD,MAAM,IAAI,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;6BA0Be,SAAS,EAAE;CACvC,CAAC;IACD,EAAE,CAAC,aAAa,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACzC,OAAO,OAAO,CAAC;AAAA,CACf;AAED;;;GAGG;AACH,MAAM,UAAU,WAAW,CAAC,GAAW,EAAY;IAClD,MAAM,MAAM,GAAG,YAAY,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC;IAC7C,SAAS,CAAC,MAAM,CAAC,CAAC;IAElB,mBAAmB;IACnB,IAAI,IAAI,GAAG,EAAE,CAAC;IACd,IAAI,CAAC;QACJ,IAAI,GAAG,QAAQ,CACd,iIAAiI,EACjI,EAAE,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,CAC1B,CAAC;IACH,CAAC;IAAC,MAAM,CAAC;QACR,IAAI,GAAG,wBAAwB,CAAC;IACjC,CAAC;IAED,sBAAsB;IACtB,IAAI,OAAO,GAIA,IAAI,CAAC;IAChB,IAAI,CAAC;QACJ,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,cAAc,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC;IAChF,CAAC;IAAC,MAAM,CAAC;QACR,wBAAwB;IACzB,CAAC;IAED,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IAChD,EAAE,CAAC,aAAa,CACf,SAAS,EACT,oCAAoC,SAAS,EAAE,+CAA+C,IAAI,0CAA0C,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE,YAAY,EAAE,OAAO,CAAC,YAAY,EAAE,eAAe,EAAE,OAAO,CAAC,eAAe,E
AAE,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,uBAAuB,8GAA8G,EAC3Z,OAAO,CACP,CAAC;IAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,iBAAiB,CAAC,CAAC;IACtD,EAAE,CAAC,aAAa,CACf,QAAQ,EACR,gCAAgC,SAAS,EAAE,8GAA8G,EACzJ,OAAO,CACP,CAAC;IAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;IACrD,EAAE,CAAC,aAAa,CACf,QAAQ,EACR,+BAA+B,SAAS,EAAE,4HAA4H,EACtK,OAAO,CACP,CAAC;IAEF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;IACtD,EAAE,CAAC,aAAa,CACf,YAAY,EACZ,4BAA4B,SAAS,EAAE,sGAAsG,EAC7I,OAAO,CACP,CAAC;IAEF,0BAA0B;IAC1B,IAAI,WAAW,GAAG,EAAE,CAAC;IACrB,IAAI,CAAC;QACJ,MAAM,KAAK,GAAG,QAAQ,CACrB,wJAAwJ,EACxJ,EAAE,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,CAC1B,CAAC,IAAI,EAAE,CAAC;QACT,IAAI,KAAK;YAAE,WAAW,IAAI,qDAAqD,KAAK,cAAc,CAAC;IACpG,CAAC;IAAC,MAAM,CAAC;QACR,iBAAiB;IAClB,CAAC;IACD,IAAI,CAAC;QACJ,MAAM,IAAI,GAAG,QAAQ,CACpB,6GAA6G,EAC7G,EAAE,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,CAC1B,CAAC,IAAI,EAAE,CAAC;QACT,IAAI,IAAI;YAAE,WAAW,IAAI,gEAAgE,IAAI,YAAY,CAAC;IAC3G,CAAC;IAAC,MAAM,CAAC;QACR,SAAS;IACV,CAAC;IAED,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,iBAAiB,CAAC,CAAC;IACvD,EAAE,CAAC,aAAa,CACf,SAAS,EACT,sCAAsC,SAAS,EAAE,iEAAiE,WAAW,uJAAuJ,EACpR,OAAO,CACP,CAAC;IAEF,OAAO,CAAC,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC;AAAA,CAChE","sourcesContent":["// GSD Domain module — domain model and codebase mapping operations.\n// Part of the draht GSD (Get Shit Done) methodology.\n// Exported via src/gsd/index.ts and @draht/coding-agent.\n\nimport { execSync } from \"node:child_process\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\n\nconst PLANNING = \".planning\";\n\nfunction planningPath(cwd: string, ...segments: string[]): string {\n\treturn path.join(cwd, PLANNING, ...segments);\n}\n\nfunction ensureDir(dir: string): void {\n\tif (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });\n}\n\nfunction timestamp(): string {\n\treturn new Date().toISOString().replace(\"T\", \" \").slice(0, 19);\n}\n\n/**\n * Generate a DOMAIN-MODEL.md scaffold 
from PROJECT.md.\n * Requires .planning/PROJECT.md to exist.\n * Returns the path to the created file.\n */\nexport function createDomainModel(cwd: string): string {\n\tconst projectPath = planningPath(cwd, \"PROJECT.md\");\n\tif (!fs.existsSync(projectPath)) {\n\t\tthrow new Error(\"No PROJECT.md found — run create-project first\");\n\t}\n\n\tconst outPath = planningPath(cwd, \"DOMAIN-MODEL.md\");\n\tconst tmpl = `# Domain Model\n\n## Bounded Contexts\n[Extract from PROJECT.md — identify distinct areas of responsibility]\n\n## Context Map\n[How bounded contexts interact — upstream/downstream, shared kernel, etc.]\n\n## Entities\n[Core domain objects with identity]\n\n## Value Objects\n[Immutable objects defined by attributes]\n\n## Aggregates\n[Cluster of entities with a root — transactional boundary]\n\n## Domain Events\n[Things that happen in the domain]\n\n## Ubiquitous Language Glossary\n| Term | Context | Definition |\n|------|---------|------------|\n| [term] | [context] | [definition] |\n\n---\nGenerated from PROJECT.md: ${timestamp()}\n`;\n\tfs.writeFileSync(outPath, tmpl, \"utf-8\");\n\treturn outPath;\n}\n\n/**\n * Scan the codebase and write .planning/codebase/ analysis files.\n * Returns array of created file paths.\n */\nexport function mapCodebase(cwd: string): string[] {\n\tconst outDir = planningPath(cwd, \"codebase\");\n\tensureDir(outDir);\n\n\t// Gather file tree\n\tlet tree = \"\";\n\ttry {\n\t\ttree = execSync(\n\t\t\t`find . 
-type f -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' -not -path '*/.planning/*' | head -200`,\n\t\t\t{ cwd, encoding: \"utf-8\" },\n\t\t);\n\t} catch {\n\t\ttree = \"(unable to list files)\";\n\t}\n\n\t// Gather package info\n\tlet pkgJson: {\n\t\tname?: string;\n\t\tdependencies?: Record<string, string>;\n\t\tdevDependencies?: Record<string, string>;\n\t} | null = null;\n\ttry {\n\t\tpkgJson = JSON.parse(fs.readFileSync(path.join(cwd, \"package.json\"), \"utf-8\"));\n\t} catch {\n\t\t// not a Node.js project\n\t}\n\n\tconst stackPath = path.join(outDir, \"STACK.md\");\n\tfs.writeFileSync(\n\t\tstackPath,\n\t\t`# Technology Stack\\n\\nGenerated: ${timestamp()}\\n\\n## File Tree (first 200 files)\\n\\`\\`\\`\\n${tree}\\`\\`\\`\\n\\n## Package Info\\n\\`\\`\\`json\\n${pkgJson ? JSON.stringify({ name: pkgJson.name, dependencies: pkgJson.dependencies, devDependencies: pkgJson.devDependencies }, null, 2) : \"No package.json found\"}\\n\\`\\`\\`\\n\\n## TODO\\n- [ ] Fill in languages, versions, frameworks\\n- [ ] Document build tools and runtime\\n`,\n\t\t\"utf-8\",\n\t);\n\n\tconst archPath = path.join(outDir, \"ARCHITECTURE.md\");\n\tfs.writeFileSync(\n\t\tarchPath,\n\t\t`# Architecture\\n\\nGenerated: ${timestamp()}\\n\\n## TODO\\n- [ ] Document file/directory patterns\\n- [ ] Map module boundaries\\n- [ ] Describe data flow\\n`,\n\t\t\"utf-8\",\n\t);\n\n\tconst convPath = path.join(outDir, \"CONVENTIONS.md\");\n\tfs.writeFileSync(\n\t\tconvPath,\n\t\t`# Conventions\\n\\nGenerated: ${timestamp()}\\n\\n## TODO\\n- [ ] Document code style patterns\\n- [ ] Document testing patterns\\n- [ ] Document error handling approach\\n`,\n\t\t\"utf-8\",\n\t);\n\n\tconst concernsPath = path.join(outDir, \"CONCERNS.md\");\n\tfs.writeFileSync(\n\t\tconcernsPath,\n\t\t`# Concerns\\n\\nGenerated: ${timestamp()}\\n\\n## TODO\\n- [ ] Identify technical debt\\n- [ ] Flag security concerns\\n- [ ] Note missing tests\\n`,\n\t\t\"utf-8\",\n\t);\n\n\t// Domain 
model extraction\n\tlet domainHints = \"\";\n\ttry {\n\t\tconst types = execSync(\n\t\t\t`grep -rn 'export\\\\s\\\\+\\\\(interface\\\\|type\\\\|class\\\\)' --include='*.ts' --include='*.go' . 2>/dev/null | grep -v node_modules | grep -v dist | head -50`,\n\t\t\t{ cwd, encoding: \"utf-8\" },\n\t\t).trim();\n\t\tif (types) domainHints += `## Types/Interfaces (potential entities)\\n\\`\\`\\`\\n${types}\\n\\`\\`\\`\\n\\n`;\n\t} catch {\n\t\t// no ts/go files\n\t}\n\ttry {\n\t\tconst dirs = execSync(\n\t\t\t`find . -type d -maxdepth 3 -not -path '*/node_modules/*' -not -path '*/.git/*' -not -path '*/dist/*' | sort`,\n\t\t\t{ cwd, encoding: \"utf-8\" },\n\t\t).trim();\n\t\tif (dirs) domainHints += `## Directory Structure (potential bounded contexts)\\n\\`\\`\\`\\n${dirs}\\n\\`\\`\\`\\n`;\n\t} catch {\n\t\t// ignore\n\t}\n\n\tconst hintsPath = path.join(outDir, \"DOMAIN-HINTS.md\");\n\tfs.writeFileSync(\n\t\thintsPath,\n\t\t`# Domain Model Hints\\n\\nGenerated: ${timestamp()}\\n\\nExtracted from codebase to help identify domain model.\\n\\n${domainHints}\\n## TODO\\n- [ ] Identify entities vs value objects\\n- [ ] Map bounded contexts from directory structure\\n- [ ] Define ubiquitous language glossary\\n`,\n\t\t\"utf-8\",\n\t);\n\n\treturn [stackPath, archPath, convPath, concernsPath, hintsPath];\n}\n"]}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
export interface CommitResult {
|
|
2
|
+
hash: string | null;
|
|
3
|
+
tddWarning: boolean;
|
|
4
|
+
}
|
|
5
|
+
/**
|
|
6
|
+
* Returns true if any file in the list matches known test file patterns.
|
|
7
|
+
*/
|
|
8
|
+
export declare function hasTestFiles(files: string[]): boolean;
|
|
9
|
+
/**
|
|
10
|
+
* Stage all changes and commit as a task in the GSD methodology.
|
|
11
|
+
* Message format: feat(NN-NN): description
|
|
12
|
+
* Sets tddWarning=true when no test files are in the commit.
|
|
13
|
+
*/
|
|
14
|
+
export declare function commitTask(cwd: string, phaseNum: number, planNum: number, description: string): CommitResult;
|
|
15
|
+
/**
|
|
16
|
+
* Stage all changes and commit as a docs update.
|
|
17
|
+
* Message format: docs: message
|
|
18
|
+
*/
|
|
19
|
+
export declare function commitDocs(cwd: string, message: string): CommitResult;
|
|
20
|
+
//# sourceMappingURL=git.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"git.d.ts","sourceRoot":"","sources":["../../src/gsd/git.ts"],"names":[],"mappings":"AAMA,MAAM,WAAW,YAAY;IAC5B,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IACpB,UAAU,EAAE,OAAO,CAAC;CACpB;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAErD;AAED;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,YAAY,CAwB5G;AAED;;;GAGG;AACH,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,YAAY,CAUrE","sourcesContent":["// GSD Git module — git commit operations for the GSD lifecycle.\n// Part of the draht GSD (Get Shit Done) methodology.\n// Exported via src/gsd/index.ts and @draht/coding-agent.\n\nimport { execSync } from \"node:child_process\";\n\nexport interface CommitResult {\n\thash: string | null;\n\ttddWarning: boolean;\n}\n\n/**\n * Returns true if any file in the list matches known test file patterns.\n */\nexport function hasTestFiles(files: string[]): boolean {\n\treturn files.some((f) => /\\.(test|spec)\\.(ts|tsx|js|jsx)$|_test\\.(go|ts)$/.test(f));\n}\n\n/**\n * Stage all changes and commit as a task in the GSD methodology.\n * Message format: feat(NN-NN): description\n * Sets tddWarning=true when no test files are in the commit.\n */\nexport function commitTask(cwd: string, phaseNum: number, planNum: number, description: string): CommitResult {\n\tconst scope = `${String(phaseNum).padStart(2, \"0\")}-${String(planNum).padStart(2, \"0\")}`;\n\tconst message = `feat(${scope}): ${description}`;\n\ttry {\n\t\texecSync(\"git add -A\", { cwd, stdio: \"pipe\" });\n\t\texecSync(`git commit -m ${JSON.stringify(message)}`, { cwd, stdio: \"pipe\" });\n\t\tconst hash = execSync(\"git rev-parse HEAD\", { cwd, encoding: \"utf-8\" }).trim();\n\t\tlet tddWarning = false;\n\t\ttry {\n\t\t\tconst files = execSync(`git diff-tree --no-commit-id --name-only -r ${hash}`, {\n\t\t\t\tcwd,\n\t\t\t\tencoding: 
\"utf-8\",\n\t\t\t})\n\t\t\t\t.trim()\n\t\t\t\t.split(\"\\n\")\n\t\t\t\t.filter(Boolean);\n\t\t\ttddWarning = !hasTestFiles(files);\n\t\t} catch {\n\t\t\t// not a git repo or commit not found\n\t\t}\n\t\treturn { hash, tddWarning };\n\t} catch {\n\t\treturn { hash: null, tddWarning: false };\n\t}\n}\n\n/**\n * Stage all changes and commit as a docs update.\n * Message format: docs: message\n */\nexport function commitDocs(cwd: string, message: string): CommitResult {\n\tconst msg = `docs: ${message}`;\n\ttry {\n\t\texecSync(\"git add -A\", { cwd, stdio: \"pipe\" });\n\t\texecSync(`git commit -m ${JSON.stringify(msg)}`, { cwd, stdio: \"pipe\" });\n\t\tconst hash = execSync(\"git rev-parse HEAD\", { cwd, encoding: \"utf-8\" }).trim();\n\t\treturn { hash, tddWarning: false };\n\t} catch {\n\t\treturn { hash: null, tddWarning: false };\n\t}\n}\n"]}
|
package/dist/gsd/git.js
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// GSD Git module — git commit operations for the GSD lifecycle.
|
|
2
|
+
// Part of the draht GSD (Get Shit Done) methodology.
|
|
3
|
+
// Exported via src/gsd/index.ts and @draht/coding-agent.
|
|
4
|
+
import { execSync } from "node:child_process";
|
|
5
|
+
/**
|
|
6
|
+
* Returns true if any file in the list matches known test file patterns.
|
|
7
|
+
*/
|
|
8
|
+
export function hasTestFiles(files) {
|
|
9
|
+
return files.some((f) => /\.(test|spec)\.(ts|tsx|js|jsx)$|_test\.(go|ts)$/.test(f));
|
|
10
|
+
}
|
|
11
|
+
/**
 * Stage all changes and commit as a task in the GSD methodology.
 * Message format: feat(NN-NN): description
 * Sets tddWarning=true when no test files are in the commit.
 *
 * Fix: the commit message is now fed to git on stdin (`git commit -F -`)
 * instead of being interpolated into a shell command line. The previous
 * JSON.stringify quoting left `$`, backticks, and `\` active inside POSIX
 * double quotes, so a hostile description could execute shell commands.
 */
export function commitTask(cwd, phaseNum, planNum, description) {
	// Zero-padded "NN-NN" scope, e.g. feat(01-02): ...
	const scope = `${String(phaseNum).padStart(2, "0")}-${String(planNum).padStart(2, "0")}`;
	const message = `feat(${scope}): ${description}`;
	try {
		execSync("git add -A", { cwd, stdio: "pipe" });
		// Message arrives via stdin, so no user-controlled text reaches the shell.
		execSync("git commit -F -", { cwd, stdio: "pipe", input: message });
		const hash = execSync("git rev-parse HEAD", { cwd, encoding: "utf-8" }).trim();
		let tddWarning = false;
		try {
			// Files touched by the commit; hash comes from rev-parse, so it is hex-only
			// and safe to interpolate.
			const files = execSync(`git diff-tree --no-commit-id --name-only -r ${hash}`, {
				cwd,
				encoding: "utf-8",
			})
				.trim()
				.split("\n")
				.filter(Boolean);
			tddWarning = !hasTestFiles(files);
		} catch {
			// not a git repo or commit not found
		}
		return { hash, tddWarning };
	} catch {
		// git unavailable, not a repository, or nothing staged to commit
		return { hash: null, tddWarning: false };
	}
}
|
|
43
|
+
/**
 * Stage all changes and commit as a docs update.
 * Message format: docs: message
 *
 * Fix: the commit message is now fed to git on stdin (`git commit -F -`)
 * instead of being interpolated into a shell command line. The previous
 * JSON.stringify quoting left `$` and backticks active inside POSIX double
 * quotes, so a hostile message could execute shell commands.
 *
 * Returns { hash: null, tddWarning: false } when git fails for any reason
 * (not a repo, git missing, nothing to commit).
 */
export function commitDocs(cwd, message) {
	const msg = `docs: ${message}`;
	try {
		execSync("git add -A", { cwd, stdio: "pipe" });
		// Message arrives via stdin, so no user-controlled text reaches the shell.
		execSync("git commit -F -", { cwd, stdio: "pipe", input: msg });
		const hash = execSync("git rev-parse HEAD", { cwd, encoding: "utf-8" }).trim();
		return { hash, tddWarning: false };
	} catch {
		return { hash: null, tddWarning: false };
	}
}
|
|
59
|
+
//# sourceMappingURL=git.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"git.js","sourceRoot":"","sources":["../../src/gsd/git.ts"],"names":[],"mappings":"AAAA,kEAAgE;AAChE,qDAAqD;AACrD,yDAAyD;AAEzD,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAO9C;;GAEG;AACH,MAAM,UAAU,YAAY,CAAC,KAAe,EAAW;IACtD,OAAO,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,iDAAiD,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAAA,CACpF;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,QAAgB,EAAE,OAAe,EAAE,WAAmB,EAAgB;IAC7G,MAAM,KAAK,GAAG,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC;IACzF,MAAM,OAAO,GAAG,QAAQ,KAAK,MAAM,WAAW,EAAE,CAAC;IACjD,IAAI,CAAC;QACJ,QAAQ,CAAC,YAAY,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;QAC/C,QAAQ,CAAC,iBAAiB,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;QAC7E,MAAM,IAAI,GAAG,QAAQ,CAAC,oBAAoB,EAAE,EAAE,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC;QAC/E,IAAI,UAAU,GAAG,KAAK,CAAC;QACvB,IAAI,CAAC;YACJ,MAAM,KAAK,GAAG,QAAQ,CAAC,+CAA+C,IAAI,EAAE,EAAE;gBAC7E,GAAG;gBACH,QAAQ,EAAE,OAAO;aACjB,CAAC;iBACA,IAAI,EAAE;iBACN,KAAK,CAAC,IAAI,CAAC;iBACX,MAAM,CAAC,OAAO,CAAC,CAAC;YAClB,UAAU,GAAG,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;QACnC,CAAC;QAAC,MAAM,CAAC;YACR,qCAAqC;QACtC,CAAC;QACD,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC;IAC7B,CAAC;IAAC,MAAM,CAAC;QACR,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC;IAC1C,CAAC;AAAA,CACD;AAED;;;GAGG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,OAAe,EAAgB;IACtE,MAAM,GAAG,GAAG,SAAS,OAAO,EAAE,CAAC;IAC/B,IAAI,CAAC;QACJ,QAAQ,CAAC,YAAY,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;QAC/C,QAAQ,CAAC,iBAAiB,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;QACzE,MAAM,IAAI,GAAG,QAAQ,CAAC,oBAAoB,EAAE,EAAE,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC;QAC/E,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC;IACpC,CAAC;IAAC,MAAM,CAAC;QACR,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC;IAC1C,CAAC;AAAA,CACD","sourcesContent":["// GSD Git module — git commit operations for 
the GSD lifecycle.\n// Part of the draht GSD (Get Shit Done) methodology.\n// Exported via src/gsd/index.ts and @draht/coding-agent.\n\nimport { execSync } from \"node:child_process\";\n\nexport interface CommitResult {\n\thash: string | null;\n\ttddWarning: boolean;\n}\n\n/**\n * Returns true if any file in the list matches known test file patterns.\n */\nexport function hasTestFiles(files: string[]): boolean {\n\treturn files.some((f) => /\\.(test|spec)\\.(ts|tsx|js|jsx)$|_test\\.(go|ts)$/.test(f));\n}\n\n/**\n * Stage all changes and commit as a task in the GSD methodology.\n * Message format: feat(NN-NN): description\n * Sets tddWarning=true when no test files are in the commit.\n */\nexport function commitTask(cwd: string, phaseNum: number, planNum: number, description: string): CommitResult {\n\tconst scope = `${String(phaseNum).padStart(2, \"0\")}-${String(planNum).padStart(2, \"0\")}`;\n\tconst message = `feat(${scope}): ${description}`;\n\ttry {\n\t\texecSync(\"git add -A\", { cwd, stdio: \"pipe\" });\n\t\texecSync(`git commit -m ${JSON.stringify(message)}`, { cwd, stdio: \"pipe\" });\n\t\tconst hash = execSync(\"git rev-parse HEAD\", { cwd, encoding: \"utf-8\" }).trim();\n\t\tlet tddWarning = false;\n\t\ttry {\n\t\t\tconst files = execSync(`git diff-tree --no-commit-id --name-only -r ${hash}`, {\n\t\t\t\tcwd,\n\t\t\t\tencoding: \"utf-8\",\n\t\t\t})\n\t\t\t\t.trim()\n\t\t\t\t.split(\"\\n\")\n\t\t\t\t.filter(Boolean);\n\t\t\ttddWarning = !hasTestFiles(files);\n\t\t} catch {\n\t\t\t// not a git repo or commit not found\n\t\t}\n\t\treturn { hash, tddWarning };\n\t} catch {\n\t\treturn { hash: null, tddWarning: false };\n\t}\n}\n\n/**\n * Stage all changes and commit as a docs update.\n * Message format: docs: message\n */\nexport function commitDocs(cwd: string, message: string): CommitResult {\n\tconst msg = `docs: ${message}`;\n\ttry {\n\t\texecSync(\"git add -A\", { cwd, stdio: \"pipe\" });\n\t\texecSync(`git commit -m ${JSON.stringify(msg)}`, { cwd, 
stdio: \"pipe\" });\n\t\tconst hash = execSync(\"git rev-parse HEAD\", { cwd, encoding: \"utf-8\" }).trim();\n\t\treturn { hash, tddWarning: false };\n\t} catch {\n\t\treturn { hash: null, tddWarning: false };\n\t}\n}\n"]}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/** Package-manager identity plus the commands to run tests, coverage, and lint with it. */
export interface ToolchainInfo {
	pm: "npm" | "bun" | "pnpm" | "yarn";
	testCmd: string;
	coverageCmd: string;
	lintCmd: string;
}
/** Hook behavior settings read from the `hooks` section of .planning/config.json. */
export interface HookConfig {
	// Coverage percentage gate; default is 80 when unset.
	coverageThreshold: number;
	// Default is "advisory" — NOTE(review): enforcement semantics live in the hook scripts; confirm there.
	tddMode: "strict" | "advisory";
	// Default is false.
	qualityGateStrict: boolean;
}
/**
 * Detect package manager from lockfiles and package.json scripts.
 * Priority: bun.lockb/bun.lock > pnpm-lock.yaml > yarn.lock > package-lock.json > fallback npm
 */
export declare function detectToolchain(cwd: string): ToolchainInfo;
/**
 * Read hook configuration from .planning/config.json hooks section.
 * Falls back to defaults on missing file or parse errors.
 */
export declare function readHookConfig(cwd: string): HookConfig;
|
|
22
|
+
//# sourceMappingURL=hook-utils.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hook-utils.d.ts","sourceRoot":"","sources":["../../src/gsd/hook-utils.ts"],"names":[],"mappings":"AAOA,MAAM,WAAW,aAAa;IAC7B,EAAE,EAAE,KAAK,GAAG,KAAK,GAAG,MAAM,GAAG,MAAM,CAAC;IACpC,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,UAAU;IAC1B,iBAAiB,EAAE,MAAM,CAAC;IAC1B,OAAO,EAAE,QAAQ,GAAG,UAAU,CAAC;IAC/B,iBAAiB,EAAE,OAAO,CAAC;CAC3B;AAQD;;;GAGG;AACH,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,aAAa,CAgE1D;AAED;;;GAGG;AACH,wBAAgB,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,UAAU,CAyBtD","sourcesContent":["// GSD Hook Utilities — toolchain auto-detection and hook configuration.\n// Mirrors the inline logic in hooks/gsd/*.js so it can be tested via vitest.\n// The hook .js files embed the same logic inline (with require() fallback to this dist).\n\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\n\nexport interface ToolchainInfo {\n\tpm: \"npm\" | \"bun\" | \"pnpm\" | \"yarn\";\n\ttestCmd: string;\n\tcoverageCmd: string;\n\tlintCmd: string;\n}\n\nexport interface HookConfig {\n\tcoverageThreshold: number;\n\ttddMode: \"strict\" | \"advisory\";\n\tqualityGateStrict: boolean;\n}\n\nconst DEFAULT_HOOK_CONFIG: HookConfig = {\n\tcoverageThreshold: 80,\n\ttddMode: \"advisory\",\n\tqualityGateStrict: false,\n};\n\n/**\n * Detect package manager from lockfiles and package.json scripts.\n * Priority: bun.lockb/bun.lock > pnpm-lock.yaml > yarn.lock > package-lock.json > fallback npm\n */\nexport function detectToolchain(cwd: string): ToolchainInfo {\n\tif (fs.existsSync(path.join(cwd, \"bun.lockb\")) || fs.existsSync(path.join(cwd, \"bun.lock\"))) {\n\t\treturn {\n\t\t\tpm: \"bun\",\n\t\t\ttestCmd: \"bun test\",\n\t\t\tcoverageCmd: \"bun test --coverage\",\n\t\t\tlintCmd: \"bunx biome check .\",\n\t\t};\n\t}\n\n\tif (fs.existsSync(path.join(cwd, \"pnpm-lock.yaml\"))) {\n\t\treturn {\n\t\t\tpm: \"pnpm\",\n\t\t\ttestCmd: \"pnpm test\",\n\t\t\tcoverageCmd: \"pnpm run test:coverage\",\n\t\t\tlintCmd: \"pnpm run 
lint\",\n\t\t};\n\t}\n\n\tif (fs.existsSync(path.join(cwd, \"yarn.lock\"))) {\n\t\treturn {\n\t\t\tpm: \"yarn\",\n\t\t\ttestCmd: \"yarn test\",\n\t\t\tcoverageCmd: \"yarn run test:coverage\",\n\t\t\tlintCmd: \"yarn run lint\",\n\t\t};\n\t}\n\n\tif (fs.existsSync(path.join(cwd, \"package-lock.json\"))) {\n\t\treturn {\n\t\t\tpm: \"npm\",\n\t\t\ttestCmd: \"npm test\",\n\t\t\tcoverageCmd: \"npm run test:coverage\",\n\t\t\tlintCmd: \"npm run lint\",\n\t\t};\n\t}\n\n\t// No lockfile — check package.json scripts for test runner hints\n\tconst pkgPath = path.join(cwd, \"package.json\");\n\tif (fs.existsSync(pkgPath)) {\n\t\ttry {\n\t\t\tconst pkg = JSON.parse(fs.readFileSync(pkgPath, \"utf-8\")) as {\n\t\t\t\tscripts?: Record<string, string>;\n\t\t\t};\n\t\t\tif (pkg.scripts?.test) {\n\t\t\t\treturn {\n\t\t\t\t\tpm: \"npm\",\n\t\t\t\t\ttestCmd: \"npm test\",\n\t\t\t\t\tcoverageCmd: \"npm run test:coverage\",\n\t\t\t\t\tlintCmd: \"npm run lint\",\n\t\t\t\t};\n\t\t\t}\n\t\t} catch {\n\t\t\t/* ignore parse errors */\n\t\t}\n\t}\n\n\t// Fallback\n\treturn {\n\t\tpm: \"npm\",\n\t\ttestCmd: \"npm test\",\n\t\tcoverageCmd: \"npm run test:coverage\",\n\t\tlintCmd: \"npm run lint\",\n\t};\n}\n\n/**\n * Read hook configuration from .planning/config.json hooks section.\n * Falls back to defaults on missing file or parse errors.\n */\nexport function readHookConfig(cwd: string): HookConfig {\n\tconst configPath = path.join(cwd, \".planning\", \"config.json\");\n\tif (!fs.existsSync(configPath)) {\n\t\treturn { ...DEFAULT_HOOK_CONFIG };\n\t}\n\ttry {\n\t\tconst raw = JSON.parse(fs.readFileSync(configPath, \"utf-8\")) as {\n\t\t\thooks?: Partial<HookConfig>;\n\t\t};\n\t\tconst hooks = raw.hooks ?? {};\n\t\treturn {\n\t\t\tcoverageThreshold:\n\t\t\t\ttypeof hooks.coverageThreshold === \"number\"\n\t\t\t\t\t? hooks.coverageThreshold\n\t\t\t\t\t: DEFAULT_HOOK_CONFIG.coverageThreshold,\n\t\t\ttddMode:\n\t\t\t\thooks.tddMode === \"strict\" || hooks.tddMode === \"advisory\" ? 
hooks.tddMode : DEFAULT_HOOK_CONFIG.tddMode,\n\t\t\tqualityGateStrict:\n\t\t\t\ttypeof hooks.qualityGateStrict === \"boolean\"\n\t\t\t\t\t? hooks.qualityGateStrict\n\t\t\t\t\t: DEFAULT_HOOK_CONFIG.qualityGateStrict,\n\t\t};\n\t} catch {\n\t\treturn { ...DEFAULT_HOOK_CONFIG };\n\t}\n}\n"]}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
// GSD Hook Utilities — toolchain auto-detection and hook configuration.
|
|
2
|
+
// Mirrors the inline logic in hooks/gsd/*.js so it can be tested via vitest.
|
|
3
|
+
// The hook .js files embed the same logic inline (with require() fallback to this dist).
|
|
4
|
+
import * as fs from "node:fs";
|
|
5
|
+
import * as path from "node:path";
|
|
6
|
+
// Defaults used by readHookConfig when .planning/config.json is missing,
// unparseable, or omits a field.
const DEFAULT_HOOK_CONFIG = {
	coverageThreshold: 80,
	tddMode: "advisory",
	qualityGateStrict: false,
};
|
|
11
|
+
/**
 * Detect package manager from lockfiles and package.json scripts.
 * Priority: bun.lockb/bun.lock > pnpm-lock.yaml > yarn.lock > package-lock.json > fallback npm
 *
 * Returns the package manager name plus the test/coverage/lint commands
 * appropriate for it; npm is the fallback when nothing can be detected.
 */
export function detectToolchain(cwd) {
	// Small predicate so each lockfile probe reads as one line.
	const has = (name) => fs.existsSync(path.join(cwd, name));

	if (has("bun.lockb") || has("bun.lock")) {
		return {
			pm: "bun",
			testCmd: "bun test",
			coverageCmd: "bun test --coverage",
			lintCmd: "bunx biome check .",
		};
	}
	if (has("pnpm-lock.yaml")) {
		return {
			pm: "pnpm",
			testCmd: "pnpm test",
			coverageCmd: "pnpm run test:coverage",
			lintCmd: "pnpm run lint",
		};
	}
	if (has("yarn.lock")) {
		return {
			pm: "yarn",
			testCmd: "yarn test",
			coverageCmd: "yarn run test:coverage",
			lintCmd: "yarn run lint",
		};
	}

	// npm answer is identical for lockfile match, script hint, and fallback.
	const npmToolchain = {
		pm: "npm",
		testCmd: "npm test",
		coverageCmd: "npm run test:coverage",
		lintCmd: "npm run lint",
	};
	if (has("package-lock.json")) {
		return npmToolchain;
	}

	// No lockfile — check package.json scripts for test runner hints
	if (has("package.json")) {
		try {
			const pkg = JSON.parse(fs.readFileSync(path.join(cwd, "package.json"), "utf-8"));
			if (pkg.scripts?.test) {
				return npmToolchain;
			}
		} catch {
			/* ignore parse errors */
		}
	}

	// Fallback
	return npmToolchain;
}
|
|
74
|
+
/**
 * Read hook configuration from .planning/config.json hooks section.
 * Falls back to defaults on missing file or parse errors.
 * Fields with unexpected types are individually replaced by their defaults.
 */
export function readHookConfig(cwd) {
	const configPath = path.join(cwd, ".planning", "config.json");
	if (!fs.existsSync(configPath)) {
		return { ...DEFAULT_HOOK_CONFIG };
	}
	try {
		const parsed = JSON.parse(fs.readFileSync(configPath, "utf-8"));
		const hooks = parsed.hooks ?? {};
		// Start from the defaults and override only the fields that validate.
		const config = { ...DEFAULT_HOOK_CONFIG };
		if (typeof hooks.coverageThreshold === "number") {
			config.coverageThreshold = hooks.coverageThreshold;
		}
		if (hooks.tddMode === "strict" || hooks.tddMode === "advisory") {
			config.tddMode = hooks.tddMode;
		}
		if (typeof hooks.qualityGateStrict === "boolean") {
			config.qualityGateStrict = hooks.qualityGateStrict;
		}
		return config;
	} catch {
		return { ...DEFAULT_HOOK_CONFIG };
	}
}
|
|
100
|
+
//# sourceMappingURL=hook-utils.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hook-utils.js","sourceRoot":"","sources":["../../src/gsd/hook-utils.ts"],"names":[],"mappings":"AAAA,0EAAwE;AACxE,6EAA6E;AAC7E,yFAAyF;AAEzF,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAC9B,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAelC,MAAM,mBAAmB,GAAe;IACvC,iBAAiB,EAAE,EAAE;IACrB,OAAO,EAAE,UAAU;IACnB,iBAAiB,EAAE,KAAK;CACxB,CAAC;AAEF;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAiB;IAC3D,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC,EAAE,CAAC;QAC7F,OAAO;YACN,EAAE,EAAE,KAAK;YACT,OAAO,EAAE,UAAU;YACnB,WAAW,EAAE,qBAAqB;YAClC,OAAO,EAAE,oBAAoB;SAC7B,CAAC;IACH,CAAC;IAED,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,gBAAgB,CAAC,CAAC,EAAE,CAAC;QACrD,OAAO;YACN,EAAE,EAAE,MAAM;YACV,OAAO,EAAE,WAAW;YACpB,WAAW,EAAE,wBAAwB;YACrC,OAAO,EAAE,eAAe;SACxB,CAAC;IACH,CAAC;IAED,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC,EAAE,CAAC;QAChD,OAAO;YACN,EAAE,EAAE,MAAM;YACV,OAAO,EAAE,WAAW;YACpB,WAAW,EAAE,wBAAwB;YACrC,OAAO,EAAE,eAAe;SACxB,CAAC;IACH,CAAC;IAED,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,mBAAmB,CAAC,CAAC,EAAE,CAAC;QACxD,OAAO;YACN,EAAE,EAAE,KAAK;YACT,OAAO,EAAE,UAAU;YACnB,WAAW,EAAE,uBAAuB;YACpC,OAAO,EAAE,cAAc;SACvB,CAAC;IACH,CAAC;IAED,mEAAiE;IACjE,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC;IAC/C,IAAI,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,IAAI,CAAC;YACJ,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,CAEvD,CAAC;YACF,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC;gBACvB,OAAO;oBACN,EAAE,EAAE,KAAK;oBACT,OAAO,EAAE,UAAU;oBACnB,WAAW,EAAE,uBAAuB;oBACpC,OAAO,EAAE,cAAc;iBACvB,CAAC;YACH,CAAC;QACF,CAAC;QAAC,MAAM,CAAC;YACR,yBAAyB;QAC1B,CAAC;IACF,CAAC;IAED,WAAW;IACX,OAAO;QACN,EAAE,EAAE,KAAK;QACT,OAAO,EAAE,UAAU;QACnB,WAAW,EAAE,uBAAuB;QACpC,OAAO,EAAE,cAAc;KACvB,CAAC;AAAA,CACF;AAED;;;GAGG;AACH,MAAM,UAAU,cAAc,CAAC,GAAW,EAAc;IACvD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,aAAa,CAAC,CAAC;IAC9D,IAAI,CAAC,EAAE,CAAC,UAAU,CAA
C,UAAU,CAAC,EAAE,CAAC;QAChC,OAAO,EAAE,GAAG,mBAAmB,EAAE,CAAC;IACnC,CAAC;IACD,IAAI,CAAC;QACJ,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAE1D,CAAC;QACF,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;QAC9B,OAAO;YACN,iBAAiB,EAChB,OAAO,KAAK,CAAC,iBAAiB,KAAK,QAAQ;gBAC1C,CAAC,CAAC,KAAK,CAAC,iBAAiB;gBACzB,CAAC,CAAC,mBAAmB,CAAC,iBAAiB;YACzC,OAAO,EACN,KAAK,CAAC,OAAO,KAAK,QAAQ,IAAI,KAAK,CAAC,OAAO,KAAK,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,OAAO;YACzG,iBAAiB,EAChB,OAAO,KAAK,CAAC,iBAAiB,KAAK,SAAS;gBAC3C,CAAC,CAAC,KAAK,CAAC,iBAAiB;gBACzB,CAAC,CAAC,mBAAmB,CAAC,iBAAiB;SACzC,CAAC;IACH,CAAC;IAAC,MAAM,CAAC;QACR,OAAO,EAAE,GAAG,mBAAmB,EAAE,CAAC;IACnC,CAAC;AAAA,CACD","sourcesContent":["// GSD Hook Utilities — toolchain auto-detection and hook configuration.\n// Mirrors the inline logic in hooks/gsd/*.js so it can be tested via vitest.\n// The hook .js files embed the same logic inline (with require() fallback to this dist).\n\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\n\nexport interface ToolchainInfo {\n\tpm: \"npm\" | \"bun\" | \"pnpm\" | \"yarn\";\n\ttestCmd: string;\n\tcoverageCmd: string;\n\tlintCmd: string;\n}\n\nexport interface HookConfig {\n\tcoverageThreshold: number;\n\ttddMode: \"strict\" | \"advisory\";\n\tqualityGateStrict: boolean;\n}\n\nconst DEFAULT_HOOK_CONFIG: HookConfig = {\n\tcoverageThreshold: 80,\n\ttddMode: \"advisory\",\n\tqualityGateStrict: false,\n};\n\n/**\n * Detect package manager from lockfiles and package.json scripts.\n * Priority: bun.lockb/bun.lock > pnpm-lock.yaml > yarn.lock > package-lock.json > fallback npm\n */\nexport function detectToolchain(cwd: string): ToolchainInfo {\n\tif (fs.existsSync(path.join(cwd, \"bun.lockb\")) || fs.existsSync(path.join(cwd, \"bun.lock\"))) {\n\t\treturn {\n\t\t\tpm: \"bun\",\n\t\t\ttestCmd: \"bun test\",\n\t\t\tcoverageCmd: \"bun test --coverage\",\n\t\t\tlintCmd: \"bunx biome check .\",\n\t\t};\n\t}\n\n\tif 
(fs.existsSync(path.join(cwd, \"pnpm-lock.yaml\"))) {\n\t\treturn {\n\t\t\tpm: \"pnpm\",\n\t\t\ttestCmd: \"pnpm test\",\n\t\t\tcoverageCmd: \"pnpm run test:coverage\",\n\t\t\tlintCmd: \"pnpm run lint\",\n\t\t};\n\t}\n\n\tif (fs.existsSync(path.join(cwd, \"yarn.lock\"))) {\n\t\treturn {\n\t\t\tpm: \"yarn\",\n\t\t\ttestCmd: \"yarn test\",\n\t\t\tcoverageCmd: \"yarn run test:coverage\",\n\t\t\tlintCmd: \"yarn run lint\",\n\t\t};\n\t}\n\n\tif (fs.existsSync(path.join(cwd, \"package-lock.json\"))) {\n\t\treturn {\n\t\t\tpm: \"npm\",\n\t\t\ttestCmd: \"npm test\",\n\t\t\tcoverageCmd: \"npm run test:coverage\",\n\t\t\tlintCmd: \"npm run lint\",\n\t\t};\n\t}\n\n\t// No lockfile — check package.json scripts for test runner hints\n\tconst pkgPath = path.join(cwd, \"package.json\");\n\tif (fs.existsSync(pkgPath)) {\n\t\ttry {\n\t\t\tconst pkg = JSON.parse(fs.readFileSync(pkgPath, \"utf-8\")) as {\n\t\t\t\tscripts?: Record<string, string>;\n\t\t\t};\n\t\t\tif (pkg.scripts?.test) {\n\t\t\t\treturn {\n\t\t\t\t\tpm: \"npm\",\n\t\t\t\t\ttestCmd: \"npm test\",\n\t\t\t\t\tcoverageCmd: \"npm run test:coverage\",\n\t\t\t\t\tlintCmd: \"npm run lint\",\n\t\t\t\t};\n\t\t\t}\n\t\t} catch {\n\t\t\t/* ignore parse errors */\n\t\t}\n\t}\n\n\t// Fallback\n\treturn {\n\t\tpm: \"npm\",\n\t\ttestCmd: \"npm test\",\n\t\tcoverageCmd: \"npm run test:coverage\",\n\t\tlintCmd: \"npm run lint\",\n\t};\n}\n\n/**\n * Read hook configuration from .planning/config.json hooks section.\n * Falls back to defaults on missing file or parse errors.\n */\nexport function readHookConfig(cwd: string): HookConfig {\n\tconst configPath = path.join(cwd, \".planning\", \"config.json\");\n\tif (!fs.existsSync(configPath)) {\n\t\treturn { ...DEFAULT_HOOK_CONFIG };\n\t}\n\ttry {\n\t\tconst raw = JSON.parse(fs.readFileSync(configPath, \"utf-8\")) as {\n\t\t\thooks?: Partial<HookConfig>;\n\t\t};\n\t\tconst hooks = raw.hooks ?? 
{};\n\t\treturn {\n\t\t\tcoverageThreshold:\n\t\t\t\ttypeof hooks.coverageThreshold === \"number\"\n\t\t\t\t\t? hooks.coverageThreshold\n\t\t\t\t\t: DEFAULT_HOOK_CONFIG.coverageThreshold,\n\t\t\ttddMode:\n\t\t\t\thooks.tddMode === \"strict\" || hooks.tddMode === \"advisory\" ? hooks.tddMode : DEFAULT_HOOK_CONFIG.tddMode,\n\t\t\tqualityGateStrict:\n\t\t\t\ttypeof hooks.qualityGateStrict === \"boolean\"\n\t\t\t\t\t? hooks.qualityGateStrict\n\t\t\t\t\t: DEFAULT_HOOK_CONFIG.qualityGateStrict,\n\t\t};\n\t} catch {\n\t\treturn { ...DEFAULT_HOOK_CONFIG };\n\t}\n}\n"]}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
// GSD index — re-exports all GSD module functions.
// Import from @draht/coding-agent for use in extensions.
export { createDomainModel, mapCodebase } from "./domain.js";
export { extractGlossaryTerms, loadDomainContent, validateDomainGlossary, } from "./domain-validator.js";
export type { CommitResult } from "./git.js";
export { commitDocs, commitTask, hasTestFiles } from "./git.js";
export type { HookConfig, ToolchainInfo } from "./hook-utils.js";
export { detectToolchain, readHookConfig } from "./hook-utils.js";
export type { PhaseVerification, PlanDiscovery } from "./planning.js";
export { createPlan, discoverPlans, readPlan, updateState, verifyPhase, writeSummary, } from "./planning.js";
|
|
9
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/gsd/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC7D,OAAO,EACN,oBAAoB,EACpB,iBAAiB,EACjB,sBAAsB,GACtB,MAAM,uBAAuB,CAAC;AAC/B,YAAY,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAC7C,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAChE,YAAY,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAClE,YAAY,EAAE,iBAAiB,EAAE,aAAa,EAAE,MAAM,eAAe,CAAC;AACtE,OAAO,EACN,UAAU,EACV,aAAa,EACb,QAAQ,EACR,WAAW,EACX,WAAW,EACX,YAAY,GACZ,MAAM,eAAe,CAAC","sourcesContent":["// GSD index — re-exports all GSD module functions.\n// Import from @draht/coding-agent for use in extensions.\n\nexport { createDomainModel, mapCodebase } from \"./domain.js\";\nexport {\n\textractGlossaryTerms,\n\tloadDomainContent,\n\tvalidateDomainGlossary,\n} from \"./domain-validator.js\";\nexport type { CommitResult } from \"./git.js\";\nexport { commitDocs, commitTask, hasTestFiles } from \"./git.js\";\nexport type { HookConfig, ToolchainInfo } from \"./hook-utils.js\";\nexport { detectToolchain, readHookConfig } from \"./hook-utils.js\";\nexport type { PhaseVerification, PlanDiscovery } from \"./planning.js\";\nexport {\n\tcreatePlan,\n\tdiscoverPlans,\n\treadPlan,\n\tupdateState,\n\tverifyPhase,\n\twriteSummary,\n} from \"./planning.js\";\n"]}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
// GSD index — re-exports all GSD module functions.
|
|
2
|
+
// Import from @draht/coding-agent for use in extensions.
|
|
3
|
+
// Domain modeling and codebase mapping.
export { createDomainModel, mapCodebase } from "./domain.js";
// Domain glossary extraction and validation.
export { extractGlossaryTerms, loadDomainContent, validateDomainGlossary, } from "./domain-validator.js";
// Git commit operations (task commits with TDD warning, docs commits).
export { commitDocs, commitTask, hasTestFiles } from "./git.js";
// Toolchain auto-detection and hook configuration.
export { detectToolchain, readHookConfig } from "./hook-utils.js";
// Phase/plan/task file system operations under .planning/.
export { createPlan, discoverPlans, readPlan, updateState, verifyPhase, writeSummary, } from "./planning.js";
|
|
8
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/gsd/index.ts"],"names":[],"mappings":"AAAA,qDAAmD;AACnD,yDAAyD;AAEzD,OAAO,EAAE,iBAAiB,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC7D,OAAO,EACN,oBAAoB,EACpB,iBAAiB,EACjB,sBAAsB,GACtB,MAAM,uBAAuB,CAAC;AAE/B,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAEhE,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAElE,OAAO,EACN,UAAU,EACV,aAAa,EACb,QAAQ,EACR,WAAW,EACX,WAAW,EACX,YAAY,GACZ,MAAM,eAAe,CAAC","sourcesContent":["// GSD index — re-exports all GSD module functions.\n// Import from @draht/coding-agent for use in extensions.\n\nexport { createDomainModel, mapCodebase } from \"./domain.js\";\nexport {\n\textractGlossaryTerms,\n\tloadDomainContent,\n\tvalidateDomainGlossary,\n} from \"./domain-validator.js\";\nexport type { CommitResult } from \"./git.js\";\nexport { commitDocs, commitTask, hasTestFiles } from \"./git.js\";\nexport type { HookConfig, ToolchainInfo } from \"./hook-utils.js\";\nexport { detectToolchain, readHookConfig } from \"./hook-utils.js\";\nexport type { PhaseVerification, PlanDiscovery } from \"./planning.js\";\nexport {\n\tcreatePlan,\n\tdiscoverPlans,\n\treadPlan,\n\tupdateState,\n\tverifyPhase,\n\twriteSummary,\n} from \"./planning.js\";\n"]}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/** Result of scanning a phase directory for its plan files. */
export interface PlanDiscovery {
	// Discovered plan files with their declared dependencies.
	plans: Array<{
		file: string;
		deps: string[];
	}>;
	// NOTE(review): presumably plans lacking a completion summary — confirm in planning.ts.
	incomplete: string[];
	// NOTE(review): presumably fix-up plans identified by naming convention — confirm in planning.ts.
	fixPlans: string[];
}
/** Plan vs. summary counts for a phase, with an overall completeness flag. */
export interface PhaseVerification {
	plans: number;
	summaries: number;
	complete: boolean;
}
/**
 * Create `<NN>-<NN>-PLAN.md` (a TDD task template with frontmatter) under
 * `.planning/phases/<NN>-<slug>/`, creating directories as needed,
 * and return the new file's path. Title defaults to "Plan <planNum>".
 */
export declare function createPlan(cwd: string, phaseNum: number, planNum: number, title?: string): string;
/** Scan the phase's directory and report plans, incomplete plans, and fix plans. */
export declare function discoverPlans(cwd: string, phaseNum: number): PlanDiscovery;
/** Read the plan file for the given phase/plan and return its contents — NOTE(review): presumed from signature; confirm. */
export declare function readPlan(cwd: string, phaseNum: number, planNum: number): string;
/** Write a summary file for the given plan and return its path — NOTE(review): presumed from signature; confirm. */
export declare function writeSummary(cwd: string, phaseNum: number, planNum: number): string;
/** Count plans and summaries for a phase and report whether the phase is complete. */
export declare function verifyPhase(cwd: string, phaseNum: number): PhaseVerification;
/** Update .planning state — NOTE(review): body not visible in this chunk; confirm side effects in planning.ts. */
export declare function updateState(cwd: string): void;
|
|
20
|
+
//# sourceMappingURL=planning.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"planning.d.ts","sourceRoot":"","sources":["../../src/gsd/planning.ts"],"names":[],"mappings":"AA8CA,MAAM,WAAW,aAAa;IAC7B,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,EAAE,CAAA;KAAE,CAAC,CAAC;IAC/C,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,QAAQ,EAAE,MAAM,EAAE,CAAC;CACnB;AAED,MAAM,WAAW,iBAAiB;IACjC,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,OAAO,CAAC;CAClB;AAED,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,MAAM,CAyCjG;AAED,wBAAgB,aAAa,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,aAAa,CA+B1E;AAED,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,MAAM,CAM/E;AAED,wBAAgB,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,MAAM,CAyBnF;AAED,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,iBAAiB,CAe5E;AAED,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,CAM7C","sourcesContent":["// GSD Planning module — phase/plan/task file system operations.\n// Part of the draht GSD (Get Shit Done) methodology.\n\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\n\nconst PLANNING = \".planning\";\n\nfunction planningPath(cwd: string, ...segments: string[]): string {\n\treturn path.join(cwd, PLANNING, ...segments);\n}\n\nfunction ensureDir(dir: string): void {\n\tif (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });\n}\n\nfunction padNum(n: number, digits = 2): string {\n\treturn String(n).padStart(digits, \"0\");\n}\n\nfunction timestamp(): string {\n\treturn new Date().toISOString().replace(\"T\", \" \").slice(0, 19);\n}\n\nfunction getPhaseSlug(cwd: string, phaseNum: number): string {\n\tconst roadmapPath = planningPath(cwd, \"ROADMAP.md\");\n\tif (!fs.existsSync(roadmapPath)) return `phase-${phaseNum}`;\n\tconst content = fs.readFileSync(roadmapPath, \"utf-8\");\n\tconst re = new RegExp(`## Phase ${phaseNum}: (.+?) 
—`);\n\tconst m = re.exec(content);\n\tif (!m) return `phase-${phaseNum}`;\n\treturn m[1]\n\t\t.toLowerCase()\n\t\t.replace(/[^a-z0-9]+/g, \"-\")\n\t\t.replace(/^-|-$/g, \"\")\n\t\t.slice(0, 40);\n}\n\nfunction getPhaseDir(cwd: string, phaseNum: number): string | null {\n\tconst phasesDir = planningPath(cwd, \"phases\");\n\tif (!fs.existsSync(phasesDir)) return null;\n\tconst prefix = `${padNum(phaseNum)}-`;\n\tconst entry = fs.readdirSync(phasesDir).find((e) => e.startsWith(prefix));\n\treturn entry ? path.join(phasesDir, entry) : null;\n}\n\nexport interface PlanDiscovery {\n\tplans: Array<{ file: string; deps: string[] }>;\n\tincomplete: string[];\n\tfixPlans: string[];\n}\n\nexport interface PhaseVerification {\n\tplans: number;\n\tsummaries: number;\n\tcomplete: boolean;\n}\n\nexport function createPlan(cwd: string, phaseNum: number, planNum: number, title?: string): string {\n\tconst slug = getPhaseSlug(cwd, phaseNum);\n\tconst dir = planningPath(cwd, \"phases\", `${padNum(phaseNum)}-${slug}`);\n\tensureDir(dir);\n\tconst planTitle = title || `Plan ${planNum}`;\n\tconst planFile = path.join(dir, `${padNum(phaseNum)}-${padNum(planNum)}-PLAN.md`);\n\tconst tmpl = `---\nphase: ${phaseNum}\nplan: ${planNum}\ndepends_on: []\nmust_haves:\n - \"[Observable truth this plan delivers]\"\n---\n\n# Phase ${phaseNum}, Plan ${planNum}: ${planTitle}\n\n## Goal\n[What this plan achieves from user perspective]\n\n## Context\n[Key decisions that affect this plan]\n\n## Tasks\n\n<task type=\"auto\">\n <n>[Task name]</n>\n <files>[affected files]</files>\n <test>[Write tests first — what should pass when done]</test>\n <action>\n [Implementation to make tests pass]\n </action>\n <refactor>[Optional cleanup after green]</refactor>\n <verify>[How to verify]</verify>\n <done>[What \"done\" looks like]</done>\n</task>\n\n---\nCreated: ${timestamp()}\n`;\n\tfs.writeFileSync(planFile, tmpl, \"utf-8\");\n\treturn planFile;\n}\n\nexport function discoverPlans(cwd: string, phaseNum: 
number): PlanDiscovery {\n\tconst phaseDir = getPhaseDir(cwd, phaseNum);\n\tif (!phaseDir) throw new Error(`Phase ${phaseNum} directory not found`);\n\n\tconst files = fs.readdirSync(phaseDir).sort();\n\tconst plans = files.filter((f) => f.endsWith(\"-PLAN.md\") && !f.includes(\"FIX\"));\n\tconst summaries = files.filter((f) => f.endsWith(\"-SUMMARY.md\"));\n\tconst fixPlans = files.filter((f) => f.includes(\"FIX-PLAN.md\"));\n\n\tconst completedPlanNums = new Set(\n\t\tsummaries.map((s) => s.match(/\\d+-(\\d+)-SUMMARY/)?.[1]).filter((x): x is string => Boolean(x)),\n\t);\n\n\tconst incomplete = plans.filter((p) => {\n\t\tconst m = p.match(/\\d+-(\\d+)-PLAN/);\n\t\treturn m ? !completedPlanNums.has(m[1]) : true;\n\t});\n\n\tconst planData = plans.map((p) => {\n\t\tconst content = fs.readFileSync(path.join(phaseDir, p), \"utf-8\");\n\t\tconst depsMatch = content.match(/depends_on:\\s*\\[(.*?)\\]/);\n\t\tconst deps = depsMatch\n\t\t\t? depsMatch[1]\n\t\t\t\t\t.split(\",\")\n\t\t\t\t\t.map((d) => d.trim())\n\t\t\t\t\t.filter(Boolean)\n\t\t\t: [];\n\t\treturn { file: p, deps };\n\t});\n\n\treturn { plans: planData, incomplete, fixPlans };\n}\n\nexport function readPlan(cwd: string, phaseNum: number, planNum: number): string {\n\tconst phaseDir = getPhaseDir(cwd, phaseNum);\n\tif (!phaseDir) throw new Error(`Phase ${phaseNum} not found`);\n\tconst planFile = path.join(phaseDir, `${padNum(phaseNum)}-${padNum(planNum)}-PLAN.md`);\n\tif (!fs.existsSync(planFile)) throw new Error(`Plan file not found: ${planFile}`);\n\treturn fs.readFileSync(planFile, \"utf-8\");\n}\n\nexport function writeSummary(cwd: string, phaseNum: number, planNum: number): string {\n\tconst phaseDir = getPhaseDir(cwd, phaseNum);\n\tif (!phaseDir) throw new Error(`Phase ${phaseNum} not found`);\n\tconst summaryPath = path.join(phaseDir, `${padNum(phaseNum)}-${padNum(planNum)}-SUMMARY.md`);\n\tconst tmpl = `# Phase ${phaseNum}, Plan ${planNum} Summary\n\n## Completed Tasks\n| # | Task | Status | Commit 
|\n|---|------|--------|--------|\n| 1 | [task] | ✅ Done | [hash] |\n\n## Files Changed\n- [files]\n\n## Verification Results\n- [results]\n\n## Notes\n[deviations, decisions]\n\n---\nCompleted: ${timestamp()}\n`;\n\tfs.writeFileSync(summaryPath, tmpl, \"utf-8\");\n\treturn summaryPath;\n}\n\nexport function verifyPhase(cwd: string, phaseNum: number): PhaseVerification {\n\tconst phaseDir = getPhaseDir(cwd, phaseNum);\n\tif (!phaseDir) throw new Error(`Phase ${phaseNum} not found`);\n\tconst plans = fs.readdirSync(phaseDir).filter((f) => f.endsWith(\"-PLAN.md\") && !f.includes(\"FIX\"));\n\tconst summaries = fs.readdirSync(phaseDir).filter((f) => f.endsWith(\"-SUMMARY.md\"));\n\tconst complete = summaries.length >= plans.length && plans.length > 0;\n\tif (complete) {\n\t\tconst verPath = path.join(phaseDir, `${padNum(phaseNum)}-VERIFICATION.md`);\n\t\tfs.writeFileSync(\n\t\t\tverPath,\n\t\t\t`# Phase ${phaseNum} Verification\\n\\nAll ${plans.length} plans executed.\\nVerified: ${timestamp()}\\n`,\n\t\t\t\"utf-8\",\n\t\t);\n\t}\n\treturn { plans: plans.length, summaries: summaries.length, complete };\n}\n\nexport function updateState(cwd: string): void {\n\tconst statePath = planningPath(cwd, \"STATE.md\");\n\tif (!fs.existsSync(statePath)) throw new Error(\"No STATE.md found\");\n\tlet state = fs.readFileSync(statePath, \"utf-8\");\n\tstate = state.replace(/## Last Activity:.*/, `## Last Activity: ${timestamp()}`);\n\tfs.writeFileSync(statePath, state, \"utf-8\");\n}\n"]}
|