@mindfoldhq/trellis 0.1.9 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +2 -0
- package/dist/cli/index.js.map +1 -1
- package/dist/commands/init.d.ts.map +1 -1
- package/dist/commands/init.js +12 -6
- package/dist/commands/init.js.map +1 -1
- package/dist/commands/update.d.ts +1 -0
- package/dist/commands/update.d.ts.map +1 -1
- package/dist/commands/update.js +684 -42
- package/dist/commands/update.js.map +1 -1
- package/dist/configurators/opencode.js +1 -1
- package/dist/configurators/opencode.js.map +1 -1
- package/dist/configurators/workflow.d.ts +4 -3
- package/dist/configurators/workflow.d.ts.map +1 -1
- package/dist/configurators/workflow.js +23 -20
- package/dist/configurators/workflow.js.map +1 -1
- package/dist/constants/paths.d.ts +29 -30
- package/dist/constants/paths.d.ts.map +1 -1
- package/dist/constants/paths.js +32 -35
- package/dist/constants/paths.js.map +1 -1
- package/dist/migrations/index.d.ts +35 -0
- package/dist/migrations/index.d.ts.map +1 -0
- package/dist/migrations/index.js +124 -0
- package/dist/migrations/index.js.map +1 -0
- package/dist/migrations/manifests/0.1.9.json +30 -0
- package/dist/migrations/manifests/0.2.0.json +43 -0
- package/dist/templates/claude/agents/check.md +3 -3
- package/dist/templates/claude/agents/debug.md +1 -1
- package/dist/templates/claude/agents/dispatch.md +12 -12
- package/dist/templates/claude/agents/implement.md +6 -6
- package/dist/templates/claude/agents/plan.md +37 -37
- package/dist/templates/claude/agents/research.md +1 -1
- package/dist/templates/claude/commands/before-backend-dev.md +5 -5
- package/dist/templates/claude/commands/before-frontend-dev.md +5 -5
- package/dist/templates/claude/commands/break-loop.md +2 -2
- package/dist/templates/claude/commands/check-backend.md +6 -6
- package/dist/templates/claude/commands/check-cross-layer.md +5 -5
- package/dist/templates/claude/commands/check-frontend.md +6 -6
- package/dist/templates/claude/commands/create-command.md +3 -3
- package/dist/templates/claude/commands/finish-work.md +6 -6
- package/dist/templates/claude/commands/integrate-skill.md +11 -11
- package/dist/templates/claude/commands/{onboard-developer.md → onboard.md} +31 -28
- package/dist/templates/claude/commands/parallel.md +17 -17
- package/dist/templates/claude/commands/{record-agent-flow.md → record-session.md} +7 -7
- package/dist/templates/claude/commands/start.md +36 -36
- package/dist/templates/claude/hooks/inject-subagent-context.py +77 -76
- package/dist/templates/claude/hooks/ralph-loop.py +18 -18
- package/dist/templates/claude/hooks/session-start.py +4 -4
- package/dist/templates/cursor/commands/before-backend-dev.md +5 -5
- package/dist/templates/cursor/commands/before-frontend-dev.md +5 -5
- package/dist/templates/cursor/commands/break-loop.md +2 -2
- package/dist/templates/cursor/commands/check-backend.md +6 -6
- package/dist/templates/cursor/commands/check-cross-layer.md +5 -5
- package/dist/templates/cursor/commands/check-frontend.md +6 -6
- package/dist/templates/cursor/commands/create-command.md +3 -3
- package/dist/templates/cursor/commands/finish-work.md +6 -6
- package/dist/templates/cursor/commands/integrate-skill.md +11 -11
- package/dist/templates/cursor/commands/{onboard-developer.md → onboard.md} +31 -28
- package/dist/templates/cursor/commands/{record-agent-flow.md → record-session.md} +7 -7
- package/dist/templates/cursor/commands/start.md +25 -25
- package/dist/templates/extract.d.ts +2 -2
- package/dist/templates/extract.js +2 -2
- package/dist/templates/markdown/agents.md +2 -2
- package/dist/templates/markdown/gitignore.txt +2 -2
- package/dist/templates/markdown/index.d.ts +1 -0
- package/dist/templates/markdown/index.d.ts.map +1 -1
- package/dist/templates/markdown/index.js +4 -2
- package/dist/templates/markdown/index.js.map +1 -1
- package/dist/templates/markdown/{agent-traces-index.md → workspace-index.md} +14 -14
- package/dist/templates/trellis/index.d.ts +7 -1
- package/dist/templates/trellis/index.d.ts.map +1 -1
- package/dist/templates/trellis/index.js +14 -2
- package/dist/templates/trellis/index.js.map +1 -1
- package/dist/templates/trellis/scripts/add-session.sh +26 -26
- package/dist/templates/trellis/scripts/common/developer.sh +20 -21
- package/dist/templates/trellis/scripts/common/git-context.sh +90 -115
- package/dist/templates/trellis/scripts/common/paths.sh +53 -63
- package/dist/templates/trellis/scripts/common/phase.sh +40 -40
- package/dist/templates/trellis/scripts/common/registry.sh +13 -13
- package/dist/templates/trellis/scripts/common/task-queue.sh +142 -0
- package/dist/templates/trellis/scripts/common/task-utils.sh +151 -0
- package/dist/templates/trellis/scripts/common/worktree.sh +3 -3
- package/dist/templates/trellis/scripts/create-bootstrap.sh +43 -42
- package/dist/templates/trellis/scripts/init-developer.sh +1 -1
- package/dist/templates/trellis/scripts/multi-agent/cleanup.sh +33 -33
- package/dist/templates/trellis/scripts/multi-agent/create-pr.sh +30 -30
- package/dist/templates/trellis/scripts/multi-agent/plan.sh +28 -28
- package/dist/templates/trellis/scripts/multi-agent/start.sh +56 -56
- package/dist/templates/trellis/scripts/multi-agent/status.sh +59 -59
- package/dist/templates/trellis/scripts/{feature.sh → task.sh} +235 -185
- package/dist/templates/trellis/workflow.md +71 -74
- package/dist/types/migration.d.ts +74 -0
- package/dist/types/migration.d.ts.map +1 -0
- package/dist/types/migration.js +8 -0
- package/dist/types/migration.js.map +1 -0
- package/dist/utils/template-hash.d.ts +78 -0
- package/dist/utils/template-hash.d.ts.map +1 -0
- package/dist/utils/template-hash.js +234 -0
- package/dist/utils/template-hash.js.map +1 -0
- package/package.json +1 -1
- package/dist/templates/trellis/scripts/common/backlog.sh +0 -220
- package/dist/templates/trellis/scripts/common/feature-utils.sh +0 -194
- /package/dist/templates/trellis/{backlog → tasks}/.gitkeep +0 -0
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Template hash utilities for detecting user modifications
|
|
3
|
+
*
|
|
4
|
+
* Stores SHA256 hashes of template files at install time.
|
|
5
|
+
* Used to determine if users have modified templates.
|
|
6
|
+
*/
|
|
7
|
+
import { createHash } from "node:crypto";
|
|
8
|
+
import fs from "node:fs";
|
|
9
|
+
import path from "node:path";
|
|
10
|
+
import { DIR_NAMES } from "../constants/paths.js";
|
|
11
|
+
/** File name for storing template hashes */
|
|
12
|
+
const HASHES_FILE = ".template-hashes.json";
|
|
13
|
+
/**
|
|
14
|
+
* Compute SHA256 hash of content
|
|
15
|
+
*/
|
|
16
|
+
export function computeHash(content) {
|
|
17
|
+
return createHash("sha256").update(content, "utf-8").digest("hex");
|
|
18
|
+
}
|
|
19
|
+
/**
|
|
20
|
+
* Get path to the hashes file
|
|
21
|
+
*/
|
|
22
|
+
function getHashesPath(cwd) {
|
|
23
|
+
return path.join(cwd, DIR_NAMES.WORKFLOW, HASHES_FILE);
|
|
24
|
+
}
|
|
25
|
+
/**
|
|
26
|
+
* Load stored template hashes
|
|
27
|
+
*/
|
|
28
|
+
export function loadHashes(cwd) {
|
|
29
|
+
const hashesPath = getHashesPath(cwd);
|
|
30
|
+
if (!fs.existsSync(hashesPath)) {
|
|
31
|
+
return {};
|
|
32
|
+
}
|
|
33
|
+
try {
|
|
34
|
+
const content = fs.readFileSync(hashesPath, "utf-8");
|
|
35
|
+
return JSON.parse(content);
|
|
36
|
+
}
|
|
37
|
+
catch {
|
|
38
|
+
return {};
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
/**
|
|
42
|
+
* Save template hashes
|
|
43
|
+
*/
|
|
44
|
+
export function saveHashes(cwd, hashes) {
|
|
45
|
+
const hashesPath = getHashesPath(cwd);
|
|
46
|
+
fs.writeFileSync(hashesPath, JSON.stringify(hashes, null, 2));
|
|
47
|
+
}
|
|
48
|
+
/**
|
|
49
|
+
* Update hashes for specific files
|
|
50
|
+
*
|
|
51
|
+
* @param cwd - Working directory
|
|
52
|
+
* @param files - Map of relative paths to file contents
|
|
53
|
+
*/
|
|
54
|
+
export function updateHashes(cwd, files) {
|
|
55
|
+
const hashes = loadHashes(cwd);
|
|
56
|
+
for (const [relativePath, content] of files) {
|
|
57
|
+
hashes[relativePath] = computeHash(content);
|
|
58
|
+
}
|
|
59
|
+
saveHashes(cwd, hashes);
|
|
60
|
+
}
|
|
61
|
+
/**
|
|
62
|
+
* Update hash for a single file by reading its current content
|
|
63
|
+
*/
|
|
64
|
+
export function updateHashFromFile(cwd, relativePath) {
|
|
65
|
+
const fullPath = path.join(cwd, relativePath);
|
|
66
|
+
if (!fs.existsSync(fullPath)) {
|
|
67
|
+
return;
|
|
68
|
+
}
|
|
69
|
+
const content = fs.readFileSync(fullPath, "utf-8");
|
|
70
|
+
const hashes = loadHashes(cwd);
|
|
71
|
+
hashes[relativePath] = computeHash(content);
|
|
72
|
+
saveHashes(cwd, hashes);
|
|
73
|
+
}
|
|
74
|
+
/**
|
|
75
|
+
* Remove hash entry for a file (e.g., after deletion)
|
|
76
|
+
*/
|
|
77
|
+
export function removeHash(cwd, relativePath) {
|
|
78
|
+
const hashes = loadHashes(cwd);
|
|
79
|
+
const { [relativePath]: _, ...rest } = hashes;
|
|
80
|
+
saveHashes(cwd, rest);
|
|
81
|
+
}
|
|
82
|
+
/**
|
|
83
|
+
* Rename hash entry (used after file rename)
|
|
84
|
+
*/
|
|
85
|
+
export function renameHash(cwd, oldPath, newPath) {
|
|
86
|
+
const hashes = loadHashes(cwd);
|
|
87
|
+
if (hashes[oldPath]) {
|
|
88
|
+
const { [oldPath]: oldValue, ...rest } = hashes;
|
|
89
|
+
rest[newPath] = oldValue;
|
|
90
|
+
saveHashes(cwd, rest);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
/**
|
|
94
|
+
* Check if a template file has been modified by the user
|
|
95
|
+
*
|
|
96
|
+
* @param cwd - Working directory
|
|
97
|
+
* @param relativePath - Relative path to the file
|
|
98
|
+
* @param hashes - Stored template hashes
|
|
99
|
+
* @returns true if file has been modified from template, false otherwise
|
|
100
|
+
*/
|
|
101
|
+
export function isTemplateModified(cwd, relativePath, hashes) {
|
|
102
|
+
const fullPath = path.join(cwd, relativePath);
|
|
103
|
+
// If file doesn't exist, can't be modified
|
|
104
|
+
if (!fs.existsSync(fullPath)) {
|
|
105
|
+
return false;
|
|
106
|
+
}
|
|
107
|
+
// If we don't have a stored hash, assume it's modified (conservative)
|
|
108
|
+
const storedHash = hashes[relativePath];
|
|
109
|
+
if (!storedHash) {
|
|
110
|
+
return true;
|
|
111
|
+
}
|
|
112
|
+
// Compare current content hash with stored hash
|
|
113
|
+
const currentContent = fs.readFileSync(fullPath, "utf-8");
|
|
114
|
+
const currentHash = computeHash(currentContent);
|
|
115
|
+
return currentHash !== storedHash;
|
|
116
|
+
}
|
|
117
|
+
/**
|
|
118
|
+
* Check if a file matches its original template content
|
|
119
|
+
* (Useful for determining if a file can be safely auto-migrated)
|
|
120
|
+
*
|
|
121
|
+
* @param cwd - Working directory
|
|
122
|
+
* @param relativePath - Relative path to the file
|
|
123
|
+
* @param originalContent - Original template content
|
|
124
|
+
* @returns true if file matches original template
|
|
125
|
+
*/
|
|
126
|
+
export function matchesOriginalTemplate(cwd, relativePath, originalContent) {
|
|
127
|
+
const fullPath = path.join(cwd, relativePath);
|
|
128
|
+
if (!fs.existsSync(fullPath)) {
|
|
129
|
+
return false;
|
|
130
|
+
}
|
|
131
|
+
const currentContent = fs.readFileSync(fullPath, "utf-8");
|
|
132
|
+
return currentContent === originalContent;
|
|
133
|
+
}
|
|
134
|
+
/**
|
|
135
|
+
* Get modification status for multiple files
|
|
136
|
+
*
|
|
137
|
+
* @param cwd - Working directory
|
|
138
|
+
* @param relativePaths - Array of relative paths to check
|
|
139
|
+
* @param hashes - Stored template hashes
|
|
140
|
+
* @returns Map of path to modification status
|
|
141
|
+
*/
|
|
142
|
+
export function getModificationStatus(cwd, relativePaths, hashes) {
|
|
143
|
+
const result = new Map();
|
|
144
|
+
for (const relativePath of relativePaths) {
|
|
145
|
+
result.set(relativePath, isTemplateModified(cwd, relativePath, hashes));
|
|
146
|
+
}
|
|
147
|
+
return result;
|
|
148
|
+
}
|
|
149
|
+
/**
|
|
150
|
+
* Directories to scan for template files during init
|
|
151
|
+
*/
|
|
152
|
+
const TEMPLATE_DIRS = [".trellis", ".claude", ".cursor"];
|
|
153
|
+
/**
|
|
154
|
+
* Patterns to exclude from hash tracking
|
|
155
|
+
*/
|
|
156
|
+
const EXCLUDE_FROM_HASH = [
|
|
157
|
+
".template-hashes.json", // Hash file itself
|
|
158
|
+
".version", // Version file
|
|
159
|
+
".gitignore", // Git ignore files
|
|
160
|
+
".developer/", // Developer-specific files
|
|
161
|
+
"workspace/", // Workspace files
|
|
162
|
+
"tasks/", // Task files
|
|
163
|
+
"agent-traces/", // Agent trace files
|
|
164
|
+
"backlog/", // Backlog files
|
|
165
|
+
".current-task/", // Current task marker
|
|
166
|
+
"spec/frontend/", // User-filled spec files
|
|
167
|
+
"spec/backend/", // User-filled spec files
|
|
168
|
+
".backup-", // Backup directories
|
|
169
|
+
];
|
|
170
|
+
/**
|
|
171
|
+
* Check if a path should be excluded from hash tracking
|
|
172
|
+
*/
|
|
173
|
+
function shouldExcludeFromHash(relativePath) {
|
|
174
|
+
for (const pattern of EXCLUDE_FROM_HASH) {
|
|
175
|
+
if (relativePath.includes(pattern)) {
|
|
176
|
+
return true;
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
return false;
|
|
180
|
+
}
|
|
181
|
+
/**
|
|
182
|
+
* Recursively collect all files in a directory
|
|
183
|
+
*/
|
|
184
|
+
function collectFiles(cwd, dir, relativeTo = "") {
|
|
185
|
+
const fullDir = path.join(cwd, dir);
|
|
186
|
+
if (!fs.existsSync(fullDir)) {
|
|
187
|
+
return [];
|
|
188
|
+
}
|
|
189
|
+
const files = [];
|
|
190
|
+
const entries = fs.readdirSync(fullDir, { withFileTypes: true });
|
|
191
|
+
for (const entry of entries) {
|
|
192
|
+
const relativePath = path.join(dir, entry.name);
|
|
193
|
+
if (shouldExcludeFromHash(relativePath)) {
|
|
194
|
+
continue;
|
|
195
|
+
}
|
|
196
|
+
if (entry.isDirectory()) {
|
|
197
|
+
files.push(...collectFiles(cwd, relativePath, relativeTo));
|
|
198
|
+
}
|
|
199
|
+
else if (entry.isFile()) {
|
|
200
|
+
files.push(relativePath);
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
return files;
|
|
204
|
+
}
|
|
205
|
+
/**
|
|
206
|
+
* Initialize template hashes after init
|
|
207
|
+
*
|
|
208
|
+
* Scans all template directories and computes hashes for files.
|
|
209
|
+
* This should be called at the end of `trellis init` to enable
|
|
210
|
+
* modification detection on subsequent updates.
|
|
211
|
+
*
|
|
212
|
+
* @param cwd - Working directory
|
|
213
|
+
* @returns Number of files hashed
|
|
214
|
+
*/
|
|
215
|
+
export function initializeHashes(cwd) {
|
|
216
|
+
const hashes = {};
|
|
217
|
+
// Collect all template files
|
|
218
|
+
for (const dir of TEMPLATE_DIRS) {
|
|
219
|
+
const files = collectFiles(cwd, dir);
|
|
220
|
+
for (const relativePath of files) {
|
|
221
|
+
const fullPath = path.join(cwd, relativePath);
|
|
222
|
+
try {
|
|
223
|
+
const content = fs.readFileSync(fullPath, "utf-8");
|
|
224
|
+
hashes[relativePath] = computeHash(content);
|
|
225
|
+
}
|
|
226
|
+
catch {
|
|
227
|
+
// Skip files that can't be read (binary, etc.)
|
|
228
|
+
}
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
saveHashes(cwd, hashes);
|
|
232
|
+
return Object.keys(hashes).length;
|
|
233
|
+
}
|
|
234
|
+
//# sourceMappingURL=template-hash.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"template-hash.js","sourceRoot":"","sources":["../../src/utils/template-hash.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAE,MAAM,SAAS,CAAC;AACzB,OAAO,IAAI,MAAM,WAAW,CAAC;AAE7B,OAAO,EAAE,SAAS,EAAE,MAAM,uBAAuB,CAAC;AAGlD,4CAA4C;AAC5C,MAAM,WAAW,GAAG,uBAAuB,CAAC;AAE5C;;GAEG;AACH,MAAM,UAAU,WAAW,CAAC,OAAe;IACzC,OAAO,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AACrE,CAAC;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,GAAW;IAChC,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,UAAU,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;IACtC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC/B,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACrD,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAmB,CAAC;IAC/C,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,EAAE,CAAC;IACZ,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,MAAsB;IAC5D,MAAM,UAAU,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;IACtC,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC;AAChE,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,YAAY,CAC1B,GAAW,EACX,KAA0B;IAE1B,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;IAE/B,KAAK,MAAM,CAAC,YAAY,EAAE,OAAO,CAAC,IAAI,KAAK,EAAE,CAAC;QAC5C,MAAM,CAAC,YAAY,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC,CAAC;IAC9C,CAAC;IAED,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;AAC1B,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,kBAAkB,CAAC,GAAW,EAAE,YAAoB;IAClE,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IAC9C,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC7B,OAAO;IACT,CAAC;IAED,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACnD,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;IAC/B,MAAM,CAAC,YAAY,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC,CAAC;IAC5C,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;AAC1B,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,YAAoB;IAC1D,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;IAC/B,MAAM,EAAE,CAAC,YAAY,CAAC,
EAAE,CAAC,EAAE,GAAG,IAAI,EAAE,GAAG,MAAM,CAAC;IAC9C,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CACxB,GAAW,EACX,OAAe,EACf,OAAe;IAEf,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC;QACpB,MAAM,EAAE,CAAC,OAAO,CAAC,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,MAAM,CAAC;QAChD,IAAI,CAAC,OAAO,CAAC,GAAG,QAAQ,CAAC;QACzB,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;IACxB,CAAC;AACH,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,kBAAkB,CAChC,GAAW,EACX,YAAoB,EACpB,MAAsB;IAEtB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IAE9C,2CAA2C;IAC3C,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC7B,OAAO,KAAK,CAAC;IACf,CAAC;IAED,sEAAsE;IACtE,MAAM,UAAU,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;IACxC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,gDAAgD;IAChD,MAAM,cAAc,GAAG,EAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IAC1D,MAAM,WAAW,GAAG,WAAW,CAAC,cAAc,CAAC,CAAC;IAEhD,OAAO,WAAW,KAAK,UAAU,CAAC;AACpC,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,uBAAuB,CACrC,GAAW,EACX,YAAoB,EACpB,eAAuB;IAEvB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IAE9C,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC7B,OAAO,KAAK,CAAC;IACf,CAAC;IAED,MAAM,cAAc,GAAG,EAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IAC1D,OAAO,cAAc,KAAK,eAAe,CAAC;AAC5C,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,qBAAqB,CACnC,GAAW,EACX,aAAuB,EACvB,MAAsB;IAEtB,MAAM,MAAM,GAAG,IAAI,GAAG,EAAmB,CAAC;IAE1C,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE,CAAC;QACzC,MAAM,CAAC,GAAG,CAAC,YAAY,EAAE,kBAAkB,CAAC,GAAG,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC,CAAC;IAC1E,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,MAAM,aAAa,GAAG,CAAC,UAAU,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;AAEzD;;GAEG;AACH,MAAM,iBAAiB,GAAG;IACxB,uBAAuB,EAAE,mBAAmB;IAC5C,UAAU,EAAE,eAAe;IAC3B,YAAY,EAAE,mBAAmB;IACjC,aAAa,EAAE,2BAA2B;IAC1C,YAAY,EAAE,kBAAkB;IAChC,QAAQ,EAAE,aAAa;IACvB,eAAe,EAAE,oBAAoB;IACrC,UAAU,EAAE,gBAAgB;IAC5B,gBAAgB,EAAE,sBAAsB;IACxC,gBAAgB,EAAE,yBAAyB;IAC3C,eAAe,EAAE,yBAAyB;IAC1C,UAAU,EAAE,qBAAqB;CAClC,CAAC;AAEF;;GAEG;AACH,SAAS,qBAAqB,CAAC,YAAoB;I
ACjD,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE,CAAC;QACxC,IAAI,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;YACnC,OAAO,IAAI,CAAC;QACd,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH,SAAS,YAAY,CACnB,GAAW,EACX,GAAW,EACX,aAAqB,EAAE;IAEvB,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACpC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;IAEjE,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;QAC5B,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;QAEhD,IAAI,qBAAqB,CAAC,YAAY,CAAC,EAAE,CAAC;YACxC,SAAS;QACX,CAAC;QAED,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;YACxB,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,GAAG,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC,CAAC;QAC7D,CAAC;aAAM,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC;YAC1B,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC3B,CAAC;IACH,CAAC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,gBAAgB,CAAC,GAAW;IAC1C,MAAM,MAAM,GAAmB,EAAE,CAAC;IAElC,6BAA6B;IAC7B,KAAK,MAAM,GAAG,IAAI,aAAa,EAAE,CAAC;QAChC,MAAM,KAAK,GAAG,YAAY,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;QAErC,KAAK,MAAM,YAAY,IAAI,KAAK,EAAE,CAAC;YACjC,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;YAC9C,IAAI,CAAC;gBACH,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;gBACnD,MAAM,CAAC,YAAY,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC,CAAC;YAC9C,CAAC;YAAC,MAAM,CAAC;gBACP,+CAA+C;YACjD,CAAC;QACH,CAAC;IACH,CAAC;IAED,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC;AACpC,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,220 +0,0 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# Backlog utility functions
|
|
3
|
-
#
|
|
4
|
-
# Usage: source this file in other scripts
|
|
5
|
-
# source "$(dirname "$0")/common/backlog.sh"
|
|
6
|
-
#
|
|
7
|
-
# Provides:
|
|
8
|
-
# create_backlog_issue - Create backlog JSON file
|
|
9
|
-
# delete_backlog_issue - Delete backlog file
|
|
10
|
-
# list_backlog_issues - List all backlog issues
|
|
11
|
-
# get_backlog_stats - Get P0/P1/P2/P3 counts
|
|
12
|
-
|
|
13
|
-
# Ensure paths.sh is loaded
|
|
14
|
-
if ! type get_repo_root &>/dev/null; then
|
|
15
|
-
echo "Error: paths.sh must be sourced before backlog.sh" >&2
|
|
16
|
-
exit 1
|
|
17
|
-
fi
|
|
18
|
-
|
|
19
|
-
# =============================================================================
|
|
20
|
-
# Helper Functions
|
|
21
|
-
# =============================================================================
|
|
22
|
-
|
|
23
|
-
# Convert title to slug (only works with ASCII)
|
|
24
|
-
_slugify() {
|
|
25
|
-
local result=$(echo "$1" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g' | sed 's/--*/-/g' | sed 's/^-//' | sed 's/-$//')
|
|
26
|
-
echo "$result"
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
# Ensure backlog directory exists
|
|
30
|
-
_ensure_backlog_dir() {
|
|
31
|
-
local repo_root="${1:-$(get_repo_root)}"
|
|
32
|
-
local backlog_dir=$(get_backlog_dir "$repo_root")
|
|
33
|
-
if [[ ! -d "$backlog_dir" ]]; then
|
|
34
|
-
mkdir -p "$backlog_dir"
|
|
35
|
-
fi
|
|
36
|
-
}
|
|
37
|
-
|
|
38
|
-
# =============================================================================
|
|
39
|
-
# Public Functions
|
|
40
|
-
# =============================================================================
|
|
41
|
-
|
|
42
|
-
# Create a backlog issue
|
|
43
|
-
# Args: title, assignee, priority, [slug], [description], [creator]
|
|
44
|
-
# Returns: issue ID (e.g., "260119-my-feature")
|
|
45
|
-
create_backlog_issue() {
|
|
46
|
-
local title="$1"
|
|
47
|
-
local assignee="$2"
|
|
48
|
-
local priority="${3:-P2}"
|
|
49
|
-
local slug="$4"
|
|
50
|
-
local description="${5:-}"
|
|
51
|
-
local creator="${6:-$assignee}"
|
|
52
|
-
local repo_root="${7:-$(get_repo_root)}"
|
|
53
|
-
|
|
54
|
-
# Validate required fields
|
|
55
|
-
if [[ -z "$title" ]] || [[ -z "$assignee" ]]; then
|
|
56
|
-
echo "Error: title and assignee are required" >&2
|
|
57
|
-
return 1
|
|
58
|
-
fi
|
|
59
|
-
|
|
60
|
-
# Validate priority
|
|
61
|
-
if [[ ! "$priority" =~ ^P[0-3]$ ]]; then
|
|
62
|
-
echo "Error: priority must be P0, P1, P2, or P3" >&2
|
|
63
|
-
return 1
|
|
64
|
-
fi
|
|
65
|
-
|
|
66
|
-
# Validate assignee exists
|
|
67
|
-
local assignee_dir="$repo_root/$DIR_WORKFLOW/$DIR_PROGRESS/$assignee"
|
|
68
|
-
if [[ ! -d "$assignee_dir" ]]; then
|
|
69
|
-
echo "Error: developer '$assignee' not found" >&2
|
|
70
|
-
return 1
|
|
71
|
-
fi
|
|
72
|
-
|
|
73
|
-
_ensure_backlog_dir "$repo_root"
|
|
74
|
-
|
|
75
|
-
# Generate slug if not provided
|
|
76
|
-
if [[ -z "$slug" ]]; then
|
|
77
|
-
slug=$(_slugify "$title")
|
|
78
|
-
fi
|
|
79
|
-
|
|
80
|
-
# Validate slug
|
|
81
|
-
if [[ -z "$slug" ]]; then
|
|
82
|
-
echo "Error: could not generate slug from title" >&2
|
|
83
|
-
return 1
|
|
84
|
-
fi
|
|
85
|
-
|
|
86
|
-
# Generate ID
|
|
87
|
-
local date_prefix=$(generate_backlog_id)
|
|
88
|
-
local id="${date_prefix}-${slug}"
|
|
89
|
-
local backlog_dir=$(get_backlog_dir "$repo_root")
|
|
90
|
-
local issue_file="$backlog_dir/${id}.json"
|
|
91
|
-
|
|
92
|
-
# Check if file exists
|
|
93
|
-
if [[ -f "$issue_file" ]]; then
|
|
94
|
-
echo "Error: issue already exists: ${id}.json" >&2
|
|
95
|
-
return 1
|
|
96
|
-
fi
|
|
97
|
-
|
|
98
|
-
# Create issue JSON
|
|
99
|
-
local created_at=$(date -Iseconds)
|
|
100
|
-
cat > "$issue_file" << EOF
|
|
101
|
-
{
|
|
102
|
-
"id": "$id",
|
|
103
|
-
"title": "$title",
|
|
104
|
-
"description": "$description",
|
|
105
|
-
"priority": "$priority",
|
|
106
|
-
"status": "in_progress",
|
|
107
|
-
"assigned_to": "$assignee",
|
|
108
|
-
"created_by": "$creator",
|
|
109
|
-
"created_at": "$created_at"
|
|
110
|
-
}
|
|
111
|
-
EOF
|
|
112
|
-
|
|
113
|
-
# Return the ID
|
|
114
|
-
echo "$id"
|
|
115
|
-
}
|
|
116
|
-
|
|
117
|
-
# Complete a backlog issue (set status to done)
|
|
118
|
-
# Args: backlog_ref (e.g., "260119-my-feature.json")
|
|
119
|
-
complete_backlog_issue() {
|
|
120
|
-
local backlog_ref="$1"
|
|
121
|
-
local repo_root="${2:-$(get_repo_root)}"
|
|
122
|
-
|
|
123
|
-
if [[ -z "$backlog_ref" ]]; then
|
|
124
|
-
return 0
|
|
125
|
-
fi
|
|
126
|
-
|
|
127
|
-
local backlog_dir=$(get_backlog_dir "$repo_root")
|
|
128
|
-
local backlog_file="$backlog_dir/$backlog_ref"
|
|
129
|
-
|
|
130
|
-
if [[ -f "$backlog_file" ]]; then
|
|
131
|
-
local completed_at=$(date -Iseconds)
|
|
132
|
-
jq --arg completed_at "$completed_at" '.status = "done" | .completed_at = $completed_at' "$backlog_file" > "${backlog_file}.tmp"
|
|
133
|
-
mv "${backlog_file}.tmp" "$backlog_file"
|
|
134
|
-
return 0
|
|
135
|
-
fi
|
|
136
|
-
|
|
137
|
-
return 1
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
# Delete a backlog issue by filename
|
|
141
|
-
# Args: backlog_ref (e.g., "260119-my-feature.json")
|
|
142
|
-
delete_backlog_issue() {
|
|
143
|
-
local backlog_ref="$1"
|
|
144
|
-
local repo_root="${2:-$(get_repo_root)}"
|
|
145
|
-
|
|
146
|
-
if [[ -z "$backlog_ref" ]]; then
|
|
147
|
-
return 0
|
|
148
|
-
fi
|
|
149
|
-
|
|
150
|
-
local backlog_dir=$(get_backlog_dir "$repo_root")
|
|
151
|
-
local backlog_file="$backlog_dir/$backlog_ref"
|
|
152
|
-
|
|
153
|
-
if [[ -f "$backlog_file" ]]; then
|
|
154
|
-
rm -f "$backlog_file"
|
|
155
|
-
return 0
|
|
156
|
-
fi
|
|
157
|
-
|
|
158
|
-
return 1
|
|
159
|
-
}
|
|
160
|
-
|
|
161
|
-
# List backlog issues
|
|
162
|
-
# Args: [filter_priority], [filter_status]
|
|
163
|
-
# Output: formatted list to stdout
|
|
164
|
-
list_backlog_issues() {
|
|
165
|
-
local filter_priority="$1"
|
|
166
|
-
local filter_status="$2"
|
|
167
|
-
local repo_root="${3:-$(get_repo_root)}"
|
|
168
|
-
|
|
169
|
-
local backlog_dir=$(get_backlog_dir "$repo_root")
|
|
170
|
-
|
|
171
|
-
if [[ ! -d "$backlog_dir" ]]; then
|
|
172
|
-
return 0
|
|
173
|
-
fi
|
|
174
|
-
|
|
175
|
-
for f in "$backlog_dir"/*.json; do
|
|
176
|
-
if [[ -f "$f" ]]; then
|
|
177
|
-
local id=$(jq -r '.id' "$f")
|
|
178
|
-
local title=$(jq -r '.title' "$f")
|
|
179
|
-
local priority=$(jq -r '.priority // "P2"' "$f")
|
|
180
|
-
local status=$(jq -r '.status // "open"' "$f")
|
|
181
|
-
local assignee=$(jq -r '.assigned_to' "$f")
|
|
182
|
-
|
|
183
|
-
# Apply filters
|
|
184
|
-
if [[ -n "$filter_priority" ]] && [[ "$priority" != "$filter_priority" ]]; then
|
|
185
|
-
continue
|
|
186
|
-
fi
|
|
187
|
-
if [[ -n "$filter_status" ]] && [[ "$status" != "$filter_status" ]]; then
|
|
188
|
-
continue
|
|
189
|
-
fi
|
|
190
|
-
|
|
191
|
-
echo "$priority|$id|$title|$status|$assignee"
|
|
192
|
-
fi
|
|
193
|
-
done
|
|
194
|
-
}
|
|
195
|
-
|
|
196
|
-
# Get backlog statistics
|
|
197
|
-
# Output: "P0:N P1:N P2:N P3:N Total:N"
|
|
198
|
-
get_backlog_stats() {
|
|
199
|
-
local repo_root="${1:-$(get_repo_root)}"
|
|
200
|
-
local backlog_dir=$(get_backlog_dir "$repo_root")
|
|
201
|
-
|
|
202
|
-
local p0=0 p1=0 p2=0 p3=0 total=0
|
|
203
|
-
|
|
204
|
-
if [[ -d "$backlog_dir" ]]; then
|
|
205
|
-
for f in "$backlog_dir"/*.json; do
|
|
206
|
-
if [[ -f "$f" ]]; then
|
|
207
|
-
local priority=$(jq -r '.priority // "P2"' "$f" 2>/dev/null)
|
|
208
|
-
case "$priority" in
|
|
209
|
-
P0) ((p0++)) ;;
|
|
210
|
-
P1) ((p1++)) ;;
|
|
211
|
-
P2) ((p2++)) ;;
|
|
212
|
-
P3) ((p3++)) ;;
|
|
213
|
-
esac
|
|
214
|
-
((total++))
|
|
215
|
-
fi
|
|
216
|
-
done
|
|
217
|
-
fi
|
|
218
|
-
|
|
219
|
-
echo "P0:$p0 P1:$p1 P2:$p2 P3:$p3 Total:$total"
|
|
220
|
-
}
|
|
@@ -1,194 +0,0 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# Feature utility functions
|
|
3
|
-
#
|
|
4
|
-
# Usage: source this file in other scripts
|
|
5
|
-
# source "$(dirname "$0")/common/feature-utils.sh"
|
|
6
|
-
#
|
|
7
|
-
# Provides:
|
|
8
|
-
# is_safe_feature_path - Validate feature path is safe to operate on
|
|
9
|
-
# find_feature_by_name - Find feature directory by name
|
|
10
|
-
# complete_feature_backlog - Complete linked backlog issue
|
|
11
|
-
# archive_feature_dir - Archive feature to monthly directory
|
|
12
|
-
|
|
13
|
-
# Ensure dependencies are loaded: this library builds on helpers defined in
# paths.sh (get_repo_root) and backlog.sh (complete_backlog_issue). Fail fast
# with a clear message if either was not sourced first, rather than failing
# later with a confusing "command not found" mid-operation.
if ! type get_repo_root &>/dev/null; then
  echo "Error: paths.sh must be sourced before feature-utils.sh" >&2
  exit 1
fi

if ! type complete_backlog_issue &>/dev/null; then
  echo "Error: backlog.sh must be sourced before feature-utils.sh" >&2
  exit 1
fi
|
|
23
|
-
|
|
24
|
-
# =============================================================================
|
|
25
|
-
# Path Safety
|
|
26
|
-
# =============================================================================
|
|
27
|
-
|
|
28
|
-
# Check whether a relative feature path is safe to operate on.
# Args: feature_path (relative), [repo_root]
# Returns: 0 if safe, 1 if dangerous
# Outputs: error message to stderr when unsafe
is_safe_feature_path() {
  local candidate="$1"
  local root="${2:-$(get_repo_root)}"

  # Pattern-based rejection: empty/null values, absolute paths, and anything
  # that could escape the tree via "." or "..". First matching arm wins, so
  # the precedence mirrors the original check order.
  case "$candidate" in
    ""|null)
      echo "Error: empty or null feature path" >&2
      return 1
      ;;
    /*)
      echo "Error: absolute path not allowed: $candidate" >&2
      return 1
      ;;
    .|..|./|./*|*..*)
      echo "Error: path traversal not allowed: $candidate" >&2
      return 1
      ;;
  esac

  # Belt-and-braces: if the path exists, make sure it does not resolve to the
  # repo root itself (e.g. via a symlink).
  local target="${root}/${candidate}"
  if [[ -e "$target" ]]; then
    local resolved root_resolved
    resolved=$(realpath "$target" 2>/dev/null)
    root_resolved=$(realpath "$root" 2>/dev/null)
    if [[ "$resolved" = "$root_resolved" ]]; then
      echo "Error: path resolves to repo root: $candidate" >&2
      return 1
    fi
  fi

  return 0
}
|
|
69
|
-
|
|
70
|
-
# =============================================================================
|
|
71
|
-
# Feature Lookup
|
|
72
|
-
# =============================================================================
|
|
73
|
-
|
|
74
|
-
# Find a feature directory by name (exact match first, then suffix match).
# Args: feature_name, features_dir
# Outputs: absolute path to the feature directory on stdout
# Returns: 0 if found, 1 otherwise
find_feature_by_name() {
  local name="$1"
  local search_dir="$2"
  local pattern match

  [[ -n "$name" && -n "$search_dir" ]] || return 1

  # First pass: exact directory name. Second pass: dated-prefix suffix match,
  # so "my-feature" also matches e.g. "250119-my-feature".
  for pattern in "${name}" "*-${name}"; do
    match=$(find "$search_dir" -maxdepth 1 -type d -name "$pattern" 2>/dev/null | head -1)
    if [[ -n "$match" && -d "$match" ]]; then
      echo "$match"
      return 0
    fi
  done

  return 1
}
|
|
100
|
-
|
|
101
|
-
# =============================================================================
|
|
102
|
-
# Backlog Integration
|
|
103
|
-
# =============================================================================
|
|
104
|
-
|
|
105
|
-
# Complete the backlog issue linked to a feature.
# Args: feature_dir_abs, [repo_root]
# Outputs: the backlog ref on stdout when an issue was completed
# Returns: 0 if completed or no linked backlog, 1 on completion error
complete_feature_backlog() {
  local dir="$1"
  local root="${2:-$(get_repo_root)}"
  local manifest="$dir/feature.json"

  # No manifest means nothing to complete — treat as success.
  [[ -f "$manifest" ]] || return 0

  local ref
  ref=$(jq -r '.backlog_ref // empty' "$manifest" 2>/dev/null)

  # No linked backlog issue — also success.
  [[ -n "$ref" ]] || return 0

  complete_backlog_issue "$ref" "$root" || return 1
  echo "$ref"
  return 0
}
|
|
131
|
-
|
|
132
|
-
# =============================================================================
|
|
133
|
-
# Archive Operations
|
|
134
|
-
# =============================================================================
|
|
135
|
-
|
|
136
|
-
# Archive a feature directory to archive/{YYYY-MM}/ next to its siblings.
# Args: feature_dir_abs, [repo_root]
# Outputs: archive destination path on stdout
# Returns: 0 on success, 1 on error
archive_feature_dir() {
  local feature_dir_abs="$1"
  local repo_root="${2:-$(get_repo_root)}"

  if [[ ! -d "$feature_dir_abs" ]]; then
    echo "Error: feature directory not found: $feature_dir_abs" >&2
    return 1
  fi

  # Archive lives beside the feature, under the features directory (its parent).
  local features_dir month_dir feature_name
  features_dir=$(dirname "$feature_dir_abs")
  month_dir="$features_dir/archive/$(date +%Y-%m)"

  # Check each filesystem step: the original ignored mkdir/mv failures and
  # would report success (and echo a destination) for a feature that was
  # never actually moved.
  if ! mkdir -p "$month_dir"; then
    echo "Error: cannot create archive directory: $month_dir" >&2
    return 1
  fi

  feature_name=$(basename "$feature_dir_abs")
  if ! mv -- "$feature_dir_abs" "$month_dir/"; then
    echo "Error: failed to move $feature_dir_abs to $month_dir" >&2
    return 1
  fi

  echo "$month_dir/$feature_name"
  return 0
}
|
|
166
|
-
|
|
167
|
-
# Complete archive workflow: complete the linked backlog issue, then archive
# the feature directory.
# Args: feature_dir_abs, [repo_root]
# Outputs: "backlog_completed:<ref>" (if any) and "archived_to:<path>" lines
# Returns: 0 on success, 1 on error
archive_feature_complete() {
  local dir="$1"
  local root="${2:-$(get_repo_root)}"

  if [[ ! -d "$dir" ]]; then
    echo "Error: feature directory not found: $dir" >&2
    return 1
  fi

  # Step 1: close out the linked backlog issue, if the feature has one.
  local ref
  ref=$(complete_feature_backlog "$dir" "$root")
  [[ -z "$ref" ]] || echo "backlog_completed:$ref"

  # Step 2: move the feature into the monthly archive.
  local dest
  dest=$(archive_feature_dir "$dir" "$root") || return 1
  echo "archived_to:$dest"
  return 0
}
|
|
File without changes
|