gsd-opencode 1.33.3 → 1.35.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agents/gsd-advisor-researcher.md +23 -0
- package/agents/gsd-ai-researcher.md +142 -0
- package/agents/gsd-code-fixer.md +523 -0
- package/agents/gsd-code-reviewer.md +361 -0
- package/agents/gsd-debugger.md +14 -1
- package/agents/gsd-domain-researcher.md +162 -0
- package/agents/gsd-eval-auditor.md +170 -0
- package/agents/gsd-eval-planner.md +161 -0
- package/agents/gsd-executor.md +70 -7
- package/agents/gsd-framework-selector.md +167 -0
- package/agents/gsd-intel-updater.md +320 -0
- package/agents/gsd-phase-researcher.md +26 -0
- package/agents/gsd-plan-checker.md +12 -0
- package/agents/gsd-planner.md +16 -6
- package/agents/gsd-project-researcher.md +23 -0
- package/agents/gsd-ui-researcher.md +23 -0
- package/agents/gsd-verifier.md +55 -1
- package/commands/gsd/gsd-ai-integration-phase.md +36 -0
- package/commands/gsd/gsd-audit-fix.md +33 -0
- package/commands/gsd/gsd-autonomous.md +1 -0
- package/commands/gsd/gsd-code-review-fix.md +52 -0
- package/commands/gsd/gsd-code-review.md +55 -0
- package/commands/gsd/gsd-eval-review.md +32 -0
- package/commands/gsd/gsd-explore.md +27 -0
- package/commands/gsd/gsd-from-gsd2.md +45 -0
- package/commands/gsd/gsd-import.md +36 -0
- package/commands/gsd/gsd-intel.md +183 -0
- package/commands/gsd/gsd-next.md +2 -0
- package/commands/gsd/gsd-reapply-patches.md +58 -3
- package/commands/gsd/gsd-review.md +4 -2
- package/commands/gsd/gsd-scan.md +26 -0
- package/commands/gsd/gsd-undo.md +34 -0
- package/commands/gsd/gsd-workstreams.md +6 -6
- package/get-shit-done/bin/gsd-tools.cjs +143 -5
- package/get-shit-done/bin/lib/commands.cjs +10 -2
- package/get-shit-done/bin/lib/config.cjs +71 -37
- package/get-shit-done/bin/lib/core.cjs +70 -8
- package/get-shit-done/bin/lib/gsd2-import.cjs +511 -0
- package/get-shit-done/bin/lib/init.cjs +20 -6
- package/get-shit-done/bin/lib/intel.cjs +660 -0
- package/get-shit-done/bin/lib/learnings.cjs +378 -0
- package/get-shit-done/bin/lib/milestone.cjs +25 -15
- package/get-shit-done/bin/lib/model-profiles.cjs +17 -17
- package/get-shit-done/bin/lib/phase.cjs +148 -112
- package/get-shit-done/bin/lib/roadmap.cjs +12 -5
- package/get-shit-done/bin/lib/security.cjs +119 -0
- package/get-shit-done/bin/lib/state.cjs +283 -221
- package/get-shit-done/bin/lib/template.cjs +8 -4
- package/get-shit-done/bin/lib/verify.cjs +42 -5
- package/get-shit-done/references/ai-evals.md +156 -0
- package/get-shit-done/references/ai-frameworks.md +186 -0
- package/get-shit-done/references/common-bug-patterns.md +114 -0
- package/get-shit-done/references/few-shot-examples/plan-checker.md +73 -0
- package/get-shit-done/references/few-shot-examples/verifier.md +109 -0
- package/get-shit-done/references/gates.md +70 -0
- package/get-shit-done/references/ios-scaffold.md +123 -0
- package/get-shit-done/references/model-profile-resolution.md +6 -7
- package/get-shit-done/references/model-profiles.md +20 -14
- package/get-shit-done/references/planning-config.md +237 -0
- package/get-shit-done/references/thinking-models-debug.md +44 -0
- package/get-shit-done/references/thinking-models-execution.md +50 -0
- package/get-shit-done/references/thinking-models-planning.md +62 -0
- package/get-shit-done/references/thinking-models-research.md +50 -0
- package/get-shit-done/references/thinking-models-verification.md +55 -0
- package/get-shit-done/references/thinking-partner.md +96 -0
- package/get-shit-done/references/universal-anti-patterns.md +6 -1
- package/get-shit-done/references/verification-overrides.md +227 -0
- package/get-shit-done/templates/AI-SPEC.md +246 -0
- package/get-shit-done/workflows/add-tests.md +3 -0
- package/get-shit-done/workflows/add-todo.md +2 -0
- package/get-shit-done/workflows/ai-integration-phase.md +284 -0
- package/get-shit-done/workflows/audit-fix.md +154 -0
- package/get-shit-done/workflows/autonomous.md +33 -2
- package/get-shit-done/workflows/check-todos.md +2 -0
- package/get-shit-done/workflows/cleanup.md +2 -0
- package/get-shit-done/workflows/code-review-fix.md +497 -0
- package/get-shit-done/workflows/code-review.md +515 -0
- package/get-shit-done/workflows/complete-milestone.md +40 -15
- package/get-shit-done/workflows/diagnose-issues.md +1 -1
- package/get-shit-done/workflows/discovery-phase.md +3 -1
- package/get-shit-done/workflows/discuss-phase-assumptions.md +1 -1
- package/get-shit-done/workflows/discuss-phase.md +21 -7
- package/get-shit-done/workflows/do.md +2 -0
- package/get-shit-done/workflows/docs-update.md +2 -0
- package/get-shit-done/workflows/eval-review.md +155 -0
- package/get-shit-done/workflows/execute-phase.md +307 -57
- package/get-shit-done/workflows/execute-plan.md +64 -93
- package/get-shit-done/workflows/explore.md +136 -0
- package/get-shit-done/workflows/help.md +1 -1
- package/get-shit-done/workflows/import.md +273 -0
- package/get-shit-done/workflows/inbox.md +387 -0
- package/get-shit-done/workflows/manager.md +4 -10
- package/get-shit-done/workflows/new-milestone.md +3 -1
- package/get-shit-done/workflows/new-project.md +2 -0
- package/get-shit-done/workflows/new-workspace.md +2 -0
- package/get-shit-done/workflows/next.md +56 -0
- package/get-shit-done/workflows/note.md +2 -0
- package/get-shit-done/workflows/plan-phase.md +97 -17
- package/get-shit-done/workflows/plant-seed.md +3 -0
- package/get-shit-done/workflows/pr-branch.md +41 -13
- package/get-shit-done/workflows/profile-user.md +4 -2
- package/get-shit-done/workflows/quick.md +99 -4
- package/get-shit-done/workflows/remove-workspace.md +2 -0
- package/get-shit-done/workflows/review.md +53 -6
- package/get-shit-done/workflows/scan.md +98 -0
- package/get-shit-done/workflows/secure-phase.md +2 -0
- package/get-shit-done/workflows/settings.md +18 -3
- package/get-shit-done/workflows/ship.md +3 -0
- package/get-shit-done/workflows/ui-phase.md +10 -2
- package/get-shit-done/workflows/ui-review.md +2 -0
- package/get-shit-done/workflows/undo.md +314 -0
- package/get-shit-done/workflows/update.md +2 -0
- package/get-shit-done/workflows/validate-phase.md +2 -0
- package/get-shit-done/workflows/verify-phase.md +83 -0
- package/get-shit-done/workflows/verify-work.md +12 -1
- package/package.json +1 -1
- package/skills/gsd-code-review/SKILL.md +48 -0
- package/skills/gsd-code-review-fix/SKILL.md +44 -0
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Learnings — Global knowledge store with CRUD operations
|
|
3
|
+
*
|
|
4
|
+
* Provides a cross-project learnings store at ~/.gsd/knowledge/.
|
|
5
|
+
* Each learning is stored as an individual JSON file with content-hash
|
|
6
|
+
* deduplication. Supports write, read, list, query, delete, copy-from-project,
|
|
7
|
+
* and prune operations.
|
|
8
|
+
*
|
|
9
|
+
* Storage format: { id, source_project, date, context, learning, tags, content_hash }
|
|
10
|
+
* File naming: {id}.json
|
|
11
|
+
* Deduplication: SHA-256 of learning text + source_project
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
'use strict';
|
|
15
|
+
|
|
16
|
+
const fs = require('fs');
|
|
17
|
+
const path = require('path');
|
|
18
|
+
const crypto = require('crypto');
|
|
19
|
+
const os = require('os');
|
|
20
|
+
const { output, error: coreError } = require('./core.cjs');
|
|
21
|
+
|
|
22
|
+
// ─── Constants ───────────────────────────────────────────────────────────────

const DEFAULT_STORE_DIR = path.join(os.homedir(), '.gsd', 'knowledge');

// ─── Helpers ─────────────────────────────────────────────────────────────────

/**
 * Resolve the learnings store directory.
 * An explicit opts.storeDir (used by tests) wins; otherwise the global
 * default under the user's home directory is used.
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {string}
 */
function getStoreDir(opts) {
  if (opts && opts.storeDir) return opts.storeDir;
  return DEFAULT_STORE_DIR;
}
|
|
37
|
+
|
|
38
|
+
/**
 * Create the store directory (recursively) if it does not exist yet.
 * The store is created lazily on first write, not at install time.
 * @param {string} dir
 */
function ensureStoreDir(dir) {
  if (fs.existsSync(dir)) return;
  fs.mkdirSync(dir, { recursive: true });
}
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Generate a content hash for deduplication.
|
|
50
|
+
* Uses SHA-256 of learning text combined with source_project.
|
|
51
|
+
* @param {string} learning
|
|
52
|
+
* @param {string} sourceProject
|
|
53
|
+
* @returns {string}
|
|
54
|
+
*/
|
|
55
|
+
function contentHash(learning, sourceProject) {
|
|
56
|
+
return crypto.createHash('sha256')
|
|
57
|
+
.update(learning + '\n' + sourceProject)
|
|
58
|
+
.digest('hex');
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
 * Create a unique learning ID: base-36 millisecond timestamp plus
 * 8 hex characters of randomness (4 random bytes).
 * @returns {string}
 */
function generateId() {
  const stamp = Date.now().toString(36);
  const suffix = crypto.randomBytes(4).toString('hex');
  return stamp + '-' + suffix;
}
|
|
70
|
+
|
|
71
|
+
/**
 * Read and parse one learning JSON file.
 * Malformed or unreadable files produce a warning on stderr and a null
 * return, so a single corrupt record never aborts a whole store scan.
 * @param {string} filePath
 * @returns {object|null}
 */
function readLearningFile(filePath) {
  try {
    const raw = fs.readFileSync(filePath, 'utf-8');
    return JSON.parse(raw);
  } catch (err) {
    process.stderr.write(`Warning: skipping malformed file ${filePath}: ${err.message}\n`);
    return null;
  }
}
|
|
86
|
+
|
|
87
|
+
// ─── CRUD Operations ─────────────────────────────────────────────────────────

/**
 * write a learning to the global store.
 * Deduplicates by content hash — same content from same project is not stored twice.
 *
 * @param {object} entry
 * @param {string} entry.source_project - Project name or path
 * @param {string} entry.learning - The learning text
 * @param {string} [entry.context] - Additional context
 * @param {string[]} [entry.tags] - Tags for querying
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {{ id: string, created: boolean, content_hash: string }}
 * @throws {TypeError} if entry.learning is not a non-empty string or
 *   entry.source_project is not a string
 */
function learningsWrite(entry, opts) {
  // Guard required fields. Without this, a missing `learning` would hash the
  // string "undefined" and JSON.stringify would silently drop the field,
  // leaving a corrupt record on disk that list/query can't use.
  if (!entry || typeof entry.learning !== 'string' || !entry.learning.trim()) {
    throw new TypeError('learningsWrite: entry.learning (non-empty string) is required');
  }
  if (typeof entry.source_project !== 'string') {
    throw new TypeError('learningsWrite: entry.source_project (string) is required');
  }

  const dir = getStoreDir(opts);
  ensureStoreDir(dir);

  const hash = contentHash(entry.learning, entry.source_project);

  // Dedup check: linear scan over existing records. Fine while stores are
  // small; switch to a hash-keyed index if the store grows large.
  const files = fs.readdirSync(dir).filter(f => f.endsWith('.json'));
  for (const file of files) {
    const existing = readLearningFile(path.join(dir, file));
    if (existing && existing.content_hash === hash) {
      return { id: existing.id, created: false, content_hash: hash };
    }
  }

  const id = generateId();
  const record = {
    id,
    source_project: entry.source_project,
    date: new Date().toISOString(),
    context: entry.context || '',
    learning: entry.learning,
    tags: entry.tags || [],
    content_hash: hash,
  };

  fs.writeFileSync(path.join(dir, `${id}.json`), JSON.stringify(record, null, 2), 'utf-8');
  return { id, created: true, content_hash: hash };
}
|
|
131
|
+
|
|
132
|
+
/**
 * read a single learning by ID.
 * The ID is validated against the generated-ID shape before touching the
 * filesystem, which also blocks path traversal via a crafted ID.
 *
 * @param {string} id
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {object|null}
 */
function learningsRead(id, opts) {
  const idShape = /^[a-z0-9]+-[a-f0-9]+$/;
  if (!idShape.test(id)) return null;
  const filePath = path.join(getStoreDir(opts), `${id}.json`);
  return fs.existsSync(filePath) ? readLearningFile(filePath) : null;
}
|
|
147
|
+
|
|
148
|
+
/**
 * List all learnings, sorted by date (newest first).
 *
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {object[]}
 */
function learningsList(opts) {
  const dir = getStoreDir(opts);
  if (!fs.existsSync(dir)) return [];

  const records = fs.readdirSync(dir)
    .filter(name => name.endsWith('.json'))
    .map(name => readLearningFile(path.join(dir, name)))
    .filter(Boolean); // malformed files come back null and are dropped

  // Newest first.
  records.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
  return records;
}
|
|
170
|
+
|
|
171
|
+
/**
 * Query learnings by tag.
 *
 * @param {object} query
 * @param {string} [query.tag] - Tag to filter by
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {object[]}
 */
function learningsQuery(query, opts) {
  const everything = learningsList(opts);
  const tag = query && query.tag;
  if (!tag) return everything;
  return everything.filter(record => record.tags && record.tags.includes(tag));
}
|
|
187
|
+
|
|
188
|
+
/**
 * Delete a learning by ID.
 * Validates the ID shape first (which also blocks path traversal).
 *
 * @param {string} id
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {boolean} true if deleted, false if not found
 */
function learningsDelete(id, opts) {
  if (!/^[a-z0-9]+-[a-f0-9]+$/.test(id)) return false;
  const target = path.join(getStoreDir(opts), `${id}.json`);
  if (!fs.existsSync(target)) return false;
  fs.unlinkSync(target);
  return true;
}
|
|
204
|
+
|
|
205
|
+
/**
 * Copy learnings from a project's LEARNINGS.md into the global store.
 * Parses markdown sections as individual learnings. Deduplicates by content hash.
 *
 * Expected LEARNINGS.md format:
 * ## Section Title
 * Learning content paragraph(s)...
 *
 * ## Another Section
 * More content...
 *
 * @param {string} planningDir - Path to .planning/ directory (or directory containing LEARNINGS.md)
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @param {string} [opts.sourceProject] - Project name (defaults to directory basename)
 * @returns {{ total: number, created: number, skipped: number }}
 */
function learningsCopyFromProject(planningDir, opts) {
  const learningsPath = path.join(planningDir, 'LEARNINGS.md');
  if (!fs.existsSync(learningsPath)) {
    return { total: 0, created: 0, skipped: 0 };
  }

  const markdown = fs.readFileSync(learningsPath, 'utf-8');
  const sourceProject =
    (opts && opts.sourceProject) || path.basename(path.resolve(planningDir, '..'));

  let created = 0;
  let skipped = 0;

  // Everything before the first "## " heading is preamble — drop it, then
  // treat each heading-delimited section as one learning.
  for (const section of markdown.split(/^## /m).slice(1)) {
    const [firstLine, ...restLines] = section.trim().split('\n');
    const title = firstLine.trim();
    const body = restLines.join('\n').trim();
    if (!body) continue; // heading with no content — nothing to store

    // Crude tagging: each word of the title longer than 2 chars becomes a tag.
    const tags = title.toLowerCase().split(/\s+/).filter(word => word.length > 2);

    const outcome = learningsWrite(
      { source_project: sourceProject, learning: body, context: title, tags },
      opts
    );
    if (outcome.created) {
      created++;
    } else {
      skipped++;
    }
  }

  return { total: created + skipped, created, skipped };
}
|
|
261
|
+
|
|
262
|
+
/**
 * Prune learnings older than a given threshold.
 *
 * @param {string} olderThan - Duration string like "90d", "30d", "7d"
 * @param {object} [opts]
 * @param {string} [opts.storeDir] - Override store directory
 * @returns {{ removed: number, kept: number }}
 * @throws {Error} if olderThan is not of the form "<digits>d"
 */
function learningsPrune(olderThan, opts) {
  const parsed = /^(\d+)d$/.exec(olderThan);
  if (!parsed) {
    throw new Error(`Invalid duration format: "${olderThan}" — expected format like "90d"`);
  }

  const msPerDay = 24 * 60 * 60 * 1000;
  const cutoff = new Date(Date.now() - Number(parsed[1]) * msPerDay);
  const dir = getStoreDir(opts);
  if (!fs.existsSync(dir)) return { removed: 0, kept: 0 };

  let removed = 0;
  let kept = 0;
  for (const name of fs.readdirSync(dir)) {
    if (!name.endsWith('.json')) continue;
    const filePath = path.join(dir, name);
    const record = readLearningFile(filePath);
    if (!record) continue; // malformed — neither removed nor counted

    // An unparseable record.date yields NaN, and NaN comparisons are false,
    // so such records fall to the else branch and are kept, not deleted.
    if (new Date(record.date) < cutoff) {
      fs.unlinkSync(filePath);
      removed++;
    } else {
      kept++;
    }
  }

  return { removed, kept };
}
|
|
302
|
+
|
|
303
|
+
// ─── CLI Command Handlers ────────────────────────────────────────────────────

/**
 * Handle `gsd-tools learnings list`
 * @param {boolean} raw - Raw output flag
 */
function cmdLearningsList(raw) {
  const learnings = learningsList();
  output({ learnings, count: learnings.length }, raw);
}
|
|
313
|
+
|
|
314
|
+
/**
 * Handle `gsd-tools learnings query --tag <tag>`
 * @param {string} tag
 * @param {boolean} raw - Raw output flag
 */
function cmdLearningsQuery(tag, raw) {
  const learnings = learningsQuery({ tag });
  output({ learnings, count: learnings.length, tag }, raw);
}
|
|
323
|
+
|
|
324
|
+
/**
 * Handle `gsd-tools learnings copy`
 * Imports the project's .planning/LEARNINGS.md into the global store.
 * @param {string} cwd - Current working directory
 * @param {boolean} raw - Raw output flag
 */
function cmdLearningsCopy(cwd, raw) {
  output(learningsCopyFromProject(path.join(cwd, '.planning')), raw);
}
|
|
334
|
+
|
|
335
|
+
/**
 * Handle `gsd-tools learnings prune --older-than <duration>`
 * An invalid duration is reported via coreError instead of an uncaught throw.
 * @param {string} olderThan - Duration string like "90d"
 * @param {boolean} raw - Raw output flag
 */
function cmdLearningsPrune(olderThan, raw) {
  try {
    output(learningsPrune(olderThan), raw);
  } catch (err) {
    coreError(err.message);
  }
}
|
|
348
|
+
|
|
349
|
+
/**
 * Handle `gsd-tools learnings delete <id>`
 * @param {string} id
 * @param {boolean} raw - Raw output flag
 */
function cmdLearningsDelete(id, raw) {
  if (!/^[a-z0-9]+-[a-f0-9]+$/.test(id)) {
    coreError(`Invalid learning ID: "${id}"`);
    // Bail out: if coreError only reports (rather than exiting the process),
    // falling through would emit a second, contradictory payload after the
    // error has already been shown.
    return;
  }
  const deleted = learningsDelete(id);
  output({ id, deleted }, raw);
}
|
|
361
|
+
|
|
362
|
+
// ─── Exports ─────────────────────────────────────────────────────────────────

module.exports = {
  // Store operations (library API)
  learningsWrite,
  learningsRead,
  learningsList,
  learningsQuery,
  learningsDelete,
  learningsCopyFromProject,
  learningsPrune,
  // CLI handlers for `gsd-tools learnings <subcommand>`
  cmdLearningsList,
  cmdLearningsQuery,
  cmdLearningsCopy,
  cmdLearningsPrune,
  cmdLearningsDelete,
  // Exposed so callers/tests can reference the default store location
  DEFAULT_STORE_DIR,
};
|
|
@@ -41,29 +41,30 @@ function cmdRequirementsMarkComplete(cwd, reqIdsRaw, raw) {
|
|
|
41
41
|
const reqEscaped = escapeRegex(reqId);
|
|
42
42
|
|
|
43
43
|
// Update checkbox: - [ ] **REQ-ID** → - [x] **REQ-ID**
|
|
44
|
+
// Use replace() directly and compare — avoids test()+replace() global regex
|
|
45
|
+
// lastIndex bug where test() advances state and replace() misses matches.
|
|
44
46
|
const checkboxPattern = new RegExp(`(-\\s*\\[)[ ](\\]\\s*\\*\\*${reqEscaped}\\*\\*)`, 'gi');
|
|
45
|
-
|
|
46
|
-
|
|
47
|
+
const afterCheckbox = reqContent.replace(checkboxPattern, '$1x$2');
|
|
48
|
+
if (afterCheckbox !== reqContent) {
|
|
49
|
+
reqContent = afterCheckbox;
|
|
47
50
|
found = true;
|
|
48
51
|
}
|
|
49
52
|
|
|
50
53
|
// Update traceability table: | REQ-ID | Phase N | Pending | → | REQ-ID | Phase N | Complete |
|
|
51
54
|
const tablePattern = new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*Pending\\s*(\\|)`, 'gi');
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
reqContent =
|
|
55
|
-
new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*Pending\\s*(\\|)`, 'gi'),
|
|
56
|
-
'$1 Complete $2'
|
|
57
|
-
);
|
|
55
|
+
const afterTable = reqContent.replace(tablePattern, '$1 Complete $2');
|
|
56
|
+
if (afterTable !== reqContent) {
|
|
57
|
+
reqContent = afterTable;
|
|
58
58
|
found = true;
|
|
59
59
|
}
|
|
60
60
|
|
|
61
61
|
if (found) {
|
|
62
62
|
updated.push(reqId);
|
|
63
63
|
} else {
|
|
64
|
-
// Check if already complete before declaring not_found
|
|
65
|
-
|
|
66
|
-
const
|
|
64
|
+
// Check if already complete before declaring not_found.
|
|
65
|
+
// Non-global flag is fine here — we only need to know if a match exists.
|
|
66
|
+
const doneCheckbox = new RegExp(`-\\s*\\[x\\]\\s*\\*\\*${reqEscaped}\\*\\*`, 'i');
|
|
67
|
+
const doneTable = new RegExp(`\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|\\s*Complete\\s*\\|`, 'i');
|
|
67
68
|
if (doneCheckbox.test(reqContent) || doneTable.test(reqContent)) {
|
|
68
69
|
alreadyComplete.push(reqId);
|
|
69
70
|
} else {
|
|
@@ -246,15 +247,24 @@ function cmdMilestoneComplete(cwd, version, options, raw) {
|
|
|
246
247
|
output(result, raw);
|
|
247
248
|
}
|
|
248
249
|
|
|
249
|
-
function cmdPhasesClear(cwd, raw) {
|
|
250
|
+
function cmdPhasesClear(cwd, raw, args) {
|
|
250
251
|
const phasesDir = planningPaths(cwd).phases;
|
|
252
|
+
const confirm = Array.isArray(args) && args.includes('--confirm');
|
|
251
253
|
let cleared = 0;
|
|
252
254
|
|
|
253
255
|
if (fs.existsSync(phasesDir)) {
|
|
256
|
+
const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
|
|
257
|
+
const dirs = entries.filter(e => e.isDirectory() && !/^999(?:\.|$)/.test(e.name));
|
|
258
|
+
|
|
259
|
+
if (dirs.length > 0 && !confirm) {
|
|
260
|
+
error(
|
|
261
|
+
`phases clear would delete ${dirs.length} phase director${dirs.length === 1 ? 'y' : 'ies'}. ` +
|
|
262
|
+
`Pass --confirm to proceed.`
|
|
263
|
+
);
|
|
264
|
+
}
|
|
265
|
+
|
|
254
266
|
try {
|
|
255
|
-
const
|
|
256
|
-
for (const entry of entries) {
|
|
257
|
-
if (!entry.isDirectory()) continue;
|
|
267
|
+
for (const entry of dirs) {
|
|
258
268
|
fs.rmSync(path.join(phasesDir, entry.name), { recursive: true, force: true });
|
|
259
269
|
cleared++;
|
|
260
270
|
}
|
|
@@ -7,23 +7,23 @@
|
|
|
7
7
|
* would be faster, use fewer tokens, and be less error-prone).
|
|
8
8
|
*/
|
|
9
9
|
const MODEL_PROFILES = {
  // Each agent maps profile name → model tier. Every entry must define the
  // same set of profile keys, because VALID_PROFILES below is derived from
  // the 'gsd-planner' entry alone.
  'gsd-planner': { quality: 'opus', balanced: 'opus', budget: 'sonnet', adaptive: 'opus' },
  'gsd-roadmapper': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet', adaptive: 'sonnet' },
  'gsd-executor': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet', adaptive: 'sonnet' },
  'gsd-phase-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku', adaptive: 'sonnet' },
  'gsd-project-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku', adaptive: 'sonnet' },
  'gsd-research-synthesizer': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
  'gsd-debugger': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet', adaptive: 'opus' },
  'gsd-codebase-mapper': { quality: 'sonnet', balanced: 'haiku', budget: 'haiku', adaptive: 'haiku' },
  'gsd-verifier': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'sonnet' },
  'gsd-plan-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
  'gsd-integration-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
  'gsd-nyquist-auditor': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
  'gsd-ui-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku', adaptive: 'sonnet' },
  'gsd-ui-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
  'gsd-ui-auditor': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
  'gsd-doc-writer': { quality: 'opus', balanced: 'sonnet', budget: 'haiku', adaptive: 'sonnet' },
  'gsd-doc-verifier': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku', adaptive: 'haiku' },
};
// Valid profile names, taken from the 'gsd-planner' entry's keys.
const VALID_PROFILES = Object.keys(MODEL_PROFILES['gsd-planner']);
|
|
29
29
|
|