@grainulation/wheat 1.0.1 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -47,11 +47,76 @@ function loadJSON(filePath) {
47
47
  }
48
48
  }
49
49
 
50
+ /**
51
+ * Batch git queries for all sprint files at once.
52
+ * One git call total instead of 2 per sprint (30x+ faster for 16 sprints).
53
+ * Returns Map<filePath, { date: string|null, count: number }>
54
+ */
55
+ let _gitCache = null;
56
+
57
+ function batchGitInfo(filePaths) {
58
+ if (_gitCache) return _gitCache;
59
+ const info = new Map();
60
+ // Build a map from relative path (as git returns it) back to original filePath
61
+ const relToOrig = new Map();
62
+ const relPaths = [];
63
+ for (const fp of filePaths) {
64
+ const rel = path.relative(ROOT, path.resolve(ROOT, fp));
65
+ relToOrig.set(rel, fp);
66
+ relPaths.push(rel);
67
+ info.set(fp, { date: null, count: 0 });
68
+ }
69
+
70
+ if (filePaths.length === 0) { _gitCache = info; return info; }
71
+
72
+ // Single git call: get dates AND counts from one log traversal.
73
+ // Format: "COMMIT <date>" header per commit, then --name-only lists files.
74
+ // First occurrence of each file gives its last-commit date.
75
+ // Total occurrences per file gives its commit count.
76
+ try {
77
+ const result = execFileSync('git', [
78
+ 'log', '--format=COMMIT %aI', '--name-only',
79
+ '--', ...relPaths
80
+ ], { cwd: ROOT, timeout: 10000, stdio: ['ignore', 'pipe', 'pipe'] });
81
+ const lines = result.toString().split('\n');
82
+ const seenForDate = new Set();
83
+ let currentDate = null;
84
+ for (const line of lines) {
85
+ const trimmed = line.trim();
86
+ if (!trimmed) continue;
87
+ if (trimmed.startsWith('COMMIT ')) {
88
+ currentDate = trimmed.slice(7);
89
+ } else {
90
+ const orig = relToOrig.get(trimmed);
91
+ if (orig) {
92
+ const entry = info.get(orig);
93
+ entry.count++;
94
+ if (!seenForDate.has(trimmed)) {
95
+ seenForDate.add(trimmed);
96
+ entry.date = currentDate;
97
+ }
98
+ }
99
+ }
100
+ }
101
+ } catch { /* git unavailable, dates stay null, counts stay 0 */ }
102
+
103
+ _gitCache = info;
104
+ return info;
105
+ }
106
+
107
+ /** Reset git cache (called when ROOT changes). */
108
+ function resetGitCache() { _gitCache = null; }
109
+
50
110
  /**
51
111
  * Get the ISO timestamp of the most recent git commit touching a file.
52
112
  * Returns null if file is untracked or git is unavailable.
113
+ * Uses batch cache when available.
53
114
  */
54
115
  function lastGitCommitDate(filePath) {
116
+ if (_gitCache) {
117
+ const entry = _gitCache.get(filePath);
118
+ return entry ? entry.date : null;
119
+ }
55
120
  try {
56
121
  const result = execFileSync('git', [
57
122
  'log', '-1', '--format=%aI', '--', filePath
@@ -65,8 +130,13 @@ function lastGitCommitDate(filePath) {
65
130
 
66
131
  /**
67
132
  * Count git commits touching a file (proxy for activity level).
133
+ * Uses batch cache when available.
68
134
  */
69
135
  function gitCommitCount(filePath) {
136
+ if (_gitCache) {
137
+ const entry = _gitCache.get(filePath);
138
+ return entry ? entry.count : 0;
139
+ }
70
140
  try {
71
141
  const result = execFileSync('git', [
72
142
  'rev-list', '--count', 'HEAD', '--', filePath
@@ -193,7 +263,12 @@ function analyzeSprint(root) {
193
263
  */
194
264
  export function detectSprints(rootDir) {
195
265
  if (rootDir) ROOT = rootDir;
266
+ resetGitCache();
196
267
  const roots = findSprintRoots();
268
+
269
+ // Batch all git queries upfront: 1 git call instead of 2 per sprint
270
+ batchGitInfo(roots.map(r => r.claimsPath));
271
+
197
272
  const sprints = roots.map(analyzeSprint).filter(Boolean);
198
273
 
199
274
  // Separate candidates from archived/examples
@@ -13,7 +13,7 @@
13
13
 
14
14
  import fs from 'fs';
15
15
  import path from 'path';
16
- import { execFileSync } from 'child_process';
16
+
17
17
  import { fileURLToPath } from 'url';
18
18
  import { detectSprints } from './detect-sprints.js';
19
19
 
@@ -98,6 +98,27 @@ export function highestEvidence(claims) {
98
98
  * Falls back to a minimal scan if detect-sprints.js is unavailable.
99
99
  */
100
100
  function detectSprintsForManifest() {
101
+ // Check for cached sprint data from compiler (avoids re-running detectSprints)
102
+ if (process.env.WHEAT_SPRINTS_CACHE) {
103
+ try {
104
+ const parsed = JSON.parse(process.env.WHEAT_SPRINTS_CACHE);
105
+ const sprints = {};
106
+ for (const s of (parsed.sprints || [])) {
107
+ sprints[s.name] = {
108
+ question: s.question || '',
109
+ phase: s.phase || 'unknown',
110
+ claims_count: s.claims_count || 0,
111
+ active_claims: s.active_claims || 0,
112
+ path: s.path,
113
+ status: s.status,
114
+ last_git_activity: s.last_git_activity,
115
+ git_commit_count: s.git_commit_count,
116
+ };
117
+ }
118
+ return sprints;
119
+ } catch { /* fall through to live detection */ }
120
+ }
121
+
101
122
  // Try to use the exported function directly
102
123
  try {
103
124
  const parsed = detectSprints(ROOT);
@@ -148,20 +169,21 @@ function detectSprintsForManifest() {
148
169
  return sprints;
149
170
  }
150
171
 
151
- // --- Main (only when run directly) ---
172
+ // --- Callable manifest generation ---
152
173
 
153
- const isMain = process.argv[1] && fileURLToPath(import.meta.url) === path.resolve(process.argv[1]);
154
-
155
- if (isMain) {
156
- const t0 = performance.now();
157
-
158
- const claims = loadJSON(path.join(ROOT, 'claims.json'));
159
- const compilation = loadJSON(path.join(ROOT, 'compilation.json'));
174
+ /**
175
+ * Generate the manifest. Can be called directly (no subprocess needed).
176
+ * @param {string} dir - Root directory of the sprint
177
+ * @param {object} [opts] - Options
178
+ * @param {object} [opts.sprintsInfo] - Pre-computed sprint data (avoids re-running detectSprints)
179
+ * @returns {{ manifest: object, topicCount: number, fileCount: number, sprintCount: number }}
180
+ */
181
+ export function buildManifest(dir, opts = {}) {
182
+ const rootDir = dir || ROOT;
183
+ const claims = loadJSON(path.join(rootDir, 'claims.json'));
184
+ const compilation = loadJSON(path.join(rootDir, 'compilation.json'));
160
185
 
161
- if (!claims) {
162
- console.error('Error: claims.json not found or invalid at', path.join(ROOT, 'claims.json'));
163
- process.exit(1);
164
- }
186
+ if (!claims) return null;
165
187
 
166
188
  // 1. Build topic map from claims
167
189
  const topicMap = {};
@@ -183,8 +205,8 @@ if (isMain) {
183
205
  const scanDirs = ['research', 'prototypes', 'output', 'evidence', 'templates', 'test', 'docs'];
184
206
  const allFiles = {};
185
207
 
186
- for (const dir of scanDirs) {
187
- const files = walk(path.join(ROOT, dir));
208
+ for (const d of scanDirs) {
209
+ const files = walk(path.join(rootDir, d));
188
210
  for (const f of files) {
189
211
  const type = classifyFile(f);
190
212
  allFiles[f] = { topics: [], type };
@@ -192,18 +214,20 @@ if (isMain) {
192
214
  }
193
215
 
194
216
  // Also include root-level scripts/configs
195
- for (const entry of fs.readdirSync(ROOT)) {
196
- if (entry.startsWith('.') || entry === 'node_modules') continue;
197
- const full = path.join(ROOT, entry);
198
- try {
199
- if (fs.statSync(full).isFile()) {
200
- const type = classifyFile(entry);
201
- if (type !== 'other') {
202
- allFiles[entry] = { topics: [], type };
217
+ try {
218
+ for (const entry of fs.readdirSync(rootDir)) {
219
+ if (entry.startsWith('.') || entry === 'node_modules') continue;
220
+ const full = path.join(rootDir, entry);
221
+ try {
222
+ if (fs.statSync(full).isFile()) {
223
+ const type = classifyFile(entry);
224
+ if (type !== 'other') {
225
+ allFiles[entry] = { topics: [], type };
226
+ }
203
227
  }
204
- }
205
- } catch { /* skip */ }
206
- }
228
+ } catch { /* skip */ }
229
+ }
230
+ } catch { /* skip */ }
207
231
 
208
232
  // 3. Map files to topics using claim source artifacts and keyword heuristics
209
233
  const topicKeywords = {
@@ -217,14 +241,12 @@ if (isMain) {
217
241
  for (const [filePath, fileInfo] of Object.entries(allFiles)) {
218
242
  const lower = filePath.toLowerCase();
219
243
 
220
- // Heuristic: match file paths to topics via keywords
221
244
  for (const [topic, keywords] of Object.entries(topicKeywords)) {
222
245
  if (keywords.some(kw => lower.includes(kw))) {
223
246
  if (!fileInfo.topics.includes(topic)) fileInfo.topics.push(topic);
224
247
  }
225
248
  }
226
249
 
227
- // Claims that reference files as artifacts
228
250
  for (const claim of claims.claims) {
229
251
  if (claim.source?.artifact && filePath.includes(claim.source.artifact.replace(/^.*[/\\]prototypes[/\\]/, 'prototypes/'))) {
230
252
  if (!fileInfo.topics.includes(claim.topic)) {
@@ -233,7 +255,6 @@ if (isMain) {
233
255
  }
234
256
  }
235
257
 
236
- // Add files to topic map
237
258
  for (const topic of fileInfo.topics) {
238
259
  if (topicMap[topic]) {
239
260
  topicMap[topic].files.add(filePath);
@@ -246,14 +267,31 @@ if (isMain) {
246
267
  topicMap[topic].files = [...topicMap[topic].files].sort();
247
268
  }
248
269
 
249
- // 5. Detect sprints
250
- const sprints = detectSprintsForManifest();
270
+ // 5. Detect sprints (use cached data if provided)
271
+ let sprints;
272
+ if (opts.sprintsInfo) {
273
+ sprints = {};
274
+ for (const s of (opts.sprintsInfo.sprints || [])) {
275
+ sprints[s.name] = {
276
+ question: s.question || '',
277
+ phase: s.phase || 'unknown',
278
+ claims_count: s.claims_count || 0,
279
+ active_claims: s.active_claims || 0,
280
+ path: s.path,
281
+ status: s.status,
282
+ last_git_activity: s.last_git_activity,
283
+ git_commit_count: s.git_commit_count,
284
+ };
285
+ }
286
+ } else {
287
+ sprints = detectSprintsForManifest();
288
+ }
251
289
 
252
290
  // 6. Build final manifest
253
291
  const topicFiles = {};
254
- for (const [filePath, info] of Object.entries(allFiles)) {
255
- if (info.topics.length > 0) {
256
- topicFiles[filePath] = info;
292
+ for (const [filePath, mInfo] of Object.entries(allFiles)) {
293
+ if (mInfo.topics.length > 0) {
294
+ topicFiles[filePath] = mInfo;
257
295
  }
258
296
  }
259
297
 
@@ -266,15 +304,32 @@ if (isMain) {
266
304
  files: topicFiles
267
305
  };
268
306
 
269
- fs.writeFileSync(OUT_PATH, JSON.stringify(manifest, null, 2) + '\n');
270
- const elapsed = (performance.now() - t0).toFixed(1);
307
+ const outPath = path.join(rootDir, 'wheat-manifest.json');
308
+ fs.writeFileSync(outPath, JSON.stringify(manifest, null, 2) + '\n');
309
+
310
+ return {
311
+ manifest,
312
+ topicCount: Object.keys(topicMap).length,
313
+ fileCount: Object.keys(topicFiles).length,
314
+ sprintCount: Object.keys(sprints).length,
315
+ };
316
+ }
317
+
318
+ // --- Main (only when run directly) ---
319
+
320
+ const isMain = process.argv[1] && fileURLToPath(import.meta.url) === path.resolve(process.argv[1]);
321
+
322
+ if (isMain) {
323
+ const t0 = performance.now();
271
324
 
272
- // Summary
273
- const topicCount = Object.keys(topicMap).length;
274
- const fileCount = Object.keys(topicFiles).length;
275
- const sprintCount = Object.keys(sprints).length;
276
- const sizeBytes = Buffer.byteLength(JSON.stringify(manifest, null, 2));
325
+ const result = buildManifest(ROOT);
326
+ if (!result) {
327
+ console.error('Error: claims.json not found or invalid at', path.join(ROOT, 'claims.json'));
328
+ process.exit(1);
329
+ }
277
330
 
331
+ const elapsed = (performance.now() - t0).toFixed(1);
332
+ const sizeBytes = Buffer.byteLength(JSON.stringify(result.manifest, null, 2));
278
333
  console.log(`wheat-manifest.json generated in ${elapsed}ms`);
279
- console.log(` Topics: ${topicCount} | Files: ${fileCount} | Sprints: ${sprintCount} | Size: ${(sizeBytes / 1024).toFixed(1)}KB`);
334
+ console.log(` Topics: ${result.topicCount} | Files: ${result.fileCount} | Sprints: ${result.sprintCount} | Size: ${(sizeBytes / 1024).toFixed(1)}KB`);
280
335
  }
@@ -17,11 +17,13 @@
17
17
  import fs from 'fs';
18
18
  import crypto from 'crypto';
19
19
  import path from 'path';
20
- import { execFileSync } from 'child_process';
20
+
21
21
  import { fileURLToPath } from 'url';
22
22
 
23
23
  // Sprint detection — git-based, no config pointer needed (p013/f001)
24
24
  import { detectSprints } from './detect-sprints.js';
25
+ // Direct manifest generation — avoids subprocess + redundant detectSprints call
26
+ import { buildManifest } from './generate-manifest.js';
25
27
 
26
28
  const __filename = fileURLToPath(import.meta.url);
27
29
  const __dirname = path.dirname(__filename);
@@ -570,33 +572,21 @@ function diffCompilations(before, after) {
570
572
 
571
573
  // ─── Manifest Generation (topic map) ─────────────────────────────────────────
572
574
  /**
573
- * Run generate-manifest.js to produce wheat-manifest.json.
574
- * Called automatically after each compilation. Failures are non-fatal
575
- * (manifest is an optimization, not a correctness requirement).
576
- * @param {object} compilation - The compiled output (unused, but available for future use)
575
+ * Generate wheat-manifest.json by calling buildManifest() directly.
576
+ * No subprocess — reuses the already-imported module and sprint data.
577
+ * Failures are non-fatal (manifest is an optimization, not a correctness requirement).
577
578
  */
578
- function generateManifest(compilation, dir) {
579
+ function generateManifest(compilation, dir, sprintsInfo) {
579
580
  const baseDir = dir || TARGET_DIR;
580
- const manifestScript = path.join(baseDir, 'generate-manifest.js');
581
- if (!fs.existsSync(manifestScript)) {
582
- // Manifest generator not present — skip silently
583
- return;
584
- }
585
581
  try {
586
- const result = execFileSync(process.execPath, [manifestScript], {
587
- cwd: baseDir,
588
- timeout: 10000,
589
- stdio: ['ignore', 'pipe', 'pipe'],
590
- });
591
- // Print manifest summary on --summary runs (stdout captured above)
592
- const output = result.toString().trim();
593
- if (output && process.argv.includes('--summary')) {
594
- console.log(`\nManifest: ${output}`);
582
+ const result = buildManifest(baseDir, { sprintsInfo });
583
+ if (result && process.argv.includes('--summary')) {
584
+ console.log(`\nManifest: wheat-manifest.json generated`);
585
+ console.log(` Topics: ${result.topicCount} | Files: ${result.fileCount} | Sprints: ${result.sprintCount}`);
595
586
  }
596
587
  } catch (err) {
597
588
  // Non-fatal: warn but don't block compilation
598
- const stderr = err.stderr ? err.stderr.toString().trim() : err.message;
599
- console.error(`Warning: manifest generation failed — ${stderr}`);
589
+ console.error(`Warning: manifest generation failed ${err.message}`);
600
590
  }
601
591
  }
602
592
 
@@ -607,7 +597,7 @@ function generateManifest(compilation, dir) {
607
597
  * @param {string|null} outputPath - Path to write compilation.json (null = default from config)
608
598
  * @returns {object} The compiled output object
609
599
  */
610
- function compile(inputPath, outputPath, dir) {
600
+ function compile(inputPath, outputPath, dir, opts = {}) {
611
601
  const compilerVersion = '0.2.0';
612
602
  const baseDir = dir || TARGET_DIR;
613
603
  const claimsPath = inputPath || path.join(baseDir, config.compiler.claims);
@@ -671,25 +661,28 @@ function compile(inputPath, outputPath, dir) {
671
661
 
672
662
  // ── Sprint detection (git-based, non-fatal) ──────────────────────────────
673
663
  let sprintsInfo = { active: null, sprints: [] };
674
- try {
675
- sprintsInfo = detectSprints(baseDir);
676
- } catch (err) {
677
- // Non-fatal: sprint detection failure should not block compilation
678
- console.error(`Warning: sprint detection failed — ${err.message}`);
679
- }
664
+ let sprintSummaries = [];
665
+ if (!opts.skipSprintDetection) {
666
+ try {
667
+ sprintsInfo = detectSprints(baseDir);
668
+ } catch (err) {
669
+ // Non-fatal: sprint detection failure should not block compilation
670
+ console.error(`Warning: sprint detection failed — ${err.message}`);
671
+ }
680
672
 
681
- // Build sprint summaries: active sprint gets full compilation, others get summary entries
682
- const sprintSummaries = sprintsInfo.sprints.map(s => ({
683
- name: s.name,
684
- path: s.path,
685
- status: s.status,
686
- phase: s.phase,
687
- question: s.question,
688
- claims_count: s.claims_count,
689
- active_claims: s.active_claims,
690
- last_git_activity: s.last_git_activity,
691
- git_commit_count: s.git_commit_count,
692
- }));
673
+ // Build sprint summaries: active sprint gets full compilation, others get summary entries
674
+ sprintSummaries = sprintsInfo.sprints.map(s => ({
675
+ name: s.name,
676
+ path: s.path,
677
+ status: s.status,
678
+ phase: s.phase,
679
+ question: s.question,
680
+ claims_count: s.claims_count,
681
+ active_claims: s.active_claims,
682
+ last_git_activity: s.last_git_activity,
683
+ git_commit_count: s.git_commit_count,
684
+ }));
685
+ }
693
686
 
694
687
  const compilation = {
695
688
  compiled_at: new Date().toISOString(), // Non-deterministic metadata (excluded from certificate)
@@ -698,7 +691,12 @@ function compile(inputPath, outputPath, dir) {
698
691
  status,
699
692
  errors: readiness.blockers,
700
693
  warnings: readiness.warnings,
701
- resolved_claims: resolvedClaims,
694
+ resolved_claims: resolvedClaims.map(c => ({
695
+ id: c.id, type: c.type, topic: c.topic,
696
+ evidence: c.evidence, status: c.status, phase_added: c.phase_added,
697
+ source: c.source, conflicts_with: c.conflicts_with, resolved_by: c.resolved_by,
698
+ tags: c.tags,
699
+ })),
702
700
  conflict_graph: conflictGraph,
703
701
  coverage: coverageResult.coverage,
704
702
  corroboration: coverageResult.corroboration,
@@ -722,7 +720,10 @@ function compile(inputPath, outputPath, dir) {
722
720
  fs.writeFileSync(compilationOutputPath, JSON.stringify(compilation, null, 2));
723
721
 
724
722
  // Generate topic-map manifest (wheat-manifest.json)
725
- generateManifest(compilation, baseDir);
723
+ // Pass sprintsInfo to avoid re-running detectSprints in manifest generator
724
+ if (!opts.skipSprintDetection) {
725
+ generateManifest(compilation, baseDir, sprintsInfo);
726
+ }
726
727
 
727
728
  return compilation;
728
729
  }
@@ -788,6 +789,7 @@ Usage:
788
789
 
789
790
  Options:
790
791
  --dir <path> Resolve all paths relative to <path> instead of script location
792
+ --quiet, -q One-liner output (for scripts and AI agents)
791
793
  --help, -h Show this help message
792
794
  --json Output as JSON (works with --summary, --check, --gate, --scan, --next)`);
793
795
  process.exit(0);
@@ -864,8 +866,31 @@ if (outputIdx !== -1 && args[outputIdx + 1]) {
864
866
  outputPath = path.resolve(args[outputIdx + 1]);
865
867
  }
866
868
 
867
- const compilation = compile(inputPath, outputPath);
868
869
  const jsonFlag = args.includes('--json');
870
+ const quietFlag = args.includes('--quiet') || args.includes('-q');
871
+ const compilation = compile(inputPath, outputPath, undefined, { skipSprintDetection: quietFlag && !args.includes('--summary') });
872
+
873
+ // --quiet / -q: one-liner output for scripts and AI agents (~13 tokens vs ~4,600)
874
+ if (quietFlag && !args.includes('--summary')) {
875
+ const c = compilation;
876
+ const conflicts = c.sprint_meta.conflicted_claims || 0;
877
+ const suffix = conflicts > 0 ? ` (${conflicts} conflicts)` : '';
878
+ const line = `wheat: compiled ${c.sprint_meta.total_claims} claims, ${Object.keys(c.coverage).length} topics${suffix}`;
879
+ if (jsonFlag) {
880
+ console.log(JSON.stringify({
881
+ status: c.status,
882
+ claims: c.sprint_meta.total_claims,
883
+ active: c.sprint_meta.active_claims,
884
+ conflicts,
885
+ topics: Object.keys(c.coverage).length,
886
+ errors: c.errors.length,
887
+ warnings: c.warnings.length,
888
+ }));
889
+ } else {
890
+ console.log(line);
891
+ }
892
+ process.exit(c.status === 'blocked' ? 1 : 0);
893
+ }
869
894
 
870
895
  if (args.includes('--summary')) {
871
896
  const c = compilation;
package/lib/serve-mcp.js CHANGED
@@ -37,7 +37,7 @@ const __dirname = path.dirname(__filename);
37
37
  // --- Constants ---------------------------------------------------------------
38
38
 
39
39
  const SERVER_NAME = 'wheat';
40
- const SERVER_VERSION = '1.0.0';
40
+ const SERVER_VERSION = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8')).version;
41
41
  const PROTOCOL_VERSION = '2024-11-05';
42
42
 
43
43
  const VALID_TYPES = ['constraint', 'factual', 'estimate', 'risk', 'recommendation', 'feedback'];
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@grainulation/wheat",
3
- "version": "1.0.1",
3
+ "version": "1.0.3",
4
4
  "description": "Research-driven development framework — structured claims, compiled evidence, deterministic output",
5
5
  "license": "MIT",
6
6
  "author": "grainulation contributors",
@@ -2,6 +2,8 @@
2
2
 
3
3
  You are compiling the final decision brief for this Wheat sprint. This is the Bran compilation step — deterministic output from resolved claims.
4
4
 
5
+ **Default behavior:** Generate next steps only (fast). The full brief is opt-in via `--full`.
6
+
5
7
  ## Process
6
8
 
7
9
  1. **Run the compiler with check**:
@@ -15,9 +17,17 @@ You are compiling the final decision brief for this Wheat sprint. This is the Br
15
17
  - Suggest specific commands to fix each blocker
16
18
  - Do NOT proceed until compilation passes
17
19
 
18
- 2. **Read compilation.json** use ONLY `resolved_claims` as your source material. Never read claims.json directly for the brief.
20
+ 2. **Always: Generate next steps** — Read compilation.json, summarize sprint state (claim counts, conflicts, coverage), and suggest 2-4 concrete next actions. Then run `/next` to route through Farmer.
21
+
22
+ 3. **If `--full` flag is passed OR user explicitly asks for the brief:** Launch brief generation as a background agent so it's non-blocking. The user continues deciding next steps while the brief builds.
23
+
24
+ To launch in background: use the Agent tool with `run_in_background: true` to generate the full brief (steps 4-7 below). Tell the user "Brief generating in background — I'll let you know when it's ready."
25
+
26
+ If no `--full` flag, skip steps 4-7 entirely.
19
27
 
20
- 3. **Generate the brief as markdown**: Create `output/brief.md` with this structure:
28
+ 4. **Read compilation.json** — use ONLY `resolved_claims` as your source material. Never read claims.json directly for the brief.
29
+
30
+ 5. **Generate the brief as markdown**: Create `output/brief.md` with this structure:
21
31
 
22
32
  ```markdown
23
33
  # Decision Brief: [Sprint Question]
@@ -47,7 +57,12 @@ You are compiling the final decision brief for this Wheat sprint. This is the Br
47
57
  Compilation certificate: [hash] | Compiler: wheat v[version] | Claims: [count] | Compiled: [timestamp]
48
58
  ```
49
59
 
50
- 4. **Also generate brief as HTML**: Create `output/brief.html` — a clean, print-friendly HTML version for browser viewing.
60
+ 6. **Generate PDF** (if build-pdf.js exists):
61
+ ```bash
62
+ node build-pdf.js output/brief.md
63
+ ```
64
+
65
+ 7. **Also generate brief as HTML**: Create `output/brief.html` — a clean, print-friendly HTML version for browser viewing.
51
66
 
52
67
  ## Key rules
53
68
 
@@ -64,9 +79,10 @@ Commit: `wheat: /brief compiled — [total] claims, [conflicts resolved] conflic
64
79
 
65
80
  ## Tell the user
66
81
 
67
- - The brief is ready at `output/brief.md` and `output/brief.html`
82
+ - If next-steps only: show the sprint summary and route via `/next`
83
+ - If `--full`: confirm brief is generating in background, show next steps immediately
84
+ - When brief completes: notify with paths to `output/brief.md` and `output/brief.html`
68
85
  - Show the compilation certificate
69
- - Remind them they can share these with stakeholders
70
86
  - Mention `/present` if they need a presentation version
71
87
  - Mention `/feedback` for incorporating stakeholder responses
72
88