@c-d-cc/reap 0.7.9 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -9218,9 +9218,21 @@ class ReapPaths {
9218
9218
  get constraints() {
9219
9219
  return join(this.genome, "constraints.md");
9220
9220
  }
9221
+ get sourceMap() {
9222
+ return join(this.genome, "source-map.md");
9223
+ }
9221
9224
  get environment() {
9222
9225
  return join(this.root, "environment");
9223
9226
  }
9227
+ get environmentSummary() {
9228
+ return join(this.environment, "summary.md");
9229
+ }
9230
+ get environmentDocs() {
9231
+ return join(this.environment, "docs");
9232
+ }
9233
+ get environmentResources() {
9234
+ return join(this.environment, "resources");
9235
+ }
9224
9236
  get life() {
9225
9237
  return join(this.root, "life");
9226
9238
  }
@@ -9752,8 +9764,11 @@ var COMMAND_NAMES = [
9752
9764
  "reap.start",
9753
9765
  "reap.next",
9754
9766
  "reap.back",
9767
+ "reap.abort",
9755
9768
  "reap.status",
9756
9769
  "reap.sync",
9770
+ "reap.sync.genome",
9771
+ "reap.sync.environment",
9757
9772
  "reap.help",
9758
9773
  "reap.update",
9759
9774
  "reap.merge.start",
@@ -9785,6 +9800,8 @@ async function initProject(projectRoot, projectName, entryMode, preset, onProgre
9785
9800
  await mkdir3(paths.genome, { recursive: true });
9786
9801
  await mkdir3(paths.domain, { recursive: true });
9787
9802
  await mkdir3(paths.environment, { recursive: true });
9803
+ await mkdir3(join5(paths.environment, "docs"), { recursive: true });
9804
+ await mkdir3(join5(paths.environment, "resources"), { recursive: true });
9788
9805
  await mkdir3(paths.life, { recursive: true });
9789
9806
  await mkdir3(paths.backlog, { recursive: true });
9790
9807
  await mkdir3(paths.lineage, { recursive: true });
@@ -9861,7 +9878,7 @@ async function initProject(projectRoot, projectName, entryMode, preset, onProgre
9861
9878
  // src/cli/commands/update.ts
9862
9879
  import { readdir as readdir9, unlink as unlink3, rm as rm2, mkdir as mkdir6 } from "fs/promises";
9863
9880
  import { join as join10 } from "path";
9864
- import { execSync } from "child_process";
9881
+ import { execSync as execSync2 } from "child_process";
9865
9882
 
9866
9883
  // src/core/hooks.ts
9867
9884
  async function migrateHooks(dryRun = false) {
@@ -9948,12 +9965,57 @@ init_fs();
9948
9965
  var import_yaml2 = __toESM(require_dist(), 1);
9949
9966
  import { readdir as readdir5, rm } from "fs/promises";
9950
9967
  import { join as join6 } from "path";
9968
+
9969
+ // src/core/git.ts
9970
+ import { execSync } from "child_process";
9971
+ function gitShow(ref, path, cwd) {
9972
+ try {
9973
+ return execSync(`git show ${ref}:${path}`, { cwd, encoding: "utf-8", timeout: 1e4 });
9974
+ } catch {
9975
+ return null;
9976
+ }
9977
+ }
9978
+ function gitLsTree(ref, path, cwd) {
9979
+ try {
9980
+ const output = execSync(`git ls-tree -r --name-only ${ref} -- ${path}`, {
9981
+ cwd,
9982
+ encoding: "utf-8",
9983
+ timeout: 1e4
9984
+ });
9985
+ return output.trim().split(`
9986
+ `).filter(Boolean);
9987
+ } catch {
9988
+ return [];
9989
+ }
9990
+ }
9991
+ function gitAllBranches(cwd) {
9992
+ try {
9993
+ const output = execSync("git branch -a --format='%(refname:short)'", {
9994
+ cwd,
9995
+ encoding: "utf-8",
9996
+ timeout: 1e4
9997
+ });
9998
+ return output.trim().split(`
9999
+ `).filter(Boolean).map((b) => b.replace(/^origin\//, ""));
10000
+ } catch {
10001
+ return [];
10002
+ }
10003
+ }
10004
+ function gitCurrentBranch(cwd) {
10005
+ try {
10006
+ return execSync("git rev-parse --abbrev-ref HEAD", { cwd, encoding: "utf-8", timeout: 5000 }).trim();
10007
+ } catch {
10008
+ return null;
10009
+ }
10010
+ }
10011
+
10012
+ // src/core/compression.ts
9951
10013
  var LINEAGE_MAX_LINES = 5000;
9952
10014
  var MIN_GENERATIONS_FOR_COMPRESSION = 5;
9953
10015
  var LEVEL1_MAX_LINES = 40;
9954
- var LEVEL2_MAX_LINES = 60;
9955
- var LEVEL2_BATCH_SIZE = 5;
9956
- var RECENT_PROTECTED_COUNT = 3;
10016
+ var LEVEL1_PROTECTED_COUNT = 3;
10017
+ var LEVEL2_MIN_LEVEL1_COUNT = 100;
10018
+ var LEVEL2_PROTECTED_COUNT = 9;
9957
10019
  function extractGenNum(name) {
9958
10020
  const match = name.match(/^gen-(\d{3})/);
9959
10021
  return match ? parseInt(match[1], 10) : 0;
@@ -10059,7 +10121,9 @@ async function scanLineage(paths) {
10059
10121
  } catch {}
10060
10122
  return entries.sort((a, b) => {
10061
10123
  if (a.completedAt && b.completedAt) {
10062
- return a.completedAt.localeCompare(b.completedAt);
10124
+ const cmp = a.completedAt.localeCompare(b.completedAt);
10125
+ if (cmp !== 0)
10126
+ return cmp;
10063
10127
  }
10064
10128
  return a.genNum - b.genNum;
10065
10129
  });
@@ -10209,50 +10273,84 @@ async function compressLevel1(genDir, genName) {
10209
10273
  }
10210
10274
  return result;
10211
10275
  }
10212
- async function compressLevel2(level1Files, epochNum) {
10213
- const lines = [];
10214
- const genIds = level1Files.map((f) => f.name.replace(".md", "").match(/^gen-\d{3}(?:-[a-f0-9]{6})?/)?.[0] ?? f.name);
10215
- const first = genIds[0];
10216
- const last = genIds[genIds.length - 1];
10217
- lines.push(`# Epoch ${String(epochNum).padStart(3, "0")} (${first} ~ ${last})`);
10218
- lines.push("");
10276
+ async function findForkedByOtherBranches(paths, cwd) {
10277
+ const forked = new Set;
10278
+ const currentBranch = gitCurrentBranch(cwd);
10279
+ const branches = gitAllBranches(cwd).filter((b) => b !== currentBranch && b !== "HEAD");
10280
+ for (const branch of branches) {
10281
+ const files = gitLsTree(branch, ".reap/lineage/", cwd);
10282
+ for (const file of files) {
10283
+ if (!file.endsWith("meta.yml"))
10284
+ continue;
10285
+ const content = gitShow(branch, file, cwd);
10286
+ if (!content)
10287
+ continue;
10288
+ try {
10289
+ const meta = import_yaml2.default.parse(content);
10290
+ for (const parent of meta.parents) {
10291
+ forked.add(parent);
10292
+ }
10293
+ } catch {}
10294
+ }
10295
+ }
10296
+ return forked;
10297
+ }
10298
+ async function compressLevel2Single(level1Files, paths) {
10299
+ const compressed = [];
10300
+ const epochPath = join6(paths.lineage, "epoch.md");
10301
+ let existingMeta = { generations: [] };
10302
+ let existingBody = "";
10303
+ const existingContent = await readTextFile(epochPath);
10304
+ if (existingContent) {
10305
+ const parsed = parseFrontmatter(existingContent);
10306
+ if (parsed?.generations)
10307
+ existingMeta = parsed;
10308
+ existingBody = existingContent.replace(/^---\n[\s\S]*?\n---\n?/, "").trim();
10309
+ }
10310
+ const newBodyLines = [];
10219
10311
  for (const file of level1Files) {
10220
10312
  const content = await readTextFileOrThrow(file.path);
10221
- const bodyContent = content.replace(/^---\n[\s\S]*?\n---\n?/, "");
10222
- const headerMatch = bodyContent.match(/^# (gen-\d{3}(?:-[a-f0-9]{6})?)/m);
10223
- const goalMatch = bodyContent.match(/- Goal: (.+)/);
10224
- const periodMatch = bodyContent.match(/- (?:Started|Period): (.+)/);
10225
- const genomeMatch = bodyContent.match(/- Genome.*: (.+)/);
10226
- const resultMatch = bodyContent.match(/## Result: (.+)/);
10227
- const genId = headerMatch?.[1] ?? "unknown";
10228
- const goal = goalMatch?.[1] ?? "";
10229
- const result2 = resultMatch?.[1] ?? "";
10230
- lines.push(`## ${genId}: ${goal}`);
10231
- if (periodMatch)
10232
- lines.push(`- ${periodMatch[0].trim()}`);
10233
- if (genomeMatch)
10234
- lines.push(`- ${genomeMatch[0].trim()}`);
10235
- if (result2)
10236
- lines.push(`- Result: ${result2}`);
10237
- const changeSection = bodyContent.match(/## Genome Changes\n([\s\S]*?)(?=\n##|$)/);
10238
- if (changeSection && !changeSection[1].match(/^\|\s*\|\s*\|\s*\|\s*\|$/)) {
10239
- lines.push(`- Genome Changes: ${changeSection[1].trim().split(`
10240
- `)[0]}`);
10313
+ const meta = file.meta;
10314
+ const bodyContent = content.replace(/^---\n[\s\S]*?\n---\n?/, "").trim();
10315
+ if (meta) {
10316
+ existingMeta.generations.push({
10317
+ id: meta.id,
10318
+ parents: meta.parents,
10319
+ genomeHash: meta.genomeHash
10320
+ });
10241
10321
  }
10242
- lines.push("");
10243
- }
10244
- let result = lines.join(`
10322
+ const headerMatch = bodyContent.match(/^# (gen-\d{3}(?:-[a-f0-9]{6})?)/m);
10323
+ const genId = headerMatch?.[1] ?? meta?.id ?? "unknown";
10324
+ const goalMatch = bodyContent.match(/## Objective\n([\s\S]*?)(?=\n##|$)/);
10325
+ const goal = goalMatch?.[1]?.trim().split(`
10326
+ `)[0] ?? "";
10327
+ newBodyLines.push(`## ${genId}: ${goal}`);
10328
+ newBodyLines.push("");
10329
+ compressed.push(genId);
10330
+ }
10331
+ const frontmatter = `---
10332
+ ${import_yaml2.default.stringify(existingMeta).trim()}
10333
+ ---
10334
+ `;
10335
+ const allIds = existingMeta.generations.map((g) => g.id);
10336
+ const first = allIds[0] ?? "?";
10337
+ const last = allIds[allIds.length - 1] ?? "?";
10338
+ const header = `# Epoch (${first} ~ ${last})
10339
+
10340
+ `;
10341
+ const body = existingBody ? existingBody + `
10342
+
10343
+ ` + newBodyLines.join(`
10344
+ `) : newBodyLines.join(`
10245
10345
  `);
10246
- const resultLines = result.split(`
10346
+ await writeTextFile(epochPath, frontmatter + header + body.trim() + `
10247
10347
  `);
10248
- if (resultLines.length > LEVEL2_MAX_LINES) {
10249
- result = resultLines.slice(0, LEVEL2_MAX_LINES - 1).join(`
10250
- `) + `
10251
- [...truncated]`;
10348
+ for (const file of level1Files) {
10349
+ await rm(file.path);
10252
10350
  }
10253
- return result;
10351
+ return compressed;
10254
10352
  }
10255
- async function compressLineageIfNeeded(paths) {
10353
+ async function compressLineageIfNeeded(paths, projectRoot) {
10256
10354
  const result = { level1: [], level2: [] };
10257
10355
  const entries = await scanLineage(paths);
10258
10356
  const totalEntries = entries.filter((e) => e.type === "dir" || e.type === "level1").length;
@@ -10265,7 +10363,7 @@ async function compressLineageIfNeeded(paths) {
10265
10363
  }
10266
10364
  const leafNodes = await findLeafNodes(paths, entries);
10267
10365
  const allDirs = entries.filter((e) => e.type === "dir");
10268
- const recentIds = new Set(allDirs.slice(Math.max(0, allDirs.length - RECENT_PROTECTED_COUNT)).map((e) => e.genId));
10366
+ const recentIds = new Set(allDirs.slice(Math.max(0, allDirs.length - LEVEL1_PROTECTED_COUNT)).map((e) => e.genId));
10269
10367
  const compressibleDirs = allDirs.filter((dir) => !recentIds.has(dir.genId) && !leafNodes.has(dir.genId));
10270
10368
  for (const dir of compressibleDirs) {
10271
10369
  const currentTotal = await countDirLines(paths.lineage);
@@ -10280,24 +10378,26 @@ async function compressLineageIfNeeded(paths) {
10280
10378
  result.level1.push(genId);
10281
10379
  }
10282
10380
  const level1s = (await scanLineage(paths)).filter((e) => e.type === "level1");
10283
- if (level1s.length >= LEVEL2_BATCH_SIZE) {
10284
- const existingEpochs = (await scanLineage(paths)).filter((e) => e.type === "level2");
10285
- let epochNum = existingEpochs.length + 1;
10286
- const batchCount = Math.floor(level1s.length / LEVEL2_BATCH_SIZE);
10287
- for (let i = 0;i < batchCount; i++) {
10288
- const batch = level1s.slice(i * LEVEL2_BATCH_SIZE, (i + 1) * LEVEL2_BATCH_SIZE);
10289
- const files = batch.map((e) => ({
10290
- name: e.name,
10291
- path: join6(paths.lineage, e.name)
10292
- }));
10293
- const compressed = await compressLevel2(files, epochNum);
10294
- const outPath = join6(paths.lineage, `epoch-${String(epochNum).padStart(3, "0")}.md`);
10295
- await writeTextFile(outPath, compressed);
10296
- for (const file of files) {
10297
- await rm(file.path);
10381
+ if (level1s.length > LEVEL2_MIN_LEVEL1_COUNT) {
10382
+ const forkedIds = projectRoot ? await findForkedByOtherBranches(paths, projectRoot) : new Set;
10383
+ let forkCutoff = level1s.length;
10384
+ for (let i = 0;i < level1s.length; i++) {
10385
+ if (forkedIds.has(level1s[i].genId)) {
10386
+ forkCutoff = i;
10387
+ break;
10298
10388
  }
10299
- result.level2.push(`epoch-${String(epochNum).padStart(3, "0")}`);
10300
- epochNum++;
10389
+ }
10390
+ const protectedStart = Math.max(0, level1s.length - LEVEL2_PROTECTED_COUNT);
10391
+ const compressEnd = Math.min(forkCutoff, protectedStart);
10392
+ const compressible = level1s.slice(0, compressEnd);
10393
+ if (compressible.length > 0) {
10394
+ const filesWithMeta = await Promise.all(compressible.map(async (e) => ({
10395
+ name: e.name,
10396
+ path: join6(paths.lineage, e.name),
10397
+ meta: await readFileMeta(join6(paths.lineage, e.name))
10398
+ })));
10399
+ const compressed = await compressLevel2Single(filesWithMeta, paths);
10400
+ result.level2.push(...compressed);
10301
10401
  }
10302
10402
  }
10303
10403
  return result;
@@ -10519,7 +10619,7 @@ class GenerationManager {
10519
10619
  }
10520
10620
  } catch {}
10521
10621
  await writeTextFile(this.paths.currentYml, "");
10522
- const compression = await compressLineageIfNeeded(this.paths);
10622
+ const compression = await compressLineageIfNeeded(this.paths, this.paths.projectRoot);
10523
10623
  return compression;
10524
10624
  }
10525
10625
  async save(state) {
@@ -10650,12 +10750,15 @@ async function migrateLineage(paths) {
10650
10750
  // src/cli/commands/update.ts
10651
10751
  function selfUpgrade() {
10652
10752
  try {
10653
- const installed = execSync("reap --version", { encoding: "utf-8", timeout: 5000 }).trim();
10654
- const latest = execSync("npm view @c-d-cc/reap version", { encoding: "utf-8", timeout: 1e4 }).trim();
10753
+ const installed = execSync2("reap --version", { encoding: "utf-8", timeout: 5000 }).trim();
10754
+ if (installed.includes("+dev")) {
10755
+ return { upgraded: false };
10756
+ }
10757
+ const latest = execSync2("npm view @c-d-cc/reap version", { encoding: "utf-8", timeout: 1e4 }).trim();
10655
10758
  if (installed === latest) {
10656
10759
  return { upgraded: false };
10657
10760
  }
10658
- execSync("npm update -g @c-d-cc/reap", { encoding: "utf-8", timeout: 60000, stdio: "pipe" });
10761
+ execSync2("npm update -g @c-d-cc/reap", { encoding: "utf-8", timeout: 60000, stdio: "pipe" });
10659
10762
  return { upgraded: true, from: installed, to: latest };
10660
10763
  } catch {
10661
10764
  return { upgraded: false };
@@ -10756,6 +10859,35 @@ async function updateProject(projectRoot, dryRun = false) {
10756
10859
  }
10757
10860
  }
10758
10861
  if (await paths.isReapProject()) {
10862
+ const projectClaudeCommands = join10(paths.projectRoot, ".claude", "commands");
10863
+ await mkdir6(projectClaudeCommands, { recursive: true });
10864
+ const reapCmdFiles = (await readdir9(ReapPaths.userReapCommands)).filter((f) => f.startsWith("reap.") && f.endsWith(".md"));
10865
+ let cmdInstalled = 0;
10866
+ for (const file of reapCmdFiles) {
10867
+ const src = await readTextFileOrThrow(join10(ReapPaths.userReapCommands, file));
10868
+ const destPath = join10(projectClaudeCommands, file);
10869
+ try {
10870
+ const s = await import("fs/promises").then((m) => m.lstat(destPath));
10871
+ if (s.isSymbolicLink()) {
10872
+ if (!dryRun)
10873
+ await unlink3(destPath);
10874
+ } else {
10875
+ const existing = await readTextFile(destPath);
10876
+ if (existing !== null && existing === src)
10877
+ continue;
10878
+ if (!dryRun)
10879
+ await unlink3(destPath);
10880
+ }
10881
+ } catch {}
10882
+ if (!dryRun)
10883
+ await writeTextFile(destPath, src);
10884
+ cmdInstalled++;
10885
+ }
10886
+ if (cmdInstalled > 0) {
10887
+ result.updated.push(`.claude/commands/ (${cmdInstalled} synced)`);
10888
+ } else {
10889
+ result.skipped.push(`.claude/commands/ (${reapCmdFiles.length} unchanged)`);
10890
+ }
10759
10891
  await migrateLegacyFiles(paths, dryRun, result);
10760
10892
  if (await needsMigration(paths)) {
10761
10893
  if (!dryRun) {
@@ -10850,7 +10982,8 @@ async function getStatus(projectRoot) {
10850
10982
 
10851
10983
  // src/cli/commands/fix.ts
10852
10984
  var import_yaml6 = __toESM(require_dist(), 1);
10853
- import { mkdir as mkdir7, stat as stat3 } from "fs/promises";
10985
+ import { mkdir as mkdir7, stat as stat3, copyFile } from "fs/promises";
10986
+ import { join as join11 } from "path";
10854
10987
  init_fs();
10855
10988
  async function dirExists(path) {
10856
10989
  try {
@@ -10878,6 +11011,23 @@ async function fixProject(projectRoot) {
10878
11011
  fixed.push(`Recreated missing directory: ${dir.name}/`);
10879
11012
  }
10880
11013
  }
11014
+ const genomeFiles = [
11015
+ { path: paths.principles, name: "principles.md" },
11016
+ { path: paths.conventions, name: "conventions.md" },
11017
+ { path: paths.constraints, name: "constraints.md" },
11018
+ { path: paths.sourceMap, name: "source-map.md" }
11019
+ ];
11020
+ for (const gf of genomeFiles) {
11021
+ if (!await fileExists(gf.path)) {
11022
+ const templateSrc = join11(ReapPaths.packageGenomeDir, gf.name);
11023
+ if (await fileExists(templateSrc)) {
11024
+ await copyFile(templateSrc, gf.path);
11025
+ fixed.push(`Restored missing genome/${gf.name} from template`);
11026
+ } else {
11027
+ issues.push(`genome/${gf.name} is missing and no template found`);
11028
+ }
11029
+ }
11030
+ }
10881
11031
  if (!await fileExists(paths.config)) {
10882
11032
  issues.push("config.yml is missing. Run 'reap init' to recreate the project.");
10883
11033
  }
@@ -10908,8 +11058,8 @@ async function fixProject(projectRoot) {
10908
11058
 
10909
11059
  // src/cli/index.ts
10910
11060
  init_fs();
10911
- import { join as join11 } from "path";
10912
- program.name("reap").description("REAP — Recursive Evolutionary Autonomous Pipeline").version("0.7.9");
11061
+ import { join as join12 } from "path";
11062
+ program.name("reap").description("REAP — Recursive Evolutionary Autonomous Pipeline").version("0.9.0");
10913
11063
  program.command("init").description("Initialize a new REAP project (Genesis)").argument("[project-name]", "Project name (defaults to current directory name)").option("-m, --mode <mode>", "Entry mode: greenfield, migration, adoption", "greenfield").option("-p, --preset <preset>", "Bootstrap with a genome preset (e.g., bun-hono-react)").action(async (projectName, options) => {
10914
11064
  try {
10915
11065
  const cwd = process.cwd();
@@ -11037,10 +11187,10 @@ program.command("help").description("Show REAP commands, slash commands, and wor
11037
11187
  if (l === "korean" || l === "ko")
11038
11188
  lang = "ko";
11039
11189
  }
11040
- const helpDir = join11(ReapPaths.packageTemplatesDir, "help");
11041
- let helpText = await readTextFile(join11(helpDir, `${lang}.txt`));
11190
+ const helpDir = join12(ReapPaths.packageTemplatesDir, "help");
11191
+ let helpText = await readTextFile(join12(helpDir, `${lang}.txt`));
11042
11192
  if (!helpText)
11043
- helpText = await readTextFile(join11(helpDir, "en.txt"));
11193
+ helpText = await readTextFile(join12(helpDir, "en.txt"));
11044
11194
  if (!helpText) {
11045
11195
  console.log("Help file not found. Run 'reap update' to install templates.");
11046
11196
  return;
@@ -0,0 +1,73 @@
1
+ ---
2
+ description: "REAP Abort — Abort the current generation and return to initial state"
3
+ ---
4
+
5
+ # Abort
6
+
7
+ Abort the current generation and return to initial state.
8
+
9
+ ## Gate (Preconditions)
10
+ - Read `.reap/life/current.yml`
11
+ - If no active Generation: ERROR — "No active Generation to abort." **STOP**
12
+
13
+ ## Steps
14
+
15
+ ### 1. Show Current State
16
+ - Display: generation id, goal, current stage
17
+ - Ask: "이 generation을 abort 하시겠습니까?"
18
+ - If no: **STOP**
19
+
20
+ ### 2. Abort Reason
21
+ - Ask: "abort 사유를 입력해주세요"
22
+
23
+ ### 3. Source Code Handling
24
+ - Check `git diff --name-only` for uncommitted changes
25
+ - If no changes: skip to Step 4
26
+ - If changes exist, present the changed files and ask:
27
+ - **rollback**: `git checkout .` — 모든 변경 revert
28
+ - **stash**: `git stash push -m "reap-abort: {gen-id}"` — stash에 저장
29
+ - **hold**: 변경 유지 (working tree에 그대로)
30
+
31
+ ### 4. Backlog Save
32
+ - Ask: "Goal과 진행 상황을 backlog에 저장할까요? (yes/no)"
33
+ - If yes:
34
+ - Read `01-objective.md` for goal/spec content
35
+ - Read `03-implementation.md` if exists for progress
36
+ - Create `.reap/life/backlog/aborted-{gen-id}.md`:
37
+ ```markdown
38
+ ---
39
+ type: task
40
+ status: pending
41
+ aborted: true
42
+ abortedFrom: {gen-id}
43
+ abortReason: "{reason}"
44
+ stage: {current stage}
45
+ sourceAction: rollback|stash|hold
46
+ stashRef: "reap-abort: {gen-id}" # only if stash
47
+ changedFiles:
48
+ - {file1}
49
+ - {file2}
50
+ ---
51
+
52
+ # [Aborted] {goal}
53
+
54
+ ## Original Goal
55
+ {goal from objective}
56
+
57
+ ## Progress
58
+ {stage} 단계에서 중단.
59
+ {implementation summary if available}
60
+
61
+ ## Resume Guide
62
+ {if stash: "git stash pop으로 코드 복구"}
63
+ {if hold: "코드 변경이 working tree에 유지됨"}
64
+ {if rollback: "코드 변경이 revert됨. objective부터 재시작 필요"}
65
+ ```
66
+
67
+ ### 5. Cleanup
68
+ - Delete all artifact files from `.reap/life/` (`01-*.md` through `05-*.md`)
69
+ - Clear `current.yml` (write empty content)
70
+ - Do NOT record in lineage (incomplete generation)
71
+
72
+ ## Completion
73
+ - "Generation {gen-id} aborted. {backlog saved / not saved}."
@@ -104,6 +104,25 @@ Do NOT finalize Genome changes without running Validation Commands.
104
104
  19. 반복 패턴이 없으면 skip — "반복 패턴이 감지되지 않았습니다."
105
105
  20. **Limit**: 한 번에 최대 2개까지만 제안 (과부하 방지)
106
106
 
107
+ ### Phase 6: Lineage Compression
108
+
109
+ 21. Check if lineage compression is needed:
110
+ - Count total lines in `.reap/lineage/` and number of generations
111
+ - **Level 1 trigger**: total lines > 5,000 AND generations >= 5
112
+ - **Level 2 trigger**: Level 1 compressed `.md` files > 100
113
+ 22. If Level 1 triggered:
114
+ - Compress oldest uncompressed generation directories into single `.md` files
115
+ - Protect: recent 3 generations + DAG leaf nodes
116
+ - Preserve DAG metadata in frontmatter (id, parents, genomeHash)
117
+ 23. If Level 2 triggered:
118
+ - Run `git fetch --all` to update remote refs
119
+ - Scan all branches (local + remote) for fork points
120
+ - Compress eligible Level 1 files into single `epoch.md` (append if exists)
121
+ - Protect: recent 9 Level 1 files + all generations at/after fork points
122
+ - epoch.md frontmatter contains `generations` array with hash chain (id, parents, genomeHash)
123
+ 24. Report compression results: "Compressed N generations (Level 1: X, Level 2: Y)"
124
+ - If no compression needed: skip silently
125
+
107
126
  ## Self-Verification
108
127
  Before saving the artifact, verify:
109
128
  - [ ] Are lessons concrete and applicable to the next generation? (No vague "do better next time")
@@ -9,6 +9,7 @@ Start a merge generation by specifying a target branch to merge into the current
9
9
  ## Gate
10
10
  - Verify no active generation exists
11
11
  - Verify the target branch exists (`git rev-parse --verify {branch}`)
12
+ - Verify that the common ancestor is NOT inside `epoch.md`. If it is: ERROR — "The common ancestor has been epoch-compressed and cannot be used as a merge base. The generation is archived in epoch.md for historical reference only."
12
13
 
13
14
  ## Steps
14
15
 
@@ -25,22 +25,11 @@ Brainstorming is triggered based on goal complexity — simple tasks skip it, co
25
25
 
26
26
  ## Steps
27
27
 
28
- ### 1. Environment Scan
29
- - Read all files in `.reap/environment/`
30
- - This directory stores external context: external API docs, product requirements, team decision logs, reference materials, etc.
31
- - **If empty (first time setup)**, conduct an interactive Environment setup:
32
- 1. **Brief the human on what Environment means**:
33
- > "Environment is where we record information **external to this project** — things that affect development but are outside the project's direct control.
34
- > Unlike the Genome (design and knowledge the team decides), Environment captures the **constraints and context from the outside world**:
35
- > connected systems, infrastructure, organizational rules, external API specs, etc."
36
- 2. **Ask questions interactively** (one at a time, skip if not applicable):
37
- - "Are there any **connected systems**? (other services, external APIs, legacy systems, etc.)"
38
- - "What is the **infrastructure/deployment environment**? (cloud, on-premise, containers, etc.)"
39
- - "Are there **organizational rules or guidelines** to follow? (company standards, coding policies, security policies, regulatory compliance, etc.)"
40
- - "Are there **external reference documents** to incorporate? (API specs, system architecture diagrams, integration guides, etc.)"
41
- 3. **Save collected information** to `.reap/environment/` as structured markdown files (e.g., `integrations.md`, `infrastructure.md`, `org-guidelines.md`)
42
- 4. If the human has nothing to add, that's fine — Environment is optional
43
- - If files already exist, review them and ask if any updates are needed
28
+ ### 1. Environment Check
29
+ - Read `.reap/environment/summary.md`
30
+ - If summary.md exists: review it for relevant context to this generation's goal
31
+ - **If empty or missing**: inform the user "Environment 설정되지 않았습니다. `/reap.sync.environment`로 외부 환경 정보를 설정할 수 있습니다."
32
+ - Do NOT block; proceed with the objective. Environment is optional but recommended.
44
33
 
45
34
  ### 2. Previous Generation Reference
46
35
  - If a `05-completion.md` exists for the most recent generation in `.reap/lineage/`, read it
@@ -45,7 +45,20 @@ Do NOT make technical decisions without reading the Genome (conventions.md, cons
45
45
  - Each task must be **one logical unit of change**
46
46
  - Specify dependencies and parallelization potential between tasks
47
47
 
48
- ### 5. Human Confirmation
48
+ ### 5. E2E Test Scenarios (lifecycle 변경 시 필수)
49
+ - If this generation modifies lifecycle logic (compression, generation, merge, abort, stage transitions, etc.):
50
+ - Define specific E2E test scenarios with expected outcomes
51
+ - Each scenario: setup → action → assertion
52
+ - Example:
53
+ ```
54
+ ## E2E Test Scenarios
55
+ 1. Normal abort + rollback → source reverted, artifacts deleted, current.yml empty
56
+ 2. Abort + stash → stash created, recoverable
57
+ 3. No active generation → error message
58
+ ```
59
+ - If not a lifecycle change: skip this step
60
+
61
+ ### 6. Human Confirmation
49
62
  - Finalize the plan with the human
50
63
 
51
64
  ## Task Format
@@ -15,14 +15,15 @@ description: "REAP Start — Start a new Generation"
15
15
  - If backlog items exist:
16
16
  - Present the list with title and priority for each item
17
17
  - Ask: "Would you like to select one of these, or enter a new goal?"
18
- - If the human selects a backlog item: use its title/content as the goal, then update the selected item's frontmatter to `status: consumed` and add `consumedBy: gen-XXX-{hash}`
18
+ - If the human selects a backlog item: **note the selection** (do NOT mark it consumed yet — the generation ID has not been generated)
19
19
  - If the human wants a new goal: proceed to Step 1
20
20
  - If no backlog items exist: proceed to Step 1
21
21
 
22
- 1. Ask the human for the goal of this generation
22
+ 1. Ask the human for the goal of this generation (or use selected backlog item's goal)
23
23
  2. Count existing generations in `.reap/lineage/` to determine the genomeVersion
24
24
  3. Generate the next generation ID (existing count + 1, in `gen-XXX-{hash}` format where `{hash}` is a short content hash)
25
- 4. Write the following to `current.yml`:
25
+ 4. **If a backlog item was selected in Step 0**: now mark it as `status: consumed` and add `consumedBy: gen-XXX-{hash}` (using the ID just generated)
26
+ 6. Write the following to `current.yml`:
26
27
  ```yaml
27
28
  id: gen-XXX-{hash}
28
29
  goal: [goal provided by the human]
@@ -33,10 +34,10 @@ description: "REAP Start — Start a new Generation"
33
34
  - stage: objective
34
35
  at: [current ISO 8601 timestamp]
35
36
  ```
36
- 5. Immediately create `.reap/life/01-objective.md` from the artifact template with the Goal section filled in
37
+ 7. Immediately create `.reap/life/01-objective.md` from the artifact template with the Goal section filled in
37
38
 
38
39
  ### Hook Execution (Generation Start)
39
- 6. Scan `.reap/hooks/` for files matching `onGenerationStart.*`
40
+ 8. Scan `.reap/hooks/` for files matching `onGenerationStart.*`
40
41
  - For each matched file (sorted by `order` from frontmatter, then alphabetically):
41
42
  1. Read the frontmatter (`condition`, `order`)
42
43
  2. Evaluate `condition` by running `.reap/hooks/conditions/{condition}.sh` (exit 0 = met, non-zero = skip):
@@ -0,0 +1,84 @@
1
+ ---
2
+ description: "REAP Sync Environment — Discover and document external environment dependencies"
3
+ ---
4
+
5
+ # Sync Environment
6
+
7
+ Discover external systems, APIs, infrastructure, and constraints that affect this project. Populate the `.reap/environment/` 3-layer structure.
8
+
9
+ ## Gate (Preconditions)
10
+ - Read `.reap/life/current.yml`
11
+ - If active Generation exists: switch to **Backlog Mode** (record as `type: environment-change`)
12
+ - If no active Generation: proceed with **Sync Mode** (modify environment directly after human confirmation)
13
+
14
+ ## Environment 3-Layer Structure
15
+
16
+ ```
17
+ .reap/environment/
18
+ ├── summary.md # Session context (~100 lines max)
19
+ ├── docs/ # Main reference docs (agent reads these)
20
+ └── resources/ # Raw materials (user-managed)
21
+ ├── *.pdf, *.md # Original documents
22
+ └── links.md # External URLs + summaries
23
+ ```
24
+
25
+ - **summary.md**: Auto-generated overview of all docs/. Loaded into session context.
26
+ - **docs/**: One file per environment topic. ~100 lines each. AI + human maintained.
27
+ - **resources/**: User-provided originals. No line limit. AI reads when deeper detail needed.
28
+
29
+ ## Steps
30
+
31
+ ### 1. Source Code Scan
32
+ Detect hints of external dependencies from:
33
+ - `package.json` / `requirements.txt` / `go.mod` — SDK/client libraries (e.g., `discord.js`, `@aws-sdk/*`, `stripe`)
34
+ - Config files — `.env`, `.env.example`, `wrangler.toml`, `docker-compose.yml`, `vercel.json`
35
+ - API client code — HTTP clients, webhook handlers, OAuth configs
36
+ - Infrastructure — Dockerfile, CI/CD configs, deployment scripts
37
+
38
+ Present findings:
39
+ ```
40
+ 🔍 Detected external dependencies:
41
+ - discord.js → Discord Bot API
42
+ - @supabase/supabase-js → Supabase (DB + Auth)
43
+ - wrangler.toml → Cloudflare Workers
44
+ ```
45
+
46
+ ### 2. User Interview
47
+ Ask the user to confirm and expand. Goal: **capture ALL external systems** that affect this project.
48
+
49
+ Questions (one at a time, skip if already covered):
50
+ 1. "감지된 외부 서비스들이 맞나요? 추가/수정할 것이 있나요?"
51
+ 2. "그 외에 연동되는 외부 서비스, API, 시스템이 있나요?"
52
+ 3. "배포/인프라 환경을 알려주세요 (호스팅, CI/CD, 도메인 등)"
53
+ 4. "따라야 하는 조직 규칙이나 외부 제약이 있나요? (보안 정책, 규제 등)"
54
+ 5. "참고해야 할 외부 문서나 링크가 있나요? (API docs, 스펙 등)"
55
+
56
+ For each confirmed item:
57
+ - Ask: "관련 문서/링크가 있으면 알려주세요 (없으면 skip)"
58
+ - If provided: save to `resources/` (file or `links.md` entry)
59
+
60
+ ### 3. Generate docs/
61
+ For each confirmed environment topic, create a file in `docs/`:
62
+ - File name: `{topic-slug}.md` (e.g., `discord-api.md`, `infrastructure.md`)
63
+ - Content: structured markdown with key info the agent needs during implementation
64
+ - Sections: Overview, Key Constraints, API/Config Details, References (→ resources/)
65
+ - ~100 lines max per file
66
+
67
+ ### 4. Generate summary.md
68
+ Aggregate all docs/ into a concise summary:
69
+ - One section per environment topic
70
+ - Key constraints and gotchas highlighted
71
+ - Links to docs/ files for detail
72
+ - **~100 lines max** (this gets loaded into every session)
73
+
74
+ ### 5. Verify
75
+ - List all created/updated files
76
+ - Show summary.md content to user for confirmation
77
+ - Ask: "빠진 환경 정보가 있나요?"
78
+
79
+ ## Backlog Mode (active Generation)
80
+ - Record each discovered environment item as `type: environment-change` in `.reap/life/backlog/`
81
+ - "Environment 변경사항이 backlog에 기록되었습니다. Completion에서 적용됩니다."
82
+
83
+ ## Completion
84
+ - "Environment synced. {N} docs created, summary.md updated."
@@ -0,0 +1,109 @@
1
+ ---
2
+ description: "REAP Sync Genome — Synchronize Genome with current source code"
3
+ ---
4
+
5
+ # Sync Genome
6
+
7
+ Analyze the current source code and update the Genome to reflect reality.
8
+
9
+ <HARD-GATE>
10
+ If an active Generation exists (`.reap/life/current.yml` has content), do NOT modify Genome directly.
11
+ Instead, record discovered differences as `type: genome-change` items in `.reap/life/backlog/` and inform the human.
12
+ Only proceed with direct Genome modification when NO active Generation exists.
13
+ </HARD-GATE>
14
+
15
+ ## Gate (Preconditions)
16
+ - Read `.reap/life/current.yml`
17
+ - If active Generation exists: switch to **Backlog Mode** (record differences, do not modify Genome)
18
+ - If no active Generation: proceed with **Sync Mode** (modify Genome directly after human confirmation)
19
+
20
+ ## Steps
21
+
22
+ ### 1. Read Current Genome
23
+ - Read all files in `.reap/genome/` (principles.md, conventions.md, constraints.md, domain/)
24
+ - Note current genomeVersion from the most recent generation in `.reap/lineage/`
25
+
26
+ ### 2. Analyze Source Code
27
+ Scan the project to understand its current state:
28
+
29
+ **Tech Stack & Dependencies**:
30
+ - package.json, tsconfig.json, Dockerfile, docker-compose.yml, etc.
31
+ - New dependencies added, removed, or version-changed since Genome was last updated
32
+
33
+ **Architecture & Structure**:
34
+ - Directory structure and patterns (layers, modules, services)
35
+ - Entry points, routing, API structure
36
+ - Database, ORM, migration setup
37
+
38
+ **Conventions**:
39
+ - Linter/formatter configs (.eslintrc, .prettierrc, biome.json, etc.)
40
+ - Test setup and patterns (test framework, file naming, coverage config)
41
+ - Git hooks, CI/CD config
42
+ - Code patterns observed in the source (naming, error handling, etc.)
43
+
44
+ **Constraints**:
45
+ - Build commands, test commands, validation commands
46
+ - Environment requirements, runtime constraints
47
+ - External service dependencies
48
+
49
+ **Domain Knowledge** (→ `genome/domain/`):
50
+ - Read `~/.reap/templates/domain-guide.md` for domain file writing principles
51
+ - Scan source code for business rules NOT derivable from infrastructure analysis:
52
+ - State machines and status transitions (e.g., post lifecycle, order states)
53
+ - Policy rules with thresholds, limits, or conditions (e.g., rate limits, scoring criteria)
54
+ - Classification/branching logic driven by business categories (e.g., template selection by type)
55
+ - Hardcoded domain constants (keyword lists, prompt templates, magic numbers with business meaning)
56
+ - Workflow orchestration sequences (e.g., approval flows, pipeline stages)
57
+ - For each discovered domain rule cluster, evaluate:
58
+ - "Would an agent implementing this feature ask 'where is this rule?'" → YES = create domain file
59
+ - "Does a single item in an upper-level genome file require 3+ lines of explanation?" → YES = extract to domain file
60
+ - Even if `genome/domain/` is currently empty, treat it as "not yet created" rather than "not needed"
61
+
62
+ ### 3. Diff Analysis
63
+ Compare source analysis with current Genome and identify:
64
+ - **Additions**: Things in code but not in Genome
65
+ - **Changes**: Things in Genome that no longer match code
66
+ - **Removals**: Things in Genome that no longer exist in code
67
+ - **Gaps**: Areas where Genome has placeholders but code has established patterns
68
+ - **Domain gaps**: Business rules in code that have no corresponding `domain/` file
69
+
70
+ ### 4. Report to Human
71
+ Present a structured diff report:
72
+
73
+ ```
74
+ 🔄 Genome Sync Report
75
+ ━━━━━━━━━━━━━━━━━━━━━
76
+
77
+ 📝 principles.md
78
+ + [New] API-first design pattern observed
79
+ ~ [Changed] Layer structure: added shared/ directory
80
+
81
+ 📝 conventions.md
82
+ + [New] Biome used for linting (replacing ESLint)
83
+ ~ [Changed] Test pattern: using vitest instead of jest
84
+
85
+ 📝 constraints.md
86
+ + [New] Validation command: bun test
87
+ ~ [Changed] Runtime: Node.js compatible (was Bun-only)
88
+
89
+ 📁 domain/
90
+ + [Suggest] Create lifecycle-rules.md for REAP lifecycle logic
91
+ ```
92
+
93
+ ### 5. Apply Changes
94
+
95
+ **Sync Mode** (no active Generation):
96
+ - For each difference, ask the human: "Apply this change? (yes/no/modify)"
97
+ - Apply confirmed changes to the corresponding Genome files
98
+ - Follow Genome writing principles:
99
+ - Each file ≤ 100 lines
100
+ - If exceeding, extract to `domain/`
101
+ - Follow `~/.reap/templates/domain-guide.md` for domain files
102
+
103
+ **Backlog Mode** (active Generation):
104
+ - Record each difference as a `type: genome-change` backlog item in `.reap/life/backlog/`
105
+ - Inform: "Genome changes recorded in backlog. They will be applied at the Completion stage."
106
+
107
+ ## Completion
108
+ - **Sync Mode**: "Genome synchronized. [N] changes applied."
109
+ - **Backlog Mode**: "Genome differences recorded as [N] backlog items. Apply during Completion."
@@ -1,109 +1,17 @@
1
1
  ---
2
- description: "REAP Sync — Synchronize Genome with current source code"
2
+ description: "REAP Sync — Synchronize both Genome and Environment with current state"
3
3
  ---
4
4
 
5
- # Sync (Genome Synchronization)
5
+ # Sync (Full)
6
6
 
7
- Analyze the current source code and update the Genome to reflect reality.
8
-
9
- <HARD-GATE>
10
- If an active Generation exists (`.reap/life/current.yml` has content), do NOT modify Genome directly.
11
- Instead, record discovered differences as `type: genome-change` items in `.reap/life/backlog/` and inform the human.
12
- Only proceed with direct Genome modification when NO active Generation exists.
13
- </HARD-GATE>
14
-
15
- ## Gate (Preconditions)
16
- - Read `.reap/life/current.yml`
17
- - If active Generation exists: switch to **Backlog Mode** (record differences, do not modify Genome)
18
- - If no active Generation: proceed with **Sync Mode** (modify Genome directly after human confirmation)
7
+ Run both Genome and Environment synchronization.
19
8
 
20
9
  ## Steps
21
10
 
22
- ### 1. Read Current Genome
23
- - Read all files in `.reap/genome/` (principles.md, conventions.md, constraints.md, domain/)
24
- - Note current genomeVersion from the most recent generation in `.reap/lineage/`
25
-
26
- ### 2. Analyze Source Code
27
- Scan the project to understand its current state:
28
-
29
- **Tech Stack & Dependencies**:
30
- - package.json, tsconfig.json, Dockerfile, docker-compose.yml, etc.
31
- - New dependencies added, removed, or version-changed since Genome was last updated
32
-
33
- **Architecture & Structure**:
34
- - Directory structure and patterns (layers, modules, services)
35
- - Entry points, routing, API structure
36
- - Database, ORM, migration setup
37
-
38
- **Conventions**:
39
- - Linter/formatter configs (.eslintrc, .prettierrc, biome.json, etc.)
40
- - Test setup and patterns (test framework, file naming, coverage config)
41
- - Git hooks, CI/CD config
42
- - Code patterns observed in the source (naming, error handling, etc.)
43
-
44
- **Constraints**:
45
- - Build commands, test commands, validation commands
46
- - Environment requirements, runtime constraints
47
- - External service dependencies
48
-
49
- **Domain Knowledge** (→ `genome/domain/`):
50
- - Read `~/.reap/templates/domain-guide.md` for domain file writing principles
51
- - Scan source code for business rules NOT derivable from infrastructure analysis:
52
- - State machines and status transitions (e.g., post lifecycle, order states)
53
- - Policy rules with thresholds, limits, or conditions (e.g., rate limits, scoring criteria)
54
- - Classification/branching logic driven by business categories (e.g., template selection by type)
55
- - Hardcoded domain constants (keyword lists, prompt templates, magic numbers with business meaning)
56
- - Workflow orchestration sequences (e.g., approval flows, pipeline stages)
57
- - For each discovered domain rule cluster, evaluate:
58
- - "Would an agent implementing this feature ask 'where is this rule?'" → YES = create domain file
59
- - "Does a single item in an upper-level genome file require 3+ lines of explanation?" → YES = extract to domain file
60
- - Even if `genome/domain/` is currently empty, treat it as "not yet created" rather than "not needed"
61
-
62
- ### 3. Diff Analysis
63
- Compare source analysis with current Genome and identify:
64
- - **Additions**: Things in code but not in Genome
65
- - **Changes**: Things in Genome that no longer match code
66
- - **Removals**: Things in Genome that no longer exist in code
67
- - **Gaps**: Areas where Genome has placeholders but code has established patterns
68
- - **Domain gaps**: Business rules in code that have no corresponding `domain/` file
69
-
70
- ### 4. Report to Human
71
- Present a structured diff report:
72
-
73
- ```
74
- 🔄 Genome Sync Report
75
- ━━━━━━━━━━━━━━━━━━━━━
76
-
77
- 📝 principles.md
78
- + [New] API-first design pattern observed
79
- ~ [Changed] Layer structure: added shared/ directory
80
-
81
- 📝 conventions.md
82
- + [New] Biome used for linting (replacing ESLint)
83
- ~ [Changed] Test pattern: using vitest instead of jest
84
-
85
- 📝 constraints.md
86
- + [New] Validation command: bun test
87
- ~ [Changed] Runtime: Node.js compatible (was Bun-only)
88
-
89
- 📁 domain/
90
- + [Suggest] Create lifecycle-rules.md for REAP lifecycle logic
91
- ```
92
-
93
- ### 5. Apply Changes
94
-
95
- **Sync Mode** (no active Generation):
96
- - For each difference, ask the human: "Apply this change? (yes/no/modify)"
97
- - Apply confirmed changes to the corresponding Genome files
98
- - Follow Genome writing principles:
99
- - Each file ≤ 100 lines
100
- - If exceeding, extract to `domain/`
101
- - Follow `~/.reap/templates/domain-guide.md` for domain files
102
-
103
- **Backlog Mode** (active Generation):
104
- - Record each difference as a `type: genome-change` backlog item in `.reap/life/backlog/`
105
- - Inform: "Genome changes recorded in backlog. They will be applied at the Completion stage."
11
+ 1. Execute `/reap.sync.genome` — synchronize Genome with source code
12
+ 2. Execute `/reap.sync.environment` — discover and document external environment
106
13
 
107
- ## Completion
108
- - **Sync Mode**: "Genome synchronized. [N] changes applied."
109
- - **Backlog Mode**: "Genome differences recorded as [N] backlog items. Apply during Completion."
14
+ ## Usage
15
+ - `/reap.sync` — full sync (recommended after init or major changes)
16
+ - `/reap.sync.genome` — genome only
17
+ - `/reap.sync.environment` — environment only
@@ -79,7 +79,9 @@ let updateAvailableMessage = '';
79
79
  const { configContent } = gl.parseConfig(configFile);
80
80
  const installed = gl.exec('reap --version');
81
81
  const latest = gl.exec('npm view @c-d-cc/reap version');
82
- if (installed && latest && installed !== latest) {
82
+ if (installed && installed.includes('+dev')) {
83
+ // Local dev build — skip version check entirely
84
+ } else if (installed && latest && installed !== latest) {
83
85
  const autoUpdate = configContent ? /^autoUpdate:\s*true/m.test(configContent) : false;
84
86
  if (autoUpdate) {
85
87
  const updated = gl.exec('npm update -g @c-d-cc/reap');
@@ -100,6 +102,10 @@ const reapGuide = gl.readFile(guideFile) || '';
100
102
  log('Loading Genome...');
101
103
  const { content: genomeContent, l1Lines } = gl.loadGenome(genomeDir);
102
104
 
105
+ // Step 3b: Load Environment summary
106
+ const envSummaryFile = path.join(reapDir, 'environment', 'summary.md');
107
+ const envSummary = gl.readFile(envSummaryFile) || '';
108
+
103
109
  // Step 4: Check Genome staleness
104
110
  log('Checking sync...');
105
111
  const { genomeStaleWarning, commitsSince } = gl.detectStaleness(projectRoot);
@@ -160,7 +166,9 @@ const sessionInitDisplay = initFormat
160
166
  // Step 6: Output JSON
161
167
  log('Done. Injecting context.');
162
168
 
163
- const reapContext = `<REAP_WORKFLOW>\n${reapGuide}\n\n---\n\n## Genome (Project Knowledge — treat as authoritative source of truth)\n${genomeContent}\n\n---\n\n## Current State\n${generationContext}${staleSection}${strictSection}${updateSection}\n\n## Session Init (display to user on first message)\n${sessionInitDisplay}\n\n## Rules\n1. ALL development work MUST follow the REAP lifecycle. Do NOT bypass it.\n2. Before writing any code, check if a Generation is active and what stage it is in.\n3. If a Generation is active, use \`${nextCmd}\` to proceed with the current stage.\n4. If no Generation is active, use \`/reap.start\` to start a new one.\n5. Do NOT implement features, fix bugs, or make changes outside of the REAP lifecycle unless the user explicitly asks to bypass it.\n6. When the user says "reap evolve", "next stage", "proceed", or similar — invoke the appropriate REAP skill.\n7. **Genome is the authoritative knowledge source.** When making decisions about architecture, conventions, or constraints, ALWAYS reference the Genome first. If code contradicts Genome, flag it as a potential genome-change backlog item.\n8. If you notice the Genome is outdated or missing information relevant to your current task, inform the user and suggest running \`/reap.sync\`.\n</REAP_WORKFLOW>`;
169
+ const envSection = envSummary ? `\n\n---\n\n## Environment (External Context)\n${envSummary}` : '';
170
+
171
+ const reapContext = `<REAP_WORKFLOW>\n${reapGuide}\n\n---\n\n## Genome (Project Knowledge — treat as authoritative source of truth)\n${genomeContent}${envSection}\n\n---\n\n## Current State\n${generationContext}${staleSection}${strictSection}${updateSection}\n\n## Session Init (display to user on first message)\n${sessionInitDisplay}\n\n## Rules\n1. ALL development work MUST follow the REAP lifecycle. Do NOT bypass it.\n2. Before writing any code, check if a Generation is active and what stage it is in.\n3. If a Generation is active, use \`${nextCmd}\` to proceed with the current stage.\n4. If no Generation is active, use \`/reap.start\` to start a new one.\n5. Do NOT implement features, fix bugs, or make changes outside of the REAP lifecycle unless the user explicitly asks to bypass it.\n6. When the user says "reap evolve", "next stage", "proceed", or similar — invoke the appropriate REAP skill.\n7. **Genome is the authoritative knowledge source.** When making decisions about architecture, conventions, or constraints, ALWAYS reference the Genome first. If code contradicts Genome, flag it as a potential genome-change backlog item.\n8. If you notice the Genome is outdated or missing information relevant to your current task, inform the user and suggest running \`/reap.sync\`.\n</REAP_WORKFLOW>`;
164
172
 
165
173
  process.stdout.write(JSON.stringify({
166
174
  hookSpecificOutput: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@c-d-cc/reap",
3
- "version": "0.7.9",
3
+ "version": "0.9.0",
4
4
  "description": "Recursive Evolutionary Autonomous Pipeline — AI and humans evolve software across generations",
5
5
  "type": "module",
6
6
  "license": "MIT",