@c-d-cc/reap 0.7.9 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -9218,6 +9218,9 @@ class ReapPaths {
9218
9218
  get constraints() {
9219
9219
  return join(this.genome, "constraints.md");
9220
9220
  }
9221
+ get sourceMap() {
9222
+ return join(this.genome, "source-map.md");
9223
+ }
9221
9224
  get environment() {
9222
9225
  return join(this.root, "environment");
9223
9226
  }
@@ -9752,6 +9755,7 @@ var COMMAND_NAMES = [
9752
9755
  "reap.start",
9753
9756
  "reap.next",
9754
9757
  "reap.back",
9758
+ "reap.abort",
9755
9759
  "reap.status",
9756
9760
  "reap.sync",
9757
9761
  "reap.help",
@@ -9861,7 +9865,7 @@ async function initProject(projectRoot, projectName, entryMode, preset, onProgre
9861
9865
  // src/cli/commands/update.ts
9862
9866
  import { readdir as readdir9, unlink as unlink3, rm as rm2, mkdir as mkdir6 } from "fs/promises";
9863
9867
  import { join as join10 } from "path";
9864
- import { execSync } from "child_process";
9868
+ import { execSync as execSync2 } from "child_process";
9865
9869
 
9866
9870
  // src/core/hooks.ts
9867
9871
  async function migrateHooks(dryRun = false) {
@@ -9948,12 +9952,57 @@ init_fs();
9948
9952
  var import_yaml2 = __toESM(require_dist(), 1);
9949
9953
  import { readdir as readdir5, rm } from "fs/promises";
9950
9954
  import { join as join6 } from "path";
9955
+
9956
+ // src/core/git.ts
9957
+ import { execSync } from "child_process";
9958
+ function gitShow(ref, path, cwd) {
9959
+ try {
9960
+ return execSync(`git show ${ref}:${path}`, { cwd, encoding: "utf-8", timeout: 1e4 });
9961
+ } catch {
9962
+ return null;
9963
+ }
9964
+ }
9965
+ function gitLsTree(ref, path, cwd) {
9966
+ try {
9967
+ const output = execSync(`git ls-tree -r --name-only ${ref} -- ${path}`, {
9968
+ cwd,
9969
+ encoding: "utf-8",
9970
+ timeout: 1e4
9971
+ });
9972
+ return output.trim().split(`
9973
+ `).filter(Boolean);
9974
+ } catch {
9975
+ return [];
9976
+ }
9977
+ }
9978
+ function gitAllBranches(cwd) {
9979
+ try {
9980
+ const output = execSync("git branch -a --format='%(refname:short)'", {
9981
+ cwd,
9982
+ encoding: "utf-8",
9983
+ timeout: 1e4
9984
+ });
9985
+ return output.trim().split(`
9986
+ `).filter(Boolean).map((b) => b.replace(/^origin\//, ""));
9987
+ } catch {
9988
+ return [];
9989
+ }
9990
+ }
9991
+ function gitCurrentBranch(cwd) {
9992
+ try {
9993
+ return execSync("git rev-parse --abbrev-ref HEAD", { cwd, encoding: "utf-8", timeout: 5000 }).trim();
9994
+ } catch {
9995
+ return null;
9996
+ }
9997
+ }
9998
+
9999
+ // src/core/compression.ts
9951
10000
  var LINEAGE_MAX_LINES = 5000;
9952
10001
  var MIN_GENERATIONS_FOR_COMPRESSION = 5;
9953
10002
  var LEVEL1_MAX_LINES = 40;
9954
- var LEVEL2_MAX_LINES = 60;
9955
- var LEVEL2_BATCH_SIZE = 5;
9956
- var RECENT_PROTECTED_COUNT = 3;
10003
+ var LEVEL1_PROTECTED_COUNT = 3;
10004
+ var LEVEL2_MIN_LEVEL1_COUNT = 100;
10005
+ var LEVEL2_PROTECTED_COUNT = 9;
9957
10006
  function extractGenNum(name) {
9958
10007
  const match = name.match(/^gen-(\d{3})/);
9959
10008
  return match ? parseInt(match[1], 10) : 0;
@@ -10059,7 +10108,9 @@ async function scanLineage(paths) {
10059
10108
  } catch {}
10060
10109
  return entries.sort((a, b) => {
10061
10110
  if (a.completedAt && b.completedAt) {
10062
- return a.completedAt.localeCompare(b.completedAt);
10111
+ const cmp = a.completedAt.localeCompare(b.completedAt);
10112
+ if (cmp !== 0)
10113
+ return cmp;
10063
10114
  }
10064
10115
  return a.genNum - b.genNum;
10065
10116
  });
@@ -10209,50 +10260,84 @@ async function compressLevel1(genDir, genName) {
10209
10260
  }
10210
10261
  return result;
10211
10262
  }
10212
- async function compressLevel2(level1Files, epochNum) {
10213
- const lines = [];
10214
- const genIds = level1Files.map((f) => f.name.replace(".md", "").match(/^gen-\d{3}(?:-[a-f0-9]{6})?/)?.[0] ?? f.name);
10215
- const first = genIds[0];
10216
- const last = genIds[genIds.length - 1];
10217
- lines.push(`# Epoch ${String(epochNum).padStart(3, "0")} (${first} ~ ${last})`);
10218
- lines.push("");
10263
+ async function findForkedByOtherBranches(paths, cwd) {
10264
+ const forked = new Set;
10265
+ const currentBranch = gitCurrentBranch(cwd);
10266
+ const branches = gitAllBranches(cwd).filter((b) => b !== currentBranch && b !== "HEAD");
10267
+ for (const branch of branches) {
10268
+ const files = gitLsTree(branch, ".reap/lineage/", cwd);
10269
+ for (const file of files) {
10270
+ if (!file.endsWith("meta.yml"))
10271
+ continue;
10272
+ const content = gitShow(branch, file, cwd);
10273
+ if (!content)
10274
+ continue;
10275
+ try {
10276
+ const meta = import_yaml2.default.parse(content);
10277
+ for (const parent of meta.parents) {
10278
+ forked.add(parent);
10279
+ }
10280
+ } catch {}
10281
+ }
10282
+ }
10283
+ return forked;
10284
+ }
10285
+ async function compressLevel2Single(level1Files, paths) {
10286
+ const compressed = [];
10287
+ const epochPath = join6(paths.lineage, "epoch.md");
10288
+ let existingMeta = { generations: [] };
10289
+ let existingBody = "";
10290
+ const existingContent = await readTextFile(epochPath);
10291
+ if (existingContent) {
10292
+ const parsed = parseFrontmatter(existingContent);
10293
+ if (parsed?.generations)
10294
+ existingMeta = parsed;
10295
+ existingBody = existingContent.replace(/^---\n[\s\S]*?\n---\n?/, "").trim();
10296
+ }
10297
+ const newBodyLines = [];
10219
10298
  for (const file of level1Files) {
10220
10299
  const content = await readTextFileOrThrow(file.path);
10221
- const bodyContent = content.replace(/^---\n[\s\S]*?\n---\n?/, "");
10222
- const headerMatch = bodyContent.match(/^# (gen-\d{3}(?:-[a-f0-9]{6})?)/m);
10223
- const goalMatch = bodyContent.match(/- Goal: (.+)/);
10224
- const periodMatch = bodyContent.match(/- (?:Started|Period): (.+)/);
10225
- const genomeMatch = bodyContent.match(/- Genome.*: (.+)/);
10226
- const resultMatch = bodyContent.match(/## Result: (.+)/);
10227
- const genId = headerMatch?.[1] ?? "unknown";
10228
- const goal = goalMatch?.[1] ?? "";
10229
- const result2 = resultMatch?.[1] ?? "";
10230
- lines.push(`## ${genId}: ${goal}`);
10231
- if (periodMatch)
10232
- lines.push(`- ${periodMatch[0].trim()}`);
10233
- if (genomeMatch)
10234
- lines.push(`- ${genomeMatch[0].trim()}`);
10235
- if (result2)
10236
- lines.push(`- Result: ${result2}`);
10237
- const changeSection = bodyContent.match(/## Genome Changes\n([\s\S]*?)(?=\n##|$)/);
10238
- if (changeSection && !changeSection[1].match(/^\|\s*\|\s*\|\s*\|\s*\|$/)) {
10239
- lines.push(`- Genome Changes: ${changeSection[1].trim().split(`
10240
- `)[0]}`);
10300
+ const meta = file.meta;
10301
+ const bodyContent = content.replace(/^---\n[\s\S]*?\n---\n?/, "").trim();
10302
+ if (meta) {
10303
+ existingMeta.generations.push({
10304
+ id: meta.id,
10305
+ parents: meta.parents,
10306
+ genomeHash: meta.genomeHash
10307
+ });
10241
10308
  }
10242
- lines.push("");
10243
- }
10244
- let result = lines.join(`
10309
+ const headerMatch = bodyContent.match(/^# (gen-\d{3}(?:-[a-f0-9]{6})?)/m);
10310
+ const genId = headerMatch?.[1] ?? meta?.id ?? "unknown";
10311
+ const goalMatch = bodyContent.match(/## Objective\n([\s\S]*?)(?=\n##|$)/);
10312
+ const goal = goalMatch?.[1]?.trim().split(`
10313
+ `)[0] ?? "";
10314
+ newBodyLines.push(`## ${genId}: ${goal}`);
10315
+ newBodyLines.push("");
10316
+ compressed.push(genId);
10317
+ }
10318
+ const frontmatter = `---
10319
+ ${import_yaml2.default.stringify(existingMeta).trim()}
10320
+ ---
10321
+ `;
10322
+ const allIds = existingMeta.generations.map((g) => g.id);
10323
+ const first = allIds[0] ?? "?";
10324
+ const last = allIds[allIds.length - 1] ?? "?";
10325
+ const header = `# Epoch (${first} ~ ${last})
10326
+
10327
+ `;
10328
+ const body = existingBody ? existingBody + `
10329
+
10330
+ ` + newBodyLines.join(`
10331
+ `) : newBodyLines.join(`
10245
10332
  `);
10246
- const resultLines = result.split(`
10333
+ await writeTextFile(epochPath, frontmatter + header + body.trim() + `
10247
10334
  `);
10248
- if (resultLines.length > LEVEL2_MAX_LINES) {
10249
- result = resultLines.slice(0, LEVEL2_MAX_LINES - 1).join(`
10250
- `) + `
10251
- [...truncated]`;
10335
+ for (const file of level1Files) {
10336
+ await rm(file.path);
10252
10337
  }
10253
- return result;
10338
+ return compressed;
10254
10339
  }
10255
- async function compressLineageIfNeeded(paths) {
10340
+ async function compressLineageIfNeeded(paths, projectRoot) {
10256
10341
  const result = { level1: [], level2: [] };
10257
10342
  const entries = await scanLineage(paths);
10258
10343
  const totalEntries = entries.filter((e) => e.type === "dir" || e.type === "level1").length;
@@ -10265,7 +10350,7 @@ async function compressLineageIfNeeded(paths) {
10265
10350
  }
10266
10351
  const leafNodes = await findLeafNodes(paths, entries);
10267
10352
  const allDirs = entries.filter((e) => e.type === "dir");
10268
- const recentIds = new Set(allDirs.slice(Math.max(0, allDirs.length - RECENT_PROTECTED_COUNT)).map((e) => e.genId));
10353
+ const recentIds = new Set(allDirs.slice(Math.max(0, allDirs.length - LEVEL1_PROTECTED_COUNT)).map((e) => e.genId));
10269
10354
  const compressibleDirs = allDirs.filter((dir) => !recentIds.has(dir.genId) && !leafNodes.has(dir.genId));
10270
10355
  for (const dir of compressibleDirs) {
10271
10356
  const currentTotal = await countDirLines(paths.lineage);
@@ -10280,24 +10365,26 @@ async function compressLineageIfNeeded(paths) {
10280
10365
  result.level1.push(genId);
10281
10366
  }
10282
10367
  const level1s = (await scanLineage(paths)).filter((e) => e.type === "level1");
10283
- if (level1s.length >= LEVEL2_BATCH_SIZE) {
10284
- const existingEpochs = (await scanLineage(paths)).filter((e) => e.type === "level2");
10285
- let epochNum = existingEpochs.length + 1;
10286
- const batchCount = Math.floor(level1s.length / LEVEL2_BATCH_SIZE);
10287
- for (let i = 0;i < batchCount; i++) {
10288
- const batch = level1s.slice(i * LEVEL2_BATCH_SIZE, (i + 1) * LEVEL2_BATCH_SIZE);
10289
- const files = batch.map((e) => ({
10290
- name: e.name,
10291
- path: join6(paths.lineage, e.name)
10292
- }));
10293
- const compressed = await compressLevel2(files, epochNum);
10294
- const outPath = join6(paths.lineage, `epoch-${String(epochNum).padStart(3, "0")}.md`);
10295
- await writeTextFile(outPath, compressed);
10296
- for (const file of files) {
10297
- await rm(file.path);
10368
+ if (level1s.length > LEVEL2_MIN_LEVEL1_COUNT) {
10369
+ const forkedIds = projectRoot ? await findForkedByOtherBranches(paths, projectRoot) : new Set;
10370
+ let forkCutoff = level1s.length;
10371
+ for (let i = 0;i < level1s.length; i++) {
10372
+ if (forkedIds.has(level1s[i].genId)) {
10373
+ forkCutoff = i;
10374
+ break;
10298
10375
  }
10299
- result.level2.push(`epoch-${String(epochNum).padStart(3, "0")}`);
10300
- epochNum++;
10376
+ }
10377
+ const protectedStart = Math.max(0, level1s.length - LEVEL2_PROTECTED_COUNT);
10378
+ const compressEnd = Math.min(forkCutoff, protectedStart);
10379
+ const compressible = level1s.slice(0, compressEnd);
10380
+ if (compressible.length > 0) {
10381
+ const filesWithMeta = await Promise.all(compressible.map(async (e) => ({
10382
+ name: e.name,
10383
+ path: join6(paths.lineage, e.name),
10384
+ meta: await readFileMeta(join6(paths.lineage, e.name))
10385
+ })));
10386
+ const compressed = await compressLevel2Single(filesWithMeta, paths);
10387
+ result.level2.push(...compressed);
10301
10388
  }
10302
10389
  }
10303
10390
  return result;
@@ -10519,7 +10606,7 @@ class GenerationManager {
10519
10606
  }
10520
10607
  } catch {}
10521
10608
  await writeTextFile(this.paths.currentYml, "");
10522
- const compression = await compressLineageIfNeeded(this.paths);
10609
+ const compression = await compressLineageIfNeeded(this.paths, this.paths.projectRoot);
10523
10610
  return compression;
10524
10611
  }
10525
10612
  async save(state) {
@@ -10650,12 +10737,12 @@ async function migrateLineage(paths) {
10650
10737
  // src/cli/commands/update.ts
10651
10738
  function selfUpgrade() {
10652
10739
  try {
10653
- const installed = execSync("reap --version", { encoding: "utf-8", timeout: 5000 }).trim();
10654
- const latest = execSync("npm view @c-d-cc/reap version", { encoding: "utf-8", timeout: 1e4 }).trim();
10740
+ const installed = execSync2("reap --version", { encoding: "utf-8", timeout: 5000 }).trim();
10741
+ const latest = execSync2("npm view @c-d-cc/reap version", { encoding: "utf-8", timeout: 1e4 }).trim();
10655
10742
  if (installed === latest) {
10656
10743
  return { upgraded: false };
10657
10744
  }
10658
- execSync("npm update -g @c-d-cc/reap", { encoding: "utf-8", timeout: 60000, stdio: "pipe" });
10745
+ execSync2("npm update -g @c-d-cc/reap", { encoding: "utf-8", timeout: 60000, stdio: "pipe" });
10659
10746
  return { upgraded: true, from: installed, to: latest };
10660
10747
  } catch {
10661
10748
  return { upgraded: false };
@@ -10850,7 +10937,8 @@ async function getStatus(projectRoot) {
10850
10937
 
10851
10938
  // src/cli/commands/fix.ts
10852
10939
  var import_yaml6 = __toESM(require_dist(), 1);
10853
- import { mkdir as mkdir7, stat as stat3 } from "fs/promises";
10940
+ import { mkdir as mkdir7, stat as stat3, copyFile } from "fs/promises";
10941
+ import { join as join11 } from "path";
10854
10942
  init_fs();
10855
10943
  async function dirExists(path) {
10856
10944
  try {
@@ -10878,6 +10966,23 @@ async function fixProject(projectRoot) {
10878
10966
  fixed.push(`Recreated missing directory: ${dir.name}/`);
10879
10967
  }
10880
10968
  }
10969
+ const genomeFiles = [
10970
+ { path: paths.principles, name: "principles.md" },
10971
+ { path: paths.conventions, name: "conventions.md" },
10972
+ { path: paths.constraints, name: "constraints.md" },
10973
+ { path: paths.sourceMap, name: "source-map.md" }
10974
+ ];
10975
+ for (const gf of genomeFiles) {
10976
+ if (!await fileExists(gf.path)) {
10977
+ const templateSrc = join11(ReapPaths.packageGenomeDir, gf.name);
10978
+ if (await fileExists(templateSrc)) {
10979
+ await copyFile(templateSrc, gf.path);
10980
+ fixed.push(`Restored missing genome/${gf.name} from template`);
10981
+ } else {
10982
+ issues.push(`genome/${gf.name} is missing and no template found`);
10983
+ }
10984
+ }
10985
+ }
10881
10986
  if (!await fileExists(paths.config)) {
10882
10987
  issues.push("config.yml is missing. Run 'reap init' to recreate the project.");
10883
10988
  }
@@ -10908,8 +11013,8 @@ async function fixProject(projectRoot) {
10908
11013
 
10909
11014
  // src/cli/index.ts
10910
11015
  init_fs();
10911
- import { join as join11 } from "path";
10912
- program.name("reap").description("REAP — Recursive Evolutionary Autonomous Pipeline").version("0.7.9");
11016
+ import { join as join12 } from "path";
11017
+ program.name("reap").description("REAP — Recursive Evolutionary Autonomous Pipeline").version("0.8.0");
10913
11018
  program.command("init").description("Initialize a new REAP project (Genesis)").argument("[project-name]", "Project name (defaults to current directory name)").option("-m, --mode <mode>", "Entry mode: greenfield, migration, adoption", "greenfield").option("-p, --preset <preset>", "Bootstrap with a genome preset (e.g., bun-hono-react)").action(async (projectName, options) => {
10914
11019
  try {
10915
11020
  const cwd = process.cwd();
@@ -11037,10 +11142,10 @@ program.command("help").description("Show REAP commands, slash commands, and wor
11037
11142
  if (l === "korean" || l === "ko")
11038
11143
  lang = "ko";
11039
11144
  }
11040
- const helpDir = join11(ReapPaths.packageTemplatesDir, "help");
11041
- let helpText = await readTextFile(join11(helpDir, `${lang}.txt`));
11145
+ const helpDir = join12(ReapPaths.packageTemplatesDir, "help");
11146
+ let helpText = await readTextFile(join12(helpDir, `${lang}.txt`));
11042
11147
  if (!helpText)
11043
- helpText = await readTextFile(join11(helpDir, "en.txt"));
11148
+ helpText = await readTextFile(join12(helpDir, "en.txt"));
11044
11149
  if (!helpText) {
11045
11150
  console.log("Help file not found. Run 'reap update' to install templates.");
11046
11151
  return;
@@ -0,0 +1,73 @@
1
+ ---
2
+ description: "REAP Abort — Abort the current generation and return to initial state"
3
+ ---
4
+
5
+ # Abort
6
+
7
+ Abort the current generation and return to initial state.
8
+
9
+ ## Gate (Preconditions)
10
+ - Read `.reap/life/current.yml`
11
+ - If no active Generation: ERROR — "No active Generation to abort." **STOP**
12
+
13
+ ## Steps
14
+
15
+ ### 1. Show Current State
16
+ - Display: generation id, goal, current stage
17
+ - Ask: "이 generation을 abort 하시겠습니까?"
18
+ - If no: **STOP**
19
+
20
+ ### 2. Abort Reason
21
+ - Ask: "abort 사유를 입력해주세요"
22
+
23
+ ### 3. Source Code Handling
24
+ - Check `git diff --name-only` for uncommitted changes
25
+ - If no changes: skip to Step 4
26
+ - If changes exist, present the changed files and ask:
27
+ - **rollback**: `git checkout .` — 모든 변경 revert
28
+ - **stash**: `git stash push -m "reap-abort: {gen-id}"` — stash에 저장
29
+ - **hold**: 변경 유지 (working tree에 그대로)
30
+
31
+ ### 4. Backlog Save
32
+ - Ask: "Goal과 진행 상황을 backlog에 저장할까요? (yes/no)"
33
+ - If yes:
34
+ - Read `01-objective.md` for goal/spec content
35
+ - Read `03-implementation.md` if exists for progress
36
+ - Create `.reap/life/backlog/aborted-{gen-id}.md`:
37
+ ```markdown
38
+ ---
39
+ type: task
40
+ status: pending
41
+ aborted: true
42
+ abortedFrom: {gen-id}
43
+ abortReason: "{reason}"
44
+ stage: {current stage}
45
+ sourceAction: rollback|stash|hold
46
+ stashRef: "reap-abort: {gen-id}" # only if stash
47
+ changedFiles:
48
+ - {file1}
49
+ - {file2}
50
+ ---
51
+
52
+ # [Aborted] {goal}
53
+
54
+ ## Original Goal
55
+ {goal from objective}
56
+
57
+ ## Progress
58
+ {stage} 단계에서 중단.
59
+ {implementation summary if available}
60
+
61
+ ## Resume Guide
62
+ {if stash: "git stash pop으로 코드 복구"}
63
+ {if hold: "코드 변경이 working tree에 유지됨"}
64
+ {if rollback: "코드 변경이 revert됨. objective부터 재시작 필요"}
65
+ ```
66
+
67
+ ### 5. Cleanup
68
+ - Delete all artifact files from `.reap/life/` (`01-*.md` through `05-*.md`)
69
+ - Clear `current.yml` (write empty content)
70
+ - Do NOT record in lineage (incomplete generation)
71
+
72
+ ## Completion
73
+ - "Generation {gen-id} aborted. {backlog saved / not saved}."
@@ -104,6 +104,25 @@ Do NOT finalize Genome changes without running Validation Commands.
104
104
  19. 반복 패턴이 없으면 skip — "반복 패턴이 감지되지 않았습니다."
105
105
  20. **Limit**: 한 번에 최대 2개까지만 제안 (과부하 방지)
106
106
 
107
+ ### Phase 6: Lineage Compression
108
+
109
+ 21. Check if lineage compression is needed:
110
+ - Count total lines in `.reap/lineage/` and number of generations
111
+ - **Level 1 trigger**: total lines > 5,000 AND generations >= 5
112
+ - **Level 2 trigger**: Level 1 compressed `.md` files > 100
113
+ 22. If Level 1 triggered:
114
+ - Compress oldest uncompressed generation directories into single `.md` files
115
+ - Protect: recent 3 generations + DAG leaf nodes
116
+ - Preserve DAG metadata in frontmatter (id, parents, genomeHash)
117
+ 23. If Level 2 triggered:
118
+ - Run `git fetch --all` to update remote refs
119
+ - Scan all branches (local + remote) for fork points
120
+ - Compress eligible Level 1 files into single `epoch.md` (append if exists)
121
+ - Protect: recent 9 Level 1 files + all generations at/after fork points
122
+ - epoch.md frontmatter contains `generations` array with hash chain (id, parents, genomeHash)
123
+ 24. Report compression results: "Compressed N generations (Level 1: X, Level 2: Y)"
124
+ - If no compression needed: skip silently
125
+
107
126
  ## Self-Verification
108
127
  Before saving the artifact, verify:
109
128
  - [ ] Are lessons concrete and applicable to the next generation? (No vague "do better next time")
@@ -9,6 +9,7 @@ Start a merge generation by specifying a target branch to merge into the current
9
9
  ## Gate
10
10
  - Verify no active generation exists
11
11
  - Verify the target branch exists (`git rev-parse --verify {branch}`)
12
+ - Verify that the common ancestor is NOT inside `epoch.md`. If it is: ERROR — "The common ancestor has been epoch-compressed and cannot be used as a merge base. The generation is archived in epoch.md for historical reference only."
12
13
 
13
14
  ## Steps
14
15
 
@@ -45,7 +45,20 @@ Do NOT make technical decisions without reading the Genome (conventions.md, cons
45
45
  - Each task must be **one logical unit of change**
46
46
  - Specify dependencies and parallelization potential between tasks
47
47
 
48
- ### 5. Human Confirmation
48
+ ### 5. E2E Test Scenarios (lifecycle 변경 시 필수)
49
+ - If this generation modifies lifecycle logic (compression, generation, merge, abort, stage transitions, etc.):
50
+ - Define specific E2E test scenarios with expected outcomes
51
+ - Each scenario: setup → action → assertion
52
+ - Example:
53
+ ```
54
+ ## E2E Test Scenarios
55
+ 1. Normal abort + rollback → source reverted, artifacts deleted, current.yml empty
56
+ 2. Abort + stash → stash created, recoverable
57
+ 3. No active generation → error message
58
+ ```
59
+ - If not a lifecycle change: skip this step
60
+
61
+ ### 6. Human Confirmation
49
62
  - Finalize the plan with the human
50
63
 
51
64
  ## Task Format
@@ -15,14 +15,15 @@ description: "REAP Start — Start a new Generation"
15
15
  - If backlog items exist:
16
16
  - Present the list with title and priority for each item
17
17
  - Ask: "Would you like to select one of these, or enter a new goal?"
18
- - If the human selects a backlog item: use its title/content as the goal, then update the selected item's frontmatter to `status: consumed` and add `consumedBy: gen-XXX-{hash}`
18
+ - If the human selects a backlog item: **note the selection** (do NOT mark it consumed yet — the ID has not been generated yet)
19
19
  - If the human wants a new goal: proceed to Step 1
20
20
  - If no backlog items exist: proceed to Step 1
21
21
 
22
- 1. Ask the human for the goal of this generation
22
+ 1. Ask the human for the goal of this generation (or use selected backlog item's goal)
23
23
  2. Count existing generations in `.reap/lineage/` to determine the genomeVersion
24
24
  3. Generate the next generation ID (existing count + 1, in `gen-XXX-{hash}` format where `{hash}` is a short content hash)
25
- 4. Write the following to `current.yml`:
25
+ 4. **If a backlog item was selected in Step 0**: now mark it as `status: consumed` and add `consumedBy: gen-XXX-{hash}` (using the ID just generated)
26
+ 5. Write the following to `current.yml`:
26
27
  ```yaml
27
28
  id: gen-XXX-{hash}
28
29
  goal: [goal provided by the human]
@@ -33,10 +34,10 @@ description: "REAP Start — Start a new Generation"
33
34
  - stage: objective
34
35
  at: [current ISO 8601 timestamp]
35
36
  ```
36
- 5. Immediately create `.reap/life/01-objective.md` from the artifact template with the Goal section filled in
37
+ 6. Immediately create `.reap/life/01-objective.md` from the artifact template with the Goal section filled in
37
38
 
38
39
  ### Hook Execution (Generation Start)
39
- 6. Scan `.reap/hooks/` for files matching `onGenerationStart.*`
40
+ 7. Scan `.reap/hooks/` for files matching `onGenerationStart.*`
40
41
  - For each matched file (sorted by `order` from frontmatter, then alphabetically):
41
42
  1. Read the frontmatter (`condition`, `order`)
42
43
  2. Evaluate `condition` by running `.reap/hooks/conditions/{condition}.sh` (exit 0 = met, non-zero = skip):
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@c-d-cc/reap",
3
- "version": "0.7.9",
3
+ "version": "0.8.0",
4
4
  "description": "Recursive Evolutionary Autonomous Pipeline — AI and humans evolve software across generations",
5
5
  "type": "module",
6
6
  "license": "MIT",