@lumenflow/cli 2.2.2 → 2.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +147 -57
- package/dist/__tests__/agent-log-issue.test.js +56 -0
- package/dist/__tests__/cli-entry-point.test.js +66 -17
- package/dist/__tests__/cli-subprocess.test.js +25 -0
- package/dist/__tests__/init.test.js +298 -0
- package/dist/__tests__/initiative-plan.test.js +340 -0
- package/dist/__tests__/mem-cleanup-execution.test.js +19 -0
- package/dist/__tests__/merge-block.test.js +220 -0
- package/dist/__tests__/safe-git.test.js +191 -0
- package/dist/__tests__/state-doctor.test.js +274 -0
- package/dist/__tests__/wu-done.test.js +36 -0
- package/dist/__tests__/wu-edit.test.js +119 -0
- package/dist/__tests__/wu-prep.test.js +108 -0
- package/dist/agent-issues-query.js +4 -3
- package/dist/agent-log-issue.js +25 -4
- package/dist/backlog-prune.js +5 -4
- package/dist/cli-entry-point.js +11 -1
- package/dist/doctor.js +368 -0
- package/dist/flow-bottlenecks.js +6 -5
- package/dist/flow-report.js +4 -3
- package/dist/gates.js +356 -101
- package/dist/guard-locked.js +4 -3
- package/dist/guard-worktree-commit.js +4 -3
- package/dist/init.js +508 -86
- package/dist/initiative-add-wu.js +4 -3
- package/dist/initiative-bulk-assign-wus.js +8 -5
- package/dist/initiative-create.js +73 -37
- package/dist/initiative-edit.js +37 -21
- package/dist/initiative-list.js +4 -3
- package/dist/initiative-plan.js +337 -0
- package/dist/initiative-status.js +4 -3
- package/dist/lane-health.js +377 -0
- package/dist/lane-suggest.js +382 -0
- package/dist/mem-checkpoint.js +2 -2
- package/dist/mem-cleanup.js +2 -2
- package/dist/mem-context.js +306 -0
- package/dist/mem-create.js +2 -2
- package/dist/mem-delete.js +293 -0
- package/dist/mem-inbox.js +2 -2
- package/dist/mem-index.js +211 -0
- package/dist/mem-init.js +1 -1
- package/dist/mem-profile.js +207 -0
- package/dist/mem-promote.js +254 -0
- package/dist/mem-ready.js +2 -2
- package/dist/mem-signal.js +2 -2
- package/dist/mem-start.js +2 -2
- package/dist/mem-summarize.js +2 -2
- package/dist/mem-triage.js +2 -2
- package/dist/merge-block.js +222 -0
- package/dist/metrics-cli.js +7 -4
- package/dist/metrics-snapshot.js +4 -3
- package/dist/orchestrate-initiative.js +10 -4
- package/dist/orchestrate-monitor.js +379 -31
- package/dist/signal-cleanup.js +296 -0
- package/dist/spawn-list.js +6 -5
- package/dist/state-bootstrap.js +5 -4
- package/dist/state-cleanup.js +360 -0
- package/dist/state-doctor-fix.js +196 -0
- package/dist/state-doctor.js +501 -0
- package/dist/validate-agent-skills.js +4 -3
- package/dist/validate-agent-sync.js +4 -3
- package/dist/validate-backlog-sync.js +4 -3
- package/dist/validate-skills-spec.js +4 -3
- package/dist/validate.js +4 -3
- package/dist/wu-block.js +3 -3
- package/dist/wu-claim.js +208 -98
- package/dist/wu-cleanup.js +5 -4
- package/dist/wu-create.js +71 -46
- package/dist/wu-delete.js +88 -60
- package/dist/wu-deps.js +6 -5
- package/dist/wu-done-check.js +34 -0
- package/dist/wu-done.js +39 -12
- package/dist/wu-edit.js +63 -28
- package/dist/wu-infer-lane.js +7 -6
- package/dist/wu-preflight.js +23 -81
- package/dist/wu-prep.js +125 -0
- package/dist/wu-prune.js +4 -3
- package/dist/wu-recover.js +88 -22
- package/dist/wu-repair.js +7 -6
- package/dist/wu-spawn.js +226 -270
- package/dist/wu-status.js +4 -3
- package/dist/wu-unblock.js +5 -5
- package/dist/wu-unlock-lane.js +4 -3
- package/dist/wu-validate.js +5 -4
- package/package.json +16 -7
- package/templates/core/.lumenflow/constraints.md.template +192 -0
- package/templates/core/.lumenflow/rules/git-safety.md.template +27 -0
- package/templates/core/.lumenflow/rules/wu-workflow.md.template +48 -0
- package/templates/core/AGENTS.md.template +60 -0
- package/templates/core/LUMENFLOW.md.template +255 -0
- package/templates/core/UPGRADING.md.template +121 -0
- package/templates/core/ai/onboarding/agent-safety-card.md.template +106 -0
- package/templates/core/ai/onboarding/first-wu-mistakes.md.template +198 -0
- package/templates/core/ai/onboarding/quick-ref-commands.md.template +186 -0
- package/templates/core/ai/onboarding/release-process.md.template +362 -0
- package/templates/core/ai/onboarding/troubleshooting-wu-done.md.template +159 -0
- package/templates/core/ai/onboarding/wu-create-checklist.md.template +117 -0
- package/templates/vendors/aider/.aider.conf.yml.template +27 -0
- package/templates/vendors/claude/.claude/CLAUDE.md.template +52 -0
- package/templates/vendors/claude/.claude/settings.json.template +49 -0
- package/templates/vendors/claude/.claude/skills/bug-classification/SKILL.md.template +192 -0
- package/templates/vendors/claude/.claude/skills/code-quality/SKILL.md.template +152 -0
- package/templates/vendors/claude/.claude/skills/context-management/SKILL.md.template +155 -0
- package/templates/vendors/claude/.claude/skills/execution-memory/SKILL.md.template +304 -0
- package/templates/vendors/claude/.claude/skills/frontend-design/SKILL.md.template +131 -0
- package/templates/vendors/claude/.claude/skills/initiative-management/SKILL.md.template +164 -0
- package/templates/vendors/claude/.claude/skills/library-first/SKILL.md.template +98 -0
- package/templates/vendors/claude/.claude/skills/lumenflow-gates/SKILL.md.template +87 -0
- package/templates/vendors/claude/.claude/skills/multi-agent-coordination/SKILL.md.template +84 -0
- package/templates/vendors/claude/.claude/skills/ops-maintenance/SKILL.md.template +254 -0
- package/templates/vendors/claude/.claude/skills/orchestration/SKILL.md.template +189 -0
- package/templates/vendors/claude/.claude/skills/tdd-workflow/SKILL.md.template +139 -0
- package/templates/vendors/claude/.claude/skills/worktree-discipline/SKILL.md.template +138 -0
- package/templates/vendors/claude/.claude/skills/wu-lifecycle/SKILL.md.template +106 -0
- package/templates/vendors/cline/.clinerules.template +53 -0
- package/templates/vendors/cursor/.cursor/rules/lumenflow.md.template +34 -0
- package/templates/vendors/cursor/.cursor/rules.md.template +28 -0
- package/templates/vendors/windsurf/.windsurf/rules/lumenflow.md.template +34 -0
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @file merge-block.test.ts
|
|
3
|
+
* Tests for merge block functionality (WU-1171)
|
|
4
|
+
*
|
|
5
|
+
* Tests the LUMENFLOW:START/END block insertion and update logic
|
|
6
|
+
* that enables safe, idempotent merging of LumenFlow config into existing files.
|
|
7
|
+
*/
|
|
8
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
9
|
+
import * as fs from 'node:fs';
|
|
10
|
+
import * as path from 'node:path';
|
|
11
|
+
import * as os from 'node:os';
|
|
12
|
+
// Import the functions we'll implement
|
|
13
|
+
import { detectLineEnding, extractMergeBlock, insertMergeBlock, updateMergeBlock, } from '../merge-block.js';
|
|
14
|
+
describe('merge-block', () => {
|
|
15
|
+
let tempDir;
|
|
16
|
+
beforeEach(() => {
|
|
17
|
+
tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'merge-block-test-'));
|
|
18
|
+
});
|
|
19
|
+
afterEach(() => {
|
|
20
|
+
fs.rmSync(tempDir, { recursive: true, force: true });
|
|
21
|
+
});
|
|
22
|
+
describe('detectLineEnding', () => {
|
|
23
|
+
it('should detect LF line endings', () => {
|
|
24
|
+
const content = 'line1\nline2\nline3\n';
|
|
25
|
+
expect(detectLineEnding(content)).toBe('\n');
|
|
26
|
+
});
|
|
27
|
+
it('should detect CRLF line endings', () => {
|
|
28
|
+
const content = 'line1\r\nline2\r\nline3\r\n';
|
|
29
|
+
expect(detectLineEnding(content)).toBe('\r\n');
|
|
30
|
+
});
|
|
31
|
+
it('should default to LF for empty content', () => {
|
|
32
|
+
expect(detectLineEnding('')).toBe('\n');
|
|
33
|
+
});
|
|
34
|
+
it('should default to LF for content without line endings', () => {
|
|
35
|
+
expect(detectLineEnding('single line')).toBe('\n');
|
|
36
|
+
});
|
|
37
|
+
it('should use majority line ending when mixed', () => {
|
|
38
|
+
const content = 'line1\r\nline2\r\nline3\n';
|
|
39
|
+
// 2 CRLF vs 1 LF, should detect CRLF
|
|
40
|
+
expect(detectLineEnding(content)).toBe('\r\n');
|
|
41
|
+
});
|
|
42
|
+
});
|
|
43
|
+
describe('extractMergeBlock', () => {
|
|
44
|
+
it('should extract content between LUMENFLOW:START and LUMENFLOW:END markers', () => {
|
|
45
|
+
const content = `# My Project
|
|
46
|
+
|
|
47
|
+
<!-- LUMENFLOW:START -->
|
|
48
|
+
This is LumenFlow content.
|
|
49
|
+
<!-- LUMENFLOW:END -->
|
|
50
|
+
|
|
51
|
+
Other content
|
|
52
|
+
`;
|
|
53
|
+
const result = extractMergeBlock(content);
|
|
54
|
+
expect(result.found).toBe(true);
|
|
55
|
+
expect(result.content).toBe('This is LumenFlow content.');
|
|
56
|
+
expect(result.startIndex).toBeGreaterThan(0);
|
|
57
|
+
expect(result.endIndex).toBeGreaterThan(result.startIndex);
|
|
58
|
+
});
|
|
59
|
+
it('should return not found when no markers exist', () => {
|
|
60
|
+
const content = '# My Project\n\nNo LumenFlow content here.';
|
|
61
|
+
const result = extractMergeBlock(content);
|
|
62
|
+
expect(result.found).toBe(false);
|
|
63
|
+
expect(result.content).toBeUndefined();
|
|
64
|
+
});
|
|
65
|
+
it('should handle malformed markers (only START)', () => {
|
|
66
|
+
const content = `# My Project
|
|
67
|
+
|
|
68
|
+
<!-- LUMENFLOW:START -->
|
|
69
|
+
Incomplete block
|
|
70
|
+
`;
|
|
71
|
+
const result = extractMergeBlock(content);
|
|
72
|
+
expect(result.found).toBe(false);
|
|
73
|
+
expect(result.malformed).toBe(true);
|
|
74
|
+
expect(result.malformedReason).toBe('missing-end');
|
|
75
|
+
});
|
|
76
|
+
it('should handle malformed markers (only END)', () => {
|
|
77
|
+
const content = `# My Project
|
|
78
|
+
|
|
79
|
+
Some content
|
|
80
|
+
<!-- LUMENFLOW:END -->
|
|
81
|
+
`;
|
|
82
|
+
const result = extractMergeBlock(content);
|
|
83
|
+
expect(result.found).toBe(false);
|
|
84
|
+
expect(result.malformed).toBe(true);
|
|
85
|
+
expect(result.malformedReason).toBe('missing-start');
|
|
86
|
+
});
|
|
87
|
+
it('should handle multiple START markers', () => {
|
|
88
|
+
const content = `<!-- LUMENFLOW:START -->
|
|
89
|
+
First block
|
|
90
|
+
<!-- LUMENFLOW:START -->
|
|
91
|
+
Second start
|
|
92
|
+
<!-- LUMENFLOW:END -->
|
|
93
|
+
`;
|
|
94
|
+
const result = extractMergeBlock(content);
|
|
95
|
+
expect(result.malformed).toBe(true);
|
|
96
|
+
expect(result.malformedReason).toBe('multiple-start');
|
|
97
|
+
});
|
|
98
|
+
});
|
|
99
|
+
describe('insertMergeBlock', () => {
|
|
100
|
+
it('should append block to end of file', () => {
|
|
101
|
+
const originalContent = '# My Project\n\nExisting content.\n';
|
|
102
|
+
const blockContent = 'LumenFlow configuration goes here.';
|
|
103
|
+
const result = insertMergeBlock(originalContent, blockContent);
|
|
104
|
+
expect(result).toContain('<!-- LUMENFLOW:START -->');
|
|
105
|
+
expect(result).toContain(blockContent);
|
|
106
|
+
expect(result).toContain('<!-- LUMENFLOW:END -->');
|
|
107
|
+
expect(result.startsWith('# My Project')).toBe(true);
|
|
108
|
+
});
|
|
109
|
+
it('should preserve original line endings (LF)', () => {
|
|
110
|
+
const originalContent = '# My Project\nExisting content.\n';
|
|
111
|
+
const blockContent = 'New content';
|
|
112
|
+
const result = insertMergeBlock(originalContent, blockContent);
|
|
113
|
+
// Should not contain CRLF
|
|
114
|
+
expect(result).not.toContain('\r\n');
|
|
115
|
+
expect(result).toContain('\n');
|
|
116
|
+
});
|
|
117
|
+
it('should preserve original line endings (CRLF)', () => {
|
|
118
|
+
const originalContent = '# My Project\r\nExisting content.\r\n';
|
|
119
|
+
const blockContent = 'New content';
|
|
120
|
+
const result = insertMergeBlock(originalContent, blockContent);
|
|
121
|
+
// Should contain CRLF
|
|
122
|
+
expect(result).toContain('\r\n');
|
|
123
|
+
});
|
|
124
|
+
it('should add blank line before block if not present', () => {
|
|
125
|
+
const originalContent = '# My Project\nNo trailing newline';
|
|
126
|
+
const blockContent = 'New content';
|
|
127
|
+
const result = insertMergeBlock(originalContent, blockContent);
|
|
128
|
+
// Should have separation between original and block
|
|
129
|
+
expect(result).toMatch(/No trailing newline\n\n<!-- LUMENFLOW:START -->/);
|
|
130
|
+
});
|
|
131
|
+
});
|
|
132
|
+
describe('updateMergeBlock', () => {
|
|
133
|
+
it('should replace existing block content', () => {
|
|
134
|
+
const originalContent = `# My Project
|
|
135
|
+
|
|
136
|
+
<!-- LUMENFLOW:START -->
|
|
137
|
+
Old LumenFlow content.
|
|
138
|
+
<!-- LUMENFLOW:END -->
|
|
139
|
+
|
|
140
|
+
Other content
|
|
141
|
+
`;
|
|
142
|
+
const newBlockContent = 'New LumenFlow content.';
|
|
143
|
+
const result = updateMergeBlock(originalContent, newBlockContent);
|
|
144
|
+
expect(result.content).toContain('New LumenFlow content.');
|
|
145
|
+
expect(result.content).not.toContain('Old LumenFlow content.');
|
|
146
|
+
expect(result.content).toContain('Other content');
|
|
147
|
+
expect(result.updated).toBe(true);
|
|
148
|
+
});
|
|
149
|
+
it('should preserve content before and after the block', () => {
|
|
150
|
+
const originalContent = `# Header
|
|
151
|
+
|
|
152
|
+
Before the block.
|
|
153
|
+
|
|
154
|
+
<!-- LUMENFLOW:START -->
|
|
155
|
+
Old content
|
|
156
|
+
<!-- LUMENFLOW:END -->
|
|
157
|
+
|
|
158
|
+
After the block.
|
|
159
|
+
`;
|
|
160
|
+
const newBlockContent = 'Updated content';
|
|
161
|
+
const result = updateMergeBlock(originalContent, newBlockContent);
|
|
162
|
+
expect(result.content).toContain('# Header');
|
|
163
|
+
expect(result.content).toContain('Before the block.');
|
|
164
|
+
expect(result.content).toContain('After the block.');
|
|
165
|
+
});
|
|
166
|
+
it('should preserve original line endings when updating', () => {
|
|
167
|
+
const originalContent = `# Project\r\n\r\n<!-- LUMENFLOW:START -->\r\nOld\r\n<!-- LUMENFLOW:END -->\r\n`;
|
|
168
|
+
const newBlockContent = 'New';
|
|
169
|
+
const result = updateMergeBlock(originalContent, newBlockContent);
|
|
170
|
+
// All line endings in result should be CRLF
|
|
171
|
+
const lfCount = (result.content.match(/(?<!\r)\n/g) || []).length;
|
|
172
|
+
expect(lfCount).toBe(0); // No standalone LF
|
|
173
|
+
});
|
|
174
|
+
it('should insert block when no existing block (append mode)', () => {
|
|
175
|
+
const originalContent = '# Project\n\nNo block here.\n';
|
|
176
|
+
const newBlockContent = 'New content';
|
|
177
|
+
const result = updateMergeBlock(originalContent, newBlockContent);
|
|
178
|
+
expect(result.content).toContain('<!-- LUMENFLOW:START -->');
|
|
179
|
+
expect(result.content).toContain(newBlockContent);
|
|
180
|
+
expect(result.updated).toBe(true);
|
|
181
|
+
expect(result.wasInserted).toBe(true);
|
|
182
|
+
});
|
|
183
|
+
it('should warn and append fresh block on malformed markers', () => {
|
|
184
|
+
const originalContent = `# Project
|
|
185
|
+
|
|
186
|
+
<!-- LUMENFLOW:START -->
|
|
187
|
+
Incomplete block without end marker
|
|
188
|
+
`;
|
|
189
|
+
const newBlockContent = 'Fresh content';
|
|
190
|
+
const result = updateMergeBlock(originalContent, newBlockContent);
|
|
191
|
+
expect(result.warning).toContain('malformed');
|
|
192
|
+
expect(result.content).toContain('<!-- LUMENFLOW:START -->');
|
|
193
|
+
expect(result.content).toContain('Fresh content');
|
|
194
|
+
expect(result.content).toContain('<!-- LUMENFLOW:END -->');
|
|
195
|
+
});
|
|
196
|
+
});
|
|
197
|
+
describe('idempotency', () => {
|
|
198
|
+
it('should produce identical output when run twice with same input', () => {
|
|
199
|
+
const originalContent = '# My Project\n\nSome content.\n';
|
|
200
|
+
const blockContent = 'LumenFlow configuration';
|
|
201
|
+
// First merge
|
|
202
|
+
const firstResult = updateMergeBlock(originalContent, blockContent);
|
|
203
|
+
// Second merge with same block content
|
|
204
|
+
const secondResult = updateMergeBlock(firstResult.content, blockContent);
|
|
205
|
+
expect(firstResult.content).toBe(secondResult.content);
|
|
206
|
+
});
|
|
207
|
+
it('should not modify file when block content is unchanged', () => {
|
|
208
|
+
const existingContent = `# Project
|
|
209
|
+
|
|
210
|
+
<!-- LUMENFLOW:START -->
|
|
211
|
+
Same content
|
|
212
|
+
<!-- LUMENFLOW:END -->
|
|
213
|
+
`;
|
|
214
|
+
const blockContent = 'Same content';
|
|
215
|
+
const result = updateMergeBlock(existingContent, blockContent);
|
|
216
|
+
expect(result.unchanged).toBe(true);
|
|
217
|
+
expect(result.content).toBe(existingContent);
|
|
218
|
+
});
|
|
219
|
+
});
|
|
220
|
+
});
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
/* eslint-disable sonarjs/no-os-command-from-path -- Test file needs to execute git commands */
|
|
2
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
3
|
+
import { execFileSync } from 'node:child_process';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import fs from 'node:fs';
|
|
6
|
+
import os from 'node:os';
|
|
7
|
+
const CLI_SAFE_GIT_PATH = path.resolve(__dirname, '../../bin/safe-git');
|
|
8
|
+
const SCRIPTS_SAFE_GIT_PATH = path.resolve(__dirname, '../../../../../scripts/safe-git');
|
|
9
|
+
// Constants for duplicate strings
|
|
10
|
+
const SHOULD_HAVE_THROWN = 'Should have thrown an error';
|
|
11
|
+
const GIT_CMD = 'git';
|
|
12
|
+
const USER_EMAIL_CONFIG = 'user.email';
|
|
13
|
+
const USER_NAME_CONFIG = 'user.name';
|
|
14
|
+
const TEST_EMAIL = 'test@test.com';
|
|
15
|
+
const TEST_USERNAME = 'Test';
|
|
16
|
+
const FORCE_BYPASSES_LOG = 'force-bypasses.log';
|
|
17
|
+
// Create a temporary directory for testing to avoid polluting the real .beacon directory
|
|
18
|
+
const createTempDir = () => {
|
|
19
|
+
return fs.mkdtempSync(path.join(os.tmpdir(), 'safe-git-test-'));
|
|
20
|
+
};
|
|
21
|
+
describe('safe-git', () => {
|
|
22
|
+
// We mock child_process execution where possible, but for integration testing a script
|
|
23
|
+
// we often execute it directly. Since safe-git is a shell script, we executed it.
|
|
24
|
+
it('should fail when running "worktree remove" (CLI wrapper)', () => {
|
|
25
|
+
try {
|
|
26
|
+
execFileSync(CLI_SAFE_GIT_PATH, ['worktree', 'remove', 'some-path'], { stdio: 'pipe' });
|
|
27
|
+
expect.fail(SHOULD_HAVE_THROWN);
|
|
28
|
+
}
|
|
29
|
+
catch (error) {
|
|
30
|
+
const err = error;
|
|
31
|
+
expect(err.status).toBe(1);
|
|
32
|
+
expect(err.stderr.toString()).toContain("BLOCKED: Manual 'git worktree remove' is unsafe");
|
|
33
|
+
}
|
|
34
|
+
});
|
|
35
|
+
it('should fail when running "worktree remove" (scripts wrapper)', () => {
|
|
36
|
+
try {
|
|
37
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['worktree', 'remove', 'some-path'], { stdio: 'pipe' });
|
|
38
|
+
expect.fail(SHOULD_HAVE_THROWN);
|
|
39
|
+
}
|
|
40
|
+
catch (error) {
|
|
41
|
+
const err = error;
|
|
42
|
+
expect(err.status).toBe(1);
|
|
43
|
+
expect(err.stderr.toString()).toContain('Manual');
|
|
44
|
+
expect(err.stderr.toString()).toContain('worktree remove');
|
|
45
|
+
}
|
|
46
|
+
});
|
|
47
|
+
it('should fail when running "reset --hard" (scripts wrapper)', () => {
|
|
48
|
+
try {
|
|
49
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], { stdio: 'pipe' });
|
|
50
|
+
expect.fail(SHOULD_HAVE_THROWN);
|
|
51
|
+
}
|
|
52
|
+
catch (error) {
|
|
53
|
+
const err = error;
|
|
54
|
+
expect(err.status).toBe(1);
|
|
55
|
+
expect(err.stderr.toString()).toContain('reset --hard');
|
|
56
|
+
}
|
|
57
|
+
});
|
|
58
|
+
it('should fail when running "clean -fd" (scripts wrapper)', () => {
|
|
59
|
+
try {
|
|
60
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['clean', '-fd'], { stdio: 'pipe' });
|
|
61
|
+
expect.fail(SHOULD_HAVE_THROWN);
|
|
62
|
+
}
|
|
63
|
+
catch (error) {
|
|
64
|
+
const err = error;
|
|
65
|
+
expect(err.status).toBe(1);
|
|
66
|
+
expect(err.stderr.toString()).toContain('clean -fd');
|
|
67
|
+
}
|
|
68
|
+
});
|
|
69
|
+
it('should fail when running "push --force" (scripts wrapper)', () => {
|
|
70
|
+
try {
|
|
71
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['push', '--force'], { stdio: 'pipe' });
|
|
72
|
+
expect.fail(SHOULD_HAVE_THROWN);
|
|
73
|
+
}
|
|
74
|
+
catch (error) {
|
|
75
|
+
const err = error;
|
|
76
|
+
expect(err.status).toBe(1);
|
|
77
|
+
expect(err.stderr.toString()).toContain('push --force');
|
|
78
|
+
}
|
|
79
|
+
});
|
|
80
|
+
it('should pass through safe commands', () => {
|
|
81
|
+
// We verify it calls git by mocking git or checking output.
|
|
82
|
+
// Since we can't easily mock the system git in a real shell script execution without PATH manip,
|
|
83
|
+
// we'll check that it runs git --version correctly.
|
|
84
|
+
const output = execFileSync(CLI_SAFE_GIT_PATH, ['--version'], { encoding: 'utf-8' });
|
|
85
|
+
expect(output).toContain('git version');
|
|
86
|
+
});
|
|
87
|
+
describe('LUMENFLOW_FORCE bypass', () => {
|
|
88
|
+
let tempDir;
|
|
89
|
+
beforeEach(() => {
|
|
90
|
+
tempDir = createTempDir();
|
|
91
|
+
});
|
|
92
|
+
afterEach(() => {
|
|
93
|
+
// Clean up temp directory
|
|
94
|
+
fs.rmSync(tempDir, { recursive: true, force: true });
|
|
95
|
+
});
|
|
96
|
+
it('should bypass blocked commands when LUMENFLOW_FORCE=1', () => {
|
|
97
|
+
// Using git --version as a safe test with force flag
|
|
98
|
+
// The key is that the env var should be respected and not block
|
|
99
|
+
const output = execFileSync(SCRIPTS_SAFE_GIT_PATH, ['--version'], {
|
|
100
|
+
encoding: 'utf-8',
|
|
101
|
+
env: { ...process.env, LUMENFLOW_FORCE: '1' },
|
|
102
|
+
});
|
|
103
|
+
expect(output).toContain('git version');
|
|
104
|
+
});
|
|
105
|
+
it('should log bypass to force-bypasses.log when LUMENFLOW_FORCE=1', () => {
|
|
106
|
+
// We need to test that a blocked command, when forced, writes to the audit log
|
|
107
|
+
// Since reset --hard is dangerous, we use a mock approach
|
|
108
|
+
// The script should create the audit log entry before executing
|
|
109
|
+
// Create a temporary git repo for this test
|
|
110
|
+
const testRepo = path.join(tempDir, 'test-repo');
|
|
111
|
+
fs.mkdirSync(testRepo, { recursive: true });
|
|
112
|
+
execFileSync(GIT_CMD, ['init'], { cwd: testRepo, stdio: 'pipe' });
|
|
113
|
+
execFileSync(GIT_CMD, ['config', USER_EMAIL_CONFIG, TEST_EMAIL], {
|
|
114
|
+
cwd: testRepo,
|
|
115
|
+
stdio: 'pipe',
|
|
116
|
+
});
|
|
117
|
+
execFileSync(GIT_CMD, ['config', USER_NAME_CONFIG, TEST_USERNAME], {
|
|
118
|
+
cwd: testRepo,
|
|
119
|
+
stdio: 'pipe',
|
|
120
|
+
});
|
|
121
|
+
// Create a file and commit
|
|
122
|
+
fs.writeFileSync(path.join(testRepo, 'test.txt'), 'test');
|
|
123
|
+
execFileSync(GIT_CMD, ['add', '.'], { cwd: testRepo, stdio: 'pipe' });
|
|
124
|
+
execFileSync(GIT_CMD, ['commit', '-m', 'init'], { cwd: testRepo, stdio: 'pipe' });
|
|
125
|
+
// Run safe-git with force to reset --hard (should succeed with log)
|
|
126
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], {
|
|
127
|
+
cwd: testRepo,
|
|
128
|
+
encoding: 'utf-8',
|
|
129
|
+
env: { ...process.env, LUMENFLOW_FORCE: '1' },
|
|
130
|
+
});
|
|
131
|
+
// Check that the force bypass log exists and contains the entry
|
|
132
|
+
const bypassLog = path.join(testRepo, '.beacon', FORCE_BYPASSES_LOG);
|
|
133
|
+
expect(fs.existsSync(bypassLog)).toBe(true);
|
|
134
|
+
const logContent = fs.readFileSync(bypassLog, 'utf-8');
|
|
135
|
+
expect(logContent).toContain('reset --hard');
|
|
136
|
+
expect(logContent).toContain('BYPASSED');
|
|
137
|
+
});
|
|
138
|
+
it('should include LUMENFLOW_FORCE_REASON in audit log when provided', () => {
|
|
139
|
+
const testRepo = path.join(tempDir, 'test-repo-reason');
|
|
140
|
+
fs.mkdirSync(testRepo, { recursive: true });
|
|
141
|
+
execFileSync(GIT_CMD, ['init'], { cwd: testRepo, stdio: 'pipe' });
|
|
142
|
+
execFileSync(GIT_CMD, ['config', USER_EMAIL_CONFIG, TEST_EMAIL], {
|
|
143
|
+
cwd: testRepo,
|
|
144
|
+
stdio: 'pipe',
|
|
145
|
+
});
|
|
146
|
+
execFileSync(GIT_CMD, ['config', USER_NAME_CONFIG, TEST_USERNAME], {
|
|
147
|
+
cwd: testRepo,
|
|
148
|
+
stdio: 'pipe',
|
|
149
|
+
});
|
|
150
|
+
fs.writeFileSync(path.join(testRepo, 'test.txt'), 'test');
|
|
151
|
+
execFileSync(GIT_CMD, ['add', '.'], { cwd: testRepo, stdio: 'pipe' });
|
|
152
|
+
execFileSync(GIT_CMD, ['commit', '-m', 'init'], { cwd: testRepo, stdio: 'pipe' });
|
|
153
|
+
const testReason = 'user-approved: testing bypass';
|
|
154
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], {
|
|
155
|
+
cwd: testRepo,
|
|
156
|
+
encoding: 'utf-8',
|
|
157
|
+
env: { ...process.env, LUMENFLOW_FORCE: '1', LUMENFLOW_FORCE_REASON: testReason },
|
|
158
|
+
});
|
|
159
|
+
const bypassLog = path.join(testRepo, '.beacon', FORCE_BYPASSES_LOG);
|
|
160
|
+
const logContent = fs.readFileSync(bypassLog, 'utf-8');
|
|
161
|
+
expect(logContent).toContain(testReason);
|
|
162
|
+
});
|
|
163
|
+
it('should print warning when LUMENFLOW_FORCE used without REASON', () => {
|
|
164
|
+
const testRepo = path.join(tempDir, 'test-repo-no-reason');
|
|
165
|
+
fs.mkdirSync(testRepo, { recursive: true });
|
|
166
|
+
execFileSync(GIT_CMD, ['init'], { cwd: testRepo, stdio: 'pipe' });
|
|
167
|
+
execFileSync(GIT_CMD, ['config', USER_EMAIL_CONFIG, TEST_EMAIL], {
|
|
168
|
+
cwd: testRepo,
|
|
169
|
+
stdio: 'pipe',
|
|
170
|
+
});
|
|
171
|
+
execFileSync(GIT_CMD, ['config', USER_NAME_CONFIG, TEST_USERNAME], {
|
|
172
|
+
cwd: testRepo,
|
|
173
|
+
stdio: 'pipe',
|
|
174
|
+
});
|
|
175
|
+
fs.writeFileSync(path.join(testRepo, 'test.txt'), 'test');
|
|
176
|
+
execFileSync(GIT_CMD, ['add', '.'], { cwd: testRepo, stdio: 'pipe' });
|
|
177
|
+
execFileSync(GIT_CMD, ['commit', '-m', 'init'], { cwd: testRepo, stdio: 'pipe' });
|
|
178
|
+
// Execute with LUMENFLOW_FORCE=1 but no reason
|
|
179
|
+
execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], {
|
|
180
|
+
cwd: testRepo,
|
|
181
|
+
encoding: 'utf-8',
|
|
182
|
+
env: { ...process.env, LUMENFLOW_FORCE: '1' },
|
|
183
|
+
stdio: ['pipe', 'pipe', 'pipe'],
|
|
184
|
+
});
|
|
185
|
+
// Check the bypasslog for the NO_REASON marker
|
|
186
|
+
const bypassLog = path.join(testRepo, '.beacon', FORCE_BYPASSES_LOG);
|
|
187
|
+
const logContent = fs.readFileSync(bypassLog, 'utf-8');
|
|
188
|
+
expect(logContent).toContain('NO_REASON');
|
|
189
|
+
});
|
|
190
|
+
});
|
|
191
|
+
});
|
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* State Doctor CLI Tests (WU-1230)
|
|
3
|
+
*
|
|
4
|
+
* Tests for state:doctor --fix functionality:
|
|
5
|
+
* - Micro-worktree isolation for all tracked file changes
|
|
6
|
+
* - Removal of stale WU references from backlog.md and status.md
|
|
7
|
+
* - Changes pushed via merge, not direct file modification
|
|
8
|
+
*/
|
|
9
|
+
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
10
|
+
import { join } from 'node:path';
|
|
11
|
+
import { mkdtempSync, rmSync, writeFileSync, mkdirSync, readFileSync } from 'node:fs';
|
|
12
|
+
import { tmpdir } from 'node:os';
|
|
13
|
+
/**
|
|
14
|
+
* Mocked modules
|
|
15
|
+
*/
|
|
16
|
+
vi.mock('@lumenflow/core/dist/micro-worktree.js', () => ({
|
|
17
|
+
withMicroWorktree: vi.fn(),
|
|
18
|
+
}));
|
|
19
|
+
vi.mock('@lumenflow/core/dist/git-adapter.js', () => ({
|
|
20
|
+
getGitForCwd: vi.fn(() => ({
|
|
21
|
+
fetch: vi.fn(),
|
|
22
|
+
merge: vi.fn(),
|
|
23
|
+
push: vi.fn(),
|
|
24
|
+
})),
|
|
25
|
+
createGitForPath: vi.fn(() => ({
|
|
26
|
+
add: vi.fn(),
|
|
27
|
+
commit: vi.fn(),
|
|
28
|
+
push: vi.fn(),
|
|
29
|
+
})),
|
|
30
|
+
}));
|
|
31
|
+
/**
|
|
32
|
+
* Import after mocks are set up
|
|
33
|
+
*/
|
|
34
|
+
import { withMicroWorktree } from '@lumenflow/core/dist/micro-worktree.js';
|
|
35
|
+
/**
|
|
36
|
+
* Constants for test paths
|
|
37
|
+
*/
|
|
38
|
+
const LUMENFLOW_DIR = '.lumenflow';
|
|
39
|
+
const STATE_DIR = 'state';
|
|
40
|
+
const STAMPS_DIR = 'stamps';
|
|
41
|
+
const MEMORY_DIR = 'memory';
|
|
42
|
+
const DOCS_TASKS_DIR = 'docs/04-operations/tasks';
|
|
43
|
+
const BACKLOG_PATH = `${DOCS_TASKS_DIR}/backlog.md`;
|
|
44
|
+
const STATUS_PATH = `${DOCS_TASKS_DIR}/status.md`;
|
|
45
|
+
/**
|
|
46
|
+
* Test directory path
|
|
47
|
+
*/
|
|
48
|
+
let testDir;
|
|
49
|
+
describe('state-doctor CLI (WU-1230)', () => {
|
|
50
|
+
beforeEach(() => {
|
|
51
|
+
vi.clearAllMocks();
|
|
52
|
+
// Create temp directory for each test
|
|
53
|
+
testDir = mkdtempSync(join(tmpdir(), 'state-doctor-test-'));
|
|
54
|
+
});
|
|
55
|
+
afterEach(() => {
|
|
56
|
+
// Cleanup temp directory
|
|
57
|
+
try {
|
|
58
|
+
rmSync(testDir, { recursive: true, force: true });
|
|
59
|
+
}
|
|
60
|
+
catch {
|
|
61
|
+
// Ignore cleanup errors
|
|
62
|
+
}
|
|
63
|
+
});
|
|
64
|
+
describe('micro-worktree isolation', () => {
|
|
65
|
+
it('should use micro-worktree when --fix modifies tracked files', async () => {
|
|
66
|
+
// Setup: Create test state with broken events
|
|
67
|
+
setupTestState(testDir, {
|
|
68
|
+
wus: [],
|
|
69
|
+
events: [{ wuId: 'WU-999', type: 'claimed', timestamp: new Date().toISOString() }],
|
|
70
|
+
});
|
|
71
|
+
// Mock withMicroWorktree to track that it was called
|
|
72
|
+
const mockWithMicroWorktree = vi.mocked(withMicroWorktree);
|
|
73
|
+
mockWithMicroWorktree.mockImplementation(async (options) => {
|
|
74
|
+
// Execute the callback to simulate micro-worktree operations
|
|
75
|
+
const result = await options.execute({
|
|
76
|
+
worktreePath: testDir,
|
|
77
|
+
gitWorktree: {
|
|
78
|
+
add: vi.fn(),
|
|
79
|
+
addWithDeletions: vi.fn(),
|
|
80
|
+
commit: vi.fn(),
|
|
81
|
+
push: vi.fn(),
|
|
82
|
+
},
|
|
83
|
+
});
|
|
84
|
+
return { ...result, ref: 'main' };
|
|
85
|
+
});
|
|
86
|
+
// Import and run the fix function
|
|
87
|
+
const { createStateDoctorFixDeps } = await import('../state-doctor-fix.js');
|
|
88
|
+
const deps = createStateDoctorFixDeps(testDir);
|
|
89
|
+
// When: Attempt to remove a broken event
|
|
90
|
+
await deps.removeEvent('WU-999');
|
|
91
|
+
// Then: micro-worktree should have been used
|
|
92
|
+
expect(mockWithMicroWorktree).toHaveBeenCalled();
|
|
93
|
+
expect(mockWithMicroWorktree).toHaveBeenCalledWith(expect.objectContaining({
|
|
94
|
+
operation: 'state-doctor',
|
|
95
|
+
pushOnly: true,
|
|
96
|
+
}));
|
|
97
|
+
});
|
|
98
|
+
it('should not directly modify files on main when --fix is used', async () => {
|
|
99
|
+
// Setup: Create test state with events file
|
|
100
|
+
const eventsPath = join(testDir, LUMENFLOW_DIR, STATE_DIR, 'wu-events.jsonl');
|
|
101
|
+
setupTestState(testDir, {
|
|
102
|
+
wus: [],
|
|
103
|
+
events: [{ wuId: 'WU-999', type: 'claimed', timestamp: new Date().toISOString() }],
|
|
104
|
+
});
|
|
105
|
+
const originalContent = readFileSync(eventsPath, 'utf-8');
|
|
106
|
+
// Mock withMicroWorktree to NOT actually modify files (simulating push-only mode)
|
|
107
|
+
const mockWithMicroWorktree = vi.mocked(withMicroWorktree);
|
|
108
|
+
mockWithMicroWorktree.mockResolvedValue({
|
|
109
|
+
commitMessage: 'fix: remove broken events',
|
|
110
|
+
files: ['.lumenflow/state/wu-events.jsonl'],
|
|
111
|
+
ref: 'main',
|
|
112
|
+
});
|
|
113
|
+
// Import and run the fix function
|
|
114
|
+
const { createStateDoctorFixDeps } = await import('../state-doctor-fix.js');
|
|
115
|
+
const deps = createStateDoctorFixDeps(testDir);
|
|
116
|
+
// When: Remove broken event
|
|
117
|
+
await deps.removeEvent('WU-999');
|
|
118
|
+
// Then: Original file on main should be unchanged
|
|
119
|
+
// (changes only happen in micro-worktree and pushed)
|
|
120
|
+
const currentContent = readFileSync(eventsPath, 'utf-8');
|
|
121
|
+
expect(currentContent).toBe(originalContent);
|
|
122
|
+
});
|
|
123
|
+
});
|
|
124
|
+
describe('backlog.md and status.md cleanup', () => {
  // Installs a withMicroWorktree mock that runs execute() directly against
  // testDir (with no-op git operations) and records which files the fix
  // reported as modified. Shared by both tests below.
  function captureModifiedFiles() {
    const captured = { files: [] };
    vi.mocked(withMicroWorktree).mockImplementation(async (options) => {
      const result = await options.execute({
        worktreePath: testDir,
        gitWorktree: {
          add: vi.fn(),
          addWithDeletions: vi.fn(),
          commit: vi.fn(),
          push: vi.fn(),
        },
      });
      captured.files = result.files;
      return { ...result, ref: 'main' };
    });
    return captured;
  }

  it('should remove stale WU references from backlog.md when removing broken events', async () => {
    // Given: backlog.md mentions WU-999, whose only event is about to be removed.
    setupTestState(testDir, {
      wus: [],
      events: [{ wuId: 'WU-999', type: 'claimed', timestamp: new Date().toISOString() }],
      backlog: `# Backlog

## In Progress

- WU-999: Some old WU that no longer exists

## Ready

- WU-100: Valid WU
`,
    });
    const captured = captureModifiedFiles();
    // When: the broken event for WU-999 is removed.
    const { createStateDoctorFixDeps } = await import('../state-doctor-fix.js');
    await createStateDoctorFixDeps(testDir).removeEvent('WU-999');
    // Then: backlog.md is among the files rewritten in the micro-worktree.
    expect(captured.files).toContain(BACKLOG_PATH);
  });

  it('should remove stale WU references from status.md when removing broken events', async () => {
    // Given: status.md lists WU-999 in its In Progress table.
    setupTestState(testDir, {
      wus: [],
      events: [{ wuId: 'WU-999', type: 'claimed', timestamp: new Date().toISOString() }],
      status: `# Status

## In Progress

| Lane | WU | Title |
|------|-----|-------|
| Framework: CLI | WU-999 | Old WU |
`,
    });
    const captured = captureModifiedFiles();
    // When: the broken event for WU-999 is removed.
    const { createStateDoctorFixDeps } = await import('../state-doctor-fix.js');
    await createStateDoctorFixDeps(testDir).removeEvent('WU-999');
    // Then: status.md is among the files rewritten in the micro-worktree.
    expect(captured.files).toContain(STATUS_PATH);
  });
});
|
|
204
|
+
describe('commit and push behavior', () => {
  it('should use pushOnly mode to avoid modifying local main', async () => {
    // Given: a workspace with one broken event.
    setupTestState(testDir, {
      wus: [],
      events: [{ wuId: 'WU-999', type: 'claimed', timestamp: new Date().toISOString() }],
    });
    const worktreeMock = vi.mocked(withMicroWorktree);
    worktreeMock.mockImplementation(async (options) => {
      // Asserting inside the mock: the fix must request push-only mode so
      // local main is never rewritten directly.
      expect(options.pushOnly).toBe(true);
      const gitWorktree = {
        add: vi.fn(),
        addWithDeletions: vi.fn(),
        commit: vi.fn(),
        push: vi.fn(),
      };
      const outcome = await options.execute({ worktreePath: testDir, gitWorktree });
      return { ...outcome, ref: 'main' };
    });
    const { createStateDoctorFixDeps } = await import('../state-doctor-fix.js');
    await createStateDoctorFixDeps(testDir).removeEvent('WU-999');
    // If the expectation inside the mock had failed, we would not reach here.
    expect(worktreeMock).toHaveBeenCalled();
  });
});
|
|
232
|
+
});
|
|
233
|
+
/**
 * Seeds a LumenFlow workspace layout under `baseDir` for state-doctor tests.
 *
 * @param {string} baseDir - Root directory of the simulated repository.
 * @param {object} state - Fixture description. Supported keys (all optional):
 *   - events:  array of event objects  → .lumenflow/<STATE_DIR>/wu-events.jsonl
 *   - signals: array of signal objects → .lumenflow/<MEMORY_DIR>/signals.jsonl
 *   - wus:     array of { id, status, title? } → <DOCS_TASKS_DIR>/wu/<id>.yaml
 *   - backlog: string → markdown written to BACKLOG_PATH
 *   - status:  string → markdown written to STATUS_PATH
 * Missing or empty keys simply skip the corresponding file.
 */
function setupTestState(baseDir, state) {
  // One JSON object per line with a trailing newline (JSONL convention).
  const writeJsonl = (path, records) => {
    writeFileSync(path, records.map((r) => JSON.stringify(r)).join('\n') + '\n', 'utf-8');
  };
  // Create the directory skeleton the CLI expects.
  const dirs = [
    join(baseDir, LUMENFLOW_DIR, STATE_DIR),
    join(baseDir, LUMENFLOW_DIR, STAMPS_DIR),
    join(baseDir, LUMENFLOW_DIR, MEMORY_DIR),
    join(baseDir, DOCS_TASKS_DIR, 'wu'),
  ];
  for (const dir of dirs) {
    mkdirSync(dir, { recursive: true });
  }
  // Event log — only written when at least one event is supplied.
  if (state.events?.length) {
    writeJsonl(join(baseDir, LUMENFLOW_DIR, STATE_DIR, 'wu-events.jsonl'), state.events);
  }
  // Memory signals — same convention as the event log.
  if (state.signals?.length) {
    writeJsonl(join(baseDir, LUMENFLOW_DIR, MEMORY_DIR, 'signals.jsonl'), state.signals);
  }
  // One minimal YAML file per WU; title falls back to the id when absent.
  if (state.wus) {
    for (const wu of state.wus) {
      const wuPath = join(baseDir, DOCS_TASKS_DIR, 'wu', `${wu.id}.yaml`);
      const content = `id: ${wu.id}\nstatus: ${wu.status}\ntitle: ${wu.title || wu.id}\n`;
      writeFileSync(wuPath, content, 'utf-8');
    }
  }
  // Optional markdown fixtures.
  if (state.backlog) {
    writeFileSync(join(baseDir, BACKLOG_PATH), state.backlog, 'utf-8');
  }
  if (state.status) {
    writeFileSync(join(baseDir, STATUS_PATH), state.status, 'utf-8');
  }
}
|