@lumenflow/cli 2.2.1 → 2.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +147 -57
- package/dist/__tests__/agent-log-issue.test.js +56 -0
- package/dist/__tests__/cli-entry-point.test.js +66 -17
- package/dist/__tests__/cli-subprocess.test.js +25 -0
- package/dist/__tests__/init.test.js +298 -0
- package/dist/__tests__/initiative-plan.test.js +340 -0
- package/dist/__tests__/mem-cleanup-execution.test.js +19 -0
- package/dist/__tests__/merge-block.test.js +220 -0
- package/dist/__tests__/release.test.js +28 -0
- package/dist/__tests__/safe-git.test.js +191 -0
- package/dist/__tests__/state-doctor.test.js +274 -0
- package/dist/__tests__/wu-done.test.js +36 -0
- package/dist/__tests__/wu-edit.test.js +119 -0
- package/dist/__tests__/wu-prep.test.js +108 -0
- package/dist/agent-issues-query.js +4 -3
- package/dist/agent-log-issue.js +25 -4
- package/dist/backlog-prune.js +5 -4
- package/dist/cli-entry-point.js +11 -1
- package/dist/doctor.js +368 -0
- package/dist/flow-bottlenecks.js +6 -5
- package/dist/flow-report.js +4 -3
- package/dist/gates.js +468 -116
- package/dist/guard-locked.js +4 -3
- package/dist/guard-worktree-commit.js +4 -3
- package/dist/init.js +508 -86
- package/dist/initiative-add-wu.js +4 -3
- package/dist/initiative-bulk-assign-wus.js +8 -5
- package/dist/initiative-create.js +73 -37
- package/dist/initiative-edit.js +37 -21
- package/dist/initiative-list.js +4 -3
- package/dist/initiative-plan.js +337 -0
- package/dist/initiative-status.js +4 -3
- package/dist/lane-health.js +377 -0
- package/dist/lane-suggest.js +382 -0
- package/dist/mem-checkpoint.js +2 -2
- package/dist/mem-cleanup.js +2 -2
- package/dist/mem-context.js +306 -0
- package/dist/mem-create.js +2 -2
- package/dist/mem-delete.js +293 -0
- package/dist/mem-inbox.js +2 -2
- package/dist/mem-index.js +211 -0
- package/dist/mem-init.js +1 -1
- package/dist/mem-profile.js +207 -0
- package/dist/mem-promote.js +254 -0
- package/dist/mem-ready.js +2 -2
- package/dist/mem-signal.js +2 -2
- package/dist/mem-start.js +2 -2
- package/dist/mem-summarize.js +2 -2
- package/dist/mem-triage.js +2 -2
- package/dist/merge-block.js +222 -0
- package/dist/metrics-cli.js +7 -4
- package/dist/metrics-snapshot.js +4 -3
- package/dist/orchestrate-initiative.js +10 -4
- package/dist/orchestrate-monitor.js +379 -31
- package/dist/signal-cleanup.js +296 -0
- package/dist/spawn-list.js +6 -5
- package/dist/state-bootstrap.js +5 -4
- package/dist/state-cleanup.js +360 -0
- package/dist/state-doctor-fix.js +196 -0
- package/dist/state-doctor.js +501 -0
- package/dist/validate-agent-skills.js +4 -3
- package/dist/validate-agent-sync.js +4 -3
- package/dist/validate-backlog-sync.js +7 -84
- package/dist/validate-skills-spec.js +4 -3
- package/dist/validate.js +7 -107
- package/dist/wu-block.js +3 -3
- package/dist/wu-claim.js +208 -98
- package/dist/wu-cleanup.js +5 -4
- package/dist/wu-create.js +71 -46
- package/dist/wu-delete.js +88 -60
- package/dist/wu-deps.js +6 -5
- package/dist/wu-done-check.js +34 -0
- package/dist/wu-done.js +60 -24
- package/dist/wu-edit.js +63 -28
- package/dist/wu-infer-lane.js +7 -6
- package/dist/wu-preflight.js +23 -81
- package/dist/wu-prep.js +125 -0
- package/dist/wu-prune.js +4 -3
- package/dist/wu-recover.js +88 -22
- package/dist/wu-repair.js +7 -6
- package/dist/wu-spawn.js +226 -270
- package/dist/wu-status.js +4 -3
- package/dist/wu-unblock.js +5 -5
- package/dist/wu-unlock-lane.js +4 -3
- package/dist/wu-validate.js +5 -4
- package/package.json +16 -7
- package/templates/core/.lumenflow/constraints.md.template +192 -0
- package/templates/core/.lumenflow/rules/git-safety.md.template +27 -0
- package/templates/core/.lumenflow/rules/wu-workflow.md.template +48 -0
- package/templates/core/AGENTS.md.template +60 -0
- package/templates/core/LUMENFLOW.md.template +255 -0
- package/templates/core/UPGRADING.md.template +121 -0
- package/templates/core/ai/onboarding/agent-safety-card.md.template +106 -0
- package/templates/core/ai/onboarding/first-wu-mistakes.md.template +198 -0
- package/templates/core/ai/onboarding/quick-ref-commands.md.template +186 -0
- package/templates/core/ai/onboarding/release-process.md.template +362 -0
- package/templates/core/ai/onboarding/troubleshooting-wu-done.md.template +159 -0
- package/templates/core/ai/onboarding/wu-create-checklist.md.template +117 -0
- package/templates/vendors/aider/.aider.conf.yml.template +27 -0
- package/templates/vendors/claude/.claude/CLAUDE.md.template +52 -0
- package/templates/vendors/claude/.claude/settings.json.template +49 -0
- package/templates/vendors/claude/.claude/skills/bug-classification/SKILL.md.template +192 -0
- package/templates/vendors/claude/.claude/skills/code-quality/SKILL.md.template +152 -0
- package/templates/vendors/claude/.claude/skills/context-management/SKILL.md.template +155 -0
- package/templates/vendors/claude/.claude/skills/execution-memory/SKILL.md.template +304 -0
- package/templates/vendors/claude/.claude/skills/frontend-design/SKILL.md.template +131 -0
- package/templates/vendors/claude/.claude/skills/initiative-management/SKILL.md.template +164 -0
- package/templates/vendors/claude/.claude/skills/library-first/SKILL.md.template +98 -0
- package/templates/vendors/claude/.claude/skills/lumenflow-gates/SKILL.md.template +87 -0
- package/templates/vendors/claude/.claude/skills/multi-agent-coordination/SKILL.md.template +84 -0
- package/templates/vendors/claude/.claude/skills/ops-maintenance/SKILL.md.template +254 -0
- package/templates/vendors/claude/.claude/skills/orchestration/SKILL.md.template +189 -0
- package/templates/vendors/claude/.claude/skills/tdd-workflow/SKILL.md.template +139 -0
- package/templates/vendors/claude/.claude/skills/worktree-discipline/SKILL.md.template +138 -0
- package/templates/vendors/claude/.claude/skills/wu-lifecycle/SKILL.md.template +106 -0
- package/templates/vendors/cline/.clinerules.template +53 -0
- package/templates/vendors/cursor/.cursor/rules/lumenflow.md.template +34 -0
- package/templates/vendors/cursor/.cursor/rules.md.template +28 -0
- package/templates/vendors/windsurf/.windsurf/rules/lumenflow.md.template +34 -0
package/dist/__tests__/merge-block.test.js
@@ -0,0 +1,220 @@
+/**
+ * @file merge-block.test.ts
+ * Tests for merge block functionality (WU-1171)
+ *
+ * Tests the LUMENFLOW:START/END block insertion and update logic
+ * that enables safe, idempotent merging of LumenFlow config into existing files.
+ */
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+import * as os from 'node:os';
+// Import the functions we'll implement
+import { detectLineEnding, extractMergeBlock, insertMergeBlock, updateMergeBlock, } from '../merge-block.js';
+describe('merge-block', () => {
+    let tempDir;
+    beforeEach(() => {
+        tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'merge-block-test-'));
+    });
+    afterEach(() => {
+        fs.rmSync(tempDir, { recursive: true, force: true });
+    });
+    describe('detectLineEnding', () => {
+        it('should detect LF line endings', () => {
+            const content = 'line1\nline2\nline3\n';
+            expect(detectLineEnding(content)).toBe('\n');
+        });
+        it('should detect CRLF line endings', () => {
+            const content = 'line1\r\nline2\r\nline3\r\n';
+            expect(detectLineEnding(content)).toBe('\r\n');
+        });
+        it('should default to LF for empty content', () => {
+            expect(detectLineEnding('')).toBe('\n');
+        });
+        it('should default to LF for content without line endings', () => {
+            expect(detectLineEnding('single line')).toBe('\n');
+        });
+        it('should use majority line ending when mixed', () => {
+            const content = 'line1\r\nline2\r\nline3\n';
+            // 2 CRLF vs 1 LF, should detect CRLF
+            expect(detectLineEnding(content)).toBe('\r\n');
+        });
+    });
+    describe('extractMergeBlock', () => {
+        it('should extract content between LUMENFLOW:START and LUMENFLOW:END markers', () => {
+            const content = `# My Project
+
+<!-- LUMENFLOW:START -->
+This is LumenFlow content.
+<!-- LUMENFLOW:END -->
+
+Other content
+`;
+            const result = extractMergeBlock(content);
+            expect(result.found).toBe(true);
+            expect(result.content).toBe('This is LumenFlow content.');
+            expect(result.startIndex).toBeGreaterThan(0);
+            expect(result.endIndex).toBeGreaterThan(result.startIndex);
+        });
+        it('should return not found when no markers exist', () => {
+            const content = '# My Project\n\nNo LumenFlow content here.';
+            const result = extractMergeBlock(content);
+            expect(result.found).toBe(false);
+            expect(result.content).toBeUndefined();
+        });
+        it('should handle malformed markers (only START)', () => {
+            const content = `# My Project
+
+<!-- LUMENFLOW:START -->
+Incomplete block
+`;
+            const result = extractMergeBlock(content);
+            expect(result.found).toBe(false);
+            expect(result.malformed).toBe(true);
+            expect(result.malformedReason).toBe('missing-end');
+        });
+        it('should handle malformed markers (only END)', () => {
+            const content = `# My Project
+
+Some content
+<!-- LUMENFLOW:END -->
+`;
+            const result = extractMergeBlock(content);
+            expect(result.found).toBe(false);
+            expect(result.malformed).toBe(true);
+            expect(result.malformedReason).toBe('missing-start');
+        });
+        it('should handle multiple START markers', () => {
+            const content = `<!-- LUMENFLOW:START -->
+First block
+<!-- LUMENFLOW:START -->
+Second start
+<!-- LUMENFLOW:END -->
+`;
+            const result = extractMergeBlock(content);
+            expect(result.malformed).toBe(true);
+            expect(result.malformedReason).toBe('multiple-start');
+        });
+    });
+    describe('insertMergeBlock', () => {
+        it('should append block to end of file', () => {
+            const originalContent = '# My Project\n\nExisting content.\n';
+            const blockContent = 'LumenFlow configuration goes here.';
+            const result = insertMergeBlock(originalContent, blockContent);
+            expect(result).toContain('<!-- LUMENFLOW:START -->');
+            expect(result).toContain(blockContent);
+            expect(result).toContain('<!-- LUMENFLOW:END -->');
+            expect(result.startsWith('# My Project')).toBe(true);
+        });
+        it('should preserve original line endings (LF)', () => {
+            const originalContent = '# My Project\nExisting content.\n';
+            const blockContent = 'New content';
+            const result = insertMergeBlock(originalContent, blockContent);
+            // Should not contain CRLF
+            expect(result).not.toContain('\r\n');
+            expect(result).toContain('\n');
+        });
+        it('should preserve original line endings (CRLF)', () => {
+            const originalContent = '# My Project\r\nExisting content.\r\n';
+            const blockContent = 'New content';
+            const result = insertMergeBlock(originalContent, blockContent);
+            // Should contain CRLF
+            expect(result).toContain('\r\n');
+        });
+        it('should add blank line before block if not present', () => {
+            const originalContent = '# My Project\nNo trailing newline';
+            const blockContent = 'New content';
+            const result = insertMergeBlock(originalContent, blockContent);
+            // Should have separation between original and block
+            expect(result).toMatch(/No trailing newline\n\n<!-- LUMENFLOW:START -->/);
+        });
+    });
+    describe('updateMergeBlock', () => {
+        it('should replace existing block content', () => {
+            const originalContent = `# My Project
+
+<!-- LUMENFLOW:START -->
+Old LumenFlow content.
+<!-- LUMENFLOW:END -->
+
+Other content
+`;
+            const newBlockContent = 'New LumenFlow content.';
+            const result = updateMergeBlock(originalContent, newBlockContent);
+            expect(result.content).toContain('New LumenFlow content.');
+            expect(result.content).not.toContain('Old LumenFlow content.');
+            expect(result.content).toContain('Other content');
+            expect(result.updated).toBe(true);
+        });
+        it('should preserve content before and after the block', () => {
+            const originalContent = `# Header
+
+Before the block.
+
+<!-- LUMENFLOW:START -->
+Old content
+<!-- LUMENFLOW:END -->
+
+After the block.
+`;
+            const newBlockContent = 'Updated content';
+            const result = updateMergeBlock(originalContent, newBlockContent);
+            expect(result.content).toContain('# Header');
+            expect(result.content).toContain('Before the block.');
+            expect(result.content).toContain('After the block.');
+        });
+        it('should preserve original line endings when updating', () => {
+            const originalContent = `# Project\r\n\r\n<!-- LUMENFLOW:START -->\r\nOld\r\n<!-- LUMENFLOW:END -->\r\n`;
+            const newBlockContent = 'New';
+            const result = updateMergeBlock(originalContent, newBlockContent);
+            // All line endings in result should be CRLF
+            const lfCount = (result.content.match(/(?<!\r)\n/g) || []).length;
+            expect(lfCount).toBe(0); // No standalone LF
+        });
+        it('should insert block when no existing block (append mode)', () => {
+            const originalContent = '# Project\n\nNo block here.\n';
+            const newBlockContent = 'New content';
+            const result = updateMergeBlock(originalContent, newBlockContent);
+            expect(result.content).toContain('<!-- LUMENFLOW:START -->');
+            expect(result.content).toContain(newBlockContent);
+            expect(result.updated).toBe(true);
+            expect(result.wasInserted).toBe(true);
+        });
+        it('should warn and append fresh block on malformed markers', () => {
+            const originalContent = `# Project
+
+<!-- LUMENFLOW:START -->
+Incomplete block without end marker
+`;
+            const newBlockContent = 'Fresh content';
+            const result = updateMergeBlock(originalContent, newBlockContent);
+            expect(result.warning).toContain('malformed');
+            expect(result.content).toContain('<!-- LUMENFLOW:START -->');
+            expect(result.content).toContain('Fresh content');
+            expect(result.content).toContain('<!-- LUMENFLOW:END -->');
+        });
+    });
+    describe('idempotency', () => {
+        it('should produce identical output when run twice with same input', () => {
+            const originalContent = '# My Project\n\nSome content.\n';
+            const blockContent = 'LumenFlow configuration';
+            // First merge
+            const firstResult = updateMergeBlock(originalContent, blockContent);
+            // Second merge with same block content
+            const secondResult = updateMergeBlock(firstResult.content, blockContent);
+            expect(firstResult.content).toBe(secondResult.content);
+        });
+        it('should not modify file when block content is unchanged', () => {
+            const existingContent = `# Project
+
+<!-- LUMENFLOW:START -->
+Same content
+<!-- LUMENFLOW:END -->
+`;
+            const blockContent = 'Same content';
+            const result = updateMergeBlock(existingContent, blockContent);
+            expect(result.unchanged).toBe(true);
+            expect(result.content).toBe(existingContent);
+        });
+    });
+});
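
Note: the tests above pin down the merge-block API (detectLineEnding, extractMergeBlock, insertMergeBlock, updateMergeBlock) but the implementation in dist/merge-block.js is not shown in this diff. As a rough illustration of the behaviour the detectLineEnding tests describe, a minimal sketch consistent with them might look like the following; the published implementation may well differ.

// Sketch only: count CRLF vs lone-LF occurrences and return the majority,
// defaulting to '\n' when the content has no line endings at all.
export function detectLineEnding(content) {
    const crlf = (content.match(/\r\n/g) || []).length;
    const lf = (content.match(/(?<!\r)\n/g) || []).length;
    if (crlf === 0 && lf === 0) return '\n';
    return crlf >= lf ? '\r\n' : '\n';
}

Returning the majority ending (rather than the first one seen) is what makes the mixed-endings test above pass deterministically.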
package/dist/__tests__/release.test.js
@@ -146,11 +146,39 @@ describe('release command integration', () => {
     describe('WU-1077: release script bug fixes', () => {
         describe('hasNpmAuth - ~/.npmrc detection', () => {
             let testDir;
+            let originalUserConfig;
+            let originalNpmToken;
+            let originalNodeAuthToken;
             beforeEach(() => {
                 testDir = join(tmpdir(), `release-npmrc-test-${Date.now()}`);
                 mkdirSync(testDir, { recursive: true });
+                originalUserConfig = process.env.NPM_CONFIG_USERCONFIG;
+                originalNpmToken = process.env.NPM_TOKEN;
+                originalNodeAuthToken = process.env.NODE_AUTH_TOKEN;
+                process.env.NPM_CONFIG_USERCONFIG = join(testDir, 'user.npmrc');
+                writeFileSync(process.env.NPM_CONFIG_USERCONFIG, '');
+                delete process.env.NPM_TOKEN;
+                delete process.env.NODE_AUTH_TOKEN;
             });
             afterEach(() => {
+                if (originalUserConfig === undefined) {
+                    delete process.env.NPM_CONFIG_USERCONFIG;
+                }
+                else {
+                    process.env.NPM_CONFIG_USERCONFIG = originalUserConfig;
+                }
+                if (originalNpmToken === undefined) {
+                    delete process.env.NPM_TOKEN;
+                }
+                else {
+                    process.env.NPM_TOKEN = originalNpmToken;
+                }
+                if (originalNodeAuthToken === undefined) {
+                    delete process.env.NODE_AUTH_TOKEN;
+                }
+                else {
+                    process.env.NODE_AUTH_TOKEN = originalNodeAuthToken;
+                }
                 rmSync(testDir, { recursive: true, force: true });
             });
             it('should detect auth from ~/.npmrc authToken line', async () => {
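
Note: the added beforeEach/afterEach lines isolate the hasNpmAuth checks from the developer's real ~/.npmrc and token environment by pointing NPM_CONFIG_USERCONFIG at an empty temp file, clearing NPM_TOKEN and NODE_AUTH_TOKEN, and restoring the originals afterwards. The hasNpmAuth implementation itself is not part of this diff; a hypothetical version consistent with that test setup could look like the sketch below (names and the exact npmrc parsing are assumptions).

// Sketch only: auth is "present" if a token env var is set or the effective
// user npmrc contains an _authToken line. dist/release.js may differ.
import { existsSync, readFileSync } from 'node:fs';
import { join } from 'node:path';
import { homedir } from 'node:os';

export function hasNpmAuth() {
    if (process.env.NPM_TOKEN || process.env.NODE_AUTH_TOKEN) return true;
    const npmrcPath = process.env.NPM_CONFIG_USERCONFIG || join(homedir(), '.npmrc');
    if (!existsSync(npmrcPath)) return false;
    return readFileSync(npmrcPath, 'utf-8')
        .split('\n')
        .some((line) => line.includes('_authToken='));
}

Under this shape, the empty temp npmrc plus cleared env vars from the new beforeEach guarantees a false result until a test writes an authToken line itself.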
package/dist/__tests__/safe-git.test.js
@@ -0,0 +1,191 @@
+/* eslint-disable sonarjs/no-os-command-from-path -- Test file needs to execute git commands */
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { execFileSync } from 'node:child_process';
+import path from 'node:path';
+import fs from 'node:fs';
+import os from 'node:os';
+const CLI_SAFE_GIT_PATH = path.resolve(__dirname, '../../bin/safe-git');
+const SCRIPTS_SAFE_GIT_PATH = path.resolve(__dirname, '../../../../../scripts/safe-git');
+// Constants for duplicate strings
+const SHOULD_HAVE_THROWN = 'Should have thrown an error';
+const GIT_CMD = 'git';
+const USER_EMAIL_CONFIG = 'user.email';
+const USER_NAME_CONFIG = 'user.name';
+const TEST_EMAIL = 'test@test.com';
+const TEST_USERNAME = 'Test';
+const FORCE_BYPASSES_LOG = 'force-bypasses.log';
+// Create a temporary directory for testing to avoid polluting the real .beacon directory
+const createTempDir = () => {
+    return fs.mkdtempSync(path.join(os.tmpdir(), 'safe-git-test-'));
+};
+describe('safe-git', () => {
+    // We mock child_process execution where possible, but for integration testing a script
+    // we often execute it directly. Since safe-git is a shell script, we executed it.
+    it('should fail when running "worktree remove" (CLI wrapper)', () => {
+        try {
+            execFileSync(CLI_SAFE_GIT_PATH, ['worktree', 'remove', 'some-path'], { stdio: 'pipe' });
+            expect.fail(SHOULD_HAVE_THROWN);
+        }
+        catch (error) {
+            const err = error;
+            expect(err.status).toBe(1);
+            expect(err.stderr.toString()).toContain("BLOCKED: Manual 'git worktree remove' is unsafe");
+        }
+    });
+    it('should fail when running "worktree remove" (scripts wrapper)', () => {
+        try {
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['worktree', 'remove', 'some-path'], { stdio: 'pipe' });
+            expect.fail(SHOULD_HAVE_THROWN);
+        }
+        catch (error) {
+            const err = error;
+            expect(err.status).toBe(1);
+            expect(err.stderr.toString()).toContain('Manual');
+            expect(err.stderr.toString()).toContain('worktree remove');
+        }
+    });
+    it('should fail when running "reset --hard" (scripts wrapper)', () => {
+        try {
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], { stdio: 'pipe' });
+            expect.fail(SHOULD_HAVE_THROWN);
+        }
+        catch (error) {
+            const err = error;
+            expect(err.status).toBe(1);
+            expect(err.stderr.toString()).toContain('reset --hard');
+        }
+    });
+    it('should fail when running "clean -fd" (scripts wrapper)', () => {
+        try {
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['clean', '-fd'], { stdio: 'pipe' });
+            expect.fail(SHOULD_HAVE_THROWN);
+        }
+        catch (error) {
+            const err = error;
+            expect(err.status).toBe(1);
+            expect(err.stderr.toString()).toContain('clean -fd');
+        }
+    });
+    it('should fail when running "push --force" (scripts wrapper)', () => {
+        try {
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['push', '--force'], { stdio: 'pipe' });
+            expect.fail(SHOULD_HAVE_THROWN);
+        }
+        catch (error) {
+            const err = error;
+            expect(err.status).toBe(1);
+            expect(err.stderr.toString()).toContain('push --force');
+        }
+    });
+    it('should pass through safe commands', () => {
+        // We verify it calls git by mocking git or checking output.
+        // Since we can't easily mock the system git in a real shell script execution without PATH manip,
+        // we'll check that it runs git --version correctly.
+        const output = execFileSync(CLI_SAFE_GIT_PATH, ['--version'], { encoding: 'utf-8' });
+        expect(output).toContain('git version');
+    });
+    describe('LUMENFLOW_FORCE bypass', () => {
+        let tempDir;
+        beforeEach(() => {
+            tempDir = createTempDir();
+        });
+        afterEach(() => {
+            // Clean up temp directory
+            fs.rmSync(tempDir, { recursive: true, force: true });
+        });
+        it('should bypass blocked commands when LUMENFLOW_FORCE=1', () => {
+            // Using git --version as a safe test with force flag
+            // The key is that the env var should be respected and not block
+            const output = execFileSync(SCRIPTS_SAFE_GIT_PATH, ['--version'], {
+                encoding: 'utf-8',
+                env: { ...process.env, LUMENFLOW_FORCE: '1' },
+            });
+            expect(output).toContain('git version');
+        });
+        it('should log bypass to force-bypasses.log when LUMENFLOW_FORCE=1', () => {
+            // We need to test that a blocked command, when forced, writes to the audit log
+            // Since reset --hard is dangerous, we use a mock approach
+            // The script should create the audit log entry before executing
+            // Create a temporary git repo for this test
+            const testRepo = path.join(tempDir, 'test-repo');
+            fs.mkdirSync(testRepo, { recursive: true });
+            execFileSync(GIT_CMD, ['init'], { cwd: testRepo, stdio: 'pipe' });
+            execFileSync(GIT_CMD, ['config', USER_EMAIL_CONFIG, TEST_EMAIL], {
+                cwd: testRepo,
+                stdio: 'pipe',
+            });
+            execFileSync(GIT_CMD, ['config', USER_NAME_CONFIG, TEST_USERNAME], {
+                cwd: testRepo,
+                stdio: 'pipe',
+            });
+            // Create a file and commit
+            fs.writeFileSync(path.join(testRepo, 'test.txt'), 'test');
+            execFileSync(GIT_CMD, ['add', '.'], { cwd: testRepo, stdio: 'pipe' });
+            execFileSync(GIT_CMD, ['commit', '-m', 'init'], { cwd: testRepo, stdio: 'pipe' });
+            // Run safe-git with force to reset --hard (should succeed with log)
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], {
+                cwd: testRepo,
+                encoding: 'utf-8',
+                env: { ...process.env, LUMENFLOW_FORCE: '1' },
+            });
+            // Check that the force bypass log exists and contains the entry
+            const bypassLog = path.join(testRepo, '.beacon', FORCE_BYPASSES_LOG);
+            expect(fs.existsSync(bypassLog)).toBe(true);
+            const logContent = fs.readFileSync(bypassLog, 'utf-8');
+            expect(logContent).toContain('reset --hard');
+            expect(logContent).toContain('BYPASSED');
+        });
+        it('should include LUMENFLOW_FORCE_REASON in audit log when provided', () => {
+            const testRepo = path.join(tempDir, 'test-repo-reason');
+            fs.mkdirSync(testRepo, { recursive: true });
+            execFileSync(GIT_CMD, ['init'], { cwd: testRepo, stdio: 'pipe' });
+            execFileSync(GIT_CMD, ['config', USER_EMAIL_CONFIG, TEST_EMAIL], {
+                cwd: testRepo,
+                stdio: 'pipe',
+            });
+            execFileSync(GIT_CMD, ['config', USER_NAME_CONFIG, TEST_USERNAME], {
+                cwd: testRepo,
+                stdio: 'pipe',
+            });
+            fs.writeFileSync(path.join(testRepo, 'test.txt'), 'test');
+            execFileSync(GIT_CMD, ['add', '.'], { cwd: testRepo, stdio: 'pipe' });
+            execFileSync(GIT_CMD, ['commit', '-m', 'init'], { cwd: testRepo, stdio: 'pipe' });
+            const testReason = 'user-approved: testing bypass';
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], {
+                cwd: testRepo,
+                encoding: 'utf-8',
+                env: { ...process.env, LUMENFLOW_FORCE: '1', LUMENFLOW_FORCE_REASON: testReason },
+            });
+            const bypassLog = path.join(testRepo, '.beacon', FORCE_BYPASSES_LOG);
+            const logContent = fs.readFileSync(bypassLog, 'utf-8');
+            expect(logContent).toContain(testReason);
+        });
+        it('should print warning when LUMENFLOW_FORCE used without REASON', () => {
+            const testRepo = path.join(tempDir, 'test-repo-no-reason');
+            fs.mkdirSync(testRepo, { recursive: true });
+            execFileSync(GIT_CMD, ['init'], { cwd: testRepo, stdio: 'pipe' });
+            execFileSync(GIT_CMD, ['config', USER_EMAIL_CONFIG, TEST_EMAIL], {
+                cwd: testRepo,
+                stdio: 'pipe',
+            });
+            execFileSync(GIT_CMD, ['config', USER_NAME_CONFIG, TEST_USERNAME], {
+                cwd: testRepo,
+                stdio: 'pipe',
+            });
+            fs.writeFileSync(path.join(testRepo, 'test.txt'), 'test');
+            execFileSync(GIT_CMD, ['add', '.'], { cwd: testRepo, stdio: 'pipe' });
+            execFileSync(GIT_CMD, ['commit', '-m', 'init'], { cwd: testRepo, stdio: 'pipe' });
+            // Execute with LUMENFLOW_FORCE=1 but no reason
+            execFileSync(SCRIPTS_SAFE_GIT_PATH, ['reset', '--hard', 'HEAD'], {
+                cwd: testRepo,
+                encoding: 'utf-8',
+                env: { ...process.env, LUMENFLOW_FORCE: '1' },
+                stdio: ['pipe', 'pipe', 'pipe'],
+            });
+            // Check the bypasslog for the NO_REASON marker
+            const bypassLog = path.join(testRepo, '.beacon', FORCE_BYPASSES_LOG);
+            const logContent = fs.readFileSync(bypassLog, 'utf-8');
+            expect(logContent).toContain('NO_REASON');
+        });
+    });
+});
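
Note: safe-git itself ships as a shell wrapper (bin/safe-git and scripts/safe-git) that this diff does not include. The sketch below restates, in Node form purely for illustration, the decision flow the tests above exercise: block destructive git invocations, honour a LUMENFLOW_FORCE=1 bypass, and append an audit entry (with LUMENFLOW_FORCE_REASON, or NO_REASON when absent) to .beacon/force-bypasses.log. The function name, blocked-pattern list, and exact messages are assumptions, not the shipped script.

// Sketch only: illustrative equivalent of the guard the safe-git tests exercise.
import { execFileSync } from 'node:child_process';
import { appendFileSync, mkdirSync } from 'node:fs';
import { join } from 'node:path';

const BLOCKED = [/^worktree remove/, /^reset --hard/, /^clean -fd/, /^push --force/];

export function safeGit(args, cwd = process.cwd()) {
    const command = args.join(' ');
    const isBlocked = BLOCKED.some((pattern) => pattern.test(command));
    if (isBlocked && process.env.LUMENFLOW_FORCE !== '1') {
        // Mirrors the BLOCKED error the tests assert on stderr.
        throw new Error(`BLOCKED: Manual 'git ${command}' is unsafe`);
    }
    if (isBlocked) {
        // Forced bypasses are audited; a missing reason is recorded as NO_REASON.
        const reason = process.env.LUMENFLOW_FORCE_REASON || 'NO_REASON';
        mkdirSync(join(cwd, '.beacon'), { recursive: true });
        appendFileSync(
            join(cwd, '.beacon', 'force-bypasses.log'),
            `${new Date().toISOString()} BYPASSED: git ${command} (${reason})\n`,
        );
    }
    return execFileSync('git', args, { cwd, encoding: 'utf-8' });
}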