@ricky-stevens/context-guardian 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +29 -0
- package/.claude-plugin/plugin.json +63 -0
- package/.github/workflows/ci.yml +66 -0
- package/CLAUDE.md +132 -0
- package/LICENSE +21 -0
- package/README.md +362 -0
- package/biome.json +34 -0
- package/bun.lock +31 -0
- package/hooks/precompact.mjs +73 -0
- package/hooks/session-start.mjs +133 -0
- package/hooks/stop.mjs +172 -0
- package/hooks/submit.mjs +133 -0
- package/lib/checkpoint.mjs +258 -0
- package/lib/compact-cli.mjs +124 -0
- package/lib/compact-output.mjs +350 -0
- package/lib/config.mjs +40 -0
- package/lib/content.mjs +33 -0
- package/lib/diagnostics.mjs +221 -0
- package/lib/estimate.mjs +254 -0
- package/lib/extract-helpers.mjs +869 -0
- package/lib/handoff.mjs +329 -0
- package/lib/logger.mjs +34 -0
- package/lib/mcp-tools.mjs +200 -0
- package/lib/paths.mjs +90 -0
- package/lib/stats.mjs +81 -0
- package/lib/statusline.mjs +123 -0
- package/lib/synthetic-session.mjs +273 -0
- package/lib/tokens.mjs +170 -0
- package/lib/tool-summary.mjs +399 -0
- package/lib/transcript.mjs +939 -0
- package/lib/trim.mjs +158 -0
- package/package.json +22 -0
- package/skills/compact/SKILL.md +20 -0
- package/skills/config/SKILL.md +70 -0
- package/skills/handoff/SKILL.md +26 -0
- package/skills/prune/SKILL.md +20 -0
- package/skills/stats/SKILL.md +100 -0
- package/sonar-project.properties +12 -0
- package/test/checkpoint.test.mjs +171 -0
- package/test/compact-cli.test.mjs +230 -0
- package/test/compact-output.test.mjs +284 -0
- package/test/compaction-e2e.test.mjs +809 -0
- package/test/content.test.mjs +86 -0
- package/test/diagnostics.test.mjs +188 -0
- package/test/edge-cases.test.mjs +543 -0
- package/test/estimate.test.mjs +262 -0
- package/test/extract-helpers-coverage.test.mjs +333 -0
- package/test/extract-helpers.test.mjs +234 -0
- package/test/handoff.test.mjs +738 -0
- package/test/integration.test.mjs +582 -0
- package/test/logger.test.mjs +70 -0
- package/test/manual-compaction-test.md +426 -0
- package/test/mcp-tools.test.mjs +443 -0
- package/test/paths.test.mjs +250 -0
- package/test/quick-compaction-test.md +191 -0
- package/test/stats.test.mjs +88 -0
- package/test/statusline.test.mjs +222 -0
- package/test/submit.test.mjs +232 -0
- package/test/synthetic-session.test.mjs +600 -0
- package/test/tokens.test.mjs +293 -0
- package/test/tool-summary.test.mjs +771 -0
- package/test/transcript-coverage.test.mjs +369 -0
- package/test/transcript.test.mjs +596 -0
- package/test/trim.test.mjs +356 -0
|
@@ -0,0 +1,543 @@
|
|
|
1
|
+
import assert from "node:assert/strict";
|
|
2
|
+
import { execFileSync } from "node:child_process";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import { afterEach, beforeEach, describe, it } from "node:test";
|
|
7
|
+
import { loadConfig, resolveMaxTokens } from "../lib/config.mjs";
|
|
8
|
+
import { log } from "../lib/logger.mjs";
|
|
9
|
+
import {
|
|
10
|
+
atomicWriteFileSync,
|
|
11
|
+
CHECKPOINTS_DIR,
|
|
12
|
+
ensureDataDir,
|
|
13
|
+
rotateCheckpoints,
|
|
14
|
+
stateFile,
|
|
15
|
+
} from "../lib/paths.mjs";
|
|
16
|
+
import { getTokenUsage } from "../lib/tokens.mjs";
|
|
17
|
+
import {
|
|
18
|
+
extractConversation,
|
|
19
|
+
extractRecent,
|
|
20
|
+
readTranscriptLines,
|
|
21
|
+
} from "../lib/transcript.mjs";
|
|
22
|
+
|
|
23
|
+
// Absolute path to the submit hook under test.
// NOTE(review): path.resolve() resolves against process.cwd(), so this
// assumes tests are launched from the package root — confirm with CI config.
const HOOK_PATH = path.resolve("hooks/submit.mjs");

// Per-test scratch directory; created fresh in beforeEach, removed in afterEach
// so tests never share filesystem state.
let tmpDir;

beforeEach(() => {
  tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "cg-edge-"));
});

afterEach(() => {
  // force: true makes removal best-effort even if a test already deleted files.
  fs.rmSync(tmpDir, { recursive: true, force: true });
});
|
|
34
|
+
|
|
35
|
+
// =========================================================================
// submit.mjs — stdin parse error (lines 25-29)
// =========================================================================
describe("submit hook — invalid stdin", () => {
  it("exits 0 with no output on invalid JSON stdin", () => {
    // execFileSync throws only when the child exits non-zero (or times out),
    // so capture stdout on success and assert AFTER the try/catch.
    // Fix: the stdout assertion previously lived inside the try, so its
    // AssertionError was swallowed by the catch and re-reported as a
    // confusing `e.status` mismatch (undefined !== 0).
    let stdout = "";
    try {
      stdout = execFileSync("node", [HOOK_PATH], {
        input: "NOT VALID JSON{{{",
        encoding: "utf8",
        timeout: 5000,
        env: { ...process.env, CLAUDE_PLUGIN_DATA: tmpDir },
      });
    } catch (e) {
      // Only genuine exec failures land here; the hook must still exit 0.
      assert.equal(e.status, 0);
      return;
    }
    assert.equal(stdout.trim(), "");
  });
});
|
|
53
|
+
|
|
54
|
+
// =========================================================================
// submit.mjs — corrupt config uses defaults (config.mjs lines 22-23)
// =========================================================================
describe("submit hook — corrupt config", () => {
  it("uses defaults when config.json is corrupt", () => {
    const projectDir = path.join(tmpDir, "proj-corrupt-cfg");
    const dataDir = path.join(tmpDir, "data-corrupt-cfg");
    fs.mkdirSync(path.join(projectDir, ".claude"), { recursive: true });
    fs.mkdirSync(dataDir, { recursive: true });

    // Deliberately unparseable config — the hook must fall back to defaults.
    fs.writeFileSync(path.join(dataDir, "config.json"), "NOT{JSON");

    // One assistant turn with usage data so the hook has something to read.
    const transcriptPath = path.join(tmpDir, "transcript-corrupt-cfg.jsonl");
    const assistantEvent = {
      type: "assistant",
      message: {
        role: "assistant",
        model: "claude-sonnet-4-20250514",
        content: [{ type: "text", text: "hi" }],
        usage: {
          input_tokens: 5,
          cache_creation_input_tokens: 0,
          cache_read_input_tokens: 0,
          output_tokens: 2,
        },
      },
    };
    fs.writeFileSync(transcriptPath, `${JSON.stringify(assistantEvent)}\n`);

    try {
      execFileSync("node", [HOOK_PATH], {
        input: JSON.stringify({
          session_id: "cfg-test",
          prompt: "hi",
          transcript_path: transcriptPath,
          cwd: projectDir,
        }),
        encoding: "utf8",
        timeout: 5000,
        env: { ...process.env, CLAUDE_PLUGIN_DATA: dataDir },
      });
    } catch (e) {
      if (e.status !== 0) throw e;
    }
    // Should write state file using defaults (didn't crash)
    assert.ok(fs.existsSync(path.join(dataDir, "state-cfg-test.json")));
  });
});
|
|
104
|
+
|
|
105
|
+
// =========================================================================
// transcript.mjs — large file tail read (lines 22-34)
// =========================================================================
describe("transcript — large file tail read", () => {
  it("reads full transcript under 50MB cap", () => {
    const tp = path.join(tmpDir, "large-transcript.jsonl");

    const oldLine = JSON.stringify({
      type: "user",
      message: {
        role: "user",
        content: "old message preserved under 50MB cap",
      },
    });
    const paddingLine = JSON.stringify({
      type: "system",
      message: { content: "x".repeat(150) },
    });
    const recentLine = JSON.stringify({
      type: "user",
      message: { role: "user", content: "this is the recent message" },
    });

    // 11MB file — well under the 50MB cap, so ALL content is preserved
    const padCount = Math.ceil((11 * 1024 * 1024) / (paddingLine.length + 1));
    const rows = [
      oldLine,
      ...Array.from({ length: padCount }, () => paddingLine),
      recentLine,
    ];
    fs.writeFileSync(tp, `${rows.join("\n")}\n`);

    const result = extractConversation(tp);
    assert.ok(result.includes("User: this is the recent message"));
    // Under the 50MB cap, old content is preserved (not tail-dropped)
    assert.ok(result.includes("old message preserved under 50MB cap"));
  });
});
|
|
144
|
+
|
|
145
|
+
// =========================================================================
// tokens.mjs — tiered read returns null for large file with no usage (line 43)
// =========================================================================
describe("tokens — tiered read exhaustion", () => {
  it("returns null for >32KB transcript with no usage data", () => {
    const tp = path.join(tmpDir, "large-no-usage.jsonl");
    // 60 identical ~500-byte user lines push the file past the 32KB tier.
    const row = JSON.stringify({
      type: "user",
      message: { role: "user", content: "x".repeat(500) },
    });
    const body = Array.from({ length: 60 }, () => row).join("\n");
    fs.writeFileSync(tp, `${body}\n`);

    assert.equal(getTokenUsage(tp), null);
  });
});
|
|
165
|
+
|
|
166
|
+
// =========================================================================
// transcript.mjs — extractRecent skill injection filter (line 194)
// =========================================================================
describe("extractRecent — skill injection filter", () => {
  it("filters known skill injection messages in extractRecent", () => {
    const tp = path.join(tmpDir, "recent-skill.jsonl");
    // New injection filter matches messages containing both "SKILL.md" and "plugin"
    const skillContent =
      "# Skill Title\n\nInstructions from SKILL.md for this plugin.\n\n## Step 1\n\nDo this.\n\n## Step 2\n\nDo that.\n\n" +
      "x".repeat(800);
    const records = [
      { type: "user", message: { role: "user", content: skillContent } },
      { type: "user", message: { role: "user", content: "real message" } },
    ];
    fs.writeFileSync(tp, records.map((r) => `${JSON.stringify(r)}\n`).join(""));

    const result = extractRecent(tp, 20);
    assert.ok(!result.includes("Skill Title"));
    assert.ok(result.includes("User: real message"));
  });
});
|
|
196
|
+
|
|
197
|
+
// =========================================================================
// transcript.mjs — extractRecent parse errors (lines 154-156)
// =========================================================================
describe("extractRecent — parse errors", () => {
  it("counts and reports parse errors", () => {
    const tp = path.join(tmpDir, "recent-errors.jsonl");
    // One valid user line, one unparseable line, one valid assistant line.
    const rows = [
      JSON.stringify({
        type: "user",
        message: { role: "user", content: "good" },
      }),
      "bad json line",
      JSON.stringify({
        type: "assistant",
        message: {
          role: "assistant",
          content: [{ type: "text", text: "response" }],
        },
      }),
    ];
    fs.writeFileSync(tp, `${rows.join("\n")}\n`);

    const result = extractRecent(tp, 20);
    assert.ok(result.includes("User: good"));
    assert.ok(result.includes("Asst: response"));
    assert.ok(result.includes("Warning: 1 transcript line(s)"));
  });
});
|
|
228
|
+
|
|
229
|
+
// =========================================================================
// transcript.mjs — extractRecent compact marker filter (line 145)
// =========================================================================
describe("extractRecent — compact marker filtering", () => {
  it("filters # Context Checkpoint markers", () => {
    const tp = path.join(tmpDir, "recent-checkpoint.jsonl");
    const checkpointMsg =
      "# Context Checkpoint (Smart Compact)\n> Created: 2026\n\nUser: old";
    const records = [
      { type: "user", message: { role: "user", content: checkpointMsg } },
      { type: "user", message: { role: "user", content: "real message" } },
    ];
    fs.writeFileSync(tp, records.map((r) => `${JSON.stringify(r)}\n`).join(""));

    const result = extractRecent(tp, 20);
    assert.ok(!result.includes("Context Checkpoint"));
    assert.ok(result.includes("User: real message"));
  });
});
|
|
259
|
+
|
|
260
|
+
// =========================================================================
// config.mjs — unit tests
// =========================================================================
describe("config — loadConfig and resolveMaxTokens", () => {
  it("loadConfig returns defaults", () => {
    const { threshold, max_tokens } = loadConfig();
    assert.equal(typeof threshold, "number");
    assert.equal(typeof max_tokens, "number");
  });

  it("resolveMaxTokens returns a positive number", () => {
    const maxTokens = resolveMaxTokens();
    assert.equal(typeof maxTokens, "number");
    assert.ok(maxTokens > 0);
  });
});
|
|
276
|
+
|
|
277
|
+
// =========================================================================
// tokens.mjs — tiered read: both tiers exhausted (line 43)
// Need a file >2MB with no usage data so both 32KB and 2MB tiers fail.
// =========================================================================
describe("tokens — both read tiers exhausted", () => {
  it("returns null for >2MB transcript with no usage data", () => {
    const tp = path.join(tmpDir, "huge-no-usage.jsonl");
    const row = JSON.stringify({
      type: "user",
      message: { role: "user", content: "x".repeat(2000) },
    });
    // ~2KB per line, need ~1100 lines for >2MB
    const body = Array.from({ length: 1100 }, () => row).join("\n");
    fs.writeFileSync(tp, `${body}\n`);

    assert.equal(getTokenUsage(tp), null);
  });
});
|
|
299
|
+
|
|
300
|
+
// =========================================================================
// paths.mjs — || fallback branches
// =========================================================================
describe("paths — fallback branches", () => {
  // Every falsy session id must fall back to the "unknown" state file name.
  const fallbackCases = [
    ["null", null],
    ["undefined", undefined],
    ["empty string", ""],
  ];
  for (const [label, sessionId] of fallbackCases) {
    it(`stateFile handles ${label} sessionId`, () => {
      assert.ok(stateFile(sessionId).includes("state-unknown.json"));
    });
  }

  it("stateFile handles valid sessionId", () => {
    assert.ok(stateFile("abc-123").includes("state-abc-123.json"));
  });

  it("ensureDataDir does not throw", () => {
    assert.doesNotThrow(() => ensureDataDir());
  });
});
|
|
329
|
+
|
|
330
|
+
// =========================================================================
// transcript.mjs — extractRecent: long heading message WITHOUT sub-headings
// Hits the || [] fallback when regex match returns null
// =========================================================================
describe("extractRecent — long heading no sub-headings", () => {
  it("keeps long heading message without sub-headings", () => {
    const tp = path.join(tmpDir, "recent-long-heading.jsonl");
    const longMsg = `# My Big Plan\n\n${"Some content without any sub-headings at all. ".repeat(25)}`;
    const record = {
      type: "user",
      message: { role: "user", content: longMsg },
    };
    fs.writeFileSync(tp, `${JSON.stringify(record)}\n`);

    const result = extractRecent(tp, 20);
    assert.ok(result.includes("# My Big Plan"));
  });
});
|
|
352
|
+
|
|
353
|
+
// =========================================================================
// transcript.mjs — extractRecent: assistant with string content (not array)
// =========================================================================
describe("extractRecent — assistant string content", () => {
  it("handles assistant message with string content", () => {
    const tp = path.join(tmpDir, "recent-string-content.jsonl");
    const records = [
      { type: "user", message: { role: "user", content: "hello there" } },
      {
        type: "assistant",
        message: { role: "assistant", content: "plain string response" },
      },
    ];
    fs.writeFileSync(tp, records.map((r) => `${JSON.stringify(r)}\n`).join(""));

    const result = extractRecent(tp, 20);
    assert.ok(result.includes("Asst: plain string response"));
  });
});
|
|
378
|
+
|
|
379
|
+
// =========================================================================
// transcript.mjs — extractConversation: assistant with string content
// =========================================================================
describe("extractConversation — assistant string content", () => {
  it("handles assistant message with string content", () => {
    const tp = path.join(tmpDir, "conv-string-content.jsonl");
    const records = [
      { type: "user", message: { role: "user", content: "hello" } },
      {
        type: "assistant",
        message: { role: "assistant", content: "plain string response" },
      },
    ];
    fs.writeFileSync(tp, records.map((r) => `${JSON.stringify(r)}\n`).join(""));

    const result = extractConversation(tp);
    assert.ok(result.includes("Asst: plain string response"));
  });
});
|
|
404
|
+
|
|
405
|
+
// =========================================================================
// paths.mjs — rotateCheckpoints
// =========================================================================
describe("paths — rotateCheckpoints", () => {
  it("does not crash when checkpoints dir does not exist", () => {
    assert.doesNotThrow(() => rotateCheckpoints(10));
  });
});
|
|
414
|
+
|
|
415
|
+
// =========================================================================
// logger.mjs — basic functionality
// =========================================================================
describe("logger", () => {
  it("log function works without error", () => {
    log("test message from edge-cases.test.mjs");
    assert.ok(true);
  });

  it("log function works on second call (logDirReady cache)", () => {
    log("first call");
    log("second call — should skip mkdir");
    assert.ok(true);
  });

  it("log rotates when file exceeds 5MB", () => {
    const logDir = path.join(os.homedir(), ".claude", "logs");
    const logFile = path.join(logDir, "cg.log");
    const rotated = `${logFile}.1`;
    // Fix: create the log directory explicitly. Previously this test relied
    // on an earlier log() call having created it, so writeFileSync below
    // threw ENOENT when the test ran in isolation (order dependency).
    fs.mkdirSync(logDir, { recursive: true });
    // Write >5MB to trigger rotation.
    // Fix: use an integer repeat count — 5.1 * 1024 * 1024 is fractional
    // and only worked because String.prototype.repeat truncates.
    const bigContent = "x".repeat(Math.ceil(5.1 * 1024 * 1024));
    fs.writeFileSync(logFile, bigContent);
    log("trigger rotation");
    // After rotation, the current log should be small (just our message)
    const size = fs.statSync(logFile).size;
    assert.ok(
      size < 1024 * 1024,
      `Log should be small after rotation, got ${size}`,
    );
    // Rotated file should exist
    assert.ok(fs.existsSync(rotated), "Rotated log file should exist");
    // Clean up
    try {
      fs.unlinkSync(rotated);
    } catch {}
  });
});
|
|
451
|
+
|
|
452
|
+
// =========================================================================
// paths.mjs — atomicWriteFileSync
// =========================================================================
describe("atomicWriteFileSync", () => {
  it("writes data atomically", () => {
    const dest = path.join(tmpDir, "atomic-test.txt");
    atomicWriteFileSync(dest, "hello world");
    assert.equal(fs.readFileSync(dest, "utf8"), "hello world");
  });

  it("overwrites existing file atomically", () => {
    const dest = path.join(tmpDir, "atomic-overwrite.txt");
    fs.writeFileSync(dest, "old content");
    atomicWriteFileSync(dest, "new content");
    assert.equal(fs.readFileSync(dest, "utf8"), "new content");
  });

  it("does not leave temp files on success", () => {
    const dest = path.join(tmpDir, "atomic-clean.txt");
    atomicWriteFileSync(dest, "data");
    const leftovers = fs
      .readdirSync(tmpDir)
      .filter((name) => name.endsWith(".tmp"));
    assert.equal(leftovers.length, 0, "No temp files should remain");
  });
});
|
|
476
|
+
|
|
477
|
+
// =========================================================================
// paths.mjs — rotateCheckpoints
// =========================================================================
describe("rotateCheckpoints", () => {
  it("keeps only maxKeep files", () => {
    const cpDir = CHECKPOINTS_DIR;
    fs.mkdirSync(cpDir, { recursive: true });
    // Create 15 checkpoint files, remembering exactly which names we
    // created so cleanup never touches pre-existing (real) checkpoints.
    const created = [];
    for (let i = 0; i < 15; i++) {
      const ts = `2026-01-${String(i + 1).padStart(2, "0")}T00-00-00`;
      const name = `session-${ts}-abc${i}.md`;
      fs.writeFileSync(path.join(cpDir, name), `checkpoint ${i}`);
      created.push(name);
    }
    try {
      rotateCheckpoints(10);
      const remaining = fs
        .readdirSync(cpDir)
        .filter((f) => f.startsWith("session-") && f.endsWith(".md"));
      assert.equal(remaining.length, 10, "Should keep only 10 checkpoints");
      // The newest 10 should remain (highest dates)
      assert.ok(
        remaining.some((f) => f.includes("2026-01-15")),
        "Newest should survive",
      );
      assert.ok(
        !remaining.some((f) => f.includes("2026-01-01")),
        "Oldest should be deleted",
      );
    } finally {
      // Fix: cleanup previously ran only when every assertion passed, and
      // it deleted whatever *remained* in the shared CHECKPOINTS_DIR —
      // including checkpoints this test did not create. Delete only our
      // own files, and always do so, even on assertion failure.
      for (const name of created) {
        try {
          fs.unlinkSync(path.join(cpDir, name));
        } catch {}
      }
    }
  });
});
|
|
514
|
+
|
|
515
|
+
// =========================================================================
// transcript.mjs — large file tiered read fallback
// =========================================================================
describe("readTranscriptLines — large file path", () => {
  it("reads tail of large transcript file", () => {
    const tp = path.join(tmpDir, "large-transcript.jsonl");
    // Write a moderately sized file to test readTranscriptLines
    const rows = Array.from({ length: 500 }, (_, i) =>
      JSON.stringify({
        type: "user",
        message: { role: "user", content: `message ${i}` },
      }),
    );
    fs.writeFileSync(tp, `${rows.join("\n")}\n`);

    const result = readTranscriptLines(tp);
    assert.ok(
      result.length >= 400,
      `Should read many lines, got ${result.length}`,
    );
    // Last line should be the most recent message
    assert.ok(
      result.at(-1).includes("message 499"),
      "Should include last message",
    );
  });
});