@tracemarketplace/cli 0.0.15 → 0.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api-client.d.ts +7 -0
- package/dist/api-client.d.ts.map +1 -1
- package/dist/api-client.js +79 -14
- package/dist/api-client.js.map +1 -1
- package/dist/api-client.test.d.ts +2 -0
- package/dist/api-client.test.d.ts.map +1 -0
- package/dist/api-client.test.js +34 -0
- package/dist/api-client.test.js.map +1 -0
- package/dist/cli.js +9 -8
- package/dist/cli.js.map +1 -1
- package/dist/commands/daemon.d.ts.map +1 -1
- package/dist/commands/daemon.js +71 -6
- package/dist/commands/daemon.js.map +1 -1
- package/dist/commands/remove-daemon.d.ts +6 -0
- package/dist/commands/remove-daemon.d.ts.map +1 -0
- package/dist/commands/remove-daemon.js +66 -0
- package/dist/commands/remove-daemon.js.map +1 -0
- package/dist/commands/submit.d.ts +1 -0
- package/dist/commands/submit.d.ts.map +1 -1
- package/dist/commands/submit.js +25 -15
- package/dist/commands/submit.js.map +1 -1
- package/dist/config.d.ts +5 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +12 -0
- package/dist/config.js.map +1 -1
- package/dist/flush.d.ts +5 -1
- package/dist/flush.d.ts.map +1 -1
- package/dist/flush.js +107 -27
- package/dist/flush.js.map +1 -1
- package/dist/flush.test.js +162 -7
- package/dist/flush.test.js.map +1 -1
- package/package.json +2 -2
- package/src/api-client.test.ts +47 -0
- package/src/api-client.ts +98 -14
- package/src/cli.ts +10 -9
- package/src/commands/daemon.ts +82 -6
- package/src/commands/remove-daemon.ts +75 -0
- package/src/commands/submit.ts +28 -18
- package/src/config.ts +18 -0
- package/src/flush.test.ts +187 -6
- package/src/flush.ts +140 -39
- package/src/commands/register.ts +0 -8
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { existsSync, readFileSync, unlinkSync } from "fs";
|
|
2
|
+
import chalk from "chalk";
|
|
3
|
+
import { DEFAULT_PROFILE } from "../constants.js";
|
|
4
|
+
import { getDaemonPidPath, resolveProfile } from "../config.js";
|
|
5
|
+
|
|
6
|
+
interface RemoveDaemonOptions {
|
|
7
|
+
profile?: string;
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
function readDaemonPid(profile: string): number | null {
|
|
11
|
+
const pidPath = getDaemonPidPath(profile);
|
|
12
|
+
if (!existsSync(pidPath)) return null;
|
|
13
|
+
|
|
14
|
+
try {
|
|
15
|
+
const parsed = Number.parseInt(readFileSync(pidPath, "utf-8").trim(), 10);
|
|
16
|
+
return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
|
|
17
|
+
} catch {
|
|
18
|
+
return null;
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
function isProcessRunning(pid: number): boolean {
|
|
23
|
+
try {
|
|
24
|
+
process.kill(pid, 0);
|
|
25
|
+
return true;
|
|
26
|
+
} catch (err) {
|
|
27
|
+
return (err as NodeJS.ErrnoException).code === "EPERM";
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
function removePidFile(profile: string) {
|
|
32
|
+
const pidPath = getDaemonPidPath(profile);
|
|
33
|
+
if (!existsSync(pidPath)) return;
|
|
34
|
+
try {
|
|
35
|
+
unlinkSync(pidPath);
|
|
36
|
+
} catch {}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
async function waitForExit(pid: number, timeoutMs = 3000): Promise<boolean> {
|
|
40
|
+
const deadline = Date.now() + timeoutMs;
|
|
41
|
+
while (Date.now() < deadline) {
|
|
42
|
+
if (!isProcessRunning(pid)) {
|
|
43
|
+
return true;
|
|
44
|
+
}
|
|
45
|
+
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
46
|
+
}
|
|
47
|
+
return !isProcessRunning(pid);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export async function removeDaemonCommand(opts: RemoveDaemonOptions = {}): Promise<void> {
|
|
51
|
+
const profile = resolveProfile(opts.profile);
|
|
52
|
+
const pid = readDaemonPid(profile);
|
|
53
|
+
const profileLabel = profile === DEFAULT_PROFILE ? "default profile" : `profile '${profile}'`;
|
|
54
|
+
|
|
55
|
+
if (pid === null) {
|
|
56
|
+
console.log(chalk.gray(`No tracemp daemon found for ${profileLabel}.`));
|
|
57
|
+
return;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
if (!isProcessRunning(pid)) {
|
|
61
|
+
removePidFile(profile);
|
|
62
|
+
console.log(chalk.gray(`Removed stale daemon PID file for ${profileLabel}.`));
|
|
63
|
+
return;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
process.kill(pid, "SIGTERM");
|
|
67
|
+
|
|
68
|
+
if (await waitForExit(pid)) {
|
|
69
|
+
removePidFile(profile);
|
|
70
|
+
console.log(chalk.green(`✓ Stopped tracemp daemon for ${profileLabel}`));
|
|
71
|
+
return;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
console.log(chalk.yellow(`Sent SIGTERM to tracemp daemon for ${profileLabel} (pid ${pid}), but it is still running.`));
|
|
75
|
+
}
|
package/src/commands/submit.ts
CHANGED
|
@@ -23,6 +23,7 @@ interface SubmitOptions {
|
|
|
23
23
|
session?: string;
|
|
24
24
|
dryRun?: boolean;
|
|
25
25
|
since?: string;
|
|
26
|
+
sync?: boolean;
|
|
26
27
|
}
|
|
27
28
|
|
|
28
29
|
interface DiscoveredSession {
|
|
@@ -268,33 +269,42 @@ export async function submitCommand(opts: SubmitOptions): Promise<void> {
|
|
|
268
269
|
return;
|
|
269
270
|
}
|
|
270
271
|
|
|
271
|
-
const uploadSpinner = ora(
|
|
272
|
+
const uploadSpinner = ora(
|
|
273
|
+
opts.sync
|
|
274
|
+
? `Submitting ${readyChunkCount} finalized chunk(s) to ${config.profile}...`
|
|
275
|
+
: `Queuing ${readyChunkCount} finalized chunk(s) to ${config.profile}...`
|
|
276
|
+
).start();
|
|
272
277
|
|
|
273
278
|
try {
|
|
279
|
+
const readySessions = plannedSessions.filter((session) => session.readyChunks > 0);
|
|
280
|
+
const prefetchedTraces = new Map(readySessions.map((s) => [`${s.source.tool}:${s.source.locator}`, s.trace]));
|
|
274
281
|
const result = await flushTrackedSessions(
|
|
275
282
|
config,
|
|
276
|
-
|
|
277
|
-
{ includeIdleTracked: false }
|
|
283
|
+
readySessions.map((session) => session.source),
|
|
284
|
+
{ includeIdleTracked: false, prefetchedTraces, sync: opts.sync }
|
|
278
285
|
);
|
|
279
286
|
|
|
280
287
|
uploadSpinner.stop();
|
|
281
288
|
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
289
|
+
if (opts.sync) {
|
|
290
|
+
const failedSessions = result.results.filter((session) => session.error && session.error !== "Empty session");
|
|
291
|
+
console.log(chalk.green("\nSubmission complete!"));
|
|
292
|
+
console.log(` Uploaded chunks: ${chalk.bold(result.uploadedChunks)}`);
|
|
293
|
+
console.log(` Duplicate chunks: ${chalk.gray(result.duplicateChunks)}`);
|
|
294
|
+
console.log(` Pending sessions: ${result.pendingSessions}`);
|
|
295
|
+
console.log(` Payout: ${chalk.green("$" + (result.payoutCents / 100).toFixed(2))}`);
|
|
296
|
+
if (failedSessions.length > 0) {
|
|
297
|
+
console.log(chalk.yellow(`\n${failedSessions.length} session(s) failed during submit:`));
|
|
298
|
+
failedSessions.slice(0, 3).forEach((session) => {
|
|
299
|
+
console.log(chalk.gray(` ${session.source.label}: ${session.error}`));
|
|
300
|
+
});
|
|
301
|
+
if (failedSessions.length > 3) {
|
|
302
|
+
console.log(chalk.gray(` ...and ${failedSessions.length - 3} more`));
|
|
303
|
+
}
|
|
297
304
|
}
|
|
305
|
+
} else {
|
|
306
|
+
console.log(chalk.green(`\nQueued ${readyChunkCount} chunk(s) — processing in background.`));
|
|
307
|
+
console.log(chalk.gray(" Run again to check confirmation status."));
|
|
298
308
|
}
|
|
299
309
|
} catch (e) {
|
|
300
310
|
uploadSpinner.fail("Submission failed");
|
package/src/config.ts
CHANGED
|
@@ -29,6 +29,10 @@ export interface SessionUploadState {
|
|
|
29
29
|
lastSeenTurnCount: number;
|
|
30
30
|
lastActivityAt: string | null;
|
|
31
31
|
lastFlushedTurnId: string | null;
|
|
32
|
+
// Async confirmation tracking
|
|
33
|
+
confirmedChunkIndex: number; // all chunks below this index are confirmed in DB
|
|
34
|
+
confirmedOpenChunkStartTurn: number; // openChunkStartTurn to restore if re-submitting
|
|
35
|
+
unconfirmedSince: string | null; // ISO timestamp when chunks first went unconfirmed
|
|
32
36
|
}
|
|
33
37
|
|
|
34
38
|
export interface SubmitState {
|
|
@@ -60,6 +64,10 @@ export function getDaemonStatePath(profile?: string): string {
|
|
|
60
64
|
return join(getConfigDir(), `daemon-state${profileSuffix(profile)}.json`);
|
|
61
65
|
}
|
|
62
66
|
|
|
67
|
+
export function getDaemonPidPath(profile?: string): string {
|
|
68
|
+
return join(getConfigDir(), `daemon${profileSuffix(profile)}.pid`);
|
|
69
|
+
}
|
|
70
|
+
|
|
63
71
|
export function resolveProfile(profile?: string): string {
|
|
64
72
|
if (profile) return normalizeProfile(profile);
|
|
65
73
|
if (process.env.TRACEMP_PROFILE) return normalizeProfile(process.env.TRACEMP_PROFILE);
|
|
@@ -261,3 +269,13 @@ function isSessionUploadState(value: unknown): value is SessionUploadState {
|
|
|
261
269
|
&& (value.lastActivityAt === null || typeof value.lastActivityAt === "string")
|
|
262
270
|
&& (value.lastFlushedTurnId === null || typeof value.lastFlushedTurnId === "string");
|
|
263
271
|
}
|
|
272
|
+
|
|
273
|
+
export function migrateSessionUploadState(value: SessionUploadState): SessionUploadState {
|
|
274
|
+
return {
|
|
275
|
+
...value,
|
|
276
|
+
// Backward compat: existing sessions assume all submitted chunks are confirmed
|
|
277
|
+
confirmedChunkIndex: value.confirmedChunkIndex ?? value.nextChunkIndex,
|
|
278
|
+
confirmedOpenChunkStartTurn: value.confirmedOpenChunkStartTurn ?? value.openChunkStartTurn,
|
|
279
|
+
unconfirmedSince: value.unconfirmedSince ?? null,
|
|
280
|
+
};
|
|
281
|
+
}
|
package/src/flush.test.ts
CHANGED
|
@@ -1,12 +1,15 @@
|
|
|
1
1
|
import { describe, expect, it } from "vitest";
|
|
2
2
|
import type { NormalizedTrace, Turn } from "@tracemarketplace/shared";
|
|
3
3
|
import type { SessionSource } from "./flush.js";
|
|
4
|
+
import type { SessionUploadState } from "./config.js";
|
|
4
5
|
import {
|
|
5
6
|
collectIdleSessionSources,
|
|
6
7
|
createFreshSessionState,
|
|
7
8
|
migrateLegacySessionState,
|
|
8
9
|
planSessionUploads,
|
|
10
|
+
verifyUnconfirmedChunks,
|
|
9
11
|
} from "./flush.js";
|
|
12
|
+
import { migrateSessionUploadState } from "./config.js";
|
|
10
13
|
|
|
11
14
|
function makeTurn(
|
|
12
15
|
turnId: string,
|
|
@@ -81,6 +84,20 @@ function makeSource(tool: SessionSource["tool"], locator: string): SessionSource
|
|
|
81
84
|
return { tool, locator, label: locator };
|
|
82
85
|
}
|
|
83
86
|
|
|
87
|
+
function makeSessionState(overrides: Partial<SessionUploadState> & Pick<SessionUploadState, "sourceTool" | "sourceSessionId" | "locator">): SessionUploadState {
|
|
88
|
+
return {
|
|
89
|
+
nextChunkIndex: 0,
|
|
90
|
+
openChunkStartTurn: 0,
|
|
91
|
+
lastSeenTurnCount: 0,
|
|
92
|
+
lastActivityAt: null,
|
|
93
|
+
lastFlushedTurnId: null,
|
|
94
|
+
confirmedChunkIndex: 0,
|
|
95
|
+
confirmedOpenChunkStartTurn: 0,
|
|
96
|
+
unconfirmedSince: null,
|
|
97
|
+
...overrides,
|
|
98
|
+
};
|
|
99
|
+
}
|
|
100
|
+
|
|
84
101
|
describe("planSessionUploads", () => {
|
|
85
102
|
it("flushes a sealed 100k chunk and keeps the tail pending", () => {
|
|
86
103
|
const trace = makeTrace("session-100k", [
|
|
@@ -168,10 +185,174 @@ describe("planSessionUploads", () => {
|
|
|
168
185
|
});
|
|
169
186
|
});
|
|
170
187
|
|
|
188
|
+
// Covers the backward-compat migration of persisted session state:
// legacy files (no confirmation tracking) get confirmation fields derived
// from the submit-side counters; modern files are passed through untouched.
describe("migrateSessionUploadState", () => {
  it("fills in missing confirmation fields from next/openChunk values", () => {
    // Shape of a state file written before confirmation tracking existed.
    const legacy = {
      sourceTool: "codex_cli" as const,
      sourceSessionId: "s1",
      locator: "/tmp/s1.jsonl",
      nextChunkIndex: 3,
      openChunkStartTurn: 10,
      lastSeenTurnCount: 10,
      lastActivityAt: "2026-03-21T00:00:00.000Z",
      lastFlushedTurnId: "a3",
      // missing: confirmedChunkIndex, confirmedOpenChunkStartTurn, unconfirmedSince
    } as any;

    const migrated = migrateSessionUploadState(legacy);

    expect(migrated.confirmedChunkIndex).toBe(3);
    expect(migrated.confirmedOpenChunkStartTurn).toBe(10);
    expect(migrated.unconfirmedSince).toBeNull();
  });

  it("does not overwrite existing confirmation fields", () => {
    const state = makeSessionState({
      sourceTool: "codex_cli",
      sourceSessionId: "s2",
      locator: "/tmp/s2.jsonl",
      nextChunkIndex: 5,
      openChunkStartTurn: 20,
      confirmedChunkIndex: 3,
      confirmedOpenChunkStartTurn: 12,
      unconfirmedSince: "2026-03-21T01:00:00.000Z",
    });

    const migrated = migrateSessionUploadState(state);

    // Present fields survive migration verbatim.
    expect(migrated.confirmedChunkIndex).toBe(3);
    expect(migrated.confirmedOpenChunkStartTurn).toBe(12);
    expect(migrated.unconfirmedSince).toBe("2026-03-21T01:00:00.000Z");
  });
});
|
|
228
|
+
|
|
229
|
+
// Covers chunk-confirmation reconciliation: the verifier asks the API whether
// each unconfirmed chunk exists, advances confirmedChunkIndex past confirmed
// ones, and after a prolonged unconfirmed window resets the session back to
// its confirmed baseline so chunks are re-submitted.
describe("verifyUnconfirmedChunks", () => {
  // Minimal v2 submit-state wrapper around a sessions map.
  function makeSubmitState(sessions: Record<string, ReturnType<typeof makeSessionState>>) {
    return { version: 2 as const, chunks: {}, sessions };
  }

  // Fake API client: answers /exists checks keyed by "tool:sessionId:chunkIndex";
  // throws on any lookup the test did not anticipate.
  function makeMockClient(responses: Record<string, { exists: boolean }>) {
    return {
      async get(path: string) {
        const url = new URL(`http://x${path}`);
        const tool = url.searchParams.get("source_tool")!;
        const id = url.searchParams.get("source_session_id")!;
        const idx = url.searchParams.get("chunk_index")!;
        const key = `${tool}:${id}:${idx}`;
        const r = responses[key];
        if (!r) throw new Error(`Unexpected exists check: ${key}`);
        return r;
      },
    } as any;
  }

  it("advances confirmedChunkIndex when chunks are confirmed", async () => {
    const state = makeSubmitState({
      "codex_cli:sess1": makeSessionState({
        sourceTool: "codex_cli",
        sourceSessionId: "sess1",
        locator: "/tmp/sess1.jsonl",
        nextChunkIndex: 3,
        openChunkStartTurn: 12,
        confirmedChunkIndex: 1,
        confirmedOpenChunkStartTurn: 4,
        unconfirmedSince: "2026-03-21T00:00:00.000Z",
      }),
    });

    // Both outstanding chunks (1 and 2) are present server-side.
    const client = makeMockClient({
      "codex_cli:sess1:1": { exists: true },
      "codex_cli:sess1:2": { exists: true },
    });

    await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T01:00:00.000Z"));

    const s = state.sessions["codex_cli:sess1"]!;
    expect(s.confirmedChunkIndex).toBe(3);
    // all confirmed → unconfirmedSince cleared
    expect(s.unconfirmedSince).toBeNull();
  });

  it("stops advancing at first missing chunk", async () => {
    const state = makeSubmitState({
      "codex_cli:sess2": makeSessionState({
        sourceTool: "codex_cli",
        sourceSessionId: "sess2",
        locator: "/tmp/sess2.jsonl",
        nextChunkIndex: 3,
        openChunkStartTurn: 12,
        confirmedChunkIndex: 1,
        confirmedOpenChunkStartTurn: 4,
        unconfirmedSince: "2026-03-21T00:00:00.000Z",
      }),
    });

    // Chunk 1 confirmed, chunk 2 absent.
    const client = makeMockClient({
      "codex_cli:sess2:1": { exists: true },
      "codex_cli:sess2:2": { exists: false },
    });

    await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T01:00:00.000Z"));

    const s = state.sessions["codex_cli:sess2"]!;
    expect(s.confirmedChunkIndex).toBe(2);
    // still one unconfirmed chunk → unconfirmedSince preserved
    expect(s.unconfirmedSince).toBe("2026-03-21T00:00:00.000Z");
  });

  it("resets to confirmed state after 2hr timeout", async () => {
    const state = makeSubmitState({
      "codex_cli:sess3": makeSessionState({
        sourceTool: "codex_cli",
        sourceSessionId: "sess3",
        locator: "/tmp/sess3.jsonl",
        nextChunkIndex: 3,
        openChunkStartTurn: 12,
        confirmedChunkIndex: 1,
        confirmedOpenChunkStartTurn: 4,
        unconfirmedSince: "2026-03-21T00:00:00.000Z",
      }),
    });

    const client = makeMockClient({}); // should not be called

    // now = 2h01m after unconfirmedSince
    await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T02:01:00.000Z"));

    const s = state.sessions["codex_cli:sess3"]!;
    // reset back to confirmed baseline so chunks get re-submitted
    expect(s.nextChunkIndex).toBe(1);
    expect(s.openChunkStartTurn).toBe(4);
    expect(s.unconfirmedSince).toBeNull();
  });

  it("skips sessions that are already fully confirmed", async () => {
    const state = makeSubmitState({
      "codex_cli:sess4": makeSessionState({
        sourceTool: "codex_cli",
        sourceSessionId: "sess4",
        locator: "/tmp/sess4.jsonl",
        nextChunkIndex: 2,
        openChunkStartTurn: 8,
        confirmedChunkIndex: 2,
        confirmedOpenChunkStartTurn: 8,
        unconfirmedSince: null,
      }),
    });

    // Fully confirmed → the verifier must not issue any API calls.
    let callCount = 0;
    const client = { async get() { callCount++; return { exists: true }; } } as any;

    await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T01:00:00.000Z"));

    expect(callCount).toBe(0);
  });
});
|
|
351
|
+
|
|
171
352
|
describe("collectIdleSessionSources", () => {
|
|
172
353
|
it("returns only tracked sessions with an open tail older than two days", () => {
|
|
173
354
|
const sources = collectIdleSessionSources({
|
|
174
|
-
"codex_cli:idle": {
|
|
355
|
+
"codex_cli:idle": makeSessionState({
|
|
175
356
|
sourceTool: "codex_cli",
|
|
176
357
|
sourceSessionId: "idle",
|
|
177
358
|
locator: "/tmp/idle.jsonl",
|
|
@@ -180,8 +361,8 @@ describe("collectIdleSessionSources", () => {
|
|
|
180
361
|
lastSeenTurnCount: 4,
|
|
181
362
|
lastActivityAt: "2026-03-21T00:00:00.000Z",
|
|
182
363
|
lastFlushedTurnId: "a1",
|
|
183
|
-
},
|
|
184
|
-
"codex_cli:closed": {
|
|
364
|
+
}),
|
|
365
|
+
"codex_cli:closed": makeSessionState({
|
|
185
366
|
sourceTool: "codex_cli",
|
|
186
367
|
sourceSessionId: "closed",
|
|
187
368
|
locator: "/tmp/closed.jsonl",
|
|
@@ -190,8 +371,8 @@ describe("collectIdleSessionSources", () => {
|
|
|
190
371
|
lastSeenTurnCount: 4,
|
|
191
372
|
lastActivityAt: "2026-03-21T00:00:00.000Z",
|
|
192
373
|
lastFlushedTurnId: "a2",
|
|
193
|
-
},
|
|
194
|
-
"codex_cli:fresh": {
|
|
374
|
+
}),
|
|
375
|
+
"codex_cli:fresh": makeSessionState({
|
|
195
376
|
sourceTool: "codex_cli",
|
|
196
377
|
sourceSessionId: "fresh",
|
|
197
378
|
locator: "/tmp/fresh.jsonl",
|
|
@@ -200,7 +381,7 @@ describe("collectIdleSessionSources", () => {
|
|
|
200
381
|
lastSeenTurnCount: 2,
|
|
201
382
|
lastActivityAt: "2026-03-22T23:59:59.000Z",
|
|
202
383
|
lastFlushedTurnId: null,
|
|
203
|
-
},
|
|
384
|
+
}),
|
|
204
385
|
}, new Date("2026-03-23T00:01:00.000Z"));
|
|
205
386
|
|
|
206
387
|
expect(sources).toEqual([
|