@tracemarketplace/cli 0.0.15 → 0.0.17
This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only.
- package/dist/api-client.d.ts +7 -0
- package/dist/api-client.d.ts.map +1 -1
- package/dist/api-client.js +79 -14
- package/dist/api-client.js.map +1 -1
- package/dist/api-client.test.d.ts +2 -0
- package/dist/api-client.test.d.ts.map +1 -0
- package/dist/api-client.test.js +34 -0
- package/dist/api-client.test.js.map +1 -0
- package/dist/cli.js +7 -8
- package/dist/cli.js.map +1 -1
- package/dist/commands/daemon.d.ts.map +1 -1
- package/dist/commands/daemon.js +71 -6
- package/dist/commands/daemon.js.map +1 -1
- package/dist/commands/remove-daemon.d.ts +6 -0
- package/dist/commands/remove-daemon.d.ts.map +1 -0
- package/dist/commands/remove-daemon.js +66 -0
- package/dist/commands/remove-daemon.js.map +1 -0
- package/dist/commands/submit.d.ts.map +1 -1
- package/dist/commands/submit.js +3 -1
- package/dist/commands/submit.js.map +1 -1
- package/dist/config.d.ts +5 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +12 -0
- package/dist/config.js.map +1 -1
- package/dist/flush.d.ts +4 -1
- package/dist/flush.d.ts.map +1 -1
- package/dist/flush.js +92 -25
- package/dist/flush.js.map +1 -1
- package/dist/flush.test.js +162 -7
- package/dist/flush.test.js.map +1 -1
- package/package.json +2 -2
- package/src/api-client.test.ts +47 -0
- package/src/api-client.ts +98 -14
- package/src/cli.ts +8 -9
- package/src/commands/daemon.ts +82 -6
- package/src/commands/remove-daemon.ts +75 -0
- package/src/commands/submit.ts +4 -2
- package/src/config.ts +18 -0
- package/src/flush.test.ts +187 -6
- package/src/flush.ts +123 -37
- package/src/commands/register.ts +0 -8
package/src/flush.test.ts
CHANGED
|
@@ -1,12 +1,15 @@
|
|
|
1
1
|
import { describe, expect, it } from "vitest";
|
|
2
2
|
import type { NormalizedTrace, Turn } from "@tracemarketplace/shared";
|
|
3
3
|
import type { SessionSource } from "./flush.js";
|
|
4
|
+
import type { SessionUploadState } from "./config.js";
|
|
4
5
|
import {
|
|
5
6
|
collectIdleSessionSources,
|
|
6
7
|
createFreshSessionState,
|
|
7
8
|
migrateLegacySessionState,
|
|
8
9
|
planSessionUploads,
|
|
10
|
+
verifyUnconfirmedChunks,
|
|
9
11
|
} from "./flush.js";
|
|
12
|
+
import { migrateSessionUploadState } from "./config.js";
|
|
10
13
|
|
|
11
14
|
function makeTurn(
|
|
12
15
|
turnId: string,
|
|
@@ -81,6 +84,20 @@ function makeSource(tool: SessionSource["tool"], locator: string): SessionSource
|
|
|
81
84
|
return { tool, locator, label: locator };
|
|
82
85
|
}
|
|
83
86
|
|
|
87
|
+
function makeSessionState(overrides: Partial<SessionUploadState> & Pick<SessionUploadState, "sourceTool" | "sourceSessionId" | "locator">): SessionUploadState {
|
|
88
|
+
return {
|
|
89
|
+
nextChunkIndex: 0,
|
|
90
|
+
openChunkStartTurn: 0,
|
|
91
|
+
lastSeenTurnCount: 0,
|
|
92
|
+
lastActivityAt: null,
|
|
93
|
+
lastFlushedTurnId: null,
|
|
94
|
+
confirmedChunkIndex: 0,
|
|
95
|
+
confirmedOpenChunkStartTurn: 0,
|
|
96
|
+
unconfirmedSince: null,
|
|
97
|
+
...overrides,
|
|
98
|
+
};
|
|
99
|
+
}
|
|
100
|
+
|
|
84
101
|
describe("planSessionUploads", () => {
|
|
85
102
|
it("flushes a sealed 100k chunk and keeps the tail pending", () => {
|
|
86
103
|
const trace = makeTrace("session-100k", [
|
|
@@ -168,10 +185,174 @@ describe("planSessionUploads", () => {
|
|
|
168
185
|
});
|
|
169
186
|
});
|
|
170
187
|
|
|
188
|
+
describe("migrateSessionUploadState", () => {
|
|
189
|
+
it("fills in missing confirmation fields from next/openChunk values", () => {
|
|
190
|
+
const legacy = {
|
|
191
|
+
sourceTool: "codex_cli" as const,
|
|
192
|
+
sourceSessionId: "s1",
|
|
193
|
+
locator: "/tmp/s1.jsonl",
|
|
194
|
+
nextChunkIndex: 3,
|
|
195
|
+
openChunkStartTurn: 10,
|
|
196
|
+
lastSeenTurnCount: 10,
|
|
197
|
+
lastActivityAt: "2026-03-21T00:00:00.000Z",
|
|
198
|
+
lastFlushedTurnId: "a3",
|
|
199
|
+
// missing: confirmedChunkIndex, confirmedOpenChunkStartTurn, unconfirmedSince
|
|
200
|
+
} as any;
|
|
201
|
+
|
|
202
|
+
const migrated = migrateSessionUploadState(legacy);
|
|
203
|
+
|
|
204
|
+
expect(migrated.confirmedChunkIndex).toBe(3);
|
|
205
|
+
expect(migrated.confirmedOpenChunkStartTurn).toBe(10);
|
|
206
|
+
expect(migrated.unconfirmedSince).toBeNull();
|
|
207
|
+
});
|
|
208
|
+
|
|
209
|
+
it("does not overwrite existing confirmation fields", () => {
|
|
210
|
+
const state = makeSessionState({
|
|
211
|
+
sourceTool: "codex_cli",
|
|
212
|
+
sourceSessionId: "s2",
|
|
213
|
+
locator: "/tmp/s2.jsonl",
|
|
214
|
+
nextChunkIndex: 5,
|
|
215
|
+
openChunkStartTurn: 20,
|
|
216
|
+
confirmedChunkIndex: 3,
|
|
217
|
+
confirmedOpenChunkStartTurn: 12,
|
|
218
|
+
unconfirmedSince: "2026-03-21T01:00:00.000Z",
|
|
219
|
+
});
|
|
220
|
+
|
|
221
|
+
const migrated = migrateSessionUploadState(state);
|
|
222
|
+
|
|
223
|
+
expect(migrated.confirmedChunkIndex).toBe(3);
|
|
224
|
+
expect(migrated.confirmedOpenChunkStartTurn).toBe(12);
|
|
225
|
+
expect(migrated.unconfirmedSince).toBe("2026-03-21T01:00:00.000Z");
|
|
226
|
+
});
|
|
227
|
+
});
|
|
228
|
+
|
|
229
|
+
describe("verifyUnconfirmedChunks", () => {
|
|
230
|
+
function makeSubmitState(sessions: Record<string, ReturnType<typeof makeSessionState>>) {
|
|
231
|
+
return { version: 2 as const, chunks: {}, sessions };
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
function makeMockClient(responses: Record<string, { exists: boolean }>) {
|
|
235
|
+
return {
|
|
236
|
+
async get(path: string) {
|
|
237
|
+
const url = new URL(`http://x${path}`);
|
|
238
|
+
const tool = url.searchParams.get("source_tool")!;
|
|
239
|
+
const id = url.searchParams.get("source_session_id")!;
|
|
240
|
+
const idx = url.searchParams.get("chunk_index")!;
|
|
241
|
+
const key = `${tool}:${id}:${idx}`;
|
|
242
|
+
const r = responses[key];
|
|
243
|
+
if (!r) throw new Error(`Unexpected exists check: ${key}`);
|
|
244
|
+
return r;
|
|
245
|
+
},
|
|
246
|
+
} as any;
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
it("advances confirmedChunkIndex when chunks are confirmed", async () => {
|
|
250
|
+
const state = makeSubmitState({
|
|
251
|
+
"codex_cli:sess1": makeSessionState({
|
|
252
|
+
sourceTool: "codex_cli",
|
|
253
|
+
sourceSessionId: "sess1",
|
|
254
|
+
locator: "/tmp/sess1.jsonl",
|
|
255
|
+
nextChunkIndex: 3,
|
|
256
|
+
openChunkStartTurn: 12,
|
|
257
|
+
confirmedChunkIndex: 1,
|
|
258
|
+
confirmedOpenChunkStartTurn: 4,
|
|
259
|
+
unconfirmedSince: "2026-03-21T00:00:00.000Z",
|
|
260
|
+
}),
|
|
261
|
+
});
|
|
262
|
+
|
|
263
|
+
const client = makeMockClient({
|
|
264
|
+
"codex_cli:sess1:1": { exists: true },
|
|
265
|
+
"codex_cli:sess1:2": { exists: true },
|
|
266
|
+
});
|
|
267
|
+
|
|
268
|
+
await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T01:00:00.000Z"));
|
|
269
|
+
|
|
270
|
+
const s = state.sessions["codex_cli:sess1"]!;
|
|
271
|
+
expect(s.confirmedChunkIndex).toBe(3);
|
|
272
|
+
// all confirmed → unconfirmedSince cleared
|
|
273
|
+
expect(s.unconfirmedSince).toBeNull();
|
|
274
|
+
});
|
|
275
|
+
|
|
276
|
+
it("stops advancing at first missing chunk", async () => {
|
|
277
|
+
const state = makeSubmitState({
|
|
278
|
+
"codex_cli:sess2": makeSessionState({
|
|
279
|
+
sourceTool: "codex_cli",
|
|
280
|
+
sourceSessionId: "sess2",
|
|
281
|
+
locator: "/tmp/sess2.jsonl",
|
|
282
|
+
nextChunkIndex: 3,
|
|
283
|
+
openChunkStartTurn: 12,
|
|
284
|
+
confirmedChunkIndex: 1,
|
|
285
|
+
confirmedOpenChunkStartTurn: 4,
|
|
286
|
+
unconfirmedSince: "2026-03-21T00:00:00.000Z",
|
|
287
|
+
}),
|
|
288
|
+
});
|
|
289
|
+
|
|
290
|
+
const client = makeMockClient({
|
|
291
|
+
"codex_cli:sess2:1": { exists: true },
|
|
292
|
+
"codex_cli:sess2:2": { exists: false },
|
|
293
|
+
});
|
|
294
|
+
|
|
295
|
+
await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T01:00:00.000Z"));
|
|
296
|
+
|
|
297
|
+
const s = state.sessions["codex_cli:sess2"]!;
|
|
298
|
+
expect(s.confirmedChunkIndex).toBe(2);
|
|
299
|
+
// still one unconfirmed chunk → unconfirmedSince preserved
|
|
300
|
+
expect(s.unconfirmedSince).toBe("2026-03-21T00:00:00.000Z");
|
|
301
|
+
});
|
|
302
|
+
|
|
303
|
+
it("resets to confirmed state after 2hr timeout", async () => {
|
|
304
|
+
const state = makeSubmitState({
|
|
305
|
+
"codex_cli:sess3": makeSessionState({
|
|
306
|
+
sourceTool: "codex_cli",
|
|
307
|
+
sourceSessionId: "sess3",
|
|
308
|
+
locator: "/tmp/sess3.jsonl",
|
|
309
|
+
nextChunkIndex: 3,
|
|
310
|
+
openChunkStartTurn: 12,
|
|
311
|
+
confirmedChunkIndex: 1,
|
|
312
|
+
confirmedOpenChunkStartTurn: 4,
|
|
313
|
+
unconfirmedSince: "2026-03-21T00:00:00.000Z",
|
|
314
|
+
}),
|
|
315
|
+
});
|
|
316
|
+
|
|
317
|
+
const client = makeMockClient({}); // should not be called
|
|
318
|
+
|
|
319
|
+
// now = 2h01m after unconfirmedSince
|
|
320
|
+
await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T02:01:00.000Z"));
|
|
321
|
+
|
|
322
|
+
const s = state.sessions["codex_cli:sess3"]!;
|
|
323
|
+
// reset back to confirmed baseline so chunks get re-submitted
|
|
324
|
+
expect(s.nextChunkIndex).toBe(1);
|
|
325
|
+
expect(s.openChunkStartTurn).toBe(4);
|
|
326
|
+
expect(s.unconfirmedSince).toBeNull();
|
|
327
|
+
});
|
|
328
|
+
|
|
329
|
+
it("skips sessions that are already fully confirmed", async () => {
|
|
330
|
+
const state = makeSubmitState({
|
|
331
|
+
"codex_cli:sess4": makeSessionState({
|
|
332
|
+
sourceTool: "codex_cli",
|
|
333
|
+
sourceSessionId: "sess4",
|
|
334
|
+
locator: "/tmp/sess4.jsonl",
|
|
335
|
+
nextChunkIndex: 2,
|
|
336
|
+
openChunkStartTurn: 8,
|
|
337
|
+
confirmedChunkIndex: 2,
|
|
338
|
+
confirmedOpenChunkStartTurn: 8,
|
|
339
|
+
unconfirmedSince: null,
|
|
340
|
+
}),
|
|
341
|
+
});
|
|
342
|
+
|
|
343
|
+
let callCount = 0;
|
|
344
|
+
const client = { async get() { callCount++; return { exists: true }; } } as any;
|
|
345
|
+
|
|
346
|
+
await verifyUnconfirmedChunks(state, client, new Date("2026-03-21T01:00:00.000Z"));
|
|
347
|
+
|
|
348
|
+
expect(callCount).toBe(0);
|
|
349
|
+
});
|
|
350
|
+
});
|
|
351
|
+
|
|
171
352
|
describe("collectIdleSessionSources", () => {
|
|
172
353
|
it("returns only tracked sessions with an open tail older than two days", () => {
|
|
173
354
|
const sources = collectIdleSessionSources({
|
|
174
|
-
"codex_cli:idle": {
|
|
355
|
+
"codex_cli:idle": makeSessionState({
|
|
175
356
|
sourceTool: "codex_cli",
|
|
176
357
|
sourceSessionId: "idle",
|
|
177
358
|
locator: "/tmp/idle.jsonl",
|
|
@@ -180,8 +361,8 @@ describe("collectIdleSessionSources", () => {
|
|
|
180
361
|
lastSeenTurnCount: 4,
|
|
181
362
|
lastActivityAt: "2026-03-21T00:00:00.000Z",
|
|
182
363
|
lastFlushedTurnId: "a1",
|
|
183
|
-
},
|
|
184
|
-
"codex_cli:closed": {
|
|
364
|
+
}),
|
|
365
|
+
"codex_cli:closed": makeSessionState({
|
|
185
366
|
sourceTool: "codex_cli",
|
|
186
367
|
sourceSessionId: "closed",
|
|
187
368
|
locator: "/tmp/closed.jsonl",
|
|
@@ -190,8 +371,8 @@ describe("collectIdleSessionSources", () => {
|
|
|
190
371
|
lastSeenTurnCount: 4,
|
|
191
372
|
lastActivityAt: "2026-03-21T00:00:00.000Z",
|
|
192
373
|
lastFlushedTurnId: "a2",
|
|
193
|
-
},
|
|
194
|
-
"codex_cli:fresh": {
|
|
374
|
+
}),
|
|
375
|
+
"codex_cli:fresh": makeSessionState({
|
|
195
376
|
sourceTool: "codex_cli",
|
|
196
377
|
sourceSessionId: "fresh",
|
|
197
378
|
locator: "/tmp/fresh.jsonl",
|
|
@@ -200,7 +381,7 @@ describe("collectIdleSessionSources", () => {
|
|
|
200
381
|
lastSeenTurnCount: 2,
|
|
201
382
|
lastActivityAt: "2026-03-22T23:59:59.000Z",
|
|
202
383
|
lastFlushedTurnId: null,
|
|
203
|
-
},
|
|
384
|
+
}),
|
|
204
385
|
}, new Date("2026-03-23T00:01:00.000Z"));
|
|
205
386
|
|
|
206
387
|
expect(sources).toEqual([
|
package/src/flush.ts
CHANGED
|
@@ -7,11 +7,11 @@ import {
|
|
|
7
7
|
extractCursor,
|
|
8
8
|
redactTrace,
|
|
9
9
|
type NormalizedTrace,
|
|
10
|
-
type Turn,
|
|
11
10
|
} from "@tracemarketplace/shared";
|
|
12
11
|
import { ApiClient } from "./api-client.js";
|
|
13
12
|
import {
|
|
14
13
|
loadState,
|
|
14
|
+
migrateSessionUploadState,
|
|
15
15
|
saveState,
|
|
16
16
|
stateKey,
|
|
17
17
|
type Config,
|
|
@@ -21,6 +21,8 @@ import {
|
|
|
21
21
|
import { CURSOR_DB_PATH } from "./sessions.js";
|
|
22
22
|
|
|
23
23
|
const IDLE_FINALIZATION_MS = 2 * 24 * 60 * 60 * 1000;
|
|
24
|
+
const UNCONFIRMED_RESUBMIT_MS = 2 * 60 * 60 * 1000; // re-submit after 2hr with no confirmation
|
|
25
|
+
const INGEST_CONCURRENCY = 12;
|
|
24
26
|
|
|
25
27
|
export interface SessionSource {
|
|
26
28
|
tool: TrackedSessionTool;
|
|
@@ -67,6 +69,12 @@ interface ChunkUploadResult {
|
|
|
67
69
|
error?: string;
|
|
68
70
|
}
|
|
69
71
|
|
|
72
|
+
interface IngestResponse {
|
|
73
|
+
queued?: boolean;
|
|
74
|
+
duplicate?: boolean;
|
|
75
|
+
trace_id?: string;
|
|
76
|
+
}
|
|
77
|
+
|
|
70
78
|
export function collectIdleSessionSources(
|
|
71
79
|
sessions: Record<string, SessionUploadState>,
|
|
72
80
|
now = new Date()
|
|
@@ -80,10 +88,60 @@ export function collectIdleSessionSources(
|
|
|
80
88
|
}));
|
|
81
89
|
}
|
|
82
90
|
|
|
91
|
+
interface ChunkExistsResponse {
|
|
92
|
+
exists: boolean;
|
|
93
|
+
trace_id?: string;
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
export async function verifyUnconfirmedChunks(
|
|
97
|
+
state: ReturnType<typeof loadState>,
|
|
98
|
+
client: ApiClient,
|
|
99
|
+
now: Date,
|
|
100
|
+
): Promise<void> {
|
|
101
|
+
for (const [key, session] of Object.entries(state.sessions)) {
|
|
102
|
+
if (session.confirmedChunkIndex >= session.nextChunkIndex) continue;
|
|
103
|
+
|
|
104
|
+
// Check if timed out — reset to re-submit from last confirmed point
|
|
105
|
+
if (session.unconfirmedSince) {
|
|
106
|
+
const age = now.getTime() - Date.parse(session.unconfirmedSince);
|
|
107
|
+
if (age >= UNCONFIRMED_RESUBMIT_MS) {
|
|
108
|
+
state.sessions[key] = {
|
|
109
|
+
...session,
|
|
110
|
+
nextChunkIndex: session.confirmedChunkIndex,
|
|
111
|
+
openChunkStartTurn: session.confirmedOpenChunkStartTurn,
|
|
112
|
+
unconfirmedSince: null,
|
|
113
|
+
};
|
|
114
|
+
continue;
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Check each unconfirmed chunk sequentially — stop at first missing one
|
|
119
|
+
for (let i = session.confirmedChunkIndex; i < session.nextChunkIndex; i++) {
|
|
120
|
+
try {
|
|
121
|
+
const params = new URLSearchParams({
|
|
122
|
+
source_tool: session.sourceTool,
|
|
123
|
+
source_session_id: session.sourceSessionId,
|
|
124
|
+
chunk_index: String(i),
|
|
125
|
+
});
|
|
126
|
+
const result = await client.get(`/api/v1/traces/exists?${params}`) as ChunkExistsResponse;
|
|
127
|
+
if (!result.exists) break;
|
|
128
|
+
state.sessions[key] = {
|
|
129
|
+
...state.sessions[key]!,
|
|
130
|
+
confirmedChunkIndex: i + 1,
|
|
131
|
+
confirmedOpenChunkStartTurn: state.sessions[key]!.openChunkStartTurn,
|
|
132
|
+
unconfirmedSince: i + 1 >= session.nextChunkIndex ? null : state.sessions[key]!.unconfirmedSince,
|
|
133
|
+
};
|
|
134
|
+
} catch {
|
|
135
|
+
break;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
83
141
|
export async function flushTrackedSessions(
|
|
84
142
|
config: Config,
|
|
85
143
|
sources: SessionSource[],
|
|
86
|
-
opts: { includeIdleTracked?: boolean; now?: Date } = {}
|
|
144
|
+
opts: { includeIdleTracked?: boolean; now?: Date; prefetchedTraces?: Map<string, NormalizedTrace> } = {}
|
|
87
145
|
): Promise<FlushResult> {
|
|
88
146
|
const now = opts.now ?? new Date();
|
|
89
147
|
const state = loadState(config.profile);
|
|
@@ -94,12 +152,23 @@ export async function flushTrackedSessions(
|
|
|
94
152
|
...(opts.includeIdleTracked ? collectIdleSessionSources(state.sessions, now) : []),
|
|
95
153
|
]);
|
|
96
154
|
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
results.push(await processSessionSource(source, state, config, client, now));
|
|
155
|
+
// Migrate any existing sessions that predate the confirmation fields
|
|
156
|
+
for (const [key, session] of Object.entries(state.sessions)) {
|
|
157
|
+
state.sessions[key] = migrateSessionUploadState(session);
|
|
101
158
|
}
|
|
102
159
|
|
|
160
|
+
// Verify previously queued chunks and handle 2hr re-submit timeout
|
|
161
|
+
await verifyUnconfirmedChunks(state, client, now);
|
|
162
|
+
|
|
163
|
+
// Process sessions concurrently — each session's chunks stay sequential internally
|
|
164
|
+
const results = await pLimit(
|
|
165
|
+
allSources.map((source) => () => processSessionSource(
|
|
166
|
+
source, state, config, client, now,
|
|
167
|
+
opts.prefetchedTraces?.get(`${source.tool}:${source.locator}`),
|
|
168
|
+
)),
|
|
169
|
+
INGEST_CONCURRENCY,
|
|
170
|
+
);
|
|
171
|
+
|
|
103
172
|
saveState(state, config.profile);
|
|
104
173
|
|
|
105
174
|
return {
|
|
@@ -174,6 +243,9 @@ export function createFreshSessionState(
|
|
|
174
243
|
lastSeenTurnCount: 0,
|
|
175
244
|
lastActivityAt: null,
|
|
176
245
|
lastFlushedTurnId: null,
|
|
246
|
+
confirmedChunkIndex: 0,
|
|
247
|
+
confirmedOpenChunkStartTurn: 0,
|
|
248
|
+
unconfirmedSince: null,
|
|
177
249
|
};
|
|
178
250
|
}
|
|
179
251
|
|
|
@@ -182,15 +254,20 @@ export function migrateLegacySessionState(
|
|
|
182
254
|
trace: NormalizedTrace,
|
|
183
255
|
legacyChunkIndex: number
|
|
184
256
|
): SessionUploadState {
|
|
257
|
+
const nextChunkIndex = legacyChunkIndex + 1;
|
|
258
|
+
const openChunkStartTurn = trace.turn_count;
|
|
185
259
|
return {
|
|
186
260
|
sourceTool: source.tool,
|
|
187
261
|
sourceSessionId: trace.source_session_id,
|
|
188
262
|
locator: source.locator,
|
|
189
|
-
nextChunkIndex
|
|
190
|
-
openChunkStartTurn
|
|
263
|
+
nextChunkIndex,
|
|
264
|
+
openChunkStartTurn,
|
|
191
265
|
lastSeenTurnCount: trace.turn_count,
|
|
192
266
|
lastActivityAt: getLastActivityAt(trace),
|
|
193
267
|
lastFlushedTurnId: trace.turns[trace.turn_count - 1]?.turn_id ?? null,
|
|
268
|
+
confirmedChunkIndex: nextChunkIndex,
|
|
269
|
+
confirmedOpenChunkStartTurn: openChunkStartTurn,
|
|
270
|
+
unconfirmedSince: null,
|
|
194
271
|
};
|
|
195
272
|
}
|
|
196
273
|
|
|
@@ -199,12 +276,13 @@ async function processSessionSource(
|
|
|
199
276
|
state: ReturnType<typeof loadState>,
|
|
200
277
|
config: Config,
|
|
201
278
|
client: ApiClient,
|
|
202
|
-
now: Date
|
|
279
|
+
now: Date,
|
|
280
|
+
prefetchedTrace?: NormalizedTrace,
|
|
203
281
|
): Promise<SessionFlushResult> {
|
|
204
282
|
let trace: NormalizedTrace;
|
|
205
283
|
|
|
206
284
|
try {
|
|
207
|
-
trace = await extractTraceFromSource(source, config.email);
|
|
285
|
+
trace = prefetchedTrace ?? await extractTraceFromSource(source, config.email);
|
|
208
286
|
} catch (err) {
|
|
209
287
|
return {
|
|
210
288
|
source,
|
|
@@ -250,7 +328,9 @@ async function processSessionSource(
|
|
|
250
328
|
let payoutCents = 0;
|
|
251
329
|
|
|
252
330
|
for (const upload of plan.uploads) {
|
|
331
|
+
const tUpload = Date.now();
|
|
253
332
|
const result = await uploadTraceChunk(upload.trace, client);
|
|
333
|
+
console.error(`[flush] ${key} chunk${upload.trace.chunk_index} done in ${Date.now()-tUpload}ms err=${result.error?.slice(0,60) ?? 'none'}`);
|
|
254
334
|
if (result.error) {
|
|
255
335
|
state.sessions[key] = workingState;
|
|
256
336
|
if (workingState.nextChunkIndex > 0) {
|
|
@@ -315,42 +395,27 @@ async function uploadTraceChunk(
|
|
|
315
395
|
trace: NormalizedTrace,
|
|
316
396
|
client: ApiClient
|
|
317
397
|
): Promise<ChunkUploadResult> {
|
|
398
|
+
// Client-side regex redaction runs before transmission; Presidio runs server-side async.
|
|
399
|
+
const payloadTrace = redactTrace(trace, { homeDir: homedir() });
|
|
400
|
+
|
|
318
401
|
try {
|
|
319
|
-
const result = await client.post("/api/v1/traces/
|
|
320
|
-
|
|
321
|
-
source_tool:
|
|
322
|
-
}) as
|
|
323
|
-
accepted: number;
|
|
324
|
-
duplicate: number;
|
|
325
|
-
traces: Array<{
|
|
326
|
-
error?: string;
|
|
327
|
-
is_duplicate?: boolean;
|
|
328
|
-
payout_cents?: number;
|
|
329
|
-
trace_id?: string;
|
|
330
|
-
}>;
|
|
331
|
-
};
|
|
402
|
+
const result = await client.post("/api/v1/traces/ingest", {
|
|
403
|
+
trace: payloadTrace,
|
|
404
|
+
source_tool: payloadTrace.source_tool,
|
|
405
|
+
}) as IngestResponse;
|
|
332
406
|
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
return {
|
|
336
|
-
duplicate: false,
|
|
337
|
-
payoutCents: 0,
|
|
338
|
-
traceId: traceResult.trace_id ?? null,
|
|
339
|
-
error: traceResult.error,
|
|
340
|
-
};
|
|
407
|
+
if (result.duplicate) {
|
|
408
|
+
return { duplicate: true, payoutCents: 0, traceId: result.trace_id ?? null };
|
|
341
409
|
}
|
|
342
410
|
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
payoutCents: traceResult.payout_cents ?? 0,
|
|
346
|
-
traceId: traceResult.trace_id ?? null,
|
|
347
|
-
};
|
|
411
|
+
// 202 queued — payout credited asynchronously; show $0 until processed
|
|
412
|
+
return { duplicate: false, payoutCents: 0, traceId: null };
|
|
348
413
|
} catch (err) {
|
|
349
414
|
return {
|
|
350
415
|
duplicate: false,
|
|
351
416
|
payoutCents: 0,
|
|
352
417
|
traceId: null,
|
|
353
|
-
error:
|
|
418
|
+
error: formatSubmitFailure(err),
|
|
354
419
|
};
|
|
355
420
|
}
|
|
356
421
|
}
|
|
@@ -379,6 +444,7 @@ function applyUploadedChunk(
|
|
|
379
444
|
const nextOpenChunkStartTurn = (uploadedChunk.chunk_start_turn ?? cursor.openChunkStartTurn)
|
|
380
445
|
+ (uploadedChunk.turn_count ?? uploadedChunk.turns.length);
|
|
381
446
|
|
|
447
|
+
const nowIso = new Date().toISOString();
|
|
382
448
|
return {
|
|
383
449
|
...cursor,
|
|
384
450
|
nextChunkIndex: (uploadedChunk.chunk_index ?? cursor.nextChunkIndex) + 1,
|
|
@@ -386,6 +452,7 @@ function applyUploadedChunk(
|
|
|
386
452
|
lastSeenTurnCount: trace.turn_count,
|
|
387
453
|
lastActivityAt: getLastActivityAt(trace),
|
|
388
454
|
lastFlushedTurnId: trace.turns[nextOpenChunkStartTurn - 1]?.turn_id ?? cursor.lastFlushedTurnId,
|
|
455
|
+
unconfirmedSince: cursor.unconfirmedSince ?? nowIso,
|
|
389
456
|
};
|
|
390
457
|
}
|
|
391
458
|
|
|
@@ -485,6 +552,25 @@ function dedupeSources(sources: SessionSource[]): SessionSource[] {
|
|
|
485
552
|
return [...unique.values()];
|
|
486
553
|
}
|
|
487
554
|
|
|
555
|
+
function formatSubmitFailure(err: unknown): string {
|
|
556
|
+
return err instanceof Error ? `Submit failed: ${err.message}` : `Submit failed: ${String(err)}`;
|
|
557
|
+
}
|
|
558
|
+
|
|
559
|
+
async function pLimit<T>(tasks: Array<() => Promise<T>>, concurrency: number): Promise<T[]> {
|
|
560
|
+
const results: T[] = new Array(tasks.length);
|
|
561
|
+
let index = 0;
|
|
562
|
+
|
|
563
|
+
async function worker() {
|
|
564
|
+
while (index < tasks.length) {
|
|
565
|
+
const i = index++;
|
|
566
|
+
results[i] = await tasks[i]!();
|
|
567
|
+
}
|
|
568
|
+
}
|
|
569
|
+
|
|
570
|
+
await Promise.all(Array.from({ length: Math.min(concurrency, tasks.length) }, worker));
|
|
571
|
+
return results;
|
|
572
|
+
}
|
|
573
|
+
|
|
488
574
|
export function buildFileSessionSource(
|
|
489
575
|
tool: "claude_code" | "codex_cli",
|
|
490
576
|
filePath: string
|
package/src/commands/register.ts
DELETED
|
@@ -1,8 +0,0 @@
|
|
|
1
|
-
import chalk from "chalk";
|
|
2
|
-
import { CLI_NAME } from "../constants.js";
|
|
3
|
-
import { loginCommand } from "./login.js";
|
|
4
|
-
|
|
5
|
-
export async function registerCommand(opts: { profile?: string; serverUrl?: string }): Promise<void> {
|
|
6
|
-
console.log(chalk.yellow(`\`${CLI_NAME} register\` is now an alias for \`${CLI_NAME} login\`.`));
|
|
7
|
-
await loginCommand({ profile: opts.profile, serverUrl: opts.serverUrl });
|
|
8
|
-
}
|