@nocoo/pew 0.2.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +117 -8
- package/dist/cli.js.map +1 -1
- package/dist/commands/session-sync.d.ts +62 -0
- package/dist/commands/session-sync.d.ts.map +1 -0
- package/dist/commands/session-sync.js +443 -0
- package/dist/commands/session-sync.js.map +1 -0
- package/dist/commands/session-upload.d.ts +55 -0
- package/dist/commands/session-upload.d.ts.map +1 -0
- package/dist/commands/session-upload.js +177 -0
- package/dist/commands/session-upload.js.map +1 -0
- package/dist/commands/status.d.ts +9 -0
- package/dist/commands/status.d.ts.map +1 -1
- package/dist/commands/status.js +22 -11
- package/dist/commands/status.js.map +1 -1
- package/dist/commands/sync.d.ts +13 -1
- package/dist/commands/sync.d.ts.map +1 -1
- package/dist/commands/sync.js +207 -9
- package/dist/commands/sync.js.map +1 -1
- package/dist/discovery/sources.d.ts +5 -0
- package/dist/discovery/sources.d.ts.map +1 -1
- package/dist/discovery/sources.js +7 -0
- package/dist/discovery/sources.js.map +1 -1
- package/dist/parsers/claude-session.d.ts +19 -0
- package/dist/parsers/claude-session.d.ts.map +1 -0
- package/dist/parsers/claude-session.js +131 -0
- package/dist/parsers/claude-session.js.map +1 -0
- package/dist/parsers/codex-session.d.ts +24 -0
- package/dist/parsers/codex-session.d.ts.map +1 -0
- package/dist/parsers/codex-session.js +140 -0
- package/dist/parsers/codex-session.js.map +1 -0
- package/dist/parsers/codex.d.ts +37 -0
- package/dist/parsers/codex.d.ts.map +1 -0
- package/dist/parsers/codex.js +136 -0
- package/dist/parsers/codex.js.map +1 -0
- package/dist/parsers/gemini-session.d.ts +19 -0
- package/dist/parsers/gemini-session.d.ts.map +1 -0
- package/dist/parsers/gemini-session.js +103 -0
- package/dist/parsers/gemini-session.js.map +1 -0
- package/dist/parsers/openclaw-session.d.ts +20 -0
- package/dist/parsers/openclaw-session.d.ts.map +1 -0
- package/dist/parsers/openclaw-session.js +122 -0
- package/dist/parsers/openclaw-session.js.map +1 -0
- package/dist/parsers/opencode-session.d.ts +15 -0
- package/dist/parsers/opencode-session.d.ts.map +1 -0
- package/dist/parsers/opencode-session.js +131 -0
- package/dist/parsers/opencode-session.js.map +1 -0
- package/dist/parsers/opencode-sqlite-db.d.ts +29 -0
- package/dist/parsers/opencode-sqlite-db.d.ts.map +1 -0
- package/dist/parsers/opencode-sqlite-db.js +71 -0
- package/dist/parsers/opencode-sqlite-db.js.map +1 -0
- package/dist/parsers/opencode-sqlite-session.d.ts +32 -0
- package/dist/parsers/opencode-sqlite-session.d.ts.map +1 -0
- package/dist/parsers/opencode-sqlite-session.js +121 -0
- package/dist/parsers/opencode-sqlite-session.js.map +1 -0
- package/dist/parsers/opencode-sqlite.d.ts +53 -0
- package/dist/parsers/opencode-sqlite.d.ts.map +1 -0
- package/dist/parsers/opencode-sqlite.js +104 -0
- package/dist/parsers/opencode-sqlite.js.map +1 -0
- package/dist/storage/session-cursor-store.d.ts +14 -0
- package/dist/storage/session-cursor-store.d.ts.map +1 -0
- package/dist/storage/session-cursor-store.js +34 -0
- package/dist/storage/session-cursor-store.js.map +1 -0
- package/dist/storage/session-queue.d.ts +28 -0
- package/dist/storage/session-queue.d.ts.map +1 -0
- package/dist/storage/session-queue.js +65 -0
- package/dist/storage/session-queue.js.map +1 -0
- package/dist/utils/paths.d.ts +4 -0
- package/dist/utils/paths.d.ts.map +1 -1
- package/dist/utils/paths.js +5 -0
- package/dist/utils/paths.js.map +1 -1
- package/package.json +1 -1
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Session upload command — sends local session queue records to the Pew SaaS.
|
|
3
|
+
*
|
|
4
|
+
* Flow:
|
|
5
|
+
* 1. Load API key from config
|
|
6
|
+
* 2. Read un-uploaded session records from queue (using saved offset)
|
|
7
|
+
* 3. Deduplicate: keep only latest snapshot per session_key
|
|
8
|
+
* 4. Split into batches of ≤50 (D1 Free plan limit)
|
|
9
|
+
* 5. POST each batch to /api/ingest/sessions with Bearer token
|
|
10
|
+
* 6. Persist offset after all batches succeed
|
|
11
|
+
* 7. Retry on 5xx/429 with exponential backoff
|
|
12
|
+
*/
|
|
13
|
+
import { ConfigManager } from "../config/manager.js";
|
|
14
|
+
import { SessionQueue } from "../storage/session-queue.js";
|
|
15
|
+
// ---------------------------------------------------------------------------
|
|
16
|
+
// Constants
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
// Max records per POST body (matches the D1 Free plan limit of 50 noted above).
const DEFAULT_BATCH_SIZE = 50;
// Retries after the initial attempt for 5xx/429/network failures.
const DEFAULT_MAX_RETRIES = 2;
// Base delay for exponential backoff between retry attempts.
const DEFAULT_RETRY_DELAY_MS = 1000;
|
|
21
|
+
// ---------------------------------------------------------------------------
|
|
22
|
+
// Upload dedup
|
|
23
|
+
// ---------------------------------------------------------------------------
|
|
24
|
+
/**
 * Collapse raw session records to one record per session_key.
 *
 * Unlike the token pipeline's aggregateRecords() which SUMS values,
 * session dedup keeps only the LATEST snapshot per session_key
 * (strictly greater snapshot_at wins; ties keep the earlier record).
 *
 * This keeps uploads idempotent: re-scanning the same session files
 * yields the same final state after the server's monotonic upsert
 * (WHERE excluded.snapshot_at >= session_records.snapshot_at).
 */
export function deduplicateSessionRecords(records) {
    const latest = new Map();
    for (const record of records) {
        const prior = latest.get(record.session_key);
        if (prior === undefined || record.snapshot_at > prior.snapshot_at) {
            latest.set(record.session_key, record);
        }
    }
    return Array.from(latest.values());
}
|
|
44
|
+
// ---------------------------------------------------------------------------
|
|
45
|
+
// Implementation
|
|
46
|
+
// ---------------------------------------------------------------------------
|
|
47
|
+
/**
 * Session upload command — pushes queued session records to the Pew SaaS.
 *
 * Steps:
 *  1. Load the API token from config (fail fast when not logged in).
 *  2. Read not-yet-uploaded records from the queue using the saved offset.
 *  3. Keep only the latest snapshot per session_key (idempotent uploads).
 *  4. Chunk the records (batchSize, default 50) and POST each chunk to
 *     /api/ingest/sessions with a Bearer token, retrying via
 *     sendBatchWithRetry.
 *  5. Persist the new queue offset only after every chunk succeeds, so a
 *     failed run re-sends from the same position next time.
 */
export async function executeSessionUpload(opts) {
    const { stateDir, apiUrl, dev = false, fetch: fetchFn, batchSize = DEFAULT_BATCH_SIZE, maxRetries = DEFAULT_MAX_RETRIES, retryDelayMs = DEFAULT_RETRY_DELAY_MS, onProgress, } = opts;
    // 1. Resolve the API token from persisted config.
    const config = await new ConfigManager(stateDir, dev).load();
    if (!config.token) {
        return {
            success: false,
            uploaded: 0,
            batches: 0,
            error: "Not logged in. Run `pew login` first.",
        };
    }
    // 2. Pull everything recorded past the last uploaded offset.
    const queue = new SessionQueue(stateDir);
    const startOffset = await queue.loadOffset();
    const { records: rawRecords, newOffset } = await queue.readFromOffset(startOffset);
    if (rawRecords.length === 0) {
        return { success: true, uploaded: 0, batches: 0 };
    }
    // 2b. Collapse to the latest snapshot per session_key.
    const records = deduplicateSessionRecords(rawRecords);
    // 3. Chunk into upload batches.
    const chunks = [];
    for (let start = 0; start < records.length; start += batchSize) {
        chunks.push(records.slice(start, start + batchSize));
    }
    // 4. Send each batch; bail out (offset NOT saved) on the first failure.
    const endpoint = `${apiUrl}/api/ingest/sessions`;
    let totalUploaded = 0;
    let batchesCompleted = 0;
    for (const [index, chunk] of chunks.entries()) {
        onProgress?.({
            phase: "uploading",
            batch: index + 1,
            totalBatches: chunks.length,
            total: records.length,
            message: `Uploading session batch ${index + 1}/${chunks.length} (${chunk.length} records)...`,
        });
        const outcome = await sendBatchWithRetry({
            endpoint,
            token: config.token,
            batch: chunk,
            fetchFn,
            maxRetries,
            retryDelayMs,
        });
        if (!outcome.ok) {
            return {
                success: false,
                uploaded: totalUploaded,
                batches: batchesCompleted,
                error: outcome.error,
            };
        }
        totalUploaded += chunk.length;
        batchesCompleted += 1;
    }
    // 5. Every batch landed — advance the persisted offset.
    await queue.saveOffset(newOffset);
    onProgress?.({
        phase: "done",
        total: totalUploaded,
        message: `Uploaded ${totalUploaded} session records in ${batchesCompleted} batch(es).`,
    });
    return {
        success: true,
        uploaded: totalUploaded,
        batches: batchesCompleted,
    };
}
|
|
119
|
+
/**
 * POST one batch of session records to the ingest endpoint, retrying
 * transient failures with exponential backoff.
 *
 * Behavior by response class:
 * - 2xx: success, stop.
 * - 429: honor Retry-After (seconds, floored at retryDelayMs) when
 *   present, otherwise exponential backoff; retry.
 * - other 4xx: client error — fail immediately, no retry.
 * - 5xx: retry with exponential backoff.
 * - thrown (network) errors: retry unless attempts are exhausted.
 *
 * Returns { ok: true } on success, { ok: false, error } on failure.
 *
 * Fix vs. previous version: the 429 branch used to sleep its computed
 * delay AND the generic backoff at the top of the next iteration slept
 * again, doubling the wait. The 429 delay is now deferred into
 * pendingDelayMs and consumed by the single top-of-loop wait. A
 * Retry-After delay is still honored even when retryDelayMs is 0,
 * matching the original intent.
 */
async function sendBatchWithRetry(opts) {
    const { endpoint, token, batch, fetchFn, maxRetries, retryDelayMs } = opts;
    let lastError = "";
    // Delay requested by a 429 response, to wait before the next attempt.
    let pendingDelayMs = 0;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
        if (attempt > 0) {
            const delayMs = pendingDelayMs > 0
                ? pendingDelayMs
                : retryDelayMs > 0
                    ? retryDelayMs * 2 ** (attempt - 1)
                    : 0;
            pendingDelayMs = 0;
            if (delayMs > 0) {
                await sleep(delayMs);
            }
        }
        try {
            const resp = await fetchFn(endpoint, {
                method: "POST",
                headers: {
                    "Content-Type": "application/json",
                    Authorization: `Bearer ${token}`,
                },
                body: JSON.stringify(batch),
            });
            if (resp.ok) {
                return { ok: true };
            }
            // 429 — rate limited; schedule the next-attempt delay
            // (Retry-After wins over exponential backoff).
            if (resp.status === 429) {
                const body = await resp.json().catch(() => ({}));
                lastError = `429: ${body.error ?? "Too Many Requests"}`;
                if (attempt < maxRetries) {
                    const retryAfter = resp.headers.get("Retry-After");
                    pendingDelayMs = retryAfter
                        ? Math.max(Number(retryAfter) * 1000, retryDelayMs)
                        : retryDelayMs * 2 ** attempt;
                }
                continue;
            }
            // Other 4xx — client error, not retryable.
            if (resp.status >= 400 && resp.status < 500) {
                const body = await resp.json().catch(() => ({}));
                const msg = body.error ?? `HTTP ${resp.status}`;
                return { ok: false, error: `${resp.status}: ${msg}` };
            }
            // 5xx — server error, retry.
            const body = await resp.json().catch(() => ({}));
            lastError = `${resp.status}: ${body.error ?? "Server Error"}`;
        }
        catch (err) {
            lastError = String(err.message ?? err);
            // Network errors — give up once attempts are exhausted.
            if (attempt >= maxRetries) {
                return { ok: false, error: lastError };
            }
        }
    }
    return {
        ok: false,
        error: `Upload failed after ${maxRetries + 1} attempts: ${lastError}`,
    };
}
|
|
174
|
+
/** Resolve after the given number of milliseconds. */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
177
|
+
//# sourceMappingURL=session-upload.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"session-upload.js","sourceRoot":"","sources":["../../src/commands/session-upload.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAGH,OAAO,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAC;AACrD,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAwC3D,8EAA8E;AAC9E,YAAY;AACZ,8EAA8E;AAE9E,MAAM,kBAAkB,GAAG,EAAE,CAAC;AAC9B,MAAM,mBAAmB,GAAG,CAAC,CAAC;AAC9B,MAAM,sBAAsB,GAAG,IAAI,CAAC;AAEpC,8EAA8E;AAC9E,eAAe;AACf,8EAA8E;AAE9E;;;;;;;GAOG;AACH,MAAM,UAAU,yBAAyB,CACvC,OAA6B;IAE7B,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,EAAE,CAAC;IAEpC,MAAM,GAAG,GAAG,IAAI,GAAG,EAA8B,CAAC;IAClD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;QACxB,MAAM,QAAQ,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC;QACxC,IAAI,CAAC,QAAQ,IAAI,CAAC,CAAC,WAAW,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;YACtD,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;QAC5B,CAAC;IACH,CAAC;IACD,OAAO,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,8EAA8E;AAC9E,iBAAiB;AACjB,8EAA8E;AAE9E,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACxC,IAA0B;IAE1B,MAAM,EACJ,QAAQ,EACR,MAAM,EACN,GAAG,GAAG,KAAK,EACX,KAAK,EAAE,OAAO,EACd,SAAS,GAAG,kBAAkB,EAC9B,UAAU,GAAG,mBAAmB,EAChC,YAAY,GAAG,sBAAsB,EACrC,UAAU,GACX,GAAG,IAAI,CAAC;IAET,kBAAkB;IAClB,MAAM,aAAa,GAAG,IAAI,aAAa,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;IACvD,MAAM,MAAM,GAAG,MAAM,aAAa,CAAC,IAAI,EAAE,CAAC;IAE1C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;QAClB,OAAO;YACL,OAAO,EAAE,KAAK;YACd,QAAQ,EAAE,CAAC;YACX,OAAO,EAAE,CAAC;YACV,KAAK,EAAE,uCAAuC;SAC/C,CAAC;IACJ,CAAC;IAED,8BAA8B;IAC9B,MAAM,KAAK,GAAG,IAAI,YAAY,CAAC,QAAQ,CAAC,CAAC;IACzC,MAAM,aAAa,GAAG,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;IAC/C,MAAM,EAAE,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,GACtC,MAAM,KAAK,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;IAE5C,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5B,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,CAAC;IACpD,CAAC;IAED,iEAAiE;IACjE,MAAM,OAAO,GAAG,yBAAyB,CAAC,UAAU,CAAC,CAAC;IAEtD,wBAAwB;IACxB,MAAM,OAAO,GAA2B,EAAE,CAAC;IAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,IAAI,SAAS,EAAE,CAAC;QACnD,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,
EAAE,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC;IAChD,CAAC;IAED,uBAAuB;IACvB,MAAM,QAAQ,GAAG,GAAG,MAAM,sBAAsB,CAAC;IACjD,IAAI,aAAa,GAAG,CAAC,CAAC;IACtB,IAAI,gBAAgB,GAAG,CAAC,CAAC;IAEzB,KAAK,IAAI,QAAQ,GAAG,CAAC,EAAE,QAAQ,GAAG,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,EAAE,CAAC;QAC7D,MAAM,KAAK,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;QAEhC,UAAU,EAAE,CAAC;YACX,KAAK,EAAE,WAAW;YAClB,KAAK,EAAE,QAAQ,GAAG,CAAC;YACnB,YAAY,EAAE,OAAO,CAAC,MAAM;YAC5B,KAAK,EAAE,OAAO,CAAC,MAAM;YACrB,OAAO,EAAE,2BAA2B,QAAQ,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,KAAK,KAAK,CAAC,MAAM,cAAc;SAClG,CAAC,CAAC;QAEH,MAAM,MAAM,GAAG,MAAM,kBAAkB,CAAC;YACtC,QAAQ;YACR,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,KAAK;YACL,OAAO;YACP,UAAU;YACV,YAAY;SACb,CAAC,CAAC;QAEH,IAAI,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC;YACf,OAAO;gBACL,OAAO,EAAE,KAAK;gBACd,QAAQ,EAAE,aAAa;gBACvB,OAAO,EAAE,gBAAgB;gBACzB,KAAK,EAAE,MAAM,CAAC,KAAK;aACpB,CAAC;QACJ,CAAC;QAED,aAAa,IAAI,KAAK,CAAC,MAAM,CAAC;QAC9B,gBAAgB,EAAE,CAAC;IACrB,CAAC;IAED,+CAA+C;IAC/C,MAAM,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAElC,UAAU,EAAE,CAAC;QACX,KAAK,EAAE,MAAM;QACb,KAAK,EAAE,aAAa;QACpB,OAAO,EAAE,YAAY,aAAa,uBAAuB,gBAAgB,aAAa;KACvF,CAAC,CAAC;IAEH,OAAO;QACL,OAAO,EAAE,IAAI;QACb,QAAQ,EAAE,aAAa;QACvB,OAAO,EAAE,gBAAgB;KAC1B,CAAC;AACJ,CAAC;AAWD,KAAK,UAAU,kBAAkB,CAAC,IAOjC;IACC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,IAAI,CAAC;IAE3E,IAAI,SAAS,GAAG,EAAE,CAAC;IAEnB,KAAK,IAAI,OAAO,GAAG,CAAC,EAAE,OAAO,IAAI,UAAU,EAAE,OAAO,EAAE,EAAE,CAAC;QACvD,IAAI,OAAO,GAAG,CAAC,IAAI,YAAY,GAAG,CAAC,EAAE,CAAC;YACpC,MAAM,KAAK,CAAC,YAAY,GAAG,CAAC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;QACjD,CAAC;QAED,IAAI,CAAC;YACH,MAAM,IAAI,GAAG,MAAM,OAAO,CAAC,QAAQ,EAAE;gBACnC,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACP,cAAc,EAAE,kBAAkB;oBAClC,aAAa,EAAE,UAAU,KAAK,EAAE;iBACjC;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;aAC5B,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,EAAE,EAAE,CAAC;gBACZ,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,CAAC;YACtB,CAAC;YAED,0DAA0D;YAC1D,IAAI,IAAI,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;gBACxB,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;gBACnD,MAAM,OAAO,
GAAG,UAAU;oBACxB,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,IAAI,EAAE,YAAY,CAAC;oBACnD,CAAC,CAAC,YAAY,GAAG,CAAC,IAAI,OAAO,CAAC;gBAChC,IAAI,OAAO,GAAG,UAAU,IAAI,OAAO,GAAG,CAAC,EAAE,CAAC;oBACxC,MAAM,KAAK,CAAC,OAAO,CAAC,CAAC;gBACvB,CAAC;gBACD,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACjD,SAAS,GAAG,QAAS,IAA+B,CAAC,KAAK,IAAI,mBAAmB,EAAE,CAAC;gBACpF,SAAS;YACX,CAAC;YAED,kCAAkC;YAClC,IAAI,IAAI,CAAC,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,MAAM,GAAG,GAAG,EAAE,CAAC;gBAC5C,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACjD,MAAM,GAAG,GACN,IAA+B,CAAC,KAAK,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,CAAC;gBAClE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,MAAM,KAAK,GAAG,EAAE,EAAE,CAAC;YACxD,CAAC;YAED,4BAA4B;YAC5B,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjD,SAAS,GAAG,GAAG,IAAI,CAAC,MAAM,KAAM,IAA+B,CAAC,KAAK,IAAI,cAAc,EAAE,CAAC;QAC5F,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,SAAS,GAAG,MAAM,CAAE,GAAa,CAAC,OAAO,IAAI,GAAG,CAAC,CAAC;YAElD,kDAAkD;YAClD,IAAI,OAAO,IAAI,UAAU,EAAE,CAAC;gBAC1B,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;YACzC,CAAC;QACH,CAAC;IACH,CAAC;IAED,OAAO;QACL,EAAE,EAAE,KAAK;QACT,KAAK,EAAE,uBAAuB,UAAU,GAAG,CAAC,cAAc,SAAS,EAAE;KACtE,CAAC;AACJ,CAAC;AAED,SAAS,KAAK,CAAC,EAAU;IACvB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;AAC3D,CAAC"}
|
|
@@ -1,3 +1,11 @@
|
|
|
1
|
+
/** Resolved source directory paths used for file classification */
|
|
2
|
+
export interface SourceDirs {
|
|
3
|
+
claudeDir: string;
|
|
4
|
+
codexSessionsDir: string;
|
|
5
|
+
geminiDir: string;
|
|
6
|
+
openCodeMessageDir: string;
|
|
7
|
+
openclawDir: string;
|
|
8
|
+
}
|
|
1
9
|
/** Status summary for display */
|
|
2
10
|
export interface StatusResult {
|
|
3
11
|
/** Number of tracked files */
|
|
@@ -15,5 +23,6 @@ export interface StatusResult {
|
|
|
15
23
|
*/
|
|
16
24
|
export declare function executeStatus(opts: {
|
|
17
25
|
stateDir: string;
|
|
26
|
+
sourceDirs: SourceDirs;
|
|
18
27
|
}): Promise<StatusResult>;
|
|
19
28
|
//# sourceMappingURL=status.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"status.d.ts","sourceRoot":"","sources":["../../src/commands/status.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"status.d.ts","sourceRoot":"","sources":["../../src/commands/status.ts"],"names":[],"mappings":"AAGA,mEAAmE;AACnE,MAAM,WAAW,UAAU;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,gBAAgB,EAAE,MAAM,CAAC;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,kBAAkB,EAAE,MAAM,CAAC;IAC3B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,iCAAiC;AACjC,MAAM,WAAW,YAAY;IAC3B,8BAA8B;IAC9B,YAAY,EAAE,MAAM,CAAC;IACrB,wCAAwC;IACxC,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,4CAA4C;IAC5C,cAAc,EAAE,MAAM,CAAC;IACvB,0BAA0B;IAC1B,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACjC;AAiBD;;;GAGG;AACH,wBAAsB,aAAa,CAAC,IAAI,EAAE;IACxC,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,UAAU,CAAC;CACxB,GAAG,OAAO,CAAC,YAAY,CAAC,CAuBxB"}
|
package/dist/commands/status.js
CHANGED
|
@@ -1,28 +1,39 @@
|
|
|
1
1
|
import { CursorStore } from "../storage/cursor-store.js";
|
|
2
2
|
import { LocalQueue } from "../storage/local-queue.js";
|
|
3
|
+
/**
 * Classify a cursor file path into a source label.
 *
 * Uses resolved source directories so that custom paths
 * like $CODEX_HOME are classified correctly.
 *
 * Fix: a bare startsWith(dir) also matched sibling directories that
 * share a prefix (e.g. "~/.claude-backup" classified as claude-code).
 * The match now requires a path-component boundary: either the path
 * equals the directory, or the character right after the directory
 * prefix is a path separator.
 */
function classifySource(filePath, dirs) {
    // True when filePath is the directory itself or lies inside it.
    const isInside = (dir) => {
        if (!filePath.startsWith(dir))
            return false;
        if (filePath.length === dir.length)
            return true;
        const next = filePath[dir.length];
        const last = dir[dir.length - 1];
        // Accept both separators; dir may or may not carry a trailing one.
        return next === "/" || next === "\\" || last === "/" || last === "\\";
    };
    if (isInside(dirs.claudeDir))
        return "claude-code";
    if (isInside(dirs.codexSessionsDir))
        return "codex";
    if (isInside(dirs.geminiDir))
        return "gemini-cli";
    if (isInside(dirs.openCodeMessageDir))
        return "opencode";
    if (isInside(dirs.openclawDir))
        return "openclaw";
    return "unknown";
}
|
|
3
22
|
/**
|
|
4
23
|
* Compute the current sync status.
|
|
5
24
|
* Pure logic — no CLI I/O.
|
|
6
25
|
*/
|
|
7
26
|
export async function executeStatus(opts) {
|
|
8
|
-
const { stateDir } = opts;
|
|
27
|
+
const { stateDir, sourceDirs } = opts;
|
|
9
28
|
const cursorStore = new CursorStore(stateDir);
|
|
10
29
|
const queue = new LocalQueue(stateDir);
|
|
11
30
|
const cursors = await cursorStore.load();
|
|
12
31
|
const offset = await queue.loadOffset();
|
|
13
32
|
const { records } = await queue.readFromOffset(offset);
|
|
14
|
-
// Count files by source
|
|
33
|
+
// Count files by source using resolved directory paths
|
|
15
34
|
const sources = {};
|
|
16
35
|
for (const filePath of Object.keys(cursors.files)) {
|
|
17
|
-
|
|
18
|
-
if (filePath.includes(".claude"))
|
|
19
|
-
source = "claude-code";
|
|
20
|
-
else if (filePath.includes(".gemini"))
|
|
21
|
-
source = "gemini-cli";
|
|
22
|
-
else if (filePath.includes("opencode"))
|
|
23
|
-
source = "opencode";
|
|
24
|
-
else if (filePath.includes(".openclaw"))
|
|
25
|
-
source = "openclaw";
|
|
36
|
+
const source = classifySource(filePath, sourceDirs);
|
|
26
37
|
sources[source] = (sources[source] || 0) + 1;
|
|
27
38
|
}
|
|
28
39
|
return {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"status.js","sourceRoot":"","sources":["../../src/commands/status.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"status.js","sourceRoot":"","sources":["../../src/commands/status.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AACzD,OAAO,EAAE,UAAU,EAAE,MAAM,2BAA2B,CAAC;AAuBvD;;;;;GAKG;AACH,SAAS,cAAc,CAAC,QAAgB,EAAE,IAAgB;IACxD,IAAI,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC;QAAE,OAAO,aAAa,CAAC;IAC9D,IAAI,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC;QAAE,OAAO,OAAO,CAAC;IAC/D,IAAI,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC;QAAE,OAAO,YAAY,CAAC;IAC7D,IAAI,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,kBAAkB,CAAC;QAAE,OAAO,UAAU,CAAC;IACpE,IAAI,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC;QAAE,OAAO,UAAU,CAAC;IAC7D,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAAC,IAGnC;IACC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,IAAI,CAAC;IAEtC,MAAM,WAAW,GAAG,IAAI,WAAW,CAAC,QAAQ,CAAC,CAAC;IAC9C,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC;IAEvC,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,IAAI,EAAE,CAAC;IACzC,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;IACxC,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,KAAK,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IAEvD,uDAAuD;IACvD,MAAM,OAAO,GAA2B,EAAE,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;QAClD,MAAM,MAAM,GAAG,cAAc,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;QACpD,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC;IAC/C,CAAC;IAED,OAAO;QACL,YAAY,EAAE,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,MAAM;QAC/C,QAAQ,EAAE,OAAO,CAAC,SAAS;QAC3B,cAAc,EAAE,OAAO,CAAC,MAAM;QAC9B,OAAO;KACR,CAAC;AACJ,CAAC"}
|
package/dist/commands/sync.d.ts
CHANGED
|
@@ -1,13 +1,23 @@
|
|
|
1
|
+
import type { QueryMessagesFn } from "../parsers/opencode-sqlite.js";
|
|
1
2
|
/** Sync execution options */
|
|
2
3
|
export interface SyncOptions {
|
|
3
4
|
/** Directory for persisting state (cursors, queue) */
|
|
4
5
|
stateDir: string;
|
|
5
6
|
/** Override: Claude data directory (~/.claude) */
|
|
6
7
|
claudeDir?: string;
|
|
8
|
+
/** Override: Codex CLI sessions directory (~/.codex/sessions) */
|
|
9
|
+
codexSessionsDir?: string;
|
|
7
10
|
/** Override: Gemini data directory (~/.gemini) */
|
|
8
11
|
geminiDir?: string;
|
|
9
12
|
/** Override: OpenCode message directory (~/.local/share/opencode/storage/message) */
|
|
10
13
|
openCodeMessageDir?: string;
|
|
14
|
+
/** Override: OpenCode SQLite database path (~/.local/share/opencode/opencode.db) */
|
|
15
|
+
openCodeDbPath?: string;
|
|
16
|
+
/** Factory for opening the OpenCode SQLite DB (DI for testability) */
|
|
17
|
+
openMessageDb?: (dbPath: string) => {
|
|
18
|
+
queryMessages: QueryMessagesFn;
|
|
19
|
+
close: () => void;
|
|
20
|
+
} | null;
|
|
11
21
|
/** Override: OpenClaw data directory (~/.openclaw) */
|
|
12
22
|
openclawDir?: string;
|
|
13
23
|
/** Progress callback */
|
|
@@ -16,7 +26,7 @@ export interface SyncOptions {
|
|
|
16
26
|
/** Progress event for UI display */
|
|
17
27
|
export interface ProgressEvent {
|
|
18
28
|
source: string;
|
|
19
|
-
phase: "discover" | "parse" | "aggregate" | "done";
|
|
29
|
+
phase: "discover" | "parse" | "aggregate" | "done" | "warn";
|
|
20
30
|
current?: number;
|
|
21
31
|
total?: number;
|
|
22
32
|
message?: string;
|
|
@@ -27,6 +37,7 @@ export interface SyncResult {
|
|
|
27
37
|
totalRecords: number;
|
|
28
38
|
sources: {
|
|
29
39
|
claude: number;
|
|
40
|
+
codex: number;
|
|
30
41
|
gemini: number;
|
|
31
42
|
opencode: number;
|
|
32
43
|
openclaw: number;
|
|
@@ -34,6 +45,7 @@ export interface SyncResult {
|
|
|
34
45
|
/** Total files scanned per source */
|
|
35
46
|
filesScanned: {
|
|
36
47
|
claude: number;
|
|
48
|
+
codex: number;
|
|
37
49
|
gemini: number;
|
|
38
50
|
opencode: number;
|
|
39
51
|
openclaw: number;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../../src/commands/sync.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../../src/commands/sync.ts"],"names":[],"mappings":"AA4BA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAIrE,6BAA6B;AAC7B,MAAM,WAAW,WAAW;IAC1B,sDAAsD;IACtD,QAAQ,EAAE,MAAM,CAAC;IACjB,kDAAkD;IAClD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,iEAAiE;IACjE,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,kDAAkD;IAClD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,qFAAqF;IACrF,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,oFAAoF;IACpF,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,sEAAsE;IACtE,aAAa,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK;QAAE,aAAa,EAAE,eAAe,CAAC;QAAC,KAAK,EAAE,MAAM,IAAI,CAAA;KAAE,GAAG,IAAI,CAAC;IACjG,sDAAsD;IACtD,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,wBAAwB;IACxB,UAAU,CAAC,EAAE,CAAC,KAAK,EAAE,aAAa,KAAK,IAAI,CAAC;CAC7C;AAED,oCAAoC;AACpC,MAAM,WAAW,aAAa;IAC5B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,UAAU,GAAG,OAAO,GAAG,WAAW,GAAG,MAAM,GAAG,MAAM,CAAC;IAC5D,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,iCAAiC;AACjC,MAAM,WAAW,UAAU;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE;QACP,MAAM,EAAE,MAAM,CAAC;QACf,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,qCAAqC;IACrC,YAAY,EAAE;QACZ,MAAM,EAAE,MAAM,CAAC;QACf,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,MAAM,CAAC;KAClB,CAAC;CACH;AAUD;;;;;GAKG;AACH,wBAAsB,WAAW,CAAC,IAAI,EAAE,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC,CAmgBxE"}
|
package/dist/commands/sync.js
CHANGED
|
@@ -1,11 +1,13 @@
|
|
|
1
1
|
import { stat } from "node:fs/promises";
|
|
2
2
|
import { CursorStore } from "../storage/cursor-store.js";
|
|
3
3
|
import { LocalQueue } from "../storage/local-queue.js";
|
|
4
|
-
import { discoverClaudeFiles, discoverGeminiFiles, discoverOpenCodeFiles, discoverOpenClawFiles, } from "../discovery/sources.js";
|
|
4
|
+
import { discoverClaudeFiles, discoverCodexFiles, discoverGeminiFiles, discoverOpenCodeFiles, discoverOpenClawFiles, } from "../discovery/sources.js";
|
|
5
5
|
import { parseClaudeFile } from "../parsers/claude.js";
|
|
6
|
+
import { parseCodexFile } from "../parsers/codex.js";
|
|
6
7
|
import { parseGeminiFile } from "../parsers/gemini.js";
|
|
7
8
|
import { parseOpenCodeFile } from "../parsers/opencode.js";
|
|
8
9
|
import { parseOpenClawFile } from "../parsers/openclaw.js";
|
|
10
|
+
import { processOpenCodeMessages } from "../parsers/opencode-sqlite.js";
|
|
9
11
|
import { toUtcHalfHourStart, bucketKey, addTokens, emptyTokenDelta } from "../utils/buckets.js";
|
|
10
12
|
/**
|
|
11
13
|
* Execute the sync operation: discover files, parse incrementally,
|
|
@@ -19,8 +21,8 @@ export async function executeSync(opts) {
|
|
|
19
21
|
const queue = new LocalQueue(stateDir);
|
|
20
22
|
const cursors = await cursorStore.load();
|
|
21
23
|
const allDeltas = [];
|
|
22
|
-
const sourceCounts = { claude: 0, gemini: 0, opencode: 0, openclaw: 0 };
|
|
23
|
-
const filesScanned = { claude: 0, gemini: 0, opencode: 0, openclaw: 0 };
|
|
24
|
+
const sourceCounts = { claude: 0, codex: 0, gemini: 0, opencode: 0, openclaw: 0 };
|
|
25
|
+
const filesScanned = { claude: 0, codex: 0, gemini: 0, opencode: 0, openclaw: 0 };
|
|
24
26
|
// ---------- Claude Code ----------
|
|
25
27
|
if (opts.claudeDir) {
|
|
26
28
|
onProgress?.({
|
|
@@ -43,7 +45,16 @@ export async function executeSync(opts) {
|
|
|
43
45
|
continue;
|
|
44
46
|
const inode = st.ino;
|
|
45
47
|
const startOffset = prev && prev.inode === inode ? (prev.offset ?? 0) : 0;
|
|
46
|
-
const result = await parseClaudeFile({ filePath, startOffset })
|
|
48
|
+
const result = await parseClaudeFile({ filePath, startOffset }).catch((err) => {
|
|
49
|
+
onProgress?.({
|
|
50
|
+
source: "claude-code",
|
|
51
|
+
phase: "warn",
|
|
52
|
+
message: `Skipping ${filePath}: ${err instanceof Error ? err.message : String(err)}`,
|
|
53
|
+
});
|
|
54
|
+
return null;
|
|
55
|
+
});
|
|
56
|
+
if (!result)
|
|
57
|
+
continue;
|
|
47
58
|
cursors.files[filePath] = {
|
|
48
59
|
inode,
|
|
49
60
|
offset: result.endOffset,
|
|
@@ -87,7 +98,16 @@ export async function executeSync(opts) {
|
|
|
87
98
|
filePath,
|
|
88
99
|
startIndex,
|
|
89
100
|
lastTotals,
|
|
101
|
+
}).catch((err) => {
|
|
102
|
+
onProgress?.({
|
|
103
|
+
source: "gemini-cli",
|
|
104
|
+
phase: "warn",
|
|
105
|
+
message: `Skipping ${filePath}: ${err instanceof Error ? err.message : String(err)}`,
|
|
106
|
+
});
|
|
107
|
+
return null;
|
|
90
108
|
});
|
|
109
|
+
if (!result)
|
|
110
|
+
continue;
|
|
91
111
|
cursors.files[filePath] = {
|
|
92
112
|
inode,
|
|
93
113
|
lastIndex: result.lastIndex,
|
|
@@ -143,7 +163,16 @@ export async function executeSync(opts) {
|
|
|
143
163
|
continue;
|
|
144
164
|
}
|
|
145
165
|
const lastTotals = prev && prev.inode === inode ? (prev.lastTotals ?? null) : null;
|
|
146
|
-
const result = await parseOpenCodeFile({ filePath, lastTotals })
|
|
166
|
+
const result = await parseOpenCodeFile({ filePath, lastTotals }).catch((err) => {
|
|
167
|
+
onProgress?.({
|
|
168
|
+
source: "opencode",
|
|
169
|
+
phase: "warn",
|
|
170
|
+
message: `Skipping ${filePath}: ${err instanceof Error ? err.message : String(err)}`,
|
|
171
|
+
});
|
|
172
|
+
return null;
|
|
173
|
+
});
|
|
174
|
+
if (!result)
|
|
175
|
+
continue;
|
|
147
176
|
cursors.files[filePath] = {
|
|
148
177
|
inode,
|
|
149
178
|
size: st.size,
|
|
@@ -166,6 +195,103 @@ export async function executeSync(opts) {
|
|
|
166
195
|
// Persist directory mtimes for next run
|
|
167
196
|
cursors.dirMtimes = discovery.dirMtimes;
|
|
168
197
|
}
|
|
198
|
+
// ---------- OpenCode SQLite ----------
|
|
199
|
+
if (opts.openCodeDbPath) {
|
|
200
|
+
onProgress?.({
|
|
201
|
+
source: "opencode-sqlite",
|
|
202
|
+
phase: "discover",
|
|
203
|
+
message: "Checking OpenCode SQLite database...",
|
|
204
|
+
});
|
|
205
|
+
// Check if DB file exists
|
|
206
|
+
const dbStat = await stat(opts.openCodeDbPath).catch(() => null);
|
|
207
|
+
if (dbStat && !opts.openMessageDb) {
|
|
208
|
+
// DB file exists but adapter is missing (bun:sqlite not available)
|
|
209
|
+
onProgress?.({
|
|
210
|
+
source: "opencode-sqlite",
|
|
211
|
+
phase: "warn",
|
|
212
|
+
message: `OpenCode SQLite database found at ${opts.openCodeDbPath} but bun:sqlite is not available — SQLite token data will NOT be synced`,
|
|
213
|
+
});
|
|
214
|
+
}
|
|
215
|
+
else if (dbStat && opts.openMessageDb) {
|
|
216
|
+
const dbInode = dbStat.ino;
|
|
217
|
+
const prevSqlite = cursors.openCodeSqlite;
|
|
218
|
+
// If inode changed (DB recreated), reset cursor
|
|
219
|
+
const lastTimeCreated = prevSqlite && prevSqlite.inode === dbInode
|
|
220
|
+
? prevSqlite.lastTimeCreated
|
|
221
|
+
: 0;
|
|
222
|
+
const prevProcessedIds = new Set(prevSqlite && prevSqlite.inode === dbInode
|
|
223
|
+
? (prevSqlite.lastProcessedIds ?? [])
|
|
224
|
+
: []);
|
|
225
|
+
const handle = opts.openMessageDb(opts.openCodeDbPath);
|
|
226
|
+
if (handle) {
|
|
227
|
+
try {
|
|
228
|
+
// Query uses >= to avoid missing same-millisecond rows.
|
|
229
|
+
// We dedup previously-processed IDs from the prior batch.
|
|
230
|
+
const rawRows = handle.queryMessages(lastTimeCreated);
|
|
231
|
+
const rows = prevProcessedIds.size > 0
|
|
232
|
+
? rawRows.filter((r) => !prevProcessedIds.has(r.id))
|
|
233
|
+
: rawRows;
|
|
234
|
+
// Collect JSON messageKeys from cursor store for dedup.
|
|
235
|
+
// During the overlap window (~Feb 15-17), both JSON and SQLite
|
|
236
|
+
// sources contain the same messages. We skip any SQLite row
|
|
237
|
+
// whose messageKey is already tracked by a JSON file cursor.
|
|
238
|
+
const jsonMessageKeys = new Set();
|
|
239
|
+
for (const cursor of Object.values(cursors.files)) {
|
|
240
|
+
const oc = cursor;
|
|
241
|
+
if (oc.messageKey) {
|
|
242
|
+
jsonMessageKeys.add(oc.messageKey);
|
|
243
|
+
}
|
|
244
|
+
}
|
|
245
|
+
// Filter rows: exclude assistant messages already tracked by JSON parser.
|
|
246
|
+
// row.role is extracted at the SQL level via json_extract — no need to
|
|
247
|
+
// parse the full data JSON here.
|
|
248
|
+
const filteredRows = rows.filter((row) => {
|
|
249
|
+
if (row.role !== "assistant")
|
|
250
|
+
return true; // non-assistant rows don't produce deltas
|
|
251
|
+
const key = `${row.session_id}|${row.id}`;
|
|
252
|
+
return !jsonMessageKeys.has(key);
|
|
253
|
+
});
|
|
254
|
+
const dedupSkipped = rows.length - filteredRows.length;
|
|
255
|
+
const result = processOpenCodeMessages(filteredRows);
|
|
256
|
+
onProgress?.({
|
|
257
|
+
source: "opencode-sqlite",
|
|
258
|
+
phase: "parse",
|
|
259
|
+
message: `Parsed ${result.deltas.length} deltas from ${rawRows.length} SQLite rows${dedupSkipped > 0 ? ` (${dedupSkipped} deduped)` : ""}`,
|
|
260
|
+
});
|
|
261
|
+
allDeltas.push(...result.deltas);
|
|
262
|
+
sourceCounts.opencode += result.deltas.length;
|
|
263
|
+
// Update SQLite cursor — advance past ALL rows (including deduped).
|
|
264
|
+
// Rows are ORDER BY time_created ASC, so the last row has the
|
|
265
|
+
// highest time_created. Track IDs at the max timestamp for
|
|
266
|
+
// same-millisecond dedup on the next query.
|
|
267
|
+
const maxTime = rawRows.length > 0
|
|
268
|
+
? rawRows[rawRows.length - 1].time_created
|
|
269
|
+
: lastTimeCreated;
|
|
270
|
+
const idsAtMax = rawRows
|
|
271
|
+
.filter((r) => r.time_created === maxTime)
|
|
272
|
+
.map((r) => r.id);
|
|
273
|
+
cursors.openCodeSqlite = {
|
|
274
|
+
lastTimeCreated: maxTime,
|
|
275
|
+
lastProcessedIds: idsAtMax,
|
|
276
|
+
lastSessionUpdated: prevSqlite?.lastSessionUpdated ?? 0,
|
|
277
|
+
inode: dbInode,
|
|
278
|
+
updatedAt: new Date().toISOString(),
|
|
279
|
+
};
|
|
280
|
+
}
|
|
281
|
+
finally {
|
|
282
|
+
handle.close();
|
|
283
|
+
}
|
|
284
|
+
}
|
|
285
|
+
else {
|
|
286
|
+
// openMessageDb returned null — DB exists but couldn't be opened
|
|
287
|
+
onProgress?.({
|
|
288
|
+
source: "opencode-sqlite",
|
|
289
|
+
phase: "warn",
|
|
290
|
+
message: `Failed to open OpenCode SQLite database at ${opts.openCodeDbPath} — SQLite token data will NOT be synced`,
|
|
291
|
+
});
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
}
|
|
169
295
|
// ---------- OpenClaw ----------
|
|
170
296
|
if (opts.openclawDir) {
|
|
171
297
|
onProgress?.({
|
|
@@ -189,7 +315,16 @@ export async function executeSync(opts) {
|
|
|
189
315
|
continue;
|
|
190
316
|
const inode = st.ino;
|
|
191
317
|
const startOffset = prev && prev.inode === inode ? (prev.offset ?? 0) : 0;
|
|
192
|
-
const result = await parseOpenClawFile({ filePath, startOffset })
|
|
318
|
+
const result = await parseOpenClawFile({ filePath, startOffset }).catch((err) => {
|
|
319
|
+
onProgress?.({
|
|
320
|
+
source: "openclaw",
|
|
321
|
+
phase: "warn",
|
|
322
|
+
message: `Skipping ${filePath}: ${err instanceof Error ? err.message : String(err)}`,
|
|
323
|
+
});
|
|
324
|
+
return null;
|
|
325
|
+
});
|
|
326
|
+
if (!result)
|
|
327
|
+
continue;
|
|
193
328
|
cursors.files[filePath] = {
|
|
194
329
|
inode,
|
|
195
330
|
offset: result.endOffset,
|
|
@@ -205,6 +340,63 @@ export async function executeSync(opts) {
|
|
|
205
340
|
});
|
|
206
341
|
}
|
|
207
342
|
}
|
|
343
|
+
// ---------- Codex CLI ----------
|
|
344
|
+
if (opts.codexSessionsDir) {
|
|
345
|
+
onProgress?.({
|
|
346
|
+
source: "codex",
|
|
347
|
+
phase: "discover",
|
|
348
|
+
message: "Discovering Codex CLI files...",
|
|
349
|
+
});
|
|
350
|
+
const files = await discoverCodexFiles(opts.codexSessionsDir);
|
|
351
|
+
filesScanned.codex = files.length;
|
|
352
|
+
onProgress?.({
|
|
353
|
+
source: "codex",
|
|
354
|
+
phase: "parse",
|
|
355
|
+
total: files.length,
|
|
356
|
+
message: `Parsing ${files.length} Codex files...`,
|
|
357
|
+
});
|
|
358
|
+
for (let i = 0; i < files.length; i++) {
|
|
359
|
+
const filePath = files[i];
|
|
360
|
+
const prev = cursors.files[filePath];
|
|
361
|
+
const st = await stat(filePath).catch(() => null);
|
|
362
|
+
if (!st)
|
|
363
|
+
continue;
|
|
364
|
+
const inode = st.ino;
|
|
365
|
+
const startOffset = prev && prev.inode === inode ? (prev.offset ?? 0) : 0;
|
|
366
|
+
const lastTotals = prev && prev.inode === inode ? (prev.lastTotals ?? null) : null;
|
|
367
|
+
const lastModel = prev && prev.inode === inode ? (prev.lastModel ?? null) : null;
|
|
368
|
+
const result = await parseCodexFile({
|
|
369
|
+
filePath,
|
|
370
|
+
startOffset,
|
|
371
|
+
lastTotals,
|
|
372
|
+
lastModel,
|
|
373
|
+
}).catch((err) => {
|
|
374
|
+
onProgress?.({
|
|
375
|
+
source: "codex",
|
|
376
|
+
phase: "warn",
|
|
377
|
+
message: `Skipping ${filePath}: ${err instanceof Error ? err.message : String(err)}`,
|
|
378
|
+
});
|
|
379
|
+
return null;
|
|
380
|
+
});
|
|
381
|
+
if (!result)
|
|
382
|
+
continue;
|
|
383
|
+
cursors.files[filePath] = {
|
|
384
|
+
inode,
|
|
385
|
+
offset: result.endOffset,
|
|
386
|
+
lastTotals: result.lastTotals,
|
|
387
|
+
lastModel: result.lastModel,
|
|
388
|
+
updatedAt: new Date().toISOString(),
|
|
389
|
+
};
|
|
390
|
+
allDeltas.push(...result.deltas);
|
|
391
|
+
sourceCounts.codex += result.deltas.length;
|
|
392
|
+
onProgress?.({
|
|
393
|
+
source: "codex",
|
|
394
|
+
phase: "parse",
|
|
395
|
+
current: i + 1,
|
|
396
|
+
total: files.length,
|
|
397
|
+
});
|
|
398
|
+
}
|
|
399
|
+
}
|
|
208
400
|
// ---------- Aggregate into half-hour buckets ----------
|
|
209
401
|
onProgress?.({
|
|
210
402
|
source: "all",
|
|
@@ -233,6 +425,7 @@ export async function executeSync(opts) {
|
|
|
233
425
|
const records = [];
|
|
234
426
|
for (const bucket of buckets.values()) {
|
|
235
427
|
const totalTokens = bucket.tokens.inputTokens +
|
|
428
|
+
bucket.tokens.cachedInputTokens +
|
|
236
429
|
bucket.tokens.outputTokens +
|
|
237
430
|
bucket.tokens.reasoningOutputTokens;
|
|
238
431
|
records.push({
|
|
@@ -246,12 +439,17 @@ export async function executeSync(opts) {
|
|
|
246
439
|
total_tokens: totalTokens,
|
|
247
440
|
});
|
|
248
441
|
}
|
|
442
|
+
// ---------- Save cursor state FIRST (before queue) ----------
|
|
443
|
+
// Cursor must be persisted before the queue write so that a crash
|
|
444
|
+
// between the two operations does not cause double-counting on the
|
|
445
|
+
// next sync. Worst case: cursor saved but queue not written — data
|
|
446
|
+
// is lost for this cycle (acceptable), but never duplicated.
|
|
447
|
+
cursors.updatedAt = new Date().toISOString();
|
|
448
|
+
await cursorStore.save(cursors);
|
|
449
|
+
// ---------- Write to queue ----------
|
|
249
450
|
if (records.length > 0) {
|
|
250
451
|
await queue.appendBatch(records);
|
|
251
452
|
}
|
|
252
|
-
// ---------- Save cursor state ----------
|
|
253
|
-
cursors.updatedAt = new Date().toISOString();
|
|
254
|
-
await cursorStore.save(cursors);
|
|
255
453
|
onProgress?.({
|
|
256
454
|
source: "all",
|
|
257
455
|
phase: "done",
|