codeam-cli 1.4.35 → 1.4.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +13 -5
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -116,7 +116,7 @@ var import_qrcode_terminal = __toESM(require("qrcode-terminal"));
|
|
|
116
116
|
// package.json
|
|
117
117
|
var package_default = {
|
|
118
118
|
name: "codeam-cli",
|
|
119
|
-
version: "1.4.35",
|
|
119
|
+
version: "1.4.37",
|
|
120
120
|
description: "Remote control Claude Code from your mobile device",
|
|
121
121
|
main: "dist/index.js",
|
|
122
122
|
bin: {
|
|
@@ -1356,6 +1356,7 @@ function parseJsonl(filePath) {
|
|
|
1356
1356
|
const ts = record["timestamp"];
|
|
1357
1357
|
const timestamp = typeof ts === "string" ? new Date(ts).getTime() : typeof ts === "number" ? ts : Date.now();
|
|
1358
1358
|
const uuid = record["uuid"] ?? `${Date.now()}-${Math.random()}`;
|
|
1359
|
+
if (record["isMeta"]) continue;
|
|
1359
1360
|
if (type === "user" && msg) {
|
|
1360
1361
|
const text = extractText(msg["content"]).trim();
|
|
1361
1362
|
if (text) messages.push({ id: uuid, role: "user", text, timestamp });
|
|
@@ -1669,20 +1670,27 @@ var HistoryService = class {
|
|
|
1669
1670
|
/**
|
|
1670
1671
|
* Read a specific session's full conversation and POST it to the API in batches.
|
|
1671
1672
|
* Batching avoids Vercel's 4.5 MB body limit for long sessions.
|
|
1672
|
-
*
|
|
1673
|
+
* Every batch MUST be confirmed (2xx) before proceeding — retries with
|
|
1674
|
+
* exponential backoff (500 ms → 1 s → 2 s → 4 s → 8 s). Throws if a batch
|
|
1675
|
+
* still fails after all attempts so callers skip newTurnResume instead of
|
|
1676
|
+
* showing an empty conversation.
|
|
1673
1677
|
*/
|
|
1674
1678
|
async loadConversation(sessionId) {
|
|
1675
1679
|
const filePath = path4.join(this.projectDir, `${sessionId}.jsonl`);
|
|
1676
1680
|
const messages = parseJsonl(filePath);
|
|
1677
1681
|
if (messages.length === 0) return;
|
|
1678
1682
|
const totalBatches = Math.ceil(messages.length / CONVERSATION_BATCH_SIZE);
|
|
1683
|
+
const RETRY_DELAYS = [500, 1e3, 2e3, 4e3, 8e3];
|
|
1679
1684
|
for (let i = 0; i < totalBatches; i++) {
|
|
1680
1685
|
const batch = messages.slice(i * CONVERSATION_BATCH_SIZE, (i + 1) * CONVERSATION_BATCH_SIZE);
|
|
1681
1686
|
const body = { pluginId: this.pluginId, sessionId, messages: batch, batchIndex: i, totalBatches };
|
|
1682
|
-
|
|
1687
|
+
let ok = await post("/api/sessions/claude-conversation", body);
|
|
1688
|
+
for (let attempt = 0; !ok && attempt < RETRY_DELAYS.length; attempt++) {
|
|
1689
|
+
await new Promise((r) => setTimeout(r, RETRY_DELAYS[attempt]));
|
|
1690
|
+
ok = await post("/api/sessions/claude-conversation", body);
|
|
1691
|
+
}
|
|
1683
1692
|
if (!ok) {
|
|
1684
|
-
|
|
1685
|
-
await post("/api/sessions/claude-conversation", body);
|
|
1693
|
+
throw new Error(`Failed to upload conversation batch ${i + 1}/${totalBatches} after all retries`);
|
|
1686
1694
|
}
|
|
1687
1695
|
}
|
|
1688
1696
|
}
|