openclaw-navigator 5.6.4 → 5.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/cli.mjs +122 -136
  2. package/package.json +1 -1
package/cli.mjs CHANGED
@@ -1137,11 +1137,12 @@ function handleRequest(req, res) {
1137
1137
  }
1138
1138
 
1139
1139
  // ── Chat send: store locally + respond immediately ─────────────────────
1140
- // 1. Store user message in bridge memory (for MCP tools to read)
1141
- // 2. Respond to Navigator immediately (don't block on gateway)
1142
- // 3. In background: try relaying to gateway for AI processing
1143
- // - If gateway responds store assistant message + broadcast via WS
1144
- // - If gateway hangs/fails agent will respond via MCP tools later
1140
+ // ── Chat send: Navigator sidepane sends a message ─────────────────────
1141
+ // 1. Store user message in bridge memory
1142
+ // 2. Respond to Navigator immediately
1143
+ // 3. In background: relay to the BFF on port 4000 (/api/chat) for AI processing
1144
+ // The AI lives behind the BFF, NOT on the gateway (port 18789 returns 405).
1145
+ // The BFF returns SSE — collect it, store the response, broadcast via WS.
1145
1146
  if (path === "/api/sessions/send" && req.method === "POST") {
1146
1147
  readBody(req)
1147
1148
  .then((bodyStr) => {
@@ -1172,48 +1173,63 @@ function handleRequest(req, res) {
1172
1173
  // 2. Respond immediately — don't block Navigator
1173
1174
  sendJSON(res, 200, { ok: true, stored: true, messageCount: session.messages.length });
1174
1175
 
1175
- // 3. Background: relay to gateway for AI processing
1176
- const proxyBody = JSON.stringify({ message, sessionKey });
1176
+ // 3. Background: relay to BFF (port 4000) /api/chat for AI processing
1177
+ // Format: OpenAI-compatible chat completions with message history
1178
+ const chatHistory = session.messages.map((m) => ({
1179
+ role: m.role,
1180
+ content: m.content,
1181
+ }));
1182
+ const proxyBody = JSON.stringify({
1183
+ messages: chatHistory,
1184
+ stream: true,
1185
+ });
1177
1186
  const proxyOpts = {
1178
1187
  hostname: "127.0.0.1",
1179
- port: ocGatewayPort,
1180
- path: `/api/sessions/send`,
1188
+ port: ocUIPort,
1189
+ path: `/api/chat`,
1181
1190
  method: "POST",
1182
- timeout: 60000, // 60s — agent may take a while to respond
1191
+ timeout: 120000, // 2 min — agent may take a while
1183
1192
  headers: {
1184
1193
  "content-type": "application/json",
1185
1194
  "content-length": Buffer.byteLength(proxyBody),
1186
1195
  },
1187
1196
  };
1188
1197
 
1198
+ console.log(` ${DIM}→ Relaying to BFF /api/chat (port ${ocUIPort}) with ${chatHistory.length} messages${RESET}`);
1199
+
1189
1200
  const proxyReq = httpRequest(proxyOpts, (proxyRes) => {
1190
1201
  const contentType = (proxyRes.headers["content-type"] || "").toLowerCase();
1191
1202
  const isSSE = contentType.includes("text/event-stream");
1192
- console.log(` ${DIM}Gateway relay response: ${proxyRes.statusCode} ${contentType || "no-content-type"}${RESET}`);
1193
-
1194
- if (isSSE) {
1195
- // SSE response: collect stream, broadcast chunks via WebSocket
1196
- let fullText = "";
1197
- let buffer = "";
1198
- proxyRes.setEncoding("utf-8");
1199
- proxyRes.on("data", (chunk) => {
1200
- buffer += chunk;
1201
- const lines = buffer.split("\n");
1202
- buffer = lines.pop() || "";
1203
+ console.log(` ${DIM} BFF response: ${proxyRes.statusCode} ${contentType || "no-content-type"}${RESET}`);
1204
+
1205
+ // Collect the response (SSE or JSON) and extract the assistant's text
1206
+ let fullText = "";
1207
+ let sseBuffer = "";
1208
+
1209
+ proxyRes.setEncoding("utf-8");
1210
+ proxyRes.on("data", (chunk) => {
1211
+ if (isSSE) {
1212
+ sseBuffer += chunk;
1213
+ const lines = sseBuffer.split("\n");
1214
+ sseBuffer = lines.pop() || "";
1203
1215
  for (const line of lines) {
1204
1216
  if (line.startsWith("data: ")) {
1205
1217
  const raw = line.slice(6).trim();
1206
- if (raw === "[DONE]") continue;
1218
+ if (raw === "[DONE]" || !raw) continue;
1207
1219
  try {
1208
1220
  const evt = JSON.parse(raw);
1209
- const text =
1210
- evt.text || evt.content || evt.delta?.text || evt.delta?.content || "";
1211
- if (text) {
1212
- fullText += text;
1221
+ const delta =
1222
+ evt.choices?.[0]?.delta?.content ||
1223
+ evt.delta?.text ||
1224
+ evt.text ||
1225
+ evt.content ||
1226
+ "";
1227
+ if (delta) {
1228
+ fullText += delta;
1213
1229
  broadcastToWS({
1214
1230
  type: "chat.delta",
1215
1231
  text: fullText,
1216
- delta: text,
1232
+ delta,
1217
1233
  sessionKey,
1218
1234
  timestamp: Date.now(),
1219
1235
  });
@@ -1223,72 +1239,64 @@ function handleRequest(req, res) {
1223
1239
  }
1224
1240
  }
1225
1241
  }
1226
- });
1227
- proxyRes.on("end", () => {
1228
- if (buffer.startsWith("data: ")) {
1229
- const raw = buffer.slice(6).trim();
1230
- if (raw && raw !== "[DONE]") {
1231
- try {
1232
- const evt = JSON.parse(raw);
1233
- fullText += evt.text || evt.content || evt.delta?.text || "";
1234
- } catch {
1235
- fullText += raw;
1242
+ } else {
1243
+ sseBuffer += chunk; // Collect JSON body
1244
+ }
1245
+ });
1246
+
1247
+ proxyRes.on("end", () => {
1248
+ // Handle remaining buffer for SSE
1249
+ if (isSSE && sseBuffer) {
1250
+ for (const line of sseBuffer.split("\n")) {
1251
+ if (line.startsWith("data: ")) {
1252
+ const raw = line.slice(6).trim();
1253
+ if (raw && raw !== "[DONE]") {
1254
+ try {
1255
+ const evt = JSON.parse(raw);
1256
+ const delta = evt.choices?.[0]?.delta?.content || evt.delta?.text || evt.text || evt.content || "";
1257
+ if (delta) fullText += delta;
1258
+ } catch { fullText += raw; }
1236
1259
  }
1237
1260
  }
1238
1261
  }
1239
- if (fullText) {
1240
- session.messages.push({ role: "assistant", content: fullText, timestamp: Date.now() });
1241
- broadcastToWS({
1242
- type: "chat.final",
1243
- text: fullText,
1244
- content: fullText,
1245
- sessionKey,
1246
- role: "assistant",
1247
- timestamp: Date.now(),
1248
- });
1249
- console.log(` ${GREEN}✓${RESET} Gateway SSE response: ${fullText.substring(0, 80)}...`);
1250
- }
1251
- });
1252
- proxyRes.on("error", () => {});
1253
- } else {
1254
- // JSON response from gateway
1255
- const chunks = [];
1256
- proxyRes.on("data", (c) => chunks.push(c));
1257
- proxyRes.on("end", () => {
1258
- const body = Buffer.concat(chunks).toString("utf-8");
1262
+ }
1263
+
1264
+ // Handle JSON response
1265
+ if (!isSSE && sseBuffer) {
1259
1266
  try {
1260
- const jsonBody = JSON.parse(body);
1261
- const inlineResponse =
1262
- jsonBody.response || jsonBody.message || jsonBody.answer || jsonBody.text || "";
1263
- if (inlineResponse) {
1264
- session.messages.push({ role: "assistant", content: inlineResponse, timestamp: Date.now() });
1265
- broadcastToWS({
1266
- type: "chat.final",
1267
- text: inlineResponse,
1268
- content: inlineResponse,
1269
- sessionKey,
1270
- role: "assistant",
1271
- timestamp: Date.now(),
1272
- });
1273
- console.log(` ${GREEN}✓${RESET} Gateway JSON response: ${inlineResponse.substring(0, 80)}...`);
1274
- } else {
1275
- console.log(` ${DIM}Gateway returned JSON with no response field — waiting for MCP${RESET}`);
1276
- }
1267
+ const jsonBody = JSON.parse(sseBuffer);
1268
+ fullText = jsonBody.choices?.[0]?.message?.content || jsonBody.response || jsonBody.message || jsonBody.text || "";
1277
1269
  } catch {
1278
- console.log(` ${DIM}Gateway returned non-JSON (${body.length} bytes): ${body.substring(0, 200)}${RESET}`);
1279
- console.log(` ${DIM}Waiting for MCP agent to respond...${RESET}`);
1270
+ console.log(` ${DIM}BFF returned non-JSON (${sseBuffer.length} bytes): ${sseBuffer.substring(0, 200)}${RESET}`);
1280
1271
  }
1281
- });
1282
- }
1272
+ }
1273
+
1274
+ if (fullText) {
1275
+ session.messages.push({ role: "assistant", content: fullText, timestamp: Date.now() });
1276
+ broadcastToWS({
1277
+ type: "chat.final",
1278
+ text: fullText,
1279
+ content: fullText,
1280
+ sessionKey,
1281
+ role: "assistant",
1282
+ timestamp: Date.now(),
1283
+ });
1284
+ console.log(` ${GREEN}✓${RESET} AI response (${fullText.length} chars): ${fullText.substring(0, 80)}...`);
1285
+ } else {
1286
+ console.log(` ${DIM}BFF returned no content${RESET}`);
1287
+ }
1288
+ });
1289
+
1290
+ proxyRes.on("error", () => {});
1283
1291
  });
1284
1292
 
1285
1293
  proxyReq.on("timeout", () => {
1286
1294
  proxyReq.destroy();
1287
- console.log(` ${DIM}Gateway relay timed out — agent will respond via MCP${RESET}`);
1295
+ console.log(` ${DIM}BFF relay timed out${RESET}`);
1288
1296
  });
1289
1297
 
1290
1298
  proxyReq.on("error", (err) => {
1291
- console.log(` ${DIM}Gateway relay failed (${err.message}) — agent will respond via MCP${RESET}`);
1299
+ console.log(` ${DIM}BFF relay failed: ${err.message}${RESET}`);
1292
1300
  });
1293
1301
 
1294
1302
  proxyReq.write(proxyBody);
@@ -1493,41 +1501,28 @@ function handleRequest(req, res) {
1493
1501
  return;
1494
1502
  }
1495
1503
 
1496
- // ── SSE NDJSON conversion for streaming endpoints (/api/chat) ──
1497
- // The OC gateway returns standard SSE (data: {...}\n), but the web UI
1498
- // frontend expects clean JSON lines (NDJSON). Strip the "data: " prefix
1499
- // and forward clean JSON. Also tap data to broadcast via WebSocket
1500
- // so Navigator's sidepane chat gets the response too.
1504
+ // ── SSE→JSON for streaming endpoints (/api/chat) ──────────────────
1505
+ // The BFF returns SSE but the web UI's parser can't handle SSE through
1506
+ // a reverse proxy (it does JSON.parse on raw chunks without stripping
1507
+ // the "data: " prefix). So we collect the full SSE stream, extract the
1508
+ // text, and return a single JSON response. Also store + broadcast via WS.
1501
1509
  if (isSSE && isStreamingEndpoint) {
1502
- console.log(` ${DIM}SSE→NDJSON + WS tap: ${path}${RESET}`);
1503
-
1504
- // Change content-type so frontend doesn't try to parse as SSE
1505
- headers["content-type"] = "text/plain; charset=utf-8";
1506
- delete headers["content-length"];
1507
- delete headers["transfer-encoding"];
1508
- res.writeHead(proxyRes.statusCode ?? 200, headers);
1510
+ console.log(` ${DIM}SSE→JSON collect: ${path}${RESET}`);
1509
1511
 
1510
1512
  let fullText = "";
1511
- let sseBuffer = "";
1513
+ let sseData = "";
1512
1514
 
1513
1515
  proxyRes.setEncoding("utf-8");
1514
1516
  proxyRes.on("data", (chunk) => {
1515
- sseBuffer += chunk;
1516
- const lines = sseBuffer.split("\n");
1517
- sseBuffer = lines.pop() || ""; // keep incomplete last line
1517
+ sseData += chunk;
1518
+ });
1518
1519
 
1519
- for (const line of lines) {
1520
+ proxyRes.on("end", () => {
1521
+ // Extract text from all SSE events
1522
+ for (const line of sseData.split("\n")) {
1520
1523
  if (line.startsWith("data: ")) {
1521
1524
  const raw = line.slice(6).trim();
1522
- if (raw === "[DONE]" || !raw) {
1523
- continue;
1524
- }
1525
-
1526
- // Forward clean JSON line to browser (no "data: " prefix)
1527
- res.write(raw + "\n");
1528
- console.log(` ${DIM} SSE chunk: ${raw.substring(0, 100)}${raw.length > 100 ? "..." : ""}${RESET}`);
1529
-
1530
- // Also broadcast to WebSocket for Navigator sidepane
1525
+ if (raw === "[DONE]" || !raw) continue;
1531
1526
  try {
1532
1527
  const evt = JSON.parse(raw);
1533
1528
  const delta =
@@ -1536,41 +1531,33 @@ function handleRequest(req, res) {
1536
1531
  evt.text ||
1537
1532
  evt.content ||
1538
1533
  "";
1539
- if (delta) {
1540
- fullText += delta;
1541
- broadcastToWS({
1542
- type: "chat.delta",
1543
- text: fullText,
1544
- delta,
1545
- sessionKey: "main",
1546
- timestamp: Date.now(),
1547
- });
1548
- }
1534
+ if (delta) fullText += delta;
1549
1535
  } catch {
1550
- // Non-JSON SSE data — forward as text anyway
1551
- fullText += raw;
1536
+ if (raw) fullText += raw;
1552
1537
  }
1553
1538
  }
1554
1539
  }
1555
- });
1556
1540
 
1557
- proxyRes.on("end", () => {
1558
- // Process remaining buffer
1559
- if (sseBuffer.startsWith("data: ")) {
1560
- const raw = sseBuffer.slice(6).trim();
1561
- if (raw && raw !== "[DONE]") {
1562
- res.write(raw + "\n");
1563
- try {
1564
- const evt = JSON.parse(raw);
1565
- const delta = evt.choices?.[0]?.delta?.content || evt.delta?.text || evt.text || evt.content || "";
1566
- if (delta) fullText += delta;
1567
- } catch { fullText += raw; }
1568
- }
1569
- }
1541
+ // Return as a single OpenAI-compatible JSON response
1542
+ const result = {
1543
+ id: "chatcmpl_bridge_" + Date.now(),
1544
+ object: "chat.completion",
1545
+ created: Math.floor(Date.now() / 1000),
1546
+ choices: [{
1547
+ index: 0,
1548
+ message: { role: "assistant", content: fullText },
1549
+ finish_reason: "stop",
1550
+ }],
1551
+ };
1552
+
1553
+ headers["content-type"] = "application/json";
1554
+ delete headers["content-length"];
1555
+ delete headers["transfer-encoding"];
1556
+ res.writeHead(200, headers);
1557
+ res.end(JSON.stringify(result));
1570
1558
 
1571
- // Broadcast final message via WebSocket
1559
+ // Store in bridge session + broadcast via WebSocket
1572
1560
  if (fullText) {
1573
- // Also store as assistant message in bridge chat session
1574
1561
  const session = getChatSession("main");
1575
1562
  session.messages.push({ role: "assistant", content: fullText, timestamp: Date.now() });
1576
1563
 
@@ -1586,12 +1573,11 @@ function handleRequest(req, res) {
1586
1573
  } else {
1587
1574
  console.log(` ${DIM}SSE stream ended with no content${RESET}`);
1588
1575
  }
1589
- res.end();
1590
1576
  });
1591
1577
 
1592
1578
  proxyRes.on("error", (err) => {
1593
1579
  console.log(` ${DIM}SSE stream error: ${err.message}${RESET}`);
1594
- res.end();
1580
+ sendJSON(res, 502, { ok: false, error: "Stream error" });
1595
1581
  });
1596
1582
  return;
1597
1583
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openclaw-navigator",
3
- "version": "5.6.4",
3
+ "version": "5.7.1",
4
4
  "description": "One-command bridge + tunnel for the Navigator browser — works on any machine, any OS",
5
5
  "keywords": [
6
6
  "browser",