openclaw-navigator 5.7.8 → 5.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/cli.mjs +46 -8
  2. package/package.json +1 -1
package/cli.mjs CHANGED
@@ -1613,15 +1613,53 @@ function handleRequest(req, res) {
1613
1613
  return;
1614
1614
  }
1615
1615
 
1616
- // ── SSE for streaming endpoints (/api/chat) ──────────────────────
1617
- // DO NOT transform SSE — pipe raw bytes through untouched.
1618
- // The web UI works fine hitting port 4000 directly; any transformation
1619
- // we add (SSE→JSON, SSE→NDJSON, setEncoding) breaks the frontend parser.
1620
- // Raw binary pipe preserves the exact chunking the BFF sends.
1616
+ // ── SSE collect for /api/chat ─────────────────────────────────────
1617
+ // The frontend can't parse raw SSE through a proxy. Collect the full
1618
+ // response, extract the text, and return in Vercel AI stream format
1619
+ // (0:"text"\n); this is what Next.js useChat() expects.
1621
1620
  if (isSSE && isStreamingEndpoint) {
1622
- console.log(` ${DIM}SSE raw pipe: ${path}${RESET}`);
1623
- res.writeHead(proxyRes.statusCode ?? 200, headers);
1624
- proxyRes.pipe(res, { end: true });
1621
+ let sseData = "";
1622
+ proxyRes.setEncoding("utf-8");
1623
+ proxyRes.on("data", (chunk) => { sseData += chunk; });
1624
+ proxyRes.on("end", () => {
1625
+ // Extract text from SSE events
1626
+ let fullText = "";
1627
+ for (const line of sseData.split("\n")) {
1628
+ if (line.startsWith("data: ")) {
1629
+ const raw = line.slice(6).trim();
1630
+ if (raw === "[DONE]" || !raw) continue;
1631
+ try {
1632
+ const evt = JSON.parse(raw);
1633
+ const delta =
1634
+ evt.choices?.[0]?.delta?.content ||
1635
+ evt.delta?.text ||
1636
+ evt.text ||
1637
+ evt.content || "";
1638
+ if (delta) fullText += delta;
1639
+ } catch {
1640
+ if (raw) fullText += raw;
1641
+ }
1642
+ }
1643
+ // Also handle Vercel AI stream format (0:"text") passthrough
1644
+ if (line.startsWith("0:")) {
1645
+ try {
1646
+ fullText += JSON.parse(line.slice(2));
1647
+ } catch { /* skip */ }
1648
+ }
1649
+ }
1650
+
1651
+ console.log(` ${GREEN}✓${RESET} Chat (${fullText.length} chars): ${fullText.substring(0, 80)}...`);
1652
+
1653
+ // Return as Vercel AI text stream: 0:"text"\n
1654
+ // This is what useChat() from the "ai" package expects.
1655
+ const aiStream = `0:${JSON.stringify(fullText)}\n`;
1656
+ headers["content-type"] = "text/plain; charset=utf-8";
1657
+ delete headers["content-length"];
1658
+ delete headers["transfer-encoding"];
1659
+ res.writeHead(200, headers);
1660
+ res.end(aiStream);
1661
+ });
1662
+ proxyRes.on("error", () => res.end());
1625
1663
  return;
1626
1664
  }
1627
1665
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openclaw-navigator",
3
- "version": "5.7.8",
3
+ "version": "5.8.0",
4
4
  "description": "One-command bridge + tunnel for the Navigator browser — works on any machine, any OS",
5
5
  "keywords": [
6
6
  "browser",