bloby-bot 0.17.9 → 0.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- import{c as e,r as t,t as n}from"./jsx-runtime-C0W9Wf2W.js";import{n as r,r as i,t as a}from"./bloby-Cnt49fF0.js";var o=e(t(),1),s=n(),c=({code:e,language:t,raw:n,className:c,startLine:l,lineNumbers:u,...d})=>{let{shikiTheme:f}=(0,o.useContext)(i),p=r(),[m,h]=(0,o.useState)(n);return(0,o.useEffect)(()=>{if(!p){h(n);return}let r=p.highlight({code:e,language:t,themes:f},e=>{h(e)});r&&h(r)},[e,t,f,p,n]),(0,s.jsx)(a,{className:c,language:t,lineNumbers:u,result:m,startLine:l,...d})};export{c as HighlightedCodeBlockBody};
+ import{c as e,r as t,t as n}from"./jsx-runtime-C0W9Wf2W.js";import{n as r,r as i,t as a}from"./bloby-B8Qpaiaq.js";var o=e(t(),1),s=n(),c=({code:e,language:t,raw:n,className:c,startLine:l,lineNumbers:u,...d})=>{let{shikiTheme:f}=(0,o.useContext)(i),p=r(),[m,h]=(0,o.useState)(n);return(0,o.useEffect)(()=>{if(!p){h(n);return}let r=p.highlight({code:e,language:t,themes:f},e=>{h(e)});r&&h(r)},[e,t,f,p,n]),(0,s.jsx)(a,{className:c,language:t,lineNumbers:u,result:m,startLine:l,...d})};export{c as HighlightedCodeBlockBody};
@@ -0,0 +1 @@
+ import{i as e}from"./bloby-B8Qpaiaq.js";export{e as Mermaid};
@@ -4,7 +4,7 @@
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no, interactive-widget=resizes-content" />
  <title>Bloby Chat</title>
- <script type="module" crossorigin src="/bloby/assets/bloby-Cnt49fF0.js"></script>
+ <script type="module" crossorigin src="/bloby/assets/bloby-B8Qpaiaq.js"></script>
  <link rel="modulepreload" crossorigin href="/bloby/assets/jsx-runtime-C0W9Wf2W.js">
  <link rel="modulepreload" crossorigin href="/bloby/assets/globals-Y2sqR_Rk.js">
  <link rel="stylesheet" crossorigin href="/bloby/assets/globals-Dw5ZdZGt.css">
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "bloby-bot",
-   "version": "0.17.9",
+   "version": "0.18.0",
    "releaseNotes": [
      "1. react router implemented",
      "2. new workspace design",
@@ -277,6 +277,41 @@ function BlobyApp() {
  const { messages, streaming, streamBuffer, tools, hasMore, loadOlder, sendMessage, stopStreaming, clearContext } =
    useBlobyChat(clientRef.current, reloadTrigger, authenticated);

+ // Handle voice recordings sent from the widget bubble (long-press mic)
+ useEffect(() => {
+   const handler = async (e: MessageEvent) => {
+     if (e.data?.type !== 'bloby:voice-record') return;
+     const { audio, transcript } = e.data as { audio?: string; transcript?: string };
+
+     if (audio && whisperEnabled) {
+       // Whisper path: transcribe via WebSocket, then send message
+       const client = clientRef.current;
+       if (!client?.connected) return;
+       try {
+         const data = await new Promise<{ transcript?: string }>((resolve, reject) => {
+           const unsub = client.on('whisper:result', (d: { transcript?: string }) => {
+             unsub();
+             clearTimeout(timer);
+             resolve(d);
+           });
+           const timer = setTimeout(() => { unsub(); reject(new Error('Timeout')); }, 30000);
+           client.send('whisper:transcribe', { audio });
+         });
+         if (data.transcript?.trim()) {
+           sendMessage(data.transcript.trim(), undefined, `data:audio/webm;base64,${audio}`);
+         }
+       } catch (err) {
+         console.error('[BlobyApp] widget voice transcription error:', err);
+       }
+     } else if (transcript?.trim()) {
+       // Web Speech path: send transcript directly
+       sendMessage(transcript.trim());
+     }
+   };
+   window.addEventListener('message', handler);
+   return () => window.removeEventListener('message', handler);
+ }, [sendMessage, whisperEnabled]);
+
  // Auth gate: show spinner while checking, login screen if needed
  if (!authChecked) {
    return (
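Note: the listener above wraps a one-shot WebSocket reply in a Promise with a 30-second timeout, pairing the unsubscribe function returned by `client.on` with a `setTimeout` so neither a late reply nor a dead connection leaks a handler. A minimal sketch of that pattern as a reusable helper, assuming a client whose `on()` returns an unsubscribe function as the diff implies; `BlobyClient` and `request` are illustrative names, not part of the package:

```ts
// Hypothetical interface matching the on()/send() calls visible in the diff.
interface BlobyClient {
  connected: boolean;
  on(event: string, cb: (data: any) => void): () => void; // returns unsubscribe
  send(event: string, payload: unknown): void;
}

function request<T>(
  client: BlobyClient,
  sendEvent: string,
  payload: unknown,
  resultEvent: string,
  timeoutMs = 30000,
): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    // Subscribe before sending so a fast reply cannot be missed.
    const unsub = client.on(resultEvent, (d: T) => {
      unsub();
      clearTimeout(timer); // safe: this callback only fires after `timer` is assigned
      resolve(d);
    });
    const timer = setTimeout(() => {
      unsub(); // stop listening so a late reply does not hit a dead handler
      reject(new Error('Timeout'));
    }, timeoutMs);
    client.send(sendEvent, payload);
  });
}

// Usage mirroring the diff:
//   const data = await request<{ transcript?: string }>(
//     client, 'whisper:transcribe', { audio }, 'whisper:result');
```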
@@ -77,6 +77,7 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
  const dragRef = useRef(0);
  const holdTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  const isHolding = useRef(false);
+ const pointerIsDown = useRef(false);
  const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const audioChunksRef = useRef<Blob[]>([]);
@@ -255,6 +256,7 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
  // ── Mic pointer handlers ──
  const handleMicDown = useCallback((e: RPointerEvent) => {
    e.preventDefault();
+   pointerIsDown.current = true;
    startXRef.current = e.clientX;
    dragRef.current = 0;
    (e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
@@ -267,6 +269,11 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
  if (whisperEnabled) {
    // Whisper path: need getUserMedia + MediaRecorder for audio capture
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+   // If user released while permission dialog was showing, clean up and bail
+   if (!pointerIsDown.current) {
+     stream.getTracks().forEach((t) => t.stop());
+     return;
+   }
    streamRef.current = stream;
    const mimeType = MediaRecorder.isTypeSupported('audio/webm;codecs=opus') ? 'audio/webm;codecs=opus' : 'audio/webm';
    const recorder = new MediaRecorder(stream, { mimeType });
@@ -279,6 +286,10 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
  } else {
    // Web Speech path: only SpeechRecognition, no getUserMedia (avoids mic conflict on mobile)
    startSpeech();
+   if (!pointerIsDown.current) {
+     abortSpeech();
+     return;
+   }
  }

  isHolding.current = true;
@@ -288,7 +299,7 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
      console.error('[InputBar] recording setup failed:', err);
    }
  }, 200);
- }, [voiceEnabled, whisperEnabled, startSpeech]);
+ }, [voiceEnabled, whisperEnabled, startSpeech, abortSpeech]);

  const handleMicMove = useCallback((e: RPointerEvent) => {
    if (!isHolding.current) return;
@@ -306,6 +317,7 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
  }, [stopRecording]);

  const handleMicUp = useCallback(() => {
+   pointerIsDown.current = false;
    if (holdTimerRef.current) { clearTimeout(holdTimerRef.current); holdTimerRef.current = null; }
    if (!isHolding.current) {
      return;
@@ -314,6 +326,7 @@ export default function InputBar({ onSend, onStop, streaming, whisperEnabled, on
  }, [stopRecording]);

  const handleMicCancel = useCallback(() => {
+   pointerIsDown.current = false;
    if (holdTimerRef.current) { clearTimeout(holdTimerRef.current); holdTimerRef.current = null; }
    if (isHolding.current) stopRecording(true);
  }, [stopRecording]);
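Note: the `pointerIsDown` ref threaded through these hunks guards a race: `await getUserMedia(...)` can sit pending behind the browser's permission prompt long after the user has released the mic button, so the code re-checks the ref after the await and stops the tracks if the press already ended. A standalone sketch of the same guard, under the assumption that a press-and-hold recorder is being built; `onPressStart` and `onPressEnd` are illustrative names, not the component's real handlers:

```ts
// `pointerIsDown` mirrors the ref in the diff; a plain object stands in for useRef.
const pointerIsDown = { current: false };
let activeStream: MediaStream | null = null;

async function onPressStart(): Promise<void> {
  pointerIsDown.current = true;
  // The permission prompt can hold this await open for seconds,
  // during which the user may release or cancel the press.
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  if (!pointerIsDown.current) {
    // Released while the prompt was up: stop the tracks so the browser's
    // mic indicator turns off instead of recording with nobody holding.
    stream.getTracks().forEach((t) => t.stop());
    return;
  }
  activeStream = stream;
  // ...hand the stream to a MediaRecorder here...
}

function onPressEnd(): void {
  pointerIsDown.current = false;
  activeStream?.getTracks().forEach((t) => t.stop());
  activeStream = null;
}
```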