bloby-bot 0.47.7 → 0.47.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -141,14 +141,17 @@ export function createPiSession(init: PiSessionInit): PiSession {
141
141
  }
142
142
  }
143
143
 
144
- async function runOneTurn(userMsg: PiMessage): Promise<void> {
144
+ async function runOneTurn(input: AsyncQueue<PiMessage>, firstUserMsg: PiMessage): Promise<void> {
145
145
  if (init.abortController.signal.aborted) return;
146
- messages.push(userMsg);
146
+ // Stack any messages that arrived in the same millisecond into one turn.
147
+ messages.push(firstUserMsg);
148
+ for (const extra of input.drainPending()) messages.push(extra);
147
149
  init.onEvent({ type: 'turn_started' });
148
150
 
149
151
  let accumulatedText = '';
150
152
  const usedTools = new Set<string>();
151
153
  let turnErrored = false;
154
+ let pendingInterleave = false;
152
155
 
153
156
  for (let round = 0; round < MAX_TOOL_ROUNDS; round++) {
154
157
  if (init.abortController.signal.aborted) break;
@@ -177,7 +180,6 @@ export function createPiSession(init: PiSessionInit): PiSession {
177
180
  }
178
181
 
179
182
  if (errored) { turnErrored = true; break; }
180
- if (toolUses.length === 0) break; // model finished — exit loop
181
183
 
182
184
  // Run every tool the model asked for this round, then feed the results
183
185
  // back as a single user message Gemini accepts as a batch.
@@ -198,7 +200,24 @@ export function createPiSession(init: PiSessionInit): PiSession {
198
200
  if (toolResultBlocks.length > 0) {
199
201
  messages.push({ role: 'user', content: toolResultBlocks });
200
202
  }
201
- // Loop continues — re-stream with the new tool results in context.
203
+
204
+ // Fold any user messages that arrived during this round into history so
205
+ // the next stream pass sees them. This is what makes the conversation
206
+ // feel alive: while the agent is grinding on a long task, a question
207
+ // typed mid-stream lands in the very next request as a user-role part,
208
+ // and the model can answer it inline before continuing.
209
+ const interleaved = input.drainPending();
210
+ if (interleaved.length > 0) {
211
+ log.info(`[pi/session] interleaved ${interleaved.length} mid-turn user message(s) into history`);
212
+ for (const m of interleaved) messages.push(m);
213
+ pendingInterleave = true;
214
+ } else {
215
+ pendingInterleave = false;
216
+ }
217
+
218
 + // Exit only when the model has nothing more to do AND no new user messages
219
 + // arrived mid-round. A pending tool call or a mid-round message each, on its own, keeps the loop alive.
220
+ if (toolUses.length === 0 && !pendingInterleave) break;
202
221
  }
203
222
 
204
223
  if (!turnErrored) {
@@ -215,7 +234,7 @@ export function createPiSession(init: PiSessionInit): PiSession {
215
234
  for await (const userMsg of input) {
216
235
  if (init.abortController.signal.aborted) break;
217
236
  try {
218
- await runOneTurn(userMsg);
237
+ await runOneTurn(input, userMsg);
219
238
  } catch (err: any) {
220
239
  log.warn(`[pi/session] Turn failed: ${err?.message || err}`);
221
240
  init.onEvent({ type: 'error', error: err?.message || String(err) });
@@ -0,0 +1,20 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 800 800">
3
+ <path fill="#fff" fill-rule="evenodd" d="
4
+ M165.29 165.29
5
+ H517.36
6
+ V400
7
+ H400
8
+ V517.36
9
+ H282.65
10
+ V634.72
11
+ H165.29
12
+ Z
13
+ M282.65 282.65
14
+ V400
15
+ H400
16
+ V282.65
17
+ Z
18
+ "/>
19
+ <path fill="#fff" d="M517.36 400 H634.72 V634.72 H517.36 Z"/>
20
+ </svg>