tunacode-cli 0.0.76__py3-none-any.whl → 0.0.76.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tunacode-cli might be problematic; see the package registry's advisory page for more details.

@@ -48,6 +48,7 @@ from .agent_components import (
48
48
  parse_json_tool_calls,
49
49
  patch_tool_messages,
50
50
  )
51
+ from .agent_components.streaming import stream_model_request_node
51
52
 
52
53
  # Import streaming types with fallback for older versions
53
54
  try:
@@ -174,43 +175,14 @@ async def process_request(
174
175
  # Handle token-level streaming for model request nodes
175
176
  Agent, _ = get_agent_tool()
176
177
  if streaming_callback and STREAMING_AVAILABLE and Agent.is_model_request_node(node):
177
- # Gracefully handle streaming errors from LLM provider
178
- for attempt in range(2): # simple retry once, then degrade gracefully
179
- try:
180
- async with node.stream(agent_run.ctx) as request_stream:
181
- async for event in request_stream:
182
- if isinstance(event, PartDeltaEvent) and isinstance(
183
- event.delta, TextPartDelta
184
- ):
185
- # Stream individual token deltas
186
- if event.delta.content_delta and streaming_callback:
187
- await streaming_callback(event.delta.content_delta)
188
- break # successful streaming; exit retry loop
189
- except Exception as stream_err:
190
- # Log with context and optionally notify UI, then retry once
191
- logger.warning(
192
- "Streaming error (attempt %s/2) req=%s iter=%s: %s",
193
- attempt + 1,
194
- request_id,
195
- i,
196
- stream_err,
197
- exc_info=True,
198
- )
199
- if getattr(state_manager.session, "show_thoughts", False):
200
- from tunacode.ui import console as ui
201
-
202
- await ui.warning(
203
- "⚠️ Streaming failed; retrying once then falling back"
204
- )
205
- # On second failure, degrade gracefully (no streaming)
206
- if attempt == 1:
207
- if getattr(state_manager.session, "show_thoughts", False):
208
- from tunacode.ui import console as ui
209
-
210
- await ui.muted(
211
- "Switching to non-streaming processing for this node"
212
- )
213
- break
178
+ await stream_model_request_node(
179
+ node,
180
+ agent_run.ctx,
181
+ state_manager,
182
+ streaming_callback,
183
+ request_id,
184
+ i,
185
+ )
214
186
 
215
187
  empty_response, empty_reason = await _process_node(
216
188
  node,
@@ -242,7 +214,7 @@ async def process_request(
242
214
  from tunacode.ui import console as ui
243
215
 
244
216
  await ui.warning(
245
- "\n⚠️ EMPTY RESPONSE FAILURE - AGGRESSIVE RETRY TRIGGERED"
217
+ "\nEMPTY RESPONSE FAILURE - AGGRESSIVE RETRY TRIGGERED"
246
218
  )
247
219
  await ui.muted(f" Reason: {empty_reason}")
248
220
  await ui.muted(
@@ -297,7 +269,7 @@ NO MORE DESCRIPTIONS. Take ACTION or mark COMPLETE."""
297
269
  from tunacode.ui import console as ui
298
270
 
299
271
  await ui.warning(
300
- f"⚠️ NO PROGRESS: {unproductive_iterations} iterations without tool usage"
272
+ f"NO PROGRESS: {unproductive_iterations} iterations without tool usage"
301
273
  )
302
274
 
303
275
  unproductive_iterations = 0
@@ -345,7 +317,7 @@ Otherwise, please provide specific guidance on what to do next."""
345
317
  from tunacode.ui import console as ui
346
318
 
347
319
  await ui.muted(
348
- "\n🤔 SEEKING CLARIFICATION: Asking user for guidance on task progress"
320
+ "\nSEEKING CLARIFICATION: Asking user for guidance on task progress"
349
321
  )
350
322
 
351
323
  response_state.awaiting_user_guidance = True
@@ -381,7 +353,7 @@ Please let me know how to proceed."""
381
353
  from tunacode.ui import console as ui
382
354
 
383
355
  await ui.muted(
384
- f"\n📊 ITERATION LIMIT: Asking user for guidance at {max_iterations} iterations"
356
+ f"\nITERATION LIMIT: Asking user for guidance at {max_iterations} iterations"
385
357
  )
386
358
 
387
359
  max_iterations += 5
@@ -408,7 +380,7 @@ Please let me know how to proceed."""
408
380
 
409
381
  await ui.muted("\n" + "=" * 60)
410
382
  await ui.muted(
411
- f"🚀 FINAL BATCH: Executing {len(buffered_tasks)} buffered read-only tools"
383
+ f"FINAL BATCH: Executing {len(buffered_tasks)} buffered read-only tools"
412
384
  )
413
385
  await ui.muted("=" * 60)
414
386
 
@@ -435,7 +407,7 @@ Please let me know how to proceed."""
435
407
  speedup = sequential_estimate / elapsed_time if elapsed_time > 0 else 1.0
436
408
 
437
409
  await ui.muted(
438
- f"Final batch completed in {elapsed_time:.0f}ms "
410
+ f"Final batch completed in {elapsed_time:.0f}ms "
439
411
  f"(~{speedup:.1f}x faster than sequential)\n"
440
412
  )
441
413
 
@@ -501,6 +473,3 @@ Please let me know how to proceed."""
501
473
  )
502
474
  # Re-raise to be handled by caller
503
475
  raise
504
-
505
-
506
- 1
@@ -6,6 +6,7 @@ and keep responsibilities focused.
6
6
 
7
7
  import json
8
8
  from pathlib import Path
9
+ from typing import Dict, Optional
9
10
 
10
11
  from tunacode.constants import UI_COLORS
11
12
  from tunacode.exceptions import ConfigurationError
@@ -20,7 +21,7 @@ class ConfigWizard:
20
21
  self.state_manager = state_manager
21
22
  self.model_registry = model_registry
22
23
  self.config_file = config_file
23
- self._wizard_selected_provider = None
24
+ self._wizard_selected_provider: Optional[Dict[str, str]] = None
24
25
 
25
26
  async def run_onboarding(self) -> None:
26
27
  """Run enhanced wizard-style onboarding process for new users."""