ruby-pi 0.1.3 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -37,7 +37,7 @@ module RubyPi
37
37
  # @param options [Hash] additional options passed to BaseProvider
38
38
  def initialize(model: nil, api_key: nil, max_tokens: DEFAULT_MAX_TOKENS, **options)
39
39
  super(**options)
40
- config = RubyPi.configuration
40
+ config = @config
41
41
  @model = model || config.default_anthropic_model
42
42
  @api_key = api_key || config.anthropic_api_key
43
43
  @max_tokens = max_tokens
@@ -172,9 +172,23 @@ module RubyPi
172
172
  tool_use_id = msg[:tool_call_id] || msg["tool_call_id"]
173
173
  content = msg[:content] || msg["content"]
174
174
 
175
+ # Fail fast with a descriptive error instead of sending "unknown" as
176
+ # the tool_use_id. Anthropic requires tool_use_id to match a preceding
177
+ # tool_use block; sending "unknown" causes an opaque HTTP 400 with no
178
+ # useful error message. Raising here gives the developer a clear signal
179
+ # about what went wrong.
180
+ if tool_use_id.nil? || tool_use_id.to_s.strip.empty?
181
+ raise RubyPi::ProviderError.new(
182
+ "Missing tool_call_id in tool result message. Anthropic requires " \
183
+ "tool_use_id to match a preceding tool_use block. Ensure every tool " \
184
+ "result message includes a valid :tool_call_id.",
185
+ provider: :anthropic
186
+ )
187
+ end
188
+
175
189
  block = {
176
190
  type: "tool_result",
177
- tool_use_id: tool_use_id || "unknown"
191
+ tool_use_id: tool_use_id
178
192
  }
179
193
 
180
194
  # Content can be a simple string or a structured content array.
@@ -225,10 +239,12 @@ module RubyPi
225
239
  tc_name = tc[:name] || tc["name"]
226
240
  tc_args = tc[:arguments] || tc["arguments"] || {}
227
241
 
228
- # Ensure arguments is a Hash; parse JSON string if needed
242
+ # Ensure arguments is a Hash; parse JSON string if needed.
243
+ # Issue #12: Guard against whitespace-only strings — they are truthy
244
+ # and non-empty, yet cause JSON::ParserError when parsed.
229
245
  tc_input = if tc_args.is_a?(Hash)
230
246
  tc_args
231
- elsif tc_args.is_a?(String) && !tc_args.empty?
247
+ elsif tc_args.is_a?(String) && !tc_args.strip.empty?
232
248
  begin
233
249
  JSON.parse(tc_args)
234
250
  rescue JSON::ParserError
@@ -238,17 +254,32 @@ module RubyPi
238
254
  {}
239
255
  end
240
256
 
257
+ # Fail fast if tool call ID is missing rather than sending "unknown"
258
+ # which causes an opaque Anthropic API 400 error.
259
+ if tc_id.nil? || tc_id.to_s.strip.empty?
260
+ raise RubyPi::ProviderError.new(
261
+ "Missing tool call ID in assistant message tool_calls. Anthropic " \
262
+ "requires each tool_use block to have a unique ID that subsequent " \
263
+ "tool_result blocks reference. Ensure every tool call includes an :id.",
264
+ provider: :anthropic
265
+ )
266
+ end
267
+
241
268
  content_blocks << {
242
269
  type: "tool_use",
243
- id: tc_id || "unknown",
270
+ id: tc_id,
244
271
  name: tc_name || "unknown",
245
272
  input: tc_input
246
273
  }
247
274
  end
248
275
  end
249
276
 
250
- # If no content blocks were generated (edge case), add an empty text
251
- # block to satisfy Anthropic's requirement for non-empty content.
277
+ # Anthropic requires every assistant message to have at least one
278
+ # content block. When an assistant turn contains only tool_use calls
279
+ # with no accompanying text (common in multi-tool responses), the
280
+ # content_blocks array may be empty after processing. Adding an empty
281
+ # text block satisfies the API's non-empty content constraint without
282
+ # altering the semantic content of the message.
252
283
  content_blocks << { type: "text", text: "" } if content_blocks.empty?
253
284
 
254
285
  { role: "assistant", content: content_blocks }
@@ -299,9 +330,11 @@ module RubyPi
299
330
  headers: default_headers
300
331
  )
301
332
 
302
- response = conn.post("/v1/messages") do |req|
303
- req.headers["Content-Type"] = "application/json"
304
- req.body = JSON.generate(body)
333
+ response = with_transport_errors do
334
+ conn.post("/v1/messages") do |req|
335
+ req.headers["Content-Type"] = "application/json"
336
+ req.body = JSON.generate(body)
337
+ end
305
338
  end
306
339
 
307
340
  handle_error_response(response) unless response.success?
@@ -310,6 +343,11 @@ module RubyPi
310
343
 
311
344
  # Executes a streaming request to the Anthropic API, yielding events.
312
345
  #
346
+ # Issue #22: Wraps JSON.parse(current_tool_json) at content_block_stop
347
+ # in a rescue block. If the stream was truncated or the accumulated JSON
348
+ # is malformed, raises a typed ProviderError with context instead of
349
+ # letting a bare JSON::ParserError escape from stream processing.
350
+ #
313
351
  # @param body [Hash] the request body
314
352
  # @yield [event] StreamEvent objects
315
353
  # @return [RubyPi::LLM::Response] final aggregated response
@@ -326,71 +364,107 @@ module RubyPi
326
364
  usage_data = {}
327
365
  finish_reason = nil
328
366
 
329
- response = conn.post("/v1/messages") do |req|
330
- req.headers["Content-Type"] = "application/json"
331
- req.body = JSON.generate(body)
332
- end
333
-
334
- handle_error_response(response) unless response.success?
335
-
336
- # Parse SSE events from the response body
337
- parse_sse_events(response.body) do |data|
338
- event_type = data["type"]
339
-
340
- case event_type
341
- when "content_block_start"
342
- content_block = data["content_block"] || {}
343
- if content_block["type"] == "tool_use"
344
- current_tool_call = {
345
- id: content_block["id"],
346
- name: content_block["name"]
347
- }
348
- current_tool_json = +""
349
- end
350
-
351
- when "content_block_delta"
352
- delta = data["delta"] || {}
353
- if delta["type"] == "text_delta"
354
- text = delta["text"] || ""
355
- accumulated_text << text
356
- block.call(StreamEvent.new(type: :text_delta, data: text))
357
- elsif delta["type"] == "input_json_delta"
358
- json_chunk = delta["partial_json"] || ""
359
- current_tool_json << json_chunk
360
- block.call(StreamEvent.new(type: :tool_call_delta, data: {
361
- id: current_tool_call&.dig(:id),
362
- partial_json: json_chunk
363
- }))
367
+ # Buffer for incomplete SSE lines across on_data chunks. Faraday's
368
+ # on_data callback delivers raw bytes as they arrive from the network,
369
+ # which may split SSE events mid-line. We accumulate a line buffer and
370
+ # process complete lines incrementally so that deltas reach the caller
371
+ # as soon as each SSE event is fully received — not after the entire
372
+ # response has been buffered.
373
+ sse_buffer = +""
374
+ response_status = nil
375
+
376
+ # Accumulate error response body separately so ApiError gets the
377
+ # full body even though on_data consumed the chunks.
378
+ error_body = +""
379
+
380
+ response = with_transport_errors do
381
+ conn.post("/v1/messages") do |req|
382
+ req.headers["Content-Type"] = "application/json"
383
+ req.body = JSON.generate(body)
384
+
385
+ # Use Faraday's on_data callback for real incremental streaming.
386
+ # Without this, Faraday buffers the entire response body before
387
+ # returning, which means no deltas reach the caller until the model
388
+ # finishes generating (fake streaming).
389
+ req.options.on_data = proc do |chunk, overall_received_bytes, env|
390
+ response_status ||= env&.status
391
+
392
+ # If the HTTP status indicates an error, accumulate the body for
393
+ # the error handler instead of parsing it as SSE events. Faraday
394
+ # calls on_data for error responses too, which would otherwise
395
+ # consume the body and leave response.body empty.
396
+ if response_status && response_status >= 400
397
+ error_body << chunk
398
+ next
364
399
  end
365
400
 
366
- when "content_block_stop"
367
- if current_tool_call
368
- arguments = current_tool_json.empty? ? {} : JSON.parse(current_tool_json)
369
- accumulated_tool_calls << ToolCall.new(
370
- id: current_tool_call[:id],
371
- name: current_tool_call[:name],
372
- arguments: arguments
401
+ sse_buffer << chunk
402
+ # Process all complete lines in the buffer
403
+ while (line_end = sse_buffer.index("\n"))
404
+ line = sse_buffer.slice!(0, line_end + 1).strip
405
+ next if line.empty?
406
+ next unless line.start_with?("data: ")
407
+
408
+ data_str = line.sub(/\Adata: /, "")
409
+ next if data_str == "[DONE]"
410
+
411
+ begin
412
+ data = JSON.parse(data_str)
413
+ rescue JSON::ParserError
414
+ next
415
+ end
416
+
417
+ # --- process each SSE event exactly as before ---
418
+ # Process the SSE event and update mutable locals from the
419
+ # returned hash. This keeps all streaming state method-local,
420
+ # avoiding thread-unsafe instance variables.
421
+ stream_state = process_anthropic_stream_event(
422
+ data, accumulated_text, accumulated_tool_calls,
423
+ current_tool_call, current_tool_json, usage_data, finish_reason, block
373
424
  )
374
- current_tool_call = nil
375
- current_tool_json = +""
376
- end
377
-
378
- when "message_delta"
379
- delta = data["delta"] || {}
380
- finish_reason = delta["stop_reason"]
381
- if data.key?("usage")
382
- usage_info = data["usage"]
383
- usage_data[:completion_tokens] = usage_info["output_tokens"]
425
+ current_tool_call = stream_state[:current_tool_call]
426
+ current_tool_json = stream_state[:current_tool_json]
427
+ finish_reason = stream_state[:finish_reason]
384
428
  end
429
+ end
430
+ end # conn.post
431
+ end # with_transport_errors
432
+
433
+ # Check for HTTP errors. When on_data was active, the response body
434
+ # was consumed by the callback, so we pass the accumulated error_body
435
+ # to handle_error_response for proper error messaging.
436
+ unless response.success?
437
+ # Reconstruct the response body from what on_data accumulated
438
+ error_response = response
439
+ error_body_str = error_body.empty? ? response.body : error_body
440
+ handle_error_response(error_response, override_body: error_body_str)
441
+ end
385
442
 
386
- when "message_start"
387
- if data.dig("message", "usage")
388
- usage_info = data["message"]["usage"]
389
- usage_data[:prompt_tokens] = usage_info["input_tokens"]
390
- end
443
+ # Process any remaining data in the buffer after the connection closes
444
+ sse_buffer.each_line do |line|
445
+ line = line.strip
446
+ next if line.empty?
447
+ next unless line.start_with?("data: ")
448
+ data_str = line.sub(/\Adata: /, "")
449
+ next if data_str == "[DONE]"
450
+ begin
451
+ data = JSON.parse(data_str)
452
+ rescue JSON::ParserError
453
+ next
391
454
  end
455
+ stream_state = process_anthropic_stream_event(
456
+ data, accumulated_text, accumulated_tool_calls,
457
+ current_tool_call, current_tool_json, usage_data, finish_reason, block
458
+ )
459
+ current_tool_call = stream_state[:current_tool_call]
460
+ current_tool_json = stream_state[:current_tool_json]
461
+ finish_reason = stream_state[:finish_reason]
392
462
  end
393
463
 
464
+ # (Event processing is now handled incrementally by the on_data callback
465
+ # above, which calls process_anthropic_stream_event for each complete
466
+ # SSE event as it arrives from the network.)
467
+
394
468
  # Signal completion
395
469
  block.call(StreamEvent.new(type: :done))
396
470
 
@@ -407,6 +481,108 @@ module RubyPi
407
481
  )
408
482
  end
409
483
 
484
+
485
+ # Processes a single Anthropic SSE event during streaming. Called by the
486
+ # on_data callback for each complete SSE event. Updates the mutable
487
+ # accumulator variables and yields deltas to the caller's block.
488
+ #
489
+ # Returns a hash with updated :current_tool_call, :current_tool_json,
490
+ # and :finish_reason values. The caller updates its own local variables
491
+ # from this hash, keeping all streaming state method-scoped and
492
+ # thread-safe.
493
+ #
494
+ # @param data [Hash] parsed SSE event payload
495
+ # @param accumulated_text [String] mutable text accumulator
496
+ # @param accumulated_tool_calls [Array] mutable tool call accumulator
497
+ # @param current_tool_call [Hash, nil] current in-progress tool call
498
+ # @param current_tool_json [String] current tool call JSON accumulator
499
+ # @param usage_data [Hash] mutable usage data accumulator
500
+ # @param finish_reason [String, nil] current finish reason
501
+ # @param block [Proc] the caller's streaming block
502
+ # @return [Hash] updated streaming state with :current_tool_call, :current_tool_json, :finish_reason
503
+ def process_anthropic_stream_event(data, accumulated_text, accumulated_tool_calls,
504
+ current_tool_call, current_tool_json,
505
+ usage_data, finish_reason, block)
506
+ event_type = data["type"]
507
+
508
+ case event_type
509
+ when "content_block_start"
510
+ content_block = data["content_block"] || {}
511
+ if content_block["type"] == "tool_use"
512
+ current_tool_call = {
513
+ id: content_block["id"],
514
+ name: content_block["name"]
515
+ }
516
+ current_tool_json = +""
517
+ end
518
+
519
+ when "content_block_delta"
520
+ delta = data["delta"] || {}
521
+ if delta["type"] == "text_delta"
522
+ text = delta["text"] || ""
523
+ accumulated_text << text
524
+ block.call(StreamEvent.new(type: :text_delta, data: text))
525
+ elsif delta["type"] == "input_json_delta"
526
+ json_chunk = delta["partial_json"] || ""
527
+ current_tool_json << json_chunk
528
+ block.call(StreamEvent.new(type: :tool_call_delta, data: {
529
+ id: current_tool_call&.dig(:id),
530
+ partial_json: json_chunk
531
+ }))
532
+ end
533
+
534
+ when "content_block_stop"
535
+ if current_tool_call
536
+ # Issue #22: Guard JSON.parse against truncated/malformed JSON.
537
+ # If the stream was interrupted mid-tool-call, the accumulated
538
+ # JSON may be incomplete. Rescue JSON::ParserError and raise a
539
+ # typed ProviderError with context about what failed.
540
+ arguments = if current_tool_json.strip.empty?
541
+ {}
542
+ else
543
+ begin
544
+ JSON.parse(current_tool_json)
545
+ rescue JSON::ParserError => e
546
+ raise RubyPi::ProviderError.new(
547
+ "Failed to parse streaming tool call arguments for " \
548
+ "'#{current_tool_call[:name]}': #{e.message} " \
549
+ "(accumulated JSON: #{current_tool_json.inspect})",
550
+ provider: :anthropic
551
+ )
552
+ end
553
+ end
554
+ accumulated_tool_calls << ToolCall.new(
555
+ id: current_tool_call[:id],
556
+ name: current_tool_call[:name],
557
+ arguments: arguments
558
+ )
559
+ current_tool_call = nil
560
+ current_tool_json = +""
561
+ end
562
+
563
+ when "message_delta"
564
+ delta = data["delta"] || {}
565
+ finish_reason = delta["stop_reason"]
566
+ if data.key?("usage")
567
+ usage_info = data["usage"]
568
+ usage_data[:completion_tokens] = usage_info["output_tokens"]
569
+ end
570
+
571
+ when "message_start"
572
+ if data.dig("message", "usage")
573
+ usage_info = data["message"]["usage"]
574
+ usage_data[:prompt_tokens] = usage_info["input_tokens"]
575
+ end
576
+ end
577
+
578
+ # Return mutable state as a hash so the caller can update its locals.
579
+ # This avoids thread-unsafe instance variables that would leak state
580
+ # across concurrent requests on the same provider instance.
581
+ { current_tool_call: current_tool_call,
582
+ current_tool_json: current_tool_json,
583
+ finish_reason: finish_reason }
584
+ end
585
+
410
586
  # Returns the default HTTP headers required by the Anthropic API.
411
587
  #
412
588
  # @return [Hash] headers hash
@@ -41,14 +41,18 @@ module RubyPi
41
41
 
42
42
  # Initializes the base provider with retry configuration.
43
43
  #
44
- # @param max_retries [Integer, nil] override max retries (defaults to global config)
45
- # @param retry_base_delay [Float, nil] override base delay (defaults to global config)
46
- # @param retry_max_delay [Float, nil] override max delay (defaults to global config)
47
- def initialize(max_retries: nil, retry_base_delay: nil, retry_max_delay: nil)
48
- config = RubyPi.configuration
49
- @max_retries = max_retries || config.max_retries
50
- @retry_base_delay = retry_base_delay || config.retry_base_delay
51
- @retry_max_delay = retry_max_delay || config.retry_max_delay
44
+ # @param config [RubyPi::Configuration, nil] optional per-agent config override.
45
+ # When provided, the provider uses this config instead of the global
46
+ # RubyPi.configuration singleton. This enables per-agent API keys,
47
+ # timeouts, and retry settings.
48
+ # @param max_retries [Integer, nil] override max retries (defaults to config)
49
+ # @param retry_base_delay [Float, nil] override base delay (defaults to config)
50
+ # @param retry_max_delay [Float, nil] override max delay (defaults to config)
51
+ def initialize(config: nil, max_retries: nil, retry_base_delay: nil, retry_max_delay: nil)
52
+ @config = config || RubyPi.configuration
53
+ @max_retries = max_retries || @config.max_retries
54
+ @retry_base_delay = retry_base_delay || @config.retry_base_delay
55
+ @retry_max_delay = retry_max_delay || @config.retry_max_delay
52
56
  end
53
57
 
54
58
  # Sends a completion request to the LLM provider with automatic retry
@@ -74,8 +78,13 @@ module RubyPi
74
78
  rescue RubyPi::AuthenticationError
75
79
  # Authentication errors are not retryable — raise immediately
76
80
  raise
77
- rescue RubyPi::RateLimitError, RubyPi::ApiError, RubyPi::TimeoutError => e
78
- if attempt < @max_retries
81
+ rescue RubyPi::RateLimitError, RubyPi::ApiError, RubyPi::TimeoutError, RubyPi::ProviderError => e
82
+ # Retry up to max_retries times AFTER the initial attempt.
83
+ # With max_retries: 3, attempt goes 1 (initial), 2, 3, 4 — the condition
84
+ # `attempt <= @max_retries` allows retries on attempts 1..3, so we get
85
+ # 3 retries + 1 initial = 4 total attempts. Previously used `< @max_retries`
86
+ # which was off-by-one (only 2 retries with max_retries: 3).
87
+ if attempt <= @max_retries
79
88
  delay = calculate_backoff(attempt)
80
89
  log_retry(attempt, delay, e)
81
90
  sleep(delay)
@@ -90,18 +99,18 @@ module RubyPi
90
99
  # Subclasses MUST override this method.
91
100
  #
92
101
  # @return [String] the model identifier
93
- # @raise [RubyPi::NotImplementedError] if not overridden
102
+ # @raise [RubyPi::AbstractMethodError] if not overridden
94
103
  def model_name
95
- raise RubyPi::NotImplementedError, :model_name
104
+ raise RubyPi::AbstractMethodError, :model_name
96
105
  end
97
106
 
98
107
  # Returns the provider identifier.
99
108
  # Subclasses MUST override this method.
100
109
  #
101
110
  # @return [Symbol] the provider identifier (e.g., :gemini, :anthropic, :openai)
102
- # @raise [RubyPi::NotImplementedError] if not overridden
111
+ # @raise [RubyPi::AbstractMethodError] if not overridden
103
112
  def provider_name
104
- raise RubyPi::NotImplementedError, :provider_name
113
+ raise RubyPi::AbstractMethodError, :provider_name
105
114
  end
106
115
 
107
116
  private
@@ -115,7 +124,7 @@ module RubyPi
115
124
  # @yield [event] optional block for streaming events
116
125
  # @return [RubyPi::LLM::Response]
117
126
  def perform_complete(messages:, tools:, stream:, &block)
118
- raise RubyPi::NotImplementedError, :perform_complete
127
+ raise RubyPi::AbstractMethodError, :perform_complete
119
128
  end
120
129
 
121
130
  # Calculates the backoff delay for a given retry attempt using
@@ -136,7 +145,7 @@ module RubyPi
136
145
  # @param error [Exception] the error that triggered the retry
137
146
  # @return [void]
138
147
  def log_retry(attempt, delay, error)
139
- logger = RubyPi.configuration.logger
148
+ logger = @config.logger
140
149
  return unless logger
141
150
 
142
151
  logger.warn(
@@ -145,13 +154,21 @@ module RubyPi
145
154
  )
146
155
  end
147
156
 
148
- # Builds a Faraday connection with retry middleware and standard settings.
157
+ # Builds a Faraday connection with standard settings.
158
+ #
159
+ # Issue #20: Removed incorrect retry-middleware claim from the
160
+ # docstring. The faraday-retry gem was listed as a dependency but never
161
+ # wired into the connection builder. Since retry logic is already
162
+ # implemented in BaseProvider#complete with exponential backoff (see
163
+ # the begin/rescue/retry block), the Faraday-level retry middleware is
164
+ # not needed and would cause confusing double-retry behavior. The
165
+ # faraday-retry dependency has been removed from the gemspec.
149
166
  #
150
167
  # @param base_url [String] the base URL for the API
151
168
  # @param headers [Hash] default headers for all requests
152
169
  # @return [Faraday::Connection]
153
170
  def build_connection(base_url:, headers: {})
154
- config = RubyPi.configuration
171
+ config = @config
155
172
 
156
173
  Faraday.new(url: base_url) do |conn|
157
174
  conn.headers.update(headers)
@@ -161,60 +178,80 @@ module RubyPi
161
178
  end
162
179
  end
163
180
 
181
+ # Wraps an HTTP block, translating Faraday transport-level exceptions
182
+ # (DNS failures, connection resets, TLS handshakes, read/write timeouts)
183
+ # into the RubyPi typed-error hierarchy so callers and the retry loop
184
+ # can rescue them uniformly.
185
+ #
186
+ # Without this wrapper, a `Faraday::TimeoutError` or
187
+ # `Faraday::ConnectionFailed` would propagate out of the provider as
188
+ # the raw Faraday class. That breaks two contracts:
189
+ # 1. The documented retry policy (BaseProvider#complete) only rescues
190
+ # RubyPi errors, so transport failures would not be retried —
191
+ # exactly the case retries exist for.
192
+ # 2. Callers `rescue RubyPi::TimeoutError` per the documented error
193
+ # hierarchy and would not catch real network timeouts.
194
+ #
195
+ # @yield the HTTP call to wrap
196
+ # @return [Object] whatever the block returns
197
+ # @raise [RubyPi::TimeoutError] on Faraday::TimeoutError
198
+ # @raise [RubyPi::ApiError] on connection failures, SSL errors, or
199
+ # any other Faraday::Error not otherwise classified
200
+ def with_transport_errors
201
+ yield
202
+ rescue Faraday::TimeoutError => e
203
+ raise RubyPi::TimeoutError, "#{provider_name} request timed out: #{e.message}"
204
+ rescue Faraday::ConnectionFailed, Faraday::SSLError => e
205
+ raise RubyPi::ApiError.new(
206
+ "#{provider_name} transport error: #{e.class}: #{e.message}",
207
+ status_code: nil,
208
+ response_body: nil
209
+ )
210
+ rescue Faraday::Error => e
211
+ # Catch-all for any other Faraday-level failure (parsing, adapter
212
+ # issues, etc.) so transport problems never leak provider internals.
213
+ raise RubyPi::ApiError.new(
214
+ "#{provider_name} HTTP client error: #{e.class}: #{e.message}",
215
+ status_code: nil,
216
+ response_body: nil
217
+ )
218
+ end
219
+
164
220
  # Handles HTTP error responses by raising the appropriate RubyPi error.
221
+ # When streaming with on_data, the response body is consumed by the
222
+ # callback and response.body may be empty. Pass override_body with the
223
+ # accumulated error chunks so the raised error contains the full body.
165
224
  #
166
225
  # @param response [Faraday::Response] the HTTP response
226
+ # @param override_body [String, nil] optional body to use instead of response.body
227
+ # (used when on_data consumed the body during streaming)
167
228
  # @raise [RubyPi::AuthenticationError] on 401 or 403
168
229
  # @raise [RubyPi::RateLimitError] on 429
169
230
  # @raise [RubyPi::ApiError] on other error status codes
170
- def handle_error_response(response)
231
+ def handle_error_response(response, override_body: nil)
232
+ body = override_body || response.body
171
233
  case response.status
172
234
  when 401, 403
173
235
  raise RubyPi::AuthenticationError.new(
174
236
  "#{provider_name} authentication failed (HTTP #{response.status})",
175
- response_body: response.body
237
+ response_body: body
176
238
  )
177
239
  when 429
178
240
  retry_after = response.headers["retry-after"]&.to_f
179
241
  raise RubyPi::RateLimitError.new(
180
242
  "#{provider_name} rate limit exceeded (HTTP 429)",
181
243
  retry_after: retry_after,
182
- response_body: response.body
244
+ response_body: body
183
245
  )
184
246
  else
185
247
  raise RubyPi::ApiError.new(
186
248
  "#{provider_name} API error (HTTP #{response.status})",
187
249
  status_code: response.status,
188
- response_body: response.body
250
+ response_body: body
189
251
  )
190
252
  end
191
253
  end
192
254
 
193
- # Processes a streaming response body line by line, parsing SSE events.
194
- # Yields parsed data hashes to the provided block.
195
- #
196
- # @param response_body [String] the raw SSE response body
197
- # @yield [data] parsed SSE event data
198
- # @yieldparam data [Hash] a parsed JSON event payload
199
- # @return [void]
200
- def parse_sse_events(response_body, &block)
201
- response_body.each_line do |line|
202
- line = line.strip
203
- next if line.empty?
204
- next unless line.start_with?("data: ")
205
-
206
- data_str = line.sub(/\Adata: /, "")
207
- next if data_str == "[DONE]"
208
-
209
- begin
210
- data = JSON.parse(data_str)
211
- block.call(data)
212
- rescue JSON::ParserError
213
- # Skip malformed SSE data lines
214
- next
215
- end
216
- end
217
- end
218
255
  end
219
256
  end
220
257
  end