durable-llm 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CONFIGURE.md +132 -0
  5. data/Gemfile +7 -9
  6. data/Gemfile.lock +3 -3
  7. data/README.md +1 -0
  8. data/Rakefile +6 -6
  9. data/devenv.lock +103 -0
  10. data/devenv.nix +9 -0
  11. data/devenv.yaml +15 -0
  12. data/durable-llm.gemspec +44 -0
  13. data/examples/openai_quick_complete.rb +3 -1
  14. data/lib/durable/llm/cli.rb +247 -60
  15. data/lib/durable/llm/client.rb +92 -11
  16. data/lib/durable/llm/configuration.rb +174 -23
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/providers/anthropic.rb +246 -36
  19. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  20. data/lib/durable/llm/providers/base.rb +106 -9
  21. data/lib/durable/llm/providers/cohere.rb +227 -0
  22. data/lib/durable/llm/providers/deepseek.rb +233 -0
  23. data/lib/durable/llm/providers/fireworks.rb +278 -0
  24. data/lib/durable/llm/providers/google.rb +301 -0
  25. data/lib/durable/llm/providers/groq.rb +108 -29
  26. data/lib/durable/llm/providers/huggingface.rb +122 -18
  27. data/lib/durable/llm/providers/mistral.rb +431 -0
  28. data/lib/durable/llm/providers/openai.rb +162 -25
  29. data/lib/durable/llm/providers/opencode.rb +253 -0
  30. data/lib/durable/llm/providers/openrouter.rb +256 -0
  31. data/lib/durable/llm/providers/perplexity.rb +273 -0
  32. data/lib/durable/llm/providers/together.rb +346 -0
  33. data/lib/durable/llm/providers/xai.rb +355 -0
  34. data/lib/durable/llm/providers.rb +103 -15
  35. data/lib/durable/llm/version.rb +5 -1
  36. data/lib/durable/llm.rb +143 -3
  37. data/lib/durable.rb +29 -4
  38. data/sig/durable/llm.rbs +302 -1
  39. metadata +50 -36
data/lib/durable/llm/providers/openai.rb

@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+
+# This file implements the OpenAI provider for accessing OpenAI's language models through their API, providing completion, embedding, and streaming capabilities with authentication handling, error management, and response normalization. It establishes HTTP connections to OpenAI's v1 API endpoint, processes chat completions and embeddings with organization support, handles various API error responses including rate limiting and authentication errors, and includes comprehensive response classes to format OpenAI's API responses into a consistent interface. The provider supports both regular and streaming response modes using event stream parsing for real-time token streaming, and includes specialized handling for embedding responses alongside standard chat completion functionality.
+
 require 'faraday'
 require 'json'
 require 'durable/llm/errors'
@@ -7,17 +11,66 @@ require 'event_stream_parser'
 module Durable
   module Llm
     module Providers
+      # OpenAI provider for accessing OpenAI's language models through their API.
+      #
+      # This provider implements the Durable::Llm::Providers::Base interface to provide
+      # completion, embedding, and streaming capabilities for OpenAI's models including
+      # GPT-3.5, GPT-4, and their variants. It handles authentication via API keys,
+      # supports organization-based access, and provides comprehensive error handling
+      # for various OpenAI API error conditions.
+      #
+      # Key features:
+      # - Chat completions with support for multi-turn conversations
+      # - Text embeddings for semantic similarity and retrieval tasks
+      # - Real-time streaming responses for interactive applications
+      # - Automatic model listing from OpenAI's API
+      # - Organization support for enterprise accounts
+      # - Comprehensive error handling with specific exception types
+      #
+      # @example Basic completion
+      #   provider = Durable::Llm::Providers::OpenAI.new(api_key: 'your-api-key')
+      #   response = provider.completion(
+      #     model: 'gpt-3.5-turbo',
+      #     messages: [{ role: 'user', content: 'Hello, world!' }]
+      #   )
+      #   puts response.choices.first.to_s
+      #
+      # @example Streaming response
+      #   provider.stream(model: 'gpt-4', messages: messages) do |chunk|
+      #     print chunk.to_s
+      #   end
+      #
+      # @example Text embedding
+      #   embedding = provider.embedding(
+      #     model: 'text-embedding-ada-002',
+      #     input: 'Some text to embed'
+      #   )
+      #
+      # @see https://platform.openai.com/docs/api-reference OpenAI API Documentation
       class OpenAI < Durable::Llm::Providers::Base
         BASE_URL = 'https://api.openai.com/v1'
 
         def default_api_key
-          Durable::Llm.configuration.openai&.api_key || ENV['OPENAI_API_KEY']
+          begin
+            Durable::Llm.configuration.openai&.api_key
+          rescue NoMethodError
+            nil
+          end || ENV['OPENAI_API_KEY']
         end
 
+        # @!attribute [rw] api_key
+        #   @return [String, nil] The API key used for authentication with OpenAI
+        # @!attribute [rw] organization
+        #   @return [String, nil] The OpenAI organization ID for enterprise accounts
         attr_accessor :api_key, :organization
 
+        # Initializes a new OpenAI provider instance.
+        #
+        # @param api_key [String, nil] The OpenAI API key. If nil, uses default_api_key
+        # @param organization [String, nil] The OpenAI organization ID. If nil, uses ENV['OPENAI_ORGANIZATION']
+        # @return [OpenAI] A new OpenAI provider instance
         def initialize(api_key: nil, organization: nil)
-          @api_key = api_key || default_api_key
+          super(api_key: api_key)
          @organization = organization || ENV['OPENAI_ORGANIZATION']
          @conn = Faraday.new(url: BASE_URL) do |faraday|
            faraday.request :json
@@ -26,6 +79,19 @@ module Durable
          end
        end
 
+        # Performs a chat completion request to OpenAI's API.
+        #
+        # @param options [Hash] The completion options
+        # @option options [String] :model The model to use (e.g., 'gpt-3.5-turbo', 'gpt-4')
+        # @option options [Array<Hash>] :messages Array of message objects with role and content
+        # @option options [Float] :temperature Sampling temperature between 0 and 2
+        # @option options [Integer] :max_tokens Maximum number of tokens to generate
+        # @option options [Float] :top_p Nucleus sampling parameter
+        # @return [OpenAIResponse] The completion response object
+        # @raise [Durable::Llm::AuthenticationError] If API key is invalid
+        # @raise [Durable::Llm::RateLimitError] If rate limit is exceeded
+        # @raise [Durable::Llm::InvalidRequestError] If request parameters are invalid
+        # @raise [Durable::Llm::ServerError] If OpenAI's servers encounter an error
         def completion(options)
           response = @conn.post('chat/completions') do |req|
             req.headers['Authorization'] = "Bearer #{@api_key}"
@@ -36,6 +102,16 @@ module Durable
           handle_response(response)
         end
 
+        # Performs an embedding request to OpenAI's API.
+        #
+        # @param model [String] The embedding model to use (e.g., 'text-embedding-ada-002')
+        # @param input [String, Array<String>] The text(s) to embed
+        # @param options [Hash] Additional options for the embedding request
+        # @return [OpenAIEmbeddingResponse] The embedding response object
+        # @raise [Durable::Llm::AuthenticationError] If API key is invalid
+        # @raise [Durable::Llm::RateLimitError] If rate limit is exceeded
+        # @raise [Durable::Llm::InvalidRequestError] If request parameters are invalid
+        # @raise [Durable::Llm::ServerError] If OpenAI's servers encounter an error
         def embedding(model:, input:, **options)
           response = @conn.post('embeddings') do |req|
             req.headers['Authorization'] = "Bearer #{@api_key}"
@@ -46,6 +122,12 @@ module Durable
           handle_response(response, OpenAIEmbeddingResponse)
         end
 
+        # Retrieves the list of available models from OpenAI's API.
+        #
+        # @return [Array<String>] Array of model IDs available to the account
+        # @raise [Durable::Llm::AuthenticationError] If API key is invalid
+        # @raise [Durable::Llm::RateLimitError] If rate limit is exceeded
+        # @raise [Durable::Llm::ServerError] If OpenAI's servers encounter an error
         def models
           response = @conn.get('models') do |req|
             req.headers['Authorization'] = "Bearer #{@api_key}"
@@ -55,39 +137,37 @@ module Durable
           handle_response(response).data.map { |model| model['id'] }
         end
 
-        def self.models
-          self.new.models
-        end
-
+        # @return [Boolean] True, indicating this provider supports streaming
         def self.stream?
           true
         end
 
-        def stream(options, &block)
-
+        # Performs a streaming chat completion request to OpenAI's API.
+        #
+        # @param options [Hash] The stream options (same as completion plus stream: true)
+        # @yield [OpenAIStreamResponse] Yields stream response chunks as they arrive
+        # @return [Object] The final response object
+        # @raise [Durable::Llm::AuthenticationError] If API key is invalid
+        # @raise [Durable::Llm::RateLimitError] If rate limit is exceeded
+        # @raise [Durable::Llm::InvalidRequestError] If request parameters are invalid
+        # @raise [Durable::Llm::ServerError] If OpenAI's servers encounter an error
+        def stream(options)
           options[:stream] = true
 
           response = @conn.post('chat/completions') do |req|
-
             req.headers['Authorization'] = "Bearer #{@api_key}"
             req.headers['OpenAI-Organization'] = @organization if @organization
             req.headers['Accept'] = 'text/event-stream'
 
-            if options['temperature']
-              options['temperature'] = options['temperature'].to_f
-            end
+            options['temperature'] = options['temperature'].to_f if options['temperature']
 
             req.body = options
 
-            user_proc = Proc.new do |chunk, size, total|
-
-
+            user_proc = proc do |chunk, _size, _total|
               yield OpenAIStreamResponse.new(chunk)
-
             end
 
-            req.options.on_data = to_json_stream( user_proc: user_proc )
-
+            req.options.on_data = to_json_stream(user_proc: user_proc)
           end
 
           handle_response(response)
@@ -95,6 +175,15 @@ module Durable
 
         private
 
+        # Converts JSON stream chunks to individual data objects for processing.
+        #
+        # This method is adapted from the ruby-openai gem to handle Server-Sent Events
+        # from OpenAI's streaming API. It parses the event stream and yields individual
+        # JSON objects for each data chunk received.
+        #
+        # @param user_proc [Proc] The proc to call with each parsed JSON object
+        # @return [Proc] A proc that can be used as Faraday's on_data callback
+        # @note Adapted from ruby-openai gem under MIT License
         # CODE-FROM: ruby-openai @ https://github.com/alexrudall/ruby-openai/blob/main/lib/openai/http.rb
         # MIT License: https://github.com/alexrudall/ruby-openai/blob/main/LICENSE.md
         # Given a proc, returns an outer proc that can be used to iterate over a JSON stream of chunks.
@@ -113,11 +202,15 @@
             end
 
             parser.feed(chunk) do |_type, data|
-              user_proc.call(JSON.parse(data)) unless data == "[DONE]"
+              user_proc.call(JSON.parse(data)) unless data == '[DONE]'
             end
           end
         end
 
+        # Attempts to parse a string as JSON, returning the string if parsing fails.
+        #
+        # @param maybe_json [String] The string that might be JSON
+        # @return [Hash, Array, String] The parsed JSON object or the original string
         def try_parse_json(maybe_json)
           JSON.parse(maybe_json)
         rescue JSON::ParserError
@@ -125,11 +218,21 @@
         end
 
         # END-CODE-FROM
-
-        def handle_response(response, responseClass=OpenAIResponse)
+
+        # Processes the API response and handles errors appropriately.
+        #
+        # @param response [Faraday::Response] The HTTP response from the API
+        # @param response_class [Class] The response class to instantiate for successful responses
+        # @return [Object] An instance of response_class for successful responses
+        # @raise [Durable::Llm::AuthenticationError] For 401 responses
+        # @raise [Durable::Llm::RateLimitError] For 429 responses
+        # @raise [Durable::Llm::InvalidRequestError] For 4xx client errors
+        # @raise [Durable::Llm::ServerError] For 5xx server errors
+        # @raise [Durable::Llm::APIError] For unexpected status codes
+        def handle_response(response, response_class = OpenAIResponse)
           case response.status
           when 200..299
-            responseClass.new(response.body)
+            response_class.new(response.body)
           when 401
             raise Durable::Llm::AuthenticationError, parse_error_message(response)
           when 429
@@ -143,12 +246,25 @@
           end
         end
 
+        # Extracts and formats error messages from API error responses.
+        #
+        # @param response [Faraday::Response] The error response from the API
+        # @return [String] The formatted error message
         def parse_error_message(response)
-          body = JSON.parse(response.body) rescue nil
+          body = begin
+            JSON.parse(response.body)
+          rescue StandardError
+            nil
+          end
           message = body&.dig('error', 'message') || response.body
           "#{response.status} Error: #{message}"
         end
 
+        # Response object for OpenAI chat completion API responses.
+        #
+        # This class wraps the raw response from OpenAI's chat completions endpoint
+        # and provides a consistent interface for accessing choices, usage data, and
+        # other response components.
         class OpenAIResponse
           attr_reader :raw_response
 
@@ -173,6 +289,10 @@ module Durable
           end
         end
 
+        # Represents a single choice in an OpenAI chat completion response.
+        #
+        # Each choice contains a message with role and content, along with
+        # metadata like finish reason.
         class OpenAIChoice
           attr_reader :message, :finish_reason
 
@@ -186,6 +306,9 @@ module Durable
           end
         end
 
+        # Represents a message in an OpenAI chat completion.
+        #
+        # Messages have a role (system, user, assistant) and content text.
         class OpenAIMessage
           attr_reader :role, :content
 
@@ -199,12 +322,15 @@ module Durable
           end
         end
 
+        # Response object for streaming OpenAI chat completion chunks.
+        #
+        # This wraps individual chunks from the Server-Sent Events stream,
+        # providing access to the incremental content updates.
         class OpenAIStreamResponse
           attr_reader :choices
 
           def initialize(parsed)
-
-              @choices = OpenAIStreamChoice.new(parsed['choices'])
+            @choices = OpenAIStreamChoice.new(parsed['choices'])
           end
 
           def to_s
@@ -212,6 +338,9 @@
           end
         end
 
+        # Response object for OpenAI embedding API responses.
+        #
+        # Provides access to the embedding vectors generated for input text.
         class OpenAIEmbeddingResponse
           attr_reader :embedding
 
@@ -224,6 +353,9 @@
           end
         end
 
+        # Represents a single choice in a streaming OpenAI response chunk.
+        #
+        # Contains the delta (incremental content) and finish reason for the choice.
         class OpenAIStreamChoice
           attr_reader :delta, :finish_reason
 
@@ -238,6 +370,9 @@
           end
         end
 
+        # Represents the incremental content delta in a streaming response.
+        #
+        # Contains the role (for the first chunk) and content updates.
         class OpenAIStreamDelta
           attr_reader :role, :content
 
@@ -254,3 +389,5 @@
     end
   end
 end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
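
The rewritten handle_response above maps HTTP statuses onto the gem's exception hierarchy (401 → AuthenticationError, 429 → RateLimitError, other 4xx → InvalidRequestError, 5xx → ServerError), so callers can rescue specific classes instead of inspecting status codes. A minimal caller-side sketch, assuming only what the diff shows and that `require 'durable/llm'` loads the provider classes (the model choice and messages are illustrative):

  require 'durable/llm'

  provider = Durable::Llm::Providers::OpenAI.new(api_key: ENV['OPENAI_API_KEY'])

  begin
    response = provider.completion(
      model: 'gpt-3.5-turbo',
      messages: [{ role: 'user', content: 'Hello, world!' }]
    )
    puts response.choices.first.to_s
  rescue Durable::Llm::AuthenticationError => e
    warn "Check OPENAI_API_KEY: #{e.message}"    # raised by handle_response on 401
  rescue Durable::Llm::RateLimitError => e
    warn "Rate limited, retry later: #{e.message}" # raised by handle_response on 429
  end
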
data/lib/durable/llm/providers/opencode.rb

@@ -0,0 +1,253 @@
+# frozen_string_literal: true
+
+# This file implements the OpenCode Zen provider for accessing OpenCode Zen's language models through their API, providing completion, embedding, and streaming capabilities with authentication handling, error management, and response normalization.
+
+require 'faraday'
+require 'json'
+require 'durable/llm/errors'
+require 'durable/llm/providers/base'
+require 'event_stream_parser'
+
+module Durable
+  module Llm
+    module Providers
+      # OpenCode Zen provider for accessing OpenCode Zen's language models through their API.
+      # Provides completion, embedding, and streaming capabilities with authentication handling,
+      # error management, and response normalization.
+      class Opencode < Durable::Llm::Providers::Base
+        BASE_URL = 'https://opencode.ai/zen/v1'
+
+        def default_api_key
+          begin
+            Durable::Llm.configuration.opencode&.api_key
+          rescue NoMethodError
+            nil
+          end || ENV['OPENCODE_API_KEY']
+        end
+
+        attr_accessor :api_key
+
+        def initialize(api_key: nil)
+          super
+          @api_key = api_key || default_api_key
+          @conn = Faraday.new(url: BASE_URL) do |faraday|
+            faraday.request :json
+            faraday.response :json
+            faraday.adapter Faraday.default_adapter
+          end
+        end
+
+        def completion(options)
+          response = @conn.post('chat/completions') do |req|
+            req.headers['Authorization'] = "Bearer #{@api_key}"
+            req.body = options
+          end
+
+          handle_response(response)
+        end
+
+        def embedding(model:, input:, **options)
+          response = @conn.post('embeddings') do |req|
+            req.headers['Authorization'] = "Bearer #{@api_key}"
+            req.body = { model: model, input: input, **options }
+          end
+
+          handle_response(response, OpencodeEmbeddingResponse)
+        end
+
+        def models
+          response = @conn.get('models') do |req|
+            req.headers['Authorization'] = "Bearer #{@api_key}"
+          end
+
+          handle_response(response).data.map { |model| model['id'] }
+        end
+
+        def self.stream?
+          true
+        end
+
+        def stream(options)
+          options[:stream] = true
+
+          response = @conn.post('chat/completions') do |req|
+            req.headers['Authorization'] = "Bearer #{@api_key}"
+            req.headers['Accept'] = 'text/event-stream'
+
+            options['temperature'] = options['temperature'].to_f if options['temperature']
+
+            req.body = options
+
+            user_proc = proc do |chunk, _size, _total|
+              yield OpencodeStreamResponse.new(chunk)
+            end
+
+            req.options.on_data = to_json_stream(user_proc: user_proc)
+          end
+
+          handle_response(response)
+        end
+
+        private
+
+        # CODE-FROM: ruby-openai @ https://github.com/alexrudall/ruby-openai/blob/main/lib/openai/http.rb
+        # MIT License: https://github.com/alexrudall/ruby-openai/blob/main/LICENSE.md
+        def to_json_stream(user_proc:)
+          parser = EventStreamParser::Parser.new
+
+          proc do |chunk, _bytes, env|
+            if env && env.status != 200
+              raise_error = Faraday::Response::RaiseError.new
+              raise_error.on_complete(env.merge(body: try_parse_json(chunk)))
+            end
+
+            parser.feed(chunk) do |_type, data|
+              user_proc.call(JSON.parse(data)) unless data == '[DONE]'
+            end
+          end
+        end
+
+        def try_parse_json(maybe_json)
+          JSON.parse(maybe_json)
+        rescue JSON::ParserError
+          maybe_json
+        end
+
+        # END-CODE-FROM
+
+        def handle_response(response, response_class = OpencodeResponse)
+          case response.status
+          when 200..299
+            response_class.new(response.body)
+          when 401
+            raise Durable::Llm::AuthenticationError, parse_error_message(response)
+          when 429
+            raise Durable::Llm::RateLimitError, parse_error_message(response)
+          when 400..499
+            raise Durable::Llm::InvalidRequestError, parse_error_message(response)
+          when 500..599
+            raise Durable::Llm::ServerError, parse_error_message(response)
+          else
+            raise Durable::Llm::APIError, "Unexpected response code: #{response.status}"
+          end
+        end
+
+        def parse_error_message(response)
+          body = begin
+            JSON.parse(response.body)
+          rescue StandardError
+            nil
+          end
+          message = body&.dig('error', 'message') || response.body
+          "#{response.status} Error: #{message}"
+        end
+
+        # Response class for OpenCode API completions
+        class OpencodeResponse
+          attr_reader :raw_response
+
+          def initialize(response)
+            @raw_response = response
+          end
+
+          def choices
+            @raw_response['choices'].map { |choice| OpencodeChoice.new(choice) }
+          end
+
+          def data
+            @raw_response['data']
+          end
+
+          def to_s
+            choices.map(&:to_s).join(' ')
+          end
+        end
+
+        # Choice class for OpenCode API responses
+        class OpencodeChoice
+          attr_reader :message, :finish_reason
+
+          def initialize(choice)
+            @message = OpencodeMessage.new(choice['message'])
+            @finish_reason = choice['finish_reason']
+          end
+
+          def to_s
+            @message.to_s
+          end
+        end
+
+        # Message class for OpenCode API responses
+        class OpencodeMessage
+          attr_reader :role, :content
+
+          def initialize(message)
+            @role = message['role']
+            @content = message['content']
+          end
+
+          def to_s
+            @content
+          end
+        end
+
+        # Stream response class for OpenCode API
+        class OpencodeStreamResponse
+          attr_reader :choices
+
+          def initialize(parsed)
+            @choices = OpencodeStreamChoice.new(parsed['choices'])
+          end
+
+          def to_s
+            @choices.to_s
+          end
+        end
+
+        # Embedding response class for OpenCode API
+        class OpencodeEmbeddingResponse
+          attr_reader :embedding
+
+          def initialize(data)
+            @embedding = data.dig('data', 0, 'embedding')
+          end
+
+          def to_a
+            @embedding
+          end
+        end
+
+        # Stream choice class for OpenCode API
+        class OpencodeStreamChoice
+          attr_reader :delta, :finish_reason
+
+          def initialize(choice)
+            @choice = [choice].flatten.first
+            @delta = OpencodeStreamDelta.new(@choice['delta'])
+            @finish_reason = @choice['finish_reason']
+          end
+
+          def to_s
+            @delta.to_s
+          end
+        end
+
+        # Stream delta class for OpenCode API
+        class OpencodeStreamDelta
+          attr_reader :role, :content
+
+          def initialize(delta)
+            @role = delta['role']
+            @content = delta['content']
+          end
+
+          def to_s
+            @content || ''
+          end
+        end
+      end
+    end
+  end
+end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
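
Unlike openai.rb, the new opencode.rb ships without @example documentation. The following usage sketch is inferred from the code above; the model id is a placeholder and the require path is assumed to load the provider, neither is confirmed by the diff:

  require 'durable/llm'

  provider = Durable::Llm::Providers::Opencode.new(api_key: ENV['OPENCODE_API_KEY'])

  # Non-streaming completion; OpencodeResponse#to_s joins the choices' message contents.
  response = provider.completion(
    model: 'a-zen-model-id', # placeholder; list real ids via provider.models
    messages: [{ role: 'user', content: 'Hello!' }]
  )
  puts response.to_s

  # Streaming; each chunk is an OpencodeStreamResponse wrapping one SSE delta.
  provider.stream(model: 'a-zen-model-id', messages: [{ role: 'user', content: 'Hello!' }]) do |chunk|
    print chunk.to_s
  end
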