openai 0.26.0 → 0.27.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,181 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OpenAI
4
+ module Helpers
5
+ module Streaming
6
+ # Raw streaming chunk event with accumulated completion snapshot.
7
+ #
8
+ # This is the fundamental event that wraps each raw chunk from the API
9
+ # along with the accumulated state up to that point. All other events
10
+ # are derived from processing these chunks.
11
+ #
12
+ # @example
13
+ # event.chunk # => ChatCompletionChunk (raw API response)
14
+ # event.snapshot # => ParsedChatCompletion (accumulated state)
15
+ class ChatChunkEvent < OpenAI::Internal::Type::BaseModel
16
+ required :type, const: :chunk
17
+ required :chunk, -> { OpenAI::Chat::ChatCompletionChunk }
18
+ required :snapshot, -> { OpenAI::Chat::ParsedChatCompletion }
19
+ end
20
+
21
+ # Incremental text content update event.
22
+ #
23
+ # Emitted as the assistant's text response is being generated. Each event
24
+ # contains the new text fragment (delta) and the complete accumulated
25
+ # text so far (snapshot).
26
+ #
27
+ # @example
28
+ # event.delta # => "Hello" (new fragment)
29
+ # event.snapshot # => "Hello world" (accumulated text)
30
+ # event.parsed # => {name: "John"} (if using structured outputs)
31
+ class ChatContentDeltaEvent < OpenAI::Internal::Type::BaseModel
32
+ required :type, const: :"content.delta"
33
+ required :delta, String
34
+ required :snapshot, String
35
+ optional :parsed, Object # Partially parsed structured output
36
+ end
37
+
38
+ # Text content completion event.
39
+ #
40
+ # Emitted when the assistant has finished generating text content.
41
+ # Contains the complete text and, if applicable, the fully parsed
42
+ # structured output.
43
+ #
44
+ # @example
45
+ # event.content # => "Hello world! How can I help?"
46
+ # event.parsed # => {name: "John", age: 30} (if using structured outputs)
47
+ class ChatContentDoneEvent < OpenAI::Internal::Type::BaseModel
48
+ required :type, const: :"content.done"
49
+ required :content, String
50
+ optional :parsed, Object # Fully parsed structured output
51
+ end
52
+
53
+ # Incremental refusal update event.
54
+ #
55
+ # Emitted when the assistant is refusing to fulfill a request.
56
+ # Contains the new refusal text fragment and accumulated refusal message.
57
+ #
58
+ # @example
59
+ # event.delta # => "I cannot"
60
+ # event.snapshot # => "I cannot help with that request"
61
+ class ChatRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel
62
+ required :type, const: :"refusal.delta"
63
+ required :delta, String
64
+ required :snapshot, String
65
+ end
66
+
67
+ # Refusal completion event.
68
+ #
69
+ # Emitted when the assistant has finished generating a refusal message.
70
+ # Contains the complete refusal text.
71
+ #
72
+ # @example
73
+ # event.refusal # => "I cannot help with that request as it violates..."
74
+ class ChatRefusalDoneEvent < OpenAI::Internal::Type::BaseModel
75
+ required :type, const: :"refusal.done"
76
+ required :refusal, String
77
+ end
78
+
79
+ # Incremental function tool call arguments update.
80
+ #
81
+ # Emitted as function arguments are being streamed. Provides both the
82
+ # raw JSON fragments and incrementally parsed arguments for strict tools.
83
+ #
84
+ # @example
85
+ # event.name # => "get_weather"
86
+ # event.index # => 0 (tool call index in array)
87
+ # event.arguments_delta # => '{"location": "San' (new fragment)
88
+ # event.arguments # => '{"location": "San Francisco"' (accumulated JSON)
89
+ # event.parsed # => {location: "San Francisco"} (if strict: true)
90
+ class ChatFunctionToolCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel
91
+ required :type, const: :"tool_calls.function.arguments.delta"
92
+ required :name, String
93
+ required :index, Integer
94
+ required :arguments_delta, String
95
+ required :arguments, String
96
+ required :parsed, Object
97
+ end
98
+
99
+ # Function tool call arguments completion event.
100
+ #
101
+ # Emitted when a function tool call's arguments are complete.
102
+ # For tools defined with `strict: true`, the arguments will be fully
103
+ # parsed and validated. For non-strict tools, only raw JSON is available.
104
+ #
105
+ # @example With strict tool
106
+ # event.name # => "get_weather"
107
+ # event.arguments # => '{"location": "San Francisco", "unit": "celsius"}'
108
+ # event.parsed # => {location: "San Francisco", unit: "celsius"}
109
+ #
110
+ # @example Without strict tool
111
+ # event.parsed # => nil (parse JSON from event.arguments manually)
112
+ class ChatFunctionToolCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel
113
+ required :type, const: :"tool_calls.function.arguments.done"
114
+ required :name, String
115
+ required :index, Integer
116
+ required :arguments, String
117
+ required :parsed, Object # (only for strict: true tools)
118
+ end
119
+
120
+ # Incremental logprobs update for content tokens.
121
+ #
122
+ # Emitted when logprobs are requested and content tokens are being generated.
123
+ # Contains log probability information for the new tokens and accumulated
124
+ # logprobs for all content tokens so far.
125
+ #
126
+ # @example
127
+ # event.content[0].token # => "Hello"
128
+ # event.content[0].logprob # => -0.31725305
129
+ # event.content[0].top_logprobs # => [{token: "Hello", logprob: -0.31725305}, ...]
130
+ # event.snapshot # => [all logprobs accumulated so far]
131
+ class ChatLogprobsContentDeltaEvent < OpenAI::Internal::Type::BaseModel
132
+ required :type, const: :"logprobs.content.delta"
133
+ required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }
134
+ required :snapshot, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }
135
+ end
136
+
137
+ # Logprobs completion event for content tokens.
138
+ #
139
+ # Emitted when content generation is complete and logprobs were requested.
140
+ # Contains the complete array of log probabilities for all content tokens.
141
+ #
142
+ # @example
143
+ # event.content.each do |logprob|
144
+ # puts "Token: #{logprob.token}, Logprob: #{logprob.logprob}"
145
+ # end
146
+ class ChatLogprobsContentDoneEvent < OpenAI::Internal::Type::BaseModel
147
+ required :type, const: :"logprobs.content.done"
148
+ required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }
149
+ end
150
+
151
+ # Incremental logprobs update for refusal tokens.
152
+ #
153
+ # Emitted when logprobs are requested and refusal tokens are being generated.
154
+ # Contains log probability information for refusal message tokens.
155
+ #
156
+ # @example
157
+ # event.refusal[0].token # => "I"
158
+ # event.refusal[0].logprob # => -0.12345
159
+ # event.snapshot # => [all refusal logprobs accumulated so far]
160
+ class ChatLogprobsRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel
161
+ required :type, const: :"logprobs.refusal.delta"
162
+ required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }
163
+ required :snapshot, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }
164
+ end
165
+
166
+ # Logprobs completion event for refusal tokens.
167
+ #
168
+ # Emitted when refusal generation is complete and logprobs were requested.
169
+ # Contains the complete array of log probabilities for all refusal tokens.
170
+ #
171
+ # @example
172
+ # event.refusal.each do |logprob|
173
+ # puts "Refusal token: #{logprob.token}, Logprob: #{logprob.logprob}"
174
+ # end
175
+ class ChatLogprobsRefusalDoneEvent < OpenAI::Internal::Type::BaseModel
176
+ required :type, const: :"logprobs.refusal.done"
177
+ required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }
178
+ end
179
+ end
180
+ end
181
+ end
@@ -0,0 +1,29 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OpenAI
4
+ module Helpers
5
+ module Streaming
6
+ class StreamError < StandardError; end
7
+
8
+ class LengthFinishReasonError < StreamError
9
+ attr_reader :completion
10
+
11
+ def initialize(completion:)
12
+ @completion = completion
13
+ super("Stream finished due to length limit")
14
+ end
15
+ end
16
+
17
+ class ContentFilterFinishReasonError < StreamError
18
+ def initialize
19
+ super("Stream finished due to content filter")
20
+ end
21
+ end
22
+ end
23
+ end
24
+ end
25
+
26
+ module OpenAI
27
+ LengthFinishReasonError = Helpers::Streaming::LengthFinishReasonError
28
+ ContentFilterFinishReasonError = Helpers::Streaming::ContentFilterFinishReasonError
29
+ end
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require_relative "events"
4
-
5
3
  module OpenAI
6
4
  module Helpers
7
5
  module Streaming
@@ -82,17 +82,20 @@ module OpenAI
82
82
  #
83
83
  # @return [Pathname, StringIO, IO, String, Object]
84
84
  def dump(value, state:)
85
- # rubocop:disable Lint/DuplicateBranch
86
85
  case value
86
+ in StringIO | String
87
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
88
+ # While not required, a filename is recommended; in practice, many servers expect one.
89
+ OpenAI::FilePart.new(value, filename: "upload")
87
90
  in IO
88
91
  state[:can_retry] = false
92
+ value.to_path.nil? ? OpenAI::FilePart.new(value, filename: "upload") : value
89
93
  in OpenAI::FilePart if value.content.is_a?(IO)
90
94
  state[:can_retry] = false
95
+ value
91
96
  else
97
+ value
92
98
  end
93
- # rubocop:enable Lint/DuplicateBranch
94
-
95
- value
96
99
  end
97
100
 
98
101
  # @api private
@@ -566,7 +566,8 @@ module OpenAI
566
566
  #
567
567
  # @return [Array(String, Enumerable<String>)]
568
568
  private def encode_multipart_streaming(body)
569
- boundary = SecureRandom.urlsafe_base64(60)
569
+ # RFC 2046 Section 5.1.1 (which obsoletes RFC 1521) limits the multipart boundary to 70 characters
570
+ boundary = SecureRandom.urlsafe_base64(46)
570
571
 
571
572
  closing = []
572
573
  strio = writable_enum do |y|
@@ -0,0 +1,15 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OpenAI
4
+ module Models
5
+ module Chat
6
+ class ParsedChoice < OpenAI::Models::Chat::ChatCompletion::Choice
7
+ optional :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason }, nil?: true
8
+ end
9
+
10
+ class ParsedChatCompletion < ChatCompletion
11
+ required :choices, -> { OpenAI::Internal::Type::ArrayOf[ParsedChoice] }
12
+ end
13
+ end
14
+ end
15
+ end
@@ -110,6 +110,54 @@ module OpenAI
110
110
  raise ArgumentError.new(message)
111
111
  end
112
112
 
113
+ model, tool_models = get_structured_output_models(parsed)
114
+
115
+ # rubocop:disable Metrics/BlockLength
116
+ unwrap = ->(raw) do
117
+ if model.is_a?(OpenAI::StructuredOutput::JsonSchemaConverter)
118
+ raw[:choices]&.each do |choice|
119
+ message = choice.fetch(:message)
120
+ begin
121
+ content = message.fetch(:content)
122
+ parsed = content.nil? ? nil : JSON.parse(content, symbolize_names: true)
123
+ rescue JSON::ParserError => e
124
+ parsed = e
125
+ end
126
+ coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed)
127
+ message.store(:parsed, coerced)
128
+ end
129
+ end
130
+ raw[:choices]&.each do |choice|
131
+ choice.dig(:message, :tool_calls)&.each do |tool_call|
132
+ func = tool_call.fetch(:function)
133
+ next if (model = tool_models[func.fetch(:name)]).nil?
134
+
135
+ begin
136
+ arguments = func.fetch(:arguments)
137
+ parsed = arguments.nil? ? nil : JSON.parse(arguments, symbolize_names: true)
138
+ rescue JSON::ParserError => e
139
+ parsed = e
140
+ end
141
+ coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed)
142
+ func.store(:parsed, coerced)
143
+ end
144
+ end
145
+
146
+ raw
147
+ end
148
+ # rubocop:enable Metrics/BlockLength
149
+
150
+ @client.request(
151
+ method: :post,
152
+ path: "chat/completions",
153
+ body: parsed,
154
+ unwrap: unwrap,
155
+ model: OpenAI::Chat::ChatCompletion,
156
+ options: options
157
+ )
158
+ end
159
+
160
+ def get_structured_output_models(parsed)
113
161
  model = nil
114
162
  tool_models = {}
115
163
  case parsed
@@ -162,53 +210,46 @@ module OpenAI
162
210
  else
163
211
  end
164
212
 
165
- # rubocop:disable Metrics/BlockLength
166
- unwrap = ->(raw) do
167
- if model.is_a?(OpenAI::StructuredOutput::JsonSchemaConverter)
168
- raw[:choices]&.each do |choice|
169
- message = choice.fetch(:message)
170
- begin
171
- content = message.fetch(:content)
172
- parsed = content.nil? ? nil : JSON.parse(content, symbolize_names: true)
173
- rescue JSON::ParserError => e
174
- parsed = e
175
- end
176
- coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed)
177
- message.store(:parsed, coerced)
178
- end
179
- end
180
- raw[:choices]&.each do |choice|
181
- choice.dig(:message, :tool_calls)&.each do |tool_call|
182
- func = tool_call.fetch(:function)
183
- next if (model = tool_models[func.fetch(:name)]).nil?
213
+ [model, tool_models]
214
+ end
184
215
 
185
- begin
186
- arguments = func.fetch(:arguments)
187
- parsed = arguments.nil? ? nil : JSON.parse(arguments, symbolize_names: true)
188
- rescue JSON::ParserError => e
189
- parsed = e
190
- end
191
- coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed)
192
- func.store(:parsed, coerced)
193
- end
194
- end
216
+ def build_tools_with_models(tools, tool_models)
217
+ return [] if tools.nil?
195
218
 
196
- raw
219
+ tools.map do |tool|
220
+ next tool unless tool[:type] == :function
221
+
222
+ function_name = tool.dig(:function, :name)
223
+ model = tool_models[function_name]
224
+
225
+ model ? tool.merge(model: model) : tool
197
226
  end
198
- # rubocop:enable Metrics/BlockLength
227
+ end
199
228
 
200
- @client.request(
229
+ def stream(params)
230
+ parsed, options = OpenAI::Chat::CompletionCreateParams.dump_request(params)
231
+
232
+ parsed.store(:stream, true)
233
+
234
+ response_format, tool_models = get_structured_output_models(parsed)
235
+
236
+ input_tools = build_tools_with_models(parsed[:tools], tool_models)
237
+
238
+ raw_stream = @client.request(
201
239
  method: :post,
202
240
  path: "chat/completions",
241
+ headers: {"accept" => "text/event-stream"},
203
242
  body: parsed,
204
- unwrap: unwrap,
205
- model: OpenAI::Chat::ChatCompletion,
243
+ stream: OpenAI::Internal::Stream,
244
+ model: OpenAI::Chat::ChatCompletionChunk,
206
245
  options: options
207
246
  )
208
- end
209
247
 
210
- def stream
211
- raise NotImplementedError.new("higher level helpers are coming soon!")
248
+ OpenAI::Helpers::Streaming::ChatCompletionStream.new(
249
+ raw_stream: raw_stream,
250
+ response_format: response_format,
251
+ input_tools: input_tools
252
+ )
212
253
  end
213
254
 
214
255
  # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart.
@@ -85,7 +85,7 @@ module OpenAI
85
85
  def create(params = {})
86
86
  parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params)
87
87
  if parsed[:stream]
88
- message = "Please use `#stream_raw` for the streaming use case."
88
+ message = "Please use `#stream` for the streaming use case."
89
89
  raise ArgumentError.new(message)
90
90
  end
91
91
 
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module OpenAI
4
- VERSION = "0.26.0"
4
+ VERSION = "0.27.1"
5
5
  end
data/lib/openai.rb CHANGED
@@ -195,6 +195,7 @@ require_relative "openai/models/chat/chat_completion_assistant_message_param"
195
195
  require_relative "openai/models/chat/chat_completion_audio"
196
196
  require_relative "openai/models/chat/chat_completion_audio_param"
197
197
  require_relative "openai/models/chat/chat_completion_chunk"
198
+ require_relative "openai/models/chat/parsed_chat_completion"
198
199
  require_relative "openai/models/chat/chat_completion_content_part"
199
200
  require_relative "openai/models/chat/chat_completion_content_part_image"
200
201
  require_relative "openai/models/chat/chat_completion_content_part_input_audio"
@@ -697,6 +698,9 @@ require_relative "openai/resources/vector_stores"
697
698
  require_relative "openai/resources/vector_stores/file_batches"
698
699
  require_relative "openai/resources/vector_stores/files"
699
700
  require_relative "openai/resources/webhooks"
700
- require_relative "openai/helpers/streaming/events"
701
+ require_relative "openai/helpers/streaming/response_events"
701
702
  require_relative "openai/helpers/streaming/response_stream"
703
+ require_relative "openai/helpers/streaming/exceptions"
704
+ require_relative "openai/helpers/streaming/chat_events"
705
+ require_relative "openai/helpers/streaming/chat_completion_stream"
702
706
  require_relative "openai/streaming"
@@ -27,7 +27,7 @@ module OpenAI
27
27
  sig do
28
28
  params(
29
29
  content: T.any(Pathname, StringIO, IO, String),
30
- filename: T.nilable(String),
30
+ filename: T.nilable(T.any(Pathname, String)),
31
31
  content_type: T.nilable(String)
32
32
  ).returns(T.attached_class)
33
33
  end
@@ -26,6 +26,126 @@ module OpenAI
26
26
  def response
27
27
  end
28
28
  end
29
+
30
+ class ChatChunkEvent < OpenAI::Internal::Type::BaseModel
31
+ sig { returns(T.untyped) }
32
+ def chunk
33
+ end
34
+
35
+ sig { returns(T.untyped) }
36
+ def snapshot
37
+ end
38
+ end
39
+
40
+ class ChatContentDeltaEvent < OpenAI::Internal::Type::BaseModel
41
+ sig { returns(String) }
42
+ def delta
43
+ end
44
+
45
+ sig { returns(String) }
46
+ def snapshot
47
+ end
48
+
49
+ sig { returns(T.untyped) }
50
+ def parsed
51
+ end
52
+ end
53
+
54
+ class ChatContentDoneEvent < OpenAI::Internal::Type::BaseModel
55
+ sig { returns(String) }
56
+ def content
57
+ end
58
+
59
+ sig { returns(T.untyped) }
60
+ def parsed
61
+ end
62
+ end
63
+
64
+ class ChatRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel
65
+ sig { returns(String) }
66
+ def delta
67
+ end
68
+
69
+ sig { returns(String) }
70
+ def snapshot
71
+ end
72
+ end
73
+
74
+ class ChatRefusalDoneEvent < OpenAI::Internal::Type::BaseModel
75
+ sig { returns(String) }
76
+ def refusal
77
+ end
78
+ end
79
+
80
+ class ChatFunctionToolCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel
81
+ sig { returns(String) }
82
+ def name
83
+ end
84
+
85
+ sig { returns(Integer) }
86
+ def index
87
+ end
88
+
89
+ sig { returns(String) }
90
+ def arguments_delta
91
+ end
92
+
93
+ sig { returns(String) }
94
+ def arguments
95
+ end
96
+
97
+ sig { returns(T.untyped) }
98
+ def parsed_arguments
99
+ end
100
+ end
101
+
102
+ class ChatFunctionToolCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel
103
+ sig { returns(String) }
104
+ def name
105
+ end
106
+
107
+ sig { returns(Integer) }
108
+ def index
109
+ end
110
+
111
+ sig { returns(String) }
112
+ def arguments
113
+ end
114
+
115
+ sig { returns(T.untyped) }
116
+ def parsed_arguments
117
+ end
118
+ end
119
+
120
+ class ChatLogprobsContentDeltaEvent < OpenAI::Internal::Type::BaseModel
121
+ sig { returns(T.untyped) }
122
+ def content
123
+ end
124
+ end
125
+
126
+ class ChatLogprobsContentDoneEvent < OpenAI::Internal::Type::BaseModel
127
+ sig { returns(T.untyped) }
128
+ def content
129
+ end
130
+ end
131
+
132
+ class ChatLogprobsRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel
133
+ sig { returns(T.untyped) }
134
+ def refusal
135
+ end
136
+ end
137
+
138
+ class ChatLogprobsRefusalDoneEvent < OpenAI::Internal::Type::BaseModel
139
+ sig { returns(T.untyped) }
140
+ def refusal
141
+ end
142
+ end
143
+
144
+ class ChatCompletionStream
145
+ sig { returns(T.untyped) }
146
+ def each
147
+ end
148
+ end
29
149
  end
30
150
  end
31
151
  end
@@ -1,5 +1,32 @@
1
1
  # typed: strong
2
2
 
3
3
  module OpenAI
4
- Streaming = OpenAI::Helpers::Streaming
4
+ module Streaming
5
+ ResponseTextDeltaEvent = OpenAI::Helpers::Streaming::ResponseTextDeltaEvent
6
+ ResponseTextDoneEvent = OpenAI::Helpers::Streaming::ResponseTextDoneEvent
7
+ ResponseFunctionCallArgumentsDeltaEvent =
8
+ OpenAI::Helpers::Streaming::ResponseFunctionCallArgumentsDeltaEvent
9
+ ResponseCompletedEvent = OpenAI::Helpers::Streaming::ResponseCompletedEvent
10
+
11
+ ChatChunkEvent = OpenAI::Helpers::Streaming::ChatChunkEvent
12
+ ChatContentDeltaEvent = OpenAI::Helpers::Streaming::ChatContentDeltaEvent
13
+ ChatContentDoneEvent = OpenAI::Helpers::Streaming::ChatContentDoneEvent
14
+ ChatRefusalDeltaEvent = OpenAI::Helpers::Streaming::ChatRefusalDeltaEvent
15
+ ChatRefusalDoneEvent = OpenAI::Helpers::Streaming::ChatRefusalDoneEvent
16
+ ChatFunctionToolCallArgumentsDeltaEvent =
17
+ OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDeltaEvent
18
+ ChatFunctionToolCallArgumentsDoneEvent =
19
+ OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent
20
+ ChatLogprobsContentDeltaEvent =
21
+ OpenAI::Helpers::Streaming::ChatLogprobsContentDeltaEvent
22
+ ChatLogprobsContentDoneEvent =
23
+ OpenAI::Helpers::Streaming::ChatLogprobsContentDoneEvent
24
+ ChatLogprobsRefusalDeltaEvent =
25
+ OpenAI::Helpers::Streaming::ChatLogprobsRefusalDeltaEvent
26
+ ChatLogprobsRefusalDoneEvent =
27
+ OpenAI::Helpers::Streaming::ChatLogprobsRefusalDoneEvent
28
+
29
+ ResponseStream = OpenAI::Helpers::Streaming::ResponseStream
30
+ ChatCompletionStream = OpenAI::Helpers::Streaming::ChatCompletionStream
31
+ end
5
32
  end
@@ -14,7 +14,7 @@ module OpenAI
14
14
 
15
15
  def initialize: (
16
16
  Pathname | StringIO | IO | String content,
17
- ?filename: String?,
17
+ ?filename: (Pathname | String)?,
18
18
  ?content_type: String?
19
19
  ) -> void
20
20
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.26.0
4
+ version: 0.27.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - OpenAI
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2025-09-23 00:00:00.000000000 Z
11
+ date: 2025-09-30 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: connection_pool
@@ -39,7 +39,10 @@ files:
39
39
  - lib/openai/client.rb
40
40
  - lib/openai/errors.rb
41
41
  - lib/openai/file_part.rb
42
- - lib/openai/helpers/streaming/events.rb
42
+ - lib/openai/helpers/streaming/chat_completion_stream.rb
43
+ - lib/openai/helpers/streaming/chat_events.rb
44
+ - lib/openai/helpers/streaming/exceptions.rb
45
+ - lib/openai/helpers/streaming/response_events.rb
43
46
  - lib/openai/helpers/streaming/response_stream.rb
44
47
  - lib/openai/helpers/structured_output.rb
45
48
  - lib/openai/helpers/structured_output/array_of.rb
@@ -229,6 +232,7 @@ files:
229
232
  - lib/openai/models/chat/completion_retrieve_params.rb
230
233
  - lib/openai/models/chat/completion_update_params.rb
231
234
  - lib/openai/models/chat/completions/message_list_params.rb
235
+ - lib/openai/models/chat/parsed_chat_completion.rb
232
236
  - lib/openai/models/chat_model.rb
233
237
  - lib/openai/models/comparison_filter.rb
234
238
  - lib/openai/models/completion.rb