openai 0.11.0 → 0.13.0

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (50)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +24 -0
  3. data/README.md +5 -7
  4. data/lib/openai/helpers/streaming/events.rb +23 -0
  5. data/lib/openai/helpers/streaming/response_stream.rb +232 -0
  6. data/lib/openai/helpers/structured_output/parsed_json.rb +39 -0
  7. data/lib/openai/internal/stream.rb +2 -1
  8. data/lib/openai/internal/transport/base_client.rb +10 -2
  9. data/lib/openai/internal/type/base_stream.rb +3 -1
  10. data/lib/openai/models/audio/transcription.rb +4 -4
  11. data/lib/openai/models/audio/transcription_verbose.rb +4 -4
  12. data/lib/openai/models/chat/chat_completion_message.rb +1 -1
  13. data/lib/openai/models/chat/chat_completion_message_tool_call.rb +1 -1
  14. data/lib/openai/models/file_object.rb +5 -4
  15. data/lib/openai/models/responses/response_function_tool_call.rb +1 -1
  16. data/lib/openai/models/responses/response_input_file.rb +9 -1
  17. data/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb +5 -5
  18. data/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb +5 -5
  19. data/lib/openai/models/responses/response_output_text.rb +1 -1
  20. data/lib/openai/models/responses/response_output_text_annotation_added_event.rb +5 -5
  21. data/lib/openai/models/responses/response_stream_event.rb +3 -3
  22. data/lib/openai/models/responses/tool.rb +9 -1
  23. data/lib/openai/resources/chat/completions.rb +12 -4
  24. data/lib/openai/resources/responses.rb +248 -75
  25. data/lib/openai/streaming.rb +5 -0
  26. data/lib/openai/version.rb +1 -1
  27. data/lib/openai.rb +4 -0
  28. data/rbi/openai/helpers/streaming/events.rbi +31 -0
  29. data/rbi/openai/helpers/streaming/response_stream.rbi +104 -0
  30. data/rbi/openai/internal/type/base_stream.rbi +8 -1
  31. data/rbi/openai/models/audio/transcription.rbi +4 -4
  32. data/rbi/openai/models/audio/transcription_verbose.rbi +4 -6
  33. data/rbi/openai/models/file_object.rbi +7 -6
  34. data/rbi/openai/models/responses/response_input_file.rbi +11 -0
  35. data/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi +3 -3
  36. data/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi +3 -3
  37. data/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi +3 -3
  38. data/rbi/openai/models/responses/tool.rbi +12 -1
  39. data/rbi/openai/resources/responses.rbi +126 -1
  40. data/rbi/openai/streaming.rbi +5 -0
  41. data/sig/openai/internal/type/base_stream.rbs +4 -0
  42. data/sig/openai/models/audio/transcription.rbs +4 -4
  43. data/sig/openai/models/audio/transcription_verbose.rbs +4 -4
  44. data/sig/openai/models/file_object.rbs +2 -0
  45. data/sig/openai/models/responses/response_input_file.rbs +7 -0
  46. data/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +4 -4
  47. data/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +4 -4
  48. data/sig/openai/models/responses/response_output_text_annotation_added_event.rbs +4 -4
  49. data/sig/openai/models/responses/tool.rbs +9 -2
  50. metadata +9 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 252a9ce9833b0a9f66be94b76081f34716e348ebca732e63c3e16c105ed42ea1
- data.tar.gz: d4d8b36822ee74af77508ec8e48b472d72d619333e468055695158e242c49760
+ metadata.gz: c46771d67ac54a96ea533d6473403bd7357164093ad318367a59a9e4f8d526c4
+ data.tar.gz: c2c858697c822c515ebc105aeb69ecfabdfd515fdc2886ca308ffaa267c2c893
  SHA512:
- metadata.gz: f27c40e40df727c8da570a4f6b1e20f72a87baa710efd3957bb0fbbcc23eb1dd78d03b1bcc3c3cf5684d4db4b062c392c1c3d970093c8e0fac89ef85db053db3
- data.tar.gz: 21fe44d1605c6196d7321dcb7efa7d951a2523ba71859eab1bf242ff3314afe0cce25201c21b141f9f3902b684cb577ba51c520d12602d1279eccb770769e3a4
+ metadata.gz: efa25f9aab25791e58bd2a941953a37b54df6a01c98b0e4bbb0363f8739d7856e8e1b56804bc326315a7861ecfa5082a8b93eee7b429d9db09fe4c2b9b3455d5
+ data.tar.gz: 51b7aab418c9864b7375f457fdcd45380a437361ec72eabad9aca55050ff9ee6ca9f213d5cc037661ae56e75ed12a53160d7819dcbfc2f549b1c82f7b4ae6daa
data/CHANGELOG.md CHANGED
@@ -1,5 +1,29 @@
  # Changelog
 
+ ## 0.13.0 (2025-07-10)
+
+ Full Changelog: [v0.12.0...v0.13.0](https://github.com/openai/openai-ruby/compare/v0.12.0...v0.13.0)
+
+ ### Features
+
+ * **api:** add file_url, fix event ID ([9b8919d](https://github.com/openai/openai-ruby/commit/9b8919d470b622035f13c455aa9aa783feb1f936))
+
+ ## 0.12.0 (2025-07-03)
+
+ Full Changelog: [v0.11.0...v0.12.0](https://github.com/openai/openai-ruby/compare/v0.11.0...v0.12.0)
+
+ ### Features
+
+ * ensure partial jsons in structured ouput are handled gracefully ([#740](https://github.com/openai/openai-ruby/issues/740)) ([5deec70](https://github.com/openai/openai-ruby/commit/5deec708bad1ceb1a03e9aa65f737e3f89ce6455))
+ * responses streaming helpers ([#721](https://github.com/openai/openai-ruby/issues/721)) ([c2f4270](https://github.com/openai/openai-ruby/commit/c2f42708e41492f1c22886735079973510fb2789))
+
+
+ ### Chores
+
+ * **ci:** only run for pushes and fork pull requests ([97538e2](https://github.com/openai/openai-ruby/commit/97538e266f6f9a0e09669453539ee52ca56f4f59))
+ * **internal:** allow streams to also be unwrapped on a per-row basis ([49bdadf](https://github.com/openai/openai-ruby/commit/49bdadfc0d3400664de0c8e7cfd59879faec45b8))
+ * **internal:** minor refactoring of json helpers ([#744](https://github.com/openai/openai-ruby/issues/744)) ([f13edee](https://github.com/openai/openai-ruby/commit/f13edee16325be04335443cb886a7c2024155fd9))
+
  ## 0.11.0 (2025-06-26)
 
  Full Changelog: [v0.10.0...v0.11.0](https://github.com/openai/openai-ruby/compare/v0.10.0...v0.11.0)
data/README.md CHANGED
@@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application
  <!-- x-release-please-start-version -->
 
  ```ruby
- gem "openai", "~> 0.11.0"
+ gem "openai", "~> 0.13.0"
  ```
 
  <!-- x-release-please-end -->
@@ -42,16 +42,14 @@ puts(chat_completion)
 
  We provide support for streaming responses using Server-Sent Events (SSE).
 
- **coming soon:** `openai.chat.completions.stream` will soon come with Python SDK-style higher-level streaming responses support.
-
  ```ruby
- stream = openai.chat.completions.stream_raw(
-   messages: [{role: "user", content: "Say this is a test"}],
+ stream = openai.responses.stream(
+   input: "Write a haiku about OpenAI.",
    model: :"gpt-4.1"
  )
 
- stream.each do |completion|
-   puts(completion)
+ stream.each do |event|
+   puts(event.type)
  end
  ```
 
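The streaming example above pairs with the new `ResponseStream` helper added in this release (shown in full below), which also exposes `text`, `get_final_response`, and `get_output_text`. A minimal sketch of those accessors, assuming an `OpenAI::Client` named `openai` as in the README:

```ruby
require "openai"

openai = OpenAI::Client.new # picks up OPENAI_API_KEY from the environment

stream = openai.responses.stream(
  input: "Write a haiku about OpenAI.",
  model: :"gpt-4.1"
)

# `text` yields only the text deltas from the event stream.
stream.text.each { |delta| print(delta) }

# Once the stream is drained, the accumulated final response is available.
response = stream.get_final_response
puts("\nresponse id: #{response.id}")
```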
data/lib/openai/helpers/streaming/events.rb ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ module OpenAI
+   module Helpers
+     module Streaming
+       class ResponseTextDeltaEvent < OpenAI::Models::Responses::ResponseTextDeltaEvent
+         required :snapshot, String
+       end
+
+       class ResponseTextDoneEvent < OpenAI::Models::Responses::ResponseTextDoneEvent
+         optional :parsed, Object
+       end
+
+       class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent
+         required :snapshot, String
+       end
+
+       class ResponseCompletedEvent < OpenAI::Models::Responses::ResponseCompletedEvent
+         required :response, OpenAI::Models::Responses::Response
+       end
+     end
+   end
+ end
data/lib/openai/helpers/streaming/response_stream.rb ADDED
@@ -0,0 +1,232 @@
+ # frozen_string_literal: true
+
+ require_relative "events"
+
+ module OpenAI
+   module Helpers
+     module Streaming
+       class ResponseStream
+         include OpenAI::Internal::Type::BaseStream
+
+         def initialize(raw_stream:, text_format: nil, starting_after: nil)
+           @text_format = text_format
+           @starting_after = starting_after
+           @raw_stream = raw_stream
+           @iterator = iterator
+           @state = ResponseStreamState.new(
+             text_format: text_format
+           )
+         end
+
+         def until_done
+           each {} # rubocop:disable Lint/EmptyBlock
+           self
+         end
+
+         def text
+           OpenAI::Internal::Util.chain_fused(@iterator) do |yielder|
+             @iterator.each do |event|
+               case event
+               when OpenAI::Streaming::ResponseTextDeltaEvent
+                 yielder << event.delta
+               end
+             end
+           end
+         end
+
+         def get_final_response
+           until_done
+           response = @state.completed_response
+           raise RuntimeError.new("Didn't receive a 'response.completed' event") unless response
+           response
+         end
+
+         def get_output_text
+           response = get_final_response
+           text_parts = []
+
+           response.output.each do |output|
+             next unless output.type == :message
+
+             output.content.each do |content|
+               next unless content.type == :output_text
+               text_parts << content.text
+             end
+           end
+
+           text_parts.join
+         end
+
+         private
+
+         def iterator
+           @iterator ||= OpenAI::Internal::Util.chain_fused(@raw_stream) do |y|
+             @raw_stream.each do |raw_event|
+               events_to_yield = @state.handle_event(raw_event)
+               events_to_yield.each do |event|
+                 if @starting_after.nil? || event.sequence_number > @starting_after
+                   y << event
+                 end
+               end
+             end
+           end
+         end
+       end
+
+       class ResponseStreamState
+         attr_reader :completed_response
+
+         def initialize(text_format:)
+           @current_snapshot = nil
+           @completed_response = nil
+           @text_format = text_format
+         end
+
+         def handle_event(event)
+           @current_snapshot = accumulate_event(
+             event: event,
+             current_snapshot: @current_snapshot
+           )
+
+           events_to_yield = []
+
+           case event
+           when OpenAI::Models::Responses::ResponseTextDeltaEvent
+             output = @current_snapshot.output[event.output_index]
+             assert_type(output, :message)
+
+             content = output.content[event.content_index]
+             assert_type(content, :output_text)
+
+             events_to_yield << OpenAI::Streaming::ResponseTextDeltaEvent.new(
+               content_index: event.content_index,
+               delta: event.delta,
+               item_id: event.item_id,
+               output_index: event.output_index,
+               sequence_number: event.sequence_number,
+               type: event.type,
+               snapshot: content.text
+             )
+
+           when OpenAI::Models::Responses::ResponseTextDoneEvent
+             output = @current_snapshot.output[event.output_index]
+             assert_type(output, :message)
+
+             content = output.content[event.content_index]
+             assert_type(content, :output_text)
+
+             parsed = parse_structured_text(content.text)
+
+             events_to_yield << OpenAI::Streaming::ResponseTextDoneEvent.new(
+               content_index: event.content_index,
+               item_id: event.item_id,
+               output_index: event.output_index,
+               sequence_number: event.sequence_number,
+               text: event.text,
+               type: event.type,
+               parsed: parsed
+             )
+
+           when OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent
+             output = @current_snapshot.output[event.output_index]
+             assert_type(output, :function_call)
+
+             events_to_yield << OpenAI::Streaming::ResponseFunctionCallArgumentsDeltaEvent.new(
+               delta: event.delta,
+               item_id: event.item_id,
+               output_index: event.output_index,
+               sequence_number: event.sequence_number,
+               type: event.type,
+               snapshot: output.arguments
+             )
+
+           when OpenAI::Models::Responses::ResponseCompletedEvent
+             events_to_yield << OpenAI::Streaming::ResponseCompletedEvent.new(
+               sequence_number: event.sequence_number,
+               type: event.type,
+               response: event.response
+             )
+
+           else
+             # Pass through other events unchanged.
+             events_to_yield << event
+           end
+
+           events_to_yield
+         end
+
+         def accumulate_event(event:, current_snapshot:)
+           if current_snapshot.nil?
+             unless event.is_a?(OpenAI::Models::Responses::ResponseCreatedEvent)
+               raise "Expected first event to be response.created"
+             end
+
+             # Use the converter to create a new, isolated copy of the response object.
+             # This ensures proper type validation and prevents shared object references.
+             return OpenAI::Internal::Type::Converter.coerce(
+               OpenAI::Models::Responses::Response,
+               event.response
+             )
+           end
+
+           case event
+           when OpenAI::Models::Responses::ResponseOutputItemAddedEvent
+             current_snapshot.output.push(event.item)
+
+           when OpenAI::Models::Responses::ResponseContentPartAddedEvent
+             output = current_snapshot.output[event.output_index]
+             if output && output.type == :message
+               output.content.push(event.part)
+               current_snapshot.output[event.output_index] = output
+             end
+
+           when OpenAI::Models::Responses::ResponseTextDeltaEvent
+             output = current_snapshot.output[event.output_index]
+             if output && output.type == :message
+               content = output.content[event.content_index]
+               if content && content.type == :output_text
+                 content.text += event.delta
+                 output.content[event.content_index] = content
+                 current_snapshot.output[event.output_index] = output
+               end
+             end
+
+           when OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent
+             output = current_snapshot.output[event.output_index]
+             if output && output.type == :function_call
+               output.arguments = (output.arguments || "") + event.delta
+               current_snapshot.output[event.output_index] = output
+             end
+
+           when OpenAI::Models::Responses::ResponseCompletedEvent
+             @completed_response = event.response
+           end
+
+           current_snapshot
+         end
+
+         private
+
+         def assert_type(object, expected_type)
+           return if object && object.type == expected_type
+           actual_type = object ? object.type : "nil"
+           raise "Invalid state: expected #{expected_type} but got #{actual_type}"
+         end
+
+         def parse_structured_text(text)
+           return nil unless @text_format && text
+
+           begin
+             parsed = JSON.parse(text, symbolize_names: true)
+             OpenAI::Internal::Type::Converter.coerce(@text_format, parsed)
+           rescue JSON::ParserError => e
+             raise RuntimeError.new(
+               "Failed to parse structured text as JSON for #{@text_format}: #{e.message}. " \
+               "Raw text: #{text.inspect}"
+             )
+           end
+         end
+       end
+     end
+   end
+ end
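As the code above shows, the helper replays raw SSE events through `ResponseStreamState#handle_event`, accumulates deltas into a response snapshot, and yields augmented events that carry a `snapshot` (delta events) or `parsed` (done events) field. A minimal consumption sketch, assuming `stream` was returned by `openai.responses.stream` as in the README example:

```ruby
stream.each do |event|
  case event
  when OpenAI::Streaming::ResponseTextDeltaEvent
    # `delta` is the new chunk; `snapshot` is all text received so far.
    print(event.delta)
  when OpenAI::Streaming::ResponseCompletedEvent
    puts("\ncompleted response id: #{event.response.id}")
  end
end
```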
data/lib/openai/helpers/structured_output/parsed_json.rb ADDED
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ module OpenAI
+   module Helpers
+     module StructuredOutput
+       # @abstract
+       #
+       # Like OpenAI::Internal::Type::Unknown, but for parsed JSON values, which can be incomplete or malformed.
+       class ParsedJson < OpenAI::Internal::Type::Unknown
+         class << self
+           # @api private
+           #
+           # No coercion needed for Unknown type.
+           #
+           # @param value [Object]
+           #
+           # @param state [Hash{Symbol=>Object}] .
+           #
+           # @option state [Boolean] :translate_names
+           #
+           # @option state [Boolean] :strictness
+           #
+           # @option state [Hash{Symbol=>Object}] :exactness
+           #
+           # @option state [Class<StandardError>] :error
+           #
+           # @option state [Integer] :branched
+           #
+           # @return [Object]
+           def coerce(value, state:)
+             (state[:error] = value) if value.is_a?(StandardError)
+
+             super
+           end
+         end
+       end
+     end
+   end
+ end
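`ParsedJson` is the type the `parsed` attributes below are switched to, and it is what lets partially streamed or malformed JSON pass through coercion without raising (the error is recorded in the coercion state instead). A hedged sketch of the structured-output flow it supports, assuming `responses.stream` forwards a `text_format:` argument as `ResponseStream`'s initializer suggests and that output models derive from `OpenAI::BaseModel` as in the gem's structured-output helpers; `HaikuReview` is hypothetical:

```ruby
class HaikuReview < OpenAI::BaseModel
  required :haiku, String
  required :rating, Integer
end

stream = openai.responses.stream(
  input: "Write a haiku about OpenAI and rate it from 1 to 10.",
  model: :"gpt-4.1",
  text_format: HaikuReview
)

stream.each do |event|
  case event
  when OpenAI::Streaming::ResponseTextDoneEvent
    # `parsed` is the completed JSON coerced into HaikuReview (nil without text_format).
    puts(event.parsed.rating)
  end
end
```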
data/lib/openai/internal/stream.rb CHANGED
@@ -47,7 +47,8 @@ module OpenAI
  message: message
  )
  in decoded
- y << OpenAI::Internal::Type::Converter.coerce(@model, decoded)
+ unwrapped = OpenAI::Internal::Util.dig(decoded, @unwrap)
+ y << OpenAI::Internal::Type::Converter.coerce(@model, unwrapped)
  end
  else
  end
data/lib/openai/internal/transport/base_client.rb CHANGED
@@ -471,6 +471,7 @@ module OpenAI
  self.class.validate!(req)
  model = req.fetch(:model) { OpenAI::Internal::Type::Unknown }
  opts = req[:options].to_h
+ unwrap = req[:unwrap]
  OpenAI::RequestOptions.validate!(opts)
  request = build_request(req.except(:options), opts)
  url = request.fetch(:url)
@@ -487,11 +488,18 @@
  decoded = OpenAI::Internal::Util.decode_content(response, stream: stream)
  case req
  in {stream: Class => st}
- st.new(model: model, url: url, status: status, response: response, stream: decoded)
+ st.new(
+   model: model,
+   url: url,
+   status: status,
+   response: response,
+   unwrap: unwrap,
+   stream: decoded
+ )
  in {page: Class => page}
  page.new(client: self, req: req, headers: response, page_data: decoded)
  else
- unwrapped = OpenAI::Internal::Util.dig(decoded, req[:unwrap])
+ unwrapped = OpenAI::Internal::Util.dig(decoded, unwrap)
  OpenAI::Internal::Type::Converter.coerce(model, unwrapped)
  end
  end
data/lib/openai/internal/type/base_stream.rb CHANGED
@@ -64,12 +64,14 @@ module OpenAI
  # @param url [URI::Generic]
  # @param status [Integer]
  # @param response [Net::HTTPResponse]
+ # @param unwrap [Symbol, Integer, Array<Symbol, Integer>, Proc]
  # @param stream [Enumerable<Object>]
- def initialize(model:, url:, status:, response:, stream:)
+ def initialize(model:, url:, status:, response:, unwrap:, stream:)
  @model = model
  @url = url
  @status = status
  @response = response
+ @unwrap = unwrap
  @stream = stream
  @iterator = iterator
 
data/lib/openai/models/audio/transcription.rb CHANGED
@@ -149,11 +149,11 @@ module OpenAI
  end
 
  class Duration < OpenAI::Internal::Type::BaseModel
- # @!attribute duration
+ # @!attribute seconds
  # Duration of the input audio in seconds.
  #
  # @return [Float]
- required :duration, Float
+ required :seconds, Float
 
  # @!attribute type
  # The type of the usage object. Always `duration` for this variant.
@@ -161,10 +161,10 @@ module OpenAI
  # @return [Symbol, :duration]
  required :type, const: :duration
 
- # @!method initialize(duration:, type: :duration)
+ # @!method initialize(seconds:, type: :duration)
  # Usage statistics for models billed by audio input duration.
  #
- # @param duration [Float] Duration of the input audio in seconds.
+ # @param seconds [Float] Duration of the input audio in seconds.
  #
  # @param type [Symbol, :duration] The type of the usage object. Always `duration` for this variant.
  end
data/lib/openai/models/audio/transcription_verbose.rb CHANGED
@@ -58,11 +58,11 @@ module OpenAI
 
  # @see OpenAI::Models::Audio::TranscriptionVerbose#usage
  class Usage < OpenAI::Internal::Type::BaseModel
- # @!attribute duration
+ # @!attribute seconds
  # Duration of the input audio in seconds.
  #
  # @return [Float]
- required :duration, Float
+ required :seconds, Float
 
  # @!attribute type
  # The type of the usage object. Always `duration` for this variant.
@@ -70,10 +70,10 @@ module OpenAI
  # @return [Symbol, :duration]
  required :type, const: :duration
 
- # @!method initialize(duration:, type: :duration)
+ # @!method initialize(seconds:, type: :duration)
  # Usage statistics for models billed by audio input duration.
  #
- # @param duration [Float] Duration of the input audio in seconds.
+ # @param seconds [Float] Duration of the input audio in seconds.
  #
  # @param type [Symbol, :duration] The type of the usage object. Always `duration` for this variant.
  end
data/lib/openai/models/chat/chat_completion_message.rb CHANGED
@@ -14,7 +14,7 @@ module OpenAI
  # The parsed contents of the message, if JSON schema is specified.
  #
  # @return [Object, nil]
- optional :parsed, OpenAI::Internal::Type::Unknown
+ optional :parsed, OpenAI::StructuredOutput::ParsedJson
 
  # @!attribute refusal
  # The refusal message generated by the model.
data/lib/openai/models/chat/chat_completion_message_tool_call.rb CHANGED
@@ -44,7 +44,7 @@ module OpenAI
  # The parsed contents of the arguments.
  #
  # @return [Object, nil]
- required :parsed, OpenAI::Internal::Type::Unknown
+ required :parsed, OpenAI::StructuredOutput::ParsedJson
 
  # @!attribute name
  # The name of the function to call.
data/lib/openai/models/file_object.rb CHANGED
@@ -36,8 +36,8 @@ module OpenAI
 
  # @!attribute purpose
  # The intended purpose of the file. Supported values are `assistants`,
- # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
- # and `vision`.
+ # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`,
+ # `vision`, and `user_data`.
  #
  # @return [Symbol, OpenAI::Models::FileObject::Purpose]
  required :purpose, enum: -> { OpenAI::FileObject::Purpose }
@@ -91,8 +91,8 @@ module OpenAI
  # @param object [Symbol, :file] The object type, which is always `file`.
 
  # The intended purpose of the file. Supported values are `assistants`,
- # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
- # and `vision`.
+ # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`,
+ # `vision`, and `user_data`.
  #
  # @see OpenAI::Models::FileObject#purpose
  module Purpose
@@ -105,6 +105,7 @@ module OpenAI
  FINE_TUNE = :"fine-tune"
  FINE_TUNE_RESULTS = :"fine-tune-results"
  VISION = :vision
+ USER_DATA = :user_data
 
  # @!method self.values
  # @return [Array<Symbol>]
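With `user_data` added to the `purpose` enum, a file intended as user-provided context can be uploaded under that purpose. A hedged sketch, assuming the `files.create` resource keeps its existing `file:`/`purpose:` signature; `customer_notes.txt` is a placeholder path:

```ruby
require "pathname"

file = openai.files.create(
  file: Pathname("customer_notes.txt"),
  purpose: :user_data
)
puts(file.purpose) # expected to round-trip as the new user_data purpose
```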
data/lib/openai/models/responses/response_function_tool_call.rb CHANGED
@@ -14,7 +14,7 @@ module OpenAI
  # The parsed contents of the arguments.
  #
  # @return [Object, nil]
- required :parsed, OpenAI::Internal::Type::Unknown
+ required :parsed, OpenAI::StructuredOutput::ParsedJson
 
  # @!attribute call_id
  # The unique ID of the function tool call generated by the model.
data/lib/openai/models/responses/response_input_file.rb CHANGED
@@ -22,13 +22,19 @@ module OpenAI
  # @return [String, nil]
  optional :file_id, String, nil?: true
 
+ # @!attribute file_url
+ # The URL of the file to be sent to the model.
+ #
+ # @return [String, nil]
+ optional :file_url, String
+
  # @!attribute filename
  # The name of the file to be sent to the model.
  #
  # @return [String, nil]
  optional :filename, String
 
- # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file)
+ # @!method initialize(file_data: nil, file_id: nil, file_url: nil, filename: nil, type: :input_file)
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::Responses::ResponseInputFile} for more details.
  #
@@ -38,6 +44,8 @@ module OpenAI
  #
  # @param file_id [String, nil] The ID of the file to be sent to the model.
  #
+ # @param file_url [String] The URL of the file to be sent to the model.
+ #
  # @param filename [String] The name of the file to be sent to the model.
  #
  # @param type [Symbol, :input_file] The type of the input item. Always `input_file`.
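The new `file_url` field lets an input file be referenced by URL instead of being uploaded first. A hedged sketch of a Responses request using it; the surrounding `input` message/content shape follows the Responses API format and is an assumption beyond what this diff shows:

```ruby
response = openai.responses.create(
  model: :"gpt-4.1",
  input: [
    {
      role: :user,
      content: [
        {type: :input_text, text: "Summarize this document."},
        {type: :input_file, file_url: "https://example.com/report.pdf"}
      ]
    }
  ]
)
puts(response.id)
```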
data/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb CHANGED
@@ -29,12 +29,12 @@ module OpenAI
  required :sequence_number, Integer
 
  # @!attribute type
- # The type of the event. Always 'response.mcp_call.arguments_delta'.
+ # The type of the event. Always 'response.mcp_call_arguments.delta'.
  #
- # @return [Symbol, :"response.mcp_call.arguments_delta"]
- required :type, const: :"response.mcp_call.arguments_delta"
+ # @return [Symbol, :"response.mcp_call_arguments.delta"]
+ required :type, const: :"response.mcp_call_arguments.delta"
 
- # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_delta")
+ # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call_arguments.delta")
  # Emitted when there is a delta (partial update) to the arguments of an MCP tool
  # call.
  #
@@ -46,7 +46,7 @@ module OpenAI
  #
  # @param sequence_number [Integer] The sequence number of this event.
  #
- # @param type [Symbol, :"response.mcp_call.arguments_delta"] The type of the event. Always 'response.mcp_call.arguments_delta'.
+ # @param type [Symbol, :"response.mcp_call_arguments.delta"] The type of the event. Always 'response.mcp_call_arguments.delta'.
  end
  end
  end
data/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb CHANGED
@@ -29,12 +29,12 @@ module OpenAI
  required :sequence_number, Integer
 
  # @!attribute type
- # The type of the event. Always 'response.mcp_call.arguments_done'.
+ # The type of the event. Always 'response.mcp_call_arguments.done'.
  #
- # @return [Symbol, :"response.mcp_call.arguments_done"]
- required :type, const: :"response.mcp_call.arguments_done"
+ # @return [Symbol, :"response.mcp_call_arguments.done"]
+ required :type, const: :"response.mcp_call_arguments.done"
 
- # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_done")
+ # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call_arguments.done")
  # Emitted when the arguments for an MCP tool call are finalized.
  #
  # @param arguments [Object] The finalized arguments for the MCP tool call.
@@ -45,7 +45,7 @@ module OpenAI
  #
  # @param sequence_number [Integer] The sequence number of this event.
  #
- # @param type [Symbol, :"response.mcp_call.arguments_done"] The type of the event. Always 'response.mcp_call.arguments_done'.
+ # @param type [Symbol, :"response.mcp_call_arguments.done"] The type of the event. Always 'response.mcp_call_arguments.done'.
  end
  end
  end
data/lib/openai/models/responses/response_output_text.rb CHANGED
@@ -23,7 +23,7 @@ module OpenAI
  # The parsed contents of the output, if JSON schema is specified.
  #
  # @return [Object, nil]
- optional :parsed, OpenAI::Internal::Type::Unknown
+ optional :parsed, OpenAI::StructuredOutput::ParsedJson
 
  # @!attribute type
  # The type of the output text. Always `output_text`.
data/lib/openai/models/responses/response_output_text_annotation_added_event.rb CHANGED
@@ -41,12 +41,12 @@ module OpenAI
  required :sequence_number, Integer
 
  # @!attribute type
- # The type of the event. Always 'response.output_text_annotation.added'.
+ # The type of the event. Always 'response.output_text.annotation.added'.
  #
- # @return [Symbol, :"response.output_text_annotation.added"]
- required :type, const: :"response.output_text_annotation.added"
+ # @return [Symbol, :"response.output_text.annotation.added"]
+ required :type, const: :"response.output_text.annotation.added"
 
- # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text_annotation.added")
+ # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text.annotation.added")
  # Emitted when an annotation is added to output text content.
  #
  # @param annotation [Object] The annotation object being added. (See annotation schema for details.)
@@ -61,7 +61,7 @@ module OpenAI
  #
  # @param sequence_number [Integer] The sequence number of this event.
  #
- # @param type [Symbol, :"response.output_text_annotation.added"] The type of the event. Always 'response.output_text_annotation.added'.
+ # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always 'response.output_text.annotation.added'.
  end
  end
  end