openai 0.12.0 → 0.13.1

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (59)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +21 -0
  3. data/README.md +1 -1
  4. data/lib/openai/helpers/structured_output/json_schema_converter.rb +34 -10
  5. data/lib/openai/models/audio/transcription.rb +4 -4
  6. data/lib/openai/models/audio/transcription_verbose.rb +4 -4
  7. data/lib/openai/models/eval_create_params.rb +50 -5
  8. data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +50 -5
  9. data/lib/openai/models/evals/run_cancel_response.rb +48 -5
  10. data/lib/openai/models/evals/run_create_params.rb +50 -5
  11. data/lib/openai/models/evals/run_create_response.rb +48 -5
  12. data/lib/openai/models/evals/run_list_response.rb +48 -5
  13. data/lib/openai/models/evals/run_retrieve_response.rb +48 -5
  14. data/lib/openai/models/file_object.rb +5 -4
  15. data/lib/openai/models/graders/label_model_grader.rb +48 -5
  16. data/lib/openai/models/graders/score_model_grader.rb +48 -5
  17. data/lib/openai/models/responses/response_input_file.rb +9 -1
  18. data/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb +5 -5
  19. data/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb +5 -5
  20. data/lib/openai/models/responses/response_output_text_annotation_added_event.rb +5 -5
  21. data/lib/openai/models/responses/response_stream_event.rb +3 -3
  22. data/lib/openai/models/responses/tool.rb +9 -1
  23. data/lib/openai/version.rb +1 -1
  24. data/rbi/openai/helpers/structured_output/json_schema_converter.rbi +4 -0
  25. data/rbi/openai/models/audio/transcription.rbi +4 -4
  26. data/rbi/openai/models/audio/transcription_verbose.rbi +4 -6
  27. data/rbi/openai/models/eval_create_params.rbi +76 -7
  28. data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +76 -7
  29. data/rbi/openai/models/evals/run_cancel_response.rbi +70 -5
  30. data/rbi/openai/models/evals/run_create_params.rbi +76 -7
  31. data/rbi/openai/models/evals/run_create_response.rbi +70 -5
  32. data/rbi/openai/models/evals/run_list_response.rbi +70 -5
  33. data/rbi/openai/models/evals/run_retrieve_response.rbi +70 -5
  34. data/rbi/openai/models/file_object.rbi +7 -6
  35. data/rbi/openai/models/graders/label_model_grader.rbi +74 -7
  36. data/rbi/openai/models/graders/score_model_grader.rbi +74 -7
  37. data/rbi/openai/models/responses/response_input_file.rbi +11 -0
  38. data/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi +3 -3
  39. data/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi +3 -3
  40. data/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi +3 -3
  41. data/rbi/openai/models/responses/tool.rbi +12 -1
  42. data/sig/openai/models/audio/transcription.rbs +4 -4
  43. data/sig/openai/models/audio/transcription_verbose.rbs +4 -4
  44. data/sig/openai/models/eval_create_params.rbs +29 -0
  45. data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +29 -0
  46. data/sig/openai/models/evals/run_cancel_response.rbs +33 -0
  47. data/sig/openai/models/evals/run_create_params.rbs +33 -0
  48. data/sig/openai/models/evals/run_create_response.rbs +33 -0
  49. data/sig/openai/models/evals/run_list_response.rbs +33 -0
  50. data/sig/openai/models/evals/run_retrieve_response.rbs +33 -0
  51. data/sig/openai/models/file_object.rbs +2 -0
  52. data/sig/openai/models/graders/label_model_grader.rbs +29 -0
  53. data/sig/openai/models/graders/score_model_grader.rbs +29 -0
  54. data/sig/openai/models/responses/response_input_file.rbs +7 -0
  55. data/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +4 -4
  56. data/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +4 -4
  57. data/sig/openai/models/responses/response_output_text_annotation_added_event.rbs +4 -4
  58. data/sig/openai/models/responses/tool.rbs +9 -2
  59. metadata +2 -2
@@ -745,7 +745,7 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants
@@ -791,7 +791,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+ OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash,
+ OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -800,7 +802,7 @@ module OpenAI
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -825,7 +827,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -834,7 +836,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+ OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText,
+ OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -874,6 +878,59 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(
+ image_url: String,
+ detail: String,
+ type: Symbol
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -883,6 +940,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or
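The hunks above widen the eval message content union: a template message can now carry an image part, or a raw array of content parts, in addition to text. A minimal sketch of the new variant, using a class path from this diff (the URL is a placeholder; the attribute names come straight from the generated model):

    image_part =
      OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage.new(
        image_url: "https://example.com/photo.png", # placeholder URL
        detail: "low"                               # optional: "high", "low", or "auto" (defaults to "auto")
      )
    image_part.type # => :input_image

The same InputImage shape is repeated below for the run retrieve response, the run create params, and the grader inputs.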
@@ -751,7 +751,7 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants
@@ -797,7 +797,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -806,7 +808,7 @@ module OpenAI
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -831,7 +833,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -840,7 +842,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -880,6 +884,59 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(
+ image_url: String,
+ detail: String,
+ type: Symbol
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -889,6 +946,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or
@@ -27,8 +27,8 @@ module OpenAI
  attr_accessor :object

  # The intended purpose of the file. Supported values are `assistants`,
- # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
- # and `vision`.
+ # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`,
+ # `vision`, and `user_data`.
  sig { returns(OpenAI::FileObject::Purpose::TaggedSymbol) }
  attr_accessor :purpose

@@ -76,8 +76,8 @@ module OpenAI
  # The name of the file.
  filename:,
  # The intended purpose of the file. Supported values are `assistants`,
- # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
- # and `vision`.
+ # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`,
+ # `vision`, and `user_data`.
  purpose:,
  # Deprecated. The current status of the file, which can be either `uploaded`,
  # `processed`, or `error`.
@@ -111,8 +111,8 @@ module OpenAI
  end

  # The intended purpose of the file. Supported values are `assistants`,
- # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
- # and `vision`.
+ # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`,
+ # `vision`, and `user_data`.
  module Purpose
  extend OpenAI::Internal::Type::Enum

@@ -132,6 +132,7 @@ module OpenAI
  FINE_TUNE_RESULTS =
  T.let(:"fine-tune-results", OpenAI::FileObject::Purpose::TaggedSymbol)
  VISION = T.let(:vision, OpenAI::FileObject::Purpose::TaggedSymbol)
+ USER_DATA = T.let(:user_data, OpenAI::FileObject::Purpose::TaggedSymbol)

  sig do
  override.returns(T::Array[OpenAI::FileObject::Purpose::TaggedSymbol])
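With `user_data` added to the purpose enum, files uploaded for that purpose now map to a recognized `OpenAI::FileObject::Purpose` member instead of an unknown value. A hedged sketch, assuming the gem's usual `files.create` call (the local filename is a placeholder):

    client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
    file = client.files.create(
      file: Pathname("profile.json"), # placeholder local file
      purpose: :user_data
    )
    file.purpose # => :user_data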
@@ -85,13 +85,15 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+ OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+ OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+ T::Array[T.anything]
  )
  )
  end
@@ -132,14 +134,16 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash
+ OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash,
+ OpenAI::Graders::LabelModelGrader::Input::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol,
  type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -156,7 +160,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+ OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+ OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+ T::Array[T.anything]
  ),
  role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol,
  type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol
@@ -166,7 +172,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -175,7 +181,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+ OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+ OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -213,6 +221,57 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(image_url: String, detail: String, type: Symbol).returns(
+ T.attached_class
+ )
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -222,6 +281,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or
@@ -92,13 +92,15 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText
+ OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+ OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+ T::Array[T.anything]
  )
  )
  end
@@ -139,14 +141,16 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash
+ OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash,
+ OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol,
  type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -163,7 +167,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText
+ OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+ OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+ T::Array[T.anything]
  ),
  role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol,
  type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol
@@ -173,7 +179,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -182,7 +188,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText
+ OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+ OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -220,6 +228,57 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(image_url: String, detail: String, type: Symbol).returns(
+ T.attached_class
+ )
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -229,6 +288,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or
@@ -27,6 +27,13 @@ module OpenAI
  sig { returns(T.nilable(String)) }
  attr_accessor :file_id

+ # The URL of the file to be sent to the model.
+ sig { returns(T.nilable(String)) }
+ attr_reader :file_url
+
+ sig { params(file_url: String).void }
+ attr_writer :file_url
+
  # The name of the file to be sent to the model.
  sig { returns(T.nilable(String)) }
  attr_reader :filename
@@ -39,6 +46,7 @@ module OpenAI
  params(
  file_data: String,
  file_id: T.nilable(String),
+ file_url: String,
  filename: String,
  type: Symbol
  ).returns(T.attached_class)
@@ -48,6 +56,8 @@ module OpenAI
  file_data: nil,
  # The ID of the file to be sent to the model.
  file_id: nil,
+ # The URL of the file to be sent to the model.
+ file_url: nil,
  # The name of the file to be sent to the model.
  filename: nil,
  # The type of the input item. Always `input_file`.
@@ -61,6 +71,7 @@ module OpenAI
  type: Symbol,
  file_data: String,
  file_id: T.nilable(String),
+ file_url: String,
  filename: String
  }
  )
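`file_url` joins `file_data`, `file_id`, and `filename` as an optional way to hand a file to the Responses API by reference. A minimal sketch of constructing the input item with the new field (the URL is a placeholder):

    input_file =
      OpenAI::Responses::ResponseInputFile.new(
        file_url: "https://example.com/handbook.pdf", # placeholder URL
        filename: "handbook.pdf"
      )
    input_file.type # => :input_file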
@@ -28,7 +28,7 @@ module OpenAI
  sig { returns(Integer) }
  attr_accessor :sequence_number

- # The type of the event. Always 'response.mcp_call.arguments_delta'.
+ # The type of the event. Always 'response.mcp_call_arguments.delta'.
  sig { returns(Symbol) }
  attr_accessor :type

@@ -52,8 +52,8 @@ module OpenAI
  output_index:,
  # The sequence number of this event.
  sequence_number:,
- # The type of the event. Always 'response.mcp_call.arguments_delta'.
- type: :"response.mcp_call.arguments_delta"
+ # The type of the event. Always 'response.mcp_call_arguments.delta'.
+ type: :"response.mcp_call_arguments.delta"
  )
  end

@@ -28,7 +28,7 @@ module OpenAI
  sig { returns(Integer) }
  attr_accessor :sequence_number

- # The type of the event. Always 'response.mcp_call.arguments_done'.
+ # The type of the event. Always 'response.mcp_call_arguments.done'.
  sig { returns(Symbol) }
  attr_accessor :type

@@ -51,8 +51,8 @@ module OpenAI
  output_index:,
  # The sequence number of this event.
  sequence_number:,
- # The type of the event. Always 'response.mcp_call.arguments_done'.
- type: :"response.mcp_call_arguments.done"
+ # The type of the event. Always 'response.mcp_call_arguments.done'.
+ type: :"response.mcp_call_arguments.done"
  )
  end

@@ -36,7 +36,7 @@ module OpenAI
  sig { returns(Integer) }
  attr_accessor :sequence_number

- # The type of the event. Always 'response.output_text_annotation.added'.
+ # The type of the event. Always 'response.output_text.annotation.added'.
  sig { returns(Symbol) }
  attr_accessor :type

@@ -65,8 +65,8 @@ module OpenAI
  output_index:,
  # The sequence number of this event.
  sequence_number:,
- # The type of the event. Always 'response.output_text_annotation.added'.
- type: :"response.output_text_annotation.added"
+ # The type of the event. Always 'response.output_text.annotation.added'.
+ type: :"response.output_text.annotation.added"
  )
  end
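The last hunks rename streaming event types, so consumers that match on `type` symbols need the new spellings. A hedged sketch of a streaming loop under that assumption (the `delta` and `annotation` readers are taken from the corresponding event models; the `stream` object stands in for wherever you already consume Responses events):

    # old name                               -> new name
    # response.mcp_call.arguments_delta      -> response.mcp_call_arguments.delta
    # response.mcp_call.arguments_done       -> response.mcp_call_arguments.done
    # response.output_text_annotation.added  -> response.output_text.annotation.added
    stream.each do |event|
      case event.type
      when :"response.mcp_call_arguments.delta"
        print event.delta # partial arguments for the MCP tool call
      when :"response.output_text.annotation.added"
        puts "annotation added: #{event.annotation.inspect}"
      end
    end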