openai 0.13.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +21 -0
  3. data/README.md +1 -1
  4. data/lib/openai/helpers/structured_output/json_schema_converter.rb +34 -10
  5. data/lib/openai/models/eval_create_params.rb +50 -5
  6. data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +50 -5
  7. data/lib/openai/models/evals/run_cancel_response.rb +48 -5
  8. data/lib/openai/models/evals/run_create_params.rb +50 -5
  9. data/lib/openai/models/evals/run_create_response.rb +48 -5
  10. data/lib/openai/models/evals/run_list_response.rb +48 -5
  11. data/lib/openai/models/evals/run_retrieve_response.rb +48 -5
  12. data/lib/openai/models/graders/label_model_grader.rb +48 -5
  13. data/lib/openai/models/graders/score_model_grader.rb +48 -5
  14. data/lib/openai/models/image_edit_completed_event.rb +198 -0
  15. data/lib/openai/models/image_edit_params.rb +36 -1
  16. data/lib/openai/models/image_edit_partial_image_event.rb +135 -0
  17. data/lib/openai/models/image_edit_stream_event.rb +21 -0
  18. data/lib/openai/models/image_gen_completed_event.rb +198 -0
  19. data/lib/openai/models/image_gen_partial_image_event.rb +135 -0
  20. data/lib/openai/models/image_gen_stream_event.rb +21 -0
  21. data/lib/openai/models/image_generate_params.rb +13 -1
  22. data/lib/openai/models/images_response.rb +3 -0
  23. data/lib/openai/models/responses/response_output_refusal.rb +2 -2
  24. data/lib/openai/models/responses/tool.rb +30 -1
  25. data/lib/openai/models.rb +12 -0
  26. data/lib/openai/resources/images.rb +140 -2
  27. data/lib/openai/version.rb +1 -1
  28. data/lib/openai.rb +6 -0
  29. data/rbi/openai/helpers/structured_output/json_schema_converter.rbi +4 -0
  30. data/rbi/openai/models/eval_create_params.rbi +76 -7
  31. data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +76 -7
  32. data/rbi/openai/models/evals/run_cancel_response.rbi +70 -5
  33. data/rbi/openai/models/evals/run_create_params.rbi +76 -7
  34. data/rbi/openai/models/evals/run_create_response.rbi +70 -5
  35. data/rbi/openai/models/evals/run_list_response.rbi +70 -5
  36. data/rbi/openai/models/evals/run_retrieve_response.rbi +70 -5
  37. data/rbi/openai/models/graders/label_model_grader.rbi +74 -7
  38. data/rbi/openai/models/graders/score_model_grader.rbi +74 -7
  39. data/rbi/openai/models/image_edit_completed_event.rbi +346 -0
  40. data/rbi/openai/models/image_edit_params.rbi +51 -0
  41. data/rbi/openai/models/image_edit_partial_image_event.rbi +249 -0
  42. data/rbi/openai/models/image_edit_stream_event.rbi +22 -0
  43. data/rbi/openai/models/image_gen_completed_event.rbi +339 -0
  44. data/rbi/openai/models/image_gen_partial_image_event.rbi +243 -0
  45. data/rbi/openai/models/image_gen_stream_event.rbi +22 -0
  46. data/rbi/openai/models/image_generate_params.rbi +12 -0
  47. data/rbi/openai/models/responses/response_output_refusal.rbi +2 -2
  48. data/rbi/openai/models/responses/tool.rbi +61 -0
  49. data/rbi/openai/models.rbi +12 -0
  50. data/rbi/openai/resources/images.rbi +225 -0
  51. data/sig/openai/models/eval_create_params.rbs +29 -0
  52. data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +29 -0
  53. data/sig/openai/models/evals/run_cancel_response.rbs +33 -0
  54. data/sig/openai/models/evals/run_create_params.rbs +33 -0
  55. data/sig/openai/models/evals/run_create_response.rbs +33 -0
  56. data/sig/openai/models/evals/run_list_response.rbs +33 -0
  57. data/sig/openai/models/evals/run_retrieve_response.rbs +33 -0
  58. data/sig/openai/models/graders/label_model_grader.rbs +29 -0
  59. data/sig/openai/models/graders/score_model_grader.rbs +29 -0
  60. data/sig/openai/models/image_edit_completed_event.rbs +150 -0
  61. data/sig/openai/models/image_edit_params.rbs +21 -0
  62. data/sig/openai/models/image_edit_partial_image_event.rbs +105 -0
  63. data/sig/openai/models/image_edit_stream_event.rbs +12 -0
  64. data/sig/openai/models/image_gen_completed_event.rbs +150 -0
  65. data/sig/openai/models/image_gen_partial_image_event.rbs +105 -0
  66. data/sig/openai/models/image_gen_stream_event.rbs +12 -0
  67. data/sig/openai/models/image_generate_params.rbs +5 -0
  68. data/sig/openai/models/responses/tool.rbs +16 -0
  69. data/sig/openai/models.rbs +12 -0
  70. data/sig/openai/resources/images.rbs +38 -0
  71. metadata +20 -2
data/lib/openai/resources/images.rb CHANGED
@@ -39,13 +39,15 @@ module OpenAI
  )
  end

+ # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart.
+ #
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::ImageEditParams} for more details.
  #
  # Creates an edited or extended image given one or more source images and a
  # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`.
  #
- # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
+ # @overload edit(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
  #
  # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array<Pathname, StringIO, IO, String, OpenAI::FilePart>] The image(s) to edit. Must be a supported image file or an array of images.
  #
@@ -53,6 +55,8 @@ module OpenAI
  #
  # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s).
  #
+ # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features,
+ #
  # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind
  #
  # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup
@@ -63,6 +67,8 @@ module OpenAI
  #
  # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is
  #
+ # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for
+ #
  # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are
  #
  # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or `
@@ -78,6 +84,10 @@ module OpenAI
  # @see OpenAI::Models::ImageEditParams
  def edit(params)
  parsed, options = OpenAI::ImageEditParams.dump_request(params)
+ if parsed[:stream]
+ message = "Please use `#stream_raw` for the streaming use case."
+ raise ArgumentError.new(message)
+ end
  @client.request(
  method: :post,
  path: "images/edits",
@@ -88,13 +98,76 @@ module OpenAI
  )
  end

+ # See {OpenAI::Resources::Images#edit} for non-streaming counterpart.
+ #
+ # Some parameter documentations has been truncated, see
+ # {OpenAI::Models::ImageEditParams} for more details.
+ #
+ # Creates an edited or extended image given one or more source images and a
+ # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`.
+ #
+ # @overload stream_raw(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
+ #
+ # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array<Pathname, StringIO, IO, String, OpenAI::FilePart>] The image(s) to edit. Must be a supported image file or an array of images.
+ #
+ # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character
+ #
+ # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s).
+ #
+ # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features,
+ #
+ # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind
+ #
+ # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup
+ #
+ # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10.
+ #
+ # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter
+ #
+ # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is
+ #
+ # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for
+ #
+ # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are
+ #
+ # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or `
+ #
+ # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands
+ #
+ # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor
+ #
+ # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+ #
+ # @return [OpenAI::Internal::Stream<OpenAI::Models::ImageEditPartialImageEvent, OpenAI::Models::ImageEditCompletedEvent>]
+ #
+ # @see OpenAI::Models::ImageEditParams
+ def edit_stream_raw(params)
+ parsed, options = OpenAI::ImageEditParams.dump_request(params)
+ unless parsed.fetch(:stream, true)
+ message = "Please use `#edit` for the non-streaming use case."
+ raise ArgumentError.new(message)
+ end
+ parsed.store(:stream, true)
+ @client.request(
+ method: :post,
+ path: "images/edits",
+ headers: {"content-type" => "multipart/form-data", "accept" => "text/event-stream"},
+ body: parsed,
+ stream: OpenAI::Internal::Stream,
+ model: OpenAI::ImageEditStreamEvent,
+ options: options
+ )
+ end
+
+ # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart.
+ #
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::ImageGenerateParams} for more details.
  #
  # Creates an image given a prompt.
  # [Learn more](https://platform.openai.com/docs/guides/images).
  #
- # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {})
+ # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {})
  #
  # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte
  #
@@ -110,6 +183,8 @@ module OpenAI
  #
  # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su
  #
+ # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for
+ #
  # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated.
  #
  # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned
@@ -127,6 +202,10 @@ module OpenAI
  # @see OpenAI::Models::ImageGenerateParams
  def generate(params)
  parsed, options = OpenAI::ImageGenerateParams.dump_request(params)
+ if parsed[:stream]
+ message = "Please use `#stream_raw` for the streaming use case."
+ raise ArgumentError.new(message)
+ end
  @client.request(
  method: :post,
  path: "images/generations",
@@ -136,6 +215,65 @@ module OpenAI
  )
  end

+ # See {OpenAI::Resources::Images#generate} for non-streaming counterpart.
+ #
+ # Some parameter documentations has been truncated, see
+ # {OpenAI::Models::ImageGenerateParams} for more details.
+ #
+ # Creates an image given a prompt.
+ # [Learn more](https://platform.openai.com/docs/guides/images).
+ #
+ # @overload stream_raw(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {})
+ #
+ # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte
+ #
+ # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s).
+ #
+ # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im
+ #
+ # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must
+ #
+ # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
+ #
+ # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only
+ #
+ # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su
+ #
+ # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for
+ #
+ # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated.
+ #
+ # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned
+ #
+ # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands
+ #
+ # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e-
+ #
+ # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor
+ #
+ # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+ #
+ # @return [OpenAI::Internal::Stream<OpenAI::Models::ImageGenPartialImageEvent, OpenAI::Models::ImageGenCompletedEvent>]
+ #
+ # @see OpenAI::Models::ImageGenerateParams
+ def generate_stream_raw(params)
+ parsed, options = OpenAI::ImageGenerateParams.dump_request(params)
+ unless parsed.fetch(:stream, true)
+ message = "Please use `#generate` for the non-streaming use case."
+ raise ArgumentError.new(message)
+ end
+ parsed.store(:stream, true)
+ @client.request(
+ method: :post,
+ path: "images/generations",
+ headers: {"accept" => "text/event-stream"},
+ body: parsed,
+ stream: OpenAI::Internal::Stream,
+ model: OpenAI::ImageGenStreamEvent,
+ options: options
+ )
+ end
+
  # @api private
  #
  # @param client [OpenAI::Client]
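The notable additions above are the streaming entry points (`edit_stream_raw` and `generate_stream_raw`) plus the new `input_fidelity` and `partial_images` parameters. Below is a minimal usage sketch: the method and event class names are taken from the diff, while the client setup and the `b64_json` / `partial_image_index` accessors on the events are assumptions based on the underlying Images API and may differ from the generated models.

```ruby
require "openai"

# Sketch only: assumes OPENAI_API_KEY is set in the environment and that the
# accessors used below exist on the generated event models (not verified here).
client = OpenAI::Client.new

# `generate_stream_raw` (added in 0.14.0) returns an OpenAI::Internal::Stream
# that yields ImageGenPartialImageEvent / ImageGenCompletedEvent objects.
stream = client.images.generate_stream_raw(
  model: "gpt-image-1",
  prompt: "A watercolor fox in a misty forest",
  partial_images: 2 # new in 0.14.0: number of partial previews to emit
)

stream.each do |event|
  case event
  when OpenAI::Models::ImageGenPartialImageEvent
    # Assumed fields: each partial event carries a base64-encoded preview frame.
    File.binwrite("partial_#{event.partial_image_index}.png", event.b64_json.unpack1("m0"))
  when OpenAI::Models::ImageGenCompletedEvent
    File.binwrite("final.png", event.b64_json.unpack1("m0"))
  end
end
```

`edit_stream_raw` follows the same pattern against `images/edits`, yielding `ImageEditPartialImageEvent` / `ImageEditCompletedEvent`; calling `edit` or `generate` with `stream: true` now raises an `ArgumentError` pointing at the streaming variant instead.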
data/lib/openai/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module OpenAI
- VERSION = "0.13.0"
+ VERSION = "0.14.0"
  end
data/lib/openai.rb CHANGED
@@ -320,8 +320,14 @@ require_relative "openai/models/graders/multi_grader"
  require_relative "openai/models/graders/string_check_grader"
  require_relative "openai/models/image"
  require_relative "openai/models/image_create_variation_params"
+ require_relative "openai/models/image_edit_completed_event"
  require_relative "openai/models/image_edit_params"
+ require_relative "openai/models/image_edit_partial_image_event"
+ require_relative "openai/models/image_edit_stream_event"
+ require_relative "openai/models/image_gen_completed_event"
  require_relative "openai/models/image_generate_params"
+ require_relative "openai/models/image_gen_partial_image_event"
+ require_relative "openai/models/image_gen_stream_event"
  require_relative "openai/models/image_model"
  require_relative "openai/models/images_response"
  require_relative "openai/models/metadata"
data/rbi/openai/helpers/structured_output/json_schema_converter.rbi CHANGED
@@ -7,8 +7,12 @@ module OpenAI

  # To customize the JSON schema conversion for a type, implement the `JsonSchemaConverter` interface.
  module JsonSchemaConverter
+ # @api private
  POINTER = T.let(Object.new.freeze, T.anything)
+ # @api private
  COUNTER = T.let(Object.new.freeze, T.anything)
+ # @api private
+ NO_REF = T.let(Object.new.freeze, T.anything)

  Input =
  T.type_alias do
data/rbi/openai/models/eval_create_params.rbi CHANGED
@@ -474,13 +474,15 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText,
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+ T::Array[T.anything]
  )
  )
  end
@@ -524,7 +526,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash,
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol,
@@ -533,7 +537,7 @@ module OpenAI
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -550,7 +554,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText,
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol,
@@ -562,7 +568,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -571,7 +577,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText,
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -609,6 +617,59 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(
+ image_url: String,
+ detail: String,
+ type: Symbol
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -618,6 +679,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or
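Beyond the wording change from "Text inputs" to "Inputs", the content union for label-model grader messages now also admits an `input_image` part and plain arrays of parts. A small construction sketch follows; the class paths and keyword names come from the sigs above, while the enclosing eval payload and the call into the evals resource are omitted.

```ruby
require "openai"

# Sketch only: builds the new InputImage content variant declared above and
# attaches it to an EvalItem. Field names follow the sigs in this diff.
image_part =
  OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage.new(
    image_url: "https://example.com/receipt.png",
    detail: "low" # optional: "high", "low", or "auto" (defaults to "auto")
  )

# The EvalItem `content:` union now accepts a String, ResponseInputText,
# OutputText, InputImage, or an array of such parts (typed as T::Array[T.anything]).
eval_item =
  OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem.new(
    role: :user,
    content: image_part
  )
```

The same `InputImage` shape is mirrored in the completions run data source and in the run response models below.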
data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi CHANGED
@@ -515,13 +515,15 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText,
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+ T::Array[T.anything]
  )
  )
  end
@@ -565,7 +567,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash,
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol,
@@ -574,7 +578,7 @@ module OpenAI
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -591,7 +595,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText,
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol,
@@ -603,7 +609,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -612,7 +618,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText,
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -652,6 +660,59 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(
+ image_url: String,
+ detail: String,
+ type: Symbol
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -661,6 +722,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or
data/rbi/openai/models/evals/run_cancel_response.rbi CHANGED
@@ -749,7 +749,7 @@ module OpenAI
  )
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  sig do
  returns(
  OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants
@@ -795,7 +795,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText::OrHash,
- OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash,
+ T::Array[T.anything]
  ),
  role:
  OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -804,7 +806,7 @@ module OpenAI
  ).returns(T.attached_class)
  end
  def self.new(
- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  content:,
  # The role of the message input. One of `user`, `assistant`, `system`, or
  # `developer`.
@@ -829,7 +831,7 @@ module OpenAI
  def to_hash
  end

- # Text inputs to the model - can contain template strings.
+ # Inputs to the model - can contain template strings.
  module Content
  extend OpenAI::Internal::Type::Union

@@ -838,7 +840,9 @@ module OpenAI
  T.any(
  String,
  OpenAI::Responses::ResponseInputText,
- OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+ T::Array[T.anything]
  )
  end

@@ -878,6 +882,59 @@ module OpenAI
  end
  end

+ class InputImage < OpenAI::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+ OpenAI::Internal::AnyHash
+ )
+ end
+
+ # The URL of the image input.
+ sig { returns(String) }
+ attr_accessor :image_url
+
+ # The type of the image input. Always `input_image`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ sig { returns(T.nilable(String)) }
+ attr_reader :detail
+
+ sig { params(detail: String).void }
+ attr_writer :detail
+
+ # An image input to the model.
+ sig do
+ params(
+ image_url: String,
+ detail: String,
+ type: Symbol
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # The URL of the image input.
+ image_url:,
+ # The detail level of the image to be sent to the model. One of `high`, `low`, or
+ # `auto`. Defaults to `auto`.
+ detail: nil,
+ # The type of the image input. Always `input_image`.
+ type: :input_image
+ )
+ end
+
+ sig do
+ override.returns(
+ { image_url: String, type: Symbol, detail: String }
+ )
+ end
+ def to_hash
+ end
+ end
+
  sig do
  override.returns(
  T::Array[
@@ -887,6 +944,14 @@ module OpenAI
  end
  def self.variants
  end
+
+ AnArrayOfInputTextAndInputImageArray =
+ T.let(
+ OpenAI::Internal::Type::ArrayOf[
+ OpenAI::Internal::Type::Unknown
+ ],
+ OpenAI::Internal::Type::Converter
+ )
  end

  # The role of the message input. One of `user`, `assistant`, `system`, or