openai 0.13.0 → 0.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +13 -0
- data/README.md +1 -1
- data/lib/openai/helpers/structured_output/json_schema_converter.rb +34 -10
- data/lib/openai/models/eval_create_params.rb +50 -5
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +50 -5
- data/lib/openai/models/evals/run_cancel_response.rb +48 -5
- data/lib/openai/models/evals/run_create_params.rb +50 -5
- data/lib/openai/models/evals/run_create_response.rb +48 -5
- data/lib/openai/models/evals/run_list_response.rb +48 -5
- data/lib/openai/models/evals/run_retrieve_response.rb +48 -5
- data/lib/openai/models/graders/label_model_grader.rb +48 -5
- data/lib/openai/models/graders/score_model_grader.rb +48 -5
- data/lib/openai/version.rb +1 -1
- data/rbi/openai/helpers/structured_output/json_schema_converter.rbi +4 -0
- data/rbi/openai/models/eval_create_params.rbi +76 -7
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +76 -7
- data/rbi/openai/models/evals/run_cancel_response.rbi +70 -5
- data/rbi/openai/models/evals/run_create_params.rbi +76 -7
- data/rbi/openai/models/evals/run_create_response.rbi +70 -5
- data/rbi/openai/models/evals/run_list_response.rbi +70 -5
- data/rbi/openai/models/evals/run_retrieve_response.rbi +70 -5
- data/rbi/openai/models/graders/label_model_grader.rbi +74 -7
- data/rbi/openai/models/graders/score_model_grader.rbi +74 -7
- data/sig/openai/models/eval_create_params.rbs +29 -0
- data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +29 -0
- data/sig/openai/models/evals/run_cancel_response.rbs +33 -0
- data/sig/openai/models/evals/run_create_params.rbs +33 -0
- data/sig/openai/models/evals/run_create_response.rbs +33 -0
- data/sig/openai/models/evals/run_list_response.rbs +33 -0
- data/sig/openai/models/evals/run_retrieve_response.rbs +33 -0
- data/sig/openai/models/graders/label_model_grader.rbs +29 -0
- data/sig/openai/models/graders/score_model_grader.rbs +29 -0
- metadata +2 -2
data/rbi/openai/models/graders/label_model_grader.rbi:

@@ -85,13 +85,15 @@ module OpenAI
           )
         end
 
-        #
+        # Inputs to the model - can contain template strings.
         sig do
           returns(
             T.any(
               String,
               OpenAI::Responses::ResponseInputText,
-              OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+              OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+              OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+              T::Array[T.anything]
             )
           )
         end
@@ -132,14 +134,16 @@ module OpenAI
             T.any(
               String,
               OpenAI::Responses::ResponseInputText::OrHash,
-              OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash
+              OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash,
+              OpenAI::Graders::LabelModelGrader::Input::Content::InputImage::OrHash,
+              T::Array[T.anything]
             ),
             role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol,
             type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol
           ).returns(T.attached_class)
         end
         def self.new(
-          #
+          # Inputs to the model - can contain template strings.
           content:,
           # The role of the message input. One of `user`, `assistant`, `system`, or
           # `developer`.
@@ -156,7 +160,9 @@ module OpenAI
             T.any(
               String,
               OpenAI::Responses::ResponseInputText,
-              OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+              OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+              OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+              T::Array[T.anything]
             ),
             role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol,
             type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol
@@ -166,7 +172,7 @@ module OpenAI
         def to_hash
         end
 
-        #
+        # Inputs to the model - can contain template strings.
         module Content
           extend OpenAI::Internal::Type::Union
 
@@ -175,7 +181,9 @@ module OpenAI
               T.any(
                 String,
                 OpenAI::Responses::ResponseInputText,
-                OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+                OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+                OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+                T::Array[T.anything]
               )
             end
 
@@ -213,6 +221,57 @@ module OpenAI
             end
           end
 
+          class InputImage < OpenAI::Internal::Type::BaseModel
+            OrHash =
+              T.type_alias do
+                T.any(
+                  OpenAI::Graders::LabelModelGrader::Input::Content::InputImage,
+                  OpenAI::Internal::AnyHash
+                )
+              end
+
+            # The URL of the image input.
+            sig { returns(String) }
+            attr_accessor :image_url
+
+            # The type of the image input. Always `input_image`.
+            sig { returns(Symbol) }
+            attr_accessor :type
+
+            # The detail level of the image to be sent to the model. One of `high`, `low`, or
+            # `auto`. Defaults to `auto`.
+            sig { returns(T.nilable(String)) }
+            attr_reader :detail
+
+            sig { params(detail: String).void }
+            attr_writer :detail
+
+            # An image input to the model.
+            sig do
+              params(image_url: String, detail: String, type: Symbol).returns(
+                T.attached_class
+              )
+            end
+            def self.new(
+              # The URL of the image input.
+              image_url:,
+              # The detail level of the image to be sent to the model. One of `high`, `low`, or
+              # `auto`. Defaults to `auto`.
+              detail: nil,
+              # The type of the image input. Always `input_image`.
+              type: :input_image
+            )
+            end
+
+            sig do
+              override.returns(
+                { image_url: String, type: Symbol, detail: String }
+              )
+            end
+            def to_hash
+            end
+          end
+
           sig do
             override.returns(
               T::Array[
@@ -222,6 +281,14 @@ module OpenAI
           end
           def self.variants
           end
+
+          AnArrayOfInputTextAndInputImageArray =
+            T.let(
+              OpenAI::Internal::Type::ArrayOf[
+                OpenAI::Internal::Type::Unknown
+              ],
+              OpenAI::Internal::Type::Converter
+            )
         end
 
         # The role of the message input. One of `user`, `assistant`, `system`, or
data/rbi/openai/models/graders/score_model_grader.rbi:

@@ -92,13 +92,15 @@ module OpenAI
           )
         end
 
-        #
+        # Inputs to the model - can contain template strings.
        sig do
           returns(
             T.any(
               String,
               OpenAI::Responses::ResponseInputText,
-              OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText
+              OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+              OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+              T::Array[T.anything]
             )
           )
         end
@@ -139,14 +141,16 @@ module OpenAI
             T.any(
               String,
               OpenAI::Responses::ResponseInputText::OrHash,
-              OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash
+              OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash,
+              OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage::OrHash,
+              T::Array[T.anything]
             ),
             role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol,
             type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol
           ).returns(T.attached_class)
         end
         def self.new(
-          #
+          # Inputs to the model - can contain template strings.
           content:,
           # The role of the message input. One of `user`, `assistant`, `system`, or
           # `developer`.
@@ -163,7 +167,9 @@ module OpenAI
             T.any(
               String,
               OpenAI::Responses::ResponseInputText,
-              OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText
+              OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+              OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+              T::Array[T.anything]
             ),
             role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol,
             type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol
@@ -173,7 +179,7 @@ module OpenAI
         def to_hash
         end
 
-        #
+        # Inputs to the model - can contain template strings.
         module Content
           extend OpenAI::Internal::Type::Union
 
@@ -182,7 +188,9 @@ module OpenAI
               T.any(
                 String,
                 OpenAI::Responses::ResponseInputText,
-                OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText
+                OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+                OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+                T::Array[T.anything]
               )
             end
 
@@ -220,6 +228,57 @@ module OpenAI
             end
           end
 
+          class InputImage < OpenAI::Internal::Type::BaseModel
+            OrHash =
+              T.type_alias do
+                T.any(
+                  OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage,
+                  OpenAI::Internal::AnyHash
+                )
+              end
+
+            # The URL of the image input.
+            sig { returns(String) }
+            attr_accessor :image_url
+
+            # The type of the image input. Always `input_image`.
+            sig { returns(Symbol) }
+            attr_accessor :type
+
+            # The detail level of the image to be sent to the model. One of `high`, `low`, or
+            # `auto`. Defaults to `auto`.
+            sig { returns(T.nilable(String)) }
+            attr_reader :detail
+
+            sig { params(detail: String).void }
+            attr_writer :detail
+
+            # An image input to the model.
+            sig do
+              params(image_url: String, detail: String, type: Symbol).returns(
+                T.attached_class
+              )
+            end
+            def self.new(
+              # The URL of the image input.
+              image_url:,
+              # The detail level of the image to be sent to the model. One of `high`, `low`, or
+              # `auto`. Defaults to `auto`.
+              detail: nil,
+              # The type of the image input. Always `input_image`.
+              type: :input_image
+            )
+            end
+
+            sig do
+              override.returns(
+                { image_url: String, type: Symbol, detail: String }
+              )
+            end
+            def to_hash
+            end
+          end
+
           sig do
             override.returns(
               T::Array[
@@ -229,6 +288,14 @@ module OpenAI
           end
           def self.variants
           end
+
+          AnArrayOfInputTextAndInputImageArray =
+            T.let(
+              OpenAI::Internal::Type::ArrayOf[
+                OpenAI::Internal::Type::Unknown
+              ],
+              OpenAI::Internal::Type::Converter
+            )
         end
 
         # The role of the message input. One of `user`, `assistant`, `system`, or
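Both grader RBI files pick up the same two additions to the `content` union: a typed `InputImage` part and a plain array variant (`T::Array[T.anything]`, backed by the new `AnArrayOfInputTextAndInputImageArray` converter) for multi-part content. As a rough, hedged sketch of what the new signatures permit from the caller's side; the URL, the `role:`/`type:` values, and the idea of passing the result on to a grader are assumptions for illustration, not taken from this diff:

    require "openai"

    # Hypothetical usage sketch. InputImage's fields (image_url, detail, type)
    # come from the signatures above; all concrete values are assumed.
    image_part =
      OpenAI::Graders::LabelModelGrader::Input::Content::InputImage.new(
        image_url: "https://example.com/chart.png", # assumed URL
        detail: "low" # optional: `high`, `low`, or `auto` (defaults to `auto`)
      )

    grader_input =
      OpenAI::Graders::LabelModelGrader::Input.new(
        content: image_part, # may also be a String, an OutputText, or an Array of parts
        role: :user,         # one of `user`, `assistant`, `system`, `developer`
        type: :message       # assumed value; the diff only shows Type::OrSymbol here
      )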
data/sig/openai/models/eval_create_params.rbs:

@@ -218,6 +218,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+              | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -234,7 +236,34 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                { image_url: String, type: :input_image, detail: String }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs:

@@ -234,6 +234,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+              | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -250,7 +252,34 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                { image_url: String, type: :input_image, detail: String }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/evals/run_cancel_response.rbs:

@@ -350,6 +350,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+              | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -369,7 +371,38 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/evals/run_create_params.rbs:

@@ -313,6 +313,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText
+              | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -332,7 +334,38 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/evals/run_create_response.rbs:

@@ -350,6 +350,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+              | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -369,7 +371,38 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/evals/run_list_response.rbs:

@@ -350,6 +350,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+              | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -369,7 +371,38 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/evals/run_retrieve_response.rbs:

@@ -350,6 +350,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+              | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -369,7 +371,38 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
data/sig/openai/models/graders/label_model_grader.rbs:

@@ -78,6 +78,8 @@ module OpenAI
              String
              | OpenAI::Responses::ResponseInputText
              | OpenAI::Graders::LabelModelGrader::Input::Content::OutputText
+              | OpenAI::Graders::LabelModelGrader::Input::Content::InputImage
+              | ::Array[top]
 
            module Content
              extend OpenAI::Internal::Type::Union
@@ -94,7 +96,34 @@ module OpenAI
                def to_hash: -> { text: String, type: :output_text }
              end
 
+              type input_image =
+                { image_url: String, type: :input_image, detail: String }
+
+              class InputImage < OpenAI::Internal::Type::BaseModel
+                attr_accessor image_url: String
+
+                attr_accessor type: :input_image
+
+                attr_reader detail: String?
+
+                def detail=: (String) -> String
+
+                def initialize: (
+                  image_url: String,
+                  ?detail: String,
+                  ?type: :input_image
+                ) -> void
+
+                def to_hash: -> {
+                  image_url: String,
+                  type: :input_image,
+                  detail: String
+                }
+              end
+
              def self?.variants: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::content]
+
+              AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter
            end
 
            type role = :user | :assistant | :system | :developer
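The RBS signatures describe the same additions in hash terms: the `input_image` alias gives the shape of an image part, and the new `::Array[top]` union member lets `content` be an array of parts. A hedged sketch of a content value that would satisfy these signatures; the text-part shape follows `OpenAI::Responses::ResponseInputText` (unchanged in this release), and the URL and prompt text are assumptions:

    # A single image part, matching the `input_image` alias above.
    image_part = {
      type: :input_image,
      image_url: "https://example.com/receipt.png", # assumed URL
      detail: "auto"                                # optional; defaults to `auto`
    }

    # Mixed multi-part content, which the `::Array[top]` variant (and the
    # AnArrayOfInputTextAndInputImageArray converter above) appears to allow.
    content = [
      { type: :input_text, text: "What total is shown on this receipt?" },
      image_part
    ]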