openai 0.13.0 → 0.14.0
This diff shows the changes between two publicly released versions of this package, as published to its public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -0
- data/README.md +1 -1
- data/lib/openai/helpers/structured_output/json_schema_converter.rb +34 -10
- data/lib/openai/models/eval_create_params.rb +50 -5
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +50 -5
- data/lib/openai/models/evals/run_cancel_response.rb +48 -5
- data/lib/openai/models/evals/run_create_params.rb +50 -5
- data/lib/openai/models/evals/run_create_response.rb +48 -5
- data/lib/openai/models/evals/run_list_response.rb +48 -5
- data/lib/openai/models/evals/run_retrieve_response.rb +48 -5
- data/lib/openai/models/graders/label_model_grader.rb +48 -5
- data/lib/openai/models/graders/score_model_grader.rb +48 -5
- data/lib/openai/models/image_edit_completed_event.rb +198 -0
- data/lib/openai/models/image_edit_params.rb +36 -1
- data/lib/openai/models/image_edit_partial_image_event.rb +135 -0
- data/lib/openai/models/image_edit_stream_event.rb +21 -0
- data/lib/openai/models/image_gen_completed_event.rb +198 -0
- data/lib/openai/models/image_gen_partial_image_event.rb +135 -0
- data/lib/openai/models/image_gen_stream_event.rb +21 -0
- data/lib/openai/models/image_generate_params.rb +13 -1
- data/lib/openai/models/images_response.rb +3 -0
- data/lib/openai/models/responses/response_output_refusal.rb +2 -2
- data/lib/openai/models/responses/tool.rb +30 -1
- data/lib/openai/models.rb +12 -0
- data/lib/openai/resources/images.rb +140 -2
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +6 -0
- data/rbi/openai/helpers/structured_output/json_schema_converter.rbi +4 -0
- data/rbi/openai/models/eval_create_params.rbi +76 -7
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +76 -7
- data/rbi/openai/models/evals/run_cancel_response.rbi +70 -5
- data/rbi/openai/models/evals/run_create_params.rbi +76 -7
- data/rbi/openai/models/evals/run_create_response.rbi +70 -5
- data/rbi/openai/models/evals/run_list_response.rbi +70 -5
- data/rbi/openai/models/evals/run_retrieve_response.rbi +70 -5
- data/rbi/openai/models/graders/label_model_grader.rbi +74 -7
- data/rbi/openai/models/graders/score_model_grader.rbi +74 -7
- data/rbi/openai/models/image_edit_completed_event.rbi +346 -0
- data/rbi/openai/models/image_edit_params.rbi +51 -0
- data/rbi/openai/models/image_edit_partial_image_event.rbi +249 -0
- data/rbi/openai/models/image_edit_stream_event.rbi +22 -0
- data/rbi/openai/models/image_gen_completed_event.rbi +339 -0
- data/rbi/openai/models/image_gen_partial_image_event.rbi +243 -0
- data/rbi/openai/models/image_gen_stream_event.rbi +22 -0
- data/rbi/openai/models/image_generate_params.rbi +12 -0
- data/rbi/openai/models/responses/response_output_refusal.rbi +2 -2
- data/rbi/openai/models/responses/tool.rbi +61 -0
- data/rbi/openai/models.rbi +12 -0
- data/rbi/openai/resources/images.rbi +225 -0
- data/sig/openai/models/eval_create_params.rbs +29 -0
- data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +29 -0
- data/sig/openai/models/evals/run_cancel_response.rbs +33 -0
- data/sig/openai/models/evals/run_create_params.rbs +33 -0
- data/sig/openai/models/evals/run_create_response.rbs +33 -0
- data/sig/openai/models/evals/run_list_response.rbs +33 -0
- data/sig/openai/models/evals/run_retrieve_response.rbs +33 -0
- data/sig/openai/models/graders/label_model_grader.rbs +29 -0
- data/sig/openai/models/graders/score_model_grader.rbs +29 -0
- data/sig/openai/models/image_edit_completed_event.rbs +150 -0
- data/sig/openai/models/image_edit_params.rbs +21 -0
- data/sig/openai/models/image_edit_partial_image_event.rbs +105 -0
- data/sig/openai/models/image_edit_stream_event.rbs +12 -0
- data/sig/openai/models/image_gen_completed_event.rbs +150 -0
- data/sig/openai/models/image_gen_partial_image_event.rbs +105 -0
- data/sig/openai/models/image_gen_stream_event.rbs +12 -0
- data/sig/openai/models/image_generate_params.rbs +5 -0
- data/sig/openai/models/responses/tool.rbs +16 -0
- data/sig/openai/models.rbs +12 -0
- data/sig/openai/resources/images.rbs +38 -0
- metadata +20 -2
data/rbi/openai/models/image_gen_partial_image_event.rbi
ADDED
@@ -0,0 +1,243 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class ImageGenPartialImageEvent < OpenAI::Internal::Type::BaseModel
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::ImageGenPartialImageEvent, OpenAI::Internal::AnyHash)
+        end
+
+      # Base64-encoded partial image data, suitable for rendering as an image.
+      sig { returns(String) }
+      attr_accessor :b64_json
+
+      # The background setting for the requested image.
+      sig do
+        returns(OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol)
+      end
+      attr_accessor :background
+
+      # The Unix timestamp when the event was created.
+      sig { returns(Integer) }
+      attr_accessor :created_at
+
+      # The output format for the requested image.
+      sig do
+        returns(OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol)
+      end
+      attr_accessor :output_format
+
+      # 0-based index for the partial image (streaming).
+      sig { returns(Integer) }
+      attr_accessor :partial_image_index
+
+      # The quality setting for the requested image.
+      sig { returns(OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol) }
+      attr_accessor :quality
+
+      # The size of the requested image.
+      sig { returns(OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol) }
+      attr_accessor :size
+
+      # The type of the event. Always `image_generation.partial_image`.
+      sig { returns(Symbol) }
+      attr_accessor :type
+
+      # Emitted when a partial image is available during image generation streaming.
+      sig do
+        params(
+          b64_json: String,
+          background: OpenAI::ImageGenPartialImageEvent::Background::OrSymbol,
+          created_at: Integer,
+          output_format:
+            OpenAI::ImageGenPartialImageEvent::OutputFormat::OrSymbol,
+          partial_image_index: Integer,
+          quality: OpenAI::ImageGenPartialImageEvent::Quality::OrSymbol,
+          size: OpenAI::ImageGenPartialImageEvent::Size::OrSymbol,
+          type: Symbol
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # Base64-encoded partial image data, suitable for rendering as an image.
+        b64_json:,
+        # The background setting for the requested image.
+        background:,
+        # The Unix timestamp when the event was created.
+        created_at:,
+        # The output format for the requested image.
+        output_format:,
+        # 0-based index for the partial image (streaming).
+        partial_image_index:,
+        # The quality setting for the requested image.
+        quality:,
+        # The size of the requested image.
+        size:,
+        # The type of the event. Always `image_generation.partial_image`.
+        type: :"image_generation.partial_image"
+      )
+      end
+
+      sig do
+        override.returns(
+          {
+            b64_json: String,
+            background:
+              OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol,
+            created_at: Integer,
+            output_format:
+              OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol,
+            partial_image_index: Integer,
+            quality: OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol,
+            size: OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol,
+            type: Symbol
+          }
+        )
+      end
+      def to_hash
+      end
+
+      # The background setting for the requested image.
+      module Background
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol =
+          T.type_alias do
+            T.all(Symbol, OpenAI::ImageGenPartialImageEvent::Background)
+          end
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        TRANSPARENT =
+          T.let(
+            :transparent,
+            OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol
+          )
+        OPAQUE =
+          T.let(
+            :opaque,
+            OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol
+          )
+        AUTO =
+          T.let(
+            :auto,
+            OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol
+          )
+
+        sig do
+          override.returns(
+            T::Array[
+              OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol
+            ]
+          )
+        end
+        def self.values
+        end
+      end
+
+      # The output format for the requested image.
+      module OutputFormat
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol =
+          T.type_alias do
+            T.all(Symbol, OpenAI::ImageGenPartialImageEvent::OutputFormat)
+          end
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        PNG =
+          T.let(
+            :png,
+            OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol
+          )
+        WEBP =
+          T.let(
+            :webp,
+            OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol
+          )
+        JPEG =
+          T.let(
+            :jpeg,
+            OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol
+          )
+
+        sig do
+          override.returns(
+            T::Array[
+              OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol
+            ]
+          )
+        end
+        def self.values
+        end
+      end
+
+      # The quality setting for the requested image.
+      module Quality
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol =
+          T.type_alias do
+            T.all(Symbol, OpenAI::ImageGenPartialImageEvent::Quality)
+          end
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        LOW =
+          T.let(:low, OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol)
+        MEDIUM =
+          T.let(
+            :medium,
+            OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol
+          )
+        HIGH =
+          T.let(:high, OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol)
+        AUTO =
+          T.let(:auto, OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol)
+
+        sig do
+          override.returns(
+            T::Array[OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol]
+          )
+        end
+        def self.values
+        end
+      end
+
+      # The size of the requested image.
+      module Size
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol =
+          T.type_alias do
+            T.all(Symbol, OpenAI::ImageGenPartialImageEvent::Size)
+          end
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        SIZE_1024X1024 =
+          T.let(
+            :"1024x1024",
+            OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol
+          )
+        SIZE_1024X1536 =
+          T.let(
+            :"1024x1536",
+            OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol
+          )
+        SIZE_1536X1024 =
+          T.let(
+            :"1536x1024",
+            OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol
+          )
+        AUTO =
+          T.let(:auto, OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol)
+
+        sig do
+          override.returns(
+            T::Array[OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol]
+          )
+        end
+        def self.values
+        end
+      end
+    end
+  end
+end
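For orientation, here is a small construction sketch (not taken from the gem) exercising the signature above. In practice these events are produced by the SDK while streaming rather than built by hand, and the field values below are purely illustrative.

```ruby
require "base64"

# Illustrative values only; type defaults to :"image_generation.partial_image".
event =
  OpenAI::Models::ImageGenPartialImageEvent.new(
    b64_json: Base64.strict_encode64("fake image bytes for illustration"),
    background: :auto,
    created_at: Time.now.to_i,
    output_format: :png,
    partial_image_index: 0,
    quality: :auto,
    size: :"1024x1024"
  )

event.to_hash.fetch(:type) # => :"image_generation.partial_image"
```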
data/rbi/openai/models/image_gen_stream_event.rbi
ADDED
@@ -0,0 +1,22 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    # Emitted when a partial image is available during image generation streaming.
+    module ImageGenStreamEvent
+      extend OpenAI::Internal::Type::Union
+
+      Variants =
+        T.type_alias do
+          T.any(
+            OpenAI::ImageGenPartialImageEvent,
+            OpenAI::ImageGenCompletedEvent
+          )
+        end
+
+      sig { override.returns(T::Array[OpenAI::ImageGenStreamEvent::Variants]) }
+      def self.variants
+      end
+    end
+  end
+end
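A sketch of consuming this union follows. The streaming entry point itself lives in `data/lib/openai/resources/images.rb` (not excerpted here), so `stream` below is a placeholder for whatever enumerable of `ImageGenStreamEvent` variants that method yields; it also assumes the completed event exposes the same `b64_json`/`output_format` accessors as the partial event (its `.rbi` is listed above but not excerpted).

```ruby
require "base64"

# `stream` is a placeholder for the enumerable of image-generation stream
# events yielded by the streaming method in resources/images.rb.
stream.each do |event|
  case event
  when OpenAI::Models::ImageGenPartialImageEvent
    # Write each partial frame so it can be previewed while streaming continues.
    File.binwrite(
      "partial_#{event.partial_image_index}.#{event.output_format}",
      Base64.decode64(event.b64_json)
    )
  when OpenAI::Models::ImageGenCompletedEvent
    # Assumes the completed event also carries b64_json/output_format.
    File.binwrite("final.#{event.output_format}", Base64.decode64(event.b64_json))
  end
end
```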
data/rbi/openai/models/image_generate_params.rbi
CHANGED
@@ -60,6 +60,12 @@ module OpenAI
       end
       attr_accessor :output_format
 
+      # The number of partial images to generate. This parameter is used for streaming
+      # responses that return partial images. Value must be between 0 and 3. When set to
+      # 0, the response will be a single image sent in one streaming event.
+      sig { returns(T.nilable(Integer)) }
+      attr_accessor :partial_images
+
       # The quality of the image that will be generated.
       #
       # - `auto` (default value) will automatically select the best quality for the
@@ -116,6 +122,7 @@ module OpenAI
          output_compression: T.nilable(Integer),
          output_format:
            T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol),
+          partial_images: T.nilable(Integer),
          quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol),
          response_format:
            T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol),
@@ -155,6 +162,10 @@ module OpenAI
        # The format in which the generated images are returned. This parameter is only
        # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`.
        output_format: nil,
+        # The number of partial images to generate. This parameter is used for streaming
+        # responses that return partial images. Value must be between 0 and 3. When set to
+        # 0, the response will be a single image sent in one streaming event.
+        partial_images: nil,
        # The quality of the image that will be generated.
        #
        # - `auto` (default value) will automatically select the best quality for the
@@ -199,6 +210,7 @@ module OpenAI
          output_compression: T.nilable(Integer),
          output_format:
            T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol),
+          partial_images: T.nilable(Integer),
          quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol),
          response_format:
            T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol),
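A minimal sketch of the new parameter's shape, passed through the gem's usual `client.images.generate` call. Note that `partial_images` only has an effect with the streaming variant of the request added in `data/lib/openai/resources/images.rb`, which this excerpt does not show; the prompt and option values are illustrative.

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# partial_images accepts 0..3; 0 means the image arrives as a single
# streaming event. Only meaningful when the request is streamed.
client.images.generate(
  model: :"gpt-image-1",
  prompt: "A watercolor lighthouse at dusk",
  partial_images: 2,
  output_format: :png
)
```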
data/rbi/openai/models/responses/response_output_refusal.rbi
CHANGED
@@ -12,7 +12,7 @@ module OpenAI
            )
          end
 
-        # The refusal
+        # The refusal explanation from the model.
        sig { returns(String) }
        attr_accessor :refusal
 
@@ -23,7 +23,7 @@ module OpenAI
        # A refusal from the model.
        sig { params(refusal: String, type: Symbol).returns(T.attached_class) }
        def self.new(
-          # The refusal
+          # The refusal explanation from the model.
          refusal:,
          # The type of the refusal. Always `refusal`.
          type: :refusal
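A tiny construction sketch for the class whose docstring changed above; per the signature shown, `type` defaults to `:refusal`, so only the refusal text is required (the text here is illustrative).

```ruby
refusal =
  OpenAI::Models::Responses::ResponseOutputRefusal.new(
    refusal: "I can't help with that request."
  )

refusal.type # => :refusal
```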
data/rbi/openai/models/responses/tool.rbi
CHANGED
@@ -565,6 +565,18 @@ module OpenAI
          end
          attr_writer :background
 
+          # Control how much effort the model will exert to match the style and features,
+          # especially facial features, of input images. This parameter is only supported
+          # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+          sig do
+            returns(
+              T.nilable(
+                OpenAI::Responses::Tool::ImageGeneration::InputFidelity::OrSymbol
+              )
+            )
+          end
+          attr_accessor :input_fidelity
+
          # Optional mask for inpainting. Contains `image_url` (string, optional) and
          # `file_id` (string, optional).
          sig do
@@ -695,6 +707,10 @@ module OpenAI
            params(
              background:
                OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol,
+              input_fidelity:
+                T.nilable(
+                  OpenAI::Responses::Tool::ImageGeneration::InputFidelity::OrSymbol
+                ),
              input_image_mask:
                OpenAI::Responses::Tool::ImageGeneration::InputImageMask::OrHash,
              model: OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol,
@@ -714,6 +730,10 @@ module OpenAI
            # Background type for the generated image. One of `transparent`, `opaque`, or
            # `auto`. Default: `auto`.
            background: nil,
+            # Control how much effort the model will exert to match the style and features,
+            # especially facial features, of input images. This parameter is only supported
+            # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+            input_fidelity: nil,
            # Optional mask for inpainting. Contains `image_url` (string, optional) and
            # `file_id` (string, optional).
            input_image_mask: nil,
@@ -746,6 +766,10 @@ module OpenAI
              type: Symbol,
              background:
                OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol,
+              input_fidelity:
+                T.nilable(
+                  OpenAI::Responses::Tool::ImageGeneration::InputFidelity::OrSymbol
+                ),
              input_image_mask:
                OpenAI::Responses::Tool::ImageGeneration::InputImageMask,
              model:
@@ -806,6 +830,43 @@ module OpenAI
            end
          end
 
+          # Control how much effort the model will exert to match the style and features,
+          # especially facial features, of input images. This parameter is only supported
+          # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+          module InputFidelity
+            extend OpenAI::Internal::Type::Enum
+
+            TaggedSymbol =
+              T.type_alias do
+                T.all(
+                  Symbol,
+                  OpenAI::Responses::Tool::ImageGeneration::InputFidelity
+                )
+              end
+            OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+            HIGH =
+              T.let(
+                :high,
+                OpenAI::Responses::Tool::ImageGeneration::InputFidelity::TaggedSymbol
+              )
+            LOW =
+              T.let(
+                :low,
+                OpenAI::Responses::Tool::ImageGeneration::InputFidelity::TaggedSymbol
+              )
+
+            sig do
+              override.returns(
+                T::Array[
+                  OpenAI::Responses::Tool::ImageGeneration::InputFidelity::TaggedSymbol
+                ]
+              )
+            end
+            def self.values
+            end
+          end
+
          class InputImageMask < OpenAI::Internal::Type::BaseModel
            OrHash =
              T.type_alias do
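To show where `input_fidelity` surfaces in practice, here is a hedged sketch of configuring the `image_generation` tool on a Responses API call, reusing the `client` from the earlier sketch. The tool is passed as a plain hash in the gem's usual style; the model name and prompt are illustrative, and the fidelity values come from the `InputFidelity` enum above.

```ruby
client.responses.create(
  model: "gpt-4.1",
  input: "Add a red scarf to the person in the attached photo.",
  tools: [
    {
      type: :image_generation,
      # New in 0.14.0: :high preserves input-image (especially facial)
      # features more faithfully; :low is the default. gpt-image-1 only.
      input_fidelity: :high
    }
  ]
)
```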
data/rbi/openai/models.rbi
CHANGED
@@ -115,10 +115,22 @@ module OpenAI
 
   ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams
 
+  ImageEditCompletedEvent = OpenAI::Models::ImageEditCompletedEvent
+
   ImageEditParams = OpenAI::Models::ImageEditParams
 
+  ImageEditPartialImageEvent = OpenAI::Models::ImageEditPartialImageEvent
+
+  ImageEditStreamEvent = OpenAI::Models::ImageEditStreamEvent
+
+  ImageGenCompletedEvent = OpenAI::Models::ImageGenCompletedEvent
+
   ImageGenerateParams = OpenAI::Models::ImageGenerateParams
 
+  ImageGenPartialImageEvent = OpenAI::Models::ImageGenPartialImageEvent
+
+  ImageGenStreamEvent = OpenAI::Models::ImageGenStreamEvent
+
   ImageModel = OpenAI::Models::ImageModel
 
   ImagesResponse = OpenAI::Models::ImagesResponse
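These new constants are plain aliases defined under `module OpenAI`, so the shorthand and the fully qualified name resolve to the same class:

```ruby
# Both constants point at the same class object.
OpenAI::ImageGenStreamEvent.equal?(OpenAI::Models::ImageGenStreamEvent) # => true
```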