openai 0.8.0 → 0.10.0
This diff compares publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between those versions as published.
- checksums.yaml +4 -4
- data/CHANGELOG.md +41 -0
- data/README.md +115 -4
- data/lib/openai/errors.rb +22 -0
- data/lib/openai/internal/type/array_of.rb +6 -1
- data/lib/openai/internal/type/base_model.rb +76 -24
- data/lib/openai/internal/type/boolean.rb +7 -1
- data/lib/openai/internal/type/converter.rb +42 -34
- data/lib/openai/internal/type/enum.rb +10 -2
- data/lib/openai/internal/type/file_input.rb +6 -1
- data/lib/openai/internal/type/hash_of.rb +6 -1
- data/lib/openai/internal/type/union.rb +12 -7
- data/lib/openai/internal/type/unknown.rb +7 -1
- data/lib/openai/models/audio/speech_create_params.rb +23 -2
- data/lib/openai/models/audio/transcription.rb +118 -1
- data/lib/openai/models/audio/transcription_text_done_event.rb +80 -1
- data/lib/openai/models/audio/transcription_verbose.rb +31 -1
- data/lib/openai/models/chat/chat_completion.rb +1 -0
- data/lib/openai/models/chat/chat_completion_chunk.rb +1 -0
- data/lib/openai/models/chat/completion_create_params.rb +1 -0
- data/lib/openai/models/fine_tuning/job_create_params.rb +4 -2
- data/lib/openai/models/image_edit_params.rb +35 -1
- data/lib/openai/models/responses/response.rb +41 -6
- data/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +17 -8
- data/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +14 -10
- data/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +11 -10
- data/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +11 -10
- data/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +11 -10
- data/lib/openai/models/responses/response_code_interpreter_tool_call.rb +49 -78
- data/lib/openai/models/responses/response_create_params.rb +41 -32
- data/lib/openai/models/responses/response_output_text.rb +18 -2
- data/lib/openai/models/responses/response_prompt.rb +63 -0
- data/lib/openai/models/responses/response_stream_event.rb +2 -2
- data/lib/openai/resources/audio/speech.rb +3 -1
- data/lib/openai/resources/chat/completions.rb +8 -0
- data/lib/openai/resources/fine_tuning/jobs.rb +2 -2
- data/lib/openai/resources/images.rb +5 -1
- data/lib/openai/resources/responses.rb +18 -14
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +1 -0
- data/rbi/openai/errors.rbi +16 -0
- data/rbi/openai/internal/type/boolean.rbi +2 -0
- data/rbi/openai/internal/type/converter.rbi +15 -15
- data/rbi/openai/internal/type/union.rbi +5 -0
- data/rbi/openai/internal/type/unknown.rbi +2 -0
- data/rbi/openai/models/audio/speech_create_params.rbi +59 -2
- data/rbi/openai/models/audio/transcription.rbi +213 -3
- data/rbi/openai/models/audio/transcription_text_done_event.rbi +146 -1
- data/rbi/openai/models/audio/transcription_verbose.rbi +47 -0
- data/rbi/openai/models/chat/chat_completion.rbi +5 -0
- data/rbi/openai/models/chat/chat_completion_chunk.rbi +5 -0
- data/rbi/openai/models/chat/completion_create_params.rbi +5 -0
- data/rbi/openai/models/fine_tuning/job_create_params.rbi +8 -4
- data/rbi/openai/models/image_edit_params.rbi +51 -0
- data/rbi/openai/models/responses/response.rbi +66 -7
- data/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +17 -7
- data/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +13 -5
- data/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +13 -21
- data/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +13 -21
- data/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +13 -21
- data/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +83 -125
- data/rbi/openai/models/responses/response_create_params.rbi +107 -64
- data/rbi/openai/models/responses/response_output_text.rbi +26 -4
- data/rbi/openai/models/responses/response_prompt.rbi +120 -0
- data/rbi/openai/resources/audio/speech.rbi +6 -1
- data/rbi/openai/resources/fine_tuning/jobs.rbi +6 -4
- data/rbi/openai/resources/images.rbi +11 -0
- data/rbi/openai/resources/responses.rbi +56 -50
- data/sig/openai/errors.rbs +9 -0
- data/sig/openai/internal/type/converter.rbs +7 -1
- data/sig/openai/models/audio/speech_create_params.rbs +21 -1
- data/sig/openai/models/audio/transcription.rbs +95 -3
- data/sig/openai/models/audio/transcription_text_done_event.rbs +72 -2
- data/sig/openai/models/audio/transcription_verbose.rbs +21 -0
- data/sig/openai/models/chat/chat_completion.rbs +2 -1
- data/sig/openai/models/chat/chat_completion_chunk.rbs +2 -1
- data/sig/openai/models/chat/completion_create_params.rbs +2 -1
- data/sig/openai/models/image_edit_params.rbs +22 -0
- data/sig/openai/models/responses/response.rbs +22 -5
- data/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +5 -0
- data/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +5 -0
- data/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +4 -4
- data/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +4 -4
- data/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +4 -4
- data/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +31 -52
- data/sig/openai/models/responses/response_create_params.rbs +25 -11
- data/sig/openai/models/responses/response_output_text.rbs +15 -1
- data/sig/openai/models/responses/response_prompt.rbs +44 -0
- data/sig/openai/resources/audio/speech.rbs +1 -0
- data/sig/openai/resources/images.rbs +2 -0
- data/sig/openai/resources/responses.rbs +6 -4
- metadata +5 -2
data/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi

@@ -16,22 +16,29 @@ module OpenAI
 sig { returns(String) }
 attr_accessor :id

-# The code to run.
-sig { returns(String) }
+# The code to run, or null if not available.
+sig { returns(T.nilable(String)) }
 attr_accessor :code

-# The
+# The ID of the container used to run the code.
+sig { returns(String) }
+attr_accessor :container_id
+
+# The outputs generated by the code interpreter, such as logs or images. Can be
+# null if no outputs are available.
 sig do
 returns(
-T
-T
-
-
-
-
+T.nilable(
+T::Array[
+T.any(
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs,
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image
+)
+]
+)
 )
 end
-attr_accessor :
+attr_accessor :outputs

 # The status of the code interpreter tool call.
 sig do
@@ -45,42 +52,38 @@ module OpenAI
 sig { returns(Symbol) }
 attr_accessor :type

-# The ID of the container used to run the code.
-sig { returns(T.nilable(String)) }
-attr_reader :container_id
-
-sig { params(container_id: String).void }
-attr_writer :container_id
-
 # A tool call to run code.
 sig do
 params(
 id: String,
-code: String,
-
-
-
-
-
-
-
+code: T.nilable(String),
+container_id: String,
+outputs:
+T.nilable(
+T::Array[
+T.any(
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs::OrHash,
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image::OrHash
+)
+]
+),
 status:
 OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol,
-container_id: String,
 type: Symbol
 ).returns(T.attached_class)
 end
 def self.new(
 # The unique ID of the code interpreter tool call.
 id:,
-# The code to run.
+# The code to run, or null if not available.
 code:,
-# The
-
+# The ID of the container used to run the code.
+container_id:,
+# The outputs generated by the code interpreter, such as logs or images. Can be
+# null if no outputs are available.
+outputs:,
 # The status of the code interpreter tool call.
 status:,
-# The ID of the container used to run the code.
-container_id: nil,
 # The type of the code interpreter tool call. Always `code_interpreter_call`.
 type: :code_interpreter_call
 )
@@ -90,33 +93,35 @@ module OpenAI
 override.returns(
 {
 id: String,
-code: String,
-
-
-
-
-
-
-
+code: T.nilable(String),
+container_id: String,
+outputs:
+T.nilable(
+T::Array[
+T.any(
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs,
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image
+)
+]
+),
 status:
 OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol,
-type: Symbol
-container_id: String
+type: Symbol
 }
 )
 end
 def to_hash
 end

-# The output
-module
+# The logs output from the code interpreter.
+module Output
 extend OpenAI::Internal::Type::Union

 Variants =
 T.type_alias do
 T.any(
-OpenAI::Responses::ResponseCodeInterpreterToolCall::
-OpenAI::Responses::ResponseCodeInterpreterToolCall::
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs,
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image
 )
 end

@@ -124,25 +129,25 @@ module OpenAI
 OrHash =
 T.type_alias do
 T.any(
-OpenAI::Responses::ResponseCodeInterpreterToolCall::
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs,
 OpenAI::Internal::AnyHash
 )
 end

-# The logs
+# The logs output from the code interpreter.
 sig { returns(String) }
 attr_accessor :logs

-# The type of the
+# The type of the output. Always 'logs'.
 sig { returns(Symbol) }
 attr_accessor :type

-# The output
+# The logs output from the code interpreter.
 sig { params(logs: String, type: Symbol).returns(T.attached_class) }
 def self.new(
-# The logs
+# The logs output from the code interpreter.
 logs:,
-# The type of the
+# The type of the output. Always 'logs'.
 type: :logs
 )
 end
@@ -152,99 +157,42 @@ module OpenAI
 end
 end

-class
+class Image < OpenAI::Internal::Type::BaseModel
 OrHash =
 T.type_alias do
 T.any(
-OpenAI::Responses::ResponseCodeInterpreterToolCall::
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image,
 OpenAI::Internal::AnyHash
 )
 end

-
-returns(
-T::Array[
-OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File
-]
-)
-end
-attr_accessor :files
-
-# The type of the code interpreter file output. Always `files`.
+# The type of the output. Always 'image'.
 sig { returns(Symbol) }
 attr_accessor :type

-# The
-sig
-
-
-
-
-],
-type: Symbol
-).returns(T.attached_class)
-end
+# The URL of the image output from the code interpreter.
+sig { returns(String) }
+attr_accessor :url
+
+# The image output from the code interpreter.
+sig { params(url: String, type: Symbol).returns(T.attached_class) }
 def self.new(
-
-
-type
+# The URL of the image output from the code interpreter.
+url:,
+# The type of the output. Always 'image'.
+type: :image
 )
 end

-sig
-override.returns(
-{
-files:
-T::Array[
-OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File
-],
-type: Symbol
-}
-)
-end
+sig { override.returns({ type: Symbol, url: String }) }
 def to_hash
 end
-
-class File < OpenAI::Internal::Type::BaseModel
-OrHash =
-T.type_alias do
-T.any(
-OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File,
-OpenAI::Internal::AnyHash
-)
-end
-
-# The ID of the file.
-sig { returns(String) }
-attr_accessor :file_id
-
-# The MIME type of the file.
-sig { returns(String) }
-attr_accessor :mime_type
-
-sig do
-params(file_id: String, mime_type: String).returns(
-T.attached_class
-)
-end
-def self.new(
-# The ID of the file.
-file_id:,
-# The MIME type of the file.
-mime_type:
-)
-end
-
-sig { override.returns({ file_id: String, mime_type: String }) }
-def to_hash
-end
-end
 end

 sig do
 override.returns(
 T::Array[
-OpenAI::Responses::ResponseCodeInterpreterToolCall::
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Variants
 ]
 )
 end
@@ -270,14 +218,24 @@ module OpenAI
 :in_progress,
 OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol
 )
+COMPLETED =
+T.let(
+:completed,
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol
+)
+INCOMPLETE =
+T.let(
+:incomplete,
+OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol
+)
 INTERPRETING =
 T.let(
 :interpreting,
 OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol
 )
-
+FAILED =
 T.let(
-:
+:failed,
 OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol
 )

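The hunks above reshape `ResponseCodeInterpreterToolCall`: `code` becomes nilable, `container_id` becomes a required field, the removed `Result`-based output classes are replaced by an `outputs` array of `Output::Logs` / `Output::Image`, and the `Status` enum gains `completed`, `incomplete`, and `failed`. The sketch below shows one way the new shape might be consumed; it assumes a configured client, and the model name and code-interpreter tool payload are illustrative placeholders, not taken from this diff.

```ruby
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

# Illustrative request; tool payload and model name are assumptions, not part of this diff.
response = client.responses.create(
  model: "gpt-4.1",
  tools: [{type: :code_interpreter, container: {type: :auto}}],
  input: "Use Python to compute the 40th Fibonacci number."
)

response.output.each do |item|
  next unless item.is_a?(OpenAI::Responses::ResponseCodeInterpreterToolCall)

  puts "status:    #{item.status}"        # enum extended with :completed, :incomplete, :failed
  puts "container: #{item.container_id}"  # now a required String
  puts "code:      #{item.code.inspect}"  # may be nil in 0.10.0

  (item.outputs || []).each do |output|   # outputs is nilable
    case output
    when OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs
      puts output.logs                    # plain-text logs output
    when OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image
      puts output.url                     # URL of an image output
    end
  end
end
```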
data/rbi/openai/models/responses/response_create_params.rbi

@@ -15,36 +15,6 @@ module OpenAI
 )
 end

-# Text, image, or file inputs to the model, used to generate a response.
-#
-# Learn more:
-#
-# - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-# - [Image inputs](https://platform.openai.com/docs/guides/images)
-# - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
-# - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
-# - [Function calling](https://platform.openai.com/docs/guides/function-calling)
-sig do
-returns(OpenAI::Responses::ResponseCreateParams::Input::Variants)
-end
-attr_accessor :input
-
-# Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
-# wide range of models with different capabilities, performance characteristics,
-# and price points. Refer to the
-# [model guide](https://platform.openai.com/docs/models) to browse and compare
-# available models.
-sig do
-returns(
-T.any(
-String,
-OpenAI::ChatModel::OrSymbol,
-OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
-)
-)
-end
-attr_accessor :model
-
 # Whether to run the model response in the background.
 # [Learn more](https://platform.openai.com/docs/guides/background).
 sig { returns(T.nilable(T::Boolean)) }
@@ -72,8 +42,30 @@ module OpenAI
 end
 attr_accessor :include

-#
-#
+# Text, image, or file inputs to the model, used to generate a response.
+#
+# Learn more:
+#
+# - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
+# - [Image inputs](https://platform.openai.com/docs/guides/images)
+# - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
+# - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
+# - [Function calling](https://platform.openai.com/docs/guides/function-calling)
+sig do
+returns(
+T.nilable(OpenAI::Responses::ResponseCreateParams::Input::Variants)
+)
+end
+attr_reader :input
+
+sig do
+params(
+input: OpenAI::Responses::ResponseCreateParams::Input::Variants
+).void
+end
+attr_writer :input
+
+# A system (or developer) message inserted into the model's context.
 #
 # When using along with `previous_response_id`, the instructions from a previous
 # response will not be carried over to the next response. This makes it simple to
@@ -96,6 +88,36 @@ module OpenAI
 sig { returns(T.nilable(T::Hash[Symbol, String])) }
 attr_accessor :metadata

+# Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
+# wide range of models with different capabilities, performance characteristics,
+# and price points. Refer to the
+# [model guide](https://platform.openai.com/docs/models) to browse and compare
+# available models.
+sig do
+returns(
+T.nilable(
+T.any(
+String,
+OpenAI::ChatModel::OrSymbol,
+OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
+)
+)
+)
+end
+attr_reader :model
+
+sig do
+params(
+model:
+T.any(
+String,
+OpenAI::ChatModel::OrSymbol,
+OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
+)
+).void
+end
+attr_writer :model
+
 # Whether to allow the model to run tool calls in parallel.
 sig { returns(T.nilable(T::Boolean)) }
 attr_accessor :parallel_tool_calls
@@ -106,6 +128,18 @@ module OpenAI
 sig { returns(T.nilable(String)) }
 attr_accessor :previous_response_id

+# Reference to a prompt template and its variables.
+# [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
+sig { returns(T.nilable(OpenAI::Responses::ResponsePrompt)) }
+attr_reader :prompt
+
+sig do
+params(
+prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash)
+).void
+end
+attr_writer :prompt
+
 # **o-series models only**
 #
 # Configuration options for
@@ -288,23 +322,24 @@ module OpenAI

 sig do
 params(
-input: OpenAI::Responses::ResponseCreateParams::Input::Variants,
-model:
-T.any(
-String,
-OpenAI::ChatModel::OrSymbol,
-OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
-),
 background: T.nilable(T::Boolean),
 include:
 T.nilable(
 T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]
 ),
+input: OpenAI::Responses::ResponseCreateParams::Input::Variants,
 instructions: T.nilable(String),
 max_output_tokens: T.nilable(Integer),
 metadata: T.nilable(T::Hash[Symbol, String]),
+model:
+T.any(
+String,
+OpenAI::ChatModel::OrSymbol,
+OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
+),
 parallel_tool_calls: T.nilable(T::Boolean),
 previous_response_id: T.nilable(String),
+prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash),
 reasoning: T.nilable(OpenAI::Reasoning::OrHash),
 service_tier:
 T.nilable(
@@ -342,22 +377,6 @@ module OpenAI
 ).returns(T.attached_class)
 end
 def self.new(
-# Text, image, or file inputs to the model, used to generate a response.
-#
-# Learn more:
-#
-# - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-# - [Image inputs](https://platform.openai.com/docs/guides/images)
-# - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
-# - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
-# - [Function calling](https://platform.openai.com/docs/guides/function-calling)
-input:,
-# Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
-# wide range of models with different capabilities, performance characteristics,
-# and price points. Refer to the
-# [model guide](https://platform.openai.com/docs/models) to browse and compare
-# available models.
-model:,
 # Whether to run the model response in the background.
 # [Learn more](https://platform.openai.com/docs/guides/background).
 background: nil,
@@ -377,8 +396,17 @@ module OpenAI
 # - `code_interpreter_call.outputs`: Includes the outputs of python code execution
 # in code interpreter tool call items.
 include: nil,
-#
-#
+# Text, image, or file inputs to the model, used to generate a response.
+#
+# Learn more:
+#
+# - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
+# - [Image inputs](https://platform.openai.com/docs/guides/images)
+# - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
+# - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
+# - [Function calling](https://platform.openai.com/docs/guides/function-calling)
+input: nil,
+# A system (or developer) message inserted into the model's context.
 #
 # When using along with `previous_response_id`, the instructions from a previous
 # response will not be carried over to the next response. This makes it simple to
@@ -395,12 +423,21 @@ module OpenAI
 # Keys are strings with a maximum length of 64 characters. Values are strings with
 # a maximum length of 512 characters.
 metadata: nil,
+# Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
+# wide range of models with different capabilities, performance characteristics,
+# and price points. Refer to the
+# [model guide](https://platform.openai.com/docs/models) to browse and compare
+# available models.
+model: nil,
 # Whether to allow the model to run tool calls in parallel.
 parallel_tool_calls: nil,
 # The unique ID of the previous response to the model. Use this to create
 # multi-turn conversations. Learn more about
 # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
 previous_response_id: nil,
+# Reference to a prompt template and its variables.
+# [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
+prompt: nil,
 # **o-series models only**
 #
 # Configuration options for
@@ -481,23 +518,24 @@ module OpenAI
 sig do
 override.returns(
 {
-input: OpenAI::Responses::ResponseCreateParams::Input::Variants,
-model:
-T.any(
-String,
-OpenAI::ChatModel::OrSymbol,
-OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
-),
 background: T.nilable(T::Boolean),
 include:
 T.nilable(
 T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]
 ),
+input: OpenAI::Responses::ResponseCreateParams::Input::Variants,
 instructions: T.nilable(String),
 max_output_tokens: T.nilable(Integer),
 metadata: T.nilable(T::Hash[Symbol, String]),
+model:
+T.any(
+String,
+OpenAI::ChatModel::OrSymbol,
+OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol
+),
 parallel_tool_calls: T.nilable(T::Boolean),
 previous_response_id: T.nilable(String),
+prompt: T.nilable(OpenAI::Responses::ResponsePrompt),
 reasoning: T.nilable(OpenAI::Reasoning),
 service_tier:
 T.nilable(
@@ -611,6 +649,11 @@ module OpenAI
 :flex,
 OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol
 )
+SCALE =
+T.let(
+:scale,
+OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol
+)

 sig do
 override.returns(
data/rbi/openai/models/responses/response_output_text.rbi

@@ -131,6 +131,10 @@ module OpenAI
 sig { returns(String) }
 attr_accessor :file_id

+# The filename of the file cited.
+sig { returns(String) }
+attr_accessor :filename
+
 # The index of the file in the list of files.
 sig { returns(Integer) }
 attr_accessor :index
@@ -141,13 +145,18 @@ module OpenAI

 # A citation to a file.
 sig do
-params(
-
-
+params(
+file_id: String,
+filename: String,
+index: Integer,
+type: Symbol
+).returns(T.attached_class)
 end
 def self.new(
 # The ID of the file.
 file_id:,
+# The filename of the file cited.
+filename:,
 # The index of the file in the list of files.
 index:,
 # The type of the file citation. Always `file_citation`.
@@ -157,7 +166,12 @@ module OpenAI

 sig do
 override.returns(
-{
+{
+file_id: String,
+filename: String,
+index: Integer,
+type: Symbol
+}
 )
 end
 def to_hash
@@ -253,6 +267,10 @@ module OpenAI
 sig { returns(String) }
 attr_accessor :file_id

+# The filename of the container file cited.
+sig { returns(String) }
+attr_accessor :filename
+
 # The index of the first character of the container file citation in the message.
 sig { returns(Integer) }
 attr_accessor :start_index
@@ -267,6 +285,7 @@ module OpenAI
 container_id: String,
 end_index: Integer,
 file_id: String,
+filename: String,
 start_index: Integer,
 type: Symbol
 ).returns(T.attached_class)
@@ -278,6 +297,8 @@ module OpenAI
 end_index:,
 # The ID of the file.
 file_id:,
+# The filename of the container file cited.
+filename:,
 # The index of the first character of the container file citation in the message.
 start_index:,
 # The type of the container file citation. Always `container_file_citation`.
@@ -291,6 +312,7 @@ module OpenAI
 container_id: String,
 end_index: Integer,
 file_id: String,
+filename: String,
 start_index: Integer,
 type: Symbol
 }
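The `ResponseOutputText` hunks add a required `filename` to both the file citation and container file citation annotations. Below is a sketch of reading it, assuming `response` is an `OpenAI::Responses::Response` whose message content carries such annotations; the annotation class names follow the gem's generated model namespace and should be treated as assumptions where this diff does not show them.

```ruby
# Assumes `response` came from client.responses.create(...) with a tool (e.g. file_search
# or code_interpreter) that produces file citations in the output text.
response.output.each do |item|
  next unless item.respond_to?(:content)

  Array(item.content).each do |part|
    next unless part.is_a?(OpenAI::Responses::ResponseOutputText)

    part.annotations.each do |annotation|
      case annotation
      when OpenAI::Responses::ResponseOutputText::Annotation::FileCitation
        # `filename` is new in 0.10.0, alongside the existing `file_id` and `index`.
        puts "#{annotation.filename} (#{annotation.file_id}) @ index #{annotation.index}"
      when OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation
        puts "#{annotation.filename} in container #{annotation.container_id}, " \
             "chars #{annotation.start_index}..#{annotation.end_index}"
      end
    end
  end
end
```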