openai 0.22.1 → 0.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/README.md +1 -1
- data/lib/openai/models/realtime/audio_transcription.rb +60 -0
- data/lib/openai/models/realtime/client_secret_create_params.rb +18 -9
- data/lib/openai/models/realtime/client_secret_create_response.rb +11 -250
- data/lib/openai/models/realtime/conversation_item.rb +1 -1
- data/lib/openai/models/realtime/conversation_item_added.rb +14 -1
- data/lib/openai/models/realtime/conversation_item_done.rb +3 -0
- data/lib/openai/models/realtime/conversation_item_input_audio_transcription_completed_event.rb +10 -8
- data/lib/openai/models/realtime/conversation_item_input_audio_transcription_delta_event.rb +14 -5
- data/lib/openai/models/realtime/conversation_item_truncate_event.rb +2 -2
- data/lib/openai/models/realtime/input_audio_buffer_append_event.rb +10 -5
- data/lib/openai/models/realtime/models.rb +58 -0
- data/lib/openai/models/realtime/noise_reduction_type.rb +20 -0
- data/lib/openai/models/realtime/realtime_audio_config.rb +6 -427
- data/lib/openai/models/realtime/realtime_audio_config_input.rb +89 -0
- data/lib/openai/models/realtime/realtime_audio_config_output.rb +100 -0
- data/lib/openai/models/realtime/realtime_audio_formats.rb +121 -0
- data/lib/openai/models/realtime/realtime_audio_input_turn_detection.rb +131 -0
- data/lib/openai/models/realtime/realtime_client_event.rb +31 -23
- data/lib/openai/models/realtime/realtime_conversation_item_assistant_message.rb +43 -10
- data/lib/openai/models/realtime/realtime_conversation_item_function_call.rb +16 -7
- data/lib/openai/models/realtime/realtime_conversation_item_function_call_output.rb +15 -7
- data/lib/openai/models/realtime/realtime_conversation_item_system_message.rb +18 -6
- data/lib/openai/models/realtime/realtime_conversation_item_user_message.rb +62 -13
- data/lib/openai/models/realtime/realtime_response.rb +117 -107
- data/lib/openai/models/realtime/realtime_response_create_audio_output.rb +100 -0
- data/lib/openai/models/realtime/realtime_response_create_mcp_tool.rb +310 -0
- data/lib/openai/models/realtime/realtime_response_create_params.rb +225 -0
- data/lib/openai/models/realtime/realtime_response_status.rb +1 -1
- data/lib/openai/models/realtime/realtime_response_usage.rb +5 -2
- data/lib/openai/models/realtime/realtime_response_usage_input_token_details.rb +58 -8
- data/lib/openai/models/realtime/realtime_server_event.rb +21 -5
- data/lib/openai/models/realtime/realtime_session.rb +9 -125
- data/lib/openai/models/realtime/realtime_session_client_secret.rb +36 -0
- data/lib/openai/models/realtime/realtime_session_create_request.rb +50 -71
- data/lib/openai/models/realtime/realtime_session_create_response.rb +621 -219
- data/lib/openai/models/realtime/realtime_tools_config_union.rb +2 -53
- data/lib/openai/models/realtime/realtime_tracing_config.rb +7 -6
- data/lib/openai/models/realtime/realtime_transcription_session_audio.rb +19 -0
- data/lib/openai/models/realtime/realtime_transcription_session_audio_input.rb +90 -0
- data/lib/openai/models/realtime/realtime_transcription_session_audio_input_turn_detection.rb +131 -0
- data/lib/openai/models/realtime/realtime_transcription_session_client_secret.rb +38 -0
- data/lib/openai/models/realtime/realtime_transcription_session_create_request.rb +12 -270
- data/lib/openai/models/realtime/realtime_transcription_session_create_response.rb +78 -0
- data/lib/openai/models/realtime/realtime_transcription_session_input_audio_transcription.rb +66 -0
- data/lib/openai/models/realtime/realtime_transcription_session_turn_detection.rb +57 -0
- data/lib/openai/models/realtime/realtime_truncation.rb +8 -40
- data/lib/openai/models/realtime/realtime_truncation_retention_ratio.rb +34 -0
- data/lib/openai/models/realtime/response_cancel_event.rb +3 -1
- data/lib/openai/models/realtime/response_create_event.rb +18 -348
- data/lib/openai/models/realtime/response_done_event.rb +7 -0
- data/lib/openai/models/realtime/session_created_event.rb +20 -4
- data/lib/openai/models/realtime/session_update_event.rb +36 -12
- data/lib/openai/models/realtime/session_updated_event.rb +20 -4
- data/lib/openai/models/realtime/transcription_session_created.rb +8 -243
- data/lib/openai/models/realtime/transcription_session_update.rb +179 -3
- data/lib/openai/models/realtime/transcription_session_updated_event.rb +8 -243
- data/lib/openai/resources/realtime/client_secrets.rb +2 -3
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +19 -1
- data/rbi/openai/models/realtime/audio_transcription.rbi +132 -0
- data/rbi/openai/models/realtime/client_secret_create_params.rbi +25 -11
- data/rbi/openai/models/realtime/client_secret_create_response.rbi +2 -587
- data/rbi/openai/models/realtime/conversation_item_added.rbi +14 -1
- data/rbi/openai/models/realtime/conversation_item_done.rbi +3 -0
- data/rbi/openai/models/realtime/conversation_item_input_audio_transcription_completed_event.rbi +11 -8
- data/rbi/openai/models/realtime/conversation_item_input_audio_transcription_delta_event.rbi +15 -5
- data/rbi/openai/models/realtime/conversation_item_truncate_event.rbi +2 -2
- data/rbi/openai/models/realtime/input_audio_buffer_append_event.rbi +10 -5
- data/rbi/openai/models/realtime/models.rbi +97 -0
- data/rbi/openai/models/realtime/noise_reduction_type.rbi +31 -0
- data/rbi/openai/models/realtime/realtime_audio_config.rbi +8 -956
- data/rbi/openai/models/realtime/realtime_audio_config_input.rbi +221 -0
- data/rbi/openai/models/realtime/realtime_audio_config_output.rbi +222 -0
- data/rbi/openai/models/realtime/realtime_audio_formats.rbi +329 -0
- data/rbi/openai/models/realtime/realtime_audio_input_turn_detection.rbi +262 -0
- data/rbi/openai/models/realtime/realtime_conversation_item_assistant_message.rbi +51 -10
- data/rbi/openai/models/realtime/realtime_conversation_item_function_call.rbi +16 -7
- data/rbi/openai/models/realtime/realtime_conversation_item_function_call_output.rbi +14 -7
- data/rbi/openai/models/realtime/realtime_conversation_item_system_message.rbi +16 -6
- data/rbi/openai/models/realtime/realtime_conversation_item_user_message.rbi +110 -12
- data/rbi/openai/models/realtime/realtime_response.rbi +287 -212
- data/rbi/openai/models/realtime/realtime_response_create_audio_output.rbi +250 -0
- data/rbi/openai/models/realtime/realtime_response_create_mcp_tool.rbi +616 -0
- data/rbi/openai/models/realtime/realtime_response_create_params.rbi +529 -0
- data/rbi/openai/models/realtime/realtime_response_usage.rbi +8 -2
- data/rbi/openai/models/realtime/realtime_response_usage_input_token_details.rbi +106 -7
- data/rbi/openai/models/realtime/realtime_server_event.rbi +4 -1
- data/rbi/openai/models/realtime/realtime_session.rbi +12 -262
- data/rbi/openai/models/realtime/realtime_session_client_secret.rbi +49 -0
- data/rbi/openai/models/realtime/realtime_session_create_request.rbi +112 -133
- data/rbi/openai/models/realtime/realtime_session_create_response.rbi +1229 -405
- data/rbi/openai/models/realtime/realtime_tools_config_union.rbi +1 -117
- data/rbi/openai/models/realtime/realtime_tracing_config.rbi +11 -10
- data/rbi/openai/models/realtime/realtime_transcription_session_audio.rbi +50 -0
- data/rbi/openai/models/realtime/realtime_transcription_session_audio_input.rbi +226 -0
- data/rbi/openai/models/realtime/realtime_transcription_session_audio_input_turn_detection.rbi +259 -0
- data/rbi/openai/models/realtime/realtime_transcription_session_client_secret.rbi +51 -0
- data/rbi/openai/models/realtime/realtime_transcription_session_create_request.rbi +25 -597
- data/rbi/openai/models/realtime/realtime_transcription_session_create_response.rbi +195 -0
- data/rbi/openai/models/realtime/realtime_transcription_session_input_audio_transcription.rbi +144 -0
- data/rbi/openai/models/realtime/realtime_transcription_session_turn_detection.rbi +94 -0
- data/rbi/openai/models/realtime/realtime_truncation.rbi +5 -56
- data/rbi/openai/models/realtime/realtime_truncation_retention_ratio.rbi +45 -0
- data/rbi/openai/models/realtime/response_cancel_event.rbi +3 -1
- data/rbi/openai/models/realtime/response_create_event.rbi +19 -786
- data/rbi/openai/models/realtime/response_done_event.rbi +7 -0
- data/rbi/openai/models/realtime/session_created_event.rbi +42 -9
- data/rbi/openai/models/realtime/session_update_event.rbi +57 -19
- data/rbi/openai/models/realtime/session_updated_event.rbi +42 -9
- data/rbi/openai/models/realtime/transcription_session_created.rbi +17 -591
- data/rbi/openai/models/realtime/transcription_session_update.rbi +425 -7
- data/rbi/openai/models/realtime/transcription_session_updated_event.rbi +14 -591
- data/rbi/openai/resources/realtime/client_secrets.rbi +5 -3
- data/sig/openai/models/realtime/audio_transcription.rbs +57 -0
- data/sig/openai/models/realtime/client_secret_create_response.rbs +1 -251
- data/sig/openai/models/realtime/models.rbs +57 -0
- data/sig/openai/models/realtime/noise_reduction_type.rbs +16 -0
- data/sig/openai/models/realtime/realtime_audio_config.rbs +12 -331
- data/sig/openai/models/realtime/realtime_audio_config_input.rbs +72 -0
- data/sig/openai/models/realtime/realtime_audio_config_output.rbs +72 -0
- data/sig/openai/models/realtime/realtime_audio_formats.rbs +128 -0
- data/sig/openai/models/realtime/realtime_audio_input_turn_detection.rbs +99 -0
- data/sig/openai/models/realtime/realtime_conversation_item_assistant_message.rbs +17 -2
- data/sig/openai/models/realtime/realtime_conversation_item_user_message.rbs +30 -1
- data/sig/openai/models/realtime/realtime_response.rbs +103 -82
- data/sig/openai/models/realtime/realtime_response_create_audio_output.rbs +84 -0
- data/sig/openai/models/realtime/realtime_response_create_mcp_tool.rbs +218 -0
- data/sig/openai/models/realtime/realtime_response_create_params.rbs +148 -0
- data/sig/openai/models/realtime/realtime_response_usage_input_token_details.rbs +50 -1
- data/sig/openai/models/realtime/realtime_session.rbs +16 -106
- data/sig/openai/models/realtime/realtime_session_client_secret.rbs +20 -0
- data/sig/openai/models/realtime/realtime_session_create_request.rbs +27 -43
- data/sig/openai/models/realtime/realtime_session_create_response.rbs +389 -187
- data/sig/openai/models/realtime/realtime_tools_config_union.rbs +1 -53
- data/sig/openai/models/realtime/realtime_transcription_session_audio.rbs +24 -0
- data/sig/openai/models/realtime/realtime_transcription_session_audio_input.rbs +72 -0
- data/sig/openai/models/realtime/realtime_transcription_session_audio_input_turn_detection.rbs +99 -0
- data/sig/openai/models/realtime/realtime_transcription_session_client_secret.rbs +20 -0
- data/sig/openai/models/realtime/realtime_transcription_session_create_request.rbs +11 -203
- data/sig/openai/models/realtime/realtime_transcription_session_create_response.rbs +69 -0
- data/sig/openai/models/realtime/realtime_transcription_session_input_audio_transcription.rbs +59 -0
- data/sig/openai/models/realtime/realtime_transcription_session_turn_detection.rbs +47 -0
- data/sig/openai/models/realtime/realtime_truncation.rbs +1 -28
- data/sig/openai/models/realtime/realtime_truncation_retention_ratio.rbs +21 -0
- data/sig/openai/models/realtime/response_create_event.rbs +6 -249
- data/sig/openai/models/realtime/session_created_event.rbs +14 -4
- data/sig/openai/models/realtime/session_update_event.rbs +14 -4
- data/sig/openai/models/realtime/session_updated_event.rbs +14 -4
- data/sig/openai/models/realtime/transcription_session_created.rbs +4 -254
- data/sig/openai/models/realtime/transcription_session_update.rbs +154 -4
- data/sig/openai/models/realtime/transcription_session_updated_event.rbs +4 -254
- metadata +59 -5
- data/lib/openai/models/realtime/realtime_client_secret_config.rb +0 -64
- data/rbi/openai/models/realtime/realtime_client_secret_config.rbi +0 -147
- data/sig/openai/models/realtime/realtime_client_secret_config.rbs +0 -60
data/rbi/openai/models/realtime/realtime_session_create_request.rbi

@@ -12,17 +12,6 @@ module OpenAI
             )
           end

-        # The Realtime model used for this session.
-        sig do
-          returns(
-            T.any(
-              String,
-              OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
-            )
-          )
-        end
-        attr_accessor :model
-
         # The type of session to create. Always `realtime` for the Realtime API.
         sig { returns(Symbol) }
         attr_accessor :type
@@ -36,21 +25,10 @@ module OpenAI
         end
         attr_writer :audio

-        # Configuration options for the generated client secret.
-        sig { returns(T.nilable(OpenAI::Realtime::RealtimeClientSecretConfig)) }
-        attr_reader :client_secret
-
-        sig do
-          params(
-            client_secret: OpenAI::Realtime::RealtimeClientSecretConfig::OrHash
-          ).void
-        end
-        attr_writer :client_secret
-
         # Additional fields to include in server outputs.
         #
-        #
-        #
+        # `item.input_audio_transcription.logprobs`: Include logprobs for input audio
+        # transcription.
         sig do
           returns(
             T.nilable(
@@ -98,8 +76,34 @@ module OpenAI
         sig { params(max_output_tokens: T.any(Integer, Symbol)).void }
         attr_writer :max_output_tokens

-        # The
-
+        # The Realtime model used for this session.
+        sig do
+          returns(
+            T.nilable(
+              T.any(
+                String,
+                OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
+              )
+            )
+          )
+        end
+        attr_reader :model
+
+        sig do
+          params(
+            model:
+              T.any(
+                String,
+                OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
+              )
+          ).void
+        end
+        attr_writer :model
+
+        # The set of modalities the model can respond with. It defaults to `["audio"]`,
+        # indicating that the model will respond with audio plus a transcript. `["text"]`
+        # can be used to make the model respond with text only. It is not possible to
+        # request both `text` and `audio` at the same time.
         sig do
           returns(
             T.nilable(
@@ -133,14 +137,6 @@ module OpenAI
         end
         attr_writer :prompt

-        # Sampling temperature for the model, limited to [0.6, 1.2]. For audio models a
-        # temperature of 0.8 is highly recommended for best performance.
-        sig { returns(T.nilable(Float)) }
-        attr_reader :temperature
-
-        sig { params(temperature: Float).void }
-        attr_writer :temperature
-
         # How the model chooses tools. Provide one of the string modes or force a specific
         # function/MCP tool.
         sig do
@@ -174,7 +170,7 @@ module OpenAI
             T.nilable(
               T::Array[
                 T.any(
-                  OpenAI::Realtime::
+                  OpenAI::Realtime::Models,
                   OpenAI::Realtime::RealtimeToolsConfigUnion::Mcp
                 )
               ]
@@ -188,7 +184,7 @@ module OpenAI
             tools:
               T::Array[
                 T.any(
-                  OpenAI::Realtime::
+                  OpenAI::Realtime::Models::OrHash,
                   OpenAI::Realtime::RealtimeToolsConfigUnion::Mcp::OrHash
                 )
               ]
@@ -196,8 +192,9 @@ module OpenAI
         end
         attr_writer :tools

-        #
-        #
+        # Realtime API can write session traces to the
+        # [Traces Dashboard](/logs?api=traces). Set to null to disable tracing. Once
+        # tracing is enabled for a session, the configuration cannot be modified.
         #
         # `auto` will create a trace for the session with default values for the workflow
         # name, group id, and metadata.
@@ -214,14 +211,13 @@ module OpenAI
         attr_accessor :tracing

         # Controls how the realtime conversation is truncated prior to model inference.
-        # The default is `auto`.
-        # fraction of the conversation tokens prior to the instructions.
+        # The default is `auto`.
         sig do
           returns(
             T.nilable(
               T.any(
                 OpenAI::Realtime::RealtimeTruncation::RealtimeTruncationStrategy::OrSymbol,
-                OpenAI::Realtime::
+                OpenAI::Realtime::RealtimeTruncationRetentionRatio
               )
             )
           )
@@ -233,7 +229,7 @@ module OpenAI
             truncation:
               T.any(
                 OpenAI::Realtime::RealtimeTruncation::RealtimeTruncationStrategy::OrSymbol,
-                OpenAI::Realtime::
+                OpenAI::Realtime::RealtimeTruncationRetentionRatio::OrHash
               )
           ).void
         end
@@ -242,25 +238,23 @@ module OpenAI
         # Realtime session object configuration.
         sig do
           params(
-            model:
-              T.any(
-                String,
-                OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
-              ),
             audio: OpenAI::Realtime::RealtimeAudioConfig::OrHash,
-            client_secret: OpenAI::Realtime::RealtimeClientSecretConfig::OrHash,
             include:
               T::Array[
                 OpenAI::Realtime::RealtimeSessionCreateRequest::Include::OrSymbol
               ],
             instructions: String,
             max_output_tokens: T.any(Integer, Symbol),
+            model:
+              T.any(
+                String,
+                OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
+              ),
             output_modalities:
               T::Array[
                 OpenAI::Realtime::RealtimeSessionCreateRequest::OutputModality::OrSymbol
               ],
             prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash),
-            temperature: Float,
             tool_choice:
               T.any(
                 OpenAI::Responses::ToolChoiceOptions::OrSymbol,
@@ -270,7 +264,7 @@ module OpenAI
             tools:
               T::Array[
                 T.any(
-                  OpenAI::Realtime::
+                  OpenAI::Realtime::Models::OrHash,
                   OpenAI::Realtime::RealtimeToolsConfigUnion::Mcp::OrHash
                 )
               ],
@@ -284,22 +278,18 @@ module OpenAI
             truncation:
               T.any(
                 OpenAI::Realtime::RealtimeTruncation::RealtimeTruncationStrategy::OrSymbol,
-                OpenAI::Realtime::
+                OpenAI::Realtime::RealtimeTruncationRetentionRatio::OrHash
              ),
             type: Symbol
           ).returns(T.attached_class)
         end
         def self.new(
-          # The Realtime model used for this session.
-          model:,
           # Configuration for input and output audio.
           audio: nil,
-          # Configuration options for the generated client secret.
-          client_secret: nil,
           # Additional fields to include in server outputs.
           #
-          #
-          #
+          # `item.input_audio_transcription.logprobs`: Include logprobs for input audio
+          # transcription.
           include: nil,
           # The default system instructions (i.e. system message) prepended to model calls.
           # This field allows the client to guide the model on desired responses. The model
@@ -317,29 +307,30 @@ module OpenAI
           # tool calls. Provide an integer between 1 and 4096 to limit output tokens, or
           # `inf` for the maximum available tokens for a given model. Defaults to `inf`.
           max_output_tokens: nil,
-          # The
-
+          # The Realtime model used for this session.
+          model: nil,
+          # The set of modalities the model can respond with. It defaults to `["audio"]`,
+          # indicating that the model will respond with audio plus a transcript. `["text"]`
+          # can be used to make the model respond with text only. It is not possible to
+          # request both `text` and `audio` at the same time.
           output_modalities: nil,
           # Reference to a prompt template and its variables.
           # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
           prompt: nil,
-          # Sampling temperature for the model, limited to [0.6, 1.2]. For audio models a
-          # temperature of 0.8 is highly recommended for best performance.
-          temperature: nil,
           # How the model chooses tools. Provide one of the string modes or force a specific
           # function/MCP tool.
           tool_choice: nil,
           # Tools available to the model.
           tools: nil,
-          #
-          #
+          # Realtime API can write session traces to the
+          # [Traces Dashboard](/logs?api=traces). Set to null to disable tracing. Once
+          # tracing is enabled for a session, the configuration cannot be modified.
           #
           # `auto` will create a trace for the session with default values for the workflow
           # name, group id, and metadata.
           tracing: nil,
           # Controls how the realtime conversation is truncated prior to model inference.
-          # The default is `auto`.
-          # fraction of the conversation tokens prior to the instructions.
+          # The default is `auto`.
           truncation: nil,
           # The type of session to create. Always `realtime` for the Realtime API.
           type: :realtime
@@ -349,26 +340,24 @@ module OpenAI
         sig do
           override.returns(
             {
-              model:
-                T.any(
-                  String,
-                  OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
-                ),
               type: Symbol,
               audio: OpenAI::Realtime::RealtimeAudioConfig,
-              client_secret: OpenAI::Realtime::RealtimeClientSecretConfig,
               include:
                 T::Array[
                   OpenAI::Realtime::RealtimeSessionCreateRequest::Include::OrSymbol
                 ],
               instructions: String,
               max_output_tokens: T.any(Integer, Symbol),
+              model:
+                T.any(
+                  String,
+                  OpenAI::Realtime::RealtimeSessionCreateRequest::Model::OrSymbol
+                ),
               output_modalities:
                 T::Array[
                   OpenAI::Realtime::RealtimeSessionCreateRequest::OutputModality::OrSymbol
                 ],
               prompt: T.nilable(OpenAI::Responses::ResponsePrompt),
-              temperature: Float,
               tool_choice:
                 T.any(
                   OpenAI::Responses::ToolChoiceOptions::OrSymbol,
@@ -378,7 +367,7 @@ module OpenAI
               tools:
                 T::Array[
                   T.any(
-                    OpenAI::Realtime::
+                    OpenAI::Realtime::Models,
                     OpenAI::Realtime::RealtimeToolsConfigUnion::Mcp
                   )
                 ],
@@ -392,7 +381,7 @@ module OpenAI
               truncation:
                 T.any(
                   OpenAI::Realtime::RealtimeTruncation::RealtimeTruncationStrategy::OrSymbol,
-                  OpenAI::Realtime::
+                  OpenAI::Realtime::RealtimeTruncationRetentionRatio
                 )
             }
           )
@@ -400,6 +389,54 @@ module OpenAI
         def to_hash
         end

+        module Include
+          extend OpenAI::Internal::Type::Enum
+
+          TaggedSymbol =
+            T.type_alias do
+              T.all(
+                Symbol,
+                OpenAI::Realtime::RealtimeSessionCreateRequest::Include
+              )
+            end
+          OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+          ITEM_INPUT_AUDIO_TRANSCRIPTION_LOGPROBS =
+            T.let(
+              :"item.input_audio_transcription.logprobs",
+              OpenAI::Realtime::RealtimeSessionCreateRequest::Include::TaggedSymbol
+            )
+
+          sig do
+            override.returns(
+              T::Array[
+                OpenAI::Realtime::RealtimeSessionCreateRequest::Include::TaggedSymbol
+              ]
+            )
+          end
+          def self.values
+          end
+        end
+
+        # Maximum number of output tokens for a single assistant response, inclusive of
+        # tool calls. Provide an integer between 1 and 4096 to limit output tokens, or
+        # `inf` for the maximum available tokens for a given model. Defaults to `inf`.
+        module MaxOutputTokens
+          extend OpenAI::Internal::Type::Union
+
+          Variants = T.type_alias { T.any(Integer, Symbol) }
+
+          sig do
+            override.returns(
+              T::Array[
+                OpenAI::Realtime::RealtimeSessionCreateRequest::MaxOutputTokens::Variants
+              ]
+            )
+          end
+          def self.variants
+          end
+        end
+
         # The Realtime model used for this session.
         module Model
           extend OpenAI::Internal::Type::Union
@@ -441,16 +478,6 @@ module OpenAI
               :"gpt-realtime-2025-08-28",
               OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
             )
-          GPT_4O_REALTIME =
-            T.let(
-              :"gpt-4o-realtime",
-              OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
-            )
-          GPT_4O_MINI_REALTIME =
-            T.let(
-              :"gpt-4o-mini-realtime",
-              OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
-            )
           GPT_4O_REALTIME_PREVIEW =
             T.let(
               :"gpt-4o-realtime-preview",
@@ -483,54 +510,6 @@ module OpenAI
             )
           end

-        module Include
-          extend OpenAI::Internal::Type::Enum
-
-          TaggedSymbol =
-            T.type_alias do
-              T.all(
-                Symbol,
-                OpenAI::Realtime::RealtimeSessionCreateRequest::Include
-              )
-            end
-          OrSymbol = T.type_alias { T.any(Symbol, String) }
-
-          ITEM_INPUT_AUDIO_TRANSCRIPTION_LOGPROBS =
-            T.let(
-              :"item.input_audio_transcription.logprobs",
-              OpenAI::Realtime::RealtimeSessionCreateRequest::Include::TaggedSymbol
-            )
-
-          sig do
-            override.returns(
-              T::Array[
-                OpenAI::Realtime::RealtimeSessionCreateRequest::Include::TaggedSymbol
-              ]
-            )
-          end
-          def self.values
-          end
-        end
-
-        # Maximum number of output tokens for a single assistant response, inclusive of
-        # tool calls. Provide an integer between 1 and 4096 to limit output tokens, or
-        # `inf` for the maximum available tokens for a given model. Defaults to `inf`.
-        module MaxOutputTokens
-          extend OpenAI::Internal::Type::Union
-
-          Variants = T.type_alias { T.any(Integer, Symbol) }
-
-          sig do
-            override.returns(
-              T::Array[
-                OpenAI::Realtime::RealtimeSessionCreateRequest::MaxOutputTokens::Variants
-              ]
-            )
-          end
-          def self.variants
-          end
-        end
-
         module OutputModality
           extend OpenAI::Internal::Type::Enum

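The hunks above (from data/rbi/openai/models/realtime/realtime_session_create_request.rbi) capture the main signature change in this release: `model` moves from a required argument to an optional keyword on the realtime session create request, `temperature` and `client_secret` are removed from it, and the `tools` and `truncation` unions now reference `OpenAI::Realtime::Models` and `OpenAI::Realtime::RealtimeTruncationRetentionRatio`. The following is a minimal sketch, not taken from the gem's documentation, of constructing the request under the 0.23.0 shape; it only builds the model object and dumps it to a hash, since the resource call that consumes it is not shown in this diff, and the `:audio` and `:auto` values are taken from the doc comments rather than verified against the generated enums.

```ruby
require "openai"

# Sketch of the 0.23.0 request shape. `model:` is now optional, and
# `temperature:` / `client_secret:` are no longer accepted here.
session = OpenAI::Realtime::RealtimeSessionCreateRequest.new(
  model: "gpt-realtime-2025-08-28",                      # plain String or a Model enum symbol
  output_modalities: [:audio],                           # `["audio"]` is the documented default
  include: [:"item.input_audio_transcription.logprobs"], # Include enum value shown in the diff
  max_output_tokens: 4_096,                              # Integer, or :inf for the model maximum
  truncation: :auto                                      # or an OpenAI::Realtime::RealtimeTruncationRetentionRatio hash
)

# BaseModel instances expose to_hash (see `def to_hash` in the diff above).
pp session.to_hash
```

Callers that previously passed `temperature:` or `client_secret:` to this constructor, or relied on `model:` being required, will need to adjust when upgrading from 0.22.1.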