openai 0.42.0 → 0.43.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +16 -0
  3. data/README.md +10 -16
  4. data/lib/openai/models/audio/speech_create_params.rb +9 -7
  5. data/lib/openai/models/chat/chat_completion_audio_param.rb +7 -5
  6. data/lib/openai/models/conversations/conversation_item.rb +1 -1
  7. data/lib/openai/models/conversations/message.rb +1 -1
  8. data/lib/openai/models/realtime/realtime_audio_config_output.rb +9 -9
  9. data/lib/openai/models/realtime/realtime_response_create_audio_output.rb +9 -9
  10. data/lib/openai/models/responses/input_token_count_params.rb +4 -7
  11. data/lib/openai/models/responses/response.rb +17 -8
  12. data/lib/openai/models/responses/response_compact_params.rb +1 -0
  13. data/lib/openai/models/responses/response_compaction_item.rb +4 -2
  14. data/lib/openai/models/responses/response_compaction_item_param.rb +2 -1
  15. data/lib/openai/models/responses/response_function_call_output_item.rb +1 -1
  16. data/lib/openai/models/responses/response_function_shell_tool_call_output.rb +10 -6
  17. data/lib/openai/models/responses/response_function_web_search.rb +11 -3
  18. data/lib/openai/models/responses/response_input_item.rb +1 -0
  19. data/lib/openai/models/responses/response_item.rb +1 -1
  20. data/lib/openai/models/responses/response_output_item.rb +1 -1
  21. data/lib/openai/models/responses/response_output_text.rb +1 -1
  22. data/lib/openai/models/responses/tool.rb +4 -1
  23. data/lib/openai/models/video_create_error.rb +7 -2
  24. data/lib/openai/resources/audio/speech.rb +1 -1
  25. data/lib/openai/resources/images.rb +4 -2
  26. data/lib/openai/resources/responses/input_tokens.rb +1 -1
  27. data/lib/openai/version.rb +1 -1
  28. data/rbi/openai/models/audio/speech_create_params.rbi +12 -9
  29. data/rbi/openai/models/chat/chat_completion_audio_param.rbi +9 -6
  30. data/rbi/openai/models/conversations/message.rbi +1 -1
  31. data/rbi/openai/models/realtime/realtime_audio_config_output.rbi +12 -12
  32. data/rbi/openai/models/realtime/realtime_response_create_audio_output.rbi +12 -12
  33. data/rbi/openai/models/responses/input_token_count_params.rbi +3 -9
  34. data/rbi/openai/models/responses/response.rbi +18 -8
  35. data/rbi/openai/models/responses/response_compaction_item.rbi +4 -0
  36. data/rbi/openai/models/responses/response_compaction_item_param.rbi +2 -0
  37. data/rbi/openai/models/responses/response_function_call_output_item.rbi +1 -1
  38. data/rbi/openai/models/responses/response_function_shell_tool_call_output.rbi +10 -2
  39. data/rbi/openai/models/responses/response_function_web_search.rbi +13 -2
  40. data/rbi/openai/models/responses/response_output_text.rbi +1 -1
  41. data/rbi/openai/models/responses/tool.rbi +3 -0
  42. data/rbi/openai/models/video_create_error.rbi +9 -1
  43. data/rbi/openai/resources/audio/speech.rbi +4 -3
  44. data/rbi/openai/resources/images.rbi +4 -2
  45. data/rbi/openai/resources/responses/input_tokens.rbi +1 -3
  46. data/sig/openai/models/responses/response.rbs +5 -0
  47. data/sig/openai/models/responses/response_function_web_search.rbs +7 -0
  48. metadata +16 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 464227db3c30bad373f3cd4600bb7409c8ace214249c3257016363d533d848c8
- data.tar.gz: a3828202f21732efa015e97198bf160fff5dca941a25cc1010f688a583ac8ccb
+ metadata.gz: d4083372aec70ae767e55529226345fd14c51b526994a100ac2985ab1990cd48
+ data.tar.gz: 79cc5f4c8ad7b26a1ec05490a80b55105393750839b2ecb3bf4da1a22385952d
  SHA512:
- metadata.gz: c062d515f082c87524e10ccce322304923f4db54cd489218248f63e2d47e1f976db323f9e1cff5753f66a603112d8b56df3a9ed4f9a144d4e8867c6cdc075e52
- data.tar.gz: 566f420ae6db1b332fe1dd253544271c470f9863d4278ebcf22a873ff2cc4102999d0ed741f68671011c5a761e39d988e3679e41e139f2a7d526334ba48cd452
+ metadata.gz: 95ff955307367382d388f0d575f733a7c384b513a72be89010dfe70aefa1868c39c36c9a49c7130ea124f793feed59543b7f837facbd738082cd1e0db1e2d658
+ data.tar.gz: 378b3dc58226b56d9a362578a879fc3f48acdcfecbea44507798bbfb53ed4fc8f96a5b2849004db91aeab9acadcde2829be897321e38bafa987e819e0ca1aa1a
data/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
  # Changelog

+ ## 0.43.0 (2026-01-09)
+
+ Full Changelog: [v0.42.0...v0.43.0](https://github.com/openai/openai-ruby/compare/v0.42.0...v0.43.0)
+
+ ### Features
+
+ * **api:** add new Response completed_at prop ([c32e1c2](https://github.com/openai/openai-ruby/commit/c32e1c2a0a0ae6a9b7724e1c6d251ba14e8499e1))
+
+
+ ### Chores
+
+ * add ci tests for ruby 4 compatibility [#235](https://github.com/openai/openai-ruby/issues/235) ([#236](https://github.com/openai/openai-ruby/issues/236)) ([1aa0d7a](https://github.com/openai/openai-ruby/commit/1aa0d7abf5a4714fa28cfe7ee5aecefbe2c683d2))
+ * **internal:** codegen related update ([ef23de3](https://github.com/openai/openai-ruby/commit/ef23de347b5f541853a32a288cd02a54938793cf))
+ * **internal:** use different example values for some enums ([8b6c4ad](https://github.com/openai/openai-ruby/commit/8b6c4ade813244e8c65690c66fe09f2566dd3ff0))
+ * move `cgi` into dependencies for ruby 4 ([bd9c798](https://github.com/openai/openai-ruby/commit/bd9c798552a3d378ec943c7e07d6cf1334f72b1d))
+
  ## 0.42.0 (2025-12-19)

  Full Changelog: [v0.41.0...v0.42.0](https://github.com/openai/openai-ruby/compare/v0.41.0...v0.42.0)
data/README.md CHANGED
@@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application
  <!-- x-release-please-start-version -->

  ```ruby
- gem "openai", "~> 0.42.0"
+ gem "openai", "~> 0.43.0"
  ```

  <!-- x-release-please-end -->
@@ -30,10 +30,7 @@ openai = OpenAI::Client.new(
  api_key: ENV["OPENAI_API_KEY"] # This is the default and can be omitted
  )

- chat_completion = openai.chat.completions.create(
- messages: [{role: "user", content: "Say this is a test"}],
- model: :"gpt-5.2"
- )
+ chat_completion = openai.chat.completions.create(messages: [{role: "user", content: "Say this is a test"}], model: "gpt-4o")

  puts(chat_completion)
  ```
@@ -45,7 +42,7 @@ We provide support for streaming responses using Server-Sent Events (SSE).
  ```ruby
  stream = openai.responses.stream(
  input: "Write a haiku about OpenAI.",
- model: :"gpt-5.2"
+ model: "gpt-4o"
  )

  stream.each do |event|
@@ -298,7 +295,7 @@ When the library is unable to connect to the API, or if the API returns a non-su

  ```ruby
  begin
- job = openai.fine_tuning.jobs.create(model: :"babbage-002", training_file: "file-abc123")
+ job = openai.fine_tuning.jobs.create(model: "gpt-4o", training_file: "file-abc123")
  rescue OpenAI::Errors::APIConnectionError => e
  puts("The server could not be reached")
  puts(e.cause) # an underlying Exception, likely raised within `net/http`
@@ -343,7 +340,7 @@ openai = OpenAI::Client.new(
  # Or, configure per-request:
  openai.chat.completions.create(
  messages: [{role: "user", content: "How can I get the name of the current day in JavaScript?"}],
- model: :"gpt-5.2",
+ model: "gpt-4o",
  request_options: {max_retries: 5}
  )
  ```
@@ -361,7 +358,7 @@ openai = OpenAI::Client.new(
  # Or, configure per-request:
  openai.chat.completions.create(
  messages: [{role: "user", content: "How can I list all files in a directory using Python?"}],
- model: :"gpt-5.2",
+ model: "gpt-4o",
  request_options: {timeout: 5}
  )
  ```
@@ -396,7 +393,7 @@ Note: the `extra_` parameters of the same name overrides the documented paramete
  chat_completion =
  openai.chat.completions.create(
  messages: [{role: "user", content: "How can I get the name of the current day in JavaScript?"}],
- model: :"gpt-5.2",
+ model: "gpt-4o",
  request_options: {
  extra_query: {my_query_parameter: value},
  extra_body: {my_body_parameter: value},
@@ -444,7 +441,7 @@ You can provide typesafe request parameters like so:
  ```ruby
  openai.chat.completions.create(
  messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say this is a test")],
- model: :"gpt-5.2"
+ model: "gpt-4o"
  )
  ```

@@ -452,15 +449,12 @@ Or, equivalently:

  ```ruby
  # Hashes work, but are not typesafe:
- openai.chat.completions.create(
- messages: [{role: "user", content: "Say this is a test"}],
- model: :"gpt-5.2"
- )
+ openai.chat.completions.create(messages: [{role: "user", content: "Say this is a test"}], model: "gpt-4o")

  # You can also splat a full Params class:
  params = OpenAI::Chat::CompletionCreateParams.new(
  messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say this is a test")],
- model: :"gpt-5.2"
+ model: "gpt-4o"
  )
  openai.chat.completions.create(**params)
  ```
data/lib/openai/models/audio/speech_create_params.rb CHANGED
@@ -22,9 +22,10 @@ module OpenAI
  required :model, union: -> { OpenAI::Audio::SpeechCreateParams::Model }

  # @!attribute voice
- # The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and
- # `verse`. Previews of the voices are available in the
+ # The voice to use when generating the audio. Supported built-in voices are
+ # `alloy`, `ash`, `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`,
+ # `shimmer`, `verse`, `marin`, and `cedar`. Previews of the voices are available
+ # in the
  # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
  #
  # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice]
@@ -66,7 +67,7 @@ module OpenAI
  #
  # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts):
  #
- # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+ # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported built-in voices are `alloy
  #
  # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not
  #
@@ -92,9 +93,10 @@ module OpenAI
  # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)]
  end

- # The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and
- # `verse`. Previews of the voices are available in the
+ # The voice to use when generating the audio. Supported built-in voices are
+ # `alloy`, `ash`, `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`,
+ # `shimmer`, `verse`, `marin`, and `cedar`. Previews of the voices are available
+ # in the
  # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
  module Voice
  extend OpenAI::Internal::Type::Union
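The hunks above only extend the documented voice list; the speech endpoint itself is unchanged. A minimal, hedged sketch of requesting speech with one of the newly documented built-in voices (`marin`) — the client setup and TTS model name are illustrative assumptions, not part of this diff:

```ruby
require "openai"

openai = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Assumed TTS model name; `voice:` also accepts the other newly documented voice, `cedar`.
speech = openai.audio.speech.create(
  model: "gpt-4o-mini-tts",
  voice: "marin",
  input: "The quick brown fox jumped over the lazy dog."
)
```

The returned object carries the raw audio bytes; writing them to disk is left to the caller.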
data/lib/openai/models/chat/chat_completion_audio_param.rb CHANGED
@@ -12,8 +12,9 @@ module OpenAI
  required :format_, enum: -> { OpenAI::Chat::ChatCompletionAudioParam::Format }, api_name: :format

  # @!attribute voice
- # The voice the model uses to respond. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`.
+ # The voice the model uses to respond. Supported built-in voices are `alloy`,
+ # `ash`, `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, `shimmer`,
+ # `marin`, and `cedar`.
  #
  # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice]
  required :voice, union: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice }
@@ -28,7 +29,7 @@ module OpenAI
  #
  # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`,
  #
- # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are
+ # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported built-in voices are `alloy`, `ash

  # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`,
  # or `pcm16`.
@@ -48,8 +49,9 @@ module OpenAI
  # @return [Array<Symbol>]
  end

- # The voice the model uses to respond. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`.
+ # The voice the model uses to respond. Supported built-in voices are `alloy`,
+ # `ash`, `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, `shimmer`,
+ # `marin`, and `cedar`.
  #
  # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice
  module Voice
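For chat completions the same voices flow through `ChatCompletionAudioParam`. A hedged sketch reusing the `openai` client from the sketch above; the audio-capable model name and the `modalities` argument are assumptions drawn from the public audio-output API, not from this diff:

```ruby
chat_completion = openai.chat.completions.create(
  model: "gpt-4o-audio-preview",   # assumed audio-capable model
  modalities: ["text", "audio"],
  audio: OpenAI::Chat::ChatCompletionAudioParam.new(voice: "marin", format_: :mp3),
  messages: [{role: "user", content: "Say this is a test"}]
)
```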
data/lib/openai/models/conversations/conversation_item.rb CHANGED
@@ -57,7 +57,7 @@ module OpenAI
  # A tool call that executes one or more shell commands in a managed environment.
  variant :shell_call, -> { OpenAI::Responses::ResponseFunctionShellToolCall }

- # The output of a shell tool call.
+ # The output of a shell tool call that was emitted.
  variant :shell_call_output, -> { OpenAI::Responses::ResponseFunctionShellToolCallOutput }

  # A tool call that applies file diffs by creating, deleting, or updating files.
data/lib/openai/models/conversations/message.rb CHANGED
@@ -52,7 +52,7 @@ module OpenAI
  #
  # @param type [Symbol, :message] The type of the message. Always set to `message`.

- # A text input to the model.
+ # A content part that makes up an input or output item.
  module Content
  extend OpenAI::Internal::Type::Union

data/lib/openai/models/realtime/realtime_audio_config_output.rb CHANGED
@@ -23,10 +23,10 @@ module OpenAI
  optional :speed, Float

  # @!attribute voice
- # The voice the model uses to respond. Voice cannot be changed during the session
- # once the model has responded with audio at least once. Current voice options are
- # `alloy`, `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`,
- # and `cedar`. We recommend `marin` and `cedar` for best quality.
+ # The voice the model uses to respond. Supported built-in voices are `alloy`,
+ # `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`, and
+ # `cedar`. Voice cannot be changed during the session once the model has responded
+ # with audio at least once. We recommend `marin` and `cedar` for best quality.
  #
  # @return [String, Symbol, OpenAI::Models::Realtime::RealtimeAudioConfigOutput::Voice, nil]
  optional :voice, union: -> { OpenAI::Realtime::RealtimeAudioConfigOutput::Voice }
@@ -39,12 +39,12 @@ module OpenAI
  #
  # @param speed [Float] The speed of the model's spoken response as a multiple of the original speed.
  #
- # @param voice [String, Symbol, OpenAI::Models::Realtime::RealtimeAudioConfigOutput::Voice] The voice the model uses to respond. Voice cannot be changed during the
+ # @param voice [String, Symbol, OpenAI::Models::Realtime::RealtimeAudioConfigOutput::Voice] The voice the model uses to respond. Supported built-in voices are `alloy`, `ash

- # The voice the model uses to respond. Voice cannot be changed during the session
- # once the model has responded with audio at least once. Current voice options are
- # `alloy`, `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`,
- # and `cedar`. We recommend `marin` and `cedar` for best quality.
+ # The voice the model uses to respond. Supported built-in voices are `alloy`,
+ # `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`, and
+ # `cedar`. Voice cannot be changed during the session once the model has responded
+ # with audio at least once. We recommend `marin` and `cedar` for best quality.
  #
  # @see OpenAI::Models::Realtime::RealtimeAudioConfigOutput#voice
  module Voice
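The same wording change repeats for the realtime response-creation output below; the model classes themselves are unchanged. A hedged sketch of building the output-audio config object — how it is attached to a realtime session update is not shown in this diff and is left out here:

```ruby
# Both attributes come straight from the model above: `voice` (union of String/Symbol)
# and `speed` (Float).
output_audio = OpenAI::Realtime::RealtimeAudioConfigOutput.new(voice: "marin", speed: 1.0)
```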
data/lib/openai/models/realtime/realtime_response_create_audio_output.rb CHANGED
@@ -23,10 +23,10 @@ module OpenAI
  optional :format_, union: -> { OpenAI::Realtime::RealtimeAudioFormats }, api_name: :format

  # @!attribute voice
- # The voice the model uses to respond. Voice cannot be changed during the session
- # once the model has responded with audio at least once. Current voice options are
- # `alloy`, `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`,
- # and `cedar`. We recommend `marin` and `cedar` for best quality.
+ # The voice the model uses to respond. Supported built-in voices are `alloy`,
+ # `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`, and
+ # `cedar`. Voice cannot be changed during the session once the model has responded
+ # with audio at least once.
  #
  # @return [String, Symbol, OpenAI::Models::Realtime::RealtimeResponseCreateAudioOutput::Output::Voice, nil]
  optional :voice, union: -> { OpenAI::Realtime::RealtimeResponseCreateAudioOutput::Output::Voice }
@@ -38,12 +38,12 @@ module OpenAI
  #
  # @param format_ [OpenAI::Models::Realtime::RealtimeAudioFormats::AudioPCM, OpenAI::Models::Realtime::RealtimeAudioFormats::AudioPCMU, OpenAI::Models::Realtime::RealtimeAudioFormats::AudioPCMA] The format of the output audio.
  #
- # @param voice [String, Symbol, OpenAI::Models::Realtime::RealtimeResponseCreateAudioOutput::Output::Voice] The voice the model uses to respond. Voice cannot be changed during the
+ # @param voice [String, Symbol, OpenAI::Models::Realtime::RealtimeResponseCreateAudioOutput::Output::Voice] The voice the model uses to respond. Supported built-in voices are `alloy`, `ash

- # The voice the model uses to respond. Voice cannot be changed during the session
- # once the model has responded with audio at least once. Current voice options are
- # `alloy`, `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`,
- # and `cedar`. We recommend `marin` and `cedar` for best quality.
+ # The voice the model uses to respond. Supported built-in voices are `alloy`,
+ # `ash`, `ballad`, `coral`, `echo`, `sage`, `shimmer`, `verse`, `marin`, and
+ # `cedar`. Voice cannot be changed during the session once the model has responded
+ # with audio at least once.
  #
  # @see OpenAI::Models::Realtime::RealtimeResponseCreateAudioOutput::Output#voice
  module Voice
data/lib/openai/models/responses/input_token_count_params.rb CHANGED
@@ -79,9 +79,7 @@ module OpenAI
  optional :text, -> { OpenAI::Responses::InputTokenCountParams::Text }, nil?: true

  # @!attribute tool_choice
- # How the model should select which tool (or tools) to use when generating a
- # response. See the `tools` parameter to see how to specify which tools the model
- # can call.
+ # Controls which tool the model should use, if any.
  #
  # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom, OpenAI::Models::Responses::ToolChoiceApplyPatch, OpenAI::Models::Responses::ToolChoiceShell, nil]
  optional :tool_choice, union: -> { OpenAI::Responses::InputTokenCountParams::ToolChoice }, nil?: true
@@ -123,7 +121,7 @@ module OpenAI
  #
  # @param text [OpenAI::Models::Responses::InputTokenCountParams::Text, nil] Configuration options for a text response from the model. Can be plain
  #
- # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom, OpenAI::Models::Responses::ToolChoiceApplyPatch, OpenAI::Models::Responses::ToolChoiceShell, nil] How the model should select which tool (or tools) to use when generating
+ # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom, OpenAI::Models::Responses::ToolChoiceApplyPatch, OpenAI::Models::Responses::ToolChoiceShell, nil] Controls which tool the model should use, if any.
  #
  # @param tools [Array<OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::FunctionShellTool, OpenAI::Models::Responses::CustomTool, OpenAI::Models::Responses::ApplyPatchTool, OpenAI::Models::Responses::WebSearchTool, OpenAI::Models::Responses::WebSearchPreviewTool>, nil] An array of tools the model may call while generating a response. You can specif
  #
@@ -155,6 +153,7 @@ module OpenAI
  # A text input to the model, equivalent to a text input with the `user` role.
  variant String

+ # A list of one or many input items to the model, containing different content types.
  variant -> { OpenAI::Models::Responses::InputTokenCountParams::Input::ResponseInputItemArray }

  # @!method self.variants
@@ -227,9 +226,7 @@ module OpenAI
  end
  end

- # How the model should select which tool (or tools) to use when generating a
- # response. See the `tools` parameter to see how to specify which tools the model
- # can call.
+ # Controls which tool the model should use, if any.
  module ToolChoice
  extend OpenAI::Internal::Type::Union

data/lib/openai/models/responses/response.rb CHANGED
@@ -144,9 +144,16 @@ module OpenAI
  # @return [Boolean, nil]
  optional :background, OpenAI::Internal::Type::Boolean, nil?: true

+ # @!attribute completed_at
+ # Unix timestamp (in seconds) of when this Response was completed. Only present
+ # when the status is `completed`.
+ #
+ # @return [Float, nil]
+ optional :completed_at, Float, nil?: true
+
  # @!attribute conversation
- # The conversation that this response belongs to. Input items and output items
- # from this response are automatically added to this conversation.
+ # The conversation that this response belonged to. Input items and output items
+ # from this response were automatically added to this conversation.
  #
  # @return [OpenAI::Models::Responses::Response::Conversation, nil]
  optional :conversation, -> { OpenAI::Responses::Response::Conversation }, nil?: true
@@ -318,7 +325,7 @@ module OpenAI
  texts.join
  end

- # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, conversation: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, prompt_cache_retention: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response)
+ # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, completed_at: nil, conversation: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, prompt_cache_retention: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response)
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::Responses::Response} for more details.
  #
@@ -350,7 +357,9 @@ module OpenAI
  #
  # @param background [Boolean, nil] Whether to run the model response in the background.
  #
- # @param conversation [OpenAI::Models::Responses::Response::Conversation, nil] The conversation that this response belongs to. Input items and output items fro
+ # @param completed_at [Float, nil] Unix timestamp (in seconds) of when this Response was completed.
+ #
+ # @param conversation [OpenAI::Models::Responses::Response::Conversation, nil] The conversation that this response belonged to. Input items and output items fr
  #
  # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
  #
@@ -484,16 +493,16 @@ module OpenAI
  # @see OpenAI::Models::Responses::Response#conversation
  class Conversation < OpenAI::Internal::Type::BaseModel
  # @!attribute id
- # The unique ID of the conversation.
+ # The unique ID of the conversation that this response was associated with.
  #
  # @return [String]
  required :id, String

  # @!method initialize(id:)
- # The conversation that this response belongs to. Input items and output items
- # from this response are automatically added to this conversation.
+ # The conversation that this response belonged to. Input items and output items
+ # from this response were automatically added to this conversation.
  #
- # @param id [String] The unique ID of the conversation.
+ # @param id [String] The unique ID of the conversation that this response was associated with.
  end

  # The retention policy for the prompt cache. Set to `24h` to enable extended
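The new `completed_at` attribute above is the feature called out in the changelog. A hedged sketch of reading it, reusing the client from the earlier sketches; the model and input are placeholders, and the symbol comparison for `status` is an assumption:

```ruby
response = openai.responses.create(model: "gpt-4o", input: "Say this is a test")

# `completed_at` is a Float Unix timestamp, present only once the status is `completed`.
puts(Time.at(response.completed_at)) if response.status == :completed
```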
data/lib/openai/models/responses/response_compact_params.rb CHANGED
@@ -344,6 +344,7 @@ module OpenAI
  # A text input to the model, equivalent to a text input with the `user` role.
  variant String

+ # A list of one or many input items to the model, containing different content types.
  variant -> { OpenAI::Models::Responses::ResponseCompactParams::Input::ResponseInputItemArray }

  # @!method self.variants
data/lib/openai/models/responses/response_compaction_item.rb CHANGED
@@ -11,6 +11,7 @@ module OpenAI
  required :id, String

  # @!attribute encrypted_content
+ # The encrypted content that was produced by compaction.
  #
  # @return [String]
  required :encrypted_content, String
@@ -22,6 +23,7 @@ module OpenAI
  required :type, const: :compaction

  # @!attribute created_by
+ # The identifier of the actor that created the item.
  #
  # @return [String, nil]
  optional :created_by, String
@@ -32,9 +34,9 @@ module OpenAI
  #
  # @param id [String] The unique ID of the compaction item.
  #
- # @param encrypted_content [String]
+ # @param encrypted_content [String] The encrypted content that was produced by compaction.
  #
- # @param created_by [String]
+ # @param created_by [String] The identifier of the actor that created the item.
  #
  # @param type [Symbol, :compaction] The type of the item. Always `compaction`.
  end
data/lib/openai/models/responses/response_compaction_item_param.rb CHANGED
@@ -5,6 +5,7 @@ module OpenAI
  module Responses
  class ResponseCompactionItemParam < OpenAI::Internal::Type::BaseModel
  # @!attribute encrypted_content
+ # The encrypted content of the compaction summary.
  #
  # @return [String]
  required :encrypted_content, String
@@ -25,7 +26,7 @@ module OpenAI
  # A compaction item generated by the
  # [`v1/responses/compact` API](https://platform.openai.com/docs/api-reference/responses/compact).
  #
- # @param encrypted_content [String]
+ # @param encrypted_content [String] The encrypted content of the compaction summary.
  #
  # @param id [String, nil] The ID of the compaction item.
  #
data/lib/openai/models/responses/response_function_call_output_item.rb CHANGED
@@ -3,7 +3,7 @@
  module OpenAI
  module Models
  module Responses
- # A text input to the model.
+ # A piece of message content, such as text, an image, or a file.
  module ResponseFunctionCallOutputItem
  extend OpenAI::Internal::Type::Union

data/lib/openai/models/responses/response_function_shell_tool_call_output.rb CHANGED
@@ -38,6 +38,7 @@ module OpenAI
  required :type, const: :shell_call_output

  # @!attribute created_by
+ # The identifier of the actor that created the item.
  #
  # @return [String, nil]
  optional :created_by, String
@@ -47,7 +48,7 @@ module OpenAI
  # {OpenAI::Models::Responses::ResponseFunctionShellToolCallOutput} for more
  # details.
  #
- # The output of a shell tool call.
+ # The output of a shell tool call that was emitted.
  #
  # @param id [String] The unique ID of the shell call output. Populated when this item is returned via
  #
@@ -57,7 +58,7 @@ module OpenAI
  #
  # @param output [Array<OpenAI::Models::Responses::ResponseFunctionShellToolCallOutput::Output>] An array of shell call output contents
  #
- # @param created_by [String]
+ # @param created_by [String] The identifier of the actor that created the item.
  #
  # @param type [Symbol, :shell_call_output] The type of the shell call output. Always `shell_call_output`.

@@ -70,16 +71,19 @@ module OpenAI
  required :outcome, union: -> { OpenAI::Responses::ResponseFunctionShellToolCallOutput::Output::Outcome }

  # @!attribute stderr
+ # The standard error output that was captured.
  #
  # @return [String]
  required :stderr, String

  # @!attribute stdout
+ # The standard output that was captured.
  #
  # @return [String]
  required :stdout, String

  # @!attribute created_by
+ # The identifier of the actor that created the item.
  #
  # @return [String, nil]
  optional :created_by, String
@@ -89,15 +93,15 @@ module OpenAI
  # {OpenAI::Models::Responses::ResponseFunctionShellToolCallOutput::Output} for
  # more details.
  #
- # The content of a shell call output.
+ # The content of a shell tool call output that was emitted.
  #
  # @param outcome [OpenAI::Models::Responses::ResponseFunctionShellToolCallOutput::Output::Outcome::Timeout, OpenAI::Models::Responses::ResponseFunctionShellToolCallOutput::Output::Outcome::Exit] Represents either an exit outcome (with an exit code) or a timeout outcome for a
  #
- # @param stderr [String]
+ # @param stderr [String] The standard error output that was captured.
  #
- # @param stdout [String]
+ # @param stdout [String] The standard output that was captured.
  #
- # @param created_by [String]
+ # @param created_by [String] The identifier of the actor that created the item.

  # Represents either an exit outcome (with an exit code) or a timeout outcome for a
  # shell call output chunk.
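The shell tool call output item now documents its fields but keeps the same shape. A hedged sketch of reading them, assuming `item` is a `ResponseFunctionShellToolCallOutput` taken from a response's output array:

```ruby
item.output.each do |chunk|
  puts(chunk.stdout)                               # the standard output that was captured
  warn(chunk.stderr) unless chunk.stderr.empty?    # the standard error output that was captured
  puts("created by: #{chunk.created_by}") if chunk.created_by
end
```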
data/lib/openai/models/responses/response_function_web_search.rb CHANGED
@@ -65,7 +65,7 @@ module OpenAI

  class Search < OpenAI::Internal::Type::BaseModel
  # @!attribute query
- # The search query.
+ # [DEPRECATED] The search query.
  #
  # @return [String]
  required :query, String
@@ -76,6 +76,12 @@ module OpenAI
  # @return [Symbol, :search]
  required :type, const: :search

+ # @!attribute queries
+ # The search queries.
+ #
+ # @return [Array<String>, nil]
+ optional :queries, OpenAI::Internal::Type::ArrayOf[String]
+
  # @!attribute sources
  # The sources used in the search.
  #
@@ -83,14 +89,16 @@
  optional :sources,
  -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source] }

- # @!method initialize(query:, sources: nil, type: :search)
+ # @!method initialize(query:, queries: nil, sources: nil, type: :search)
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search} for more
  # details.
  #
  # Action type "search" - Performs a web search query.
  #
- # @param query [String] The search query.
+ # @param query [String] [DEPRECATED] The search query.
+
+ # @param queries [Array<String>] The search queries.
  #
  # @param sources [Array<OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search::Source>] The sources used in the search.
  #
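With `query` now marked deprecated in favor of the new `queries` array, readers of web search actions can prefer the new field and fall back to the old one. A hedged sketch, assuming `web_search_call` is a `ResponseFunctionWebSearch` item pulled from a response's output:

```ruby
action = web_search_call.action
if action.is_a?(OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search)
  terms = action.queries || [action.query]   # fall back to the deprecated single query
  puts(terms.join(", "))
end
```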
data/lib/openai/models/responses/response_input_item.rb CHANGED
@@ -359,6 +359,7 @@ module OpenAI
  # A JSON string of the output of the function tool call.
  variant String

+ # An array of content outputs (text, image, file) for the function tool call.
  variant -> { OpenAI::Responses::ResponseFunctionCallOutputItemList }

  # @!method self.variants
data/lib/openai/models/responses/response_item.rb CHANGED
@@ -49,7 +49,7 @@ module OpenAI
  # A tool call that executes one or more shell commands in a managed environment.
  variant :shell_call, -> { OpenAI::Responses::ResponseFunctionShellToolCall }

- # The output of a shell tool call.
+ # The output of a shell tool call that was emitted.
  variant :shell_call_output, -> { OpenAI::Responses::ResponseFunctionShellToolCallOutput }

  # A tool call that applies file diffs by creating, deleting, or updating files.
data/lib/openai/models/responses/response_output_item.rb CHANGED
@@ -49,7 +49,7 @@ module OpenAI
  # A tool call that executes one or more shell commands in a managed environment.
  variant :shell_call, -> { OpenAI::Responses::ResponseFunctionShellToolCall }

- # The output of a shell tool call.
+ # The output of a shell tool call that was emitted.
  variant :shell_call_output, -> { OpenAI::Responses::ResponseFunctionShellToolCallOutput }

  # A tool call that applies file diffs by creating, deleting, or updating files.
data/lib/openai/models/responses/response_output_text.rb CHANGED
@@ -47,7 +47,7 @@ module OpenAI
  #
  # @param type [Symbol, :output_text] The type of the output text. Always `output_text`.

- # A citation to a file.
+ # An annotation that applies to a span of output text.
  module Annotation
  extend OpenAI::Internal::Type::Union

data/lib/openai/models/responses/tool.rb CHANGED
@@ -399,6 +399,7 @@ module OpenAI
  optional :file_ids, OpenAI::Internal::Type::ArrayOf[String]

  # @!attribute memory_limit
+ # The memory limit for the code interpreter container.
  #
  # @return [Symbol, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto::MemoryLimit, nil]
  optional :memory_limit,
@@ -413,10 +414,12 @@ module OpenAI
  #
  # @param file_ids [Array<String>] An optional list of uploaded files to make available to your code.
  #
- # @param memory_limit [Symbol, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto::MemoryLimit, nil]
+ # @param memory_limit [Symbol, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto::MemoryLimit, nil] The memory limit for the code interpreter container.
  #
  # @param type [Symbol, :auto] Always `auto`.

+ # The memory limit for the code interpreter container.
+ #
  # @see OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto#memory_limit
  module MemoryLimit
  extend OpenAI::Internal::Type::Enum