openai 0.9.0 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +20 -0
  3. data/README.md +1 -1
  4. data/lib/openai/errors.rb +22 -0
  5. data/lib/openai/internal/type/array_of.rb +6 -1
  6. data/lib/openai/internal/type/base_model.rb +76 -24
  7. data/lib/openai/internal/type/boolean.rb +7 -1
  8. data/lib/openai/internal/type/converter.rb +42 -34
  9. data/lib/openai/internal/type/enum.rb +10 -2
  10. data/lib/openai/internal/type/file_input.rb +6 -1
  11. data/lib/openai/internal/type/hash_of.rb +6 -1
  12. data/lib/openai/internal/type/union.rb +12 -7
  13. data/lib/openai/internal/type/unknown.rb +7 -1
  14. data/lib/openai/models/audio/speech_create_params.rb +23 -2
  15. data/lib/openai/models/audio/transcription.rb +118 -1
  16. data/lib/openai/models/audio/transcription_text_done_event.rb +80 -1
  17. data/lib/openai/models/audio/transcription_verbose.rb +31 -1
  18. data/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb +60 -25
  19. data/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +17 -8
  20. data/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +14 -10
  21. data/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +11 -10
  22. data/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +11 -10
  23. data/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +11 -10
  24. data/lib/openai/models/responses/response_code_interpreter_tool_call.rb +49 -78
  25. data/lib/openai/models/responses/response_create_params.rb +29 -29
  26. data/lib/openai/models/responses/response_output_text.rb +18 -2
  27. data/lib/openai/models/responses/response_stream_event.rb +2 -2
  28. data/lib/openai/resources/audio/speech.rb +3 -1
  29. data/lib/openai/resources/chat/completions.rb +8 -0
  30. data/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +1 -2
  31. data/lib/openai/resources/responses.rb +12 -12
  32. data/lib/openai/version.rb +1 -1
  33. data/rbi/openai/errors.rbi +16 -0
  34. data/rbi/openai/internal/type/boolean.rbi +2 -0
  35. data/rbi/openai/internal/type/converter.rbi +15 -15
  36. data/rbi/openai/internal/type/union.rbi +5 -0
  37. data/rbi/openai/internal/type/unknown.rbi +2 -0
  38. data/rbi/openai/models/audio/speech_create_params.rbi +59 -2
  39. data/rbi/openai/models/audio/transcription.rbi +213 -3
  40. data/rbi/openai/models/audio/transcription_text_done_event.rbi +146 -1
  41. data/rbi/openai/models/audio/transcription_verbose.rbi +47 -0
  42. data/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi +95 -26
  43. data/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +17 -7
  44. data/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +13 -5
  45. data/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +13 -21
  46. data/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +13 -21
  47. data/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +13 -21
  48. data/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +83 -125
  49. data/rbi/openai/models/responses/response_create_params.rbi +83 -60
  50. data/rbi/openai/models/responses/response_output_text.rbi +26 -4
  51. data/rbi/openai/resources/audio/speech.rbi +6 -1
  52. data/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi +1 -3
  53. data/rbi/openai/resources/responses.rbi +46 -46
  54. data/sig/openai/errors.rbs +9 -0
  55. data/sig/openai/internal/type/converter.rbs +7 -1
  56. data/sig/openai/models/audio/speech_create_params.rbs +21 -1
  57. data/sig/openai/models/audio/transcription.rbs +95 -3
  58. data/sig/openai/models/audio/transcription_text_done_event.rbs +72 -2
  59. data/sig/openai/models/audio/transcription_verbose.rbs +21 -0
  60. data/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs +53 -16
  61. data/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +5 -0
  62. data/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +5 -0
  63. data/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +4 -4
  64. data/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +4 -4
  65. data/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +4 -4
  66. data/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +31 -52
  67. data/sig/openai/models/responses/response_create_params.rbs +18 -10
  68. data/sig/openai/models/responses/response_output_text.rbs +15 -1
  69. data/sig/openai/resources/audio/speech.rbs +1 -0
  70. data/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs +1 -1
  71. data/sig/openai/resources/responses.rbs +4 -4
  72. metadata +2 -2
@@ -11,19 +11,27 @@ module OpenAI
  required :id, String
 
  # @!attribute code
- # The code to run.
+ # The code to run, or null if not available.
+ #
+ # @return [String, nil]
+ required :code, String, nil?: true
+
+ # @!attribute container_id
+ # The ID of the container used to run the code.
  #
  # @return [String]
- required :code, String
+ required :container_id, String
 
- # @!attribute results
- # The results of the code interpreter tool call.
+ # @!attribute outputs
+ # The outputs generated by the code interpreter, such as logs or images. Can be
+ # null if no outputs are available.
  #
- # @return [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files>]
- required :results,
+ # @return [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image>, nil]
+ required :outputs,
  -> {
- OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result]
- }
+ OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Output]
+ },
+ nil?: true
 
  # @!attribute status
  # The status of the code interpreter tool call.
@@ -37,13 +45,7 @@ module OpenAI
  # @return [Symbol, :code_interpreter_call]
  required :type, const: :code_interpreter_call
 
- # @!attribute container_id
- # The ID of the container used to run the code.
- #
- # @return [String, nil]
- optional :container_id, String
-
- # @!method initialize(id:, code:, results:, status:, container_id: nil, type: :code_interpreter_call)
+ # @!method initialize(id:, code:, container_id:, outputs:, status:, type: :code_interpreter_call)
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details.
  #
@@ -51,105 +53,72 @@ module OpenAI
  #
  # @param id [String] The unique ID of the code interpreter tool call.
  #
- # @param code [String] The code to run.
+ # @param code [String, nil] The code to run, or null if not available.
  #
- # @param results [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files>] The results of the code interpreter tool call.
+ # @param container_id [String] The ID of the container used to run the code.
  #
- # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call.
+ # @param outputs [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image>, nil] The outputs generated by the code interpreter, such as logs or images.
  #
- # @param container_id [String] The ID of the container used to run the code.
+ # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call.
  #
  # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`.
 
- # The output of a code interpreter tool.
- module Result
+ # The logs output from the code interpreter.
+ module Output
  extend OpenAI::Internal::Type::Union
 
  discriminator :type
 
- # The output of a code interpreter tool call that is text.
- variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs }
+ # The logs output from the code interpreter.
+ variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs }
 
- # The output of a code interpreter tool call that is a file.
- variant :files, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files }
+ # The image output from the code interpreter.
+ variant :image, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image }
 
  class Logs < OpenAI::Internal::Type::BaseModel
  # @!attribute logs
- # The logs of the code interpreter tool call.
+ # The logs output from the code interpreter.
  #
  # @return [String]
  required :logs, String
 
  # @!attribute type
- # The type of the code interpreter text output. Always `logs`.
+ # The type of the output. Always 'logs'.
  #
  # @return [Symbol, :logs]
  required :type, const: :logs
 
  # @!method initialize(logs:, type: :logs)
- # Some parameter documentations has been truncated, see
- # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for
- # more details.
+ # The logs output from the code interpreter.
  #
- # The output of a code interpreter tool call that is text.
+ # @param logs [String] The logs output from the code interpreter.
  #
- # @param logs [String] The logs of the code interpreter tool call.
- #
- # @param type [Symbol, :logs] The type of the code interpreter text output. Always `logs`.
+ # @param type [Symbol, :logs] The type of the output. Always 'logs'.
  end
 
- class Files < OpenAI::Internal::Type::BaseModel
- # @!attribute files
- #
- # @return [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File>]
- required :files,
- -> {
- OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]
- }
-
+ class Image < OpenAI::Internal::Type::BaseModel
  # @!attribute type
- # The type of the code interpreter file output. Always `files`.
+ # The type of the output. Always 'image'.
  #
- # @return [Symbol, :files]
- required :type, const: :files
+ # @return [Symbol, :image]
+ required :type, const: :image
 
- # @!method initialize(files:, type: :files)
- # Some parameter documentations has been truncated, see
- # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for
- # more details.
+ # @!attribute url
+ # The URL of the image output from the code interpreter.
  #
- # The output of a code interpreter tool call that is a file.
+ # @return [String]
+ required :url, String
+
+ # @!method initialize(url:, type: :image)
+ # The image output from the code interpreter.
  #
- # @param files [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File>]
+ # @param url [String] The URL of the image output from the code interpreter.
  #
- # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`.
-
- class File < OpenAI::Internal::Type::BaseModel
- # @!attribute file_id
- # The ID of the file.
- #
- # @return [String]
- required :file_id, String
-
- # @!attribute mime_type
- # The MIME type of the file.
- #
- # @return [String]
- required :mime_type, String
-
- # @!method initialize(file_id:, mime_type:)
- # Some parameter documentations has been truncated, see
- # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File}
- # for more details.
- #
- # @param file_id [String] The ID of the file.
- #
- # @param mime_type [String] The MIME type of the file.
- end
+ # @param type [Symbol, :image] The type of the output. Always 'image'.
  end
 
  # @!method self.variants
- # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)]
+ # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image)]
  end
 
  # The status of the code interpreter tool call.
@@ -159,8 +128,10 @@ module OpenAI
  extend OpenAI::Internal::Type::Enum
 
  IN_PROGRESS = :in_progress
- INTERPRETING = :interpreting
  COMPLETED = :completed
+ INCOMPLETE = :incomplete
+ INTERPRETING = :interpreting
+ FAILED = :failed
 
  # @!method self.values
  # @return [Array<Symbol>]
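Editor's note: the hunks above rename `results` to `outputs` (now nullable), add `container_id`, make `code` nullable, and extend the status enum on `ResponseCodeInterpreterToolCall`. A minimal consumption sketch, assuming `response` was returned by `client.responses.create` and that code interpreter items appear in `response.output`; accessor names mirror the attributes shown in the diff.

```ruby
response.output.each do |item|
  next unless item.is_a?(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall)

  puts "container=#{item.container_id} status=#{item.status}"
  # `code` may now be nil when it is not available.
  puts item.code unless item.code.nil?

  # `outputs` may be nil; each element is either a Logs or an Image variant.
  (item.outputs || []).each do |output|
    case output
    when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs
      puts output.logs
    when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image
      puts output.url
    end
  end
end
```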
@@ -10,30 +10,6 @@ module OpenAI
  extend OpenAI::Internal::Type::RequestParameters::Converter
  include OpenAI::Internal::Type::RequestParameters
 
- # @!attribute input
- # Text, image, or file inputs to the model, used to generate a response.
- #
- # Learn more:
- #
- # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
- # - [Image inputs](https://platform.openai.com/docs/guides/images)
- # - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
- # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
- # - [Function calling](https://platform.openai.com/docs/guides/function-calling)
- #
- # @return [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>]
- required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input }
-
- # @!attribute model
- # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
- # wide range of models with different capabilities, performance characteristics,
- # and price points. Refer to the
- # [model guide](https://platform.openai.com/docs/models) to browse and compare
- # available models.
- #
- # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel]
- required :model, union: -> { OpenAI::ResponsesModel }
-
  # @!attribute background
  # Whether to run the model response in the background.
  # [Learn more](https://platform.openai.com/docs/guides/background).
@@ -63,6 +39,20 @@ module OpenAI
  -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] },
  nil?: true
 
+ # @!attribute input
+ # Text, image, or file inputs to the model, used to generate a response.
+ #
+ # Learn more:
+ #
+ # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
+ # - [Image inputs](https://platform.openai.com/docs/guides/images)
+ # - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
+ # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
+ # - [Function calling](https://platform.openai.com/docs/guides/function-calling)
+ #
+ # @return [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil]
+ optional :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input }
+
  # @!attribute instructions
  # A system (or developer) message inserted into the model's context.
  #
@@ -92,6 +82,16 @@ module OpenAI
  # @return [Hash{Symbol=>String}, nil]
  optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
 
+ # @!attribute model
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
+ # wide range of models with different capabilities, performance characteristics,
+ # and price points. Refer to the
+ # [model guide](https://platform.openai.com/docs/models) to browse and compare
+ # available models.
+ #
+ # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel, nil]
+ optional :model, union: -> { OpenAI::ResponsesModel }
+
  # @!attribute parallel_tool_calls
  # Whether to allow the model to run tool calls in parallel.
  #
@@ -232,24 +232,24 @@ module OpenAI
  # @return [String, nil]
  optional :user, String
 
- # @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+ # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
  # Some parameter documentations has been truncated, see
  # {OpenAI::Models::Responses::ResponseCreateParams} for more details.
  #
- # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
- #
- # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
- #
  # @param background [Boolean, nil] Whether to run the model response in the background.
  #
  # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
  #
+ # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
+ #
  # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
  #
  # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
  #
  # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
  #
+ # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
+ #
  # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel.
  #
  # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to
@@ -76,6 +76,12 @@ module OpenAI
  # @return [String]
  required :file_id, String
 
+ # @!attribute filename
+ # The filename of the file cited.
+ #
+ # @return [String]
+ required :filename, String
+
  # @!attribute index
  # The index of the file in the list of files.
  #
@@ -88,11 +94,13 @@ module OpenAI
  # @return [Symbol, :file_citation]
  required :type, const: :file_citation
 
- # @!method initialize(file_id:, index:, type: :file_citation)
+ # @!method initialize(file_id:, filename:, index:, type: :file_citation)
  # A citation to a file.
  #
  # @param file_id [String] The ID of the file.
  #
+ # @param filename [String] The filename of the file cited.
+ #
  # @param index [Integer] The index of the file in the list of files.
  #
  # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`.
@@ -162,6 +170,12 @@ module OpenAI
  # @return [String]
  required :file_id, String
 
+ # @!attribute filename
+ # The filename of the container file cited.
+ #
+ # @return [String]
+ required :filename, String
+
  # @!attribute start_index
  # The index of the first character of the container file citation in the message.
  #
@@ -174,7 +188,7 @@ module OpenAI
  # @return [Symbol, :container_file_citation]
  required :type, const: :container_file_citation
 
- # @!method initialize(container_id:, end_index:, file_id:, start_index:, type: :container_file_citation)
+ # @!method initialize(container_id:, end_index:, file_id:, filename:, start_index:, type: :container_file_citation)
  # A citation for a container file used to generate a model response.
  #
  # @param container_id [String] The ID of the container file.
@@ -183,6 +197,8 @@ module OpenAI
  #
  # @param file_id [String] The ID of the file.
  #
+ # @param filename [String] The filename of the container file cited.
+ #
  # @param start_index [Integer] The index of the first character of the container file citation in the message.
  #
  # @param type [Symbol, :container_file_citation] The type of the container file citation. Always `container_file_citation`.
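Editor's note: both citation hunks above add a required `filename`. A short reading sketch, assuming `text_part` is an `OpenAI::Models::Responses::ResponseOutputText` taken from a response's output and that the annotation classes remain nested under `ResponseOutputText::Annotation` as in prior releases (not shown in this diff).

```ruby
text_part.annotations.each do |annotation|
  case annotation
  when OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation
    puts "cited #{annotation.filename} (#{annotation.file_id}) at index #{annotation.index}"
  when OpenAI::Models::Responses::ResponseOutputText::Annotation::ContainerFileCitation
    puts "container file #{annotation.filename} covers #{annotation.start_index}..#{annotation.end_index}"
  end
end
```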
@@ -24,11 +24,11 @@ module OpenAI
  # Emitted when the full audio transcript is completed.
  variant :"response.audio.transcript.done", -> { OpenAI::Responses::ResponseAudioTranscriptDoneEvent }
 
- # Emitted when a partial code snippet is added by the code interpreter.
+ # Emitted when a partial code snippet is streamed by the code interpreter.
  variant :"response.code_interpreter_call_code.delta",
  -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent }
 
- # Emitted when code snippet output is finalized by the code interpreter.
+ # Emitted when the code snippet is finalized by the code interpreter.
  variant :"response.code_interpreter_call_code.done",
  -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent }
 
@@ -9,7 +9,7 @@ module OpenAI
  #
  # Generates audio from the input text.
  #
- # @overload create(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {})
+ # @overload create(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, stream_format: nil, request_options: {})
  #
  # @param input [String] The text to generate audio for. The maximum length is 4096 characters.
  #
@@ -23,6 +23,8 @@ module OpenAI
  #
  # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
  #
+ # @param stream_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::StreamFormat] The format to stream the audio in. Supported formats are `sse` and `audio`. `sse
+ #
  # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
  #
  # @return [StringIO]
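Editor's note: `stream_format` is new on speech creation and, per the doc line above, accepts `sse` or `audio`. A minimal call sketch; the model and voice values are illustrative and `client` is assumed to be an `OpenAI::Client`.

```ruby
client = OpenAI::Client.new # assumes OPENAI_API_KEY is set in the environment

audio = client.audio.speech.create(
  input: "Hello from the 0.10.0 release.",
  model: "gpt-4o-mini-tts", # illustrative model name
  voice: "alloy",
  stream_format: :sse       # or :audio
)
```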
@@ -119,6 +119,14 @@ module OpenAI
  }
  }
  )
+ in {response_format: {type: :json_schema, json_schema: OpenAI::StructuredOutput::JsonSchemaConverter => model}}
+ parsed.fetch(:response_format).update(
+ json_schema: {
+ strict: true,
+ name: model.name.split("::").last,
+ schema: model.to_json_schema
+ }
+ )
  in {response_format: {type: :json_schema, json_schema: {schema: OpenAI::StructuredOutput::JsonSchemaConverter => model}}}
  parsed.dig(:response_format, :json_schema).store(:schema, model.to_json_schema)
  in {tools: Array => tools}
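Editor's note: the new `in` branch above lets a `JsonSchemaConverter` be passed directly as `json_schema`, with `name`, `strict: true`, and `schema` filled in from the class. A hedged sketch of what that enables; the `ContactInfo` class and its fields are illustrative, and the `OpenAI::BaseModel` base class is assumed from the gem's structured-output support rather than shown in this diff.

```ruby
class ContactInfo < OpenAI::BaseModel
  required :name, String
  required :email, String
end

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: :user, content: "Extract: Jane Doe <jane@example.com>"}],
  # The converter class itself can now stand in for the json_schema payload.
  response_format: {type: :json_schema, json_schema: ContactInfo}
)
```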
@@ -60,7 +60,7 @@ module OpenAI
  #
  # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
  #
- # @return [OpenAI::Internal::CursorPage<OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse>]
+ # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse]
  #
  # @see OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams
  def retrieve(fine_tuned_model_checkpoint, params = {})
@@ -69,7 +69,6 @@ module OpenAI
  method: :get,
  path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint],
  query: parsed,
- page: OpenAI::Internal::CursorPage,
  model: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse,
  options: options
  )
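Editor's note: the two hunks above drop the cursor page, so `permissions.retrieve` now returns the response model directly instead of a paginated collection. A minimal sketch; the checkpoint identifier is a placeholder.

```ruby
perms = client.fine_tuning.checkpoints.permissions.retrieve("ft:checkpoint-id")
# 0.9.0 returned an OpenAI::Internal::CursorPage; 0.10.0 returns the response object itself.
puts perms.class # => OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse
```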
@@ -23,22 +23,22 @@ module OpenAI
  # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
  # your own data as input for the model's response.
  #
- # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
- #
- # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
- #
- # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
+ # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
  #
  # @param background [Boolean, nil] Whether to run the model response in the background.
  #
  # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
  #
+ # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
+ #
  # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
  #
  # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
  #
  # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
  #
+ # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
+ #
  # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel.
  #
  # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to
@@ -70,7 +70,7 @@ module OpenAI
  # @return [OpenAI::Models::Responses::Response]
  #
  # @see OpenAI::Models::Responses::ResponseCreateParams
- def create(params)
+ def create(params = {})
  parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params)
  if parsed[:stream]
  message = "Please use `#stream_raw` for the streaming use case."
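Editor's note: `create` (and `stream_raw` below) now default `params` to `{}`, and `input`/`model` are optional at the SDK level. A sketch of the unchanged happy path plus a follow-up call that leans on `previous_response_id`; whether the API itself accepts omitted fields in a given request (for example, when using a server-side prompt) is outside this diff.

```ruby
response = client.responses.create(model: "gpt-4o", input: "Say hello.")

# The SDK no longer forces `input:`/`model:` keywords on every call site.
follow_up = client.responses.create(
  model: "gpt-4o",
  previous_response_id: response.id,
  input: "Now in French, please."
)
```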
@@ -183,22 +183,22 @@ module OpenAI
  # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
  # your own data as input for the model's response.
  #
- # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
- #
- # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
- #
- # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
+ # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
  #
  # @param background [Boolean, nil] Whether to run the model response in the background.
  #
  # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
  #
+ # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
+ #
  # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
  #
  # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
  #
  # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
  #
+ # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
+ #
  # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel.
  #
  # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to
@@ -230,7 +230,7 @@ module OpenAI
  # @return [OpenAI::Internal::Stream<OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent>]
  #
  # @see OpenAI::Models::Responses::ResponseCreateParams
- def stream_raw(params)
+ def stream_raw(params = {})
  parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
  message = "Please use `#create` for the non-streaming use case."
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module OpenAI
- VERSION = "0.9.0"
+ VERSION = "0.10.0"
  end
@@ -8,6 +8,22 @@ module OpenAI
  end
 
  class ConversionError < OpenAI::Errors::Error
+ sig { returns(T.nilable(StandardError)) }
+ def cause
+ end
+
+ # @api private
+ sig do
+ params(
+ on: T::Class[StandardError],
+ method: Symbol,
+ target: T.anything,
+ value: T.anything,
+ cause: T.nilable(StandardError)
+ ).returns(T.attached_class)
+ end
+ def self.new(on:, method:, target:, value:, cause: nil)
+ end
  end
 
  class APIError < OpenAI::Errors::Error
@@ -22,6 +22,8 @@ module OpenAI
 
  class << self
  # @api private
+ #
+ # Coerce value to Boolean if possible, otherwise return the original value.
  sig do
  override
  .params(
@@ -15,12 +15,14 @@ module OpenAI
  CoerceState =
  T.type_alias do
  {
- strictness: T.any(T::Boolean, Symbol),
+ translate_names: T::Boolean,
+ strictness: T::Boolean,
  exactness: {
  yes: Integer,
  no: Integer,
  maybe: Integer
  },
+ error: T::Class[StandardError],
  branched: Integer
  }
  end
@@ -84,6 +86,15 @@ module OpenAI
  def self.type_info(spec)
  end
 
+ # @api private
+ sig do
+ params(translate_names: T::Boolean).returns(
+ OpenAI::Internal::Type::Converter::CoerceState
+ )
+ end
+ def self.new_coerce_state(translate_names: true)
+ end
+
  # @api private
  #
  # Based on `target`, transform `value` into `target`, to the extent possible:
@@ -105,14 +116,11 @@ module OpenAI
  def self.coerce(
  target,
  value,
- # The `strictness` is one of `true`, `false`, or `:strong`. This informs the
- # coercion strategy when we have to decide between multiple possible conversion
- # targets:
+ # The `strictness` is one of `true`, `false`. This informs the coercion strategy
+ # when we have to decide between multiple possible conversion targets:
  #
  # - `true`: the conversion must be exact, with minimum coercion.
  # - `false`: the conversion can be approximate, with some coercion.
- # - `:strong`: the conversion must be exact, with no coercion, and raise an error
- # if not possible.
  #
  # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For
  # any given conversion attempt, the exactness will be updated based on how closely
  # - `no`: the value cannot be converted to the target type.
  #
  # See implementation below for more details.
- state: {
- strictness: true,
- exactness: {
- yes: 0,
- no: 0,
- maybe: 0
- },
- branched: 0
- }
+ state: OpenAI::Internal::Type::Converter.new_coerce_state
  )
  end
 
@@ -78,6 +78,11 @@ module OpenAI
  end
 
  # @api private
+ #
+ # Tries to efficiently coerce the given value to one of the known variants.
+ #
+ # If the value cannot match any of the known variants, the coercion is considered
+ # non-viable and returns the original value.
  sig do
  override
  .params(
@@ -22,6 +22,8 @@ module OpenAI
 
  class << self
  # @api private
+ #
+ # No coercion needed for Unknown type.
  sig do
  override
  .params(