openai 0.8.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +41 -0
- data/README.md +115 -4
- data/lib/openai/errors.rb +22 -0
- data/lib/openai/internal/type/array_of.rb +6 -1
- data/lib/openai/internal/type/base_model.rb +76 -24
- data/lib/openai/internal/type/boolean.rb +7 -1
- data/lib/openai/internal/type/converter.rb +42 -34
- data/lib/openai/internal/type/enum.rb +10 -2
- data/lib/openai/internal/type/file_input.rb +6 -1
- data/lib/openai/internal/type/hash_of.rb +6 -1
- data/lib/openai/internal/type/union.rb +12 -7
- data/lib/openai/internal/type/unknown.rb +7 -1
- data/lib/openai/models/audio/speech_create_params.rb +23 -2
- data/lib/openai/models/audio/transcription.rb +118 -1
- data/lib/openai/models/audio/transcription_text_done_event.rb +80 -1
- data/lib/openai/models/audio/transcription_verbose.rb +31 -1
- data/lib/openai/models/chat/chat_completion.rb +1 -0
- data/lib/openai/models/chat/chat_completion_chunk.rb +1 -0
- data/lib/openai/models/chat/completion_create_params.rb +1 -0
- data/lib/openai/models/fine_tuning/job_create_params.rb +4 -2
- data/lib/openai/models/image_edit_params.rb +35 -1
- data/lib/openai/models/responses/response.rb +41 -6
- data/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +17 -8
- data/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +14 -10
- data/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +11 -10
- data/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +11 -10
- data/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +11 -10
- data/lib/openai/models/responses/response_code_interpreter_tool_call.rb +49 -78
- data/lib/openai/models/responses/response_create_params.rb +41 -32
- data/lib/openai/models/responses/response_output_text.rb +18 -2
- data/lib/openai/models/responses/response_prompt.rb +63 -0
- data/lib/openai/models/responses/response_stream_event.rb +2 -2
- data/lib/openai/resources/audio/speech.rb +3 -1
- data/lib/openai/resources/chat/completions.rb +8 -0
- data/lib/openai/resources/fine_tuning/jobs.rb +2 -2
- data/lib/openai/resources/images.rb +5 -1
- data/lib/openai/resources/responses.rb +18 -14
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +1 -0
- data/rbi/openai/errors.rbi +16 -0
- data/rbi/openai/internal/type/boolean.rbi +2 -0
- data/rbi/openai/internal/type/converter.rbi +15 -15
- data/rbi/openai/internal/type/union.rbi +5 -0
- data/rbi/openai/internal/type/unknown.rbi +2 -0
- data/rbi/openai/models/audio/speech_create_params.rbi +59 -2
- data/rbi/openai/models/audio/transcription.rbi +213 -3
- data/rbi/openai/models/audio/transcription_text_done_event.rbi +146 -1
- data/rbi/openai/models/audio/transcription_verbose.rbi +47 -0
- data/rbi/openai/models/chat/chat_completion.rbi +5 -0
- data/rbi/openai/models/chat/chat_completion_chunk.rbi +5 -0
- data/rbi/openai/models/chat/completion_create_params.rbi +5 -0
- data/rbi/openai/models/fine_tuning/job_create_params.rbi +8 -4
- data/rbi/openai/models/image_edit_params.rbi +51 -0
- data/rbi/openai/models/responses/response.rbi +66 -7
- data/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +17 -7
- data/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +13 -5
- data/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +13 -21
- data/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +13 -21
- data/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +13 -21
- data/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +83 -125
- data/rbi/openai/models/responses/response_create_params.rbi +107 -64
- data/rbi/openai/models/responses/response_output_text.rbi +26 -4
- data/rbi/openai/models/responses/response_prompt.rbi +120 -0
- data/rbi/openai/resources/audio/speech.rbi +6 -1
- data/rbi/openai/resources/fine_tuning/jobs.rbi +6 -4
- data/rbi/openai/resources/images.rbi +11 -0
- data/rbi/openai/resources/responses.rbi +56 -50
- data/sig/openai/errors.rbs +9 -0
- data/sig/openai/internal/type/converter.rbs +7 -1
- data/sig/openai/models/audio/speech_create_params.rbs +21 -1
- data/sig/openai/models/audio/transcription.rbs +95 -3
- data/sig/openai/models/audio/transcription_text_done_event.rbs +72 -2
- data/sig/openai/models/audio/transcription_verbose.rbs +21 -0
- data/sig/openai/models/chat/chat_completion.rbs +2 -1
- data/sig/openai/models/chat/chat_completion_chunk.rbs +2 -1
- data/sig/openai/models/chat/completion_create_params.rbs +2 -1
- data/sig/openai/models/image_edit_params.rbs +22 -0
- data/sig/openai/models/responses/response.rbs +22 -5
- data/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +5 -0
- data/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +5 -0
- data/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +4 -4
- data/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +4 -4
- data/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +4 -4
- data/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +31 -52
- data/sig/openai/models/responses/response_create_params.rbs +25 -11
- data/sig/openai/models/responses/response_output_text.rbs +15 -1
- data/sig/openai/models/responses/response_prompt.rbs +44 -0
- data/sig/openai/resources/audio/speech.rbs +1 -0
- data/sig/openai/resources/images.rbs +2 -0
- data/sig/openai/resources/responses.rbs +6 -4
- metadata +5 -2
data/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb

@@ -5,19 +5,26 @@ module OpenAI
     module Responses
       class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel
         # @!attribute delta
-        # The partial code snippet
+        # The partial code snippet being streamed by the code interpreter.
         #
         # @return [String]
         required :delta, String

+        # @!attribute item_id
+        # The unique identifier of the code interpreter tool call item.
+        #
+        # @return [String]
+        required :item_id, String
+
         # @!attribute output_index
-        # The index of the output item
+        # The index of the output item in the response for which the code is being
+        # streamed.
         #
         # @return [Integer]
         required :output_index, Integer

         # @!attribute sequence_number
-        # The sequence number of this event.
+        # The sequence number of this event, used to order streaming events.
         #
         # @return [Integer]
         required :sequence_number, Integer
@@ -28,18 +35,20 @@ module OpenAI
         # @return [Symbol, :"response.code_interpreter_call_code.delta"]
         required :type, const: :"response.code_interpreter_call_code.delta"

-        # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.delta")
+        # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.delta")
         # Some parameter documentations has been truncated, see
         # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more
         # details.
         #
-        # Emitted when a partial code snippet is
+        # Emitted when a partial code snippet is streamed by the code interpreter.
+        #
+        # @param delta [String] The partial code snippet being streamed by the code interpreter.
         #
-        # @param
+        # @param item_id [String] The unique identifier of the code interpreter tool call item.
         #
-        # @param output_index [Integer] The index of the output item
+        # @param output_index [Integer] The index of the output item in the response for which the code is being streame
         #
-        # @param sequence_number [Integer] The sequence number of this event.
+        # @param sequence_number [Integer] The sequence number of this event, used to order streaming events.
         #
         # @param type [Symbol, :"response.code_interpreter_call_code.delta"] The type of the event. Always `response.code_interpreter_call_code.delta`.
       end
data/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb

@@ -10,14 +10,20 @@ module OpenAI
         # @return [String]
         required :code, String

+        # @!attribute item_id
+        # The unique identifier of the code interpreter tool call item.
+        #
+        # @return [String]
+        required :item_id, String
+
         # @!attribute output_index
-        # The index of the output item
+        # The index of the output item in the response for which the code is finalized.
         #
         # @return [Integer]
         required :output_index, Integer

         # @!attribute sequence_number
-        # The sequence number of this event.
+        # The sequence number of this event, used to order streaming events.
         #
         # @return [Integer]
         required :sequence_number, Integer
@@ -28,18 +34,16 @@ module OpenAI
         # @return [Symbol, :"response.code_interpreter_call_code.done"]
         required :type, const: :"response.code_interpreter_call_code.done"

-        # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.done")
-        #
-        # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more
-        # details.
-        #
-        # Emitted when code snippet output is finalized by the code interpreter.
+        # @!method initialize(code:, item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.done")
+        # Emitted when the code snippet is finalized by the code interpreter.
         #
         # @param code [String] The final code snippet output by the code interpreter.
         #
-        # @param
+        # @param item_id [String] The unique identifier of the code interpreter tool call item.
+        #
+        # @param output_index [Integer] The index of the output item in the response for which the code is finalized.
         #
-        # @param sequence_number [Integer] The sequence number of this event.
+        # @param sequence_number [Integer] The sequence number of this event, used to order streaming events.
         #
         # @param type [Symbol, :"response.code_interpreter_call_code.done"] The type of the event. Always `response.code_interpreter_call_code.done`.
       end
data/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb

@@ -4,20 +4,21 @@ module OpenAI
   module Models
     module Responses
       class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel
-        # @!attribute
-        #
+        # @!attribute item_id
+        # The unique identifier of the code interpreter tool call item.
         #
-        # @return [
-        required :
+        # @return [String]
+        required :item_id, String

         # @!attribute output_index
-        # The index of the output item
+        # The index of the output item in the response for which the code interpreter call
+        # is completed.
         #
         # @return [Integer]
         required :output_index, Integer

         # @!attribute sequence_number
-        # The sequence number of this event.
+        # The sequence number of this event, used to order streaming events.
         #
         # @return [Integer]
         required :sequence_number, Integer
@@ -28,18 +29,18 @@ module OpenAI
         # @return [Symbol, :"response.code_interpreter_call.completed"]
         required :type, const: :"response.code_interpreter_call.completed"

-        # @!method initialize(
+        # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call.completed")
         # Some parameter documentations has been truncated, see
         # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent} for more
         # details.
         #
         # Emitted when the code interpreter call is completed.
         #
-        # @param
+        # @param item_id [String] The unique identifier of the code interpreter tool call item.
         #
-        # @param output_index [Integer] The index of the output item
+        # @param output_index [Integer] The index of the output item in the response for which the code interpreter call
         #
-        # @param sequence_number [Integer] The sequence number of this event.
+        # @param sequence_number [Integer] The sequence number of this event, used to order streaming events.
         #
         # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`.
       end
data/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb

@@ -4,20 +4,21 @@ module OpenAI
   module Models
     module Responses
      class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel
-        # @!attribute
-        #
+        # @!attribute item_id
+        # The unique identifier of the code interpreter tool call item.
         #
-        # @return [
-        required :
+        # @return [String]
+        required :item_id, String

         # @!attribute output_index
-        # The index of the output item
+        # The index of the output item in the response for which the code interpreter call
+        # is in progress.
         #
         # @return [Integer]
         required :output_index, Integer

         # @!attribute sequence_number
-        # The sequence number of this event.
+        # The sequence number of this event, used to order streaming events.
         #
         # @return [Integer]
         required :sequence_number, Integer
@@ -28,18 +29,18 @@ module OpenAI
         # @return [Symbol, :"response.code_interpreter_call.in_progress"]
         required :type, const: :"response.code_interpreter_call.in_progress"

-        # @!method initialize(
+        # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call.in_progress")
         # Some parameter documentations has been truncated, see
         # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent} for more
         # details.
         #
         # Emitted when a code interpreter call is in progress.
         #
-        # @param
+        # @param item_id [String] The unique identifier of the code interpreter tool call item.
         #
-        # @param output_index [Integer] The index of the output item
+        # @param output_index [Integer] The index of the output item in the response for which the code interpreter call
         #
-        # @param sequence_number [Integer] The sequence number of this event.
+        # @param sequence_number [Integer] The sequence number of this event, used to order streaming events.
         #
         # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`.
       end
data/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb

@@ -4,20 +4,21 @@ module OpenAI
   module Models
     module Responses
       class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel
-        # @!attribute
-        #
+        # @!attribute item_id
+        # The unique identifier of the code interpreter tool call item.
         #
-        # @return [
-        required :
+        # @return [String]
+        required :item_id, String

         # @!attribute output_index
-        # The index of the output item
+        # The index of the output item in the response for which the code interpreter is
+        # interpreting code.
         #
         # @return [Integer]
         required :output_index, Integer

         # @!attribute sequence_number
-        # The sequence number of this event.
+        # The sequence number of this event, used to order streaming events.
         #
         # @return [Integer]
         required :sequence_number, Integer
@@ -28,18 +29,18 @@ module OpenAI
         # @return [Symbol, :"response.code_interpreter_call.interpreting"]
         required :type, const: :"response.code_interpreter_call.interpreting"

-        # @!method initialize(
+        # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call.interpreting")
         # Some parameter documentations has been truncated, see
         # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent} for
         # more details.
         #
         # Emitted when the code interpreter is actively interpreting the code snippet.
         #
-        # @param
+        # @param item_id [String] The unique identifier of the code interpreter tool call item.
         #
-        # @param output_index [Integer] The index of the output item
+        # @param output_index [Integer] The index of the output item in the response for which the code interpreter is i
         #
-        # @param sequence_number [Integer] The sequence number of this event.
+        # @param sequence_number [Integer] The sequence number of this event, used to order streaming events.
         #
         # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`.
       end
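Taken together, these five streaming-event models now carry an `item_id`, so a consumer can correlate streamed code deltas with the tool call item they belong to. A minimal sketch of that pattern, relying only on the event classes and attributes shown above (how the events are obtained from the stream is left out and assumed):

```ruby
# Hypothetical helper: accumulate streamed code keyed by the new item_id field.
buffers = Hash.new { |hash, key| hash[key] = +"" }

handle_event = lambda do |event|
  case event
  when OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent
    # Each delta belongs to a specific code interpreter tool call item.
    buffers[event.item_id] << event.delta
  when OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent
    # The done event carries the finalized code for the same item.
    buffers[event.item_id] = event.code
  end
end
```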
data/lib/openai/models/responses/response_code_interpreter_tool_call.rb

@@ -11,19 +11,27 @@ module OpenAI
         required :id, String

         # @!attribute code
-        # The code to run.
+        # The code to run, or null if not available.
+        #
+        # @return [String, nil]
+        required :code, String, nil?: true
+
+        # @!attribute container_id
+        # The ID of the container used to run the code.
         #
         # @return [String]
-        required :
+        required :container_id, String

-        # @!attribute
-        # The
+        # @!attribute outputs
+        # The outputs generated by the code interpreter, such as logs or images. Can be
+        # null if no outputs are available.
         #
-        # @return [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::
-        required :
+        # @return [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image>, nil]
+        required :outputs,
                  -> {
-                   OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::
-                 }
+                   OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Output]
+                 },
+                 nil?: true

         # @!attribute status
         # The status of the code interpreter tool call.
@@ -37,13 +45,7 @@ module OpenAI
         # @return [Symbol, :code_interpreter_call]
         required :type, const: :code_interpreter_call

-        # @!
-        # The ID of the container used to run the code.
-        #
-        # @return [String, nil]
-        optional :container_id, String
-
-        # @!method initialize(id:, code:, results:, status:, container_id: nil, type: :code_interpreter_call)
+        # @!method initialize(id:, code:, container_id:, outputs:, status:, type: :code_interpreter_call)
         # Some parameter documentations has been truncated, see
         # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details.
         #
@@ -51,105 +53,72 @@ module OpenAI
         #
         # @param id [String] The unique ID of the code interpreter tool call.
         #
-        # @param code [String] The code to run.
+        # @param code [String, nil] The code to run, or null if not available.
         #
-        # @param
+        # @param container_id [String] The ID of the container used to run the code.
         #
-        # @param
+        # @param outputs [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image>, nil] The outputs generated by the code interpreter, such as logs or images.
         #
-        # @param
+        # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call.
         #
         # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`.

-        # The output
-        module
+        # The logs output from the code interpreter.
+        module Output
           extend OpenAI::Internal::Type::Union

           discriminator :type

-          # The output
-          variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::
+          # The logs output from the code interpreter.
+          variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs }

-          # The output
-          variant :
+          # The image output from the code interpreter.
+          variant :image, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image }

           class Logs < OpenAI::Internal::Type::BaseModel
             # @!attribute logs
-            # The logs
+            # The logs output from the code interpreter.
             #
             # @return [String]
             required :logs, String

             # @!attribute type
-            # The type of the
+            # The type of the output. Always 'logs'.
             #
             # @return [Symbol, :logs]
             required :type, const: :logs

             # @!method initialize(logs:, type: :logs)
-            #
-            # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for
-            # more details.
+            # The logs output from the code interpreter.
             #
-            # The output
+            # @param logs [String] The logs output from the code interpreter.
             #
-            # @param
-            #
-            # @param type [Symbol, :logs] The type of the code interpreter text output. Always `logs`.
+            # @param type [Symbol, :logs] The type of the output. Always 'logs'.
           end

-          class
-            # @!attribute files
-            #
-            # @return [Array<OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File>]
-            required :files,
-                     -> {
-                       OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]
-                     }
-
+          class Image < OpenAI::Internal::Type::BaseModel
             # @!attribute type
-            # The type of the
+            # The type of the output. Always 'image'.
             #
-            # @return [Symbol, :
-            required :type, const: :
+            # @return [Symbol, :image]
+            required :type, const: :image

-            # @!
-            #
-            # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for
-            # more details.
+            # @!attribute url
+            # The URL of the image output from the code interpreter.
             #
-            #
+            # @return [String]
+            required :url, String
+
+            # @!method initialize(url:, type: :image)
+            # The image output from the code interpreter.
             #
-            # @param
+            # @param url [String] The URL of the image output from the code interpreter.
             #
-            # @param type [Symbol, :
-
-            class File < OpenAI::Internal::Type::BaseModel
-              # @!attribute file_id
-              # The ID of the file.
-              #
-              # @return [String]
-              required :file_id, String
-
-              # @!attribute mime_type
-              # The MIME type of the file.
-              #
-              # @return [String]
-              required :mime_type, String
-
-              # @!method initialize(file_id:, mime_type:)
-              # Some parameter documentations has been truncated, see
-              # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File}
-              # for more details.
-              #
-              # @param file_id [String] The ID of the file.
-              #
-              # @param mime_type [String] The MIME type of the file.
-            end
+            # @param type [Symbol, :image] The type of the output. Always 'image'.
           end

           # @!method self.variants
-          # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::
+          # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image)]
         end

         # The status of the code interpreter tool call.
@@ -159,8 +128,10 @@ module OpenAI
           extend OpenAI::Internal::Type::Enum

           IN_PROGRESS = :in_progress
-          INTERPRETING = :interpreting
           COMPLETED = :completed
+          INCOMPLETE = :incomplete
+          INTERPRETING = :interpreting
+          FAILED = :failed

           # @!method self.values
           # @return [Array<Symbol>]
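Because the old `results` attribute (with its `Files` variant) is replaced by a nullable `outputs` array of `Logs` and `Image` variants, code that inspected a finished tool call now branches on the new classes. A rough sketch, assuming `tool_call` is an already-parsed `ResponseCodeInterpreterToolCall` (the printing is illustrative only; every attribute used is shown in the diff above):

```ruby
# Illustrative only: walk the new nullable outputs union on a parsed tool call.
def print_code_interpreter_outputs(tool_call)
  puts "status: #{tool_call.status}, container: #{tool_call.container_id}"
  puts tool_call.code if tool_call.code # code is now nullable

  Array(tool_call.outputs).each do |output| # outputs can be nil
    case output
    when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs
      puts "logs: #{output.logs}"
    when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image
      puts "image: #{output.url}"
    end
  end
end
```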
data/lib/openai/models/responses/response_create_params.rb

@@ -10,30 +10,6 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters

-       # @!attribute input
-       # Text, image, or file inputs to the model, used to generate a response.
-       #
-       # Learn more:
-       #
-       # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-       # - [Image inputs](https://platform.openai.com/docs/guides/images)
-       # - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
-       # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
-       # - [Function calling](https://platform.openai.com/docs/guides/function-calling)
-       #
-       # @return [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>]
-       required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input }
-
-       # @!attribute model
-       # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
-       # wide range of models with different capabilities, performance characteristics,
-       # and price points. Refer to the
-       # [model guide](https://platform.openai.com/docs/models) to browse and compare
-       # available models.
-       #
-       # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel]
-       required :model, union: -> { OpenAI::ResponsesModel }
-
        # @!attribute background
        # Whether to run the model response in the background.
        # [Learn more](https://platform.openai.com/docs/guides/background).
@@ -63,9 +39,22 @@ module OpenAI
                 -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] },
                 nil?: true

+       # @!attribute input
+       # Text, image, or file inputs to the model, used to generate a response.
+       #
+       # Learn more:
+       #
+       # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
+       # - [Image inputs](https://platform.openai.com/docs/guides/images)
+       # - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
+       # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
+       # - [Function calling](https://platform.openai.com/docs/guides/function-calling)
+       #
+       # @return [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil]
+       optional :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input }
+
        # @!attribute instructions
-       #
-       # context.
+       # A system (or developer) message inserted into the model's context.
        #
        # When using along with `previous_response_id`, the instructions from a previous
        # response will not be carried over to the next response. This makes it simple to
@@ -93,6 +82,16 @@ module OpenAI
        # @return [Hash{Symbol=>String}, nil]
        optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true

+       # @!attribute model
+       # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
+       # wide range of models with different capabilities, performance characteristics,
+       # and price points. Refer to the
+       # [model guide](https://platform.openai.com/docs/models) to browse and compare
+       # available models.
+       #
+       # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel, nil]
+       optional :model, union: -> { OpenAI::ResponsesModel }
+
        # @!attribute parallel_tool_calls
        # Whether to allow the model to run tool calls in parallel.
        #
@@ -107,6 +106,13 @@ module OpenAI
        # @return [String, nil]
        optional :previous_response_id, String, nil?: true

+       # @!attribute prompt
+       # Reference to a prompt template and its variables.
+       # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
+       #
+       # @return [OpenAI::Models::Responses::ResponsePrompt, nil]
+       optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true
+
        # @!attribute reasoning
        # **o-series models only**
        #
@@ -226,28 +232,30 @@ module OpenAI
        # @return [String, nil]
        optional :user, String

-       # @!method initialize(
+       # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
        # Some parameter documentations has been truncated, see
        # {OpenAI::Models::Responses::ResponseCreateParams} for more details.
        #
-       # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
-       #
-       # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
-       #
        # @param background [Boolean, nil] Whether to run the model response in the background.
        #
        # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
        #
-       # @param
+       # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
+       #
+       # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
        #
        # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
        #
        # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
        #
+       # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI
+       #
        # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel.
        #
        # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to
        #
+       # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables.
+       #
        # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only**
        #
        # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
@@ -317,6 +325,7 @@ module OpenAI
          AUTO = :auto
          DEFAULT = :default
          FLEX = :flex
+         SCALE = :scale

          # @!method self.values
          # @return [Array<Symbol>]
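Because `input` and `model` moved from required to optional and a nullable `prompt` parameter was added, a Responses request can now be driven by a stored prompt template. A hedged sketch of what that call might look like (the client construction and `responses.create` method follow the gem's documented resource layout; the `prompt` hash keys `id` and `variables` are assumptions, since `ResponsePrompt`'s fields are not shown in this excerpt):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# `input:` and `model:` are now optional keyword arguments, so the request can
# reference a reusable prompt template instead. The keys below are assumed.
response = client.responses.create(
  prompt: {
    id: "pmpt_123",                      # hypothetical prompt template ID
    variables: {"topic" => "streaming"}  # hypothetical template variables
  }
)

puts response.id
```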