openai 0.37.0 → 0.38.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/README.md +1 -1
- data/lib/openai/models/responses/response_compact_params.rb +30 -30
- data/lib/openai/resources/responses.rb +4 -4
- data/lib/openai/version.rb +1 -1
- data/rbi/openai/models/responses/response_compact_params.rbi +62 -62
- data/rbi/openai/resources/responses.rbi +11 -11
- data/sig/openai/models/responses/response_compact_params.rbs +16 -16
- data/sig/openai/resources/responses.rbs +1 -1
- metadata +2 -2
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ea6434eae9ddaa1fdf22d4069a4ae253720b292816054e4a8a4076a4a5fbde28
+  data.tar.gz: 291feafd4b0164c5e6020e9a6cd85c70333c65ac0753bb39677e760e5410d487
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c515391408af1a74b506b750d7c78ae7b56bdb9cec40e21fe6d80d6d5c923941d476a529c6d51c87b2e1abbeae9ae78e5d4216520eed3dc3aa2d4403384a0bfb
+  data.tar.gz: 7866eb758cd765d796fe156904c93f005778ce1fad88d5fc0011c33ac5786c20a57ecbfc18bd39f16027e533840cde09966ca387b80fd41dfe205db864ef7aa9
data/CHANGELOG.md
CHANGED

@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.38.0 (2025-12-08)
+
+Full Changelog: [v0.37.0...v0.38.0](https://github.com/openai/openai-ruby/compare/v0.37.0...v0.38.0)
+
+### Features
+
+* **api:** make model required for the responses/compact endpoint ([94ad657](https://github.com/openai/openai-ruby/commit/94ad657d3824838dbb1517bb6aa43341a0581102))
+
 ## 0.37.0 (2025-12-04)
 
 Full Changelog: [v0.36.1...v0.37.0](https://github.com/openai/openai-ruby/compare/v0.36.1...v0.37.0)
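The one functional change in this release is that `model` is now a required keyword for the responses/compact endpoint. A minimal migration sketch, assuming a standard client setup; the model name and response ID below are illustrative, not taken from this diff:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Under 0.37.0 `model:` could be omitted; as of 0.38.0 the keyword must be
# passed explicitly (the params model declares it `required ..., nil?: true`).
compacted = client.responses.compact(
  model: :"gpt-5.1",                 # enum symbol or a plain String model ID
  previous_response_id: "resp_123"   # hypothetical ID, for illustration only
)
# compacted is an OpenAI::Models::Responses::CompactedResponse per the @return tag below
```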
data/lib/openai/models/responses/response_compact_params.rb
CHANGED

@@ -8,6 +8,16 @@ module OpenAI
 extend OpenAI::Internal::Type::RequestParameters::Converter
 include OpenAI::Internal::Type::RequestParameters
 
+# @!attribute model
+# Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
+# wide range of models with different capabilities, performance characteristics,
+# and price points. Refer to the
+# [model guide](https://platform.openai.com/docs/models) to browse and compare
+# available models.
+#
+# @return [Symbol, String, OpenAI::Models::Responses::ResponseCompactParams::Model, nil]
+required :model, union: -> { OpenAI::Responses::ResponseCompactParams::Model }, nil?: true
+
 # @!attribute input
 # Text, image, or file inputs to the model, used to generate a response
 #
@@ -23,16 +33,6 @@ module OpenAI
 # @return [String, nil]
 optional :instructions, String, nil?: true
 
-# @!attribute model
-# Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
-# wide range of models with different capabilities, performance characteristics,
-# and price points. Refer to the
-# [model guide](https://platform.openai.com/docs/models) to browse and compare
-# available models.
-#
-# @return [Symbol, String, OpenAI::Models::Responses::ResponseCompactParams::Model, nil]
-optional :model, union: -> { OpenAI::Responses::ResponseCompactParams::Model }, nil?: true
-
 # @!attribute previous_response_id
 # The unique ID of the previous response to the model. Use this to create
 # multi-turn conversations. Learn more about
@@ -42,37 +42,20 @@ module OpenAI
 # @return [String, nil]
 optional :previous_response_id, String, nil?: true
 
-# @!method initialize(input: nil, instructions: nil,
+# @!method initialize(model:, input: nil, instructions: nil, previous_response_id: nil, request_options: {})
 # Some parameter documentations has been truncated, see
 # {OpenAI::Models::Responses::ResponseCompactParams} for more details.
 #
+# @param model [Symbol, String, OpenAI::Models::Responses::ResponseCompactParams::Model, nil] Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a wi
+#
 # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCompactionItemParam, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ShellCall, OpenAI::Models::Responses::ResponseInputItem::ShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCall, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil] Text, image, or file inputs to the model, used to generate a response
 #
 # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
 #
-# @param model [Symbol, String, OpenAI::Models::Responses::ResponseCompactParams::Model, nil] Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a wi
-#
 # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to create multi-tu
 #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 
-# Text, image, or file inputs to the model, used to generate a response
-module Input
-extend OpenAI::Internal::Type::Union
-
-# A text input to the model, equivalent to a text input with the `user` role.
-variant String
-
-variant -> { OpenAI::Models::Responses::ResponseCompactParams::Input::ResponseInputItemArray }
-
-# @!method self.variants
-# @return [Array(String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCompactionItemParam, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ShellCall, OpenAI::Models::Responses::ResponseInputItem::ShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCall, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>)]
-
-# @type [OpenAI::Internal::Type::Converter]
-ResponseInputItemArray =
-OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Responses::ResponseInputItem }]
-end
-
 # Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
 # wide range of models with different capabilities, performance characteristics,
 # and price points. Refer to the
@@ -338,6 +321,23 @@ module OpenAI
 
 # @!endgroup
 end
+
+# Text, image, or file inputs to the model, used to generate a response
+module Input
+extend OpenAI::Internal::Type::Union
+
+# A text input to the model, equivalent to a text input with the `user` role.
+variant String
+
+variant -> { OpenAI::Models::Responses::ResponseCompactParams::Input::ResponseInputItemArray }
+
+# @!method self.variants
+# @return [Array(String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCompactionItemParam, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ShellCall, OpenAI::Models::Responses::ResponseInputItem::ShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCall, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>)]
+
+# @type [OpenAI::Internal::Type::Converter]
+ResponseInputItemArray =
+OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Responses::ResponseInputItem }]
+end
 end
 end
 end
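Note that the attribute moved from `optional` to `required` while keeping `nil?: true`: the `model` key must now always be supplied when the params are built, but an explicit `nil` stays type-valid. A small sketch of what that means for callers, assuming the generated `@!method initialize` shown above; the values are made up for illustration:

```ruby
# Both calls satisfy the 0.38.0 signature: the `model` keyword is present,
# either as a model identifier or as an explicit nil.
OpenAI::Models::Responses::ResponseCompactParams.new(
  model: "gpt-5.1",
  previous_response_id: "resp_123" # hypothetical ID
)

OpenAI::Models::Responses::ResponseCompactParams.new(model: nil)

# Omitting the keyword entirely, as 0.37.0 permitted, no longer matches the
# declared signature.
```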
data/lib/openai/resources/responses.rb
CHANGED

@@ -464,14 +464,14 @@ module OpenAI
 #
 # Compact conversation
 #
-# @overload compact(input: nil, instructions: nil,
+# @overload compact(model:, input: nil, instructions: nil, previous_response_id: nil, request_options: {})
+#
+# @param model [Symbol, String, OpenAI::Models::Responses::ResponseCompactParams::Model, nil] Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a wi
 #
 # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCompactionItemParam, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ShellCall, OpenAI::Models::Responses::ResponseInputItem::ShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCall, OpenAI::Models::Responses::ResponseInputItem::ApplyPatchCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil] Text, image, or file inputs to the model, used to generate a response
 #
 # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
 #
-# @param model [Symbol, String, OpenAI::Models::Responses::ResponseCompactParams::Model, nil] Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a wi
-#
 # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to create multi-tu
 #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
@@ -479,7 +479,7 @@
 # @return [OpenAI::Models::Responses::CompactedResponse]
 #
 # @see OpenAI::Models::Responses::ResponseCompactParams
-def compact(params
+def compact(params)
 parsed, options = OpenAI::Responses::ResponseCompactParams.dump_request(params)
 @client.request(
 method: :post,
data/rbi/openai/models/responses/response_compact_params.rbi
CHANGED

@@ -15,21 +15,6 @@ module OpenAI
 )
 end
 
-# Text, image, or file inputs to the model, used to generate a response
-sig do
-returns(
-T.nilable(OpenAI::Responses::ResponseCompactParams::Input::Variants)
-)
-end
-attr_accessor :input
-
-# A system (or developer) message inserted into the model's context. When used
-# along with `previous_response_id`, the instructions from a previous response
-# will not be carried over to the next response. This makes it simple to swap out
-# system (or developer) messages in new responses.
-sig { returns(T.nilable(String)) }
-attr_accessor :instructions
-
 # Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
 # wide range of models with different capabilities, performance characteristics,
 # and price points. Refer to the
@@ -47,6 +32,21 @@ module OpenAI
 end
 attr_accessor :model
 
+# Text, image, or file inputs to the model, used to generate a response
+sig do
+returns(
+T.nilable(OpenAI::Responses::ResponseCompactParams::Input::Variants)
+)
+end
+attr_accessor :input
+
+# A system (or developer) message inserted into the model's context. When used
+# along with `previous_response_id`, the instructions from a previous response
+# will not be carried over to the next response. This makes it simple to swap out
+# system (or developer) messages in new responses.
+sig { returns(T.nilable(String)) }
+attr_accessor :instructions
+
 # The unique ID of the previous response to the model. Use this to create
 # multi-turn conversations. Learn more about
 # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
@@ -56,11 +56,6 @@ module OpenAI
 
 sig do
 params(
-input:
-T.nilable(
-OpenAI::Responses::ResponseCompactParams::Input::Variants
-),
-instructions: T.nilable(String),
 model:
 T.nilable(
 T.any(
@@ -68,11 +63,22 @@ module OpenAI
 String
 )
 ),
+input:
+T.nilable(
+OpenAI::Responses::ResponseCompactParams::Input::Variants
+),
+instructions: T.nilable(String),
 previous_response_id: T.nilable(String),
 request_options: OpenAI::RequestOptions::OrHash
 ).returns(T.attached_class)
 end
 def self.new(
+# Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
+# wide range of models with different capabilities, performance characteristics,
+# and price points. Refer to the
+# [model guide](https://platform.openai.com/docs/models) to browse and compare
+# available models.
+model:,
 # Text, image, or file inputs to the model, used to generate a response
 input: nil,
 # A system (or developer) message inserted into the model's context. When used
@@ -80,12 +86,6 @@ module OpenAI
 # will not be carried over to the next response. This makes it simple to swap out
 # system (or developer) messages in new responses.
 instructions: nil,
-# Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
-# wide range of models with different capabilities, performance characteristics,
-# and price points. Refer to the
-# [model guide](https://platform.openai.com/docs/models) to browse and compare
-# available models.
-model: nil,
 # The unique ID of the previous response to the model. Use this to create
 # multi-turn conversations. Learn more about
 # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
@@ -98,11 +98,6 @@ module OpenAI
 sig do
 override.returns(
 {
-input:
-T.nilable(
-OpenAI::Responses::ResponseCompactParams::Input::Variants
-),
-instructions: T.nilable(String),
 model:
 T.nilable(
 T.any(
@@ -110,6 +105,11 @@ module OpenAI
 String
 )
 ),
+input:
+T.nilable(
+OpenAI::Responses::ResponseCompactParams::Input::Variants
+),
+instructions: T.nilable(String),
 previous_response_id: T.nilable(String),
 request_options: OpenAI::RequestOptions
 }
@@ -118,37 +118,6 @@ module OpenAI
 def to_hash
 end
 
-# Text, image, or file inputs to the model, used to generate a response
-module Input
-extend OpenAI::Internal::Type::Union
-
-Variants =
-T.type_alias do
-T.any(
-String,
-T::Array[OpenAI::Responses::ResponseInputItem::Variants]
-)
-end
-
-sig do
-override.returns(
-T::Array[
-OpenAI::Responses::ResponseCompactParams::Input::Variants
-]
-)
-end
-def self.variants
-end
-
-ResponseInputItemArray =
-T.let(
-OpenAI::Internal::Type::ArrayOf[
-union: OpenAI::Responses::ResponseInputItem
-],
-OpenAI::Internal::Type::Converter
-)
-end
-
 # Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
 # wide range of models with different capabilities, performance characteristics,
 # and price points. Refer to the
@@ -587,6 +556,37 @@ module OpenAI
 OpenAI::Responses::ResponseCompactParams::Model::TaggedSymbol
 )
 end
+
+# Text, image, or file inputs to the model, used to generate a response
+module Input
+extend OpenAI::Internal::Type::Union
+
+Variants =
+T.type_alias do
+T.any(
+String,
+T::Array[OpenAI::Responses::ResponseInputItem::Variants]
+)
+end
+
+sig do
+override.returns(
+T::Array[
+OpenAI::Responses::ResponseCompactParams::Input::Variants
+]
+)
+end
+def self.variants
+end
+
+ResponseInputItemArray =
+T.let(
+OpenAI::Internal::Type::ArrayOf[
+union: OpenAI::Responses::ResponseInputItem
+],
+OpenAI::Internal::Type::Converter
+)
+end
 end
 end
 end
data/rbi/openai/resources/responses.rbi
CHANGED

@@ -945,11 +945,6 @@ module OpenAI
 # Compact conversation
 sig do
 params(
-input:
-T.nilable(
-OpenAI::Responses::ResponseCompactParams::Input::Variants
-),
-instructions: T.nilable(String),
 model:
 T.nilable(
 T.any(
@@ -957,11 +952,22 @@ module OpenAI
 String
 )
 ),
+input:
+T.nilable(
+OpenAI::Responses::ResponseCompactParams::Input::Variants
+),
+instructions: T.nilable(String),
 previous_response_id: T.nilable(String),
 request_options: OpenAI::RequestOptions::OrHash
 ).returns(OpenAI::Responses::CompactedResponse)
 end
 def compact(
+# Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
+# wide range of models with different capabilities, performance characteristics,
+# and price points. Refer to the
+# [model guide](https://platform.openai.com/docs/models) to browse and compare
+# available models.
+model:,
 # Text, image, or file inputs to the model, used to generate a response
 input: nil,
 # A system (or developer) message inserted into the model's context. When used
@@ -969,12 +975,6 @@ module OpenAI
 # will not be carried over to the next response. This makes it simple to swap out
 # system (or developer) messages in new responses.
 instructions: nil,
-# Model ID used to generate the response, like `gpt-5` or `o3`. OpenAI offers a
-# wide range of models with different capabilities, performance characteristics,
-# and price points. Refer to the
-# [model guide](https://platform.openai.com/docs/models) to browse and compare
-# available models.
-model: nil,
 # The unique ID of the previous response to the model. Use this to create
 # multi-turn conversations. Learn more about
 # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
data/sig/openai/models/responses/response_compact_params.rbs
CHANGED

@@ -3,9 +3,9 @@ module OpenAI
 module Responses
 type response_compact_params =
 {
+model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 input: OpenAI::Models::Responses::ResponseCompactParams::input?,
 instructions: String?,
-model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 previous_response_id: String?
 }
 & OpenAI::Internal::Type::request_parameters
@@ -14,41 +14,30 @@ module OpenAI
 extend OpenAI::Internal::Type::RequestParameters::Converter
 include OpenAI::Internal::Type::RequestParameters
 
+attr_accessor model: OpenAI::Models::Responses::ResponseCompactParams::model?
+
 attr_accessor input: OpenAI::Models::Responses::ResponseCompactParams::input?
 
 attr_accessor instructions: String?
 
-attr_accessor model: OpenAI::Models::Responses::ResponseCompactParams::model?
-
 attr_accessor previous_response_id: String?
 
 def initialize: (
+model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 ?input: OpenAI::Models::Responses::ResponseCompactParams::input?,
 ?instructions: String?,
-?model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 ?previous_response_id: String?,
 ?request_options: OpenAI::request_opts
 ) -> void
 
 def to_hash: -> {
+model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 input: OpenAI::Models::Responses::ResponseCompactParams::input?,
 instructions: String?,
-model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 previous_response_id: String?,
 request_options: OpenAI::RequestOptions
 }
 
-type input =
-String | ::Array[OpenAI::Models::Responses::response_input_item]
-
-module Input
-extend OpenAI::Internal::Type::Union
-
-def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCompactParams::input]
-
-ResponseInputItemArray: OpenAI::Internal::Type::Converter
-end
-
 type model =
 :"gpt-5.1"
 | :"gpt-5.1-2025-11-13"
@@ -220,6 +209,17 @@ module OpenAI
 GPT_5_PRO_2025_10_06: :"gpt-5-pro-2025-10-06"
 GPT_5_1_CODEX_MAX: :"gpt-5.1-codex-max"
 end
+
+type input =
+String | ::Array[OpenAI::Models::Responses::response_input_item]
+
+module Input
+extend OpenAI::Internal::Type::Union
+
+def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCompactParams::input]
+
+ResponseInputItemArray: OpenAI::Internal::Type::Converter
+end
 end
 end
 end
data/sig/openai/resources/responses.rbs
CHANGED

@@ -122,9 +122,9 @@ module OpenAI
 ) -> OpenAI::Responses::Response
 
 def compact: (
+model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 ?input: OpenAI::Models::Responses::ResponseCompactParams::input?,
 ?instructions: String?,
-?model: OpenAI::Models::Responses::ResponseCompactParams::model?,
 ?previous_response_id: String?,
 ?request_options: OpenAI::request_opts
 ) -> OpenAI::Responses::CompactedResponse
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openai
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.38.0
 platform: ruby
 authors:
 - OpenAI
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-12-
+date: 2025-12-10 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: base64