openai 0.15.0 → 0.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +23 -0
- data/README.md +1 -1
- data/lib/openai/helpers/structured_output/json_schema_converter.rb +20 -21
- data/lib/openai/helpers/structured_output/union_of.rb +11 -1
- data/lib/openai/models/chat/chat_completion_store_message.rb +32 -1
- data/lib/openai/models/chat/completion_create_params.rb +31 -5
- data/lib/openai/models/responses/response.rb +50 -4
- data/lib/openai/models/responses/response_create_params.rb +31 -5
- data/lib/openai/resources/chat/completions.rb +12 -4
- data/lib/openai/resources/responses.rb +40 -15
- data/lib/openai/version.rb +1 -1
- data/rbi/openai/models/chat/chat_completion_store_message.rbi +68 -3
- data/rbi/openai/models/chat/completion_create_params.rbi +44 -6
- data/rbi/openai/models/responses/response.rbi +44 -6
- data/rbi/openai/models/responses/response_create_params.rbi +44 -6
- data/rbi/openai/resources/chat/completions.rbi +34 -6
- data/rbi/openai/resources/responses.rbi +34 -6
- data/sig/openai/models/chat/chat_completion_store_message.rbs +29 -3
- data/sig/openai/models/chat/completion_create_params.rbs +14 -0
- data/sig/openai/models/responses/response.rbs +14 -0
- data/sig/openai/models/responses/response_create_params.rbs +14 -0
- data/sig/openai/resources/chat/completions.rbs +4 -0
- data/sig/openai/resources/responses.rbs +4 -0
- metadata +2 -2
data/sig/openai/models/chat/completion_create_params.rbs CHANGED

@@ -19,8 +19,10 @@ module OpenAI
   parallel_tool_calls: bool,
   prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
   presence_penalty: Float?,
+  prompt_cache_key: String,
   reasoning_effort: OpenAI::Models::reasoning_effort?,
   response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+  safety_identifier: String,
   seed: Integer?,
   service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
   stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
@@ -82,6 +84,10 @@ module OpenAI

   attr_accessor presence_penalty: Float?

+  attr_reader prompt_cache_key: String?
+
+  def prompt_cache_key=: (String) -> String
+
   attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort?

   attr_reader response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format?
@@ -90,6 +96,10 @@ module OpenAI
     OpenAI::Models::Chat::CompletionCreateParams::response_format
   ) -> OpenAI::Models::Chat::CompletionCreateParams::response_format

+  attr_reader safety_identifier: String?
+
+  def safety_identifier=: (String) -> String
+
   attr_accessor seed: Integer?

   attr_accessor service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?
@@ -145,8 +155,10 @@ module OpenAI
   ?parallel_tool_calls: bool,
   ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
   ?presence_penalty: Float?,
+  ?prompt_cache_key: String,
   ?reasoning_effort: OpenAI::Models::reasoning_effort?,
   ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+  ?safety_identifier: String,
   ?seed: Integer?,
   ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
   ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
@@ -179,8 +191,10 @@ module OpenAI
   parallel_tool_calls: bool,
   prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
   presence_penalty: Float?,
+  prompt_cache_key: String,
   reasoning_effort: OpenAI::Models::reasoning_effort?,
   response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+  safety_identifier: String,
   seed: Integer?,
   service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
   stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
data/sig/openai/models/responses/response.rbs CHANGED

@@ -22,7 +22,9 @@ module OpenAI
   max_tool_calls: Integer?,
   previous_response_id: String?,
   prompt: OpenAI::Responses::ResponsePrompt?,
+  prompt_cache_key: String,
   reasoning: OpenAI::Reasoning?,
+  safety_identifier: String,
   service_tier: OpenAI::Models::Responses::Response::service_tier?,
   status: OpenAI::Models::Responses::response_status,
   text: OpenAI::Responses::ResponseTextConfig,
@@ -71,8 +73,16 @@ module OpenAI

   attr_accessor prompt: OpenAI::Responses::ResponsePrompt?

+  attr_reader prompt_cache_key: String?
+
+  def prompt_cache_key=: (String) -> String
+
   attr_accessor reasoning: OpenAI::Reasoning?

+  attr_reader safety_identifier: String?
+
+  def safety_identifier=: (String) -> String
+
   attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier?

   attr_reader status: OpenAI::Models::Responses::response_status?
@@ -120,7 +130,9 @@ module OpenAI
   ?max_tool_calls: Integer?,
   ?previous_response_id: String?,
   ?prompt: OpenAI::Responses::ResponsePrompt?,
+  ?prompt_cache_key: String,
   ?reasoning: OpenAI::Reasoning?,
+  ?safety_identifier: String,
   ?service_tier: OpenAI::Models::Responses::Response::service_tier?,
   ?status: OpenAI::Models::Responses::response_status,
   ?text: OpenAI::Responses::ResponseTextConfig,
@@ -151,7 +163,9 @@ module OpenAI
   max_tool_calls: Integer?,
   previous_response_id: String?,
   prompt: OpenAI::Responses::ResponsePrompt?,
+  prompt_cache_key: String,
   reasoning: OpenAI::Reasoning?,
+  safety_identifier: String,
   service_tier: OpenAI::Models::Responses::Response::service_tier?,
   status: OpenAI::Models::Responses::response_status,
   text: OpenAI::Responses::ResponseTextConfig,
data/sig/openai/models/responses/response_create_params.rbs CHANGED

@@ -14,7 +14,9 @@ module OpenAI
   parallel_tool_calls: bool?,
   previous_response_id: String?,
   prompt: OpenAI::Responses::ResponsePrompt?,
+  prompt_cache_key: String,
   reasoning: OpenAI::Reasoning?,
+  safety_identifier: String,
   service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?,
   store: bool?,
   temperature: Float?,
@@ -62,8 +64,16 @@ module OpenAI

   attr_accessor prompt: OpenAI::Responses::ResponsePrompt?

+  attr_reader prompt_cache_key: String?
+
+  def prompt_cache_key=: (String) -> String
+
   attr_accessor reasoning: OpenAI::Reasoning?

+  attr_reader safety_identifier: String?
+
+  def safety_identifier=: (String) -> String
+
   attr_accessor service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?

   attr_accessor store: bool?
@@ -110,7 +120,9 @@ module OpenAI
   ?parallel_tool_calls: bool?,
   ?previous_response_id: String?,
   ?prompt: OpenAI::Responses::ResponsePrompt?,
+  ?prompt_cache_key: String,
   ?reasoning: OpenAI::Reasoning?,
+  ?safety_identifier: String,
   ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?,
   ?store: bool?,
   ?temperature: Float?,
@@ -136,7 +148,9 @@ module OpenAI
   parallel_tool_calls: bool?,
   previous_response_id: String?,
   prompt: OpenAI::Responses::ResponsePrompt?,
+  prompt_cache_key: String,
   reasoning: OpenAI::Reasoning?,
+  safety_identifier: String,
   service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?,
   store: bool?,
   temperature: Float?,
data/sig/openai/resources/chat/completions.rbs CHANGED

@@ -21,8 +21,10 @@ module OpenAI
   ?parallel_tool_calls: bool,
   ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
   ?presence_penalty: Float?,
+  ?prompt_cache_key: String,
   ?reasoning_effort: OpenAI::Models::reasoning_effort?,
   ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+  ?safety_identifier: String,
   ?seed: Integer?,
   ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
   ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
@@ -55,8 +57,10 @@ module OpenAI
   ?parallel_tool_calls: bool,
   ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
   ?presence_penalty: Float?,
+  ?prompt_cache_key: String,
   ?reasoning_effort: OpenAI::Models::reasoning_effort?,
   ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+  ?safety_identifier: String,
   ?seed: Integer?,
   ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
   ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
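The two resource signatures above add optional `?prompt_cache_key: String` and `?safety_identifier: String` keywords to chat completion creation. A minimal usage sketch, not taken from the diff itself: it assumes an `OPENAI_API_KEY` environment variable, and the model name, cache key, and identifier values are placeholders.

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

chat_completion = client.chat.completions.create(
  model: "gpt-4.1",                                           # placeholder model name
  messages: [{role: "user", content: "Say this is a test"}],
  prompt_cache_key: "marketing-copy-v1",                      # placeholder key for grouping cacheable prompts
  safety_identifier: "user_1234"                              # placeholder stable end-user identifier
)

puts chat_completion.choices.first.message.content
```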
data/sig/openai/resources/responses.rbs CHANGED

@@ -15,7 +15,9 @@ module OpenAI
   ?parallel_tool_calls: bool?,
   ?previous_response_id: String?,
   ?prompt: OpenAI::Responses::ResponsePrompt?,
+  ?prompt_cache_key: String,
   ?reasoning: OpenAI::Reasoning?,
+  ?safety_identifier: String,
   ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?,
   ?store: bool?,
   ?temperature: Float?,
@@ -41,7 +43,9 @@ module OpenAI
   ?parallel_tool_calls: bool?,
   ?previous_response_id: String?,
   ?prompt: OpenAI::Responses::ResponsePrompt?,
+  ?prompt_cache_key: String,
   ?reasoning: OpenAI::Reasoning?,
+  ?safety_identifier: String,
   ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?,
   ?store: bool?,
   ?temperature: Float?,
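The responses resource gains the same pair of optional keywords. A comparable sketch for `client.responses.create`, under the same assumptions (placeholder model, cache key, and identifier):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.responses.create(
  model: "gpt-4.1",                        # placeholder model name
  input: "Write a one-sentence bedtime story.",
  prompt_cache_key: "bedtime-stories-v1",  # placeholder key for grouping cacheable prompts
  safety_identifier: "user_1234"           # placeholder stable end-user identifier
)

puts response.id
puts response.status
```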
metadata CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openai
 version: !ruby/object:Gem::Version
-  version: 0.15.0
+  version: 0.16.0
 platform: ruby
 authors:
 - OpenAI
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-07-
+date: 2025-07-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: connection_pool