openai 0.18.1 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/README.md +1 -1
- data/lib/openai/client.rb +4 -0
- data/lib/openai/helpers/structured_output/boolean.rb +1 -0
- data/lib/openai/internal/conversation_cursor_page.rb +92 -0
- data/lib/openai/internal/transport/base_client.rb +1 -4
- data/lib/openai/internal/transport/pooled_net_requester.rb +1 -9
- data/lib/openai/internal/util.rb +1 -1
- data/lib/openai/models/audio/transcription.rb +1 -4
- data/lib/openai/models/audio/transcription_create_params.rb +2 -7
- data/lib/openai/models/audio/transcription_text_done_event.rb +1 -4
- data/lib/openai/models/beta/assistant_create_params.rb +6 -19
- data/lib/openai/models/beta/assistant_stream_event.rb +6 -24
- data/lib/openai/models/beta/assistant_update_params.rb +1 -4
- data/lib/openai/models/beta/message_stream_event.rb +1 -4
- data/lib/openai/models/beta/run_step_stream_event.rb +1 -4
- data/lib/openai/models/beta/thread_create_and_run_params.rb +10 -32
- data/lib/openai/models/beta/thread_create_params.rb +7 -22
- data/lib/openai/models/beta/threads/message.rb +3 -10
- data/lib/openai/models/beta/threads/message_create_params.rb +2 -7
- data/lib/openai/models/beta/threads/run.rb +2 -7
- data/lib/openai/models/beta/threads/run_create_params.rb +3 -10
- data/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +1 -3
- data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +5 -17
- data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +1 -3
- data/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +4 -12
- data/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +1 -4
- data/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +1 -4
- data/lib/openai/models/beta/threads/text.rb +1 -4
- data/lib/openai/models/chat/chat_completion_chunk.rb +1 -3
- data/lib/openai/models/chat/chat_completion_custom_tool.rb +2 -7
- data/lib/openai/models/conversations/computer_screenshot_content.rb +38 -0
- data/lib/openai/models/conversations/container_file_citation_body.rb +58 -0
- data/lib/openai/models/conversations/conversation.rb +51 -0
- data/lib/openai/models/conversations/conversation_create_params.rb +39 -0
- data/lib/openai/models/conversations/conversation_delete_params.rb +16 -0
- data/lib/openai/models/conversations/conversation_deleted.rb +29 -0
- data/lib/openai/models/conversations/conversation_deleted_resource.rb +30 -0
- data/lib/openai/models/conversations/conversation_item.rb +568 -0
- data/lib/openai/models/conversations/conversation_item_list.rb +55 -0
- data/lib/openai/models/conversations/conversation_retrieve_params.rb +16 -0
- data/lib/openai/models/conversations/conversation_update_params.rb +31 -0
- data/lib/openai/models/conversations/file_citation_body.rb +42 -0
- data/lib/openai/models/conversations/input_file_content.rb +42 -0
- data/lib/openai/models/conversations/input_image_content.rb +62 -0
- data/lib/openai/models/conversations/input_text_content.rb +26 -0
- data/lib/openai/models/conversations/item_create_params.rb +37 -0
- data/lib/openai/models/conversations/item_delete_params.rb +22 -0
- data/lib/openai/models/conversations/item_list_params.rb +84 -0
- data/lib/openai/models/conversations/item_retrieve_params.rb +36 -0
- data/lib/openai/models/conversations/lob_prob.rb +35 -0
- data/lib/openai/models/conversations/message.rb +115 -0
- data/lib/openai/models/conversations/output_text_content.rb +57 -0
- data/lib/openai/models/conversations/refusal_content.rb +26 -0
- data/lib/openai/models/conversations/summary_text_content.rb +23 -0
- data/lib/openai/models/conversations/text_content.rb +23 -0
- data/lib/openai/models/conversations/top_log_prob.rb +29 -0
- data/lib/openai/models/conversations/url_citation_body.rb +50 -0
- data/lib/openai/models/eval_create_params.rb +6 -20
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +29 -53
- data/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +1 -3
- data/lib/openai/models/evals/run_create_params.rb +18 -54
- data/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +1 -4
- data/lib/openai/models/moderation.rb +5 -15
- data/lib/openai/models/responses/input_item_list_params.rb +1 -9
- data/lib/openai/models/responses/response.rb +26 -1
- data/lib/openai/models/responses/response_computer_tool_call.rb +2 -6
- data/lib/openai/models/responses/response_computer_tool_call_output_item.rb +1 -3
- data/lib/openai/models/responses/response_conversation_param.rb +20 -0
- data/lib/openai/models/responses/response_create_params.rb +34 -1
- data/lib/openai/models/responses/response_input_item.rb +2 -7
- data/lib/openai/models/responses/response_input_message_item.rb +1 -4
- data/lib/openai/models/responses/response_output_item.rb +1 -3
- data/lib/openai/models/responses/response_output_message.rb +1 -3
- data/lib/openai/models/responses/response_output_text.rb +3 -10
- data/lib/openai/models/responses/response_stream_event.rb +4 -16
- data/lib/openai/models/responses/response_text_delta_event.rb +1 -3
- data/lib/openai/models/responses/response_text_done_event.rb +1 -3
- data/lib/openai/models/responses/tool.rb +145 -34
- data/lib/openai/models.rb +2 -0
- data/lib/openai/resources/conversations/items.rb +141 -0
- data/lib/openai/resources/conversations.rb +112 -0
- data/lib/openai/resources/responses/input_items.rb +1 -3
- data/lib/openai/resources/responses.rb +6 -2
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +31 -0
- data/rbi/openai/client.rbi +3 -0
- data/rbi/openai/errors.rbi +5 -5
- data/rbi/openai/internal/conversation_cursor_page.rbi +25 -0
- data/rbi/openai/models/conversations/computer_screenshot_content.rbi +60 -0
- data/rbi/openai/models/conversations/container_file_citation_body.rbi +82 -0
- data/rbi/openai/models/conversations/conversation.rbi +76 -0
- data/rbi/openai/models/conversations/conversation_create_params.rbi +144 -0
- data/rbi/openai/models/conversations/conversation_delete_params.rbi +32 -0
- data/rbi/openai/models/conversations/conversation_deleted.rbi +40 -0
- data/rbi/openai/models/conversations/conversation_deleted_resource.rbi +40 -0
- data/rbi/openai/models/conversations/conversation_item.rbi +835 -0
- data/rbi/openai/models/conversations/conversation_item_list.rbi +101 -0
- data/rbi/openai/models/conversations/conversation_retrieve_params.rbi +32 -0
- data/rbi/openai/models/conversations/conversation_update_params.rbi +56 -0
- data/rbi/openai/models/conversations/file_citation_body.rbi +61 -0
- data/rbi/openai/models/conversations/input_file_content.rbi +72 -0
- data/rbi/openai/models/conversations/input_image_content.rbi +113 -0
- data/rbi/openai/models/conversations/input_text_content.rbi +38 -0
- data/rbi/openai/models/conversations/item_create_params.rbi +150 -0
- data/rbi/openai/models/conversations/item_delete_params.rbi +40 -0
- data/rbi/openai/models/conversations/item_list_params.rbi +174 -0
- data/rbi/openai/models/conversations/item_retrieve_params.rbi +70 -0
- data/rbi/openai/models/conversations/lob_prob.rbi +50 -0
- data/rbi/openai/models/conversations/message.rbi +196 -0
- data/rbi/openai/models/conversations/output_text_content.rbi +110 -0
- data/rbi/openai/models/conversations/refusal_content.rbi +38 -0
- data/rbi/openai/models/conversations/summary_text_content.rbi +31 -0
- data/rbi/openai/models/conversations/text_content.rbi +28 -0
- data/rbi/openai/models/conversations/top_log_prob.rbi +41 -0
- data/rbi/openai/models/conversations/url_citation_body.rbi +74 -0
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +33 -33
- data/rbi/openai/models/responses/input_item_list_params.rbi +0 -11
- data/rbi/openai/models/responses/response.rbi +49 -0
- data/rbi/openai/models/responses/response_conversation_param.rbi +33 -0
- data/rbi/openai/models/responses/response_create_params.rbi +54 -0
- data/rbi/openai/models/responses/tool.rbi +243 -31
- data/rbi/openai/models.rbi +2 -0
- data/rbi/openai/resources/conversations/items.rbi +152 -0
- data/rbi/openai/resources/conversations.rbi +110 -0
- data/rbi/openai/resources/responses/input_items.rbi +0 -3
- data/rbi/openai/resources/responses.rbi +26 -0
- data/sig/openai/client.rbs +2 -0
- data/sig/openai/internal/conversation_cursor_page.rbs +15 -0
- data/sig/openai/models/conversations/computer_screenshot_content.rbs +28 -0
- data/sig/openai/models/conversations/container_file_citation_body.rbs +47 -0
- data/sig/openai/models/conversations/conversation.rbs +37 -0
- data/sig/openai/models/conversations/conversation_create_params.rbs +33 -0
- data/sig/openai/models/conversations/conversation_delete_params.rbs +17 -0
- data/sig/openai/models/conversations/conversation_deleted.rbs +28 -0
- data/sig/openai/models/conversations/conversation_deleted_resource.rbs +28 -0
- data/sig/openai/models/conversations/conversation_item.rbs +403 -0
- data/sig/openai/models/conversations/conversation_item_list.rbs +44 -0
- data/sig/openai/models/conversations/conversation_retrieve_params.rbs +17 -0
- data/sig/openai/models/conversations/conversation_update_params.rbs +26 -0
- data/sig/openai/models/conversations/file_citation_body.rbs +37 -0
- data/sig/openai/models/conversations/input_file_content.rbs +41 -0
- data/sig/openai/models/conversations/input_image_content.rbs +49 -0
- data/sig/openai/models/conversations/input_text_content.rbs +17 -0
- data/sig/openai/models/conversations/item_create_params.rbs +37 -0
- data/sig/openai/models/conversations/item_delete_params.rbs +25 -0
- data/sig/openai/models/conversations/item_list_params.rbs +66 -0
- data/sig/openai/models/conversations/item_retrieve_params.rbs +37 -0
- data/sig/openai/models/conversations/lob_prob.rbs +37 -0
- data/sig/openai/models/conversations/message.rbs +95 -0
- data/sig/openai/models/conversations/output_text_content.rbs +52 -0
- data/sig/openai/models/conversations/refusal_content.rbs +17 -0
- data/sig/openai/models/conversations/summary_text_content.rbs +17 -0
- data/sig/openai/models/conversations/text_content.rbs +17 -0
- data/sig/openai/models/conversations/top_log_prob.rbs +28 -0
- data/sig/openai/models/conversations/url_citation_body.rbs +42 -0
- data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +22 -22
- data/sig/openai/models/responses/input_item_list_params.rbs +0 -7
- data/sig/openai/models/responses/response.rbs +15 -0
- data/sig/openai/models/responses/response_conversation_param.rbs +15 -0
- data/sig/openai/models/responses/response_create_params.rbs +14 -0
- data/sig/openai/models/responses/tool.rbs +83 -18
- data/sig/openai/models.rbs +2 -0
- data/sig/openai/resources/conversations/items.rbs +38 -0
- data/sig/openai/resources/conversations.rbs +31 -0
- data/sig/openai/resources/responses/input_items.rbs +0 -1
- data/sig/openai/resources/responses.rbs +2 -0
- metadata +95 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ac4ed9093113e0eba0188cabba2c4ba6bc349ea0a3d03ada3ed2c091634eeed9
+  data.tar.gz: e20b7e18c4ddcfdefcb9dffc209a975d73e85f1203d0feac5f9cf120efa7dc09
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a4caaa4c1f5a0ce634d6161f42867592da4df7a1d011ac79133304ba3f18f6fae564744f89062bf062c4167c0e22bedadf9b9f05f9416d9921311f0019f2cf9e
+  data.tar.gz: 3ab46d651e729aa986a3befb934300fcd05badc4a44cc0a61bcd08041d9d1e166f2820d9854f133fce94b77ff8d7158cf88982571ccaaa683adcc8f9a541b1b6
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,25 @@
 # Changelog
 
+## 0.19.0 (2025-08-21)
+
+Full Changelog: [v0.18.1...v0.19.0](https://github.com/openai/openai-ruby/compare/v0.18.1...v0.19.0)
+
+### Features
+
+* **api:** Add connectors support for MCP tool ([469dbe2](https://github.com/openai/openai-ruby/commit/469dbe2f5fab91bac9f4a656250567c9f6bc9867))
+* **api:** adding support for /v1/conversations to the API ([54d4fe7](https://github.com/openai/openai-ruby/commit/54d4fe72f8157c44d3bca692e232be2e7ef7bbeb))
+
+
+### Bug Fixes
+
+* bump sorbet version and fix new type errors from the breaking change ([147f0a4](https://github.com/openai/openai-ruby/commit/147f0a48e2c10ede5d8a30c58ae8f5601d3c4a26))
+* do note check stainless api key during release creation ([#813](https://github.com/openai/openai-ruby/issues/813)) ([afab147](https://github.com/openai/openai-ruby/commit/afab1477b36c90edd5a163f42d8b7f8f82001622))
+
+
+### Chores
+
+* **internal/ci:** setup breaking change detection ([f6a214c](https://github.com/openai/openai-ruby/commit/f6a214cd9373afdde57bee358b4e008f256b2a1e))
+
 ## 0.18.1 (2025-08-19)
 
 Full Changelog: [v0.18.0...v0.18.1](https://github.com/openai/openai-ruby/compare/v0.18.0...v0.18.1)
data/README.md
CHANGED
data/lib/openai/client.rb
CHANGED
@@ -75,6 +75,9 @@ module OpenAI
     # @return [OpenAI::Resources::Responses]
     attr_reader :responses
 
+    # @return [OpenAI::Resources::Conversations]
+    attr_reader :conversations
+
     # @return [OpenAI::Resources::Evals]
     attr_reader :evals
 
@@ -160,6 +163,7 @@ module OpenAI
       @batches = OpenAI::Resources::Batches.new(client: self)
       @uploads = OpenAI::Resources::Uploads.new(client: self)
      @responses = OpenAI::Resources::Responses.new(client: self)
+      @conversations = OpenAI::Resources::Conversations.new(client: self)
      @evals = OpenAI::Resources::Evals.new(client: self)
      @containers = OpenAI::Resources::Containers.new(client: self)
    end
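The hunks above wire a new `conversations` resource into the client alongside `responses`, `evals`, and the other resources. A minimal usage sketch, assuming the resource follows the same generated pattern as the rest of the SDK — the `create`/`retrieve` method names and arguments are inferred from the added `conversation_create_params.rb` and `conversation_retrieve_params.rb`, not confirmed by this diff:

```ruby
# Hypothetical sketch of the new conversations resource in 0.19.0.
# Method names are assumptions based on the params files added in this release.
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

conversation = client.conversations.create                  # maps to /v1/conversations (assumed)
fetched      = client.conversations.retrieve(conversation.id)
puts(fetched.id)
```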
data/lib/openai/helpers/structured_output/boolean.rb
CHANGED
@@ -8,6 +8,7 @@ module OpenAI
 # Ruby does not have a "boolean" Class, this is something for models to refer to.
 class Boolean < OpenAI::Internal::Type::Boolean
   extend OpenAI::Helpers::StructuredOutput::JsonSchemaConverter
+
   # rubocop:disable Lint/UnusedMethodArgument
 
   # @api private
data/lib/openai/internal/conversation_cursor_page.rb
ADDED
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Internal
+    # @generic Elem
+    #
+    # @example
+    #   if conversation_cursor_page.has_next?
+    #     conversation_cursor_page = conversation_cursor_page.next_page
+    #   end
+    #
+    # @example
+    #   conversation_cursor_page.auto_paging_each do |item|
+    #     puts(item)
+    #   end
+    class ConversationCursorPage
+      include OpenAI::Internal::Type::BasePage
+
+      # @return [Array<generic<Elem>>, nil]
+      attr_accessor :data
+
+      # @return [Boolean]
+      attr_accessor :has_more
+
+      # @return [String]
+      attr_accessor :last_id
+
+      # @return [Boolean]
+      def next_page?
+        has_more
+      end
+
+      # @raise [OpenAI::HTTP::Error]
+      # @return [self]
+      def next_page
+        unless next_page?
+          message = "No more pages available. Please check #next_page? before calling ##{__method__}"
+          raise RuntimeError.new(message)
+        end
+
+        req = OpenAI::Internal::Util.deep_merge(@req, {query: {after: last_id}})
+        @client.request(req)
+      end
+
+      # @param blk [Proc]
+      #
+      # @yieldparam [generic<Elem>]
+      def auto_paging_each(&blk)
+        unless block_given?
+          raise ArgumentError.new("A block must be given to ##{__method__}")
+        end
+
+        page = self
+        loop do
+          page.data&.each(&blk)
+
+          break unless page.next_page?
+          page = page.next_page
+        end
+      end
+
+      # @api private
+      #
+      # @param client [OpenAI::Internal::Transport::BaseClient]
+      # @param req [Hash{Symbol=>Object}]
+      # @param headers [Hash{String=>String}, Net::HTTPHeader]
+      # @param page_data [Hash{Symbol=>Object}]
+      def initialize(client:, req:, headers:, page_data:)
+        super
+
+        case page_data
+        in {data: Array => data}
+          @data = data.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) }
+        else
+        end
+        @has_more = page_data[:has_more]
+        @last_id = page_data[:last_id]
+      end
+
+      # @api private
+      #
+      # @return [String]
+      def inspect
+        # rubocop:disable Layout/LineLength
+        model = OpenAI::Internal::Type::Converter.inspect(@model, depth: 1)
+
+        "#<#{self.class}[#{model}]:0x#{object_id.to_s(16)} has_more=#{has_more.inspect} last_id=#{last_id.inspect}>"
+        # rubocop:enable Layout/LineLength
+      end
+    end
+  end
+end
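The pagination contract of the new ConversationCursorPage follows the @example comments above: `next_page?`/`next_page` for manual paging and `auto_paging_each` for full iteration. A consumption sketch, assuming conversation items are listed through a `client.conversations.items` resource — the `list` call and its arguments are inferred from the added `resources/conversations/items.rb` and `item_list_params.rb`, not shown here:

```ruby
# Hypothetical caller of the new cursor page; only #auto_paging_each, #next_page?,
# and #next_page come from the class above — the list call is an assumption.
page = client.conversations.items.list("conv_123", limit: 20)

# Iterate every item across pages; each hop re-issues the original request with
# `after: last_id` merged into the query, as #next_page does above.
page.auto_paging_each do |item|
  puts(item)
end

# Or page manually:
page = page.next_page if page.next_page?
```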
data/lib/openai/internal/transport/base_client.rb
CHANGED
@@ -539,10 +539,7 @@ module OpenAI
     )
   ),
   page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]),
-  stream: T.nilable(
-    T::Class[OpenAI::Internal::Type::BaseStream[T.anything,
-      OpenAI::Internal::Type::BaseModel]]
-  ),
+  stream: T.nilable(T::Class[OpenAI::Internal::Type::BaseStream[T.anything, OpenAI::Internal::Type::BaseModel]]),
   model: T.nilable(OpenAI::Internal::Type::Converter::Input),
   options: T.nilable(OpenAI::RequestOptions::OrHash)
 }
data/lib/openai/internal/transport/pooled_net_requester.rb
CHANGED
@@ -193,15 +193,7 @@ module OpenAI
     end
 
     define_sorbet_constant!(:Request) do
-      T.type_alias do
-        {
-          method: Symbol,
-          url: URI::Generic,
-          headers: T::Hash[String, String],
-          body: T.anything,
-          deadline: Float
-        }
-      end
+      T.type_alias { {method: Symbol, url: URI::Generic, headers: T::Hash[String, String], body: T.anything, deadline: Float} }
     end
   end
 end
data/lib/openai/internal/util.rb
CHANGED
@@ -244,7 +244,7 @@ module OpenAI
 #
 # @return [String]
 def uri_origin(uri)
-  "#{uri.scheme}://#{uri.host}#{uri.port
+  "#{uri.scheme}://#{uri.host}#{":#{uri.port}" unless uri.port == uri.default_port}"
 end
 
 # @api private
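The rewritten `uri_origin` only appends the port when it differs from the scheme's default, so origins on standard ports stay clean. A quick sketch of the resulting behavior, assuming the helper is invoked as a singleton method like the other `Util` helpers:

```ruby
# Behavior implied by the new implementation above: the port is included
# only when it is not the default port for the URI's scheme.
require "uri"

OpenAI::Internal::Util.uri_origin(URI("https://api.openai.com/v1"))
# => "https://api.openai.com"
OpenAI::Internal::Util.uri_origin(URI("http://localhost:8080/v1"))
# => "http://localhost:8080"
```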
data/lib/openai/models/audio/transcription.rb
CHANGED
@@ -107,10 +107,7 @@ module OpenAI
 # Details about the input tokens billed for this request.
 #
 # @return [OpenAI::Models::Audio::Transcription::Usage::Tokens::InputTokenDetails, nil]
-optional :input_token_details,
-         -> {
-           OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails
-         }
+optional :input_token_details, -> { OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails }
 
 # @!method initialize(input_tokens:, output_tokens:, total_tokens:, input_token_details: nil, type: :tokens)
 # Usage statistics for models billed by token usage.
data/lib/openai/models/audio/transcription_create_params.rb
CHANGED
@@ -90,9 +90,7 @@ module OpenAI
 #
 # @return [Array<Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity>, nil]
 optional :timestamp_granularities,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] }
 
 # @!method initialize(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
 # Some parameter documentations has been truncated, see
@@ -150,10 +148,7 @@ module OpenAI
 # Must be set to `server_vad` to enable manual chunking using server side VAD.
 #
 # @return [Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type]
-required :type,
-         enum: -> {
-           OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type
-         }
+required :type, enum: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type }
 
 # @!attribute prefix_padding_ms
 # Amount of audio to include before the VAD detected speech (in milliseconds).
data/lib/openai/models/audio/transcription_text_done_event.rb
CHANGED
@@ -109,10 +109,7 @@ module OpenAI
 # Details about the input tokens billed for this request.
 #
 # @return [OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails, nil]
-optional :input_token_details,
-         -> {
-           OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails
-         }
+optional :input_token_details, -> { OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails }
 
 # @!method initialize(input_tokens:, output_tokens:, total_tokens:, input_token_details: nil, type: :tokens)
 # Usage statistics for models billed by token usage.
data/lib/openai/models/beta/assistant_create_params.rb
CHANGED
@@ -167,10 +167,7 @@ module OpenAI
 # @!attribute code_interpreter
 #
 # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil]
-optional :code_interpreter,
-         -> {
-           OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter
-         }
+optional :code_interpreter, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter }
 
 # @!attribute file_search
 #
@@ -223,9 +220,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore>, nil]
 optional :vector_stores,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] }
 
 # @!method initialize(vector_store_ids: nil, vector_stores: nil)
 # Some parameter documentations has been truncated, see
@@ -243,9 +238,7 @@ module OpenAI
 #
 # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil]
 optional :chunking_strategy,
-         union: -> {
-           OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy
-         }
+         union: -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy }
 
 # @!attribute file_ids
 # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
@@ -288,14 +281,10 @@ module OpenAI
 
 # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`.
 variant :auto,
-        -> {
-          OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto
-        }
+        -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto }
 
 variant :static,
-        -> {
-          OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
-        }
+        -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static }
 
 class Auto < OpenAI::Internal::Type::BaseModel
 # @!attribute type
@@ -316,9 +305,7 @@ module OpenAI
 #
 # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static]
 required :static,
-         -> {
-           OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static
-         }
+         -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static }
 
 # @!attribute type
 # Always `static`.
data/lib/openai/models/beta/assistant_stream_event.rb
CHANGED
@@ -41,10 +41,7 @@ module OpenAI
 variant :"thread.run.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress }
 
 # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `requires_action` status.
-variant :"thread.run.requires_action",
-        -> {
-          OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction
-        }
+variant :"thread.run.requires_action", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction }
 
 # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is completed.
 variant :"thread.run.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted }
@@ -68,28 +65,19 @@ module OpenAI
 variant :"thread.run.step.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated }
 
 # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state.
-variant :"thread.run.step.in_progress",
-        -> {
-          OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress
-        }
+variant :"thread.run.step.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress }
 
 # Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed.
 variant :"thread.run.step.delta", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta }
 
 # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is completed.
-variant :"thread.run.step.completed",
-        -> {
-          OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted
-        }
+variant :"thread.run.step.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted }
 
 # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) fails.
 variant :"thread.run.step.failed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed }
 
 # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is cancelled.
-variant :"thread.run.step.cancelled",
-        -> {
-          OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled
-        }
+variant :"thread.run.step.cancelled", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled }
 
 # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) expires.
 variant :"thread.run.step.expired", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired }
@@ -98,10 +86,7 @@ module OpenAI
 variant :"thread.message.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated }
 
 # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state.
-variant :"thread.message.in_progress",
-        -> {
-          OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress
-        }
+variant :"thread.message.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress }
 
 # Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed.
 variant :"thread.message.delta", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta }
@@ -110,10 +95,7 @@ module OpenAI
 variant :"thread.message.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted }
 
 # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) ends before it is completed.
-variant :"thread.message.incomplete",
-        -> {
-          OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete
-        }
+variant :"thread.message.incomplete", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete }
 
 # Occurs when an [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. This can happen due to an internal server error or a timeout.
 variant :error, -> { OpenAI::Beta::AssistantStreamEvent::ErrorEvent }
data/lib/openai/models/beta/assistant_update_params.rb
CHANGED
@@ -299,10 +299,7 @@ module OpenAI
 # @!attribute code_interpreter
 #
 # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil]
-optional :code_interpreter,
-         -> {
-           OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter
-         }
+optional :code_interpreter, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter }
 
 # @!attribute file_search
 #
data/lib/openai/models/beta/message_stream_event.rb
CHANGED
@@ -15,10 +15,7 @@ module OpenAI
 variant :"thread.message.created", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated }
 
 # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state.
-variant :"thread.message.in_progress",
-        -> {
-          OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress
-        }
+variant :"thread.message.in_progress", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress }
 
 # Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed.
 variant :"thread.message.delta", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta }
data/lib/openai/models/beta/run_step_stream_event.rb
CHANGED
@@ -15,10 +15,7 @@ module OpenAI
 variant :"thread.run.step.created", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated }
 
 # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state.
-variant :"thread.run.step.in_progress",
-        -> {
-          OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress
-        }
+variant :"thread.run.step.in_progress", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress }
 
 # Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed.
 variant :"thread.run.step.delta", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta }
data/lib/openai/models/beta/thread_create_and_run_params.rb
CHANGED
@@ -223,9 +223,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message>, nil]
 optional :messages,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message] }
 
 # @!attribute metadata
 # Set of 16 key-value pairs that can be attached to an object. This can be useful
@@ -366,9 +364,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch>, nil]
 optional :tools,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] }
 
 # @!method initialize(file_id: nil, tools: nil)
 # @param file_id [String] The ID of the file to attach to the message.
@@ -383,9 +379,7 @@ module OpenAI
 variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool }
 
 variant :file_search,
-        -> {
-          OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch
-        }
+        -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch }
 
 class FileSearch < OpenAI::Internal::Type::BaseModel
 # @!attribute type
@@ -415,10 +409,7 @@ module OpenAI
 # @!attribute file_search
 #
 # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil]
-optional :file_search,
-         -> {
-           OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
-         }
+optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch }
 
 # @!method initialize(code_interpreter: nil, file_search: nil)
 # A set of resources that are made available to the assistant's tools in this
@@ -466,9 +457,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore>, nil]
 optional :vector_stores,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] }
 
 # @!method initialize(vector_store_ids: nil, vector_stores: nil)
 # Some parameter documentations has been truncated, see
@@ -486,9 +475,7 @@ module OpenAI
 #
 # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil]
 optional :chunking_strategy,
-         union: -> {
-           OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy
-         }
+         union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy }
 
 # @!attribute file_ids
 # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
@@ -531,14 +518,10 @@ module OpenAI
 
 # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`.
 variant :auto,
-        -> {
-          OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto
-        }
+        -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto }
 
 variant :static,
-        -> {
-          OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
-        }
+        -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static }
 
 class Auto < OpenAI::Internal::Type::BaseModel
 # @!attribute type
@@ -559,9 +542,7 @@ module OpenAI
 #
 # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static]
 required :static,
-         -> {
-           OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static
-         }
+         -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static }
 
 # @!attribute type
 # Always `static`.
@@ -614,10 +595,7 @@ module OpenAI
 # @!attribute code_interpreter
 #
 # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil]
-optional :code_interpreter,
-         -> {
-           OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter
-         }
+optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter }
 
 # @!attribute file_search
 #
data/lib/openai/models/beta/thread_create_params.rb
CHANGED
@@ -148,9 +148,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch>, nil]
 optional :tools,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool] }
 
 # @!method initialize(file_id: nil, tools: nil)
 # @param file_id [String] The ID of the file to attach to the message.
@@ -164,10 +162,7 @@ module OpenAI
 
 variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool }
 
-variant :file_search,
-        -> {
-          OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch
-        }
+variant :file_search, -> { OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch }
 
 class FileSearch < OpenAI::Internal::Type::BaseModel
 # @!attribute type
@@ -243,9 +238,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore>, nil]
 optional :vector_stores,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] }
 
 # @!method initialize(vector_store_ids: nil, vector_stores: nil)
 # Some parameter documentations has been truncated, see
@@ -263,9 +256,7 @@ module OpenAI
 #
 # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil]
 optional :chunking_strategy,
-         union: -> {
-           OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy
-         }
+         union: -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy }
 
 # @!attribute file_ids
 # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
@@ -308,14 +299,10 @@ module OpenAI
 
 # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`.
 variant :auto,
-        -> {
-          OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto
-        }
+        -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto }
 
 variant :static,
-        -> {
-          OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
-        }
+        -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static }
 
 class Auto < OpenAI::Internal::Type::BaseModel
 # @!attribute type
@@ -336,9 +323,7 @@ module OpenAI
 #
 # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static]
 required :static,
-         -> {
-           OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static
-         }
+         -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static }
 
 # @!attribute type
 # Always `static`.
data/lib/openai/models/beta/threads/message.rb
CHANGED
@@ -38,10 +38,7 @@ module OpenAI
 # The content of the message in array of text and/or images.
 #
 # @return [Array<OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock>]
-required :content,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent]
-         }
+required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent] }
 
 # @!attribute created_at
 # The Unix timestamp (in seconds) for when the message was created.
@@ -153,9 +150,7 @@ module OpenAI
 #
 # @return [Array<OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly>, nil]
 optional :tools,
-         -> {
-           OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool]
-         }
+         -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool] }
 
 # @!method initialize(file_id: nil, tools: nil)
 # @param file_id [String] The ID of the file to attach to the message.
@@ -167,9 +162,7 @@ module OpenAI
 
 variant -> { OpenAI::Beta::CodeInterpreterTool }
 
-variant -> {
-          OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly
-        }
+variant -> { OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly }
 
 class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel
 # @!attribute type