openai 0.18.1 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/README.md +1 -1
- data/lib/openai/client.rb +4 -0
- data/lib/openai/helpers/structured_output/boolean.rb +1 -0
- data/lib/openai/internal/conversation_cursor_page.rb +92 -0
- data/lib/openai/internal/transport/base_client.rb +1 -4
- data/lib/openai/internal/transport/pooled_net_requester.rb +1 -9
- data/lib/openai/internal/util.rb +1 -1
- data/lib/openai/models/audio/transcription.rb +1 -4
- data/lib/openai/models/audio/transcription_create_params.rb +2 -7
- data/lib/openai/models/audio/transcription_text_done_event.rb +1 -4
- data/lib/openai/models/beta/assistant_create_params.rb +6 -19
- data/lib/openai/models/beta/assistant_stream_event.rb +6 -24
- data/lib/openai/models/beta/assistant_update_params.rb +1 -4
- data/lib/openai/models/beta/message_stream_event.rb +1 -4
- data/lib/openai/models/beta/run_step_stream_event.rb +1 -4
- data/lib/openai/models/beta/thread_create_and_run_params.rb +10 -32
- data/lib/openai/models/beta/thread_create_params.rb +7 -22
- data/lib/openai/models/beta/threads/message.rb +3 -10
- data/lib/openai/models/beta/threads/message_create_params.rb +2 -7
- data/lib/openai/models/beta/threads/run.rb +2 -7
- data/lib/openai/models/beta/threads/run_create_params.rb +3 -10
- data/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +1 -3
- data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +5 -17
- data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +1 -3
- data/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +4 -12
- data/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +1 -4
- data/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +1 -4
- data/lib/openai/models/beta/threads/text.rb +1 -4
- data/lib/openai/models/chat/chat_completion_chunk.rb +1 -3
- data/lib/openai/models/chat/chat_completion_custom_tool.rb +2 -7
- data/lib/openai/models/conversations/computer_screenshot_content.rb +38 -0
- data/lib/openai/models/conversations/container_file_citation_body.rb +58 -0
- data/lib/openai/models/conversations/conversation.rb +51 -0
- data/lib/openai/models/conversations/conversation_create_params.rb +39 -0
- data/lib/openai/models/conversations/conversation_delete_params.rb +16 -0
- data/lib/openai/models/conversations/conversation_deleted.rb +29 -0
- data/lib/openai/models/conversations/conversation_deleted_resource.rb +30 -0
- data/lib/openai/models/conversations/conversation_item.rb +568 -0
- data/lib/openai/models/conversations/conversation_item_list.rb +55 -0
- data/lib/openai/models/conversations/conversation_retrieve_params.rb +16 -0
- data/lib/openai/models/conversations/conversation_update_params.rb +31 -0
- data/lib/openai/models/conversations/file_citation_body.rb +42 -0
- data/lib/openai/models/conversations/input_file_content.rb +42 -0
- data/lib/openai/models/conversations/input_image_content.rb +62 -0
- data/lib/openai/models/conversations/input_text_content.rb +26 -0
- data/lib/openai/models/conversations/item_create_params.rb +37 -0
- data/lib/openai/models/conversations/item_delete_params.rb +22 -0
- data/lib/openai/models/conversations/item_list_params.rb +84 -0
- data/lib/openai/models/conversations/item_retrieve_params.rb +36 -0
- data/lib/openai/models/conversations/lob_prob.rb +35 -0
- data/lib/openai/models/conversations/message.rb +115 -0
- data/lib/openai/models/conversations/output_text_content.rb +57 -0
- data/lib/openai/models/conversations/refusal_content.rb +26 -0
- data/lib/openai/models/conversations/summary_text_content.rb +23 -0
- data/lib/openai/models/conversations/text_content.rb +23 -0
- data/lib/openai/models/conversations/top_log_prob.rb +29 -0
- data/lib/openai/models/conversations/url_citation_body.rb +50 -0
- data/lib/openai/models/eval_create_params.rb +6 -20
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +29 -53
- data/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +1 -3
- data/lib/openai/models/evals/run_create_params.rb +18 -54
- data/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +1 -4
- data/lib/openai/models/moderation.rb +5 -15
- data/lib/openai/models/responses/input_item_list_params.rb +1 -9
- data/lib/openai/models/responses/response.rb +26 -1
- data/lib/openai/models/responses/response_computer_tool_call.rb +2 -6
- data/lib/openai/models/responses/response_computer_tool_call_output_item.rb +1 -3
- data/lib/openai/models/responses/response_conversation_param.rb +20 -0
- data/lib/openai/models/responses/response_create_params.rb +34 -1
- data/lib/openai/models/responses/response_input_item.rb +2 -7
- data/lib/openai/models/responses/response_input_message_item.rb +1 -4
- data/lib/openai/models/responses/response_output_item.rb +1 -3
- data/lib/openai/models/responses/response_output_message.rb +1 -3
- data/lib/openai/models/responses/response_output_text.rb +3 -10
- data/lib/openai/models/responses/response_stream_event.rb +4 -16
- data/lib/openai/models/responses/response_text_delta_event.rb +1 -3
- data/lib/openai/models/responses/response_text_done_event.rb +1 -3
- data/lib/openai/models/responses/tool.rb +145 -34
- data/lib/openai/models.rb +2 -0
- data/lib/openai/resources/conversations/items.rb +141 -0
- data/lib/openai/resources/conversations.rb +112 -0
- data/lib/openai/resources/responses/input_items.rb +1 -3
- data/lib/openai/resources/responses.rb +6 -2
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +31 -0
- data/rbi/openai/client.rbi +3 -0
- data/rbi/openai/errors.rbi +5 -5
- data/rbi/openai/internal/conversation_cursor_page.rbi +25 -0
- data/rbi/openai/models/conversations/computer_screenshot_content.rbi +60 -0
- data/rbi/openai/models/conversations/container_file_citation_body.rbi +82 -0
- data/rbi/openai/models/conversations/conversation.rbi +76 -0
- data/rbi/openai/models/conversations/conversation_create_params.rbi +144 -0
- data/rbi/openai/models/conversations/conversation_delete_params.rbi +32 -0
- data/rbi/openai/models/conversations/conversation_deleted.rbi +40 -0
- data/rbi/openai/models/conversations/conversation_deleted_resource.rbi +40 -0
- data/rbi/openai/models/conversations/conversation_item.rbi +835 -0
- data/rbi/openai/models/conversations/conversation_item_list.rbi +101 -0
- data/rbi/openai/models/conversations/conversation_retrieve_params.rbi +32 -0
- data/rbi/openai/models/conversations/conversation_update_params.rbi +56 -0
- data/rbi/openai/models/conversations/file_citation_body.rbi +61 -0
- data/rbi/openai/models/conversations/input_file_content.rbi +72 -0
- data/rbi/openai/models/conversations/input_image_content.rbi +113 -0
- data/rbi/openai/models/conversations/input_text_content.rbi +38 -0
- data/rbi/openai/models/conversations/item_create_params.rbi +150 -0
- data/rbi/openai/models/conversations/item_delete_params.rbi +40 -0
- data/rbi/openai/models/conversations/item_list_params.rbi +174 -0
- data/rbi/openai/models/conversations/item_retrieve_params.rbi +70 -0
- data/rbi/openai/models/conversations/lob_prob.rbi +50 -0
- data/rbi/openai/models/conversations/message.rbi +196 -0
- data/rbi/openai/models/conversations/output_text_content.rbi +110 -0
- data/rbi/openai/models/conversations/refusal_content.rbi +38 -0
- data/rbi/openai/models/conversations/summary_text_content.rbi +31 -0
- data/rbi/openai/models/conversations/text_content.rbi +28 -0
- data/rbi/openai/models/conversations/top_log_prob.rbi +41 -0
- data/rbi/openai/models/conversations/url_citation_body.rbi +74 -0
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +33 -33
- data/rbi/openai/models/responses/input_item_list_params.rbi +0 -11
- data/rbi/openai/models/responses/response.rbi +49 -0
- data/rbi/openai/models/responses/response_conversation_param.rbi +33 -0
- data/rbi/openai/models/responses/response_create_params.rbi +54 -0
- data/rbi/openai/models/responses/tool.rbi +243 -31
- data/rbi/openai/models.rbi +2 -0
- data/rbi/openai/resources/conversations/items.rbi +152 -0
- data/rbi/openai/resources/conversations.rbi +110 -0
- data/rbi/openai/resources/responses/input_items.rbi +0 -3
- data/rbi/openai/resources/responses.rbi +26 -0
- data/sig/openai/client.rbs +2 -0
- data/sig/openai/internal/conversation_cursor_page.rbs +15 -0
- data/sig/openai/models/conversations/computer_screenshot_content.rbs +28 -0
- data/sig/openai/models/conversations/container_file_citation_body.rbs +47 -0
- data/sig/openai/models/conversations/conversation.rbs +37 -0
- data/sig/openai/models/conversations/conversation_create_params.rbs +33 -0
- data/sig/openai/models/conversations/conversation_delete_params.rbs +17 -0
- data/sig/openai/models/conversations/conversation_deleted.rbs +28 -0
- data/sig/openai/models/conversations/conversation_deleted_resource.rbs +28 -0
- data/sig/openai/models/conversations/conversation_item.rbs +403 -0
- data/sig/openai/models/conversations/conversation_item_list.rbs +44 -0
- data/sig/openai/models/conversations/conversation_retrieve_params.rbs +17 -0
- data/sig/openai/models/conversations/conversation_update_params.rbs +26 -0
- data/sig/openai/models/conversations/file_citation_body.rbs +37 -0
- data/sig/openai/models/conversations/input_file_content.rbs +41 -0
- data/sig/openai/models/conversations/input_image_content.rbs +49 -0
- data/sig/openai/models/conversations/input_text_content.rbs +17 -0
- data/sig/openai/models/conversations/item_create_params.rbs +37 -0
- data/sig/openai/models/conversations/item_delete_params.rbs +25 -0
- data/sig/openai/models/conversations/item_list_params.rbs +66 -0
- data/sig/openai/models/conversations/item_retrieve_params.rbs +37 -0
- data/sig/openai/models/conversations/lob_prob.rbs +37 -0
- data/sig/openai/models/conversations/message.rbs +95 -0
- data/sig/openai/models/conversations/output_text_content.rbs +52 -0
- data/sig/openai/models/conversations/refusal_content.rbs +17 -0
- data/sig/openai/models/conversations/summary_text_content.rbs +17 -0
- data/sig/openai/models/conversations/text_content.rbs +17 -0
- data/sig/openai/models/conversations/top_log_prob.rbs +28 -0
- data/sig/openai/models/conversations/url_citation_body.rbs +42 -0
- data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +22 -22
- data/sig/openai/models/responses/input_item_list_params.rbs +0 -7
- data/sig/openai/models/responses/response.rbs +15 -0
- data/sig/openai/models/responses/response_conversation_param.rbs +15 -0
- data/sig/openai/models/responses/response_create_params.rbs +14 -0
- data/sig/openai/models/responses/tool.rbs +83 -18
- data/sig/openai/models.rbs +2 -0
- data/sig/openai/resources/conversations/items.rbs +38 -0
- data/sig/openai/resources/conversations.rbs +31 -0
- data/sig/openai/resources/responses/input_items.rbs +0 -1
- data/sig/openai/resources/responses.rbs +2 -0
- metadata +95 -2
@@ -0,0 +1,42 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
class FileCitationBody < OpenAI::Internal::Type::BaseModel
|
7
|
+
# @!attribute file_id
|
8
|
+
# The ID of the file.
|
9
|
+
#
|
10
|
+
# @return [String]
|
11
|
+
required :file_id, String
|
12
|
+
|
13
|
+
# @!attribute filename
|
14
|
+
# The filename of the file cited.
|
15
|
+
#
|
16
|
+
# @return [String]
|
17
|
+
required :filename, String
|
18
|
+
|
19
|
+
# @!attribute index
|
20
|
+
# The index of the file in the list of files.
|
21
|
+
#
|
22
|
+
# @return [Integer]
|
23
|
+
required :index, Integer
|
24
|
+
|
25
|
+
# @!attribute type
|
26
|
+
# The type of the file citation. Always `file_citation`.
|
27
|
+
#
|
28
|
+
# @return [Symbol, :file_citation]
|
29
|
+
required :type, const: :file_citation
|
30
|
+
|
31
|
+
# @!method initialize(file_id:, filename:, index:, type: :file_citation)
|
32
|
+
# @param file_id [String] The ID of the file.
|
33
|
+
#
|
34
|
+
# @param filename [String] The filename of the file cited.
|
35
|
+
#
|
36
|
+
# @param index [Integer] The index of the file in the list of files.
|
37
|
+
#
|
38
|
+
# @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`.
|
39
|
+
end
|
40
|
+
end
|
41
|
+
end
|
42
|
+
end
|
@@ -0,0 +1,42 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
class InputFileContent < OpenAI::Internal::Type::BaseModel
|
7
|
+
# @!attribute file_id
|
8
|
+
# The ID of the file to be sent to the model.
|
9
|
+
#
|
10
|
+
# @return [String, nil]
|
11
|
+
required :file_id, String, nil?: true
|
12
|
+
|
13
|
+
# @!attribute type
|
14
|
+
# The type of the input item. Always `input_file`.
|
15
|
+
#
|
16
|
+
# @return [Symbol, :input_file]
|
17
|
+
required :type, const: :input_file
|
18
|
+
|
19
|
+
# @!attribute file_url
|
20
|
+
# The URL of the file to be sent to the model.
|
21
|
+
#
|
22
|
+
# @return [String, nil]
|
23
|
+
optional :file_url, String
|
24
|
+
|
25
|
+
# @!attribute filename
|
26
|
+
# The name of the file to be sent to the model.
|
27
|
+
#
|
28
|
+
# @return [String, nil]
|
29
|
+
optional :filename, String
|
30
|
+
|
31
|
+
# @!method initialize(file_id:, file_url: nil, filename: nil, type: :input_file)
|
32
|
+
# @param file_id [String, nil] The ID of the file to be sent to the model.
|
33
|
+
#
|
34
|
+
# @param file_url [String] The URL of the file to be sent to the model.
|
35
|
+
#
|
36
|
+
# @param filename [String] The name of the file to be sent to the model.
|
37
|
+
#
|
38
|
+
# @param type [Symbol, :input_file] The type of the input item. Always `input_file`.
|
39
|
+
end
|
40
|
+
end
|
41
|
+
end
|
42
|
+
end
|
@@ -0,0 +1,62 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
class InputImageContent < OpenAI::Internal::Type::BaseModel
|
7
|
+
# @!attribute detail
|
8
|
+
# The detail level of the image to be sent to the model. One of `high`, `low`, or
|
9
|
+
# `auto`. Defaults to `auto`.
|
10
|
+
#
|
11
|
+
# @return [Symbol, OpenAI::Models::Conversations::InputImageContent::Detail]
|
12
|
+
required :detail, enum: -> { OpenAI::Conversations::InputImageContent::Detail }
|
13
|
+
|
14
|
+
# @!attribute file_id
|
15
|
+
# The ID of the file to be sent to the model.
|
16
|
+
#
|
17
|
+
# @return [String, nil]
|
18
|
+
required :file_id, String, nil?: true
|
19
|
+
|
20
|
+
# @!attribute image_url
|
21
|
+
# The URL of the image to be sent to the model. A fully qualified URL or base64
|
22
|
+
# encoded image in a data URL.
|
23
|
+
#
|
24
|
+
# @return [String, nil]
|
25
|
+
required :image_url, String, nil?: true
|
26
|
+
|
27
|
+
# @!attribute type
|
28
|
+
# The type of the input item. Always `input_image`.
|
29
|
+
#
|
30
|
+
# @return [Symbol, :input_image]
|
31
|
+
required :type, const: :input_image
|
32
|
+
|
33
|
+
# @!method initialize(detail:, file_id:, image_url:, type: :input_image)
|
34
|
+
# Some parameter documentations has been truncated, see
|
35
|
+
# {OpenAI::Models::Conversations::InputImageContent} for more details.
|
36
|
+
#
|
37
|
+
# @param detail [Symbol, OpenAI::Models::Conversations::InputImageContent::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or
|
38
|
+
#
|
39
|
+
# @param file_id [String, nil] The ID of the file to be sent to the model.
|
40
|
+
#
|
41
|
+
# @param image_url [String, nil] The URL of the image to be sent to the model. A fully qualified URL or base64 en
|
42
|
+
#
|
43
|
+
# @param type [Symbol, :input_image] The type of the input item. Always `input_image`.
|
44
|
+
|
45
|
+
# The detail level of the image to be sent to the model. One of `high`, `low`, or
|
46
|
+
# `auto`. Defaults to `auto`.
|
47
|
+
#
|
48
|
+
# @see OpenAI::Models::Conversations::InputImageContent#detail
|
49
|
+
module Detail
|
50
|
+
extend OpenAI::Internal::Type::Enum
|
51
|
+
|
52
|
+
LOW = :low
|
53
|
+
HIGH = :high
|
54
|
+
AUTO = :auto
|
55
|
+
|
56
|
+
# @!method self.values
|
57
|
+
# @return [Array<Symbol>]
|
58
|
+
end
|
59
|
+
end
|
60
|
+
end
|
61
|
+
end
|
62
|
+
end
|
@@ -0,0 +1,26 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
class InputTextContent < OpenAI::Internal::Type::BaseModel
|
7
|
+
# @!attribute text
|
8
|
+
# The text input to the model.
|
9
|
+
#
|
10
|
+
# @return [String]
|
11
|
+
required :text, String
|
12
|
+
|
13
|
+
# @!attribute type
|
14
|
+
# The type of the input item. Always `input_text`.
|
15
|
+
#
|
16
|
+
# @return [Symbol, :input_text]
|
17
|
+
required :type, const: :input_text
|
18
|
+
|
19
|
+
# @!method initialize(text:, type: :input_text)
|
20
|
+
# @param text [String] The text input to the model.
|
21
|
+
#
|
22
|
+
# @param type [Symbol, :input_text] The type of the input item. Always `input_text`.
|
23
|
+
end
|
24
|
+
end
|
25
|
+
end
|
26
|
+
end
|
@@ -0,0 +1,37 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
# @see OpenAI::Resources::Conversations::Items#create
|
7
|
+
class ItemCreateParams < OpenAI::Internal::Type::BaseModel
|
8
|
+
extend OpenAI::Internal::Type::RequestParameters::Converter
|
9
|
+
include OpenAI::Internal::Type::RequestParameters
|
10
|
+
|
11
|
+
# @!attribute items
|
12
|
+
# The items to add to the conversation. You may add up to 20 items at a time.
|
13
|
+
#
|
14
|
+
# @return [Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>]
|
15
|
+
required :items, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputItem] }
|
16
|
+
|
17
|
+
# @!attribute include
|
18
|
+
# Additional fields to include in the response. See the `include` parameter for
|
19
|
+
# [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include)
|
20
|
+
# for more information.
|
21
|
+
#
|
22
|
+
# @return [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil]
|
23
|
+
optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }
|
24
|
+
|
25
|
+
# @!method initialize(items:, include: nil, request_options: {})
|
26
|
+
# Some parameter documentations has been truncated, see
|
27
|
+
# {OpenAI::Models::Conversations::ItemCreateParams} for more details.
|
28
|
+
#
|
29
|
+
# @param items [Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] The items to add to the conversation. You may add up to 20 items at a time.
|
30
|
+
#
|
31
|
+
# @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
|
32
|
+
#
|
33
|
+
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
|
34
|
+
end
|
35
|
+
end
|
36
|
+
end
|
37
|
+
end
|
@@ -0,0 +1,22 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
# @see OpenAI::Resources::Conversations::Items#delete
|
7
|
+
class ItemDeleteParams < OpenAI::Internal::Type::BaseModel
|
8
|
+
extend OpenAI::Internal::Type::RequestParameters::Converter
|
9
|
+
include OpenAI::Internal::Type::RequestParameters
|
10
|
+
|
11
|
+
# @!attribute conversation_id
|
12
|
+
#
|
13
|
+
# @return [String]
|
14
|
+
required :conversation_id, String
|
15
|
+
|
16
|
+
# @!method initialize(conversation_id:, request_options: {})
|
17
|
+
# @param conversation_id [String]
|
18
|
+
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
|
19
|
+
end
|
20
|
+
end
|
21
|
+
end
|
22
|
+
end
|
@@ -0,0 +1,84 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
# @see OpenAI::Resources::Conversations::Items#list
|
7
|
+
class ItemListParams < OpenAI::Internal::Type::BaseModel
|
8
|
+
extend OpenAI::Internal::Type::RequestParameters::Converter
|
9
|
+
include OpenAI::Internal::Type::RequestParameters
|
10
|
+
|
11
|
+
# @!attribute after
|
12
|
+
# An item ID to list items after, used in pagination.
|
13
|
+
#
|
14
|
+
# @return [String, nil]
|
15
|
+
optional :after, String
|
16
|
+
|
17
|
+
# @!attribute include
|
18
|
+
# Specify additional output data to include in the model response. Currently
|
19
|
+
# supported values are:
|
20
|
+
#
|
21
|
+
# - `code_interpreter_call.outputs`: Includes the outputs of python code execution
|
22
|
+
# in code interpreter tool call items.
|
23
|
+
# - `computer_call_output.output.image_url`: Include image urls from the computer
|
24
|
+
# call output.
|
25
|
+
# - `file_search_call.results`: Include the search results of the file search tool
|
26
|
+
# call.
|
27
|
+
# - `message.input_image.image_url`: Include image urls from the input message.
|
28
|
+
# - `message.output_text.logprobs`: Include logprobs with assistant messages.
|
29
|
+
# - `reasoning.encrypted_content`: Includes an encrypted version of reasoning
|
30
|
+
# tokens in reasoning item outputs. This enables reasoning items to be used in
|
31
|
+
# multi-turn conversations when using the Responses API statelessly (like when
|
32
|
+
# the `store` parameter is set to `false`, or when an organization is enrolled
|
33
|
+
# in the zero data retention program).
|
34
|
+
#
|
35
|
+
# @return [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil]
|
36
|
+
optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }
|
37
|
+
|
38
|
+
# @!attribute limit
|
39
|
+
# A limit on the number of objects to be returned. Limit can range between 1 and
|
40
|
+
# 100, and the default is 20.
|
41
|
+
#
|
42
|
+
# @return [Integer, nil]
|
43
|
+
optional :limit, Integer
|
44
|
+
|
45
|
+
# @!attribute order
|
46
|
+
# The order to return the input items in. Default is `desc`.
|
47
|
+
#
|
48
|
+
# - `asc`: Return the input items in ascending order.
|
49
|
+
# - `desc`: Return the input items in descending order.
|
50
|
+
#
|
51
|
+
# @return [Symbol, OpenAI::Models::Conversations::ItemListParams::Order, nil]
|
52
|
+
optional :order, enum: -> { OpenAI::Conversations::ItemListParams::Order }
|
53
|
+
|
54
|
+
# @!method initialize(after: nil, include: nil, limit: nil, order: nil, request_options: {})
|
55
|
+
# Some parameter documentations has been truncated, see
|
56
|
+
# {OpenAI::Models::Conversations::ItemListParams} for more details.
|
57
|
+
#
|
58
|
+
# @param after [String] An item ID to list items after, used in pagination.
|
59
|
+
#
|
60
|
+
# @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Specify additional output data to include in the model response. Currently
|
61
|
+
#
|
62
|
+
# @param limit [Integer] A limit on the number of objects to be returned. Limit can range between
|
63
|
+
#
|
64
|
+
# @param order [Symbol, OpenAI::Models::Conversations::ItemListParams::Order] The order to return the input items in. Default is `desc`.
|
65
|
+
#
|
66
|
+
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
|
67
|
+
|
68
|
+
# The order to return the input items in. Default is `desc`.
|
69
|
+
#
|
70
|
+
# - `asc`: Return the input items in ascending order.
|
71
|
+
# - `desc`: Return the input items in descending order.
|
72
|
+
module Order
|
73
|
+
extend OpenAI::Internal::Type::Enum
|
74
|
+
|
75
|
+
ASC = :asc
|
76
|
+
DESC = :desc
|
77
|
+
|
78
|
+
# @!method self.values
|
79
|
+
# @return [Array<Symbol>]
|
80
|
+
end
|
81
|
+
end
|
82
|
+
end
|
83
|
+
end
|
84
|
+
end
|
@@ -0,0 +1,36 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
# @see OpenAI::Resources::Conversations::Items#retrieve
|
7
|
+
class ItemRetrieveParams < OpenAI::Internal::Type::BaseModel
|
8
|
+
extend OpenAI::Internal::Type::RequestParameters::Converter
|
9
|
+
include OpenAI::Internal::Type::RequestParameters
|
10
|
+
|
11
|
+
# @!attribute conversation_id
|
12
|
+
#
|
13
|
+
# @return [String]
|
14
|
+
required :conversation_id, String
|
15
|
+
|
16
|
+
# @!attribute include
|
17
|
+
# Additional fields to include in the response. See the `include` parameter for
|
18
|
+
# [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include)
|
19
|
+
# for more information.
|
20
|
+
#
|
21
|
+
# @return [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil]
|
22
|
+
optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }
|
23
|
+
|
24
|
+
# @!method initialize(conversation_id:, include: nil, request_options: {})
|
25
|
+
# Some parameter documentations has been truncated, see
|
26
|
+
# {OpenAI::Models::Conversations::ItemRetrieveParams} for more details.
|
27
|
+
#
|
28
|
+
# @param conversation_id [String]
|
29
|
+
#
|
30
|
+
# @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
|
31
|
+
#
|
32
|
+
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
|
33
|
+
end
|
34
|
+
end
|
35
|
+
end
|
36
|
+
end
|
@@ -0,0 +1,35 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
class LobProb < OpenAI::Internal::Type::BaseModel
|
7
|
+
# @!attribute token
|
8
|
+
#
|
9
|
+
# @return [String]
|
10
|
+
required :token, String
|
11
|
+
|
12
|
+
# @!attribute bytes
|
13
|
+
#
|
14
|
+
# @return [Array<Integer>]
|
15
|
+
required :bytes, OpenAI::Internal::Type::ArrayOf[Integer]
|
16
|
+
|
17
|
+
# @!attribute logprob
|
18
|
+
#
|
19
|
+
# @return [Float]
|
20
|
+
required :logprob, Float
|
21
|
+
|
22
|
+
# @!attribute top_logprobs
|
23
|
+
#
|
24
|
+
# @return [Array<OpenAI::Models::Conversations::TopLogProb>]
|
25
|
+
required :top_logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::TopLogProb] }
|
26
|
+
|
27
|
+
# @!method initialize(token:, bytes:, logprob:, top_logprobs:)
|
28
|
+
# @param token [String]
|
29
|
+
# @param bytes [Array<Integer>]
|
30
|
+
# @param logprob [Float]
|
31
|
+
# @param top_logprobs [Array<OpenAI::Models::Conversations::TopLogProb>]
|
32
|
+
end
|
33
|
+
end
|
34
|
+
end
|
35
|
+
end
|
@@ -0,0 +1,115 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OpenAI
|
4
|
+
module Models
|
5
|
+
module Conversations
|
6
|
+
class Message < OpenAI::Internal::Type::BaseModel
|
7
|
+
# @!attribute id
|
8
|
+
# The unique ID of the message.
|
9
|
+
#
|
10
|
+
# @return [String]
|
11
|
+
required :id, String
|
12
|
+
|
13
|
+
# @!attribute content
|
14
|
+
# The content of the message
|
15
|
+
#
|
16
|
+
# @return [Array<OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent, OpenAI::Models::Conversations::TextContent, OpenAI::Models::Conversations::SummaryTextContent, OpenAI::Models::Conversations::RefusalContent, OpenAI::Models::Conversations::InputImageContent, OpenAI::Models::Conversations::ComputerScreenshotContent, OpenAI::Models::Conversations::InputFileContent>]
|
17
|
+
required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::Message::Content] }
|
18
|
+
|
19
|
+
# @!attribute role
|
20
|
+
# The role of the message. One of `unknown`, `user`, `assistant`, `system`,
|
21
|
+
# `critic`, `discriminator`, `developer`, or `tool`.
|
22
|
+
#
|
23
|
+
# @return [Symbol, OpenAI::Models::Conversations::Message::Role]
|
24
|
+
required :role, enum: -> { OpenAI::Conversations::Message::Role }
|
25
|
+
|
26
|
+
# @!attribute status
|
27
|
+
# The status of item. One of `in_progress`, `completed`, or `incomplete`.
|
28
|
+
# Populated when items are returned via API.
|
29
|
+
#
|
30
|
+
# @return [Symbol, OpenAI::Models::Conversations::Message::Status]
|
31
|
+
required :status, enum: -> { OpenAI::Conversations::Message::Status }
|
32
|
+
|
33
|
+
# @!attribute type
|
34
|
+
# The type of the message. Always set to `message`.
|
35
|
+
#
|
36
|
+
# @return [Symbol, :message]
|
37
|
+
required :type, const: :message
|
38
|
+
|
39
|
+
# @!method initialize(id:, content:, role:, status:, type: :message)
|
40
|
+
# Some parameter documentations has been truncated, see
|
41
|
+
# {OpenAI::Models::Conversations::Message} for more details.
|
42
|
+
#
|
43
|
+
# @param id [String] The unique ID of the message.
|
44
|
+
#
|
45
|
+
# @param content [Array<OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent, OpenAI::Models::Conversations::TextContent, OpenAI::Models::Conversations::SummaryTextContent, OpenAI::Models::Conversations::RefusalContent, OpenAI::Models::Conversations::InputImageContent, OpenAI::Models::Conversations::ComputerScreenshotContent, OpenAI::Models::Conversations::InputFileContent>] The content of the message
|
46
|
+
#
|
47
|
+
# @param role [Symbol, OpenAI::Models::Conversations::Message::Role] The role of the message. One of `unknown`, `user`, `assistant`, `system`, `criti
|
48
|
+
#
|
49
|
+
# @param status [Symbol, OpenAI::Models::Conversations::Message::Status] The status of item. One of `in_progress`, `completed`, or `incomplete`. Populate
|
50
|
+
#
|
51
|
+
# @param type [Symbol, :message] The type of the message. Always set to `message`.
|
52
|
+
|
53
|
+
# Discriminated union over the possible content parts of a conversation
# message; the `type` field of the payload selects the concrete variant.
module Content
  extend OpenAI::Internal::Type::Union

  discriminator :type

  variant :input_text, -> { OpenAI::Conversations::InputTextContent }

  variant :output_text, -> { OpenAI::Conversations::OutputTextContent }

  variant :text, -> { OpenAI::Conversations::TextContent }

  variant :summary_text, -> { OpenAI::Conversations::SummaryTextContent }

  variant :refusal, -> { OpenAI::Conversations::RefusalContent }

  variant :input_image, -> { OpenAI::Conversations::InputImageContent }

  variant :computer_screenshot, -> { OpenAI::Conversations::ComputerScreenshotContent }

  variant :input_file, -> { OpenAI::Conversations::InputFileContent }

  # @!method self.variants
  # @return [Array(OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent, OpenAI::Models::Conversations::TextContent, OpenAI::Models::Conversations::SummaryTextContent, OpenAI::Models::Conversations::RefusalContent, OpenAI::Models::Conversations::InputImageContent, OpenAI::Models::Conversations::ComputerScreenshotContent, OpenAI::Models::Conversations::InputFileContent)]
end
# The role of the message. One of `unknown`, `user`, `assistant`, `system`,
# `critic`, `discriminator`, `developer`, or `tool`.
#
# @see OpenAI::Models::Conversations::Message#role
module Role
  extend OpenAI::Internal::Type::Enum

  UNKNOWN = :unknown
  USER = :user
  ASSISTANT = :assistant
  SYSTEM = :system
  CRITIC = :critic
  DISCRIMINATOR = :discriminator
  DEVELOPER = :developer
  TOOL = :tool

  # @!method self.values
  # @return [Array<Symbol>]
end
# The status of the item. One of `in_progress`, `completed`, or `incomplete`.
# Populated when items are returned via API.
#
# @see OpenAI::Models::Conversations::Message#status
module Status
  extend OpenAI::Internal::Type::Enum

  IN_PROGRESS = :in_progress
  COMPLETED = :completed
  INCOMPLETE = :incomplete

  # @!method self.values
  # @return [Array<Symbol>]
end
end
|
113
|
+
end
|
114
|
+
end
|
115
|
+
end
|
@@ -0,0 +1,57 @@
|
|
1
|
+
# frozen_string_literal: true

module OpenAI
  module Models
    module Conversations
      # A piece of text output produced by the model, together with any
      # citation annotations and (optionally) token log probabilities.
      class OutputTextContent < OpenAI::Internal::Type::BaseModel
        # @!attribute annotations
        # The annotations of the text output.
        #
        # @return [Array<OpenAI::Models::Conversations::FileCitationBody, OpenAI::Models::Conversations::URLCitationBody, OpenAI::Models::Conversations::ContainerFileCitationBody>]
        required :annotations,
                 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::OutputTextContent::Annotation] }

        # @!attribute text
        # The text output from the model.
        #
        # @return [String]
        required :text, String

        # @!attribute type
        # The type of the output text. Always `output_text`.
        #
        # @return [Symbol, :output_text]
        required :type, const: :output_text

        # @!attribute logprobs
        # Presumably per-token log-probability entries for the generated text —
        # confirm against the Conversations API reference.
        # NOTE(review): the referenced model is spelled `LobProb` (sic) elsewhere
        # in this gem; keep the reference as-is until the upstream name changes.
        #
        # @return [Array<OpenAI::Models::Conversations::LobProb>, nil]
        optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::LobProb] }

        # @!method initialize(annotations:, text:, logprobs: nil, type: :output_text)
        # @param annotations [Array<OpenAI::Models::Conversations::FileCitationBody, OpenAI::Models::Conversations::URLCitationBody, OpenAI::Models::Conversations::ContainerFileCitationBody>] The annotations of the text output.
        #
        # @param text [String] The text output from the model.
        #
        # @param logprobs [Array<OpenAI::Models::Conversations::LobProb>]
        #
        # @param type [Symbol, :output_text] The type of the output text. Always `output_text`.

        # Discriminated union over the citation annotation shapes; the `type`
        # field of the payload selects the concrete variant.
        module Annotation
          extend OpenAI::Internal::Type::Union

          discriminator :type

          variant :file_citation, -> { OpenAI::Conversations::FileCitationBody }

          variant :url_citation, -> { OpenAI::Conversations::URLCitationBody }

          variant :container_file_citation, -> { OpenAI::Conversations::ContainerFileCitationBody }

          # @!method self.variants
          # @return [Array(OpenAI::Models::Conversations::FileCitationBody, OpenAI::Models::Conversations::URLCitationBody, OpenAI::Models::Conversations::ContainerFileCitationBody)]
        end
      end
    end
  end
end
|
@@ -0,0 +1,26 @@
|
|
1
|
+
# frozen_string_literal: true

module OpenAI
  module Models
    module Conversations
      # A message content part indicating the model refused to comply,
      # carrying the refusal explanation text.
      class RefusalContent < OpenAI::Internal::Type::BaseModel
        # @!attribute refusal
        # The refusal explanation from the model.
        #
        # @return [String]
        required :refusal, String

        # @!attribute type
        # The type of the refusal. Always `refusal`.
        #
        # @return [Symbol, :refusal]
        required :type, const: :refusal

        # @!method initialize(refusal:, type: :refusal)
        # @param refusal [String] The refusal explanation from the model.
        #
        # @param type [Symbol, :refusal] The type of the refusal. Always `refusal`.
      end
    end
  end
end
|
@@ -0,0 +1,23 @@
|
|
1
|
+
# frozen_string_literal: true

module OpenAI
  module Models
    module Conversations
      # A `summary_text` message content part. Presumably carries summary text
      # emitted by the model (e.g. reasoning summaries) — the upstream spec
      # provides no description; confirm against the Conversations API docs.
      class SummaryTextContent < OpenAI::Internal::Type::BaseModel
        # @!attribute text
        # The summary text. (No upstream description — TODO confirm semantics.)
        #
        # @return [String]
        required :text, String

        # @!attribute type
        # The type of the content part. Always `summary_text`.
        #
        # @return [Symbol, :summary_text]
        required :type, const: :summary_text

        # @!method initialize(text:, type: :summary_text)
        # @param text [String]
        # @param type [Symbol, :summary_text]
      end
    end
  end
end
|
@@ -0,0 +1,23 @@
|
|
1
|
+
# frozen_string_literal: true

module OpenAI
  module Models
    module Conversations
      # A plain `text` message content part. The upstream spec provides no
      # field descriptions — presumably free-form text; confirm against the
      # Conversations API docs.
      class TextContent < OpenAI::Internal::Type::BaseModel
        # @!attribute text
        # The text content. (No upstream description — TODO confirm semantics.)
        #
        # @return [String]
        required :text, String

        # @!attribute type
        # The type of the content part. Always `text`.
        #
        # @return [Symbol, :text]
        required :type, const: :text

        # @!method initialize(text:, type: :text)
        # @param text [String]
        # @param type [Symbol, :text]
      end
    end
  end
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
# frozen_string_literal: true

module OpenAI
  module Models
    module Conversations
      # A single top-log-probability entry: a candidate token, its UTF-8 byte
      # representation, and its log probability. Presumably mirrors the
      # `top_logprobs` entries of the Responses API — confirm against API docs.
      class TopLogProb < OpenAI::Internal::Type::BaseModel
        # @!attribute token
        # The candidate token text.
        #
        # @return [String]
        required :token, String

        # @!attribute bytes
        # The byte values of the token (presumably UTF-8 — TODO confirm).
        #
        # @return [Array<Integer>]
        required :bytes, OpenAI::Internal::Type::ArrayOf[Integer]

        # @!attribute logprob
        # The log probability of the token.
        #
        # @return [Float]
        required :logprob, Float

        # @!method initialize(token:, bytes:, logprob:)
        # @param token [String]
        # @param bytes [Array<Integer>]
        # @param logprob [Float]
      end
    end
  end
end
|