openai 0.18.1 → 0.19.0
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/README.md +1 -1
- data/lib/openai/client.rb +4 -0
- data/lib/openai/helpers/structured_output/boolean.rb +1 -0
- data/lib/openai/internal/conversation_cursor_page.rb +92 -0
- data/lib/openai/internal/transport/base_client.rb +1 -4
- data/lib/openai/internal/transport/pooled_net_requester.rb +1 -9
- data/lib/openai/internal/util.rb +1 -1
- data/lib/openai/models/audio/transcription.rb +1 -4
- data/lib/openai/models/audio/transcription_create_params.rb +2 -7
- data/lib/openai/models/audio/transcription_text_done_event.rb +1 -4
- data/lib/openai/models/beta/assistant_create_params.rb +6 -19
- data/lib/openai/models/beta/assistant_stream_event.rb +6 -24
- data/lib/openai/models/beta/assistant_update_params.rb +1 -4
- data/lib/openai/models/beta/message_stream_event.rb +1 -4
- data/lib/openai/models/beta/run_step_stream_event.rb +1 -4
- data/lib/openai/models/beta/thread_create_and_run_params.rb +10 -32
- data/lib/openai/models/beta/thread_create_params.rb +7 -22
- data/lib/openai/models/beta/threads/message.rb +3 -10
- data/lib/openai/models/beta/threads/message_create_params.rb +2 -7
- data/lib/openai/models/beta/threads/run.rb +2 -7
- data/lib/openai/models/beta/threads/run_create_params.rb +3 -10
- data/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +1 -3
- data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +5 -17
- data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +1 -3
- data/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +4 -12
- data/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +1 -4
- data/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +1 -4
- data/lib/openai/models/beta/threads/text.rb +1 -4
- data/lib/openai/models/chat/chat_completion_chunk.rb +1 -3
- data/lib/openai/models/chat/chat_completion_custom_tool.rb +2 -7
- data/lib/openai/models/conversations/computer_screenshot_content.rb +38 -0
- data/lib/openai/models/conversations/container_file_citation_body.rb +58 -0
- data/lib/openai/models/conversations/conversation.rb +51 -0
- data/lib/openai/models/conversations/conversation_create_params.rb +39 -0
- data/lib/openai/models/conversations/conversation_delete_params.rb +16 -0
- data/lib/openai/models/conversations/conversation_deleted.rb +29 -0
- data/lib/openai/models/conversations/conversation_deleted_resource.rb +30 -0
- data/lib/openai/models/conversations/conversation_item.rb +568 -0
- data/lib/openai/models/conversations/conversation_item_list.rb +55 -0
- data/lib/openai/models/conversations/conversation_retrieve_params.rb +16 -0
- data/lib/openai/models/conversations/conversation_update_params.rb +31 -0
- data/lib/openai/models/conversations/file_citation_body.rb +42 -0
- data/lib/openai/models/conversations/input_file_content.rb +42 -0
- data/lib/openai/models/conversations/input_image_content.rb +62 -0
- data/lib/openai/models/conversations/input_text_content.rb +26 -0
- data/lib/openai/models/conversations/item_create_params.rb +37 -0
- data/lib/openai/models/conversations/item_delete_params.rb +22 -0
- data/lib/openai/models/conversations/item_list_params.rb +84 -0
- data/lib/openai/models/conversations/item_retrieve_params.rb +36 -0
- data/lib/openai/models/conversations/lob_prob.rb +35 -0
- data/lib/openai/models/conversations/message.rb +115 -0
- data/lib/openai/models/conversations/output_text_content.rb +57 -0
- data/lib/openai/models/conversations/refusal_content.rb +26 -0
- data/lib/openai/models/conversations/summary_text_content.rb +23 -0
- data/lib/openai/models/conversations/text_content.rb +23 -0
- data/lib/openai/models/conversations/top_log_prob.rb +29 -0
- data/lib/openai/models/conversations/url_citation_body.rb +50 -0
- data/lib/openai/models/eval_create_params.rb +6 -20
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +29 -53
- data/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +1 -3
- data/lib/openai/models/evals/run_create_params.rb +18 -54
- data/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +1 -4
- data/lib/openai/models/moderation.rb +5 -15
- data/lib/openai/models/responses/input_item_list_params.rb +1 -9
- data/lib/openai/models/responses/response.rb +26 -1
- data/lib/openai/models/responses/response_computer_tool_call.rb +2 -6
- data/lib/openai/models/responses/response_computer_tool_call_output_item.rb +1 -3
- data/lib/openai/models/responses/response_conversation_param.rb +20 -0
- data/lib/openai/models/responses/response_create_params.rb +34 -1
- data/lib/openai/models/responses/response_input_item.rb +2 -7
- data/lib/openai/models/responses/response_input_message_item.rb +1 -4
- data/lib/openai/models/responses/response_output_item.rb +1 -3
- data/lib/openai/models/responses/response_output_message.rb +1 -3
- data/lib/openai/models/responses/response_output_text.rb +3 -10
- data/lib/openai/models/responses/response_stream_event.rb +4 -16
- data/lib/openai/models/responses/response_text_delta_event.rb +1 -3
- data/lib/openai/models/responses/response_text_done_event.rb +1 -3
- data/lib/openai/models/responses/tool.rb +145 -34
- data/lib/openai/models.rb +2 -0
- data/lib/openai/resources/conversations/items.rb +141 -0
- data/lib/openai/resources/conversations.rb +112 -0
- data/lib/openai/resources/responses/input_items.rb +1 -3
- data/lib/openai/resources/responses.rb +6 -2
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +31 -0
- data/rbi/openai/client.rbi +3 -0
- data/rbi/openai/errors.rbi +5 -5
- data/rbi/openai/internal/conversation_cursor_page.rbi +25 -0
- data/rbi/openai/models/conversations/computer_screenshot_content.rbi +60 -0
- data/rbi/openai/models/conversations/container_file_citation_body.rbi +82 -0
- data/rbi/openai/models/conversations/conversation.rbi +76 -0
- data/rbi/openai/models/conversations/conversation_create_params.rbi +144 -0
- data/rbi/openai/models/conversations/conversation_delete_params.rbi +32 -0
- data/rbi/openai/models/conversations/conversation_deleted.rbi +40 -0
- data/rbi/openai/models/conversations/conversation_deleted_resource.rbi +40 -0
- data/rbi/openai/models/conversations/conversation_item.rbi +835 -0
- data/rbi/openai/models/conversations/conversation_item_list.rbi +101 -0
- data/rbi/openai/models/conversations/conversation_retrieve_params.rbi +32 -0
- data/rbi/openai/models/conversations/conversation_update_params.rbi +56 -0
- data/rbi/openai/models/conversations/file_citation_body.rbi +61 -0
- data/rbi/openai/models/conversations/input_file_content.rbi +72 -0
- data/rbi/openai/models/conversations/input_image_content.rbi +113 -0
- data/rbi/openai/models/conversations/input_text_content.rbi +38 -0
- data/rbi/openai/models/conversations/item_create_params.rbi +150 -0
- data/rbi/openai/models/conversations/item_delete_params.rbi +40 -0
- data/rbi/openai/models/conversations/item_list_params.rbi +174 -0
- data/rbi/openai/models/conversations/item_retrieve_params.rbi +70 -0
- data/rbi/openai/models/conversations/lob_prob.rbi +50 -0
- data/rbi/openai/models/conversations/message.rbi +196 -0
- data/rbi/openai/models/conversations/output_text_content.rbi +110 -0
- data/rbi/openai/models/conversations/refusal_content.rbi +38 -0
- data/rbi/openai/models/conversations/summary_text_content.rbi +31 -0
- data/rbi/openai/models/conversations/text_content.rbi +28 -0
- data/rbi/openai/models/conversations/top_log_prob.rbi +41 -0
- data/rbi/openai/models/conversations/url_citation_body.rbi +74 -0
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +33 -33
- data/rbi/openai/models/responses/input_item_list_params.rbi +0 -11
- data/rbi/openai/models/responses/response.rbi +49 -0
- data/rbi/openai/models/responses/response_conversation_param.rbi +33 -0
- data/rbi/openai/models/responses/response_create_params.rbi +54 -0
- data/rbi/openai/models/responses/tool.rbi +243 -31
- data/rbi/openai/models.rbi +2 -0
- data/rbi/openai/resources/conversations/items.rbi +152 -0
- data/rbi/openai/resources/conversations.rbi +110 -0
- data/rbi/openai/resources/responses/input_items.rbi +0 -3
- data/rbi/openai/resources/responses.rbi +26 -0
- data/sig/openai/client.rbs +2 -0
- data/sig/openai/internal/conversation_cursor_page.rbs +15 -0
- data/sig/openai/models/conversations/computer_screenshot_content.rbs +28 -0
- data/sig/openai/models/conversations/container_file_citation_body.rbs +47 -0
- data/sig/openai/models/conversations/conversation.rbs +37 -0
- data/sig/openai/models/conversations/conversation_create_params.rbs +33 -0
- data/sig/openai/models/conversations/conversation_delete_params.rbs +17 -0
- data/sig/openai/models/conversations/conversation_deleted.rbs +28 -0
- data/sig/openai/models/conversations/conversation_deleted_resource.rbs +28 -0
- data/sig/openai/models/conversations/conversation_item.rbs +403 -0
- data/sig/openai/models/conversations/conversation_item_list.rbs +44 -0
- data/sig/openai/models/conversations/conversation_retrieve_params.rbs +17 -0
- data/sig/openai/models/conversations/conversation_update_params.rbs +26 -0
- data/sig/openai/models/conversations/file_citation_body.rbs +37 -0
- data/sig/openai/models/conversations/input_file_content.rbs +41 -0
- data/sig/openai/models/conversations/input_image_content.rbs +49 -0
- data/sig/openai/models/conversations/input_text_content.rbs +17 -0
- data/sig/openai/models/conversations/item_create_params.rbs +37 -0
- data/sig/openai/models/conversations/item_delete_params.rbs +25 -0
- data/sig/openai/models/conversations/item_list_params.rbs +66 -0
- data/sig/openai/models/conversations/item_retrieve_params.rbs +37 -0
- data/sig/openai/models/conversations/lob_prob.rbs +37 -0
- data/sig/openai/models/conversations/message.rbs +95 -0
- data/sig/openai/models/conversations/output_text_content.rbs +52 -0
- data/sig/openai/models/conversations/refusal_content.rbs +17 -0
- data/sig/openai/models/conversations/summary_text_content.rbs +17 -0
- data/sig/openai/models/conversations/text_content.rbs +17 -0
- data/sig/openai/models/conversations/top_log_prob.rbs +28 -0
- data/sig/openai/models/conversations/url_citation_body.rbs +42 -0
- data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +22 -22
- data/sig/openai/models/responses/input_item_list_params.rbs +0 -7
- data/sig/openai/models/responses/response.rbs +15 -0
- data/sig/openai/models/responses/response_conversation_param.rbs +15 -0
- data/sig/openai/models/responses/response_create_params.rbs +14 -0
- data/sig/openai/models/responses/tool.rbs +83 -18
- data/sig/openai/models.rbs +2 -0
- data/sig/openai/resources/conversations/items.rbs +38 -0
- data/sig/openai/resources/conversations.rbs +31 -0
- data/sig/openai/resources/responses/input_items.rbs +0 -1
- data/sig/openai/resources/responses.rbs +2 -0
- metadata +95 -2
data/rbi/openai/resources/conversations.rbi
@@ -0,0 +1,110 @@
+# typed: strong
+
+module OpenAI
+  module Resources
+    class Conversations
+      sig { returns(OpenAI::Resources::Conversations::Items) }
+      attr_reader :items
+
+      # Create a conversation with the given ID.
+      sig do
+        params(
+          items:
+            T.nilable(
+              T::Array[
+                T.any(
+                  OpenAI::Responses::EasyInputMessage::OrHash,
+                  OpenAI::Responses::ResponseInputItem::Message::OrHash,
+                  OpenAI::Responses::ResponseOutputMessage::OrHash,
+                  OpenAI::Responses::ResponseFileSearchToolCall::OrHash,
+                  OpenAI::Responses::ResponseComputerToolCall::OrHash,
+                  OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash,
+                  OpenAI::Responses::ResponseFunctionWebSearch::OrHash,
+                  OpenAI::Responses::ResponseFunctionToolCall::OrHash,
+                  OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash,
+                  OpenAI::Responses::ResponseReasoningItem::OrHash,
+                  OpenAI::Responses::ResponseInputItem::ImageGenerationCall::OrHash,
+                  OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash,
+                  OpenAI::Responses::ResponseInputItem::LocalShellCall::OrHash,
+                  OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::OrHash,
+                  OpenAI::Responses::ResponseInputItem::McpListTools::OrHash,
+                  OpenAI::Responses::ResponseInputItem::McpApprovalRequest::OrHash,
+                  OpenAI::Responses::ResponseInputItem::McpApprovalResponse::OrHash,
+                  OpenAI::Responses::ResponseInputItem::McpCall::OrHash,
+                  OpenAI::Responses::ResponseCustomToolCallOutput::OrHash,
+                  OpenAI::Responses::ResponseCustomToolCall::OrHash,
+                  OpenAI::Responses::ResponseInputItem::ItemReference::OrHash
+                )
+              ]
+            ),
+          metadata: T.nilable(T::Hash[Symbol, String]),
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Conversations::Conversation)
+      end
+      def create(
+        # Initial items to include in the conversation context. You may add up to 20 items
+        # at a time.
+        items: nil,
+        # Set of 16 key-value pairs that can be attached to an object. Useful for storing
+        # additional information about the object in a structured format.
+        metadata: nil,
+        request_options: {}
+      )
+      end
+
+      # Get a conversation with the given ID.
+      sig do
+        params(
+          conversation_id: String,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Conversations::Conversation)
+      end
+      def retrieve(
+        # The ID of the conversation to retrieve.
+        conversation_id,
+        request_options: {}
+      )
+      end
+
+      # Update a conversation's metadata with the given ID.
+      sig do
+        params(
+          conversation_id: String,
+          metadata: T::Hash[Symbol, String],
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Conversations::Conversation)
+      end
+      def update(
+        # The ID of the conversation to update.
+        conversation_id,
+        # Set of 16 key-value pairs that can be attached to an object. This can be useful
+        # for storing additional information about the object in a structured format, and
+        # querying for objects via API or the dashboard. Keys are strings with a maximum
+        # length of 64 characters. Values are strings with a maximum length of 512
+        # characters.
+        metadata:,
+        request_options: {}
+      )
+      end
+
+      # Delete a conversation with the given ID.
+      sig do
+        params(
+          conversation_id: String,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Conversations::ConversationDeletedResource)
+      end
+      def delete(
+        # The ID of the conversation to delete.
+        conversation_id,
+        request_options: {}
+      )
+      end
+
+      # @api private
+      sig { params(client: OpenAI::Client).returns(T.attached_class) }
+      def self.new(client:)
+      end
+    end
+  end
+end
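Taken together, these signatures describe the new top-level Conversations resource (create, retrieve, update, delete). A rough usage sketch, assuming the client exposes it as `client.conversations` (the accessor itself is added in `data/lib/openai/client.rb` per the file list) and using illustrative item shapes and IDs:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Create a conversation seeded with one user message and some metadata.
conversation = client.conversations.create(
  items: [{type: :message, role: :user, content: "Hello!"}], # illustrative item shape
  metadata: {topic: "demo"}
)

# The remaining sigs map to ID-based calls.
client.conversations.retrieve(conversation.id)
client.conversations.update(conversation.id, metadata: {topic: "demo", status: "closed"})
client.conversations.delete(conversation.id)
```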
data/rbi/openai/resources/responses/input_items.rbi
@@ -9,7 +9,6 @@ module OpenAI
           params(
             response_id: String,
             after: String,
-            before: String,
             include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol],
             limit: Integer,
             order: OpenAI::Responses::InputItemListParams::Order::OrSymbol,
@@ -25,8 +24,6 @@ module OpenAI
           response_id,
           # An item ID to list items after, used in pagination.
           after: nil,
-          # An item ID to list items before, used in pagination.
-          before: nil,
           # Additional fields to include in the response. See the `include` parameter for
           # Response creation above for more information.
           include: nil,
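With `before` removed, pagination on `input_items.list` is forward-only. A minimal sketch under that signature (the response ID and cursor are placeholders):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# `before:` is no longer accepted; page forward with `after:` only.
page = client.responses.input_items.list(
  "resp_123",        # placeholder response ID
  after: "msg_abc",  # placeholder cursor: list items after this item ID
  limit: 20,
  order: :asc
)
page.data.each { |item| puts item.class }
```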
data/rbi/openai/resources/responses.rbi
@@ -22,6 +22,13 @@ module OpenAI
       sig do
         params(
           background: T.nilable(T::Boolean),
+          conversation:
+            T.nilable(
+              T.any(
+                String,
+                OpenAI::Responses::ResponseConversationParam::OrHash
+              )
+            ),
           include:
             T.nilable(
               T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]
@@ -96,6 +103,11 @@ module OpenAI
         # Whether to run the model response in the background.
         # [Learn more](https://platform.openai.com/docs/guides/background).
         background: nil,
+        # The conversation that this response belongs to. Items from this conversation are
+        # prepended to `input_items` for this response request. Input items and output
+        # items from this response are automatically added to this conversation after this
+        # response completes.
+        conversation: nil,
         # Specify additional output data to include in the model response. Currently
         # supported values are:
         #
@@ -156,6 +168,7 @@ module OpenAI
         # The unique ID of the previous response to the model. Use this to create
         # multi-turn conversations. Learn more about
         # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
+        # Cannot be used in conjunction with `conversation`.
         previous_response_id: nil,
         # Reference to a prompt template and its variables.
         # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
@@ -274,6 +287,13 @@ module OpenAI
       sig do
         params(
           background: T.nilable(T::Boolean),
+          conversation:
+            T.nilable(
+              T.any(
+                String,
+                OpenAI::Responses::ResponseConversationParam::OrHash
+              )
+            ),
           include:
             T.nilable(
               T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]
@@ -354,6 +374,11 @@ module OpenAI
         # Whether to run the model response in the background.
         # [Learn more](https://platform.openai.com/docs/guides/background).
         background: nil,
+        # The conversation that this response belongs to. Items from this conversation are
+        # prepended to `input_items` for this response request. Input items and output
+        # items from this response are automatically added to this conversation after this
+        # response completes.
+        conversation: nil,
         # Specify additional output data to include in the model response. Currently
         # supported values are:
         #
@@ -414,6 +439,7 @@ module OpenAI
         # The unique ID of the previous response to the model. Use this to create
         # multi-turn conversations. Learn more about
         # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
+        # Cannot be used in conjunction with `conversation`.
         previous_response_id: nil,
         # Reference to a prompt template and its variables.
         # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
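The net effect on the Responses API: `create` (and the streaming variant further down in the same file) now accepts a nullable `conversation`, given either as a conversation ID string or a `ResponseConversationParam` hash, and the doc comment notes it cannot be combined with `previous_response_id`. A hedged sketch with placeholder model name and IDs:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Attach the response to an existing conversation ("conv_123" is a placeholder);
# its input and output items are added to that conversation once it completes.
response = client.responses.create(
  model: "gpt-4.1",                          # placeholder model name
  input: "Summarize our discussion so far.",
  conversation: "conv_123"                   # or e.g. {id: "conv_123"}
)

# Per the comment above, `conversation` cannot be combined with
# `previous_response_id`; continue a thread via the conversation instead.
puts response.id
```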
data/sig/openai/client.rbs
CHANGED
data/sig/openai/internal/conversation_cursor_page.rbs
@@ -0,0 +1,15 @@
+module OpenAI
+  module Internal
+    class ConversationCursorPage[Elem]
+      include OpenAI::Internal::Type::BasePage[Elem]
+
+      attr_accessor data: ::Array[Elem]?
+
+      attr_accessor has_more: bool
+
+      attr_accessor last_id: String
+
+      def inspect: -> String
+    end
+  end
+end
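The new `ConversationCursorPage` carries `data`, `has_more`, and `last_id`. A sketch of consuming it, assuming `conversations.items.list` accepts the usual cursor arguments and that the page exposes the gem's standard `auto_paging_each` helper (neither is shown in this section):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# "conv_123" is a placeholder conversation ID.
page = client.conversations.items.list("conv_123", limit: 50)

# Fields declared in the signature above.
Array(page.data).each { |item| puts item.class }
puts page.has_more
puts page.last_id

# Assumed helper from the gem's shared page base; walks every page lazily.
page.auto_paging_each { |item| puts item.class }
```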
data/sig/openai/models/conversations/computer_screenshot_content.rbs
@@ -0,0 +1,28 @@
+module OpenAI
+  module Models
+    module Conversations
+      type computer_screenshot_content =
+        { file_id: String?, image_url: String?, type: :computer_screenshot }
+
+      class ComputerScreenshotContent < OpenAI::Internal::Type::BaseModel
+        attr_accessor file_id: String?
+
+        attr_accessor image_url: String?
+
+        attr_accessor type: :computer_screenshot
+
+        def initialize: (
+          file_id: String?,
+          image_url: String?,
+          ?type: :computer_screenshot
+        ) -> void
+
+        def to_hash: -> {
+          file_id: String?,
+          image_url: String?,
+          type: :computer_screenshot
+        }
+      end
+    end
+  end
+end
data/sig/openai/models/conversations/container_file_citation_body.rbs
@@ -0,0 +1,47 @@
+module OpenAI
+  module Models
+    module Conversations
+      type container_file_citation_body =
+        {
+          container_id: String,
+          end_index: Integer,
+          file_id: String,
+          filename: String,
+          start_index: Integer,
+          type: :container_file_citation
+        }
+
+      class ContainerFileCitationBody < OpenAI::Internal::Type::BaseModel
+        attr_accessor container_id: String
+
+        attr_accessor end_index: Integer
+
+        attr_accessor file_id: String
+
+        attr_accessor filename: String
+
+        attr_accessor start_index: Integer
+
+        attr_accessor type: :container_file_citation
+
+        def initialize: (
+          container_id: String,
+          end_index: Integer,
+          file_id: String,
+          filename: String,
+          start_index: Integer,
+          ?type: :container_file_citation
+        ) -> void
+
+        def to_hash: -> {
+          container_id: String,
+          end_index: Integer,
+          file_id: String,
+          filename: String,
+          start_index: Integer,
+          type: :container_file_citation
+        }
+      end
+    end
+  end
+end
data/sig/openai/models/conversations/conversation.rbs
@@ -0,0 +1,37 @@
+module OpenAI
+  module Models
+    module Conversations
+      type conversation =
+        {
+          id: String,
+          created_at: Integer,
+          metadata: top,
+          object: :conversation
+        }
+
+      class Conversation < OpenAI::Internal::Type::BaseModel
+        attr_accessor id: String
+
+        attr_accessor created_at: Integer
+
+        attr_accessor metadata: top
+
+        attr_accessor object: :conversation
+
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          metadata: top,
+          ?object: :conversation
+        ) -> void
+
+        def to_hash: -> {
+          id: String,
+          created_at: Integer,
+          metadata: top,
+          object: :conversation
+        }
+      end
+    end
+  end
+end
data/sig/openai/models/conversations/conversation_create_params.rbs
@@ -0,0 +1,33 @@
+module OpenAI
+  module Models
+    module Conversations
+      type conversation_create_params =
+        {
+          items: ::Array[OpenAI::Models::Responses::response_input_item]?,
+          metadata: OpenAI::Models::metadata?
+        }
+        & OpenAI::Internal::Type::request_parameters
+
+      class ConversationCreateParams < OpenAI::Internal::Type::BaseModel
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters
+
+        attr_accessor items: ::Array[OpenAI::Models::Responses::response_input_item]?
+
+        attr_accessor metadata: OpenAI::Models::metadata?
+
+        def initialize: (
+          ?items: ::Array[OpenAI::Models::Responses::response_input_item]?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?request_options: OpenAI::request_opts
+        ) -> void
+
+        def to_hash: -> {
+          items: ::Array[OpenAI::Models::Responses::response_input_item]?,
+          metadata: OpenAI::Models::metadata?,
+          request_options: OpenAI::RequestOptions
+        }
+      end
+    end
+  end
+end
data/sig/openai/models/conversations/conversation_delete_params.rbs
@@ -0,0 +1,17 @@
+module OpenAI
+  module Models
+    module Conversations
+      type conversation_delete_params =
+        { } & OpenAI::Internal::Type::request_parameters
+
+      class ConversationDeleteParams < OpenAI::Internal::Type::BaseModel
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters
+
+        def initialize: (?request_options: OpenAI::request_opts) -> void
+
+        def to_hash: -> { request_options: OpenAI::RequestOptions }
+      end
+    end
+  end
+end
data/sig/openai/models/conversations/conversation_deleted.rbs
@@ -0,0 +1,28 @@
+module OpenAI
+  module Models
+    module Conversations
+      type conversation_deleted =
+        { id: String, deleted: bool, object: :"conversation.deleted" }
+
+      class ConversationDeleted < OpenAI::Internal::Type::BaseModel
+        attr_accessor id: String
+
+        attr_accessor deleted: bool
+
+        attr_accessor object: :"conversation.deleted"
+
+        def initialize: (
+          id: String,
+          deleted: bool,
+          ?object: :"conversation.deleted"
+        ) -> void
+
+        def to_hash: -> {
+          id: String,
+          deleted: bool,
+          object: :"conversation.deleted"
+        }
+      end
+    end
+  end
+end
data/sig/openai/models/conversations/conversation_deleted_resource.rbs
@@ -0,0 +1,28 @@
+module OpenAI
+  module Models
+    module Conversations
+      type conversation_deleted_resource =
+        { id: String, deleted: bool, object: :"conversation.deleted" }
+
+      class ConversationDeletedResource < OpenAI::Internal::Type::BaseModel
+        attr_accessor id: String
+
+        attr_accessor deleted: bool
+
+        attr_accessor object: :"conversation.deleted"
+
+        def initialize: (
+          id: String,
+          deleted: bool,
+          ?object: :"conversation.deleted"
+        ) -> void
+
+        def to_hash: -> {
+          id: String,
+          deleted: bool,
+          object: :"conversation.deleted"
+        }
+      end
+    end
+  end
+end
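All of the new conversations model signatures follow the same shape: required keyword attributes, an optional defaulted discriminator (`?type:` or `?object:`), and a `to_hash` that returns the plain hash. For example, with the `ComputerScreenshotContent` signature above (values are illustrative):

```ruby
require "openai"

# Build one of the new conversation content models; `type` defaults per the `?type:` sig.
content = OpenAI::Models::Conversations::ComputerScreenshotContent.new(
  file_id: "file_123",  # placeholder file ID
  image_url: nil
)

content.type    # => :computer_screenshot
content.to_hash # => { file_id: "file_123", image_url: nil, type: :computer_screenshot }
```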