openai 0.18.1 → 0.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +20 -0
  3. data/README.md +1 -1
  4. data/lib/openai/client.rb +4 -0
  5. data/lib/openai/helpers/structured_output/boolean.rb +1 -0
  6. data/lib/openai/internal/conversation_cursor_page.rb +92 -0
  7. data/lib/openai/internal/transport/base_client.rb +1 -4
  8. data/lib/openai/internal/transport/pooled_net_requester.rb +1 -9
  9. data/lib/openai/internal/util.rb +1 -1
  10. data/lib/openai/models/audio/transcription.rb +1 -4
  11. data/lib/openai/models/audio/transcription_create_params.rb +2 -7
  12. data/lib/openai/models/audio/transcription_text_done_event.rb +1 -4
  13. data/lib/openai/models/beta/assistant_create_params.rb +6 -19
  14. data/lib/openai/models/beta/assistant_stream_event.rb +6 -24
  15. data/lib/openai/models/beta/assistant_update_params.rb +1 -4
  16. data/lib/openai/models/beta/message_stream_event.rb +1 -4
  17. data/lib/openai/models/beta/run_step_stream_event.rb +1 -4
  18. data/lib/openai/models/beta/thread_create_and_run_params.rb +10 -32
  19. data/lib/openai/models/beta/thread_create_params.rb +7 -22
  20. data/lib/openai/models/beta/threads/message.rb +3 -10
  21. data/lib/openai/models/beta/threads/message_create_params.rb +2 -7
  22. data/lib/openai/models/beta/threads/run.rb +2 -7
  23. data/lib/openai/models/beta/threads/run_create_params.rb +3 -10
  24. data/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +1 -3
  25. data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +5 -17
  26. data/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +1 -3
  27. data/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +4 -12
  28. data/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +1 -4
  29. data/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +1 -4
  30. data/lib/openai/models/beta/threads/text.rb +1 -4
  31. data/lib/openai/models/chat/chat_completion_chunk.rb +1 -3
  32. data/lib/openai/models/chat/chat_completion_custom_tool.rb +2 -7
  33. data/lib/openai/models/conversations/computer_screenshot_content.rb +38 -0
  34. data/lib/openai/models/conversations/container_file_citation_body.rb +58 -0
  35. data/lib/openai/models/conversations/conversation.rb +51 -0
  36. data/lib/openai/models/conversations/conversation_create_params.rb +39 -0
  37. data/lib/openai/models/conversations/conversation_delete_params.rb +16 -0
  38. data/lib/openai/models/conversations/conversation_deleted.rb +29 -0
  39. data/lib/openai/models/conversations/conversation_deleted_resource.rb +30 -0
  40. data/lib/openai/models/conversations/conversation_item.rb +568 -0
  41. data/lib/openai/models/conversations/conversation_item_list.rb +55 -0
  42. data/lib/openai/models/conversations/conversation_retrieve_params.rb +16 -0
  43. data/lib/openai/models/conversations/conversation_update_params.rb +31 -0
  44. data/lib/openai/models/conversations/file_citation_body.rb +42 -0
  45. data/lib/openai/models/conversations/input_file_content.rb +42 -0
  46. data/lib/openai/models/conversations/input_image_content.rb +62 -0
  47. data/lib/openai/models/conversations/input_text_content.rb +26 -0
  48. data/lib/openai/models/conversations/item_create_params.rb +37 -0
  49. data/lib/openai/models/conversations/item_delete_params.rb +22 -0
  50. data/lib/openai/models/conversations/item_list_params.rb +84 -0
  51. data/lib/openai/models/conversations/item_retrieve_params.rb +36 -0
  52. data/lib/openai/models/conversations/lob_prob.rb +35 -0
  53. data/lib/openai/models/conversations/message.rb +115 -0
  54. data/lib/openai/models/conversations/output_text_content.rb +57 -0
  55. data/lib/openai/models/conversations/refusal_content.rb +26 -0
  56. data/lib/openai/models/conversations/summary_text_content.rb +23 -0
  57. data/lib/openai/models/conversations/text_content.rb +23 -0
  58. data/lib/openai/models/conversations/top_log_prob.rb +29 -0
  59. data/lib/openai/models/conversations/url_citation_body.rb +50 -0
  60. data/lib/openai/models/eval_create_params.rb +6 -20
  61. data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +29 -53
  62. data/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +1 -3
  63. data/lib/openai/models/evals/run_create_params.rb +18 -54
  64. data/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +1 -4
  65. data/lib/openai/models/moderation.rb +5 -15
  66. data/lib/openai/models/responses/input_item_list_params.rb +1 -9
  67. data/lib/openai/models/responses/response.rb +26 -1
  68. data/lib/openai/models/responses/response_computer_tool_call.rb +2 -6
  69. data/lib/openai/models/responses/response_computer_tool_call_output_item.rb +1 -3
  70. data/lib/openai/models/responses/response_conversation_param.rb +20 -0
  71. data/lib/openai/models/responses/response_create_params.rb +34 -1
  72. data/lib/openai/models/responses/response_input_item.rb +2 -7
  73. data/lib/openai/models/responses/response_input_message_item.rb +1 -4
  74. data/lib/openai/models/responses/response_output_item.rb +1 -3
  75. data/lib/openai/models/responses/response_output_message.rb +1 -3
  76. data/lib/openai/models/responses/response_output_text.rb +3 -10
  77. data/lib/openai/models/responses/response_stream_event.rb +4 -16
  78. data/lib/openai/models/responses/response_text_delta_event.rb +1 -3
  79. data/lib/openai/models/responses/response_text_done_event.rb +1 -3
  80. data/lib/openai/models/responses/tool.rb +145 -34
  81. data/lib/openai/models.rb +2 -0
  82. data/lib/openai/resources/conversations/items.rb +141 -0
  83. data/lib/openai/resources/conversations.rb +112 -0
  84. data/lib/openai/resources/responses/input_items.rb +1 -3
  85. data/lib/openai/resources/responses.rb +6 -2
  86. data/lib/openai/version.rb +1 -1
  87. data/lib/openai.rb +31 -0
  88. data/rbi/openai/client.rbi +3 -0
  89. data/rbi/openai/errors.rbi +5 -5
  90. data/rbi/openai/internal/conversation_cursor_page.rbi +25 -0
  91. data/rbi/openai/models/conversations/computer_screenshot_content.rbi +60 -0
  92. data/rbi/openai/models/conversations/container_file_citation_body.rbi +82 -0
  93. data/rbi/openai/models/conversations/conversation.rbi +76 -0
  94. data/rbi/openai/models/conversations/conversation_create_params.rbi +144 -0
  95. data/rbi/openai/models/conversations/conversation_delete_params.rbi +32 -0
  96. data/rbi/openai/models/conversations/conversation_deleted.rbi +40 -0
  97. data/rbi/openai/models/conversations/conversation_deleted_resource.rbi +40 -0
  98. data/rbi/openai/models/conversations/conversation_item.rbi +835 -0
  99. data/rbi/openai/models/conversations/conversation_item_list.rbi +101 -0
  100. data/rbi/openai/models/conversations/conversation_retrieve_params.rbi +32 -0
  101. data/rbi/openai/models/conversations/conversation_update_params.rbi +56 -0
  102. data/rbi/openai/models/conversations/file_citation_body.rbi +61 -0
  103. data/rbi/openai/models/conversations/input_file_content.rbi +72 -0
  104. data/rbi/openai/models/conversations/input_image_content.rbi +113 -0
  105. data/rbi/openai/models/conversations/input_text_content.rbi +38 -0
  106. data/rbi/openai/models/conversations/item_create_params.rbi +150 -0
  107. data/rbi/openai/models/conversations/item_delete_params.rbi +40 -0
  108. data/rbi/openai/models/conversations/item_list_params.rbi +174 -0
  109. data/rbi/openai/models/conversations/item_retrieve_params.rbi +70 -0
  110. data/rbi/openai/models/conversations/lob_prob.rbi +50 -0
  111. data/rbi/openai/models/conversations/message.rbi +196 -0
  112. data/rbi/openai/models/conversations/output_text_content.rbi +110 -0
  113. data/rbi/openai/models/conversations/refusal_content.rbi +38 -0
  114. data/rbi/openai/models/conversations/summary_text_content.rbi +31 -0
  115. data/rbi/openai/models/conversations/text_content.rbi +28 -0
  116. data/rbi/openai/models/conversations/top_log_prob.rbi +41 -0
  117. data/rbi/openai/models/conversations/url_citation_body.rbi +74 -0
  118. data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +33 -33
  119. data/rbi/openai/models/responses/input_item_list_params.rbi +0 -11
  120. data/rbi/openai/models/responses/response.rbi +49 -0
  121. data/rbi/openai/models/responses/response_conversation_param.rbi +33 -0
  122. data/rbi/openai/models/responses/response_create_params.rbi +54 -0
  123. data/rbi/openai/models/responses/tool.rbi +243 -31
  124. data/rbi/openai/models.rbi +2 -0
  125. data/rbi/openai/resources/conversations/items.rbi +152 -0
  126. data/rbi/openai/resources/conversations.rbi +110 -0
  127. data/rbi/openai/resources/responses/input_items.rbi +0 -3
  128. data/rbi/openai/resources/responses.rbi +26 -0
  129. data/sig/openai/client.rbs +2 -0
  130. data/sig/openai/internal/conversation_cursor_page.rbs +15 -0
  131. data/sig/openai/models/conversations/computer_screenshot_content.rbs +28 -0
  132. data/sig/openai/models/conversations/container_file_citation_body.rbs +47 -0
  133. data/sig/openai/models/conversations/conversation.rbs +37 -0
  134. data/sig/openai/models/conversations/conversation_create_params.rbs +33 -0
  135. data/sig/openai/models/conversations/conversation_delete_params.rbs +17 -0
  136. data/sig/openai/models/conversations/conversation_deleted.rbs +28 -0
  137. data/sig/openai/models/conversations/conversation_deleted_resource.rbs +28 -0
  138. data/sig/openai/models/conversations/conversation_item.rbs +403 -0
  139. data/sig/openai/models/conversations/conversation_item_list.rbs +44 -0
  140. data/sig/openai/models/conversations/conversation_retrieve_params.rbs +17 -0
  141. data/sig/openai/models/conversations/conversation_update_params.rbs +26 -0
  142. data/sig/openai/models/conversations/file_citation_body.rbs +37 -0
  143. data/sig/openai/models/conversations/input_file_content.rbs +41 -0
  144. data/sig/openai/models/conversations/input_image_content.rbs +49 -0
  145. data/sig/openai/models/conversations/input_text_content.rbs +17 -0
  146. data/sig/openai/models/conversations/item_create_params.rbs +37 -0
  147. data/sig/openai/models/conversations/item_delete_params.rbs +25 -0
  148. data/sig/openai/models/conversations/item_list_params.rbs +66 -0
  149. data/sig/openai/models/conversations/item_retrieve_params.rbs +37 -0
  150. data/sig/openai/models/conversations/lob_prob.rbs +37 -0
  151. data/sig/openai/models/conversations/message.rbs +95 -0
  152. data/sig/openai/models/conversations/output_text_content.rbs +52 -0
  153. data/sig/openai/models/conversations/refusal_content.rbs +17 -0
  154. data/sig/openai/models/conversations/summary_text_content.rbs +17 -0
  155. data/sig/openai/models/conversations/text_content.rbs +17 -0
  156. data/sig/openai/models/conversations/top_log_prob.rbs +28 -0
  157. data/sig/openai/models/conversations/url_citation_body.rbs +42 -0
  158. data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +22 -22
  159. data/sig/openai/models/responses/input_item_list_params.rbs +0 -7
  160. data/sig/openai/models/responses/response.rbs +15 -0
  161. data/sig/openai/models/responses/response_conversation_param.rbs +15 -0
  162. data/sig/openai/models/responses/response_create_params.rbs +14 -0
  163. data/sig/openai/models/responses/tool.rbs +83 -18
  164. data/sig/openai/models.rbs +2 -0
  165. data/sig/openai/resources/conversations/items.rbs +38 -0
  166. data/sig/openai/resources/conversations.rbs +31 -0
  167. data/sig/openai/resources/responses/input_items.rbs +0 -1
  168. data/sig/openai/resources/responses.rbs +2 -0
  169. metadata +95 -2
data/lib/openai/resources/conversations/items.rb ADDED
@@ -0,0 +1,141 @@
+ # frozen_string_literal: true
+
+ module OpenAI
+   module Resources
+     class Conversations
+       class Items
+         # Some parameter documentations has been truncated, see
+         # {OpenAI::Models::Conversations::ItemCreateParams} for more details.
+         #
+         # Create items in a conversation with the given ID.
+         #
+         # @overload create(conversation_id, items:, include: nil, request_options: {})
+         #
+         # @param conversation_id [String] Path param: The ID of the conversation to add the item to.
+         #
+         # @param items [Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Body param: The items to add to the conversation. You may add up to 20 items at
+         #
+         # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Query param: Additional fields to include in the response. See the `include`
+         #
+         # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+         #
+         # @return [OpenAI::Models::Conversations::ConversationItemList]
+         #
+         # @see OpenAI::Models::Conversations::ItemCreateParams
+         def create(conversation_id, params)
+           parsed, options = OpenAI::Conversations::ItemCreateParams.dump_request(params)
+           query_params = [:include]
+           @client.request(
+             method: :post,
+             path: ["conversations/%1$s/items", conversation_id],
+             query: parsed.slice(*query_params),
+             body: parsed.except(*query_params),
+             model: OpenAI::Conversations::ConversationItemList,
+             options: options
+           )
+         end
+
+         # Some parameter documentations has been truncated, see
+         # {OpenAI::Models::Conversations::ItemRetrieveParams} for more details.
+         #
+         # Get a single item from a conversation with the given IDs.
+         #
+         # @overload retrieve(item_id, conversation_id:, include: nil, request_options: {})
+         #
+         # @param item_id [String] Path param: The ID of the item to retrieve.
+         #
+         # @param conversation_id [String] Path param: The ID of the conversation that contains the item.
+         #
+         # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Query param: Additional fields to include in the response. See the `include`
+         #
+         # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+         #
+         # @return [OpenAI::Models::Conversations::Message, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput, OpenAI::Models::Conversations::ConversationItem::McpListTools, OpenAI::Models::Conversations::ConversationItem::McpApprovalRequest, OpenAI::Models::Conversations::ConversationItem::McpApprovalResponse, OpenAI::Models::Conversations::ConversationItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput]
+         #
+         # @see OpenAI::Models::Conversations::ItemRetrieveParams
+         def retrieve(item_id, params)
+           parsed, options = OpenAI::Conversations::ItemRetrieveParams.dump_request(params)
+           conversation_id =
+             parsed.delete(:conversation_id) do
+               raise ArgumentError.new("missing required path argument #{_1}")
+             end
+           @client.request(
+             method: :get,
+             path: ["conversations/%1$s/items/%2$s", conversation_id, item_id],
+             query: parsed,
+             model: OpenAI::Conversations::ConversationItem,
+             options: options
+           )
+         end
+
+         # Some parameter documentations has been truncated, see
+         # {OpenAI::Models::Conversations::ItemListParams} for more details.
+         #
+         # List all items for a conversation with the given ID.
+         #
+         # @overload list(conversation_id, after: nil, include: nil, limit: nil, order: nil, request_options: {})
+         #
+         # @param conversation_id [String] The ID of the conversation to list items for.
+         #
+         # @param after [String] An item ID to list items after, used in pagination.
+         #
+         # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Specify additional output data to include in the model response. Currently
+         #
+         # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between
+         #
+         # @param order [Symbol, OpenAI::Models::Conversations::ItemListParams::Order] The order to return the input items in. Default is `desc`.
+         #
+         # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+         #
+         # @return [OpenAI::Internal::ConversationCursorPage<OpenAI::Models::Conversations::Message, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput, OpenAI::Models::Conversations::ConversationItem::McpListTools, OpenAI::Models::Conversations::ConversationItem::McpApprovalRequest, OpenAI::Models::Conversations::ConversationItem::McpApprovalResponse, OpenAI::Models::Conversations::ConversationItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput>]
+         #
+         # @see OpenAI::Models::Conversations::ItemListParams
+         def list(conversation_id, params = {})
+           parsed, options = OpenAI::Conversations::ItemListParams.dump_request(params)
+           @client.request(
+             method: :get,
+             path: ["conversations/%1$s/items", conversation_id],
+             query: parsed,
+             page: OpenAI::Internal::ConversationCursorPage,
+             model: OpenAI::Conversations::ConversationItem,
+             options: options
+           )
+         end
+
+         # Delete an item from a conversation with the given IDs.
+         #
+         # @overload delete(item_id, conversation_id:, request_options: {})
+         #
+         # @param item_id [String] The ID of the item to delete.
+         #
+         # @param conversation_id [String] The ID of the conversation that contains the item.
+         #
+         # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+         #
+         # @return [OpenAI::Models::Conversations::Conversation]
+         #
+         # @see OpenAI::Models::Conversations::ItemDeleteParams
+         def delete(item_id, params)
+           parsed, options = OpenAI::Conversations::ItemDeleteParams.dump_request(params)
+           conversation_id =
+             parsed.delete(:conversation_id) do
+               raise ArgumentError.new("missing required path argument #{_1}")
+             end
+           @client.request(
+             method: :delete,
+             path: ["conversations/%1$s/items/%2$s", conversation_id, item_id],
+             model: OpenAI::Conversations::Conversation,
+             options: options
+           )
+         end
+
+         # @api private
+         #
+         # @param client [OpenAI::Client]
+         def initialize(client:)
+           @client = client
+         end
+       end
+     end
+   end
+ end
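
Note: as a quick orientation for the new items endpoints above, here is a minimal usage sketch. It is not taken from the gem's documentation: the conversation ID, the message text, and the exact item hash shape are placeholder assumptions (see OpenAI::Models::Conversations::ItemCreateParams for the accepted fields), and the client is assumed to read ENV["OPENAI_API_KEY"].

```ruby
require "openai"

client = OpenAI::Client.new # assumed to pick up ENV["OPENAI_API_KEY"]

# Add items to an existing conversation (ID and content are placeholders).
client.conversations.items.create(
  "conv_123",
  items: [{type: :message, role: :user, content: "What was our last topic?"}]
)

# List the conversation's items; per the @return tag above, the result is an
# OpenAI::Internal::ConversationCursorPage whose `data` holds the items.
page = client.conversations.items.list("conv_123", limit: 10, order: :asc)
Array(page.data).each { |item| puts item.class }
```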
data/lib/openai/resources/conversations.rb ADDED
@@ -0,0 +1,112 @@
+ # frozen_string_literal: true
+
+ module OpenAI
+   module Resources
+     class Conversations
+       # @return [OpenAI::Resources::Conversations::Items]
+       attr_reader :items
+
+       # Some parameter documentations has been truncated, see
+       # {OpenAI::Models::Conversations::ConversationCreateParams} for more details.
+       #
+       # Create a conversation with the given ID.
+       #
+       # @overload create(items: nil, metadata: nil, request_options: {})
+       #
+       # @param items [Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil] Initial items to include in the conversation context.
+       #
+       # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. Useful for
+       #
+       # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+       #
+       # @return [OpenAI::Models::Conversations::Conversation]
+       #
+       # @see OpenAI::Models::Conversations::ConversationCreateParams
+       def create(params = {})
+         parsed, options = OpenAI::Conversations::ConversationCreateParams.dump_request(params)
+         @client.request(
+           method: :post,
+           path: "conversations",
+           body: parsed,
+           model: OpenAI::Conversations::Conversation,
+           options: options
+         )
+       end
+
+       # Get a conversation with the given ID.
+       #
+       # @overload retrieve(conversation_id, request_options: {})
+       #
+       # @param conversation_id [String] The ID of the conversation to retrieve.
+       #
+       # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+       #
+       # @return [OpenAI::Models::Conversations::Conversation]
+       #
+       # @see OpenAI::Models::Conversations::ConversationRetrieveParams
+       def retrieve(conversation_id, params = {})
+         @client.request(
+           method: :get,
+           path: ["conversations/%1$s", conversation_id],
+           model: OpenAI::Conversations::Conversation,
+           options: params[:request_options]
+         )
+       end
+
+       # Some parameter documentations has been truncated, see
+       # {OpenAI::Models::Conversations::ConversationUpdateParams} for more details.
+       #
+       # Update a conversation's metadata with the given ID.
+       #
+       # @overload update(conversation_id, metadata:, request_options: {})
+       #
+       # @param conversation_id [String] The ID of the conversation to update.
+       #
+       # @param metadata [Hash{Symbol=>String}] Set of 16 key-value pairs that can be attached to an object. This can be
+       #
+       # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+       #
+       # @return [OpenAI::Models::Conversations::Conversation]
+       #
+       # @see OpenAI::Models::Conversations::ConversationUpdateParams
+       def update(conversation_id, params)
+         parsed, options = OpenAI::Conversations::ConversationUpdateParams.dump_request(params)
+         @client.request(
+           method: :post,
+           path: ["conversations/%1$s", conversation_id],
+           body: parsed,
+           model: OpenAI::Conversations::Conversation,
+           options: options
+         )
+       end
+
+       # Delete a conversation with the given ID.
+       #
+       # @overload delete(conversation_id, request_options: {})
+       #
+       # @param conversation_id [String] The ID of the conversation to delete.
+       #
+       # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+       #
+       # @return [OpenAI::Models::Conversations::ConversationDeletedResource]
+       #
+       # @see OpenAI::Models::Conversations::ConversationDeleteParams
+       def delete(conversation_id, params = {})
+         @client.request(
+           method: :delete,
+           path: ["conversations/%1$s", conversation_id],
+           model: OpenAI::Conversations::ConversationDeletedResource,
+           options: params[:request_options]
+         )
+       end
+
+       # @api private
+       #
+       # @param client [OpenAI::Client]
+       def initialize(client:)
+         @client = client
+         @items = OpenAI::Resources::Conversations::Items.new(client: client)
+       end
+     end
+   end
+ end
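
Note: the top-level resource follows the same pattern. A short lifecycle sketch that assumes only what the signatures above show; the metadata keys and values are arbitrary examples.

```ruby
require "openai"

client = OpenAI::Client.new

# Create a conversation with optional metadata, then update, fetch, and delete it.
conversation = client.conversations.create(metadata: {topic: "demo"})
client.conversations.update(conversation.id, metadata: {topic: "demo-renamed"})
client.conversations.retrieve(conversation.id)
client.conversations.delete(conversation.id) # returns a ConversationDeletedResource
```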
data/lib/openai/resources/responses/input_items.rb CHANGED
@@ -9,14 +9,12 @@ module OpenAI
  #
  # Returns a list of input items for a given response.
  #
- # @overload list(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {})
+ # @overload list(response_id, after: nil, include: nil, limit: nil, order: nil, request_options: {})
  #
  # @param response_id [String] The ID of the response to retrieve input items for.
  #
  # @param after [String] An item ID to list items after, used in pagination.
  #
- # @param before [String] An item ID to list items before, used in pagination.
- #
  # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
  #
  # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between
data/lib/openai/resources/responses.rb CHANGED
@@ -23,10 +23,12 @@ module OpenAI
  # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
  # your own data as input for the model's response.
  #
- # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+ # @overload create(background: nil, conversation: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
  #
  # @param background [Boolean, nil] Whether to run the model response in the background.
  #
+ # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are
+ #
  # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
  #
  # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
@@ -120,10 +122,12 @@ module OpenAI
  # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
  # your own data as input for the model's response.
  #
- # @overload stream(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+ # @overload stream(background: nil, conversation: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
  #
  # @param background [Boolean, nil] Whether to run the model response in the background.
  #
+ # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are
+ #
  # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
  #
  # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
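
Note: the new `conversation` parameter ties a response to a stored conversation so that items from that conversation are used as context (the doc line above is truncated). A hedged sketch; the model name and conversation ID are placeholders, not values taken from this diff.

```ruby
require "openai"

client = OpenAI::Client.new

# Generate a response in the context of an existing conversation.
response = client.responses.create(
  model: "gpt-4.1",          # placeholder model name
  conversation: "conv_123",  # or an OpenAI::Responses::ResponseConversationParam
  input: "Continue where we left off."
)
puts response.id
```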
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module OpenAI
4
- VERSION = "0.18.1"
4
+ VERSION = "0.19.0"
5
5
  end
data/lib/openai.rb CHANGED
@@ -48,6 +48,7 @@ require_relative "openai/internal/transport/base_client"
  require_relative "openai/internal/transport/pooled_net_requester"
  require_relative "openai/client"
  require_relative "openai/internal/stream"
+ require_relative "openai/internal/conversation_cursor_page"
  require_relative "openai/internal/cursor_page"
  require_relative "openai/internal/page"
  require_relative "openai/helpers/structured_output/json_schema_converter"
@@ -246,6 +247,33 @@ require_relative "openai/models/containers/file_list_response"
  require_relative "openai/models/containers/file_retrieve_params"
  require_relative "openai/models/containers/file_retrieve_response"
  require_relative "openai/models/containers/files/content_retrieve_params"
+ require_relative "openai/models/conversations/computer_screenshot_content"
+ require_relative "openai/models/conversations/container_file_citation_body"
+ require_relative "openai/models/conversations/conversation"
+ require_relative "openai/models/conversations/conversation_create_params"
+ require_relative "openai/models/conversations/conversation_deleted"
+ require_relative "openai/models/conversations/conversation_deleted_resource"
+ require_relative "openai/models/conversations/conversation_delete_params"
+ require_relative "openai/models/conversations/conversation_item"
+ require_relative "openai/models/conversations/conversation_item_list"
+ require_relative "openai/models/conversations/conversation_retrieve_params"
+ require_relative "openai/models/conversations/conversation_update_params"
+ require_relative "openai/models/conversations/file_citation_body"
+ require_relative "openai/models/conversations/input_file_content"
+ require_relative "openai/models/conversations/input_image_content"
+ require_relative "openai/models/conversations/input_text_content"
+ require_relative "openai/models/conversations/item_create_params"
+ require_relative "openai/models/conversations/item_delete_params"
+ require_relative "openai/models/conversations/item_list_params"
+ require_relative "openai/models/conversations/item_retrieve_params"
+ require_relative "openai/models/conversations/lob_prob"
+ require_relative "openai/models/conversations/message"
+ require_relative "openai/models/conversations/output_text_content"
+ require_relative "openai/models/conversations/refusal_content"
+ require_relative "openai/models/conversations/summary_text_content"
+ require_relative "openai/models/conversations/text_content"
+ require_relative "openai/models/conversations/top_log_prob"
+ require_relative "openai/models/conversations/url_citation_body"
  require_relative "openai/models/create_embedding_response"
  require_relative "openai/models/custom_tool_input_format"
  require_relative "openai/models/embedding"
@@ -383,6 +411,7 @@ require_relative "openai/models/responses/response_computer_tool_call_output_scr
  require_relative "openai/models/responses/response_content"
  require_relative "openai/models/responses/response_content_part_added_event"
  require_relative "openai/models/responses/response_content_part_done_event"
+ require_relative "openai/models/responses/response_conversation_param"
  require_relative "openai/models/responses/response_created_event"
  require_relative "openai/models/responses/response_create_params"
  require_relative "openai/models/responses/response_custom_tool_call"
@@ -535,6 +564,8 @@ require_relative "openai/resources/completions"
  require_relative "openai/resources/containers"
  require_relative "openai/resources/containers/files"
  require_relative "openai/resources/containers/files/content"
+ require_relative "openai/resources/conversations"
+ require_relative "openai/resources/conversations/items"
  require_relative "openai/resources/embeddings"
  require_relative "openai/resources/evals"
  require_relative "openai/resources/evals/runs"
data/rbi/openai/client.rbi CHANGED
@@ -67,6 +67,9 @@ module OpenAI
  sig { returns(OpenAI::Resources::Responses) }
  attr_reader :responses

+ sig { returns(OpenAI::Resources::Conversations) }
+ attr_reader :conversations
+
  sig { returns(OpenAI::Resources::Evals) }
  attr_reader :evals

data/rbi/openai/errors.rbi CHANGED
@@ -68,19 +68,19 @@ module OpenAI
  end

  class APIConnectionError < OpenAI::Errors::APIError
- sig { void }
+ sig { returns(NilClass) }
  attr_accessor :status

- sig { void }
+ sig { returns(NilClass) }
  attr_accessor :body

- sig { void }
+ sig { returns(NilClass) }
  attr_accessor :code

- sig { void }
+ sig { returns(NilClass) }
  attr_accessor :param

- sig { void }
+ sig { returns(NilClass) }
  attr_accessor :type

  # @api private
data/rbi/openai/internal/conversation_cursor_page.rbi ADDED
@@ -0,0 +1,25 @@
+ # typed: strong
+
+ module OpenAI
+   module Internal
+     class ConversationCursorPage
+       include OpenAI::Internal::Type::BasePage
+
+       Elem = type_member
+
+       sig { returns(T.nilable(T::Array[Elem])) }
+       attr_accessor :data
+
+       sig { returns(T::Boolean) }
+       attr_accessor :has_more
+
+       sig { returns(String) }
+       attr_accessor :last_id
+
+       # @api private
+       sig { returns(String) }
+       def inspect
+       end
+     end
+   end
+ end
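
Note: the new page class only guarantees the three fields typed above (`data`, `has_more`, `last_id`). A manual pagination sketch built on just those fields plus the `after` parameter of `ItemListParams`; whether this SDK also offers an auto-paging helper for this page type is not shown in the diff, so the loop is written out explicitly and the conversation ID is a placeholder.

```ruby
require "openai"

client = OpenAI::Client.new
after = nil # cursor: last item ID seen so far

loop do
  params = {limit: 20}
  params[:after] = after if after
  page = client.conversations.items.list("conv_123", **params)

  Array(page.data).each { |item| puts item.class }

  break unless page.has_more
  after = page.last_id # resume after the last item of this page
end
```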
data/rbi/openai/models/conversations/computer_screenshot_content.rbi ADDED
@@ -0,0 +1,60 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     module Conversations
+       class ComputerScreenshotContent < OpenAI::Internal::Type::BaseModel
+         OrHash =
+           T.type_alias do
+             T.any(
+               OpenAI::Conversations::ComputerScreenshotContent,
+               OpenAI::Internal::AnyHash
+             )
+           end
+
+         # The identifier of an uploaded file that contains the screenshot.
+         sig { returns(T.nilable(String)) }
+         attr_accessor :file_id
+
+         # The URL of the screenshot image.
+         sig { returns(T.nilable(String)) }
+         attr_accessor :image_url
+
+         # Specifies the event type. For a computer screenshot, this property is always set
+         # to `computer_screenshot`.
+         sig { returns(Symbol) }
+         attr_accessor :type
+
+         sig do
+           params(
+             file_id: T.nilable(String),
+             image_url: T.nilable(String),
+             type: Symbol
+           ).returns(T.attached_class)
+         end
+         def self.new(
+           # The identifier of an uploaded file that contains the screenshot.
+           file_id:,
+           # The URL of the screenshot image.
+           image_url:,
+           # Specifies the event type. For a computer screenshot, this property is always set
+           # to `computer_screenshot`.
+           type: :computer_screenshot
+         )
+         end
+
+         sig do
+           override.returns(
+             {
+               file_id: T.nilable(String),
+               image_url: T.nilable(String),
+               type: Symbol
+             }
+           )
+         end
+         def to_hash
+         end
+       end
+     end
+   end
+ end
data/rbi/openai/models/conversations/container_file_citation_body.rbi ADDED
@@ -0,0 +1,82 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     module Conversations
+       class ContainerFileCitationBody < OpenAI::Internal::Type::BaseModel
+         OrHash =
+           T.type_alias do
+             T.any(
+               OpenAI::Conversations::ContainerFileCitationBody,
+               OpenAI::Internal::AnyHash
+             )
+           end
+
+         # The ID of the container file.
+         sig { returns(String) }
+         attr_accessor :container_id
+
+         # The index of the last character of the container file citation in the message.
+         sig { returns(Integer) }
+         attr_accessor :end_index
+
+         # The ID of the file.
+         sig { returns(String) }
+         attr_accessor :file_id
+
+         # The filename of the container file cited.
+         sig { returns(String) }
+         attr_accessor :filename
+
+         # The index of the first character of the container file citation in the message.
+         sig { returns(Integer) }
+         attr_accessor :start_index
+
+         # The type of the container file citation. Always `container_file_citation`.
+         sig { returns(Symbol) }
+         attr_accessor :type
+
+         sig do
+           params(
+             container_id: String,
+             end_index: Integer,
+             file_id: String,
+             filename: String,
+             start_index: Integer,
+             type: Symbol
+           ).returns(T.attached_class)
+         end
+         def self.new(
+           # The ID of the container file.
+           container_id:,
+           # The index of the last character of the container file citation in the message.
+           end_index:,
+           # The ID of the file.
+           file_id:,
+           # The filename of the container file cited.
+           filename:,
+           # The index of the first character of the container file citation in the message.
+           start_index:,
+           # The type of the container file citation. Always `container_file_citation`.
+           type: :container_file_citation
+         )
+         end
+
+         sig do
+           override.returns(
+             {
+               container_id: String,
+               end_index: Integer,
+               file_id: String,
+               filename: String,
+               start_index: Integer,
+               type: Symbol
+             }
+           )
+         end
+         def to_hash
+         end
+       end
+     end
+   end
+ end
data/rbi/openai/models/conversations/conversation.rbi ADDED
@@ -0,0 +1,76 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     module Conversations
+       class Conversation < OpenAI::Internal::Type::BaseModel
+         OrHash =
+           T.type_alias do
+             T.any(
+               OpenAI::Conversations::Conversation,
+               OpenAI::Internal::AnyHash
+             )
+           end
+
+         # The unique ID of the conversation.
+         sig { returns(String) }
+         attr_accessor :id
+
+         # The time at which the conversation was created, measured in seconds since the
+         # Unix epoch.
+         sig { returns(Integer) }
+         attr_accessor :created_at
+
+         # Set of 16 key-value pairs that can be attached to an object. This can be useful
+         # for storing additional information about the object in a structured format, and
+         # querying for objects via API or the dashboard. Keys are strings with a maximum
+         # length of 64 characters. Values are strings with a maximum length of 512
+         # characters.
+         sig { returns(T.anything) }
+         attr_accessor :metadata
+
+         # The object type, which is always `conversation`.
+         sig { returns(Symbol) }
+         attr_accessor :object
+
+         sig do
+           params(
+             id: String,
+             created_at: Integer,
+             metadata: T.anything,
+             object: Symbol
+           ).returns(T.attached_class)
+         end
+         def self.new(
+           # The unique ID of the conversation.
+           id:,
+           # The time at which the conversation was created, measured in seconds since the
+           # Unix epoch.
+           created_at:,
+           # Set of 16 key-value pairs that can be attached to an object. This can be useful
+           # for storing additional information about the object in a structured format, and
+           # querying for objects via API or the dashboard. Keys are strings with a maximum
+           # length of 64 characters. Values are strings with a maximum length of 512
+           # characters.
+           metadata:,
+           # The object type, which is always `conversation`.
+           object: :conversation
+         )
+         end
+
+         sig do
+           override.returns(
+             {
+               id: String,
+               created_at: Integer,
+               metadata: T.anything,
+               object: Symbol
+             }
+           )
+         end
+         def to_hash
+         end
+       end
+     end
+   end
+ end