openai 0.33.0 → 0.34.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +13 -0
  3. data/README.md +1 -1
  4. data/lib/openai/models/conversations/conversation_item.rb +0 -4
  5. data/lib/openai/models/conversations/item_list_params.rb +1 -1
  6. data/lib/openai/models/custom_tool_input_format.rb +0 -6
  7. data/lib/openai/models/image_edit_params.rb +1 -1
  8. data/lib/openai/models/responses/custom_tool.rb +0 -6
  9. data/lib/openai/models/responses/input_token_count_params.rb +283 -0
  10. data/lib/openai/models/responses/input_token_count_response.rb +24 -0
  11. data/lib/openai/models/responses/response_code_interpreter_tool_call.rb +4 -4
  12. data/lib/openai/models/responses/response_computer_tool_call.rb +13 -18
  13. data/lib/openai/models/responses/response_computer_tool_call_output_item.rb +7 -7
  14. data/lib/openai/models/responses/response_create_params.rb +1 -1
  15. data/lib/openai/models/responses/response_includable.rb +5 -3
  16. data/lib/openai/models/responses/response_input_item.rb +0 -4
  17. data/lib/openai/models/responses/response_item.rb +0 -4
  18. data/lib/openai/models/responses/response_output_item.rb +0 -4
  19. data/lib/openai/models/responses/tool.rb +2 -12
  20. data/lib/openai/resources/conversations/items.rb +1 -1
  21. data/lib/openai/resources/images.rb +2 -2
  22. data/lib/openai/resources/responses/input_tokens.rb +61 -0
  23. data/lib/openai/resources/responses.rb +6 -2
  24. data/lib/openai/version.rb +1 -1
  25. data/lib/openai.rb +3 -0
  26. data/rbi/openai/models/custom_tool_input_format.rbi +0 -2
  27. data/rbi/openai/models/responses/custom_tool.rbi +0 -2
  28. data/rbi/openai/models/responses/input_token_count_params.rbi +601 -0
  29. data/rbi/openai/models/responses/input_token_count_response.rbi +35 -0
  30. data/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +4 -4
  31. data/rbi/openai/models/responses/response_computer_tool_call.rbi +19 -13
  32. data/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi +16 -8
  33. data/rbi/openai/models/responses/response_includable.rbi +18 -8
  34. data/rbi/openai/models/responses/tool.rbi +0 -1
  35. data/rbi/openai/resources/responses/input_tokens.rbi +120 -0
  36. data/rbi/openai/resources/responses.rbi +3 -0
  37. data/sig/openai/models/responses/input_token_count_params.rbs +165 -0
  38. data/sig/openai/models/responses/input_token_count_response.rbs +24 -0
  39. data/sig/openai/models/responses/response_computer_tool_call.rbs +9 -5
  40. data/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +9 -5
  41. data/sig/openai/models/responses/response_includable.rbs +11 -7
  42. data/sig/openai/resources/responses/input_tokens.rbs +24 -0
  43. data/sig/openai/resources/responses.rbs +2 -0
  44. metadata +11 -2
@@ -30,11 +30,8 @@ module OpenAI
  # A tool that generates images using a model like `gpt-image-1`.
  variant :image_generation, -> { OpenAI::Responses::Tool::ImageGeneration }

- # A tool that allows the model to execute shell commands in a local environment.
  variant :local_shell, -> { OpenAI::Responses::Tool::LocalShell }

- # A custom tool that processes input using a specified format. Learn more about
- # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools).
  variant :custom, -> { OpenAI::Responses::CustomTool }

  # Search the Internet for sources related to the prompt. Learn more about the
@@ -375,8 +372,7 @@ module OpenAI
  # The container ID.
  variant String

- # Configuration for a code interpreter container. Optionally specify the IDs
- # of the files to run the code on.
+ # Configuration for a code interpreter container. Optionally specify the IDs of the files to run the code on.
  variant -> { OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto }

  class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel
@@ -393,10 +389,6 @@ module OpenAI
  optional :file_ids, OpenAI::Internal::Type::ArrayOf[String]

  # @!method initialize(file_ids: nil, type: :auto)
- # Some parameter documentations has been truncated, see
- # {OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto}
- # for more details.
- #
  # Configuration for a code interpreter container. Optionally specify the IDs of
  # the files to run the code on.
  #
@@ -498,7 +490,7 @@ module OpenAI
  #
  # @param background [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background] Background type for the generated image. One of `transparent`,
  #
- # @param input_fidelity [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::InputFidelity, nil] Control how much effort the model will exert to match the style and features,
+ # @param input_fidelity [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::InputFidelity, nil]
  #
  # @param input_image_mask [OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask] Optional mask for inpainting. Contains `image_url`
  #
@@ -658,8 +650,6 @@ module OpenAI
  required :type, const: :local_shell

  # @!method initialize(type: :local_shell)
- # A tool that allows the model to execute shell commands in a local environment.
- #
  # @param type [Symbol, :local_shell] The type of the local shell tool. Always `local_shell`.
  end

@@ -79,7 +79,7 @@ module OpenAI
  #
  # @param after [String] An item ID to list items after, used in pagination.
  #
- # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Specify additional output data to include in the model response. Currently
+ # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Specify additional output data to include in the model response. Currently suppo
  #
  # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between
  #
@@ -55,7 +55,7 @@ module OpenAI
  #
  # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s).
  #
- # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features,
+ # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil]
  #
  # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind
  #
@@ -114,7 +114,7 @@ module OpenAI
  #
  # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s).
  #
- # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features,
+ # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil]
  #
  # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind
  #
@@ -0,0 +1,61 @@
+ # frozen_string_literal: true
+
+ module OpenAI
+ module Resources
+ class Responses
+ class InputTokens
+ # Some parameter documentations has been truncated, see
+ # {OpenAI::Models::Responses::InputTokenCountParams} for more details.
+ #
+ # Get input token counts
+ #
+ # @overload count(conversation: nil, input: nil, instructions: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, text: nil, tool_choice: nil, tools: nil, truncation: nil, request_options: {})
+ #
+ # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are
+ #
+ # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil] Text, image, or file inputs to the model, used to generate a response
+ #
+ # @param instructions [String, nil] A system (or developer) message inserted into the model's context.
+ #
+ # @param model [String, nil] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a w
+ #
+ # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel.
+ #
+ # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to create multi-tu
+ #
+ # @param reasoning [OpenAI::Models::Reasoning, nil] **gpt-5 and o-series models only** Configuration options for [reasoning models](
+ #
+ # @param text [OpenAI::Models::Responses::InputTokenCountParams::Text, nil] Configuration options for a text response from the model. Can be plain
+ #
+ # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom, nil] How the model should select which tool (or tools) to use when generating
+ #
+ # @param tools [Array<OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::CustomTool, OpenAI::Models::Responses::WebSearchTool, OpenAI::Models::Responses::WebSearchPreviewTool>, nil] An array of tools the model may call while generating a response. You can specif
+ #
+ # @param truncation [Symbol, OpenAI::Models::Responses::InputTokenCountParams::Truncation] The truncation strategy to use for the model response. - `auto`: If the input to
+ #
+ # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+ #
+ # @return [OpenAI::Models::Responses::InputTokenCountResponse]
+ #
+ # @see OpenAI::Models::Responses::InputTokenCountParams
+ def count(params = {})
+ parsed, options = OpenAI::Responses::InputTokenCountParams.dump_request(params)
+ @client.request(
+ method: :post,
+ path: "responses/input_tokens",
+ body: parsed,
+ model: OpenAI::Models::Responses::InputTokenCountResponse,
+ options: options
+ )
+ end
+
+ # @api private
+ #
+ # @param client [OpenAI::Client]
+ def initialize(client:)
+ @client = client
+ end
+ end
+ end
+ end
+ end
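The new `InputTokens#count` helper above posts to `responses/input_tokens` and deserializes the result into `OpenAI::Models::Responses::InputTokenCountResponse`. A minimal usage sketch follows; it assumes a client constructed as in the gem's README (`OpenAI::Client.new` with an API key, which is not shown in this diff), and the model name and input string are illustrative only. The `input_tokens` accessor on the Responses resource that this sketch relies on is wired up in the next hunk.

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Count the input tokens a Responses request would consume, without creating a response.
# Parameter values here are illustrative; any of the params documented on #count may be passed.
result = client.responses.input_tokens.count(
  model: "gpt-4o",
  input: "Write one sentence about token counting."
)

puts result.inspect  # an OpenAI::Models::Responses::InputTokenCountResponse

Internally, as the hunk shows, the params are dumped via `InputTokenCountParams.dump_request` and sent as the body of a POST to `responses/input_tokens`.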
@@ -6,6 +6,9 @@ module OpenAI
  # @return [OpenAI::Resources::Responses::InputItems]
  attr_reader :input_items

+ # @return [OpenAI::Resources::Responses::InputTokens]
+ attr_reader :input_tokens
+
  # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart.
  #
  # Some parameter documentations has been truncated, see
@@ -29,7 +32,7 @@ module OpenAI
  #
  # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are
  #
- # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
+ # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently suppo
  #
  # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
  #
@@ -128,7 +131,7 @@ module OpenAI
  #
  # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are
  #
- # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
+ # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently suppo
  #
  # @param input [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>] Text, image, or file inputs to the model, used to generate a response.
  #
@@ -458,6 +461,7 @@ module OpenAI
  def initialize(client:)
  @client = client
  @input_items = OpenAI::Resources::Responses::InputItems.new(client: client)
+ @input_tokens = OpenAI::Resources::Responses::InputTokens.new(client: client)
  end

  private
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module OpenAI
- VERSION = "0.33.0"
+ VERSION = "0.34.0"
  end
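To pick up this release, a Gemfile entry along the following lines would work; the gem name comes from the diff title, while the pessimistic version constraint is illustrative rather than taken from the diff.

# Gemfile: pin to the 0.34.x series of the openai gem (constraint style is an assumption).
gem "openai", "~> 0.34.0"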
data/lib/openai.rb CHANGED
@@ -533,6 +533,8 @@ require_relative "openai/models/responses/easy_input_message"
  require_relative "openai/models/responses/file_search_tool"
  require_relative "openai/models/responses/function_tool"
  require_relative "openai/models/responses/input_item_list_params"
+ require_relative "openai/models/responses/input_token_count_params"
+ require_relative "openai/models/responses/input_token_count_response"
  require_relative "openai/models/responses/response"
  require_relative "openai/models/responses/response_audio_delta_event"
  require_relative "openai/models/responses/response_audio_done_event"
@@ -747,6 +749,7 @@ require_relative "openai/resources/realtime/calls"
  require_relative "openai/resources/realtime/client_secrets"
  require_relative "openai/resources/responses"
  require_relative "openai/resources/responses/input_items"
+ require_relative "openai/resources/responses/input_tokens"
  require_relative "openai/resources/uploads"
  require_relative "openai/resources/uploads/parts"
  require_relative "openai/resources/vector_stores"
@@ -27,7 +27,6 @@ module OpenAI
  sig { returns(Symbol) }
  attr_accessor :type

- # Unconstrained free-form text.
  sig { params(type: Symbol).returns(T.attached_class) }
  def self.new(
  # Unconstrained text format. Always `text`.
@@ -63,7 +62,6 @@ module OpenAI
  sig { returns(Symbol) }
  attr_accessor :type

- # A grammar defined by the user.
  sig do
  params(
  definition: String,
@@ -48,8 +48,6 @@ module OpenAI
  end
  attr_writer :format_

- # A custom tool that processes input using a specified format. Learn more about
- # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools).
  sig do
  params(
  name: String,