ruby_llm-responses_api 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,94 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module Providers
+     # OpenAI Responses API provider for RubyLLM.
+     # Implements the new Responses API which provides built-in tools,
+     # stateful conversations, background mode, and MCP support.
+     class OpenAIResponses
+       include OpenAIResponses::Chat
+       include OpenAIResponses::Streaming
+       include OpenAIResponses::Tools
+       include OpenAIResponses::Models
+       include OpenAIResponses::Media
+
+       def api_base
+         @config.openai_api_base || 'https://api.openai.com/v1'
+       end
+
+       def headers
+         {
+           'Authorization' => "Bearer #{@config.openai_api_key}",
+           'OpenAI-Organization' => @config.openai_organization_id,
+           'OpenAI-Project' => @config.openai_project_id
+         }.compact
+       end
+
+       # Retrieve a stored response by ID
+       # @param response_id [String] The response ID to retrieve
+       # @return [Hash] The response data
+       def retrieve_response(response_id)
+         response = @connection.get(Background.retrieve_url(response_id))
+         response.body
+       end
+
+       # Cancel a background response
+       # @param response_id [String] The response ID to cancel
+       # @return [Hash] The cancellation result
+       def cancel_response(response_id)
+         response = @connection.post(Background.cancel_url(response_id), {})
+         response.body
+       end
+
+       # Delete a stored response
+       # @param response_id [String] The response ID to delete
+       # @return [Hash] The deletion result
+       def delete_response(response_id)
+         response = @connection.delete(Background.retrieve_url(response_id))
+         response.body
+       end
+
+       # List input items for a response
+       # @param response_id [String] The response ID
+       # @return [Hash] The input items
+       def list_input_items(response_id)
+         response = @connection.get(Background.input_items_url(response_id))
+         response.body
+       end
+
+       # Poll a background response until completion
+       # @param response_id [String] The response ID to poll
+       # @param interval [Float] Polling interval in seconds
+       # @param timeout [Float, nil] Maximum time to wait in seconds
+       # @yield [Hash] Called with response data on each poll
+       # @return [Hash] The final response data
+       def poll_response(response_id, interval: 1.0, timeout: nil)
+         start_time = Time.now
+         loop do
+           response_data = retrieve_response(response_id)
+           yield response_data if block_given?
+
+           return response_data if Background.complete?(response_data)
+
+           raise Error, "Polling timeout after #{timeout} seconds" if timeout && (Time.now - start_time) > timeout
+
+           sleep interval
+         end
+       end
+
+       class << self
+         def capabilities
+           OpenAIResponses::Capabilities
+         end
+
+         def configuration_requirements
+           %i[openai_api_key]
+         end
+
+         def slug
+           :openai_responses
+         end
+       end
+     end
+   end
+ end
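The class above leaves chat, streaming, tool, model, and media handling to the included modules and adds plain HTTP helpers for stored/background responses; poll_response loops over retrieve_response until Background.complete? reports completion, raising on the optional timeout. A minimal sketch of driving those helpers directly; the constructor call and the 'status' field name are assumptions (about how RubyLLM instantiates providers and what the OpenAI Responses API returns), not something this diff shows:

require 'ruby_llm-responses_api'

provider = RubyLLM::Providers::OpenAIResponses.new(RubyLLM.config) # assumed constructor
response_id = 'resp_123'                                           # ID from an earlier background request

# Check every 2 seconds, give up after 5 minutes, log status on each poll.
final = provider.poll_response(response_id, interval: 2.0, timeout: 300.0) do |data|
  puts "status: #{data['status']}" # field name assumed from the OpenAI Responses API
end

# A queued response can also be cancelled, and a stored one deleted:
provider.cancel_response(response_id)
provider.delete_response(response_id)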
@@ -0,0 +1,4 @@
+ # frozen_string_literal: true
+
+ # Alias for rubyllm_responses_api to match gem naming convention
+ require_relative 'rubyllm_responses_api'
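Judging by the files list in the gem metadata below, this four-line file is lib/ruby_llm-responses_api.rb: a thin alias named after the gem so Bundler's default require resolves, delegating to the real entry point in lib/rubyllm_responses_api.rb. Either require loads the same code:

require 'ruby_llm-responses_api'   # matches the gem name; Bundler's default require
# require 'rubyllm_responses_api' # underscore variant; the alias file above delegates here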
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ require 'ruby_llm'
+
+ # Provider class must be loaded first to define the class
+ require_relative 'ruby_llm/providers/openai_responses/base'
+
+ # Core modules
+ require_relative 'ruby_llm/providers/openai_responses/capabilities'
+ require_relative 'ruby_llm/providers/openai_responses/media'
+ require_relative 'ruby_llm/providers/openai_responses/tools'
+ require_relative 'ruby_llm/providers/openai_responses/models'
+ require_relative 'ruby_llm/providers/openai_responses/streaming'
+ require_relative 'ruby_llm/providers/openai_responses/chat'
+
+ # Advanced features
+ require_relative 'ruby_llm/providers/openai_responses/built_in_tools'
+ require_relative 'ruby_llm/providers/openai_responses/state'
+ require_relative 'ruby_llm/providers/openai_responses/background'
+ require_relative 'ruby_llm/providers/openai_responses/message_extension'
+ require_relative 'ruby_llm/providers/openai_responses/model_registry'
+ require_relative 'ruby_llm/providers/openai_responses/active_record_extension'
+
+ # Include all modules in the provider class
+ require_relative 'ruby_llm/providers/openai_responses'
+
+ # Register the provider
+ RubyLLM::Provider.register :openai_responses, RubyLLM::Providers::OpenAIResponses
+
+ # Register models for this provider
+ RubyLLM::Providers::OpenAIResponses::ModelRegistry.register_all!
+
+ # Extend RubyLLM module with ResponsesAPI namespace
+ module RubyLLM
+   # ResponsesAPI namespace for direct access to helpers and version
+   module ResponsesAPI
+     VERSION = '0.1.0'
+
+     # Shorthand access to built-in tool helpers
+     BuiltInTools = Providers::OpenAIResponses::BuiltInTools
+     State = Providers::OpenAIResponses::State
+     Background = Providers::OpenAIResponses::Background
+   end
+ end
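With the entry file above loaded, the provider is registered as :openai_responses, its models are added to the registry, and the RubyLLM::ResponsesAPI shortcuts exist. A usage sketch, assuming RubyLLM's standard configure/chat API; the model name and the provider:/assume_model_exists: options are assumptions about how core RubyLLM routes to a registered provider, not something this diff shows:

require 'ruby_llm-responses_api'

RubyLLM.configure do |config|
  config.openai_api_key = ENV['OPENAI_API_KEY']
end

# Route a conversation through the Responses API provider registered above.
chat = RubyLLM.chat(model: 'gpt-4o', provider: :openai_responses, assume_model_exists: true)
chat.ask('Summarize the Responses API in one sentence.')

# Shorthand constants defined by this gem:
RubyLLM::ResponsesAPI::VERSION    # => "0.1.0"
RubyLLM::ResponsesAPI::Background # => RubyLLM::Providers::OpenAIResponses::Background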
metadata ADDED
@@ -0,0 +1,177 @@
+ --- !ruby/object:Gem::Specification
+ name: ruby_llm-responses_api
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Chris Hasinski
+ bindir: bin
+ cert_chain: []
+ date: 1980-01-02 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: ruby_llm
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: activerecord
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '7.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '7.0'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '13.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '13.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ - !ruby/object:Gem::Dependency
+   name: rubocop
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: sqlite3
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.4'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.4'
+ - !ruby/object:Gem::Dependency
+   name: vcr
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '6.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '6.0'
+ - !ruby/object:Gem::Dependency
+   name: webmock
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ description: A RubyLLM provider that implements OpenAI's Responses API, providing
+   access to built-in tools (web search, code interpreter, file search), stateful conversations,
+   background mode, and MCP support.
+ email:
+ - krzysztof.hasinski@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - CHANGELOG.md
+ - LICENSE.txt
+ - README.md
+ - lib/ruby_llm-responses_api.rb
+ - lib/ruby_llm/providers/openai_responses.rb
+ - lib/ruby_llm/providers/openai_responses/active_record_extension.rb
+ - lib/ruby_llm/providers/openai_responses/background.rb
+ - lib/ruby_llm/providers/openai_responses/base.rb
+ - lib/ruby_llm/providers/openai_responses/built_in_tools.rb
+ - lib/ruby_llm/providers/openai_responses/capabilities.rb
+ - lib/ruby_llm/providers/openai_responses/chat.rb
+ - lib/ruby_llm/providers/openai_responses/media.rb
+ - lib/ruby_llm/providers/openai_responses/message_extension.rb
+ - lib/ruby_llm/providers/openai_responses/model_registry.rb
+ - lib/ruby_llm/providers/openai_responses/models.rb
+ - lib/ruby_llm/providers/openai_responses/state.rb
+ - lib/ruby_llm/providers/openai_responses/streaming.rb
+ - lib/ruby_llm/providers/openai_responses/tools.rb
+ - lib/rubyllm_responses_api.rb
+ homepage: https://github.com/khasinski/ruby_llm-responses_api
+ licenses:
+ - MIT
+ metadata:
+   homepage_uri: https://github.com/khasinski/ruby_llm-responses_api
+   source_code_uri: https://github.com/khasinski/ruby_llm-responses_api
+   changelog_uri: https://github.com/khasinski/ruby_llm-responses_api/blob/main/CHANGELOG.md
+   rubygems_mfa_required: 'true'
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: 3.1.0
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 4.0.2
+ specification_version: 4
+ summary: OpenAI Responses API provider for RubyLLM
+ test_files: []
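Per the specification above, the only runtime dependency is ruby_llm (>= 1.0) and Ruby 3.1+ is required, so installation is a standard Gemfile entry (version pins below are illustrative):

# Gemfile
gem 'ruby_llm', '>= 1.0'
gem 'ruby_llm-responses_api', '~> 0.1'

Bundler's default require then loads lib/ruby_llm-responses_api.rb, which chains into the entry file shown earlier and registers the provider.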