ruby_llm 0.1.0.pre4 → 0.1.0.pre6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/test.yml +5 -2
- data/.overcommit.yml +1 -1
- data/README.md +56 -181
- data/bin/console +6 -0
- data/lib/ruby_llm/chat.rb +95 -0
- data/lib/ruby_llm/chunk.rb +6 -0
- data/lib/ruby_llm/configuration.rb +2 -4
- data/lib/ruby_llm/message.rb +26 -18
- data/lib/ruby_llm/model_capabilities/anthropic.rb +43 -48
- data/lib/ruby_llm/model_capabilities/openai.rb +82 -89
- data/lib/ruby_llm/model_info.rb +26 -17
- data/lib/ruby_llm/models.json +686 -0
- data/lib/ruby_llm/models.rb +52 -0
- data/lib/ruby_llm/provider.rb +99 -0
- data/lib/ruby_llm/providers/anthropic.rb +92 -243
- data/lib/ruby_llm/providers/openai.rb +130 -174
- data/lib/ruby_llm/tool.rb +71 -50
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +35 -37
- data/lib/tasks/models.rake +25 -0
- data/ruby_llm.gemspec +8 -6
- metadata +39 -15
- data/lib/ruby_llm/active_record/acts_as.rb +0 -115
- data/lib/ruby_llm/client.rb +0 -70
- data/lib/ruby_llm/conversation.rb +0 -19
- data/lib/ruby_llm/model_capabilities/base.rb +0 -35
- data/lib/ruby_llm/providers/base.rb +0 -67
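Taken together, the file list shows the shape of this release: the old Client, Conversation, and per-provider base classes are gone, replaced by a Chat class, a Models registry backed by a bundled models.json, and a shared Provider mixin. A minimal sketch of the new entry points, pieced together from the diffs below; chat.rb itself is not shown in this excerpt, so the conversation method is an assumption:

```ruby
require 'ruby_llm'

# Configuration block, as used by the new ruby_llm:update_models rake task below.
RubyLLM.configure do |config|
  config.openai_api_key = ENV.fetch('OPENAI_API_KEY')
end

# RubyLLM.chat now returns a Chat instance (lib/ruby_llm/chat.rb).
chat = RubyLLM.chat(model: 'gpt-4o-mini') # model id is only an example

# Hypothetical call: chat.rb is not part of this excerpt, so the exact
# method name is assumed here for illustration only.
# chat.ask 'Summarize the 0.1.0.pre6 changes in one sentence.'
```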
data/lib/ruby_llm/providers/openai.rb
CHANGED
@@ -2,214 +2,170 @@

 module RubyLLM
   module Providers
-    class OpenAI
+    class OpenAI
+      include Provider
+
+      private
+
+      def api_base
+        'https://api.openai.com'
+      end
+
+      def headers
+        {
+          'Authorization' => "Bearer #{RubyLLM.config.openai_api_key}"
         }
+      end

+      def completion_url
+        '/v1/chat/completions'
+      end

+      def models_url
+        '/v1/models'
+      end

+      def build_payload(messages, tools, model:, temperature: 0.7, stream: false)
+        {
+          model: model,
+          messages: format_messages(messages),
+          temperature: temperature,
+          stream: stream
+        }.tap do |payload|
+          if tools.any?
+            payload[:tools] = tools.map { |t| tool_for(t) }
+            payload[:tool_choice] = 'auto'
+          end
         end
-      rescue Faraday::TimeoutError
-        raise RubyLLM::Error, 'Request timed out'
-      rescue Faraday::ConnectionFailed
-        raise RubyLLM::Error, 'Connection failed'
-      rescue Faraday::ClientError => e
-        raise RubyLLM::Error, 'Client error' unless e.response
-
-        error_msg = e.response[:body]['error']&.fetch('message', nil) || "HTTP #{e.response[:status]}"
-        raise RubyLLM::Error, "API error: #{error_msg}"
       end

+      def format_messages(messages)
+        messages.map do |msg|
+          {
+            role: msg.role.to_s,
+            content: msg.content,
+            tool_calls: format_tool_calls(msg.tool_calls)
+          }.compact
         end
+      end

+      def format_tool_calls(tool_calls)
+        return nil unless tool_calls

-          owned_by: model['owned_by']
-        },
-        context_window: capabilities.determine_context_window(model['id']),
-        max_tokens: capabilities.determine_max_tokens(model['id']),
-        supports_vision: capabilities.supports_vision?(model['id']),
-        supports_functions: capabilities.supports_functions?(model['id']),
-        supports_json_mode: capabilities.supports_json_mode?(model['id']),
-        input_price_per_million: capabilities.get_input_price(model['id']),
-        output_price_per_million: capabilities.get_output_price(model['id'])
-      )
+        tool_calls.map do |tc|
+          {
+            id: tc[:id],
+            type: 'function',
+            function: {
+              name: tc[:name],
+              arguments: tc[:arguments]
+            }
+          }
         end
-      rescue Faraday::Error => e
-        handle_error(e)
       end

-      def tool_to_function(tool)
+      def tool_for(tool)
         {
+          type: 'function',
+          function: {
+            name: tool.name,
+            description: tool.description,
+            parameters: {
+              type: 'object',
+              properties: tool.parameters.transform_values { |param| param_schema(param) },
+              required: tool.parameters.select { |_, p| p.required }.keys
+            }
           }
         }
       end

-      puts 'Response from OpenAI:' if ENV['RUBY_LLM_DEBUG']
-      puts JSON.pretty_generate(response.body) if ENV['RUBY_LLM_DEBUG']
-
-      # Check for API errors
-      check_for_api_error(response)
-
-      # Check for HTTP errors
-      if response.status >= 400
-        error_msg = response.body['error']&.fetch('message', nil) || "HTTP #{response.status}"
-        raise RubyLLM::Error, "API error: #{error_msg}"
-      end
-
-      handle_response(response, tools, payload, &block)
+      def param_schema(param)
+        {
+          type: param.type,
+          description: param.description
+        }.compact
       end

+      def parse_completion_response(response)
         data = response.body
-        return [] unless message_data
-
-        if message_data['function_call'] && tools
-          # Create function call message
-          function_message = Message.new(
-            role: :assistant,
-            content: message_data['content'],
-            tool_calls: [message_data['function_call']]
-          )
-          yield function_message if block_given?
-
-          # Execute function and create result message
-          result = handle_function_call(message_data['function_call'], tools)
-          result_message = Message.new(
-            role: :tool,
-            content: result,
-            tool_results: {
-              name: message_data['function_call']['name'],
-              content: result
-            }
-          )
-          yield result_message if block_given?
-
-          # Get final response with function results
-          new_messages = payload[:messages] + [
-            { role: 'assistant', content: message_data['content'], function_call: message_data['function_call'] },
-            { role: 'function', name: message_data['function_call']['name'], content: result }
-          ]
-
-          final_response = create_chat_completion(
-            payload.merge(messages: new_messages),
-            tools,
-            &block
-          )
+        return if data.empty?

-        [Message.new(
-          role: :assistant,
-          content: message_data['content'],
-          token_usage: token_usage,
-          model_id: data['model']
-        )]
+        message_data = data.dig('choices', 0, 'message')
+        return unless message_data
+
+        Message.new(
+          role: :assistant,
+          content: message_data['content'],
+          tool_calls: parse_tool_calls(message_data['tool_calls']),
+          input_tokens: data['usage']['prompt_tokens'],
+          output_tokens: data['usage']['completion_tokens'],
+          model_id: data['model']
+        )
       end

-        tool = tools.find { |t| t.name == function_call['name'] }
-        return unless tool
+      def parse_tool_calls(tool_calls)
+        return nil unless tool_calls&.any?

+        tool_calls.map do |tc|
+          {
+            id: tc['id'],
+            name: tc.dig('function', 'name'),
+            arguments: tc.dig('function', 'arguments')
+          }
         end
       end

-        error_msg = error.response[:body]['error']&.fetch('message', nil) || "HTTP #{error.response[:status]}"
-        raise RubyLLM::Error, "API error: #{error_msg}"
+      def parse_models_response(response)
+        (response.body['data'] || []).map do |model|
+          model_info = begin
+            Models.find(model['id'])
+          rescue StandardError
+            nil
+          end
+          next unless model_info

+          model_info.tap do |info|
+            info.metadata.merge!(
+              object: model['object'],
+              owned_by: model['owned_by']
+            )
+          end
+        end.compact
       end

-        elsif response_body['error']
-          raise RubyLLM::Error, "API error: #{response_body['error']['message']}"
-        else
-          raise RubyLLM::Error, "API error: #{error.response[:status]}"
+      def handle_stream(&block)
+        to_json_stream do |data|
+          block.call(
+            Chunk.new(
+              role: :assistant,
+              model_id: data['model'],
+              content: data.dig('choices', 0, 'delta', 'content')
+            )
+          )
         end
       end

+      def parse_list_models_response(response)
+        capabilities = ModelCapabilities::OpenAI
+        (response.body['data'] || []).map do |model|
+          ModelInfo.new(
+            id: model['id'],
+            created_at: Time.at(model['created']),
+            display_name: capabilities.format_display_name(model['id']),
+            provider: 'openai',
+            metadata: {
+              object: model['object'],
+              owned_by: model['owned_by']
+            },
+            context_window: capabilities.context_window_for(model['id']),
+            max_tokens: capabilities.max_tokens_for(model['id']),
+            supports_vision: capabilities.supports_vision?(model['id']),
+            supports_functions: capabilities.supports_functions?(model['id']),
+            supports_json_mode: capabilities.supports_json_mode?(model['id']),
+            input_price_per_million: capabilities.input_price_for(model['id']),
+            output_price_per_million: capabilities.output_price_for(model['id'])
+          )
+        end
       end
     end
   end
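The rewritten OpenAI class above illustrates the new provider contract: a provider includes the shared Provider module (lib/ruby_llm/provider.rb, not shown in this excerpt) and supplies endpoint details, payload building, and response parsing, while request execution and streaming plumbing live in the mixin. A hedged sketch of what a third provider could look like under the same pattern; the hook names are mirrored from the OpenAI class, since the Provider module's exact expectations are not visible here:

```ruby
module RubyLLM
  module Providers
    # Hypothetical OpenAI-compatible endpoint, modeled on the OpenAI class above.
    # A real provider would also implement build_payload and the parse_* /
    # handle_stream hooks shown in the diff.
    class LocalAI
      include Provider

      private

      def api_base
        'http://localhost:8080' # assumed local server, for illustration
      end

      def headers
        { 'Authorization' => "Bearer #{ENV['LOCALAI_API_KEY']}" }
      end

      def completion_url
        '/v1/chat/completions'
      end

      def models_url
        '/v1/models'
      end
    end
  end
end

# Registration would mirror the calls at the bottom of lib/ruby_llm.rb:
#   RubyLLM::Provider.register :localai, RubyLLM::Providers::LocalAI
```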
data/lib/ruby_llm/tool.rb
CHANGED
@@ -1,75 +1,96 @@
 # frozen_string_literal: true

 module RubyLLM
-  # Represents a tool/function that can be called by an LLM
   class Tool
+    class Parameter
+      attr_reader :name, :type, :description, :required
+
+      def initialize(name, type: 'string', description: nil, required: true)
+        @name = name
+        @type = type
+        @description = description
+        @required = required
+      end

+      def to_h
+        {
+          type: type,
+          description: description,
+          required: required
+        }.compact
+      end
+    end

-        required: %i[req keyreq].include?(param_type)
-      }
+    class Builder
+      def initialize(tool)
+        @tool = tool
       end

-      if args.is_a?(Hash)
-        instance.method(method_object.name).call(**args)
-      else
-        instance.method(method_object.name).call(args)
-      end
+      def description(text)
+        @tool.instance_variable_set(:@description, text)
+        self
+      end
+
+      def param(name, type: 'string', description: nil, required: true)
+        @tool.parameters[name] = Parameter.new(name, type: type, description: description, required: required)
+        self
+      end
+
+      def handler(&block)
+        @tool.instance_variable_set(:@handler, block)
+        @tool
       end
     end

-      @name = name
-      @description = description
-      @parameters = parameters
-      @handler = block
+    attr_reader :name, :description, :parameters, :handler

+    def self.define(name, &block)
+      tool = new(name)
+      builder = Builder.new(tool)
+      builder.instance_eval(&block)
+      tool
     end

+    def initialize(name)
+      @name = name
+      @parameters = {}
     end

+    def call(args)
+      raise Error, "No handler defined for tool #{name}" unless @handler

+      begin
+        args = symbolize_keys(args)
+        @handler.call(args)
+      rescue StandardError => e
+        { error: e.message }
+      end
     end

+    class << self
+      def from_method(method, description: nil)
+        define(method.name.to_s) do
+          description description if description
+
+          method.parameters.each do |type, name|
+            param name, required: (type == :req)
+          end

+          handler do |args|
+            method.owner.new.public_send(method.name, **args)
+          end
+        end
       end
+    end

+    private
+
+    def symbolize_keys(hash)
+      hash.transform_keys do |key|
+        key.to_sym
+      rescue StandardError
+        key
+      end
     end
   end
 end
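The new Tool above replaces reflection over Ruby method signatures with an explicit declaration DSL: Tool.define instance-evals a block against a Builder, parameters become Parameter objects, and call symbolizes argument keys before handing them to the handler, returning { error: ... } if the handler raises. A small sketch using only the methods shown in this diff; the weather lookup itself is invented for illustration:

```ruby
weather = RubyLLM::Tool.define('weather') do
  description 'Look up the current temperature for a city'
  param :city, description: 'City name'
  param :unit, description: 'celsius or fahrenheit', required: false

  handler do |args|
    # Illustrative body only - a real tool would call out to a weather service.
    { city: args[:city], temperature: 21, unit: args[:unit] || 'celsius' }
  end
end

# String keys are symbolized by Tool#call before the handler runs.
weather.call('city' => 'Berlin')
# => { city: 'Berlin', temperature: 21, unit: 'celsius' }
```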
data/lib/ruby_llm/version.rb
CHANGED
data/lib/ruby_llm.rb
CHANGED
@@ -3,66 +3,64 @@
 require 'zeitwerk'
 require 'faraday'
 require 'json'
-require 'securerandom'
 require 'logger'
+require 'event_stream_parser'
+require 'securerandom'

-# Main module for RubyLLM functionality
 module RubyLLM
   class Error < StandardError; end

   class << self
+    def chat(model: nil)
+      Chat.new(model: model)
+    end

+    def models
+      Models
     end

     def configure
-      yield
+      yield config
+    end
+
+    def config
+      @config ||= Configuration.new
     end

+    def logger
+      @logger ||= Logger.new($stdout)
     end
+  end
+end

-  @loader ||= begin
-    loader = Zeitwerk::Loader.for_gem
+loader = Zeitwerk::Loader.for_gem

+# Add lib directory to the load path
+loader.push_dir(File.expand_path('..', __dir__))

+# Configure custom inflections
+loader.inflector.inflect(
+  'ruby_llm' => 'RubyLLM',
+  'llm' => 'LLM',
+  'openai' => 'OpenAI',
+  'api' => 'API'
+)

+# Ignore Rails-specific files and specs
+loader.ignore("#{__dir__}/ruby_llm/railtie.rb")
+loader.ignore("#{__dir__}/ruby_llm/active_record")
+loader.ignore(File.expand_path('../spec', __dir__).to_s)

-    loader.logger = Logger.new($stdout) if ENV['RUBY_LLM_DEBUG']
-    loader.enable_reloading if ENV['RUBY_LLM_DEBUG']
+loader.enable_reloading if ENV['RUBY_LLM_DEBUG']

-    loader
+loader.setup
+loader.eager_load if ENV['RUBY_LLM_DEBUG']

+RubyLLM::Provider.register :openai, RubyLLM::Providers::OpenAI
+RubyLLM::Provider.register :anthropic, RubyLLM::Providers::Anthropic

 # Load Rails integration if Rails is defined
 if defined?(Rails)
-  require 'active_support'
   require 'ruby_llm/railtie'
   require 'ruby_llm/active_record/acts_as'
 end
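Because providers now register themselves when the gem loads and model metadata ships in models.json, looking up a model is an offline call; only a refresh (used by the rake task that follows) talks to the provider APIs. A short sketch built from the calls visible in this diff, namely Models.find as used inside the OpenAI provider and the to_h/provider readers used by the rake task; the specific model id is only an example:

```ruby
require 'ruby_llm'

# Offline lookup against the bundled models.json.
info = RubyLLM::Models.find('gpt-4o-mini')
puts info.provider          # => "openai"
puts info.to_h.keys.inspect

# Refreshing re-queries each registered provider, so API keys must be
# configured first (see the ruby_llm:update_models task below).
# RubyLLM.models.refresh
```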
data/lib/tasks/models.rake
ADDED
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+namespace :ruby_llm do
+  desc 'Update available models from providers'
+  task :update_models do
+    require 'ruby_llm'
+
+    # Configure API keys
+    RubyLLM.configure do |config|
+      config.openai_api_key = ENV.fetch('OPENAI_API_KEY')
+      config.anthropic_api_key = ENV.fetch('ANTHROPIC_API_KEY')
+    end
+
+    # Get all models
+    models = RubyLLM.models.refresh
+
+    # Write to models.json
+    models_file = File.expand_path('../../lib/ruby_llm/models.json', __dir__)
+    File.write(models_file, JSON.pretty_generate(models.map(&:to_h)))
+
+    puts "Updated models.json with #{models.size} models:"
+    puts "OpenAI models: #{models.count { |m| m.provider == 'openai' }}"
+    puts "Anthropic models: #{models.count { |m| m.provider == 'anthropic' }}"
+  end
+end
data/ruby_llm.gemspec
CHANGED
@@ -8,18 +8,19 @@ Gem::Specification.new do |spec|
   spec.authors = ['Carmine Paolino']
   spec.email = ['carmine@paolino.me']

-  spec.summary = '
-  spec.description = '
+  spec.summary = 'Clean Ruby interface to modern AI language models'
+  spec.description = 'A delightful Ruby way to work with AI language models. Provides a unified interface to OpenAI' \
+                     ' and Anthropic models with automatic token counting, proper streaming support, and a focus on' \
+                     ' developer happiness. No wrapping your head around multiple APIs - just clean Ruby code that' \
+                     ' works.'
   spec.homepage = 'https://github.com/crmne/ruby_llm'
   spec.license = 'MIT'
   spec.required_ruby_version = Gem::Requirement.new('>= 2.7.0')

   spec.metadata['homepage_uri'] = spec.homepage
   spec.metadata['source_code_uri'] = spec.homepage
-  spec.metadata['changelog_uri'] = "#{spec.homepage}/
-  spec.metadata['documentation_uri'] =
+  spec.metadata['changelog_uri'] = "#{spec.homepage}/commits/main"
+  spec.metadata['documentation_uri'] = spec.homepage
   spec.metadata['bug_tracker_uri'] = "#{spec.homepage}/issues"

   # Specify which files should be added to the gem when it is released.
@@ -32,6 +33,7 @@ Gem::Specification.new do |spec|
   spec.require_paths = ['lib']

   # Runtime dependencies
+  spec.add_dependency 'event_stream_parser', '>= 0.3.0', '< 2.0.0'
   spec.add_dependency 'faraday', '>= 2.0'
   spec.add_dependency 'faraday-multipart', '>= 1.0'
   spec.add_dependency 'zeitwerk', '>= 2.6'