ruby_llm_community 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/LICENSE +22 -0
- data/README.md +172 -0
- data/lib/generators/ruby_llm/install/templates/INSTALL_INFO.md.tt +108 -0
- data/lib/generators/ruby_llm/install/templates/chat_model.rb.tt +3 -0
- data/lib/generators/ruby_llm/install/templates/create_chats_migration.rb.tt +8 -0
- data/lib/generators/ruby_llm/install/templates/create_messages_migration.rb.tt +15 -0
- data/lib/generators/ruby_llm/install/templates/create_tool_calls_migration.rb.tt +14 -0
- data/lib/generators/ruby_llm/install/templates/initializer.rb.tt +6 -0
- data/lib/generators/ruby_llm/install/templates/message_model.rb.tt +3 -0
- data/lib/generators/ruby_llm/install/templates/tool_call_model.rb.tt +3 -0
- data/lib/generators/ruby_llm/install_generator.rb +121 -0
- data/lib/ruby_llm/active_record/acts_as.rb +382 -0
- data/lib/ruby_llm/aliases.json +217 -0
- data/lib/ruby_llm/aliases.rb +56 -0
- data/lib/ruby_llm/attachment.rb +164 -0
- data/lib/ruby_llm/chat.rb +219 -0
- data/lib/ruby_llm/chunk.rb +6 -0
- data/lib/ruby_llm/configuration.rb +75 -0
- data/lib/ruby_llm/connection.rb +126 -0
- data/lib/ruby_llm/content.rb +52 -0
- data/lib/ruby_llm/context.rb +29 -0
- data/lib/ruby_llm/embedding.rb +30 -0
- data/lib/ruby_llm/error.rb +84 -0
- data/lib/ruby_llm/image.rb +53 -0
- data/lib/ruby_llm/message.rb +76 -0
- data/lib/ruby_llm/mime_type.rb +67 -0
- data/lib/ruby_llm/model/info.rb +101 -0
- data/lib/ruby_llm/model/modalities.rb +22 -0
- data/lib/ruby_llm/model/pricing.rb +51 -0
- data/lib/ruby_llm/model/pricing_category.rb +48 -0
- data/lib/ruby_llm/model/pricing_tier.rb +34 -0
- data/lib/ruby_llm/model.rb +7 -0
- data/lib/ruby_llm/models.json +29924 -0
- data/lib/ruby_llm/models.rb +218 -0
- data/lib/ruby_llm/models_schema.json +168 -0
- data/lib/ruby_llm/provider.rb +219 -0
- data/lib/ruby_llm/providers/anthropic/capabilities.rb +179 -0
- data/lib/ruby_llm/providers/anthropic/chat.rb +106 -0
- data/lib/ruby_llm/providers/anthropic/embeddings.rb +20 -0
- data/lib/ruby_llm/providers/anthropic/media.rb +92 -0
- data/lib/ruby_llm/providers/anthropic/models.rb +48 -0
- data/lib/ruby_llm/providers/anthropic/streaming.rb +43 -0
- data/lib/ruby_llm/providers/anthropic/tools.rb +108 -0
- data/lib/ruby_llm/providers/anthropic.rb +37 -0
- data/lib/ruby_llm/providers/bedrock/capabilities.rb +167 -0
- data/lib/ruby_llm/providers/bedrock/chat.rb +65 -0
- data/lib/ruby_llm/providers/bedrock/media.rb +61 -0
- data/lib/ruby_llm/providers/bedrock/models.rb +82 -0
- data/lib/ruby_llm/providers/bedrock/signing.rb +831 -0
- data/lib/ruby_llm/providers/bedrock/streaming/base.rb +63 -0
- data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +63 -0
- data/lib/ruby_llm/providers/bedrock/streaming/message_processing.rb +79 -0
- data/lib/ruby_llm/providers/bedrock/streaming/payload_processing.rb +90 -0
- data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +91 -0
- data/lib/ruby_llm/providers/bedrock/streaming.rb +36 -0
- data/lib/ruby_llm/providers/bedrock.rb +83 -0
- data/lib/ruby_llm/providers/deepseek/capabilities.rb +131 -0
- data/lib/ruby_llm/providers/deepseek/chat.rb +17 -0
- data/lib/ruby_llm/providers/deepseek.rb +30 -0
- data/lib/ruby_llm/providers/gemini/capabilities.rb +351 -0
- data/lib/ruby_llm/providers/gemini/chat.rb +139 -0
- data/lib/ruby_llm/providers/gemini/embeddings.rb +39 -0
- data/lib/ruby_llm/providers/gemini/images.rb +48 -0
- data/lib/ruby_llm/providers/gemini/media.rb +55 -0
- data/lib/ruby_llm/providers/gemini/models.rb +41 -0
- data/lib/ruby_llm/providers/gemini/streaming.rb +58 -0
- data/lib/ruby_llm/providers/gemini/tools.rb +82 -0
- data/lib/ruby_llm/providers/gemini.rb +36 -0
- data/lib/ruby_llm/providers/gpustack/chat.rb +17 -0
- data/lib/ruby_llm/providers/gpustack/models.rb +55 -0
- data/lib/ruby_llm/providers/gpustack.rb +33 -0
- data/lib/ruby_llm/providers/mistral/capabilities.rb +163 -0
- data/lib/ruby_llm/providers/mistral/chat.rb +26 -0
- data/lib/ruby_llm/providers/mistral/embeddings.rb +36 -0
- data/lib/ruby_llm/providers/mistral/models.rb +49 -0
- data/lib/ruby_llm/providers/mistral.rb +32 -0
- data/lib/ruby_llm/providers/ollama/chat.rb +28 -0
- data/lib/ruby_llm/providers/ollama/media.rb +50 -0
- data/lib/ruby_llm/providers/ollama.rb +29 -0
- data/lib/ruby_llm/providers/openai/capabilities.rb +306 -0
- data/lib/ruby_llm/providers/openai/chat.rb +86 -0
- data/lib/ruby_llm/providers/openai/embeddings.rb +36 -0
- data/lib/ruby_llm/providers/openai/images.rb +38 -0
- data/lib/ruby_llm/providers/openai/media.rb +81 -0
- data/lib/ruby_llm/providers/openai/models.rb +39 -0
- data/lib/ruby_llm/providers/openai/response.rb +115 -0
- data/lib/ruby_llm/providers/openai/response_media.rb +76 -0
- data/lib/ruby_llm/providers/openai/streaming.rb +190 -0
- data/lib/ruby_llm/providers/openai/tools.rb +100 -0
- data/lib/ruby_llm/providers/openai.rb +44 -0
- data/lib/ruby_llm/providers/openai_base.rb +44 -0
- data/lib/ruby_llm/providers/openrouter/models.rb +88 -0
- data/lib/ruby_llm/providers/openrouter.rb +26 -0
- data/lib/ruby_llm/providers/perplexity/capabilities.rb +138 -0
- data/lib/ruby_llm/providers/perplexity/chat.rb +17 -0
- data/lib/ruby_llm/providers/perplexity/models.rb +42 -0
- data/lib/ruby_llm/providers/perplexity.rb +52 -0
- data/lib/ruby_llm/railtie.rb +17 -0
- data/lib/ruby_llm/stream_accumulator.rb +97 -0
- data/lib/ruby_llm/streaming.rb +162 -0
- data/lib/ruby_llm/tool.rb +100 -0
- data/lib/ruby_llm/tool_call.rb +31 -0
- data/lib/ruby_llm/utils.rb +49 -0
- data/lib/ruby_llm/version.rb +5 -0
- data/lib/ruby_llm.rb +98 -0
- data/lib/tasks/aliases.rake +235 -0
- data/lib/tasks/models_docs.rake +224 -0
- data/lib/tasks/models_update.rake +108 -0
- data/lib/tasks/release.rake +32 -0
- data/lib/tasks/vcr.rake +99 -0
- metadata +128 -7
data/lib/ruby_llm/providers/anthropic/capabilities.rb
@@ -0,0 +1,179 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Determines capabilities and pricing for Anthropic models
      module Capabilities
        module_function

        # Determines the context window size for a given model
        # @param model_id [String] the model identifier
        # @return [Integer] the context window size in tokens
        def determine_context_window(_model_id)
          # All Claude 3 and 3.5 and 3.7 models have 200K token context windows
          200_000
        end

        # Determines the maximum output tokens for a given model
        # @param model_id [String] the model identifier
        # @return [Integer] the maximum output tokens
        def determine_max_tokens(model_id)
          case model_id
          when /claude-3-7-sonnet/, /claude-3-5/ then 8_192
          else 4_096
          end
        end

        # Gets the input price per million tokens for a given model
        # @param model_id [String] the model identifier
        # @return [Float] the price per million tokens for input
        def get_input_price(model_id)
          PRICES.dig(model_family(model_id), :input) || default_input_price
        end

        # Gets the output price per million tokens for a given model
        # @param model_id [String] the model identifier
        # @return [Float] the price per million tokens for output
        def get_output_price(model_id)
          PRICES.dig(model_family(model_id), :output) || default_output_price
        end

        # Determines if a model supports vision capabilities
        # @param model_id [String] the model identifier
        # @return [Boolean] true if the model supports vision
        def supports_vision?(model_id)
          # All Claude 3, 3.5, and 3.7 models support vision
          !model_id.match?(/claude-[12]/)
        end

        # Determines if a model supports function calling
        # @param model_id [String] the model identifier
        # @return [Boolean] true if the model supports functions
        def supports_functions?(model_id)
          model_id.match?(/claude-3/)
        end

        # Determines if a model supports JSON mode
        # @param model_id [String] the model identifier
        # @return [Boolean] true if the model supports JSON mode
        def supports_json_mode?(model_id)
          model_id.match?(/claude-3/)
        end

        # Determines if a model supports extended thinking
        # @param model_id [String] the model identifier
        # @return [Boolean] true if the model supports extended thinking
        def supports_extended_thinking?(model_id)
          model_id.match?(/claude-3-7-sonnet/)
        end

        # Determines the model family for a given model ID
        # @param model_id [String] the model identifier
        # @return [String] the model family identifier
        def model_family(model_id)
          case model_id
          when /claude-3-7-sonnet/ then 'claude-3-7-sonnet'
          when /claude-3-5-sonnet/ then 'claude-3-5-sonnet'
          when /claude-3-5-haiku/ then 'claude-3-5-haiku'
          when /claude-3-opus/ then 'claude-3-opus'
          when /claude-3-sonnet/ then 'claude-3-sonnet'
          when /claude-3-haiku/ then 'claude-3-haiku'
          else 'claude-2'
          end
        end

        # Returns the model type
        # @param model_id [String] the model identifier (unused but kept for API consistency)
        # @return [String] the model type, always 'chat' for Anthropic models
        def model_type(_)
          'chat'
        end

        # Pricing information for Anthropic models (per million tokens)
        PRICES = {
          'claude-3-7-sonnet': { input: 3.0, output: 15.0 },
          'claude-3-5-sonnet': { input: 3.0, output: 15.0 },
          'claude-3-5-haiku': { input: 0.80, output: 4.0 },
          'claude-3-opus': { input: 15.0, output: 75.0 },
          'claude-3-haiku': { input: 0.25, output: 1.25 },
          'claude-2': { input: 3.0, output: 15.0 }
        }.freeze

        # Default input price if model not found in PRICES
        # @return [Float] default price per million tokens for input
        def default_input_price
          3.0
        end

        # Default output price if model not found in PRICES
        # @return [Float] default price per million tokens for output
        def default_output_price
          15.0
        end

        def modalities_for(model_id)
          modalities = {
            input: ['text'],
            output: ['text']
          }

          # All Claude 3+ models support vision
          unless model_id.match?(/claude-[12]/)
            modalities[:input] << 'image'
            modalities[:input] << 'pdf'
          end

          modalities
        end

        def capabilities_for(model_id)
          capabilities = ['streaming']

          # Function calling for Claude 3+
          if model_id.match?(/claude-3/)
            capabilities << 'function_calling'
            capabilities << 'batch'
          end

          # Extended thinking (reasoning) for Claude 3.7
          capabilities << 'reasoning' if model_id.match?(/claude-3-7/)

          # Citations
          capabilities << 'citations' if model_id.match?(/claude-3\.5|claude-3-7/)

          capabilities
        end

        def pricing_for(model_id)
          family = model_family(model_id)
          prices = PRICES.fetch(family.to_sym, { input: default_input_price, output: default_output_price })

          standard_pricing = {
            input_per_million: prices[:input],
            output_per_million: prices[:output]
          }

          # Batch is typically half the price
          batch_pricing = {
            input_per_million: prices[:input] * 0.5,
            output_per_million: prices[:output] * 0.5
          }

          # Add reasoning output pricing for 3.7 models
          if model_id.match?(/claude-3-7/)
            standard_pricing[:reasoning_output_per_million] = prices[:output] * 2.5
            batch_pricing[:reasoning_output_per_million] = prices[:output] * 1.25
          end

          {
            text_tokens: {
              standard: standard_pricing,
              batch: batch_pricing
            }
          }
        end
      end
    end
  end
end
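Because `module_function` exposes these helpers on the module itself, they can be exercised directly from a console. A minimal sketch (the model IDs are illustrative, and it assumes the gem's `ruby_llm` entry point is loadable):

require 'ruby_llm'

caps = RubyLLM::Providers::Anthropic::Capabilities

caps.model_family('claude-3-5-haiku-20241022')            # => "claude-3-5-haiku"
caps.determine_max_tokens('claude-3-5-sonnet-latest')      # => 8192
caps.supports_extended_thinking?('claude-3-opus-latest')   # => false

# pricing_for folds the PRICES table into the shape the model registry expects;
# the 3.7 family also gets reasoning-output rates (2.5x / 1.25x the output rate).
caps.pricing_for('claude-3-7-sonnet-latest')
# => { text_tokens: {
#        standard: { input_per_million: 3.0, output_per_million: 15.0,
#                    reasoning_output_per_million: 37.5 },
#        batch:    { input_per_million: 1.5, output_per_million: 7.5,
#                    reasoning_output_per_million: 18.75 } } }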
data/lib/ruby_llm/providers/anthropic/chat.rb
@@ -0,0 +1,106 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Chat methods of the Anthropic API integration
      module Chat
        module_function

        def completion_url
          '/v1/messages'
        end

        def render_payload(messages, tools:, temperature:, model:, stream: false, schema: nil) # rubocop:disable Metrics/ParameterLists,Lint/UnusedMethodArgument
          system_messages, chat_messages = separate_messages(messages)
          system_content = build_system_content(system_messages)

          build_base_payload(chat_messages, model, stream).tap do |payload|
            add_optional_fields(payload, system_content:, tools:, temperature:)
          end
        end

        def separate_messages(messages)
          messages.partition { |msg| msg.role == :system }
        end

        def build_system_content(system_messages)
          if system_messages.length > 1
            RubyLLM.logger.warn(
              "Anthropic's Claude implementation only supports a single system message. " \
              'Multiple system messages will be combined into one.'
            )
          end

          system_messages.map(&:content).join("\n\n")
        end

        def build_base_payload(chat_messages, model, stream)
          {
            model: model,
            messages: chat_messages.map { |msg| format_message(msg) },
            stream: stream,
            max_tokens: RubyLLM.models.find(model)&.max_tokens || 4096
          }
        end

        def add_optional_fields(payload, system_content:, tools:, temperature:)
          payload[:tools] = tools.values.map { |t| Tools.function_for(t) } if tools.any?
          payload[:system] = system_content unless system_content.empty?
          payload[:temperature] = temperature unless temperature.nil?
        end

        def parse_completion_response(response)
          data = response.body
          content_blocks = data['content'] || []

          text_content = extract_text_content(content_blocks)
          tool_use_blocks = Tools.find_tool_uses(content_blocks)

          build_message(data, text_content, tool_use_blocks, response)
        end

        def extract_text_content(blocks)
          text_blocks = blocks.select { |c| c['type'] == 'text' }
          text_blocks.map { |c| c['text'] }.join
        end

        def build_message(data, content, tool_use_blocks, response)
          Message.new(
            role: :assistant,
            content: content,
            tool_calls: Tools.parse_tool_calls(tool_use_blocks),
            input_tokens: data.dig('usage', 'input_tokens'),
            output_tokens: data.dig('usage', 'output_tokens'),
            model_id: data['model'],
            raw: response
          )
        end

        def format_message(msg)
          if msg.tool_call?
            Tools.format_tool_call(msg)
          elsif msg.tool_result?
            Tools.format_tool_result(msg)
          else
            format_basic_message(msg)
          end
        end

        def format_basic_message(msg)
          {
            role: convert_role(msg.role),
            content: Media.format_content(msg.content)
          }
        end

        def convert_role(role)
          case role
          when :tool, :user then 'user'
          else 'assistant'
          end
        end
      end
    end
  end
end
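The payload builders are also module functions, so the system-message handling can be checked in isolation. A rough sketch; `Msg` is a stand-in for `RubyLLM::Message`, not part of the gem:

require 'ruby_llm'

Msg  = Struct.new(:role, :content)
chat = RubyLLM::Providers::Anthropic::Chat

system, rest = chat.separate_messages(
  [Msg.new(:system, 'Be terse.'), Msg.new(:user, 'Hi'), Msg.new(:system, 'Answer in French.')]
)

rest.map(&:role)                  # => [:user]
chat.build_system_content(system) # => "Be terse.\n\nAnswer in French."
# (also logs a warning that multiple system messages were combined into one)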
data/lib/ruby_llm/providers/anthropic/embeddings.rb
@@ -0,0 +1,20 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Embeddings methods of the Anthropic API integration
      module Embeddings
        private

        def embed
          raise Error, "Anthropic doesn't support embeddings"
        end

        alias render_embedding_payload embed
        alias embedding_url embed
        alias parse_embedding_response embed
      end
    end
  end
end
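Because all three embedding hooks alias a method that raises, routing an embedding request to this provider should fail fast. A hypothetical console check; `RubyLLM.embed` and its keyword arguments are assumed from the gem's top-level API rather than shown in this diff:

RubyLLM.embed('hello world', model: 'claude-3-5-haiku-20241022')
# expected: raises RubyLLM::Error, "Anthropic doesn't support embeddings"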
data/lib/ruby_llm/providers/anthropic/media.rb
@@ -0,0 +1,92 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Handles formatting of media content (images, PDFs, audio) for Anthropic
      module Media
        module_function

        def format_content(content)
          # Convert Hash/Array back to JSON string for API
          return [format_text(content.to_json)] if content.is_a?(Hash) || content.is_a?(Array)
          return [format_text(content)] unless content.is_a?(Content)

          parts = []
          parts << format_text(content.text) if content.text

          content.attachments.each do |attachment|
            case attachment.type
            when :image
              parts << format_image(attachment)
            when :pdf
              parts << format_pdf(attachment)
            when :text
              parts << format_text_file(attachment)
            else
              raise UnsupportedAttachmentError, attachment.mime_type
            end
          end

          parts
        end

        def format_text(text)
          {
            type: 'text',
            text: text
          }
        end

        def format_image(image)
          if image.url?
            {
              type: 'image',
              source: {
                type: 'url',
                url: image.source
              }
            }
          else
            {
              type: 'image',
              source: {
                type: 'base64',
                media_type: image.mime_type,
                data: image.encoded
              }
            }
          end
        end

        def format_pdf(pdf)
          if pdf.url?
            {
              type: 'document',
              source: {
                type: 'url',
                url: pdf.source
              }
            }
          else
            {
              type: 'document',
              source: {
                type: 'base64',
                media_type: pdf.mime_type,
                data: pdf.encoded
              }
            }
          end
        end

        def format_text_file(text_file)
          {
            type: 'text',
            text: Utils.format_text_file_for_llm(text_file)
          }
        end
      end
    end
  end
end
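Plain strings and Hash/Array content are normalised into Anthropic `text` blocks before any attachment handling kicks in. A small sketch of those fallback paths (attachment formatting needs real `Attachment` objects and is omitted here):

require 'ruby_llm'

media = RubyLLM::Providers::Anthropic::Media

media.format_content('Hello')            # => [{ type: 'text', text: 'Hello' }]
media.format_content({ city: 'Berlin' }) # => [{ type: 'text', text: '{"city":"Berlin"}' }]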
data/lib/ruby_llm/providers/anthropic/models.rb
@@ -0,0 +1,48 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Models methods of the Anthropic API integration
      module Models
        module_function

        def models_url
          '/v1/models'
        end

        def parse_list_models_response(response, slug, capabilities)
          Array(response.body['data']).map do |model_data|
            model_id = model_data['id']

            Model::Info.new(
              id: model_id,
              name: model_data['display_name'],
              provider: slug,
              family: capabilities.model_family(model_id),
              created_at: Time.parse(model_data['created_at']),
              context_window: capabilities.determine_context_window(model_id),
              max_output_tokens: capabilities.determine_max_tokens(model_id),
              modalities: capabilities.modalities_for(model_id),
              capabilities: capabilities.capabilities_for(model_id),
              pricing: capabilities.pricing_for(model_id),
              metadata: {}
            )
          end
        end

        def extract_model_id(data)
          data.dig('message', 'model')
        end

        def extract_input_tokens(data)
          data.dig('message', 'usage', 'input_tokens')
        end

        def extract_output_tokens(data)
          data.dig('message', 'usage', 'output_tokens') || data.dig('usage', 'output_tokens')
        end
      end
    end
  end
end
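`parse_list_models_response` only needs something that responds to `#body`, so a canned `/v1/models` payload can be pushed through it to see how the Capabilities helpers are stitched onto each entry. Sketch only: `FakeResponse` stands in for the Faraday response the gem normally passes, and the `Model::Info` attribute readers are assumed.

require 'ruby_llm'
require 'time'

FakeResponse = Struct.new(:body)
body = { 'data' => [{ 'id' => 'claude-3-5-haiku-20241022',
                      'display_name' => 'Claude 3.5 Haiku',
                      'created_at' => '2024-10-22T00:00:00Z' }] }

models = RubyLLM::Providers::Anthropic::Models.parse_list_models_response(
  FakeResponse.new(body), 'anthropic', RubyLLM::Providers::Anthropic::Capabilities
)

models.first.context_window    # => 200000
models.first.max_output_tokens # => 8192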
data/lib/ruby_llm/providers/anthropic/streaming.rb
@@ -0,0 +1,43 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Streaming methods of the Anthropic API integration
      module Streaming
        private

        def stream_url
          completion_url
        end

        def build_chunk(data)
          Chunk.new(
            role: :assistant,
            model_id: extract_model_id(data),
            content: data.dig('delta', 'text'),
            input_tokens: extract_input_tokens(data),
            output_tokens: extract_output_tokens(data),
            tool_calls: extract_tool_calls(data)
          )
        end

        def json_delta?(data)
          data['type'] == 'content_block_delta' && data.dig('delta', 'type') == 'input_json_delta'
        end

        def parse_streaming_error(data)
          error_data = JSON.parse(data)
          return unless error_data['type'] == 'error'

          case error_data.dig('error', 'type')
          when 'overloaded_error'
            [529, error_data['error']['message']]
          else
            [500, error_data['error']['message']]
          end
        end
      end
    end
  end
end
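These helpers are private instance methods once the module is mixed into the provider, so poking at them from a console needs an explicit `extend`/`send`. Illustration only, not how the gem calls them:

require 'ruby_llm'

harness = Object.new.extend(RubyLLM::Providers::Anthropic::Streaming)

delta = { 'type' => 'content_block_delta',
          'delta' => { 'type' => 'input_json_delta', 'partial_json' => '{"cit' } }
harness.send(:json_delta?, delta) # => true

error = '{"type":"error","error":{"type":"overloaded_error","message":"Overloaded"}}'
harness.send(:parse_streaming_error, error) # => [529, "Overloaded"]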
data/lib/ruby_llm/providers/anthropic/tools.rb
@@ -0,0 +1,108 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Tools methods of the Anthropic API integration
      module Tools
        module_function

        def find_tool_uses(blocks)
          blocks.select { |c| c['type'] == 'tool_use' }
        end

        def format_tool_call(msg)
          content = []

          content << Media.format_text(msg.content) unless msg.content.nil? || msg.content.empty?

          msg.tool_calls.each_value do |tool_call|
            content << format_tool_use_block(tool_call)
          end

          {
            role: 'assistant',
            content:
          }
        end

        def format_tool_result(msg)
          {
            role: 'user',
            content: [format_tool_result_block(msg)]
          }
        end

        def format_tool_use_block(tool_call)
          {
            type: 'tool_use',
            id: tool_call.id,
            name: tool_call.name,
            input: tool_call.arguments
          }
        end

        def format_tool_result_block(msg)
          {
            type: 'tool_result',
            tool_use_id: msg.tool_call_id,
            content: msg.content
          }
        end

        def function_for(tool)
          {
            name: tool.name,
            description: tool.description,
            input_schema: {
              type: 'object',
              properties: clean_parameters(tool.parameters),
              required: required_parameters(tool.parameters)
            }
          }
        end

        def extract_tool_calls(data)
          if json_delta?(data)
            { nil => ToolCall.new(id: nil, name: nil, arguments: data.dig('delta', 'partial_json')) }
          else
            parse_tool_calls(data['content_block'])
          end
        end

        def parse_tool_calls(content_blocks)
          return nil if content_blocks.nil?

          # Handle single content block (backward compatibility)
          content_blocks = [content_blocks] unless content_blocks.is_a?(Array)

          tool_calls = {}
          content_blocks.each do |block|
            next unless block && block['type'] == 'tool_use'

            tool_calls[block['id']] = ToolCall.new(
              id: block['id'],
              name: block['name'],
              arguments: block['input']
            )
          end

          tool_calls.empty? ? nil : tool_calls
        end

        def clean_parameters(parameters)
          parameters.transform_values do |param|
            {
              type: param.type,
              description: param.description
            }.compact
          end
        end

        def required_parameters(parameters)
          parameters.select { |_, param| param.required }.keys
        end
      end
    end
  end
end
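`function_for` translates a `RubyLLM::Tool` into Anthropic's `input_schema` format. A hedged sketch using the Tool DSL as documented in the gem's README; the derived tool name, the default parameter type, and the required-by-default behaviour are assumptions, not shown in this diff:

require 'ruby_llm'

class Weather < RubyLLM::Tool
  description 'Gets current weather for a location'
  param :latitude,  desc: 'Latitude (e.g., 52.5200)'
  param :longitude, desc: 'Longitude (e.g., 13.4050)'

  def execute(latitude:, longitude:)
    { latitude: latitude, longitude: longitude, temperature_c: 21.0 } # stubbed result
  end
end

RubyLLM::Providers::Anthropic::Tools.function_for(Weather.new)
# => { name: 'weather',
#      description: 'Gets current weather for a location',
#      input_schema: { type: 'object',
#                      properties: { latitude:  { type: 'string', description: 'Latitude (e.g., 52.5200)' },
#                                    longitude: { type: 'string', description: 'Longitude (e.g., 13.4050)' } },
#                      required: [:latitude, :longitude] } }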
data/lib/ruby_llm/providers/anthropic.rb
@@ -0,0 +1,37 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    # Anthropic Claude API integration. Handles the complexities of
    # Claude's unique message format and tool calling conventions.
    class Anthropic < Provider
      include Anthropic::Chat
      include Anthropic::Embeddings
      include Anthropic::Media
      include Anthropic::Models
      include Anthropic::Streaming
      include Anthropic::Tools

      def api_base
        'https://api.anthropic.com'
      end

      def headers
        {
          'x-api-key' => @config.anthropic_api_key,
          'anthropic-version' => '2023-06-01'
        }
      end

      class << self
        def capabilities
          Anthropic::Capabilities
        end

        def configuration_requirements
          %i[anthropic_api_key]
        end
      end
    end
  end
end
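End to end, the provider above is selected by configuring an Anthropic key and asking for a Claude model through the gem's top-level API. A sketch based on the README; the model name and environment variable are placeholders:

require 'ruby_llm'

RubyLLM.configure do |config|
  config.anthropic_api_key = ENV.fetch('ANTHROPIC_API_KEY')
end

chat = RubyLLM.chat(model: 'claude-3-5-haiku-20241022')
response = chat.ask('Say hello in one word')
puts response.content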