dify_llm 1.6.4
This diff shows the content of publicly released versions of this package, as published to the supported registries. It is provided for informational purposes only and reflects the differences between those published versions.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +157 -0
- data/lib/generators/ruby_llm/install/templates/chat_model.rb.tt +3 -0
- data/lib/generators/ruby_llm/install/templates/create_chats_legacy_migration.rb.tt +8 -0
- data/lib/generators/ruby_llm/install/templates/create_chats_migration.rb.tt +8 -0
- data/lib/generators/ruby_llm/install/templates/create_messages_legacy_migration.rb.tt +16 -0
- data/lib/generators/ruby_llm/install/templates/create_messages_migration.rb.tt +16 -0
- data/lib/generators/ruby_llm/install/templates/create_models_migration.rb.tt +43 -0
- data/lib/generators/ruby_llm/install/templates/create_tool_calls_migration.rb.tt +15 -0
- data/lib/generators/ruby_llm/install/templates/initializer.rb.tt +9 -0
- data/lib/generators/ruby_llm/install/templates/message_model.rb.tt +4 -0
- data/lib/generators/ruby_llm/install/templates/model_model.rb.tt +3 -0
- data/lib/generators/ruby_llm/install/templates/tool_call_model.rb.tt +3 -0
- data/lib/generators/ruby_llm/install_generator.rb +184 -0
- data/lib/generators/ruby_llm/migrate_model_fields/templates/migration.rb.tt +142 -0
- data/lib/generators/ruby_llm/migrate_model_fields_generator.rb +84 -0
- data/lib/ruby_llm/active_record/acts_as.rb +137 -0
- data/lib/ruby_llm/active_record/acts_as_legacy.rb +398 -0
- data/lib/ruby_llm/active_record/chat_methods.rb +315 -0
- data/lib/ruby_llm/active_record/message_methods.rb +72 -0
- data/lib/ruby_llm/active_record/model_methods.rb +84 -0
- data/lib/ruby_llm/aliases.json +274 -0
- data/lib/ruby_llm/aliases.rb +38 -0
- data/lib/ruby_llm/attachment.rb +191 -0
- data/lib/ruby_llm/chat.rb +212 -0
- data/lib/ruby_llm/chunk.rb +6 -0
- data/lib/ruby_llm/configuration.rb +69 -0
- data/lib/ruby_llm/connection.rb +137 -0
- data/lib/ruby_llm/content.rb +50 -0
- data/lib/ruby_llm/context.rb +29 -0
- data/lib/ruby_llm/embedding.rb +29 -0
- data/lib/ruby_llm/error.rb +76 -0
- data/lib/ruby_llm/image.rb +49 -0
- data/lib/ruby_llm/message.rb +76 -0
- data/lib/ruby_llm/mime_type.rb +67 -0
- data/lib/ruby_llm/model/info.rb +103 -0
- data/lib/ruby_llm/model/modalities.rb +22 -0
- data/lib/ruby_llm/model/pricing.rb +48 -0
- data/lib/ruby_llm/model/pricing_category.rb +46 -0
- data/lib/ruby_llm/model/pricing_tier.rb +33 -0
- data/lib/ruby_llm/model.rb +7 -0
- data/lib/ruby_llm/models.json +31418 -0
- data/lib/ruby_llm/models.rb +235 -0
- data/lib/ruby_llm/models_schema.json +168 -0
- data/lib/ruby_llm/provider.rb +215 -0
- data/lib/ruby_llm/providers/anthropic/capabilities.rb +134 -0
- data/lib/ruby_llm/providers/anthropic/chat.rb +106 -0
- data/lib/ruby_llm/providers/anthropic/embeddings.rb +20 -0
- data/lib/ruby_llm/providers/anthropic/media.rb +91 -0
- data/lib/ruby_llm/providers/anthropic/models.rb +48 -0
- data/lib/ruby_llm/providers/anthropic/streaming.rb +43 -0
- data/lib/ruby_llm/providers/anthropic/tools.rb +107 -0
- data/lib/ruby_llm/providers/anthropic.rb +36 -0
- data/lib/ruby_llm/providers/bedrock/capabilities.rb +167 -0
- data/lib/ruby_llm/providers/bedrock/chat.rb +63 -0
- data/lib/ruby_llm/providers/bedrock/media.rb +60 -0
- data/lib/ruby_llm/providers/bedrock/models.rb +98 -0
- data/lib/ruby_llm/providers/bedrock/signing.rb +831 -0
- data/lib/ruby_llm/providers/bedrock/streaming/base.rb +51 -0
- data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +56 -0
- data/lib/ruby_llm/providers/bedrock/streaming/message_processing.rb +67 -0
- data/lib/ruby_llm/providers/bedrock/streaming/payload_processing.rb +78 -0
- data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +78 -0
- data/lib/ruby_llm/providers/bedrock/streaming.rb +18 -0
- data/lib/ruby_llm/providers/bedrock.rb +82 -0
- data/lib/ruby_llm/providers/deepseek/capabilities.rb +130 -0
- data/lib/ruby_llm/providers/deepseek/chat.rb +16 -0
- data/lib/ruby_llm/providers/deepseek.rb +30 -0
- data/lib/ruby_llm/providers/dify/capabilities.rb +16 -0
- data/lib/ruby_llm/providers/dify/chat.rb +59 -0
- data/lib/ruby_llm/providers/dify/media.rb +37 -0
- data/lib/ruby_llm/providers/dify/streaming.rb +28 -0
- data/lib/ruby_llm/providers/dify.rb +48 -0
- data/lib/ruby_llm/providers/gemini/capabilities.rb +276 -0
- data/lib/ruby_llm/providers/gemini/chat.rb +171 -0
- data/lib/ruby_llm/providers/gemini/embeddings.rb +37 -0
- data/lib/ruby_llm/providers/gemini/images.rb +47 -0
- data/lib/ruby_llm/providers/gemini/media.rb +54 -0
- data/lib/ruby_llm/providers/gemini/models.rb +40 -0
- data/lib/ruby_llm/providers/gemini/streaming.rb +61 -0
- data/lib/ruby_llm/providers/gemini/tools.rb +77 -0
- data/lib/ruby_llm/providers/gemini.rb +36 -0
- data/lib/ruby_llm/providers/gpustack/chat.rb +27 -0
- data/lib/ruby_llm/providers/gpustack/media.rb +45 -0
- data/lib/ruby_llm/providers/gpustack/models.rb +90 -0
- data/lib/ruby_llm/providers/gpustack.rb +34 -0
- data/lib/ruby_llm/providers/mistral/capabilities.rb +155 -0
- data/lib/ruby_llm/providers/mistral/chat.rb +24 -0
- data/lib/ruby_llm/providers/mistral/embeddings.rb +33 -0
- data/lib/ruby_llm/providers/mistral/models.rb +48 -0
- data/lib/ruby_llm/providers/mistral.rb +32 -0
- data/lib/ruby_llm/providers/ollama/chat.rb +27 -0
- data/lib/ruby_llm/providers/ollama/media.rb +45 -0
- data/lib/ruby_llm/providers/ollama/models.rb +36 -0
- data/lib/ruby_llm/providers/ollama.rb +30 -0
- data/lib/ruby_llm/providers/openai/capabilities.rb +291 -0
- data/lib/ruby_llm/providers/openai/chat.rb +83 -0
- data/lib/ruby_llm/providers/openai/embeddings.rb +33 -0
- data/lib/ruby_llm/providers/openai/images.rb +38 -0
- data/lib/ruby_llm/providers/openai/media.rb +80 -0
- data/lib/ruby_llm/providers/openai/models.rb +39 -0
- data/lib/ruby_llm/providers/openai/streaming.rb +41 -0
- data/lib/ruby_llm/providers/openai/tools.rb +78 -0
- data/lib/ruby_llm/providers/openai.rb +42 -0
- data/lib/ruby_llm/providers/openrouter/models.rb +73 -0
- data/lib/ruby_llm/providers/openrouter.rb +26 -0
- data/lib/ruby_llm/providers/perplexity/capabilities.rb +137 -0
- data/lib/ruby_llm/providers/perplexity/chat.rb +16 -0
- data/lib/ruby_llm/providers/perplexity/models.rb +42 -0
- data/lib/ruby_llm/providers/perplexity.rb +48 -0
- data/lib/ruby_llm/providers/vertexai/chat.rb +14 -0
- data/lib/ruby_llm/providers/vertexai/embeddings.rb +32 -0
- data/lib/ruby_llm/providers/vertexai/models.rb +130 -0
- data/lib/ruby_llm/providers/vertexai/streaming.rb +14 -0
- data/lib/ruby_llm/providers/vertexai.rb +55 -0
- data/lib/ruby_llm/railtie.rb +41 -0
- data/lib/ruby_llm/stream_accumulator.rb +97 -0
- data/lib/ruby_llm/streaming.rb +153 -0
- data/lib/ruby_llm/tool.rb +83 -0
- data/lib/ruby_llm/tool_call.rb +22 -0
- data/lib/ruby_llm/utils.rb +45 -0
- data/lib/ruby_llm/version.rb +5 -0
- data/lib/ruby_llm.rb +97 -0
- data/lib/tasks/models.rake +525 -0
- data/lib/tasks/release.rake +67 -0
- data/lib/tasks/ruby_llm.rake +15 -0
- data/lib/tasks/vcr.rake +92 -0
- metadata +291 -0
data/lib/ruby_llm/providers/anthropic/capabilities.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Determines capabilities and pricing for Anthropic models
+      module Capabilities
+        module_function
+
+        def determine_context_window(_model_id)
+          200_000
+        end
+
+        def determine_max_tokens(model_id)
+          case model_id
+          when /claude-3-7-sonnet/, /claude-3-5/ then 8_192
+          else 4_096
+          end
+        end
+
+        def get_input_price(model_id)
+          PRICES.dig(model_family(model_id), :input) || default_input_price
+        end
+
+        def get_output_price(model_id)
+          PRICES.dig(model_family(model_id), :output) || default_output_price
+        end
+
+        def supports_vision?(model_id)
+          !model_id.match?(/claude-[12]/)
+        end
+
+        def supports_functions?(model_id)
+          model_id.match?(/claude-3/)
+        end
+
+        def supports_json_mode?(model_id)
+          model_id.match?(/claude-3/)
+        end
+
+        def supports_extended_thinking?(model_id)
+          model_id.match?(/claude-3-7-sonnet/)
+        end
+
+        def model_family(model_id)
+          case model_id
+          when /claude-3-7-sonnet/ then 'claude-3-7-sonnet'
+          when /claude-3-5-sonnet/ then 'claude-3-5-sonnet'
+          when /claude-3-5-haiku/ then 'claude-3-5-haiku'
+          when /claude-3-opus/ then 'claude-3-opus'
+          when /claude-3-sonnet/ then 'claude-3-sonnet'
+          when /claude-3-haiku/ then 'claude-3-haiku'
+          else 'claude-2'
+          end
+        end
+
+        def model_type(_)
+          'chat'
+        end
+
+        PRICES = {
+          'claude-3-7-sonnet': { input: 3.0, output: 15.0 },
+          'claude-3-5-sonnet': { input: 3.0, output: 15.0 },
+          'claude-3-5-haiku': { input: 0.80, output: 4.0 },
+          'claude-3-opus': { input: 15.0, output: 75.0 },
+          'claude-3-haiku': { input: 0.25, output: 1.25 },
+          'claude-2': { input: 3.0, output: 15.0 }
+        }.freeze
+
+        def default_input_price
+          3.0
+        end
+
+        def default_output_price
+          15.0
+        end
+
+        def modalities_for(model_id)
+          modalities = {
+            input: ['text'],
+            output: ['text']
+          }
+
+          unless model_id.match?(/claude-[12]/)
+            modalities[:input] << 'image'
+            modalities[:input] << 'pdf'
+          end
+
+          modalities
+        end
+
+        def capabilities_for(model_id)
+          capabilities = ['streaming']
+
+          if model_id.match?(/claude-3/)
+            capabilities << 'function_calling'
+            capabilities << 'batch'
+          end
+
+          capabilities << 'reasoning' if model_id.match?(/claude-3-7|-4/)
+          capabilities << 'citations' if model_id.match?(/claude-3\.5|claude-3-7/)
+          capabilities
+        end
+
+        def pricing_for(model_id)
+          family = model_family(model_id)
+          prices = PRICES.fetch(family.to_sym, { input: default_input_price, output: default_output_price })
+
+          standard_pricing = {
+            input_per_million: prices[:input],
+            output_per_million: prices[:output]
+          }
+
+          batch_pricing = {
+            input_per_million: prices[:input] * 0.5,
+            output_per_million: prices[:output] * 0.5
+          }
+
+          if model_id.match?(/claude-3-7/)
+            standard_pricing[:reasoning_output_per_million] = prices[:output] * 2.5
+            batch_pricing[:reasoning_output_per_million] = prices[:output] * 1.25
+          end
+
+          {
+            text_tokens: {
+              standard: standard_pricing,
+              batch: batch_pricing
+            }
+          }
+        end
+      end
+    end
+  end
+end
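For orientation, a minimal sketch of what pricing_for returns for a Claude 3.7 Sonnet id, worked out from the PRICES table and multipliers above; the specific model id string is only an example.

require 'ruby_llm'

# Any id matching /claude-3-7-sonnet/ resolves to the 'claude-3-7-sonnet'
# family (input 3.0 / output 15.0 per million tokens).
RubyLLM::Providers::Anthropic::Capabilities.pricing_for('claude-3-7-sonnet-20250219')
# => {
#      text_tokens: {
#        standard: { input_per_million: 3.0, output_per_million: 15.0,
#                    reasoning_output_per_million: 37.5 },  # 15.0 * 2.5
#        batch: { input_per_million: 1.5, output_per_million: 7.5,
#                 reasoning_output_per_million: 18.75 }     # 15.0 * 1.25
#      }
#    }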
data/lib/ruby_llm/providers/anthropic/chat.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Chat methods of the OpenAI API integration
+      module Chat
+        module_function
+
+        def completion_url
+          '/v1/messages'
+        end
+
+        def render_payload(messages, tools:, temperature:, model:, stream: false, schema: nil) # rubocop:disable Metrics/ParameterLists,Lint/UnusedMethodArgument
+          system_messages, chat_messages = separate_messages(messages)
+          system_content = build_system_content(system_messages)
+
+          build_base_payload(chat_messages, model, stream).tap do |payload|
+            add_optional_fields(payload, system_content:, tools:, temperature:)
+          end
+        end
+
+        def separate_messages(messages)
+          messages.partition { |msg| msg.role == :system }
+        end
+
+        def build_system_content(system_messages)
+          if system_messages.length > 1
+            RubyLLM.logger.warn(
+              "Anthropic's Claude implementation only supports a single system message. " \
+              'Multiple system messages will be combined into one.'
+            )
+          end
+
+          system_messages.map(&:content).join("\n\n")
+        end
+
+        def build_base_payload(chat_messages, model, stream)
+          {
+            model: model.id,
+            messages: chat_messages.map { |msg| format_message(msg) },
+            stream: stream,
+            max_tokens: model.max_tokens || 4096
+          }
+        end
+
+        def add_optional_fields(payload, system_content:, tools:, temperature:)
+          payload[:tools] = tools.values.map { |t| Tools.function_for(t) } if tools.any?
+          payload[:system] = system_content unless system_content.empty?
+          payload[:temperature] = temperature unless temperature.nil?
+        end
+
+        def parse_completion_response(response)
+          data = response.body
+          content_blocks = data['content'] || []
+
+          text_content = extract_text_content(content_blocks)
+          tool_use_blocks = Tools.find_tool_uses(content_blocks)
+
+          build_message(data, text_content, tool_use_blocks, response)
+        end
+
+        def extract_text_content(blocks)
+          text_blocks = blocks.select { |c| c['type'] == 'text' }
+          text_blocks.map { |c| c['text'] }.join
+        end
+
+        def build_message(data, content, tool_use_blocks, response)
+          Message.new(
+            role: :assistant,
+            content: content,
+            tool_calls: Tools.parse_tool_calls(tool_use_blocks),
+            input_tokens: data.dig('usage', 'input_tokens'),
+            output_tokens: data.dig('usage', 'output_tokens'),
+            model_id: data['model'],
+            raw: response
+          )
+        end
+
+        def format_message(msg)
+          if msg.tool_call?
+            Tools.format_tool_call(msg)
+          elsif msg.tool_result?
+            Tools.format_tool_result(msg)
+          else
+            format_basic_message(msg)
+          end
+        end
+
+        def format_basic_message(msg)
+          {
+            role: convert_role(msg.role),
+            content: Media.format_content(msg.content)
+          }
+        end
+
+        def convert_role(role)
+          case role
+          when :tool, :user then 'user'
+          else 'assistant'
+          end
+        end
+      end
+    end
+  end
+end
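Taken together, render_payload produces the JSON body posted to /v1/messages. A rough sketch of its shape for a single user message with no tools or schema, assuming the model reports no max_tokens of its own; the model id and text are placeholders:

{
  model: 'claude-3-5-haiku-latest',  # model.id (placeholder)
  messages: [
    { role: 'user', content: [{ type: 'text', text: 'Hello, Claude' }] }
  ],
  stream: false,
  max_tokens: 4096,   # model.max_tokens || 4096
  temperature: 0.7    # added only when a temperature is set
  # system: '...'     # added only when system messages are present
}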
data/lib/ruby_llm/providers/anthropic/embeddings.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Embeddings methods of the Anthropic API integration
+      module Embeddings
+        private
+
+        def embed
+          raise Error "Anthropic doesn't support embeddings"
+        end
+
+        alias render_embedding_payload embed
+        alias embedding_url embed
+        alias parse_embedding_response embed
+      end
+    end
+  end
+end
data/lib/ruby_llm/providers/anthropic/media.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Handles formatting of media content (images, PDFs, audio) for Anthropic
+      module Media
+        module_function
+
+        def format_content(content)
+          return [format_text(content.to_json)] if content.is_a?(Hash) || content.is_a?(Array)
+          return [format_text(content)] unless content.is_a?(Content)
+
+          parts = []
+          parts << format_text(content.text) if content.text
+
+          content.attachments.each do |attachment|
+            case attachment.type
+            when :image
+              parts << format_image(attachment)
+            when :pdf
+              parts << format_pdf(attachment)
+            when :text
+              parts << format_text_file(attachment)
+            else
+              raise UnsupportedAttachmentError, attachment.mime_type
+            end
+          end
+
+          parts
+        end
+
+        def format_text(text)
+          {
+            type: 'text',
+            text: text
+          }
+        end
+
+        def format_image(image)
+          if image.url?
+            {
+              type: 'image',
+              source: {
+                type: 'url',
+                url: image.source
+              }
+            }
+          else
+            {
+              type: 'image',
+              source: {
+                type: 'base64',
+                media_type: image.mime_type,
+                data: image.encoded
+              }
+            }
+          end
+        end
+
+        def format_pdf(pdf)
+          if pdf.url?
+            {
+              type: 'document',
+              source: {
+                type: 'url',
+                url: pdf.source
+              }
+            }
+          else
+            {
+              type: 'document',
+              source: {
+                type: 'base64',
+                media_type: pdf.mime_type,
+                data: pdf.encoded
+              }
+            }
+          end
+        end
+
+        def format_text_file(text_file)
+          {
+            type: 'text',
+            text: text_file.for_llm
+          }
+        end
+      end
+    end
+  end
+end
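As a concrete illustration of format_content, a Content carrying a text prompt plus one local image attachment would be rendered as the following block array; the prompt, mime type, and data are placeholders:

[
  { type: 'text', text: 'Describe this image' },
  {
    type: 'image',
    source: {
      type: 'base64',
      media_type: 'image/png',       # attachment.mime_type
      data: '<base64-encoded bytes>' # attachment.encoded
    }
  }
]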
data/lib/ruby_llm/providers/anthropic/models.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Models methods of the Anthropic API integration
+      module Models
+        module_function
+
+        def models_url
+          '/v1/models'
+        end
+
+        def parse_list_models_response(response, slug, capabilities)
+          Array(response.body['data']).map do |model_data|
+            model_id = model_data['id']
+
+            Model::Info.new(
+              id: model_id,
+              name: model_data['display_name'],
+              provider: slug,
+              family: capabilities.model_family(model_id),
+              created_at: Time.parse(model_data['created_at']),
+              context_window: capabilities.determine_context_window(model_id),
+              max_output_tokens: capabilities.determine_max_tokens(model_id),
+              modalities: capabilities.modalities_for(model_id),
+              capabilities: capabilities.capabilities_for(model_id),
+              pricing: capabilities.pricing_for(model_id),
+              metadata: {}
+            )
+          end
+        end
+
+        def extract_model_id(data)
+          data.dig('message', 'model')
+        end
+
+        def extract_input_tokens(data)
+          data.dig('message', 'usage', 'input_tokens')
+        end
+
+        def extract_output_tokens(data)
+          data.dig('message', 'usage', 'output_tokens') || data.dig('usage', 'output_tokens')
+        end
+      end
+    end
+  end
+end
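parse_list_models_response only reads 'id', 'display_name', and 'created_at' from each /v1/models entry; everything else on Model::Info is filled in from Capabilities. A hypothetical entry for illustration:

model_data = {
  'id' => 'claude-3-5-haiku-20241022',
  'display_name' => 'Claude 3.5 Haiku',
  'created_at' => '2024-10-22T00:00:00Z'
}
# The resulting Model::Info would carry family 'claude-3-5-haiku',
# context_window 200_000, max_output_tokens 8_192 (the id matches /claude-3-5/),
# plus the modalities, capabilities, and pricing derived by the module above.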
data/lib/ruby_llm/providers/anthropic/streaming.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Streaming methods of the Anthropic API integration
+      module Streaming
+        private
+
+        def stream_url
+          completion_url
+        end
+
+        def build_chunk(data)
+          Chunk.new(
+            role: :assistant,
+            model_id: extract_model_id(data),
+            content: data.dig('delta', 'text'),
+            input_tokens: extract_input_tokens(data),
+            output_tokens: extract_output_tokens(data),
+            tool_calls: extract_tool_calls(data)
+          )
+        end
+
+        def json_delta?(data)
+          data['type'] == 'content_block_delta' && data.dig('delta', 'type') == 'input_json_delta'
+        end
+
+        def parse_streaming_error(data)
+          error_data = JSON.parse(data)
+          return unless error_data['type'] == 'error'
+
+          case error_data.dig('error', 'type')
+          when 'overloaded_error'
+            [529, error_data['error']['message']]
+          else
+            [500, error_data['error']['message']]
+          end
+        end
+      end
+    end
+  end
+end
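To make build_chunk concrete: for a text content_block_delta event (the parsed event hash below is illustrative, not taken from the package), only the role and delta text are populated; model id and token counts come from events that carry a 'message' or 'usage' object.

# Illustrative parsed SSE event:
data = {
  'type' => 'content_block_delta',
  'index' => 0,
  'delta' => { 'type' => 'text_delta', 'text' => 'Hello' }
}
# Inside the provider, build_chunk(data) would yield a Chunk with
# role: :assistant and content: 'Hello'; model_id, input_tokens, and
# output_tokens stay nil, and extract_tool_calls returns nil for a plain
# text delta.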
data/lib/ruby_llm/providers/anthropic/tools.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Anthropic
+      # Tools methods of the Anthropic API integration
+      module Tools
+        module_function
+
+        def find_tool_uses(blocks)
+          blocks.select { |c| c['type'] == 'tool_use' }
+        end
+
+        def format_tool_call(msg)
+          content = []
+
+          content << Media.format_text(msg.content) unless msg.content.nil? || msg.content.empty?
+
+          msg.tool_calls.each_value do |tool_call|
+            content << format_tool_use_block(tool_call)
+          end
+
+          {
+            role: 'assistant',
+            content:
+          }
+        end
+
+        def format_tool_result(msg)
+          {
+            role: 'user',
+            content: [format_tool_result_block(msg)]
+          }
+        end
+
+        def format_tool_use_block(tool_call)
+          {
+            type: 'tool_use',
+            id: tool_call.id,
+            name: tool_call.name,
+            input: tool_call.arguments
+          }
+        end
+
+        def format_tool_result_block(msg)
+          {
+            type: 'tool_result',
+            tool_use_id: msg.tool_call_id,
+            content: Media.format_content(msg.content)
+          }
+        end
+
+        def function_for(tool)
+          {
+            name: tool.name,
+            description: tool.description,
+            input_schema: {
+              type: 'object',
+              properties: clean_parameters(tool.parameters),
+              required: required_parameters(tool.parameters)
+            }
+          }
+        end
+
+        def extract_tool_calls(data)
+          if json_delta?(data)
+            { nil => ToolCall.new(id: nil, name: nil, arguments: data.dig('delta', 'partial_json')) }
+          else
+            parse_tool_calls(data['content_block'])
+          end
+        end
+
+        def parse_tool_calls(content_blocks)
+          return nil if content_blocks.nil?
+
+          content_blocks = [content_blocks] unless content_blocks.is_a?(Array)
+
+          tool_calls = {}
+          content_blocks.each do |block|
+            next unless block && block['type'] == 'tool_use'
+
+            tool_calls[block['id']] = ToolCall.new(
+              id: block['id'],
+              name: block['name'],
+              arguments: block['input']
+            )
+          end
+
+          tool_calls.empty? ? nil : tool_calls
+        end
+
+        def clean_parameters(parameters)
+          parameters.transform_values do |param|
+            {
+              type: param.type,
+              description: param.description
+            }.compact
+          end
+        end
+
+        def required_parameters(parameters)
+          parameters.select { |_, param| param.required }.keys
+        end
+      end
+    end
+  end
+end
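function_for translates a RubyLLM tool definition into Anthropic's tool schema. A sketch under the assumption of a hypothetical weather_tool whose single required city parameter responds to #type, #description, and #required:

RubyLLM::Providers::Anthropic::Tools.function_for(weather_tool)
# => {
#      name: 'get_weather',
#      description: 'Looks up the current weather for a city',
#      input_schema: {
#        type: 'object',
#        properties: { city: { type: 'string', description: 'City name' } },
#        required: [:city]
#      }
#    }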
data/lib/ruby_llm/providers/anthropic.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    # Anthropic Claude API integration.
+    class Anthropic < Provider
+      include Anthropic::Chat
+      include Anthropic::Embeddings
+      include Anthropic::Media
+      include Anthropic::Models
+      include Anthropic::Streaming
+      include Anthropic::Tools
+
+      def api_base
+        'https://api.anthropic.com'
+      end
+
+      def headers
+        {
+          'x-api-key' => @config.anthropic_api_key,
+          'anthropic-version' => '2023-06-01'
+        }
+      end
+
+      class << self
+        def capabilities
+          Anthropic::Capabilities
+        end
+
+        def configuration_requirements
+          %i[anthropic_api_key]
+        end
+      end
+    end
+  end
+end
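The provider above declares anthropic_api_key as its only configuration requirement and sends it via the x-api-key header. A hypothetical end-to-end sketch, assuming this gem keeps the upstream ruby_llm entry points (RubyLLM.configure and RubyLLM.chat); the model id is illustrative:

require 'ruby_llm'

RubyLLM.configure do |config|
  config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
end

chat = RubyLLM.chat(model: 'claude-3-5-haiku-20241022')
puts chat.ask('Summarize this gem in one sentence.').content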