ruby_llm_swarm 1.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (154)
  1. checksums.yaml +7 -0
  2. data/LICENSE +21 -0
  3. data/README.md +175 -0
  4. data/lib/generators/ruby_llm/chat_ui/chat_ui_generator.rb +187 -0
  5. data/lib/generators/ruby_llm/chat_ui/templates/controllers/chats_controller.rb.tt +39 -0
  6. data/lib/generators/ruby_llm/chat_ui/templates/controllers/messages_controller.rb.tt +24 -0
  7. data/lib/generators/ruby_llm/chat_ui/templates/controllers/models_controller.rb.tt +14 -0
  8. data/lib/generators/ruby_llm/chat_ui/templates/jobs/chat_response_job.rb.tt +12 -0
  9. data/lib/generators/ruby_llm/chat_ui/templates/views/chats/_chat.html.erb.tt +16 -0
  10. data/lib/generators/ruby_llm/chat_ui/templates/views/chats/_form.html.erb.tt +29 -0
  11. data/lib/generators/ruby_llm/chat_ui/templates/views/chats/index.html.erb.tt +16 -0
  12. data/lib/generators/ruby_llm/chat_ui/templates/views/chats/new.html.erb.tt +11 -0
  13. data/lib/generators/ruby_llm/chat_ui/templates/views/chats/show.html.erb.tt +23 -0
  14. data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_content.html.erb.tt +1 -0
  15. data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_form.html.erb.tt +21 -0
  16. data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_message.html.erb.tt +13 -0
  17. data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_tool_calls.html.erb.tt +7 -0
  18. data/lib/generators/ruby_llm/chat_ui/templates/views/messages/create.turbo_stream.erb.tt +9 -0
  19. data/lib/generators/ruby_llm/chat_ui/templates/views/models/_model.html.erb.tt +16 -0
  20. data/lib/generators/ruby_llm/chat_ui/templates/views/models/index.html.erb.tt +28 -0
  21. data/lib/generators/ruby_llm/chat_ui/templates/views/models/show.html.erb.tt +18 -0
  22. data/lib/generators/ruby_llm/generator_helpers.rb +194 -0
  23. data/lib/generators/ruby_llm/install/install_generator.rb +106 -0
  24. data/lib/generators/ruby_llm/install/templates/add_references_to_chats_tool_calls_and_messages_migration.rb.tt +9 -0
  25. data/lib/generators/ruby_llm/install/templates/chat_model.rb.tt +3 -0
  26. data/lib/generators/ruby_llm/install/templates/create_chats_migration.rb.tt +7 -0
  27. data/lib/generators/ruby_llm/install/templates/create_messages_migration.rb.tt +16 -0
  28. data/lib/generators/ruby_llm/install/templates/create_models_migration.rb.tt +45 -0
  29. data/lib/generators/ruby_llm/install/templates/create_tool_calls_migration.rb.tt +20 -0
  30. data/lib/generators/ruby_llm/install/templates/initializer.rb.tt +12 -0
  31. data/lib/generators/ruby_llm/install/templates/message_model.rb.tt +4 -0
  32. data/lib/generators/ruby_llm/install/templates/model_model.rb.tt +3 -0
  33. data/lib/generators/ruby_llm/install/templates/tool_call_model.rb.tt +3 -0
  34. data/lib/generators/ruby_llm/upgrade_to_v1_7/templates/migration.rb.tt +145 -0
  35. data/lib/generators/ruby_llm/upgrade_to_v1_7/upgrade_to_v1_7_generator.rb +124 -0
  36. data/lib/generators/ruby_llm/upgrade_to_v1_9/templates/add_v1_9_message_columns.rb.tt +15 -0
  37. data/lib/generators/ruby_llm/upgrade_to_v1_9/upgrade_to_v1_9_generator.rb +49 -0
  38. data/lib/ruby_llm/active_record/acts_as.rb +174 -0
  39. data/lib/ruby_llm/active_record/acts_as_legacy.rb +384 -0
  40. data/lib/ruby_llm/active_record/chat_methods.rb +350 -0
  41. data/lib/ruby_llm/active_record/message_methods.rb +81 -0
  42. data/lib/ruby_llm/active_record/model_methods.rb +84 -0
  43. data/lib/ruby_llm/aliases.json +295 -0
  44. data/lib/ruby_llm/aliases.rb +38 -0
  45. data/lib/ruby_llm/attachment.rb +220 -0
  46. data/lib/ruby_llm/chat.rb +816 -0
  47. data/lib/ruby_llm/chunk.rb +6 -0
  48. data/lib/ruby_llm/configuration.rb +78 -0
  49. data/lib/ruby_llm/connection.rb +126 -0
  50. data/lib/ruby_llm/content.rb +73 -0
  51. data/lib/ruby_llm/context.rb +29 -0
  52. data/lib/ruby_llm/embedding.rb +29 -0
  53. data/lib/ruby_llm/error.rb +84 -0
  54. data/lib/ruby_llm/image.rb +49 -0
  55. data/lib/ruby_llm/message.rb +86 -0
  56. data/lib/ruby_llm/mime_type.rb +71 -0
  57. data/lib/ruby_llm/model/info.rb +111 -0
  58. data/lib/ruby_llm/model/modalities.rb +22 -0
  59. data/lib/ruby_llm/model/pricing.rb +48 -0
  60. data/lib/ruby_llm/model/pricing_category.rb +46 -0
  61. data/lib/ruby_llm/model/pricing_tier.rb +33 -0
  62. data/lib/ruby_llm/model.rb +7 -0
  63. data/lib/ruby_llm/models.json +33198 -0
  64. data/lib/ruby_llm/models.rb +231 -0
  65. data/lib/ruby_llm/models_schema.json +168 -0
  66. data/lib/ruby_llm/moderation.rb +56 -0
  67. data/lib/ruby_llm/provider.rb +243 -0
  68. data/lib/ruby_llm/providers/anthropic/capabilities.rb +134 -0
  69. data/lib/ruby_llm/providers/anthropic/chat.rb +125 -0
  70. data/lib/ruby_llm/providers/anthropic/content.rb +44 -0
  71. data/lib/ruby_llm/providers/anthropic/embeddings.rb +20 -0
  72. data/lib/ruby_llm/providers/anthropic/media.rb +92 -0
  73. data/lib/ruby_llm/providers/anthropic/models.rb +63 -0
  74. data/lib/ruby_llm/providers/anthropic/streaming.rb +45 -0
  75. data/lib/ruby_llm/providers/anthropic/tools.rb +109 -0
  76. data/lib/ruby_llm/providers/anthropic.rb +36 -0
  77. data/lib/ruby_llm/providers/bedrock/capabilities.rb +167 -0
  78. data/lib/ruby_llm/providers/bedrock/chat.rb +63 -0
  79. data/lib/ruby_llm/providers/bedrock/media.rb +61 -0
  80. data/lib/ruby_llm/providers/bedrock/models.rb +98 -0
  81. data/lib/ruby_llm/providers/bedrock/signing.rb +831 -0
  82. data/lib/ruby_llm/providers/bedrock/streaming/base.rb +51 -0
  83. data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +71 -0
  84. data/lib/ruby_llm/providers/bedrock/streaming/message_processing.rb +67 -0
  85. data/lib/ruby_llm/providers/bedrock/streaming/payload_processing.rb +80 -0
  86. data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +78 -0
  87. data/lib/ruby_llm/providers/bedrock/streaming.rb +18 -0
  88. data/lib/ruby_llm/providers/bedrock.rb +82 -0
  89. data/lib/ruby_llm/providers/deepseek/capabilities.rb +130 -0
  90. data/lib/ruby_llm/providers/deepseek/chat.rb +16 -0
  91. data/lib/ruby_llm/providers/deepseek.rb +30 -0
  92. data/lib/ruby_llm/providers/gemini/capabilities.rb +281 -0
  93. data/lib/ruby_llm/providers/gemini/chat.rb +454 -0
  94. data/lib/ruby_llm/providers/gemini/embeddings.rb +37 -0
  95. data/lib/ruby_llm/providers/gemini/images.rb +47 -0
  96. data/lib/ruby_llm/providers/gemini/media.rb +112 -0
  97. data/lib/ruby_llm/providers/gemini/models.rb +40 -0
  98. data/lib/ruby_llm/providers/gemini/streaming.rb +61 -0
  99. data/lib/ruby_llm/providers/gemini/tools.rb +198 -0
  100. data/lib/ruby_llm/providers/gemini/transcription.rb +116 -0
  101. data/lib/ruby_llm/providers/gemini.rb +37 -0
  102. data/lib/ruby_llm/providers/gpustack/chat.rb +27 -0
  103. data/lib/ruby_llm/providers/gpustack/media.rb +46 -0
  104. data/lib/ruby_llm/providers/gpustack/models.rb +90 -0
  105. data/lib/ruby_llm/providers/gpustack.rb +34 -0
  106. data/lib/ruby_llm/providers/mistral/capabilities.rb +155 -0
  107. data/lib/ruby_llm/providers/mistral/chat.rb +24 -0
  108. data/lib/ruby_llm/providers/mistral/embeddings.rb +33 -0
  109. data/lib/ruby_llm/providers/mistral/models.rb +48 -0
  110. data/lib/ruby_llm/providers/mistral.rb +32 -0
  111. data/lib/ruby_llm/providers/ollama/chat.rb +27 -0
  112. data/lib/ruby_llm/providers/ollama/media.rb +46 -0
  113. data/lib/ruby_llm/providers/ollama/models.rb +36 -0
  114. data/lib/ruby_llm/providers/ollama.rb +30 -0
  115. data/lib/ruby_llm/providers/openai/capabilities.rb +299 -0
  116. data/lib/ruby_llm/providers/openai/chat.rb +88 -0
  117. data/lib/ruby_llm/providers/openai/embeddings.rb +33 -0
  118. data/lib/ruby_llm/providers/openai/images.rb +38 -0
  119. data/lib/ruby_llm/providers/openai/media.rb +81 -0
  120. data/lib/ruby_llm/providers/openai/models.rb +39 -0
  121. data/lib/ruby_llm/providers/openai/moderation.rb +34 -0
  122. data/lib/ruby_llm/providers/openai/streaming.rb +46 -0
  123. data/lib/ruby_llm/providers/openai/tools.rb +98 -0
  124. data/lib/ruby_llm/providers/openai/transcription.rb +70 -0
  125. data/lib/ruby_llm/providers/openai.rb +44 -0
  126. data/lib/ruby_llm/providers/openai_responses.rb +395 -0
  127. data/lib/ruby_llm/providers/openrouter/models.rb +73 -0
  128. data/lib/ruby_llm/providers/openrouter.rb +26 -0
  129. data/lib/ruby_llm/providers/perplexity/capabilities.rb +137 -0
  130. data/lib/ruby_llm/providers/perplexity/chat.rb +16 -0
  131. data/lib/ruby_llm/providers/perplexity/models.rb +42 -0
  132. data/lib/ruby_llm/providers/perplexity.rb +48 -0
  133. data/lib/ruby_llm/providers/vertexai/chat.rb +14 -0
  134. data/lib/ruby_llm/providers/vertexai/embeddings.rb +32 -0
  135. data/lib/ruby_llm/providers/vertexai/models.rb +130 -0
  136. data/lib/ruby_llm/providers/vertexai/streaming.rb +14 -0
  137. data/lib/ruby_llm/providers/vertexai/transcription.rb +16 -0
  138. data/lib/ruby_llm/providers/vertexai.rb +55 -0
  139. data/lib/ruby_llm/railtie.rb +35 -0
  140. data/lib/ruby_llm/responses_session.rb +77 -0
  141. data/lib/ruby_llm/stream_accumulator.rb +101 -0
  142. data/lib/ruby_llm/streaming.rb +153 -0
  143. data/lib/ruby_llm/tool.rb +209 -0
  144. data/lib/ruby_llm/tool_call.rb +22 -0
  145. data/lib/ruby_llm/tool_executors.rb +125 -0
  146. data/lib/ruby_llm/transcription.rb +35 -0
  147. data/lib/ruby_llm/utils.rb +91 -0
  148. data/lib/ruby_llm/version.rb +5 -0
  149. data/lib/ruby_llm.rb +140 -0
  150. data/lib/tasks/models.rake +525 -0
  151. data/lib/tasks/release.rake +67 -0
  152. data/lib/tasks/ruby_llm.rake +15 -0
  153. data/lib/tasks/vcr.rake +92 -0
  154. metadata +346 -0
@@ -0,0 +1,134 @@
1
+ # frozen_string_literal: true
2
+
3
+ module RubyLLM
4
+ module Providers
5
+ class Anthropic
6
+ # Determines capabilities and pricing for Anthropic models
7
+ module Capabilities
8
+ module_function
9
+
10
+ def determine_context_window(_model_id)
11
+ 200_000
12
+ end
13
+
14
+ def determine_max_tokens(model_id)
15
+ case model_id
16
+ when /claude-3-7-sonnet/, /claude-3-5/ then 8_192
17
+ else 4_096
18
+ end
19
+ end
20
+
21
+ def get_input_price(model_id)
22
+ PRICES.dig(model_family(model_id), :input) || default_input_price
23
+ end
24
+
25
+ def get_output_price(model_id)
26
+ PRICES.dig(model_family(model_id), :output) || default_output_price
27
+ end
28
+
29
+ def supports_vision?(model_id)
30
+ !model_id.match?(/claude-[12]/)
31
+ end
32
+
33
+ def supports_functions?(model_id)
34
+ model_id.match?(/claude-3/)
35
+ end
36
+
37
+ def supports_json_mode?(model_id)
38
+ model_id.match?(/claude-3/)
39
+ end
40
+
41
+ def supports_extended_thinking?(model_id)
42
+ model_id.match?(/claude-3-7-sonnet/)
43
+ end
44
+
45
+ def model_family(model_id)
46
+ case model_id
47
+ when /claude-3-7-sonnet/ then 'claude-3-7-sonnet'
48
+ when /claude-3-5-sonnet/ then 'claude-3-5-sonnet'
49
+ when /claude-3-5-haiku/ then 'claude-3-5-haiku'
50
+ when /claude-3-opus/ then 'claude-3-opus'
51
+ when /claude-3-sonnet/ then 'claude-3-sonnet'
52
+ when /claude-3-haiku/ then 'claude-3-haiku'
53
+ else 'claude-2'
54
+ end
55
+ end
56
+
57
+ def model_type(_)
58
+ 'chat'
59
+ end
60
+
61
+ PRICES = {
62
+ 'claude-3-7-sonnet': { input: 3.0, output: 15.0 },
63
+ 'claude-3-5-sonnet': { input: 3.0, output: 15.0 },
64
+ 'claude-3-5-haiku': { input: 0.80, output: 4.0 },
65
+ 'claude-3-opus': { input: 15.0, output: 75.0 },
66
+ 'claude-3-haiku': { input: 0.25, output: 1.25 },
67
+ 'claude-2': { input: 3.0, output: 15.0 }
68
+ }.freeze
69
+
70
+ def default_input_price
71
+ 3.0
72
+ end
73
+
74
+ def default_output_price
75
+ 15.0
76
+ end
77
+
78
+ def modalities_for(model_id)
79
+ modalities = {
80
+ input: ['text'],
81
+ output: ['text']
82
+ }
83
+
84
+ unless model_id.match?(/claude-[12]/)
85
+ modalities[:input] << 'image'
86
+ modalities[:input] << 'pdf'
87
+ end
88
+
89
+ modalities
90
+ end
91
+
92
+ def capabilities_for(model_id)
93
+ capabilities = ['streaming']
94
+
95
+ if model_id.match?(/claude-3/)
96
+ capabilities << 'function_calling'
97
+ capabilities << 'batch'
98
+ end
99
+
100
+ capabilities << 'reasoning' if model_id.match?(/claude-3-7|-4/)
101
+ capabilities << 'citations' if model_id.match?(/claude-3\.5|claude-3-7/)
102
+ capabilities
103
+ end
104
+
105
+ def pricing_for(model_id)
106
+ family = model_family(model_id)
107
+ prices = PRICES.fetch(family.to_sym, { input: default_input_price, output: default_output_price })
108
+
109
+ standard_pricing = {
110
+ input_per_million: prices[:input],
111
+ output_per_million: prices[:output]
112
+ }
113
+
114
+ batch_pricing = {
115
+ input_per_million: prices[:input] * 0.5,
116
+ output_per_million: prices[:output] * 0.5
117
+ }
118
+
119
+ if model_id.match?(/claude-3-7/)
120
+ standard_pricing[:reasoning_output_per_million] = prices[:output] * 2.5
121
+ batch_pricing[:reasoning_output_per_million] = prices[:output] * 1.25
122
+ end
123
+
124
+ {
125
+ text_tokens: {
126
+ standard: standard_pricing,
127
+ batch: batch_pricing
128
+ }
129
+ }
130
+ end
131
+ end
132
+ end
133
+ end
134
+ end
@@ -0,0 +1,125 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Chat methods of the Anthropic Messages API integration.
      # Builds request payloads and parses completion responses.
      module Chat
        module_function

        def completion_url
          '/v1/messages'
        end

        # Renders the full request body for POST /v1/messages.
        # `schema` is accepted for interface parity but unused by Anthropic.
        def render_payload(messages, tools:, temperature:, model:, stream: false, schema: nil) # rubocop:disable Metrics/ParameterLists,Lint/UnusedMethodArgument
          system_messages, conversation = separate_messages(messages)
          system_content = build_system_content(system_messages)

          payload = build_base_payload(conversation, model, stream)
          add_optional_fields(payload, system_content: system_content, tools: tools, temperature: temperature)
          payload
        end

        # Splits messages into [system messages, everything else].
        def separate_messages(messages)
          messages.partition { |message| message.role == :system }
        end

        # Collapses system messages into Anthropic's top-level `system` field.
        # Raw content passes through untouched; everything else is media-formatted.
        def build_system_content(system_messages)
          return [] if system_messages.empty?

          if system_messages.length > 1
            RubyLLM.logger.warn(
              "Anthropic's Claude implementation only supports a single system message. " \
              'Multiple system messages will be combined into one.'
            )
          end

          system_messages.flat_map do |msg|
            raw = msg.content
            raw.is_a?(RubyLLM::Content::Raw) ? raw.value : Media.format_content(raw)
          end
        end

        # Mandatory fields of the request body.
        def build_base_payload(chat_messages, model, stream)
          formatted = chat_messages.map { |message| format_message(message) }

          {
            model: model.id,
            messages: formatted,
            stream: stream,
            max_tokens: model.max_tokens || 4096
          }
        end

        # Adds tools/system/temperature only when present; mutates `payload`.
        def add_optional_fields(payload, system_content:, tools:, temperature:)
          payload[:temperature] = temperature unless temperature.nil?
          payload[:system] = system_content unless system_content.empty?
          payload[:tools] = tools.values.map { |tool| Tools.function_for(tool) } unless tools.empty?
        end

        # Turns a raw HTTP response into a RubyLLM::Message.
        def parse_completion_response(response)
          body = response.body
          blocks = body['content'] || []

          build_message(body, extract_text_content(blocks), Tools.find_tool_uses(blocks), response)
        end

        # Concatenates the text of all 'text' content blocks.
        def extract_text_content(blocks)
          blocks.filter_map { |block| block['text'] if block['type'] == 'text' }.join
        end

        # Assembles the assistant Message, including token accounting.
        def build_message(data, content, tool_use_blocks, response)
          usage = data['usage'] || {}
          cache_creation_tokens = usage['cache_creation_input_tokens']
          if cache_creation_tokens.nil?
            # Newer responses may report a per-TTL breakdown instead of a total.
            breakdown = usage['cache_creation']
            cache_creation_tokens = breakdown.values.compact.sum if breakdown.is_a?(Hash)
          end

          Message.new(
            role: :assistant,
            content: content,
            tool_calls: Tools.parse_tool_calls(tool_use_blocks),
            input_tokens: usage['input_tokens'],
            output_tokens: usage['output_tokens'],
            cached_tokens: usage['cache_read_input_tokens'],
            cache_creation_tokens: cache_creation_tokens,
            model_id: data['model'],
            raw: response
          )
        end

        # Dispatches a message to the right wire format.
        def format_message(msg)
          return Tools.format_tool_call(msg) if msg.tool_call?
          return Tools.format_tool_result(msg) if msg.tool_result?

          format_basic_message(msg)
        end

        def format_basic_message(msg)
          {
            role: convert_role(msg.role),
            content: Media.format_content(msg.content)
          }
        end

        # Anthropic only knows 'user' and 'assistant' roles on the wire.
        def convert_role(role)
          %i[tool user].include?(role) ? 'user' : 'assistant'
        end
      end
    end
  end
end
@@ -0,0 +1,44 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Helper for constructing Anthropic native content blocks.
      # `new` is overridden to return a RubyLLM::Content::Raw wrapping the
      # block array, so callers can pass Anthropic-shaped content verbatim.
      class Content
        class << self
          def new(text = nil, cache: false, cache_control: nil, parts: nil, **extras)
            RubyLLM::Content::Raw.new(
              resolve_payload(
                text: text,
                parts: parts,
                cache: cache,
                cache_control: cache_control,
                extras: extras
              )
            )
          end

          private

          # Builds the array of content blocks. Explicit `parts` win; otherwise a
          # single text block is created (with optional cache_control and extras).
          def resolve_payload(text:, parts:, cache:, cache_control:, extras:)
            return Array(parts) if parts

            raise ArgumentError, 'text or parts must be provided' if text.nil?

            text_block = { type: 'text', text: text }.merge(extras)
            control = determine_cache_control(cache_control, cache)
            text_block[:cache_control] = control if control
            [text_block]
          end

          # Explicit cache_control beats the boolean `cache:` shortcut,
          # which maps to Anthropic's ephemeral cache type.
          def determine_cache_control(explicit_control, cache_flag)
            return explicit_control if explicit_control
            return { type: 'ephemeral' } if cache_flag

            nil
          end
        end
      end
    end
  end
end
@@ -0,0 +1,20 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Embeddings methods of the Anthropic API integration.
      # Anthropic offers no embeddings endpoint, so every embedding hook raises.
      module Embeddings
        private

        # Raises RubyLLM::Error unconditionally.
        # Fixes two defects in the original:
        #   * `raise Error "msg"` (missing comma) parsed as `raise(Error("msg"))`,
        #     producing NoMethodError instead of the intended RubyLLM::Error.
        #   * the aliased hooks are invoked with arguments, but the zero-arity
        #     definition would have raised ArgumentError first; accept and ignore any.
        def embed(*, **)
          raise Error, "Anthropic doesn't support embeddings"
        end

        alias render_embedding_payload embed
        alias embedding_url embed
        alias parse_embedding_response embed
      end
    end
  end
end
@@ -0,0 +1,92 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Handles formatting of media content (images, PDFs, text files) for Anthropic.
      module Media
        module_function

        # Converts arbitrary message content into an array of Anthropic content
        # blocks. Raw content passes through; Hash/Array are serialized as JSON
        # text; plain values become a single text block.
        def format_content(content)
          case content
          when RubyLLM::Content::Raw
            content.value
          when Hash, Array
            [format_text(content.to_json)]
          when RubyLLM::Content
            format_parts(content)
          else
            [format_text(content)]
          end
        end

        # Expands a RubyLLM::Content into text + attachment blocks.
        def format_parts(content)
          parts = []
          parts << format_text(content.text) if content.text
          content.attachments.each { |attachment| parts << format_attachment(attachment) }
          parts
        end

        # Routes one attachment to its block builder; unknown types are rejected.
        def format_attachment(attachment)
          case attachment.type
          when :image then format_image(attachment)
          when :pdf then format_pdf(attachment)
          when :text then format_text_file(attachment)
          else raise UnsupportedAttachmentError, attachment.mime_type
          end
        end

        def format_text(text)
          { type: 'text', text: text }
        end

        def format_image(image)
          { type: 'image', source: media_source(image) }
        end

        def format_pdf(pdf)
          { type: 'document', source: media_source(pdf) }
        end

        # URL attachments are referenced; local ones are embedded as base64.
        def media_source(attachment)
          if attachment.url?
            { type: 'url', url: attachment.source }
          else
            { type: 'base64', media_type: attachment.mime_type, data: attachment.encoded }
          end
        end

        # Text files are inlined via their LLM-ready representation.
        def format_text_file(text_file)
          { type: 'text', text: text_file.for_llm }
        end
      end
    end
  end
end
@@ -0,0 +1,63 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Models methods of the Anthropic API integration:
      # model listing plus usage extraction from streaming/response payloads.
      module Models
        module_function

        def models_url
          '/v1/models'
        end

        # Maps the GET /v1/models response into Model::Info records, enriched
        # via the provider's Capabilities module.
        def parse_list_models_response(response, slug, capabilities)
          Array(response.body['data']).map do |model_data|
            build_model_info(model_data, slug, capabilities)
          end
        end

        def build_model_info(model_data, slug, capabilities)
          id = model_data['id']

          Model::Info.new(
            id: id,
            name: model_data['display_name'],
            provider: slug,
            family: capabilities.model_family(id),
            created_at: Time.parse(model_data['created_at']),
            context_window: capabilities.determine_context_window(id),
            max_output_tokens: capabilities.determine_max_tokens(id),
            modalities: capabilities.modalities_for(id),
            capabilities: capabilities.capabilities_for(id),
            pricing: capabilities.pricing_for(id),
            metadata: {}
          )
        end

        def extract_model_id(data)
          data.dig('message', 'model')
        end

        # NOTE(review): unlike the other extractors this has no top-level
        # 'usage' fallback — presumably input tokens only arrive inside
        # message_start events; confirm against the streaming protocol.
        def extract_input_tokens(data)
          data.dig('message', 'usage', 'input_tokens')
        end

        def extract_output_tokens(data)
          usage_value(data, 'output_tokens')
        end

        def extract_cached_tokens(data)
          usage_value(data, 'cache_read_input_tokens')
        end

        # Cache-creation tokens: prefer the direct counter, otherwise sum the
        # per-TTL breakdown hash when present.
        def extract_cache_creation_tokens(data)
          direct = usage_value(data, 'cache_creation_input_tokens')
          return direct if direct

          breakdown = data.dig('message', 'usage', 'cache_creation') || data.dig('usage', 'cache_creation')
          breakdown.values.compact.sum if breakdown.is_a?(Hash)
        end

        # Reads a usage key from either the nested message or the event root.
        def usage_value(data, key)
          data.dig('message', 'usage', key) || data.dig('usage', key)
        end
      end
    end
  end
end
@@ -0,0 +1,45 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Streaming methods of the Anthropic API integration.
      # Turns SSE event payloads into Chunks and classifies stream errors.
      module Streaming
        private

        # Streaming reuses the regular messages endpoint.
        def stream_url
          completion_url
        end

        # Builds a Chunk from one SSE event; extract_* helpers come from the
        # Models/Tools modules mixed into the same provider instance.
        def build_chunk(data)
          Chunk.new(
            role: :assistant,
            model_id: extract_model_id(data),
            content: data.dig('delta', 'text'),
            input_tokens: extract_input_tokens(data),
            output_tokens: extract_output_tokens(data),
            cached_tokens: extract_cached_tokens(data),
            cache_creation_tokens: extract_cache_creation_tokens(data),
            tool_calls: extract_tool_calls(data)
          )
        end

        # True for incremental tool-argument JSON fragments.
        def json_delta?(data)
          data['type'] == 'content_block_delta' &&
            data.dig('delta', 'type') == 'input_json_delta'
        end

        # Returns [status, message] for error events, nil otherwise.
        # overloaded_error maps to Anthropic's documented 529 status.
        def parse_streaming_error(data)
          parsed = JSON.parse(data)
          return unless parsed['type'] == 'error'

          error = parsed['error']
          status = error['type'] == 'overloaded_error' ? 529 : 500
          [status, error['message']]
        end
      end
    end
  end
end
@@ -0,0 +1,109 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class Anthropic
      # Tools methods of the Anthropic API integration: wire formatting of
      # tool declarations, tool_use/tool_result blocks, and call parsing.
      module Tools
        module_function

        # Selects the tool_use blocks out of a content-block array.
        def find_tool_uses(blocks)
          blocks.select { |block| block['type'] == 'tool_use' }
        end

        # Formats an assistant message that carries tool calls.
        def format_tool_call(msg)
          return { role: 'assistant', content: msg.content.value } if msg.content.is_a?(RubyLLM::Content::Raw)

          blocks = []
          text = msg.content
          blocks << Media.format_text(text) unless text.nil? || text.empty?
          blocks.concat(msg.tool_calls.values.map { |tool_call| format_tool_use_block(tool_call) })

          { role: 'assistant', content: blocks }
        end

        # Tool results go back to Anthropic as user messages.
        def format_tool_result(msg)
          content = if msg.content.is_a?(RubyLLM::Content::Raw)
                      msg.content.value
                    else
                      [format_tool_result_block(msg)]
                    end

          { role: 'user', content: content }
        end

        def format_tool_use_block(tool_call)
          {
            type: 'tool_use',
            id: tool_call.id,
            name: tool_call.name,
            input: tool_call.arguments
          }
        end

        def format_tool_result_block(msg)
          {
            type: 'tool_result',
            tool_use_id: msg.tool_call_id,
            content: Media.format_content(msg.content)
          }
        end

        # Builds the tool declaration sent in the request's `tools` array.
        # provider_params are deep-merged on top when present.
        def function_for(tool)
          schema = tool.params_schema ||
                   RubyLLM::Tool::SchemaDefinition.from_parameters(tool.parameters)&.json_schema

          declaration = {
            name: tool.name,
            description: tool.description,
            input_schema: schema || default_input_schema
          }

          tool.provider_params.empty? ? declaration : RubyLLM::Utils.deep_merge(declaration, tool.provider_params)
        end

        # During streaming, JSON deltas accumulate under a nil key; full
        # content blocks are parsed normally. json_delta? is provided by the
        # Streaming module mixed into the same provider.
        def extract_tool_calls(data)
          return parse_tool_calls(data['content_block']) unless json_delta?(data)

          { nil => ToolCall.new(id: nil, name: nil, arguments: data.dig('delta', 'partial_json')) }
        end

        # Parses tool_use blocks into a {id => ToolCall} hash; nil when none.
        # Accepts a single block or an array (a Hash must be wrapped, not
        # splatted into pairs via Array()).
        def parse_tool_calls(content_blocks)
          return nil if content_blocks.nil?

          blocks = content_blocks.is_a?(Array) ? content_blocks : [content_blocks]

          calls = blocks.each_with_object({}) do |block, acc|
            next unless block && block['type'] == 'tool_use'

            acc[block['id']] = ToolCall.new(
              id: block['id'],
              name: block['name'],
              arguments: block['input']
            )
          end

          calls.empty? ? nil : calls
        end

        # Permissive empty schema used when a tool declares no parameters.
        def default_input_schema
          {
            'type' => 'object',
            'properties' => {},
            'required' => [],
            'additionalProperties' => false,
            'strict' => true
          }
        end
      end
    end
  end
end
@@ -0,0 +1,36 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    # Anthropic Claude API integration.
    class Anthropic < Provider
      include Anthropic::Chat
      include Anthropic::Embeddings
      include Anthropic::Media
      include Anthropic::Models
      include Anthropic::Streaming
      include Anthropic::Tools

      def api_base
        'https://api.anthropic.com'
      end

      # Authentication + API version headers sent on every request.
      def headers
        {
          'x-api-key' => @config.anthropic_api_key,
          'anthropic-version' => '2023-06-01'
        }
      end

      def self.capabilities
        Anthropic::Capabilities
      end

      def self.configuration_requirements
        %i[anthropic_api_key]
      end
    end
  end
end