ruby_llm 1.10.0 → 1.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +2 -2
- data/lib/ruby_llm/aliases.json +4 -4
- data/lib/ruby_llm/configuration.rb +1 -0
- data/lib/ruby_llm/models.json +920 -1005
- data/lib/ruby_llm/provider.rb +5 -1
- data/lib/ruby_llm/providers/anthropic/media.rb +2 -2
- data/lib/ruby_llm/providers/bedrock/chat.rb +10 -1
- data/lib/ruby_llm/providers/openai/media.rb +1 -1
- data/lib/ruby_llm/providers/xai/chat.rb +15 -0
- data/lib/ruby_llm/providers/xai/models.rb +75 -0
- data/lib/ruby_llm/providers/xai.rb +28 -0
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +10 -8
- data/lib/tasks/models.rake +1 -0
- metadata +4 -1
data/lib/ruby_llm/provider.rb
CHANGED
|
@@ -108,10 +108,14 @@ module RubyLLM
|
|
|
108
108
|
body = try_parse_json(response.body)
|
|
109
109
|
case body
|
|
110
110
|
when Hash
|
|
111
|
+
error = body['error']
|
|
112
|
+
return error if error.is_a?(String)
|
|
113
|
+
|
|
111
114
|
body.dig('error', 'message')
|
|
112
115
|
when Array
|
|
113
116
|
body.map do |part|
|
|
114
|
-
part
|
|
117
|
+
error = part['error']
|
|
118
|
+
error.is_a?(String) ? error : part.dig('error', 'message')
|
|
115
119
|
end.join('. ')
|
|
116
120
|
else
|
|
117
121
|
body
|
|
@@ -44,7 +44,7 @@ module RubyLLM
|
|
|
44
44
|
type: 'image',
|
|
45
45
|
source: {
|
|
46
46
|
type: 'url',
|
|
47
|
-
url: image.source
|
|
47
|
+
url: image.source.to_s
|
|
48
48
|
}
|
|
49
49
|
}
|
|
50
50
|
else
|
|
@@ -65,7 +65,7 @@ module RubyLLM
|
|
|
65
65
|
type: 'document',
|
|
66
66
|
source: {
|
|
67
67
|
type: 'url',
|
|
68
|
-
url: pdf.source
|
|
68
|
+
url: pdf.source.to_s
|
|
69
69
|
}
|
|
70
70
|
}
|
|
71
71
|
else
|
|
@@ -66,7 +66,7 @@ module RubyLLM
|
|
|
66
66
|
content_blocks << thinking_block if thinking_block
|
|
67
67
|
end
|
|
68
68
|
|
|
69
|
-
|
|
69
|
+
append_formatted_content(content_blocks, msg.content)
|
|
70
70
|
|
|
71
71
|
{
|
|
72
72
|
role: Anthropic::Chat.convert_role(msg.role),
|
|
@@ -100,6 +100,15 @@ module RubyLLM
|
|
|
100
100
|
content: content_blocks
|
|
101
101
|
}
|
|
102
102
|
end
|
|
103
|
+
|
|
104
|
+
def append_formatted_content(content_blocks, content)
|
|
105
|
+
formatted_content = Media.format_content(content)
|
|
106
|
+
if formatted_content.is_a?(Array)
|
|
107
|
+
content_blocks.concat(formatted_content)
|
|
108
|
+
else
|
|
109
|
+
content_blocks << formatted_content
|
|
110
|
+
end
|
|
111
|
+
end
|
|
103
112
|
end
|
|
104
113
|
end
|
|
105
114
|
end
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module RubyLLM
|
|
4
|
+
module Providers
|
|
5
|
+
class XAI
|
|
6
|
+
# Chat implementation for xAI
|
|
7
|
+
# https://docs.x.ai/docs/api-reference#chat-completions
|
|
8
|
+
module Chat
|
|
9
|
+
def format_role(role)
|
|
10
|
+
role.to_s
|
|
11
|
+
end
|
|
12
|
+
end
|
|
13
|
+
end
|
|
14
|
+
end
|
|
15
|
+
end
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module RubyLLM
|
|
4
|
+
module Providers
|
|
5
|
+
class XAI
|
|
6
|
+
# Models metadata for xAI list models.
|
|
7
|
+
module Models
|
|
8
|
+
module_function
|
|
9
|
+
|
|
10
|
+
IMAGE_MODELS = %w[grok-2-image-1212].freeze
|
|
11
|
+
VISION_MODELS = %w[
|
|
12
|
+
grok-2-vision-1212
|
|
13
|
+
grok-4-0709
|
|
14
|
+
grok-4-fast-non-reasoning
|
|
15
|
+
grok-4-fast-reasoning
|
|
16
|
+
grok-4-1-fast-non-reasoning
|
|
17
|
+
grok-4-1-fast-reasoning
|
|
18
|
+
].freeze
|
|
19
|
+
REASONING_MODELS = %w[
|
|
20
|
+
grok-3-mini
|
|
21
|
+
grok-4-0709
|
|
22
|
+
grok-4-fast-reasoning
|
|
23
|
+
grok-4-1-fast-reasoning
|
|
24
|
+
grok-code-fast-1
|
|
25
|
+
].freeze
|
|
26
|
+
|
|
27
|
+
def parse_list_models_response(response, slug, _capabilities)
|
|
28
|
+
Array(response.body['data']).map do |model_data|
|
|
29
|
+
model_id = model_data['id']
|
|
30
|
+
|
|
31
|
+
Model::Info.new(
|
|
32
|
+
id: model_id,
|
|
33
|
+
name: format_display_name(model_id),
|
|
34
|
+
provider: slug,
|
|
35
|
+
family: 'grok',
|
|
36
|
+
created_at: model_data['created'] ? Time.at(model_data['created']) : nil,
|
|
37
|
+
context_window: nil,
|
|
38
|
+
max_output_tokens: nil,
|
|
39
|
+
modalities: modalities_for(model_id),
|
|
40
|
+
capabilities: capabilities_for(model_id),
|
|
41
|
+
pricing: {},
|
|
42
|
+
metadata: {
|
|
43
|
+
object: model_data['object'],
|
|
44
|
+
owned_by: model_data['owned_by']
|
|
45
|
+
}.compact
|
|
46
|
+
)
|
|
47
|
+
end
|
|
48
|
+
end
|
|
49
|
+
|
|
50
|
+
def modalities_for(model_id)
|
|
51
|
+
if IMAGE_MODELS.include?(model_id)
|
|
52
|
+
{ input: ['text'], output: ['image'] }
|
|
53
|
+
else
|
|
54
|
+
input = ['text']
|
|
55
|
+
input << 'image' if VISION_MODELS.include?(model_id)
|
|
56
|
+
{ input: input, output: ['text'] }
|
|
57
|
+
end
|
|
58
|
+
end
|
|
59
|
+
|
|
60
|
+
def capabilities_for(model_id)
|
|
61
|
+
return [] if IMAGE_MODELS.include?(model_id)
|
|
62
|
+
|
|
63
|
+
capabilities = %w[streaming function_calling structured_output]
|
|
64
|
+
capabilities << 'reasoning' if REASONING_MODELS.include?(model_id)
|
|
65
|
+
capabilities << 'vision' if VISION_MODELS.include?(model_id)
|
|
66
|
+
capabilities
|
|
67
|
+
end
|
|
68
|
+
|
|
69
|
+
def format_display_name(model_id)
|
|
70
|
+
model_id.tr('-', ' ').split.map(&:capitalize).join(' ')
|
|
71
|
+
end
|
|
72
|
+
end
|
|
73
|
+
end
|
|
74
|
+
end
|
|
75
|
+
end
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module RubyLLM
|
|
4
|
+
module Providers
|
|
5
|
+
# xAI API integration
|
|
6
|
+
class XAI < OpenAI
|
|
7
|
+
include XAI::Chat
|
|
8
|
+
include XAI::Models
|
|
9
|
+
|
|
10
|
+
def api_base
|
|
11
|
+
'https://api.x.ai/v1'
|
|
12
|
+
end
|
|
13
|
+
|
|
14
|
+
def headers
|
|
15
|
+
{
|
|
16
|
+
'Authorization' => "Bearer #{@config.xai_api_key}",
|
|
17
|
+
'Content-Type' => 'application/json'
|
|
18
|
+
}
|
|
19
|
+
end
|
|
20
|
+
|
|
21
|
+
class << self
|
|
22
|
+
def configuration_requirements
|
|
23
|
+
%i[xai_api_key]
|
|
24
|
+
end
|
|
25
|
+
end
|
|
26
|
+
end
|
|
27
|
+
end
|
|
28
|
+
end
|
data/lib/ruby_llm/version.rb
CHANGED
data/lib/ruby_llm.rb
CHANGED
|
@@ -13,19 +13,20 @@ require 'zeitwerk'
|
|
|
13
13
|
|
|
14
14
|
loader = Zeitwerk::Loader.for_gem
|
|
15
15
|
loader.inflector.inflect(
|
|
16
|
-
'
|
|
17
|
-
'llm' => 'LLM',
|
|
18
|
-
'openai' => 'OpenAI',
|
|
16
|
+
'UI' => 'UI',
|
|
19
17
|
'api' => 'API',
|
|
20
|
-
'deepseek' => 'DeepSeek',
|
|
21
|
-
'perplexity' => 'Perplexity',
|
|
22
18
|
'bedrock' => 'Bedrock',
|
|
23
|
-
'
|
|
19
|
+
'deepseek' => 'DeepSeek',
|
|
24
20
|
'gpustack' => 'GPUStack',
|
|
21
|
+
'llm' => 'LLM',
|
|
25
22
|
'mistral' => 'Mistral',
|
|
26
|
-
'
|
|
23
|
+
'openai' => 'OpenAI',
|
|
24
|
+
'openrouter' => 'OpenRouter',
|
|
27
25
|
'pdf' => 'PDF',
|
|
28
|
-
'
|
|
26
|
+
'perplexity' => 'Perplexity',
|
|
27
|
+
'ruby_llm' => 'RubyLLM',
|
|
28
|
+
'vertexai' => 'VertexAI',
|
|
29
|
+
'xai' => 'XAI'
|
|
29
30
|
)
|
|
30
31
|
loader.ignore("#{__dir__}/tasks")
|
|
31
32
|
loader.ignore("#{__dir__}/generators")
|
|
@@ -100,6 +101,7 @@ RubyLLM::Provider.register :openai, RubyLLM::Providers::OpenAI
|
|
|
100
101
|
RubyLLM::Provider.register :openrouter, RubyLLM::Providers::OpenRouter
|
|
101
102
|
RubyLLM::Provider.register :perplexity, RubyLLM::Providers::Perplexity
|
|
102
103
|
RubyLLM::Provider.register :vertexai, RubyLLM::Providers::VertexAI
|
|
104
|
+
RubyLLM::Provider.register :xai, RubyLLM::Providers::XAI
|
|
103
105
|
|
|
104
106
|
if defined?(Rails::Railtie)
|
|
105
107
|
require 'ruby_llm/railtie'
|
data/lib/tasks/models.rake
CHANGED
|
@@ -45,6 +45,7 @@ def configure_from_env
|
|
|
45
45
|
config.deepseek_api_key = ENV.fetch('DEEPSEEK_API_KEY', nil)
|
|
46
46
|
config.perplexity_api_key = ENV.fetch('PERPLEXITY_API_KEY', nil)
|
|
47
47
|
config.openrouter_api_key = ENV.fetch('OPENROUTER_API_KEY', nil)
|
|
48
|
+
config.xai_api_key = ENV.fetch('XAI_API_KEY', nil)
|
|
48
49
|
config.mistral_api_key = ENV.fetch('MISTRAL_API_KEY', nil)
|
|
49
50
|
config.vertexai_location = ENV.fetch('GOOGLE_CLOUD_LOCATION', nil)
|
|
50
51
|
config.vertexai_project_id = ENV.fetch('GOOGLE_CLOUD_PROJECT', nil)
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: ruby_llm
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 1.10.0
|
|
4
|
+
version: 1.11.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Carmine Paolino
|
|
@@ -290,6 +290,9 @@ files:
|
|
|
290
290
|
- lib/ruby_llm/providers/vertexai/models.rb
|
|
291
291
|
- lib/ruby_llm/providers/vertexai/streaming.rb
|
|
292
292
|
- lib/ruby_llm/providers/vertexai/transcription.rb
|
|
293
|
+
- lib/ruby_llm/providers/xai.rb
|
|
294
|
+
- lib/ruby_llm/providers/xai/chat.rb
|
|
295
|
+
- lib/ruby_llm/providers/xai/models.rb
|
|
293
296
|
- lib/ruby_llm/railtie.rb
|
|
294
297
|
- lib/ruby_llm/stream_accumulator.rb
|
|
295
298
|
- lib/ruby_llm/streaming.rb
|