ruby_llm 0.1.0.pre → 0.1.0.pre3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 04c616dc84f656d17df731cee4f7ee6a0eb20793d9c1b32df549ba21fbc8c913
-  data.tar.gz: 2bc73cc3304cc21c1e27b97c3702cec24cdca443ca81d613fe3850fefa7224da
+  metadata.gz: d1e5d243e58cd5fc884ef6a8ed5078df18e7659523296f175c55dff24c116079
+  data.tar.gz: 9a0cebfd7392a92e05b4f98974735ffab608e96a9af2caa2c5886af92584e0e7
 SHA512:
-  metadata.gz: '0080ab8908d42eb938d0a357910c95d9b344a30e95319c53803944d7b6d6876cce30df9926a30e9387b870cb713224665d6edc57e8e07def08fd1d995f4ab056'
-  data.tar.gz: ee65b82281d0207027ed345e751b0e19d3de9e7ff3bfa6d2b8e05831906c4cd6323da3790d82d3c961658fb6904ba879e806bb2cf39336866494c2894bf96f6f
+  metadata.gz: 3df37307db5c36064502f476c714c893717a38fb73136e57c9e395386e8354cce62b47ad12da7cd6fe1c666a8c6e55faf333cd2808cfc280ae775bb96c181d23
+  data.tar.gz: df0f1a5cff449264a3868ac463ada43a94eddc712ed1b3de23802b5188ae217e86b8b48314cb99b24468bbdf2cb9970886908156e3a72642d7967b9a4088db40
@@ -7,7 +7,11 @@ on:
     branches: [ "main" ]
 
 jobs:
+  test:
+    uses: ./.github/workflows/test.yml
+
   build:
+    needs: test # This ensures tests must pass before building/publishing
     name: Build + Publish
     runs-on: ubuntu-latest
     permissions:
@@ -16,10 +20,12 @@ jobs:
 
     steps:
     - uses: actions/checkout@v4
-    - name: Set up Ruby 3.3
+
+    - name: Set up Ruby
       uses: ruby/setup-ruby@v1
       with:
-        ruby-version: 3.3.x
+        ruby-version: '3.3'
+        bundler-cache: true
 
     - name: Publish to GPR
      run: |
@@ -42,4 +48,4 @@ jobs:
          gem build *.gemspec
          gem push *.gem
        env:
-         GEM_HOST_API_KEY: "${{secrets.RUBYGEMS_AUTH_TOKEN}}"
+         GEM_HOST_API_KEY: "${{secrets.RUBYGEMS_AUTH_TOKEN}}"
@@ -0,0 +1,32 @@
+name: Test
+
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        ruby-version: ['3.1', '3.2', '3.3']
+
+    steps:
+    - uses: actions/checkout@v4
+
+    - name: Set up Ruby
+      uses: ruby/setup-ruby@v1
+      with:
+        ruby-version: ${{ matrix.ruby-version }}
+        bundler-cache: true
+
+    - name: Install dependencies
+      run: bundle install
+
+    # - name: Check code format
+    #   run: bundle exec rubocop
+
+    - name: Run tests
+      run: bundle exec rspec
data/.gitignore ADDED
@@ -0,0 +1,58 @@
+*.gem
+*.rbc
+/.config
+/coverage/
+/InstalledFiles
+/pkg/
+/spec/reports/
+/spec/examples.txt
+/test/tmp/
+/test/version_tmp/
+/tmp/
+
+# Used by dotenv library to load environment variables.
+.env
+
+# Ignore Byebug command history file.
+.byebug_history
+
+## Specific to RubyMotion:
+.dat*
+.repl_history
+build/
+*.bridgesupport
+build-iPhoneOS/
+build-iPhoneSimulator/
+
+## Specific to RubyMotion (use of CocoaPods):
+#
+# We recommend against adding the Pods directory to your .gitignore. However
+# you should judge for yourself, the pros and cons are mentioned at:
+# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
+#
+# vendor/Pods/
+
+## Documentation cache and generated files:
+/.yardoc/
+/_yardoc/
+/doc/
+/rdoc/
+
+## Environment normalization:
+/.bundle/
+/vendor/bundle
+/lib/bundler/man/
+
+# for a library or gem, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+Gemfile.lock
+# .ruby-version
+# .ruby-gemset
+
+# unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
+.rvmrc
+
+# Used by RuboCop. Remote config files pulled in from inherit_from directive.
+# .rubocop-https?--*
+
+repomix-output.txt
data/.overcommit.yml ADDED
@@ -0,0 +1,26 @@
+PreCommit:
+  RuboCop:
+    enabled: false
+    auto_correct: true
+    on_warn: fail # Treat all warnings as failures
+
+  Flay:
+    enabled: true
+
+  RSpec:
+    enabled: true
+    command: ['bundle', 'exec', 'rspec']
+    on_warn: fail
+
+  TrailingWhitespace:
+    enabled: true
+    auto_correct: true
+    exclude:
+      - '**/db/structure.sql' # Ignore trailing whitespace in generated files
+
+PostCheckout:
+  ALL: # Special hook name that customizes all hooks of this type
+    quiet: true # Change all post-checkout hooks to only display output on failure
+
+  IndexTags:
+    enabled: true # Generate a tags file with `ctags` each time HEAD changes
data/.rspec ADDED
@@ -0,0 +1,3 @@
+--require spec_helper
+--format documentation
+--color
data/.rubocop.yml ADDED
@@ -0,0 +1,3 @@
+require:
+  - rubocop-rake
+  - rubocop-rspec
data/Gemfile ADDED
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+source 'https://rubygems.org'
+
+gemspec
data/README.md CHANGED
@@ -10,7 +10,8 @@ RubyLLM provides a unified interface for interacting with various LLM providers
 ## Features
 
 - 🤝 Unified interface for multiple LLM providers (OpenAI, Anthropic, etc.)
-- 🛠️ Tool/Function calling support
+- 📋 Comprehensive model listing and capabilities detection
+- 🛠️ Simple and flexible tool/function calling
 - 📊 Automatic token counting and tracking
 - 🔄 Streaming support
 - 🚂 Seamless Rails integration
@@ -44,10 +45,34 @@ require 'ruby_llm'
 RubyLLM.configure do |config|
   config.openai_api_key = ENV['OPENAI_API_KEY']
   config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
-  config.default_model = 'gpt-4'
+  config.default_model = 'gpt-4o-mini'
 end
 ```
 
+### Listing Available Models
+
+```ruby
+client = RubyLLM.client
+
+# List models from all providers
+all_models = client.list_models
+
+# List models from a specific provider
+openai_models = client.list_models(:openai)
+anthropic_models = client.list_models(:anthropic)
+
+# Access model information
+model = openai_models.first
+puts model.display_name
+puts "Context window: #{model.context_window}"
+puts "Maximum tokens: #{model.max_tokens}"
+puts "Input price per million tokens: $#{model.input_price_per_million}"
+puts "Output price per million tokens: $#{model.output_price_per_million}"
+puts "Supports vision: #{model.supports_vision}"
+puts "Supports function calling: #{model.supports_functions}"
+puts "Supports JSON mode: #{model.supports_json_mode}"
+```
+
 ### Simple Chat
 
 ```ruby
@@ -60,22 +85,15 @@ response = client.chat([
 puts response.content
 ```
 
-### Streaming
-
-```ruby
-client.chat([
-  { role: :user, content: "Count to 10 slowly" }
-], stream: true) do |chunk|
-  print chunk.content
-end
-```
+### Tools (Function Calling)
 
-### Tool Usage
+RubyLLM supports tools/functions with a simple, flexible interface. You can create tools using blocks or wrap existing methods:
 
 ```ruby
+# Using a block
 calculator = RubyLLM::Tool.new(
   name: "calculator",
-  description: "Perform mathematical calculations",
+  description: "Performs mathematical calculations",
   parameters: {
     expression: {
       type: "string",
@@ -87,9 +105,46 @@ calculator = RubyLLM::Tool.new(
   eval(args[:expression]).to_s
 end
 
+# Using an existing method
+class MathUtils
+  def arithmetic(x, y, operation)
+    case operation
+    when 'add' then x + y
+    when 'subtract' then x - y
+    when 'multiply' then x * y
+    when 'divide' then x.to_f / y
+    else
+      raise ArgumentError, "Unknown operation: #{operation}"
+    end
+  end
+end
+
+math_tool = RubyLLM::Tool.from_method(
+  MathUtils.instance_method(:arithmetic),
+  description: "Performs basic arithmetic operations",
+  parameter_descriptions: {
+    x: "First number in the operation",
+    y: "Second number in the operation",
+    operation: "Operation to perform (add, subtract, multiply, divide)"
+  }
+)
+
+# Use tools in conversations
 response = client.chat([
   { role: :user, content: "What is 123 * 456?" }
 ], tools: [calculator])
+
+puts response.content
+```
+
+### Streaming
+
+```ruby
+client.chat([
+  { role: :user, content: "Count to 10 slowly" }
+], stream: true) do |chunk|
+  print chunk.content
+end
 ```
 
 ## Rails Integration
data/Rakefile CHANGED
@@ -1,4 +1,6 @@
-require "bundler/gem_tasks"
-require "rake/clean"
+# frozen_string_literal: true
+
+require 'bundler/gem_tasks'
+require 'rake/clean'
 
 task default: %w[build]
data/bin/console CHANGED
@@ -1,6 +1,9 @@
 #!/usr/bin/env ruby
-require "bundler/setup"
-require "ruby_llm"
+# frozen_string_literal: true
 
-require "irb"
+require 'bundler/setup'
+require 'ruby_llm'
+require 'dotenv/load'
+
+require 'irb'
 IRB.start(__FILE__)
@@ -1,21 +1,25 @@
+# frozen_string_literal: true
+
 module RubyLLM
   module ActiveRecord
+    # Provides ActsAs functionality for LLM-related models
     module ActsAs
-      def acts_as_llm_model(options = {})
+      def acts_as_llm_model(_options = {})
        include ModelMethods
       end
 
-      def acts_as_llm_conversation(options = {})
+      def acts_as_llm_conversation(_options = {})
        include ConversationMethods
        has_many :messages, -> { order(created_at: :asc) }
       end
 
-      def acts_as_llm_message(options = {})
+      def acts_as_llm_message(_options = {})
        include MessageMethods
        belongs_to :conversation
       end
     end
 
+    # Methods for LLM model functionality
     module ModelMethods
       extend ActiveSupport::Concern
 
@@ -31,6 +35,7 @@ module RubyLLM
       end
     end
 
+    # Methods for LLM conversation handling
    module ConversationMethods
      extend ActiveSupport::Concern
 
@@ -40,26 +45,33 @@
 
      def send_message(content, model: nil)
        transaction do
-          message = messages.create!(
-            role: :user,
-            content: content
-          )
-
-          response = RubyLLM.client.chat(
-            conversation_messages,
-            model: model || current_model
-          )
-
-          messages.create!(
-            role: :assistant,
-            content: response.content,
-            token_count: response.token_count
-          )
+          create_user_message(content)
+          create_assistant_response(model)
        end
      end
 
      private
 
+      def create_user_message(content)
+        messages.create!(
+          role: :user,
+          content: content
+        )
+      end
+
+      def create_assistant_response(model)
+        response = RubyLLM.client.chat(
+          conversation_messages,
+          model: model || current_model
+        )
+
+        messages.create!(
+          role: :assistant,
+          content: response.content,
+          token_count: response.token_count
+        )
+      end
+
      def conversation_messages
        messages.map(&:to_llm_format)
      end
@@ -69,6 +81,7 @@ module RubyLLM
      end
    end
 
+    # Methods for LLM message handling
    module MessageMethods
      extend ActiveSupport::Concern
 
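The refactor above splits `send_message` into `create_user_message` and `create_assistant_response` behind the same transaction. A minimal usage sketch, assuming hypothetical application models named `Conversation` and `Message` that opt into these concerns (the class names and schema are illustrative, not defined by this diff):

```ruby
# Hypothetical Rails models wiring up the ActsAs concerns shown above.
class Conversation < ApplicationRecord
  acts_as_llm_conversation # includes ConversationMethods, has_many :messages
end

class Message < ApplicationRecord
  acts_as_llm_message # includes MessageMethods, belongs_to :conversation
end

# send_message persists the user message, calls RubyLLM.client.chat with the
# conversation history, then persists the assistant reply with its token count.
conversation = Conversation.create!
conversation.send_message("Summarize our roadmap in one paragraph")
conversation.messages.last # => assistant message record
```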
@@ -1,38 +1,54 @@
+# frozen_string_literal: true
+
 module RubyLLM
+  # Client class for handling LLM provider interactions
   class Client
     def initialize
       @providers = {}
-      @conversations = {}
     end
 
-    def chat(messages, model: nil, temperature: 1.0, stream: false, &block)
+    def chat(messages, model: nil, temperature: 0.7, stream: false, tools: nil, &block)
+      # Convert any hash messages to Message objects
+      formatted_messages = messages.map do |msg|
+        msg.is_a?(Message) ? msg : Message.new(**msg)
+      end
+
      provider = provider_for(model)
-      response = provider.chat(
-        messages,
+      provider.chat(
+        formatted_messages,
        model: model,
        temperature: temperature,
        stream: stream,
+        tools: tools,
        &block
      )
-      response
    end
 
-    def create_conversation(tools: [])
-      conversation = Conversation.new(tools: tools)
-      @conversations[conversation.id] = conversation
-      conversation
+    def list_models(provider = nil)
+      if provider
+        provider_for(nil, provider).list_models
+      else
+        all_providers.flat_map(&:list_models)
+      end
    end
 
    private
 
-    def provider_for(model)
-      provider_name = detect_provider(model)
+    def all_providers
+      [
+        provider_for(nil, :openai),
+        provider_for(nil, :anthropic)
+      ]
+    end
+
+    def provider_for(model, specific_provider = nil)
+      provider_name = specific_provider || detect_provider(model)
      @providers[provider_name] ||= case provider_name
-      when :openai then Providers::OpenAI.new
-      when :anthropic then Providers::Anthropic.new
-      else
-        raise Error, "Unsupported provider: #{provider_name}"
-      end
+                                    when :openai then Providers::OpenAI.new
+                                    when :anthropic then Providers::Anthropic.new
+                                    else
+                                      raise Error, "Unsupported provider: #{provider_name}"
+                                    end
    end
 
    def detect_provider(model)
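A short sketch of how the reworked `Client` might be exercised, assuming API keys are already configured; the message content is illustrative:

```ruby
client = RubyLLM::Client.new

# Hash messages are wrapped into RubyLLM::Message objects before being handed
# to the provider, so both forms are accepted.
client.chat([{ role: :user, content: "Hello" }])
client.chat([RubyLLM::Message.new(role: :user, content: "Hello")], temperature: 0.7)

# list_models with a provider symbol queries that provider only; with no
# argument it flat-maps the models of every known provider.
client.list_models(:anthropic)
client.list_models
```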
@@ -1,8 +1,9 @@
+# frozen_string_literal: true
+
 module RubyLLM
+  # Configuration class for RubyLLM settings
   class Configuration
-    attr_accessor :openai_api_key, :anthropic_api_key
-    attr_accessor :default_provider, :default_model
-    attr_accessor :request_timeout
+    attr_accessor :openai_api_key, :anthropic_api_key, :default_provider, :default_model, :request_timeout
 
     def initialize
       @request_timeout = 30
@@ -18,6 +19,7 @@ module RubyLLM
     end
   end
 
+  # Settings specific to individual LLM providers
  class ProviderSettings
    attr_accessor :api_key, :api_version, :default_model, :base_url
  end
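For reference, a hedged sketch of setting the consolidated `Configuration` accessors through the configure block shown in the README; the values are illustrative:

```ruby
RubyLLM.configure do |config|
  config.openai_api_key    = ENV['OPENAI_API_KEY']
  config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
  config.default_model     = 'gpt-4o-mini'
  config.request_timeout   = 30 # same value as the default set in #initialize
end
```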
@@ -1,4 +1,7 @@
+# frozen_string_literal: true
+
 module RubyLLM
+  # Represents a conversation with an LLM
   class Conversation
     attr_reader :id, :messages, :tools
 
@@ -1,14 +1,18 @@
+# frozen_string_literal: true
+
 module RubyLLM
   class Message
     VALID_ROLES = %i[system user assistant tool].freeze
 
-    attr_reader :role, :content, :tool_calls, :tool_results
+    attr_reader :role, :content, :tool_calls, :tool_results, :token_usage, :model_id
 
-    def initialize(role:, content: nil, tool_calls: nil, tool_results: nil)
+    def initialize(role:, content: nil, tool_calls: nil, tool_results: nil, token_usage: nil, model_id: nil)
       @role = role.to_sym
       @content = content
       @tool_calls = tool_calls
       @tool_results = tool_results
+      @token_usage = token_usage
+      @model_id = model_id
       validate!
     end
 
@@ -17,7 +21,9 @@ module RubyLLM
        role: role,
        content: content,
        tool_calls: tool_calls,
-        tool_results: tool_results
+        tool_results: tool_results,
+        token_usage: token_usage,
+        model_id: model_id
      }.compact
    end
 
@@ -25,8 +31,11 @@ module RubyLLM
 
    def validate!
      unless VALID_ROLES.include?(role)
-        raise ArgumentError, "Invalid role: #{role}. Must be one of: #{VALID_ROLES.join(', ')}"
+        raise ArgumentError,
+              "Invalid role: #{role}. Must be one of: #{VALID_ROLES.join(', ')}"
      end
+      raise ArgumentError, 'Content cannot be nil' if content.nil?
+      raise ArgumentError, 'Content cannot be empty' if content.empty?
    end
  end
 end
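A small sketch of the stricter `Message` validation and the compacted `to_h` output; the values are illustrative:

```ruby
msg = RubyLLM::Message.new(role: :user, content: "Hello", model_id: "gpt-4o-mini")
msg.to_h # => { role: :user, content: "Hello", model_id: "gpt-4o-mini" } (nil fields compacted)

RubyLLM::Message.new(role: :user, content: nil)    # raises ArgumentError, 'Content cannot be nil'
RubyLLM::Message.new(role: :user, content: "")     # raises ArgumentError, 'Content cannot be empty'
RubyLLM::Message.new(role: :wizard, content: "hi") # raises ArgumentError for an invalid role
```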
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module ModelCapabilities
+    class Anthropic < Base
+      def determine_context_window(model_id)
+        case model_id
+        when /claude-3-5-sonnet/, /claude-3-5-haiku/,
+             /claude-3-opus/, /claude-3-sonnet/, /claude-3-haiku/
+          200_000
+        else
+          100_000
+        end
+      end
+
+      def determine_max_tokens(model_id)
+        case model_id
+        when /claude-3-5-sonnet/, /claude-3-5-haiku/
+          8_192
+        when /claude-3-opus/, /claude-3-sonnet/, /claude-3-haiku/
+          4_096
+        else
+          4_096
+        end
+      end
+
+      def get_input_price(model_id)
+        case model_id
+        when /claude-3-5-sonnet/
+          3.0 # $3.00 per million tokens
+        when /claude-3-5-haiku/
+          0.80 # $0.80 per million tokens
+        when /claude-3-opus/
+          15.0 # $15.00 per million tokens
+        when /claude-3-sonnet/
+          3.0 # $3.00 per million tokens
+        when /claude-3-haiku/
+          0.25 # $0.25 per million tokens
+        else
+          3.0
+        end
+      end
+
+      def get_output_price(model_id)
+        case model_id
+        when /claude-3-5-sonnet/
+          15.0 # $15.00 per million tokens
+        when /claude-3-5-haiku/
+          4.0 # $4.00 per million tokens
+        when /claude-3-opus/
+          75.0 # $75.00 per million tokens
+        when /claude-3-sonnet/
+          15.0 # $15.00 per million tokens
+        when /claude-3-haiku/
+          1.25 # $1.25 per million tokens
+        else
+          15.0
+        end
+      end
+
+      def supports_vision?(model_id)
+        case model_id
+        when /claude-3-5-haiku/
+          false
+        when /claude-2/, /claude-1/
+          false
+        else
+          true
+        end
+      end
+
+      def supports_functions?(model_id)
+        model_id.include?('claude-3')
+      end
+
+      def supports_json_mode?(model_id)
+        model_id.include?('claude-3')
+      end
+    end
+  end
+end
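A usage sketch for the capability lookups above; the model id strings are illustrative examples, not pinned by this diff:

```ruby
caps = RubyLLM::ModelCapabilities::Anthropic.new

caps.determine_context_window('claude-3-5-sonnet-20241022') # => 200_000
caps.determine_max_tokens('claude-3-5-sonnet-20241022')     # => 8_192
caps.get_input_price('claude-3-5-haiku-20241022')           # => 0.8  ($ per million input tokens)
caps.get_output_price('claude-3-opus-20240229')             # => 75.0 ($ per million output tokens)
caps.supports_vision?('claude-3-5-haiku-20241022')          # => false
caps.supports_functions?('claude-3-5-sonnet-20241022')      # => true
```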
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module ModelCapabilities
+    class Base
+      def determine_context_window(_model_id)
+        raise NotImplementedError
+      end
+
+      def determine_max_tokens(_model_id)
+        raise NotImplementedError
+      end
+
+      def get_input_price(_model_id)
+        raise NotImplementedError
+      end
+
+      def get_output_price(_model_id)
+        raise NotImplementedError
+      end
+
+      def supports_vision?(_model_id)
+        raise NotImplementedError
+      end
+
+      def supports_functions?(_model_id)
+        raise NotImplementedError
+      end
+
+      def supports_json_mode?(_model_id)
+        raise NotImplementedError
+      end
+    end
+  end
+end