ruby_llm 0.1.0.pre34 → 0.1.0.pre35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 77ec20e57439d352e965de0e435d0c43b3acd53de2d6035345f6ac6e716e7fef
- data.tar.gz: 51d838bc1411303fd96c2f28d160ad99657f44846bd12ad2967ee329880e8922
+ metadata.gz: 586cc8473d06aebb4eb69ee646c8ad90250a522eac80d3471a22ef05e601b9de
+ data.tar.gz: 4f060d755a37373230d1364dda3fc8b74fabdf4128206eb4faf2b11fd02090ac
  SHA512:
- metadata.gz: 9b0ca7a80113ed498125c9e23460879328926e6f3de1e6fe7e7a63453deb94fbdebbe7961d529e6cd50db7ad91776cb92ffeaed29f396aa523f931b12f835f7d
- data.tar.gz: bcb30504074335dbdea39d171a3d810cfb14c7f5f9a3cc5b10e1a80970ce06f37775c7b1c5352a2e5fef0a14ea0d6603ae63197be6d2a866c7003b091036c842
+ metadata.gz: 8e10930fd06b2dbdbaca81aa93f45cde41e8d1199173597e29d94af7ea073db470a91be9869f89b622aebc28d915510baf070ac6655ddbbe1591a94fbef7b2f8
+ data.tar.gz: de99a1e6173f25b3c3bc7222910f6d1c8771b4c4f1062e8970f9472c7f551543c345cdffb1b3733ebd510e2351c27b5b274d483ba7047a32d90884b62577cb18
@@ -155,6 +155,22 @@
  "output_price_per_million": 15.0,
  "metadata": {}
  },
+ {
+ "id": "claude-3-7-sonnet-20250219",
+ "created_at": "2025-02-19T00:00:00Z",
+ "display_name": "Claude 3.7 Sonnet",
+ "provider": "anthropic",
+ "context_window": 200000,
+ "max_tokens": 4096,
+ "type": "chat",
+ "family": "claude2",
+ "supports_vision": true,
+ "supports_functions": true,
+ "supports_json_mode": true,
+ "input_price_per_million": 3.0,
+ "output_price_per_million": 15.0,
+ "metadata": {}
+ },
  {
  "id": "claude-3-haiku-20240307",
  "created_at": "2024-03-07T00:00:00Z",
@@ -830,6 +846,44 @@
  "owned_by": "google"
  }
  },
+ {
+ "id": "gemini-2.0-flash-lite",
+ "created_at": null,
+ "display_name": "Gemini 2.0 Flash Lite",
+ "provider": "gemini",
+ "context_window": 1048576,
+ "max_tokens": 8192,
+ "type": "chat",
+ "family": "gemini20_flash_lite",
+ "supports_vision": true,
+ "supports_functions": false,
+ "supports_json_mode": true,
+ "input_price_per_million": 0.075,
+ "output_price_per_million": 0.3,
+ "metadata": {
+ "object": "model",
+ "owned_by": "google"
+ }
+ },
+ {
+ "id": "gemini-2.0-flash-lite-001",
+ "created_at": null,
+ "display_name": "Gemini 2.0 Flash Lite 001",
+ "provider": "gemini",
+ "context_window": 1048576,
+ "max_tokens": 8192,
+ "type": "chat",
+ "family": "gemini20_flash_lite",
+ "supports_vision": true,
+ "supports_functions": false,
+ "supports_json_mode": true,
+ "input_price_per_million": 0.075,
+ "output_price_per_million": 0.3,
+ "metadata": {
+ "object": "model",
+ "owned_by": "google"
+ }
+ },
  {
  "id": "gemini-2.0-flash-lite-preview",
  "created_at": null,
@@ -868,6 +922,44 @@
  "owned_by": "google"
  }
  },
+ {
+ "id": "gemini-2.0-flash-mmgen-rev17",
+ "created_at": null,
+ "display_name": "Gemini 2.0 Flash Mmgen Rev17",
+ "provider": "gemini",
+ "context_window": 1048576,
+ "max_tokens": 8192,
+ "type": "chat",
+ "family": "gemini20_flash",
+ "supports_vision": true,
+ "supports_functions": true,
+ "supports_json_mode": true,
+ "input_price_per_million": 0.1,
+ "output_price_per_million": 0.4,
+ "metadata": {
+ "object": "model",
+ "owned_by": "google"
+ }
+ },
+ {
+ "id": "gemini-2.0-flash-thinking-001",
+ "created_at": null,
+ "display_name": "Gemini 2.0 Flash Thinking 001",
+ "provider": "gemini",
+ "context_window": 1048576,
+ "max_tokens": 8192,
+ "type": "chat",
+ "family": "gemini20_flash",
+ "supports_vision": true,
+ "supports_functions": true,
+ "supports_json_mode": true,
+ "input_price_per_million": 0.1,
+ "output_price_per_million": 0.4,
+ "metadata": {
+ "object": "model",
+ "owned_by": "google"
+ }
+ },
  {
  "id": "gemini-2.0-flash-thinking-exp",
  "created_at": null,
@@ -1077,6 +1169,25 @@
  "owned_by": "google"
  }
  },
+ {
+ "id": "gemma-3-27b-it",
+ "created_at": null,
+ "display_name": "Gemma 3 27b It",
+ "provider": "gemini",
+ "context_window": 32768,
+ "max_tokens": 4096,
+ "type": "chat",
+ "family": "other",
+ "supports_vision": false,
+ "supports_functions": false,
+ "supports_json_mode": false,
+ "input_price_per_million": 0.075,
+ "output_price_per_million": 0.3,
+ "metadata": {
+ "object": "model",
+ "owned_by": "google"
+ }
+ },
  {
  "id": "gpt-3.5-turbo",
  "created_at": "2023-02-28T19:56:42+01:00",
@@ -2046,6 +2157,25 @@
  "owned_by": "system"
  }
  },
+ {
+ "id": "veo-2.0-generate-001",
+ "created_at": null,
+ "display_name": "Veo 2.0 Generate 001",
+ "provider": "gemini",
+ "context_window": 32768,
+ "max_tokens": 4096,
+ "type": "chat",
+ "family": "other",
+ "supports_vision": false,
+ "supports_functions": false,
+ "supports_json_mode": false,
+ "input_price_per_million": 0.075,
+ "output_price_per_million": 0.3,
+ "metadata": {
+ "object": "model",
+ "owned_by": "google"
+ }
+ },
  {
  "id": "whisper-1",
  "created_at": "2023-02-27T22:13:04+01:00",
@@ -53,9 +53,7 @@ module RubyLLM
  end

  def refresh!
- @all = RubyLLM.providers.flat_map do |provider|
- provider.new.list_models
- end.sort_by(&:id)
+ @all = RubyLLM.providers.flat_map(&:list_models).sort_by(&:id)
  end
  end
  end
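Aside: the refresh! change above assumes each provider is now a module that responds to list_models directly, rather than a class that must be instantiated first. A minimal, self-contained sketch of that module-level pattern, using hypothetical Demo* modules rather than the gem's real providers:

module DemoOpenAI
  module_function

  # Stand-in for a provider module's model listing.
  def list_models
    [{ id: 'gpt-demo' }]
  end
end

module DemoAnthropic
  module_function

  def list_models
    [{ id: 'claude-demo' }]
  end
end

DEMO_PROVIDERS = [DemoOpenAI, DemoAnthropic].freeze

# Mirrors the shape of the new refresh!: flat_map straight over the modules.
def refresh_all
  DEMO_PROVIDERS.flat_map(&:list_models).sort_by { |m| m[:id] }
end

p refresh_all # => [{:id=>"claude-demo"}, {:id=>"gpt-demo"}]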
@@ -23,7 +23,7 @@ module RubyLLM
  req.headers.merge! headers
  end

- parse_list_models_response response
+ parse_list_models_response response, slug, capabilities
  end

  def embed(text, model:)
@@ -150,15 +150,6 @@ module RubyLLM
  body.is_a?(Hash) ? body.dig('error', 'message') : body
  end

- def capabilities
- provider_name = self.class.name.split('::').last
- provider_name::Capabilities
- end
-
- def slug
- self.class.name.split('::').last.downcase
- end
-
  class << self
  def extended(base)
  base.extend(Methods)
@@ -11,7 +11,7 @@ module RubyLLM
  '/v1/models'
  end

- def parse_list_models_response(response) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+ def parse_list_models_response(response, slug, capabilities) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
  (response.body['data'] || []).map do |model|
  ModelInfo.new(
  id: model['id'],
@@ -24,6 +24,14 @@ module RubyLLM
  'anthropic-version' => '2023-06-01'
  }
  end
+
+ def capabilities
+ Anthropic::Capabilities
+ end
+
+ def slug
+ 'anthropic'
+ end
  end
  end
  end
@@ -17,6 +17,14 @@ module RubyLLM
  'Authorization' => "Bearer #{RubyLLM.config.deepseek_api_key}"
  }
  end
+
+ def capabilities
+ DeepSeek::Capabilities
+ end
+
+ def slug
+ 'deepseek'
+ end
  end
  end
  end
@@ -126,8 +126,6 @@ module RubyLLM
  end
  end

- private
-
  def long_context_model?(model_id)
  model_id.match?(/gemini-1\.5-(?:pro|flash)/)
  end
@@ -7,12 +7,12 @@ module RubyLLM
  module Models
  module_function

- def parse_list_models_response(response)
+ def parse_list_models_response(response, slug, capabilities)
  response.body['data']&.each do |model|
  model['id'] = model['id'].delete_prefix('models/')
  end

- OpenAI::Models.parse_list_models_response(response)
+ OpenAI::Models.parse_list_models_response(response, slug, capabilities)
  end
  end
  end
@@ -18,6 +18,14 @@ module RubyLLM
  'Authorization' => "Bearer #{RubyLLM.config.gemini_api_key}"
  }
  end
+
+ def capabilities
+ Gemini::Capabilities
+ end
+
+ def slug
+ 'gemini'
+ end
  end
  end
  end
@@ -107,8 +107,6 @@ module RubyLLM
  end
  end

- private
-
  PRICES = {
  o1: { input: 15.0, cached_input: 7.5, output: 60.0 },
  o1_mini: { input: 1.10, cached_input: 0.55, output: 4.40 },
@@ -11,7 +11,7 @@ module RubyLLM
  'models'
  end

- def parse_list_models_response(response) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+ def parse_list_models_response(response, slug, capabilities) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
  (response.body['data'] || []).map do |model|
  ModelInfo.new(
  id: model['id'],
@@ -37,6 +37,14 @@ module RubyLLM
  'Authorization' => "Bearer #{RubyLLM.config.openai_api_key}"
  }
  end
+
+ def capabilities
+ OpenAI::Capabilities
+ end
+
+ def slug
+ 'openai'
+ end
  end
  end
  end
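Aside: taken together, the provider hunks above drop the reflection-based helpers (slug and capabilities derived from self.class.name) in favor of explicit per-module declarations, and parse_list_models_response now receives both as arguments. A hypothetical, self-contained sketch of that shape (demo names only, not the gem's API):

module DemoCapabilities
  module_function

  # Pretend capability lookup used while parsing the model list.
  def context_window_for(_model_id)
    200_000
  end
end

module DemoProvider
  module_function

  # Explicit declarations, as each provider module now defines.
  def slug
    'demo'
  end

  def capabilities
    DemoCapabilities
  end

  # The parser takes slug and capabilities as arguments instead of
  # reflecting on the name of whatever module or class called it.
  def parse_list_models_response(body, slug, capabilities)
    body.fetch('data', []).map do |model|
      {
        id: model['id'],
        provider: slug,
        context_window: capabilities.context_window_for(model['id'])
      }
    end
  end
end

p DemoProvider.parse_list_models_response(
  { 'data' => [{ 'id' => 'demo-model' }] },
  DemoProvider.slug,
  DemoProvider.capabilities
)
# => [{:id=>"demo-model", :provider=>"demo", :context_window=>200000}]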
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module RubyLLM
- VERSION = '0.1.0.pre34'
+ VERSION = '0.1.0.pre35'
  end
@@ -12,7 +12,7 @@ PROVIDER_DOCS = {
  },
  gemini: {
  models: 'https://ai.google.dev/gemini-api/docs/models/gemini',
- pricing: 'https://ai.google.dev/pricing'
+ pricing: 'https://ai.google.dev/gemini-api/docs/pricing'
  },
  deepseek: {
  models: 'https://api-docs.deepseek.com/quick_start/pricing/'
@@ -102,11 +102,12 @@ namespace :models do # rubocop:disable Metrics/BlockLength
  puts "Processing #{provider}..."

  # Initialize our AI assistants
+ #
  gemini = RubyLLM.chat(model: 'gemini-2.0-flash').with_temperature(0)
- claude = RubyLLM.chat(model: 'claude-3-5-sonnet-20241022').with_temperature(0)
+ claude = RubyLLM.chat(model: 'claude-3-7-sonnet-20250219').with_temperature(0)

  # Read existing capabilities file if present
- existing_file = "lib/ruby_llm/model_capabilities/#{provider}.rb"
+ existing_file = "lib/ruby_llm/providers/#{provider}/capabilities.rb"
  existing_code = File.read(existing_file) if File.exist?(existing_file)

  begin
@@ -155,18 +156,17 @@ namespace :models do # rubocop:disable Metrics/BlockLength

  #{model_info}

- The module should go in lib/ruby_llm/model_capabilities/#{provider}.rb and follow these conventions:
+ The module should go in lib/ruby_llm/providers/#{provider}/capabilities.rb and follow these conventions:

- 1. Module name should be RubyLLM::ModelCapabilities::#{provider.to_s.capitalize}
- 2. Include methods for determining context windows, token limits, pricing, and capabilities
- 3. Use consistent naming with other providers
- 4. Include detailed pricing information in a PRICES constant
- 5. Follow the existing structure in the codebase
- 6. Use Ruby idioms and clean code practices
- 7. Include module_function to make methods callable at module level
- 8. Include all necessary method documentation
+ 1. Include methods for determining context windows, token limits, pricing, and capabilities
+ 2. Use consistent naming with other providers
+ 3. Include detailed pricing information in a PRICES constant
+ 4. Follow the existing structure in the codebase
+ 5. Use Ruby idioms and clean code practices
+ 6. Include module_function to make methods callable at module level
+ 7. Include all necessary method documentation

- Here's the existing implementation for reference (maintain similar structure):
+ Here's the existing implementation for reference (maintain similar structure and same method names):

  #{existing_code}

@@ -176,7 +176,7 @@ namespace :models do # rubocop:disable Metrics/BlockLength
  response = claude.ask(code_prompt)

  # Save the file
- file_path = "lib/ruby_llm/model_capabilities/#{provider}.rb"
+ file_path = "lib/ruby_llm/providers/#{provider}/capabilities.rb"
  puts " Writing #{file_path}..."

  FileUtils.mkdir_p(File.dirname(file_path))
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ruby_llm
  version: !ruby/object:Gem::Version
- version: 0.1.0.pre34
+ version: 0.1.0.pre35
  platform: ruby
  authors:
  - Carmine Paolino
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2025-02-21 00:00:00.000000000 Z
+ date: 2025-02-25 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: event_stream_parser