llm_hub 0.2.1 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 353ca2493088bec5034c53bda9389a7277d9b79f9a0af7aa9cd0dea38c7a3d96
-  data.tar.gz: e67417f5a3210cb1a5fbca143df86ed2bb4564ae62efe4dab751e2dd0a1bb1e4
+  metadata.gz: 1de72ed920d28f30dbab0a70be8384dfed017795865cfdfb7af895696cf32774
+  data.tar.gz: 0d2388d88588d9b4008294f468ff77813ea9d75d6d663d5d36ead835c5ff42ec
 SHA512:
-  metadata.gz: 3896b924c84d3e05877333187e60c57918d9f968ad347027faa8cac6e37849315a0b2815966c1e1fc506ca898e26456fb054973965d2a535b1a56ff41852a6a2
-  data.tar.gz: 12c26874593c316bde1f841da6161c4395e00c33f63eff6f7e26e10b978f349150ec1a686bbaedb2527432a8d9e726d29194b64f2f7199ba4604977d68ffd60f
+  metadata.gz: be6017b2455e8a02db445cbee8da303143cd5455bc4b1f7d615ed7ddf3ec3df25c95f1a65e2bf0d65d66e7dbfbde5b6e724ebf82c78c007bc1339e23b6704c15
+  data.tar.gz: 66d508f50a7548f5a6be67658bdfee3c04039d0524e7125e091633601dbc9e70b301d13136a6f390211a78ff2cd46303666b6e0aac3c503acf78a4901c1957ea
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
 ## [Unreleased]

+## [0.3.1] - 2025-07-08
+
+- Updated description
+
+## [0.3.0] - 2025-07-08
+
+- Added Google as a provider.
+
 ## [0.2.1] - 2025-07-04

 - Added new option parameters.
data/README.md CHANGED
@@ -1,8 +1,19 @@
 # LlmHub

-This is a Ruby interface for multiple LLM providers, such as OpenAI, Anthropic, and DeepSeek.
+A Ruby interface for multiple LLM providers with Completion and Embedding support.

-It provides easy access to Completion and Embedding functionalities.
+## Supported Providers
+
+### Completion API
+- **OpenAI**
+- **Anthropic**
+- **DeepSeek**
+- **Google**
+
+### Embedding API
+- **OpenAI**
+
+The gem provides a unified interface to interact with these different providers, making it easy to switch between them or use multiple providers in your application.

 ## Installation

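The new README section above stops short of a usage example in the hunk shown here. As a rough sketch of how the added :google provider would be selected through the unified interface (the completion method name `ask`, the model name, and the exact keyword arguments are assumptions inferred from the client signatures further down, not confirmed by this diff):

```ruby
require 'llm_hub'

# Hedged sketch only: provider selection mirrors PROVIDER_CLASSES in the client
# changes below; the completion method name (`ask`) and its keyword arguments
# are assumptions for illustration, not confirmed by this diff.
client = LlmHub::Completion::Client.new(
  api_key: ENV.fetch('GOOGLE_API_KEY'),
  provider: :google
)

response = client.ask(
  system_prompt: 'You are a helpful assistant.',
  content: 'Hello!',
  model_name: 'gemini-2.0-flash',      # hypothetical Gemini model name
  option_params: { temperature: 0.7 }  # merged into generationConfig for Google
)
```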
data/lib/llm_hub/completion/client.rb CHANGED
@@ -9,12 +9,13 @@ module LlmHub
       PROVIDER_CLASSES = {
         openai: Providers::OpenAI,
         anthropic: Providers::Anthropic,
-        deepseek: Providers::Deepseek
+        deepseek: Providers::Deepseek,
+        google: Providers::Google
       }.freeze

       # Initialize a new completion client
       # @param api_key [String] API key for the provider (required)
-      # @param provider [Symbol, String] Provider name (:openai, :anthropic, :deepseek) (required)
+      # @param provider [Symbol, String] Provider name (:openai, :anthropic, :deepseek, :google) (required)
       # @param open_time_out [Integer] HTTP open timeout in seconds (optional, defaults to Config value)
       # @param read_time_out [Integer] HTTP read timeout in seconds (optional, defaults to Config value)
       # @param retry_count [Integer] Number of retries for failed requests (optional, defaults to Config value)
@@ -38,7 +39,7 @@ module LlmHub
         option_params: {}
       )
         with_retry do
-          url = @provider_client.url
+          url = provider_url(model_name)
           request_body = @provider_client.request_body(system_prompt, content, model_name, option_params)
           headers = @provider_client.headers

@@ -51,6 +52,17 @@ module LlmHub

       private

+      # Get the appropriate URL for the provider
+      # @param model_name [String] Model name to use
+      # @return [String] Provider URL
+      def provider_url(model_name)
+        if @provider_client.is_a?(Providers::Google)
+          @provider_client.url(model_name)
+        else
+          @provider_client.url
+        end
+      end
+
       # Format the response from provider
       # @param response_body [Hash] Raw response from provider
       # @return [Hash{Symbol => String, Integer}] Formatted response
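For illustration, the effect of the new provider_url dispatch, using the Google provider added below (the model names are hypothetical):

```ruby
# Illustration of the provider_url dispatch above (model names hypothetical).
# Google interpolates the model into the endpoint:
#   provider_url('gemini-1.5-flash')
#   # => "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent"
# Every other provider keeps its static endpoint:
#   provider_url('gpt-4o') # => @provider_client.url
```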
data/lib/llm_hub/completion/providers/google.rb ADDED
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module LlmHub
+  module Completion
+    module Providers
+      # Google Gemini completion provider
+      class Google < Base
+        COMPLETIONS_URI = 'https://generativelanguage.googleapis.com/v1beta/models'
+
+        def url(model_name)
+          # Gemini API requires model name in URL
+          "#{COMPLETIONS_URI}/#{model_name}:generateContent"
+        end
+
+        def headers
+          {
+            'Content-Type' => 'application/json',
+            'x-goog-api-key' => @api_key
+          }
+        end
+
+        def request_body(system_prompt, content, _model_name, option_params)
+          {
+            system_instruction: build_system_instruction(system_prompt),
+            contents: build_contents(content),
+            generationConfig: build_generation_config(option_params)
+          }
+        end
+
+        def extract_answer(response_body)
+          response_body&.dig('candidates', 0, 'content', 'parts', 0, 'text')
+        end
+
+        def extract_tokens(response_body)
+          usage_metadata = response_body&.dig('usageMetadata')
+          {
+            total_tokens: usage_metadata&.dig('totalTokenCount'),
+            prompt_tokens: usage_metadata&.dig('promptTokenCount'),
+            completion_tokens: usage_metadata&.dig('candidatesTokenCount')
+          }
+        end
+
+        private
+
+        def build_system_instruction(system_prompt)
+          {
+            parts: [
+              {
+                text: system_prompt
+              }
+            ]
+          }
+        end
+
+        def build_contents(content)
+          [{
+            role: 'user',
+            parts: [{ text: content }]
+          }]
+        end
+
+        def build_generation_config(option_params)
+          base_config = {
+            temperature: 0.2,
+            maxOutputTokens: 1024,
+            topP: 0.8,
+            topK: 40
+          }
+
+          option_params.any? ? base_config.merge(option_params) : base_config
+        end
+      end
+    end
+  end
+end
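For reference, with empty option_params the request_body defined above yields a hash of this shape (the prompt strings are illustrative only; keys and default values come directly from the code):

```ruby
# Shape of Google#request_body('You are a helpful assistant.', 'Hello!', model, {})
# -- defaults taken from build_generation_config above.
{
  system_instruction: { parts: [{ text: 'You are a helpful assistant.' }] },
  contents: [{ role: 'user', parts: [{ text: 'Hello!' }] }],
  generationConfig: { temperature: 0.2, maxOutputTokens: 1024, topP: 0.8, topK: 40 }
}
```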
data/lib/llm_hub/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module LlmHub
-  VERSION = '0.2.1'
+  VERSION = '0.3.1'
 end
data/lib/llm_hub.rb CHANGED
@@ -19,6 +19,7 @@ require_relative 'llm_hub/completion/providers/base'
 require_relative 'llm_hub/completion/providers/openai'
 require_relative 'llm_hub/completion/providers/anthropic'
 require_relative 'llm_hub/completion/providers/deepseek'
+require_relative 'llm_hub/completion/providers/google'
 require_relative 'llm_hub/completion/client'

 # Embedding providers
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: llm_hub
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.3.1
 platform: ruby
 authors:
 - akiraNuma
 bindir: exe
 cert_chain: []
-date: 2025-07-04 00:00:00.000000000 Z
+date: 2025-07-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -37,8 +37,8 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-description: This is a Ruby interface for multiple LLM providers, such as OpenAI and
-  Anthropic.It provides easy access to Completion and Embedding functionalities.
+description: A Ruby interface for multiple LLM providers.It provides easy access to
+  Completion and Embedding functionalities.
 email:
 - akiran@akiranumakura.com
 executables: []
@@ -60,6 +60,7 @@ files:
 - lib/llm_hub/completion/providers/anthropic.rb
 - lib/llm_hub/completion/providers/base.rb
 - lib/llm_hub/completion/providers/deepseek.rb
+- lib/llm_hub/completion/providers/google.rb
 - lib/llm_hub/completion/providers/openai.rb
 - lib/llm_hub/config.rb
 - lib/llm_hub/embedding/client.rb
@@ -91,5 +92,5 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 requirements: []
 rubygems_version: 3.6.3
 specification_version: 4
-summary: This is a Ruby interface for multiple LLM providers, such as OpenAI and Anthropic.
+summary: A Ruby interface for multiple LLM providers.
 test_files: []