girb-ruby_llm 0.1.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 66f01e57eaba72b64b36e0621eabd8c01251ebf92baacd82b738332867e5f6e8
4
- data.tar.gz: fa63bea432ab5e4d36be4253df606c526c1b0920e1e22e4ca6b2929a7f837cdc
3
+ metadata.gz: 53c8af5d1f30750451fed2b555cd064e492468da3e7954cf5976443374eb4d4d
4
+ data.tar.gz: '0149788d1aa79cd0de9bd0531ca8e855e4934c7e3b63a2fa59000ba2469e6048'
5
5
  SHA512:
6
- metadata.gz: 0a94d28c70c59bb44802dd7932b5a33e46e488bad840cdf4b3666070353d61058b501135e58edc7baea4ca0d4fe6a0dc93bdf19491452d768eca1e968f54510d
7
- data.tar.gz: d0e28049f1c2eb77f30a0e1ede71d39af6d96b1e96975986fd4803805cc982b2f1bfec6301646a574a3261e366617c2c819aa38e1e7d23e11f4986a1b997b2f7
6
+ metadata.gz: 78f109efbd1bb1a677295a8de6bc319f5dbd979df13bac7222cdf8015468b3973b88322bf7ca1cfb528b91d35ed1c2b220ca689eaa04147621dd18ae5abe1c35
7
+ data.tar.gz: 7770dbb62a2f71cf5197bb0db15a4b526824cf6d4f5a1a410170672dce64cb92251efcefabc71d851f75ab1b8d719a98b974cbea9e5c8d3fbbd964bbc891ee37
data/CHANGELOG.md CHANGED
@@ -1,5 +1,18 @@
1
1
  # Changelog
2
2
 
3
+ ## [0.2.0] - 2026-02-05
4
+
5
+ ### Added
6
+
7
+ - Debug gem (rdbg) support for AI-assisted debugging
8
+
9
+ ## [0.1.2] - 2026-02-03
10
+
11
+ ### Fixed
12
+
13
+ - Fix tool names being empty strings in dynamic RubyLLM::Tool classes
14
+ - Properly execute girb tools within RubyLLM's auto-execute framework
15
+
3
16
  ## [0.1.1] - 2026-02-03
4
17
 
5
18
  ### Added
data/README.md CHANGED
@@ -8,48 +8,55 @@ This gem allows you to use multiple LLM providers (OpenAI, Anthropic, Google Gem
8
8
 
9
9
  ## Installation
10
10
 
11
+ ### For Rails Projects
12
+
11
13
  Add to your Gemfile:
12
14
 
13
15
  ```ruby
14
- gem 'girb'
15
- gem 'girb-ruby_llm'
16
+ group :development do
17
+ gem 'girb-ruby_llm'
18
+ end
16
19
  ```
17
20
 
18
- Or install directly:
21
+ Then run:
19
22
 
20
23
  ```bash
21
- gem install girb girb-ruby_llm
24
+ bundle install
22
25
  ```
23
26
 
24
- ## Setup
25
-
26
- ### Option 1: Configure in ~/.irbrc (Recommended)
27
-
28
- Add to your `~/.irbrc`:
27
+ Create a `.girbrc` file in your project root:
29
28
 
30
29
  ```ruby
30
+ # .girbrc
31
31
  require 'girb-ruby_llm'
32
32
 
33
- RubyLLM.configure do |config|
34
- config.gemini_api_key = 'your-api-key'
35
- end
36
-
37
33
  Girb.configure do |c|
38
34
  c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
39
35
  end
40
36
  ```
41
37
 
42
- Then use regular `irb` command.
38
+ Now `rails console` will automatically load girb!
43
39
 
44
- ### Option 2: Configure via environment variables
40
+ ### For Non-Rails Projects
41
+
42
+ Install globally:
45
43
 
46
44
  ```bash
47
- export GIRB_PROVIDER=girb-ruby_llm
48
- export GIRB_MODEL=gemini-2.5-flash
49
- export GEMINI_API_KEY=your-api-key
45
+ gem install girb girb-ruby_llm
46
+ ```
47
+
48
+ Create a `.girbrc` file in your project directory:
49
+
50
+ ```ruby
51
+ # .girbrc
52
+ require 'girb-ruby_llm'
53
+
54
+ Girb.configure do |c|
55
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
56
+ end
50
57
  ```
51
58
 
52
- Then start with `girb` command.
59
+ Then use `girb` command instead of `irb`.
53
60
 
54
61
  ## Configuration
55
62
 
@@ -90,56 +97,74 @@ Set your API key or endpoint as an environment variable:
90
97
 
91
98
  ## Examples
92
99
 
100
+ ### Using Google Gemini
101
+
102
+ ```ruby
103
+ # .girbrc
104
+ require 'girb-ruby_llm'
105
+
106
+ # Set GEMINI_API_KEY environment variable
107
+ Girb.configure do |c|
108
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
109
+ end
110
+ ```
111
+
93
112
  ### Using OpenAI
94
113
 
95
- ```bash
96
- export GIRB_PROVIDER=girb-ruby_llm
97
- export GIRB_MODEL=gpt-4o
98
- export OPENAI_API_KEY="sk-..."
99
- girb
114
+ ```ruby
115
+ # .girbrc
116
+ require 'girb-ruby_llm'
117
+
118
+ # Set OPENAI_API_KEY environment variable
119
+ Girb.configure do |c|
120
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gpt-4o')
121
+ end
100
122
  ```
101
123
 
102
124
  ### Using Anthropic Claude
103
125
 
104
- ```bash
105
- export GIRB_PROVIDER=girb-ruby_llm
106
- export GIRB_MODEL=claude-sonnet-4-20250514
107
- export ANTHROPIC_API_KEY="sk-ant-..."
108
- girb
126
+ ```ruby
127
+ # .girbrc
128
+ require 'girb-ruby_llm'
129
+
130
+ # Set ANTHROPIC_API_KEY environment variable
131
+ Girb.configure do |c|
132
+ c.provider = Girb::Providers::RubyLlm.new(model: 'claude-sonnet-4-20250514')
133
+ end
109
134
  ```
110
135
 
111
136
  ### Using Ollama (Local)
112
137
 
113
- ```bash
114
- # Start Ollama first
115
- ollama serve
138
+ ```ruby
139
+ # .girbrc
140
+ require 'girb-ruby_llm'
116
141
 
117
- # Set the provider, model, and API base URL
118
- export GIRB_PROVIDER=girb-ruby_llm
119
- export GIRB_MODEL=llama3.2:latest
120
- export OLLAMA_API_BASE="http://localhost:11434/v1"
121
- girb
142
+ # Set OLLAMA_API_BASE environment variable (e.g., http://localhost:11434/v1)
143
+ Girb.configure do |c|
144
+ c.provider = Girb::Providers::RubyLlm.new(model: 'llama3.2:latest')
145
+ end
122
146
  ```
123
147
 
124
148
  ### Using OpenAI-compatible APIs (e.g., LM Studio, vLLM)
125
149
 
126
- ```bash
127
- export GIRB_PROVIDER=girb-ruby_llm
128
- export GIRB_MODEL=your-model-name
129
- export OPENAI_API_KEY="not-needed" # Some require any non-empty value
130
- export OPENAI_API_BASE="http://localhost:1234/v1"
131
- girb
150
+ ```ruby
151
+ # .girbrc
152
+ require 'girb-ruby_llm'
153
+
154
+ # Set OPENAI_API_BASE and OPENAI_API_KEY environment variables
155
+ Girb.configure do |c|
156
+ c.provider = Girb::Providers::RubyLlm.new(model: 'your-model-name')
157
+ end
132
158
  ```
133
159
 
134
160
  ### Advanced Configuration
135
161
 
136
- For more control, configure Girb in your `~/.irbrc`:
137
-
138
162
  ```ruby
139
- # ~/.irbrc
163
+ # .girbrc
140
164
  require 'girb-ruby_llm'
141
165
 
142
166
  Girb.configure do |c|
167
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
143
168
  c.debug = true # Enable debug output
144
169
  c.custom_prompt = <<~PROMPT
145
170
  Always confirm before destructive operations.
@@ -149,6 +174,17 @@ end
149
174
 
150
175
  Note: `RubyLLM::Models.refresh!` is automatically called for local providers (Ollama, GPUStack).
151
176
 
177
+ ## Alternative: Environment Variable Configuration
178
+
179
+ For the `girb` command, you can also configure via environment variables (used when no `.girbrc` is found):
180
+
181
+ ```bash
182
+ export GIRB_PROVIDER=girb-ruby_llm
183
+ export GIRB_MODEL=gemini-2.5-flash
184
+ export GEMINI_API_KEY=your-api-key
185
+ girb
186
+ ```
187
+
152
188
  ## Supported Models
153
189
 
154
190
  See [RubyLLM Available Models](https://rubyllm.com/reference/available-models) for the full list of supported models.
data/README_ja.md CHANGED
@@ -6,48 +6,55 @@
6
6
 
7
7
  ## インストール
8
8
 
9
+ ### Railsプロジェクトの場合
10
+
9
11
  Gemfileに追加:
10
12
 
11
13
  ```ruby
12
- gem 'girb'
13
- gem 'girb-ruby_llm'
14
+ group :development do
15
+ gem 'girb-ruby_llm'
16
+ end
14
17
  ```
15
18
 
16
- または直接インストール:
19
+ そして実行:
17
20
 
18
21
  ```bash
19
- gem install girb girb-ruby_llm
22
+ bundle install
20
23
  ```
21
24
 
22
- ## セットアップ
23
-
24
- ### 方法1: ~/.irbrcで設定(推奨)
25
-
26
- `~/.irbrc` に追加:
25
+ プロジェクトルートに `.girbrc` ファイルを作成:
27
26
 
28
27
  ```ruby
28
+ # .girbrc
29
29
  require 'girb-ruby_llm'
30
30
 
31
- RubyLLM.configure do |config|
32
- config.gemini_api_key = 'your-api-key'
33
- end
34
-
35
31
  Girb.configure do |c|
36
32
  c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
37
33
  end
38
34
  ```
39
35
 
40
- 通常の `irb` コマンドで使用できます。
36
+ これで `rails console` が自動的にgirbを読み込みます!
41
37
 
42
- ### 方法2: 環境変数で設定
38
+ ### 非Railsプロジェクトの場合
39
+
40
+ グローバルにインストール:
43
41
 
44
42
  ```bash
45
- export GIRB_PROVIDER=girb-ruby_llm
46
- export GIRB_MODEL=gemini-2.5-flash
47
- export GEMINI_API_KEY=your-api-key
43
+ gem install girb girb-ruby_llm
44
+ ```
45
+
46
+ プロジェクトディレクトリに `.girbrc` ファイルを作成:
47
+
48
+ ```ruby
49
+ # .girbrc
50
+ require 'girb-ruby_llm'
51
+
52
+ Girb.configure do |c|
53
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
54
+ end
48
55
  ```
49
56
 
50
- `girb` コマンドで起動します。
57
+ `irb` の代わりに `girb` コマンドを使用します。
51
58
 
52
59
  ## 設定
53
60
 
@@ -88,56 +95,74 @@ APIキーまたはエンドポイントを環境変数として設定します:
88
95
 
89
96
  ## 使用例
90
97
 
98
+ ### Google Geminiを使用
99
+
100
+ ```ruby
101
+ # .girbrc
102
+ require 'girb-ruby_llm'
103
+
104
+ # GEMINI_API_KEY 環境変数を設定
105
+ Girb.configure do |c|
106
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
107
+ end
108
+ ```
109
+
91
110
  ### OpenAIを使用
92
111
 
93
- ```bash
94
- export GIRB_PROVIDER=girb-ruby_llm
95
- export GIRB_MODEL=gpt-4o
96
- export OPENAI_API_KEY="sk-..."
97
- girb
112
+ ```ruby
113
+ # .girbrc
114
+ require 'girb-ruby_llm'
115
+
116
+ # OPENAI_API_KEY 環境変数を設定
117
+ Girb.configure do |c|
118
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gpt-4o')
119
+ end
98
120
  ```
99
121
 
100
122
  ### Anthropic Claudeを使用
101
123
 
102
- ```bash
103
- export GIRB_PROVIDER=girb-ruby_llm
104
- export GIRB_MODEL=claude-sonnet-4-20250514
105
- export ANTHROPIC_API_KEY="sk-ant-..."
106
- girb
124
+ ```ruby
125
+ # .girbrc
126
+ require 'girb-ruby_llm'
127
+
128
+ # ANTHROPIC_API_KEY 環境変数を設定
129
+ Girb.configure do |c|
130
+ c.provider = Girb::Providers::RubyLlm.new(model: 'claude-sonnet-4-20250514')
131
+ end
107
132
  ```
108
133
 
109
134
  ### Ollama(ローカル)を使用
110
135
 
111
- ```bash
112
- # まずOllamaを起動
113
- ollama serve
136
+ ```ruby
137
+ # .girbrc
138
+ require 'girb-ruby_llm'
114
139
 
115
- # プロバイダー、モデル、APIベースURLを設定
116
- export GIRB_PROVIDER=girb-ruby_llm
117
- export GIRB_MODEL=llama3.2:latest
118
- export OLLAMA_API_BASE="http://localhost:11434/v1"
119
- girb
140
+ # OLLAMA_API_BASE 環境変数を設定(例: http://localhost:11434/v1)
141
+ Girb.configure do |c|
142
+ c.provider = Girb::Providers::RubyLlm.new(model: 'llama3.2:latest')
143
+ end
120
144
  ```
121
145
 
122
146
  ### OpenAI互換API(LM Studio、vLLMなど)を使用
123
147
 
124
- ```bash
125
- export GIRB_PROVIDER=girb-ruby_llm
126
- export GIRB_MODEL=your-model-name
127
- export OPENAI_API_KEY="not-needed" # 空でない値が必要な場合
128
- export OPENAI_API_BASE="http://localhost:1234/v1"
129
- girb
148
+ ```ruby
149
+ # .girbrc
150
+ require 'girb-ruby_llm'
151
+
152
+ # OPENAI_API_BASE と OPENAI_API_KEY 環境変数を設定
153
+ Girb.configure do |c|
154
+ c.provider = Girb::Providers::RubyLlm.new(model: 'your-model-name')
155
+ end
130
156
  ```
131
157
 
132
158
  ### 詳細設定
133
159
 
134
- より細かい制御が必要な場合、`~/.irbrc`でGirbを設定できます:
135
-
136
160
  ```ruby
137
- # ~/.irbrc
161
+ # .girbrc
138
162
  require 'girb-ruby_llm'
139
163
 
140
164
  Girb.configure do |c|
165
+ c.provider = Girb::Providers::RubyLlm.new(model: 'gemini-2.5-flash')
141
166
  c.debug = true # デバッグ出力を有効化
142
167
  c.custom_prompt = <<~PROMPT
143
168
  破壊的操作の前に必ず確認してください。
@@ -147,6 +172,17 @@ end
147
172
 
148
173
  注: ローカルプロバイダー(Ollama、GPUStack)使用時は`RubyLLM::Models.refresh!`が自動的に呼ばれます。
149
174
 
175
+ ## 代替: 環境変数での設定
176
+
177
+ `girb` コマンドでは、`.girbrc` が見つからない場合に環境変数で設定することもできます:
178
+
179
+ ```bash
180
+ export GIRB_PROVIDER=girb-ruby_llm
181
+ export GIRB_MODEL=gemini-2.5-flash
182
+ export GEMINI_API_KEY=your-api-key
183
+ girb
184
+ ```
185
+
150
186
  ## 対応モデル
151
187
 
152
188
  サポートされているモデルの完全なリストは[RubyLLM Available Models](https://rubyllm.com/reference/available-models)を参照してください。
@@ -10,7 +10,7 @@ module Girb
10
10
  @model = model
11
11
  end
12
12
 
13
- def chat(messages:, system_prompt:, tools:)
13
+ def chat(messages:, system_prompt:, tools:, binding: nil)
14
14
  # Use specified model or RubyLLM's default
15
15
  chat_options = @model ? { model: @model } : {}
16
16
  ruby_llm_chat = ::RubyLLM.chat(**chat_options)
@@ -18,19 +18,15 @@ module Girb
18
18
  # Set system prompt
19
19
  ruby_llm_chat.with_instructions(system_prompt) if system_prompt && !system_prompt.empty?
20
20
 
21
- # Add tools
21
+ # Add tool schemas (for API payload generation only, not auto-executed)
22
22
  tool_instances = build_tools(tools)
23
23
  tool_instances.each { |tool| ruby_llm_chat.with_tool(tool) }
24
24
 
25
- # Add messages except the last user message
26
- add_messages_to_chat(ruby_llm_chat, messages[0..-2])
25
+ # Add all messages to the chat
26
+ add_messages_to_chat(ruby_llm_chat, messages)
27
27
 
28
- # Get the last user message
29
- last_message = messages.last
30
- last_content = extract_content(last_message)
31
-
32
- # Send the request
33
- response = ruby_llm_chat.ask(last_content)
28
+ # Get raw response without auto-executing tools
29
+ response = raw_complete(ruby_llm_chat)
34
30
 
35
31
  parse_response(response)
36
32
  rescue Faraday::BadRequestError => e
@@ -41,6 +37,22 @@ module Girb
41
37
 
42
38
  private
43
39
 
40
+ # Call provider.complete() directly, bypassing RubyLLM's handle_tool_calls.
41
+ # This returns the raw response so the caller can handle tool execution.
42
+ def raw_complete(chat)
43
+ provider = chat.instance_variable_get(:@provider)
44
+ provider.complete(
45
+ chat.messages,
46
+ tools: chat.tools,
47
+ temperature: chat.instance_variable_get(:@temperature),
48
+ model: chat.model,
49
+ params: chat.params,
50
+ headers: chat.headers,
51
+ schema: chat.schema,
52
+ thinking: chat.instance_variable_get(:@thinking)
53
+ )
54
+ end
55
+
44
56
  def add_messages_to_chat(chat, messages)
45
57
  messages.each do |msg|
46
58
  case msg[:role]
@@ -49,13 +61,13 @@ module Girb
49
61
  when :assistant
50
62
  chat.add_message(role: :assistant, content: msg[:content])
51
63
  when :tool_call
52
- # Add as assistant message with tool_calls
64
+ id = msg[:id] || "call_#{SecureRandom.hex(12)}"
53
65
  chat.add_message(
54
66
  role: :assistant,
55
67
  content: nil,
56
68
  tool_calls: {
57
- msg[:name] => ::RubyLLM::ToolCall.new(
58
- id: msg[:id] || "call_#{SecureRandom.hex(12)}",
69
+ id => ::RubyLLM::ToolCall.new(
70
+ id: id,
59
71
  name: msg[:name],
60
72
  arguments: msg[:args]
61
73
  )
@@ -71,15 +83,6 @@ module Girb
71
83
  end
72
84
  end
73
85
 
74
- def extract_content(message)
75
- case message[:role]
76
- when :user, :assistant
77
- message[:content]
78
- else
79
- message[:content].to_s
80
- end
81
- end
82
-
83
86
  def build_tools(tools)
84
87
  return [] if tools.nil? || tools.empty?
85
88
 
@@ -93,11 +96,10 @@ module Girb
93
96
  tool_description = tool_def[:description]
94
97
  tool_parameters = tool_def[:parameters] || {}
95
98
 
96
- # Create a dynamic tool class
99
+ # Create a dynamic tool class for schema generation only
97
100
  tool_class = Class.new(::RubyLLM::Tool) do
98
101
  description tool_description
99
102
 
100
- # Define parameters
101
103
  properties = tool_parameters[:properties] || {}
102
104
  required_params = tool_parameters[:required] || []
103
105
 
@@ -108,11 +110,12 @@ module Girb
108
110
  required: required_params.include?(prop_name.to_s) || required_params.include?(prop_name)
109
111
  end
110
112
 
111
- # Override name method to return the custom name
112
113
  define_method(:name) { tool_name }
113
114
 
114
- # Execute method (never actually called, just for tool definition)
115
- define_method(:execute) { |**_args| "" }
115
+ # Not used; tool execution is handled by the caller's tool loop
116
+ define_method(:execute) do |**_args|
117
+ raise "Tool execution should be handled by the caller, not by the provider"
118
+ end
116
119
  end
117
120
 
118
121
  tool_class.new
@@ -132,6 +135,7 @@ module Girb
132
135
 
133
136
  response.tool_calls.map do |_id, tool_call|
134
137
  {
138
+ id: tool_call.id,
135
139
  name: tool_call.name.to_s,
136
140
  args: tool_call.arguments || {}
137
141
  }
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module GirbRubyLlm
4
- VERSION = "0.1.1"
4
+ VERSION = "0.2.0"
5
5
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: girb-ruby_llm
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.1
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - rira100000000