llm_providers 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: fd68399074379aa3cca94d766217abe7270698fa764b1656d1fc585977c47e49
4
+ data.tar.gz: 6b82fe10d4ae4bb414317c1fbbca2252976032032fcc846c2864154cb54aee62
5
+ SHA512:
6
+ metadata.gz: 77759c7d037da031e31db5ce76456279d5c1ed104b7df60d19289ceb6cba0ad26b7de1aa80f1e78b1afcc18b41d780ffd34718cb318b6cf905a1947adde806e0
7
+ data.tar.gz: 598b707ba1f7160b7b6f03b3d291a7d2bf0d726afe55d6fd86b60747c8ebc440438c5e5753597eb36016b0012552404aedc98f3cc3dc28e5ba93465c78a8b73a
data/CHANGELOG.md ADDED
@@ -0,0 +1,10 @@
1
+ # Changelog
2
+
3
+ ## [0.1.0] - 2026-02-13
4
+
5
+ - Initial release
6
+ - Support for OpenAI, Anthropic, Google, and OpenRouter providers
7
+ - Streaming responses
8
+ - Tool calling support
9
+ - Token usage tracking
10
+ - Configurable logging
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2026 kaba
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in
13
+ all copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
+ THE SOFTWARE.
data/README.ja.md ADDED
@@ -0,0 +1,153 @@
1
+ # LlmProviders
2
+
3
+ [![Gem Version](https://badge.fury.io/rb/llm_providers.svg)](https://rubygems.org/gems/llm_providers)
4
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](LICENSE.txt)
5
+
6
+ 複数の LLM プロバイダーに対応した軽量な統一インターフェース。依存は `faraday` のみ — ActiveSupport 不要。
7
+
8
+ [English README](README.md)
9
+
10
+ ## 特徴
11
+
12
+ - **軽量** — 依存は `faraday` のみ、ActiveSupport 不要
13
+ - **統一インターフェース** — 全プロバイダーで同じ API
14
+ - **ストリーミング** — ブロック構文でリアルタイムトークンストリーミング
15
+ - **ツール呼び出し** — プロバイダー間で一貫したツール/関数呼び出し
16
+ - **トークン追跡** — 全レスポンスに使用量統計(入力、出力、キャッシュ)
17
+
18
+ ## 対応プロバイダー
19
+
20
+ | プロバイダー | 環境変数 | デフォルトモデル |
21
+ |------------|---------|---------------|
22
+ | `anthropic` | `ANTHROPIC_API_KEY` | `claude-sonnet-4-5-20250929` |
23
+ | `openai` | `OPENAI_API_KEY` | `gpt-5-mini` |
24
+ | `google` | `GOOGLE_API_KEY` | `gemini-2.5-flash` |
25
+ | `openrouter` *(実験的)* | `OPENROUTER_API_KEY` | `anthropic/claude-sonnet-4.5` |
26
+
27
+ ## インストール
28
+
29
+ Gemfile に追加:
30
+
31
+ ```ruby
32
+ gem "llm_providers"
33
+ ```
34
+
35
+ ## クイックスタート
36
+
37
+ ```ruby
38
+ require "llm_providers"
39
+
40
+ provider = LlmProviders::Providers.build(:anthropic)
41
+
42
+ # 同期
43
+ result = provider.chat(
44
+ messages: [{ role: "user", content: "こんにちは!" }],
45
+ system: "あなたは親切なアシスタントです。"
46
+ )
47
+ puts result[:content]
48
+
49
+ # ストリーミング
50
+ provider.chat(messages: [{ role: "user", content: "こんにちは!" }]) do |chunk|
51
+ print chunk[:content]
52
+ end
53
+ ```
54
+
55
+ ## 使い方
56
+
57
+ ### 設定
58
+
59
+ ```ruby
60
+ LlmProviders.configure do |config|
61
+ config.logger = Rails.logger # 任意の Logger インスタンス
62
+ end
63
+ ```
64
+
65
+ ### プロバイダーオプション
66
+
67
+ ```ruby
68
+ provider = LlmProviders::Providers.build(
69
+ :openai,
70
+ model: "gpt-4.1",
71
+ temperature: 0.7,
72
+ max_tokens: 4096
73
+ )
74
+ ```
75
+
76
+ ### ツール呼び出し
77
+
78
+ ```ruby
79
+ tools = [
80
+ {
81
+ name: "get_weather",
82
+ description: "現在の天気を取得",
83
+ parameters: {
84
+ type: "object",
85
+ properties: {
86
+ location: { type: "string", description: "都市名" }
87
+ },
88
+ required: ["location"]
89
+ }
90
+ }
91
+ ]
92
+
93
+ result = provider.chat(
94
+ messages: [{ role: "user", content: "東京の天気は?" }],
95
+ tools: tools
96
+ )
97
+
98
+ result[:tool_calls].each do |tc|
99
+ puts "#{tc[:name]}: #{tc[:input]}"
100
+ end
101
+ ```
102
+
103
+ ### レスポンス形式
104
+
105
+ `chat` は常に以下のハッシュを返します:
106
+
107
+ ```ruby
108
+ {
109
+ content: "レスポンステキスト",
110
+ tool_calls: [
111
+ { id: "...", name: "...", input: {...} }
112
+ ],
113
+ usage: {
114
+ input: 100, # 入力トークン数
115
+ output: 50, # 出力トークン数
116
+ cached_input: 80 # キャッシュされた入力トークン数(Anthropicのみ)
117
+ },
118
+ latency_ms: 1234,
119
+ raw_response: {...}
120
+ }
121
+ ```
122
+
123
+ ### エラーハンドリング
124
+
125
+ ```ruby
126
+ begin
127
+ result = provider.chat(messages: messages)
128
+ rescue LlmProviders::ProviderError => e
129
+ puts "エラー: #{e.message}"
130
+ puts "コード: #{e.code}" # 例: "anthropic_error", "openai_error"
131
+ end
132
+ ```
133
+
134
+ ## サンプル
135
+
136
+ ```bash
137
+ # 対話チャット
138
+ ANTHROPIC_API_KEY=your-key ruby examples/simple_chat.rb
139
+
140
+ # ワンショット
141
+ ANTHROPIC_API_KEY=your-key ruby examples/one_shot.rb "Hello!"
142
+
143
+ # ツール呼び出し
144
+ ANTHROPIC_API_KEY=your-key ruby examples/with_tools.rb
145
+
146
+ # 他のプロバイダー
147
+ OPENAI_API_KEY=your-key ruby examples/simple_chat.rb openai
148
+ GOOGLE_API_KEY=your-key ruby examples/simple_chat.rb google
149
+ ```
150
+
151
+ ## ライセンス
152
+
153
+ MIT
data/README.md ADDED
@@ -0,0 +1,153 @@
1
+ # LlmProviders
2
+
3
+ [![Gem Version](https://badge.fury.io/rb/llm_providers.svg)](https://rubygems.org/gems/llm_providers)
4
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](LICENSE.txt)
5
+
6
+ A lightweight, unified interface for multiple LLM providers. Only depends on `faraday` — no ActiveSupport required.
7
+
8
+ [日本語版 README](README.ja.md)
9
+
10
+ ## Features
11
+
12
+ - **Lightweight** — Single dependency (`faraday`), no ActiveSupport
13
+ - **Unified interface** — Same API for all providers
14
+ - **Streaming** — Real-time token streaming with block syntax
15
+ - **Tool calling** — Consistent tool/function calling across providers
16
+ - **Token tracking** — Usage stats (input, output, cached) in every response
17
+
18
+ ## Supported Providers
19
+
20
+ | Provider | ENV Variable | Default Model |
21
+ |----------|-------------|---------------|
22
+ | `anthropic` | `ANTHROPIC_API_KEY` | `claude-sonnet-4-5-20250929` |
23
+ | `openai` | `OPENAI_API_KEY` | `gpt-5-mini` |
24
+ | `google` | `GOOGLE_API_KEY` | `gemini-2.5-flash` |
25
+ | `openrouter` *(experimental)* | `OPENROUTER_API_KEY` | `anthropic/claude-sonnet-4.5` |
26
+
27
+ ## Installation
28
+
29
+ Add to your Gemfile:
30
+
31
+ ```ruby
32
+ gem "llm_providers"
33
+ ```
34
+
35
+ ## Quick Start
36
+
37
+ ```ruby
38
+ require "llm_providers"
39
+
40
+ provider = LlmProviders::Providers.build(:anthropic)
41
+
42
+ # Synchronous
43
+ result = provider.chat(
44
+ messages: [{ role: "user", content: "Hello!" }],
45
+ system: "You are a helpful assistant."
46
+ )
47
+ puts result[:content]
48
+
49
+ # Streaming
50
+ provider.chat(messages: [{ role: "user", content: "Hello!" }]) do |chunk|
51
+ print chunk[:content]
52
+ end
53
+ ```
54
+
55
+ ## Usage
56
+
57
+ ### Configuration
58
+
59
+ ```ruby
60
+ LlmProviders.configure do |config|
61
+ config.logger = Rails.logger # or any Logger instance
62
+ end
63
+ ```
64
+
65
+ ### Provider Options
66
+
67
+ ```ruby
68
+ provider = LlmProviders::Providers.build(
69
+ :openai,
70
+ model: "gpt-4.1",
71
+ temperature: 0.7,
72
+ max_tokens: 4096
73
+ )
74
+ ```
75
+
76
+ ### Tool Calling
77
+
78
+ ```ruby
79
+ tools = [
80
+ {
81
+ name: "get_weather",
82
+ description: "Get the current weather",
83
+ parameters: {
84
+ type: "object",
85
+ properties: {
86
+ location: { type: "string", description: "City name" }
87
+ },
88
+ required: ["location"]
89
+ }
90
+ }
91
+ ]
92
+
93
+ result = provider.chat(
94
+ messages: [{ role: "user", content: "What's the weather in Tokyo?" }],
95
+ tools: tools
96
+ )
97
+
98
+ result[:tool_calls].each do |tc|
99
+ puts "#{tc[:name]}: #{tc[:input]}"
100
+ end
101
+ ```
102
+
103
+ ### Response Format
104
+
105
+ Every `chat` call returns a hash with:
106
+
107
+ ```ruby
108
+ {
109
+ content: "Response text",
110
+ tool_calls: [
111
+ { id: "...", name: "...", input: {...} }
112
+ ],
113
+ usage: {
114
+ input: 100, # Input tokens
115
+ output: 50, # Output tokens
116
+ cached_input: 80 # Cached input tokens (Anthropic only)
117
+ },
118
+ latency_ms: 1234,
119
+ raw_response: {...}
120
+ }
121
+ ```
122
+
123
+ ### Error Handling
124
+
125
+ ```ruby
126
+ begin
127
+ result = provider.chat(messages: messages)
128
+ rescue LlmProviders::ProviderError => e
129
+ puts "Error: #{e.message}"
130
+ puts "Code: #{e.code}" # e.g., "anthropic_error", "openai_error"
131
+ end
132
+ ```
133
+
134
+ ## Examples
135
+
136
+ ```bash
137
+ # Interactive chat
138
+ ANTHROPIC_API_KEY=your-key ruby examples/simple_chat.rb
139
+
140
+ # One-shot
141
+ ANTHROPIC_API_KEY=your-key ruby examples/one_shot.rb "Hello!"
142
+
143
+ # With tools
144
+ ANTHROPIC_API_KEY=your-key ruby examples/with_tools.rb
145
+
146
+ # Other providers
147
+ OPENAI_API_KEY=your-key ruby examples/simple_chat.rb openai
148
+ GOOGLE_API_KEY=your-key ruby examples/simple_chat.rb google
149
+ ```
150
+
151
+ ## License
152
+
153
+ MIT
# frozen_string_literal: true

require "logger"

module LlmProviders
  # Library-wide settings. Currently holds only a logger, which defaults to
  # writing on STDOUT.
  class Configuration
    attr_accessor :logger

    def initialize
      @logger = Logger.new($stdout)
    end
  end

  # Lazily-built singleton Configuration shared by the whole library.
  def self.configuration
    @configuration ||= Configuration.new
  end

  # Block-style setup:
  #   LlmProviders.configure { |config| config.logger = Rails.logger }
  def self.configure
    yield(configuration)
  end

  # Shortcut to the currently configured logger.
  def self.logger
    configuration.logger
  end
end
# frozen_string_literal: true

module LlmProviders
  # Raised for any provider-side failure. Carries an optional machine-readable
  # +code+ (e.g. "anthropic_error") alongside the human-readable message.
  class ProviderError < StandardError
    attr_reader :code

    # message - human-readable description, becomes StandardError#message.
    # code:   - optional short error identifier; nil when not classified.
    def initialize(message, code: nil)
      super(message)
      @code = code
    end
  end
end
# frozen_string_literal: true

module LlmProviders
  module Providers
    # Adapter for the Anthropic Messages API.
    #
    # Supports synchronous calls, SSE streaming (when a block is given to
    # #chat), tool use, and prompt caching: the system prompt and the final
    # tool definition are tagged with an ephemeral cache_control block.
    class Anthropic < Base
      API_URL = "https://api.anthropic.com/v1/messages"
      API_VERSION = "2023-06-01"

      # Sends a chat request.
      #
      # messages:: Array of { role:, content:, ... } hashes (roles seen here:
      #            "user", "assistant", "tool").
      # system::   Optional system prompt String.
      # tools::    Optional Array of { name:, description:, parameters: } hashes.
      #
      # When a block is given the response streams and the block receives
      # { content: "..." } chunks as tokens arrive; otherwise the call blocks
      # until completion. Both paths return a hash with :content, :tool_calls,
      # :usage, :latency_ms and :raw_response.
      #
      # Raises ProviderError (code: "anthropic_error") on API failure.
      #
      # Fix: removed a stray bare `Time.now` whose value was discarded — each
      # response path records its own started_at for latency measurement.
      def chat(messages:, system: nil, tools: nil, &block)
        payload = build_payload(messages, system, tools)

        if block_given?
          stream_response(payload, &block)
        else
          sync_response(payload)
        end
      end

      protected

      def default_model
        "claude-sonnet-4-5-20250929"
      end

      # Raises KeyError when the variable is unset, surfacing misconfiguration
      # at call time rather than as an opaque HTTP 401.
      def api_key
        ENV.fetch("ANTHROPIC_API_KEY")
      end

      private

      # Builds the Messages API request body from the unified arguments.
      def build_payload(messages, system, tools)
        payload = {
          model: @model,
          max_tokens: @max_tokens,
          messages: format_messages(messages)
        }

        if system && !system.empty?
          # cache_control marks the system prompt for Anthropic prompt caching.
          payload[:system] = [
            {
              type: "text",
              text: system,
              cache_control: { type: "ephemeral" }
            }
          ]
        end

        payload[:temperature] = @temperature if @temperature

        if tools && !tools.empty?
          formatted_tools = format_tools(tools)
          # Only the last tool carries cache_control: Anthropic caches the
          # request prefix up to and including the marked block.
          payload[:tools] = formatted_tools.map.with_index do |tool, i|
            if i == formatted_tools.size - 1
              tool.merge(cache_control: { type: "ephemeral" })
            else
              tool
            end
          end
        end

        payload
      end

      # Converts unified-format messages to Anthropic's content-block format.
      # "tool" messages become tool_result blocks folded into the preceding
      # user turn when possible (tool results must live inside a user
      # message); assistant tool calls become tool_use content blocks.
      def format_messages(messages)
        result = []

        messages.each do |msg|
          case msg[:role]
          when "tool"
            tool_result = {
              type: "tool_result",
              tool_use_id: msg[:tool_call_id],
              content: msg[:content]
            }

            # Merge consecutive tool results into one user turn.
            if result.last && result.last[:role] == "user" && result.last[:content].is_a?(Array)
              result.last[:content] << tool_result
            else
              result << {
                role: "user",
                content: [tool_result]
              }
            end
          when "assistant"
            if msg[:tool_calls] && !msg[:tool_calls].empty?
              content = []
              content << { type: "text", text: msg[:content] } if msg[:content] && !msg[:content].empty?
              msg[:tool_calls].each do |tc|
                # Tool calls may arrive with symbol or string keys; normalize.
                tc = stringify_keys(tc)
                content << {
                  type: "tool_use",
                  id: tc["id"],
                  name: tc["name"],
                  input: tc["input"]
                }
              end
              result << {
                role: "assistant",
                content: content
              }
            else
              result << {
                role: "assistant",
                content: msg[:content]
              }
            end
          else
            result << {
              role: msg[:role],
              content: msg[:content]
            }
          end
        end

        result
      end

      # Maps the unified tool schema (:parameters) onto Anthropic's
      # :input_schema field.
      def format_tools(tools)
        tools.map do |tool|
          {
            name: tool[:name],
            description: tool[:description],
            input_schema: tool[:parameters]
          }
        end
      end

      # Streams the response over SSE, invoking +block+ with { content: }
      # chunks, and returns the same result hash as #sync_response.
      # Uses a dedicated Faraday connection (not the memoized JSON client)
      # because on_data needs the raw SSE byte stream.
      def stream_response(payload, &block)
        payload[:stream] = true
        started_at = Time.now

        full_content = ""
        tool_calls = []
        usage = {}

        conn = Faraday.new do |f|
          f.options.open_timeout = 10
          # Generous read timeout: long generations can stay open for minutes.
          f.options.read_timeout = 300
          f.options.write_timeout = 30
          f.adapter Faraday.default_adapter
        end

        response = conn.post(API_URL) do |req|
          req.headers["Content-Type"] = "application/json"
          req.headers["x-api-key"] = api_key
          req.headers["anthropic-version"] = API_VERSION
          req.body = payload.to_json
          req.options.on_data = proc do |chunk, _|
            process_stream_chunk(chunk, full_content, tool_calls) do |parsed|
              if parsed[:content]
                full_content += parsed[:content]
                block.call(content: parsed[:content])
              end
              usage = parsed[:usage] if parsed[:usage]
            end
          end
        end

        unless response.success?
          # Body may be a Hash, a raw String, or unreadable after streaming;
          # extract a message best-effort.
          error_message = begin
            if response.body.is_a?(Hash)
              response.body["error"]&.dig("message")
            else
              response.body.to_s
            end
          rescue StandardError
            response.body.to_s
          end
          raise ProviderError.new(
            (error_message && !error_message.empty? ? error_message : nil) || "API error",
            code: "anthropic_error"
          )
        end

        {
          content: full_content,
          tool_calls: tool_calls,
          usage: usage,
          latency_ms: ((Time.now - started_at) * 1000).to_i,
          raw_response: { content: full_content, tool_calls: tool_calls }
        }
      end

      # Parses SSE lines from one network chunk and yields parsed events:
      # { content: } for text deltas and { usage: } for usage updates.
      # Tool-use blocks are accumulated into +tool_calls+ in place, with
      # streamed JSON fragments buffered under :input_json until the block
      # closes. (+_full_content+ is unused; accumulation happens in the
      # caller's yield block.)
      def process_stream_chunk(chunk, _full_content, tool_calls)
        chunk.each_line do |line|
          next unless line.start_with?("data: ")

          data = line.sub("data: ", "").strip
          next if data == "[DONE]"

          begin
            event = JSON.parse(data)

            case event["type"]
            when "content_block_delta"
              if event.dig("delta", "type") == "text_delta"
                yield(content: event.dig("delta", "text"))
              elsif event.dig("delta", "type") == "input_json_delta"
                if tool_calls.any?
                  tool_calls.last[:input_json] ||= ""
                  tool_calls.last[:input_json] += event.dig("delta", "partial_json").to_s
                end
              end
            when "content_block_start"
              if event.dig("content_block", "type") == "tool_use"
                tool_calls << {
                  id: event.dig("content_block", "id"),
                  name: event.dig("content_block", "name"),
                  input: {},
                  input_json: ""
                }
              end
            when "content_block_stop"
              if tool_calls.any? && tool_calls.last[:input_json] && !tool_calls.last[:input_json].empty?
                begin
                  tool_calls.last[:input] = JSON.parse(tool_calls.last[:input_json])
                rescue JSON::ParserError
                  # Keep empty on parse failure
                end
                tool_calls.last.delete(:input_json)
              end
            when "message_delta"
              if event["usage"]
                yield(usage: {
                  input: event.dig("usage", "input_tokens"),
                  output: event.dig("usage", "output_tokens"),
                  cached_input: event.dig("usage", "cache_read_input_tokens")
                })
              end
            end
          rescue JSON::ParserError
            # Skip invalid JSON
          end
        end
      end

      # Performs a blocking request via the shared JSON http_client and
      # normalizes the response into the unified result hash.
      def sync_response(payload)
        started_at = Time.now

        response = http_client.post(API_URL) do |req|
          req.headers["x-api-key"] = api_key
          req.headers["anthropic-version"] = API_VERSION
          req.body = payload
        end

        unless response.success?
          raise ProviderError.new(
            response.body["error"]&.dig("message") || "API error",
            code: "anthropic_error"
          )
        end

        body = response.body
        content = body["content"]&.find { |c| c["type"] == "text" }&.dig("text") || ""

        tool_calls = body["content"]&.select { |c| c["type"] == "tool_use" }&.map do |tc|
          { id: tc["id"], name: tc["name"], input: tc["input"] }
        end || []

        {
          content: content,
          tool_calls: tool_calls,
          usage: {
            input: body.dig("usage", "input_tokens"),
            output: body.dig("usage", "output_tokens"),
            cached_input: body.dig("usage", "cache_read_input_tokens")
          },
          latency_ms: ((Time.now - started_at) * 1000).to_i,
          raw_response: body
        }
      end
    end
  end
end
# frozen_string_literal: true

require "faraday"

module LlmProviders
  module Providers
    # Abstract superclass shared by every provider adapter. Subclasses must
    # implement #chat, #default_model and #api_key.
    class Base
      # Any option left nil falls back to a default: the provider's own
      # default model, temperature 0.7, and a 16,384-token output cap.
      def initialize(model: nil, temperature: nil, max_tokens: nil)
        @model = model || default_model
        @temperature = temperature || 0.7
        @max_tokens = max_tokens || 16_384
      end

      # Unified entry point; see concrete providers for the contract.
      def chat(messages:, system: nil, tools: nil, &block)
        raise NotImplementedError
      end

      protected

      # The model identifier used when none is supplied to #initialize.
      def default_model
        raise NotImplementedError
      end

      # The provider's API key (typically read from an ENV variable).
      def api_key
        raise NotImplementedError
      end

      # Returns a copy of +hash+ with every key converted to a String.
      def stringify_keys(hash)
        hash.transform_keys(&:to_s)
      end

      # Memoized Faraday connection with JSON request/response middleware,
      # suitable for non-streaming calls.
      def http_client
        @http_client ||= Faraday.new do |conn|
          conn.request :json
          conn.response :json
          conn.options.open_timeout = 10
          conn.options.read_timeout = 120
          conn.options.write_timeout = 30
          conn.adapter Faraday.default_adapter
        end
      end
    end
  end
end