soka 0.0.1.beta2 → 0.0.2
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- checksums.yaml +4 -4
- data/CHANGELOG.md +18 -0
- data/README.md +2 -1
- data/lib/soka/agent_tool.rb +1 -1
- data/lib/soka/engines/concerns/response_processor.rb +1 -4
- data/lib/soka/llms/anthropic.rb +27 -32
- data/lib/soka/llms/openai.rb +0 -10
- data/lib/soka/version.rb +1 -1
- data/lib/soka.rb +0 -3
- metadata +3 -47
- data/lib/soka/llms/concerns/response_parser.rb +0 -47
- data/lib/soka/llms/concerns/streaming_handler.rb +0 -78
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 11429e43f0b571946c8e67e96b18d137b3f5228298ad28b97e0fbd20c798df62
+  data.tar.gz: 870c1fbbda1a2d2031e7210043d54d9cad3e0fcafc699c1c5d36fb99ec2bcae4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 849434f90de4f1fdadc31cf9df1652731bf85cfbb9e097098e644c3b779b6e7efe53bea79c98208b3333b3b654c7fab9ebc5ccc3a0540aedc65b2f1ee7d10676
+  data.tar.gz: b1f88b544efdfbc9f2883c9fee969132eb4b9efa17ccc6f38aca4940892b1b5558b7c872e890c5b8fe06178fb0aaeac2a30d023e72095fa9768482909404fcce
```
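These checksums cover the two archives packed inside the released `.gem` file (`metadata.gz` and `data.tar.gz`). A minimal verification sketch, assuming `soka-0.0.2.gem` has been downloaded locally (the filename and download step are assumptions, not part of this diff):

```ruby
# A .gem file is a tar archive containing metadata.gz, data.tar.gz and
# checksums.yaml. Unpack the data archive first, e.g.:
#   tar -xf soka-0.0.2.gem data.tar.gz
require 'digest'

expected = '870c1fbbda1a2d2031e7210043d54d9cad3e0fcafc699c1c5d36fb99ec2bcae4'
actual   = Digest::SHA256.file('data.tar.gz').hexdigest

puts(actual == expected ? 'data.tar.gz checksum OK' : 'checksum MISMATCH')
```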
data/CHANGELOG.md
CHANGED
```diff
@@ -7,6 +7,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.0.2] - 2025-08-01
+
+### Features
+- feat(agent_tool): add support for 'Object' type mapping
+
+### Code Refactoring
+- refactor: remove dry-rb dependencies
+- refactor: change tool execution method from call to execute
+
+### Chores
+- chore: update gemspec description to include 'Gemini AI Studio'
+- chore(README): update footer to include creator information
+
+## [0.0.1] - 2025-07-29
+
+### Code Refactoring
+- refactor(llms): remove streaming support and response parser module (陳均均, 2025-07-29)
+
 ## [0.0.1.beta2] - 2025-07-29
 
 ### Chores
```
data/README.md
CHANGED
data/lib/soka/agent_tool.rb
CHANGED
```diff
@@ -123,7 +123,7 @@ module Soka
       'Float' => 'number', 'Numeric' => 'number',
       'TrueClass' => 'boolean', 'FalseClass' => 'boolean', 'Boolean' => 'boolean',
       'Array' => 'array',
-      'Hash' => 'object'
+      'Hash' => 'object', 'Object' => 'object'
     }.freeze
   end
 end
```
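With the new entry, a tool parameter declared with the Ruby type `Object` now maps to the JSON Schema type `object`, the same as `Hash`. A minimal sketch of the lookup's effect; the constant name `TYPE_MAPPING` and the string fallback are assumptions, since the hunk shows only the hash body:

```ruby
# Assumed constant name; the hunk above shows only the hash entries.
TYPE_MAPPING = {
  'String' => 'string', 'Integer' => 'integer',
  'Float' => 'number', 'Numeric' => 'number',
  'TrueClass' => 'boolean', 'FalseClass' => 'boolean', 'Boolean' => 'boolean',
  'Array' => 'array',
  'Hash' => 'object', 'Object' => 'object'
}.freeze

# Before this change 'Object' had no entry, so a schema builder doing a
# lookup like this would have fallen through to its default:
TYPE_MAPPING.fetch('Object', 'string')  # => "object" (previously "string")
TYPE_MAPPING.fetch('Hash', 'string')    # => "object" (unchanged)
```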
data/lib/soka/engines/concerns/response_processor.rb
CHANGED
```diff
@@ -75,10 +75,7 @@ module Soka
       tool = tools.find { |t| t.class.tool_name == tool_name }
       raise ToolError, "Tool '#{tool_name}' not found" unless tool
 
-      tool.call(**symbolize_keys(tool_input))
-    rescue StandardError => e
-      # Re-raise as ToolError to be caught by process_action
-      raise ToolError, "Error executing tool: #{e.message}"
+      tool.execute(**symbolize_keys(tool_input))
     end
 
     def symbolize_keys(hash)
```
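The engine now invokes `execute` on the tool instance, and the `rescue` wrapper is gone, so an error raised inside a tool propagates to the caller instead of being re-wrapped as `ToolError` here. A minimal sketch of a tool that satisfies the new call site; the `WeatherTool` class and its body are illustrative assumptions, not code from the gem:

```ruby
class WeatherTool < Soka::AgentTool
  # Invoked as tool.execute(**symbolize_keys(tool_input)), so the tool's
  # input keys arrive as keyword arguments.
  def execute(city:)
    "Sunny in #{city}" # a real tool would call an API or database here
  end
end

# tool.execute(**{ city: 'Taipei' })  # => "Sunny in Taipei"
```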
data/lib/soka/llms/anthropic.rb
CHANGED
```diff
@@ -4,14 +4,12 @@ module Soka
   module LLMs
     # Anthropic (Claude) LLM provider implementation
     class Anthropic < Base
-      include Concerns::ResponseParser
-
       ENV_KEY = 'ANTHROPIC_API_KEY'
 
       private
 
       def default_model
-        'claude-4-
+        'claude-sonnet-4-0'
       end
 
       def base_url
@@ -45,34 +43,6 @@ module Soka
         handle_error(e)
       end
 
-      def supports_streaming?
-        true
-      end
-
-      def streaming_chat(messages, **params, &)
-        request_params = build_streaming_params(messages, params)
-        execute_streaming_request(request_params, &)
-      rescue Faraday::Error => e
-        handle_error(e)
-      end
-
-      def build_streaming_params(messages, params)
-        request_params = build_request_params(messages, params)
-        request_params[:stream] = true
-        request_params
-      end
-
-      def execute_streaming_request(request_params, &)
-        connection.post('/v1/messages') do |req|
-          req.headers['x-api-key'] = api_key
-          req.headers['anthropic-version'] = options[:anthropic_version]
-          req.body = request_params
-          req.options.on_data = proc do |chunk, _overall_received_bytes|
-            process_stream_chunk(chunk, &)
-          end
-        end
-      end
-
       private
 
       def build_request_params(messages, params)
@@ -118,7 +88,32 @@ module Soka
         end
       end
 
-
+      def parse_response(response)
+        body = response.body
+        validate_response_status(response.status, body)
+        build_result_from_response(body)
+      end
+
+      def validate_response_status(status, body)
+        return if status == 200
+
+        error_message = body.dig('error', 'message') || 'Unknown error'
+        raise LLMError, "Anthropic API error: #{error_message}"
+      end
+
+      def build_result_from_response(body)
+        content = body.dig('content', 0, 'text')
+        raise LLMError, 'No content in response' unless content
+
+        Result.new(
+          model: body['model'],
+          content: content,
+          input_tokens: body.dig('usage', 'input_tokens'),
+          output_tokens: body.dig('usage', 'output_tokens'),
+          finish_reason: body['stop_reason'],
+          raw_response: body
+        )
+      end
     end
   end
 end
```
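The parsing that previously lived in `Concerns::ResponseParser` (and returned only the text content) is now inlined and returns a full `Result`. A minimal sketch of the field mapping performed by `build_result_from_response`, using an abbreviated Anthropic-style response body with illustrative values:

```ruby
# Sample values only; shapes follow the dig calls in the hunk above.
body = {
  'model'       => 'claude-sonnet-4-0',
  'content'     => [{ 'type' => 'text', 'text' => 'Hello!' }],
  'usage'       => { 'input_tokens' => 12, 'output_tokens' => 5 },
  'stop_reason' => 'end_turn'
}

body.dig('content', 0, 'text')      # => "Hello!"   (Result#content)
body.dig('usage', 'input_tokens')   # => 12         (Result#input_tokens)
body.dig('usage', 'output_tokens')  # => 5          (Result#output_tokens)
body['stop_reason']                 # => "end_turn" (Result#finish_reason)
```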
data/lib/soka/llms/openai.rb
CHANGED
```diff
@@ -4,8 +4,6 @@ module Soka
   module LLMs
     # OpenAI (GPT) LLM provider implementation
     class OpenAI < Base
-      include Concerns::StreamingHandler
-
       ENV_KEY = 'OPENAI_API_KEY'
 
       private
@@ -44,12 +42,6 @@ module Soka
         handle_error(e)
       end
 
-      def supports_streaming?
-        true
-      end
-
-      # Streaming methods are in StreamingHandler module
-
       private
 
       def build_request_params(messages, params)
@@ -90,8 +82,6 @@ module Soka
          raw_response: body
        )
       end
-
-      # Stream processing methods are in StreamingHandler module
     end
   end
 end
```
data/lib/soka/version.rb
CHANGED
data/lib/soka.rb
CHANGED
metadata
CHANGED
```diff
@@ -1,56 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: soka
 version: !ruby/object:Gem::Version
-  version: 0.0.1.beta2
+  version: 0.0.2
 platform: ruby
 authors:
 - jiunjiun
-bindir:
+bindir: bin
 cert_chain: []
 date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
-- !ruby/object:Gem::Dependency
-  name: dry-struct
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.6'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.6'
-- !ruby/object:Gem::Dependency
-  name: dry-types
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.7'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.7'
-- !ruby/object:Gem::Dependency
-  name: dry-validation
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.10'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.10'
 - !ruby/object:Gem::Dependency
   name: faraday
   requirement: !ruby/object:Gem::Requirement
@@ -80,7 +38,7 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '2.6'
 description: Soka is a Ruby framework for building AI agents using the ReAct (Reasoning
-  and Acting) pattern. It supports multiple AI providers including Gemini Studio,
+  and Acting) pattern. It supports multiple AI providers including Gemini AI Studio,
   OpenAI, and Anthropic.
 email:
 - imjiunjiun@gmail.com
@@ -121,8 +79,6 @@ files:
 - lib/soka/llm.rb
 - lib/soka/llms/anthropic.rb
 - lib/soka/llms/base.rb
-- lib/soka/llms/concerns/response_parser.rb
-- lib/soka/llms/concerns/streaming_handler.rb
 - lib/soka/llms/gemini.rb
 - lib/soka/llms/openai.rb
 - lib/soka/memory.rb
```
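After upgrading, the slimmer dependency list can be confirmed from an installed copy. A minimal sketch, assuming soka 0.0.2 is installed locally:

```ruby
require 'rubygems'

spec = Gem::Specification.find_by_name('soka')
puts spec.runtime_dependencies.map(&:name).sort
# Per this diff, dry-struct, dry-types and dry-validation should no
# longer appear; faraday remains.
```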
data/lib/soka/llms/concerns/response_parser.rb
DELETED
```diff
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-module Soka
-  module LLMs
-    module Concerns
-      # Module for parsing Anthropic API responses
-      module ResponseParser
-        private
-
-        # Parse API response
-        # @param response [Faraday::Response] The HTTP response
-        # @return [String] The parsed content
-        # @raise [LLMError] If response indicates an error
-        def parse_response(response)
-          handle_error(response) unless response.success?
-
-          data = JSON.parse(response.body)
-          extract_content(data)
-        end
-
-        # Extract content from response data
-        # @param data [Hash] The parsed response data
-        # @return [String] The extracted content
-        # @raise [LLMError] If content is missing
-        def extract_content(data)
-          content = data.dig('content', 0, 'text')
-          raise LLMError, 'No content in response' unless content
-
-          content
-        end
-
-        # Handle API errors
-        # @param response [Faraday::Response] The HTTP response
-        # @raise [LLMError] Always raises with error details
-        def handle_error(response)
-          error_data = begin
-            JSON.parse(response.body)
-          rescue StandardError
-            {}
-          end
-          error_message = error_data.dig('error', 'message') || "HTTP #{response.status}"
-          raise LLMError, "Anthropic API error: #{error_message}"
-        end
-      end
-    end
-  end
-end
```
data/lib/soka/llms/concerns/streaming_handler.rb
DELETED
```diff
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-module Soka
-  module LLMs
-    module Concerns
-      # Module for handling streaming responses from OpenAI
-      module StreamingHandler
-        private
-
-        # Stream chat completion
-        # @param messages [Array<Hash>] The messages to send
-        # @param params [Hash] Additional parameters
-        # @yield [String] Yields each chunk of the response
-        # @return [String] The complete response
-        def stream_chat(messages, **params, &)
-          return regular_chat(messages, **params) unless block_given?
-
-          complete_response = +''
-          request_params = build_request_params(messages, **params, stream: true)
-
-          response = connection.post('/v1/chat/completions') do |req|
-            req.body = request_params.to_json
-          end
-
-          handle_streaming_response(response, complete_response, &)
-        end
-
-        # Handle streaming response
-        # @param response [Faraday::Response] The HTTP response
-        # @param complete_response [String] Buffer for complete response
-        # @yield [String] Yields each chunk
-        # @return [String] The complete response
-        def handle_streaming_response(response, complete_response)
-          response.body.each_line do |line|
-            chunk = process_streaming_line(line)
-            next unless chunk
-
-            complete_response << chunk
-            yield chunk
-          end
-          complete_response
-        end
-
-        # Process a single streaming line
-        # @param line [String] The line to process
-        # @return [String, nil] The parsed chunk or nil
-        def process_streaming_line(line)
-          return nil if line.strip.empty? || !line.start_with?('data: ')
-
-          data = line[6..].strip
-          return nil if data == '[DONE]'
-
-          parse_streaming_chunk(data)
-        end
-
-        # Parse a streaming chunk
-        # @param data [String] The chunk data
-        # @return [String, nil] The parsed content
-        def parse_streaming_chunk(data)
-          parsed = JSON.parse(data)
-          parsed.dig('choices', 0, 'delta', 'content')
-        rescue JSON::ParserError
-          nil
-        end
-
-        # Perform regular (non-streaming) chat
-        # @param messages [Array<Hash>] The messages
-        # @param params [Hash] Additional parameters
-        # @return [String] The response content
-        def regular_chat(messages, **params)
-          request_params = build_request_params(messages, **params)
-          response = connection.post('/v1/chat/completions', request_params.to_json)
-          parse_response(response)
-        end
-      end
-    end
-  end
-end
```
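Since 0.0.2 removes streaming from both providers, callers that still need token-by-token output have to drop to the HTTP layer themselves. A minimal sketch modeled on the removed handler, using Faraday's `on_data` callback against the OpenAI endpoint; the model name is illustrative, and the configured adapter must support streaming response bodies:

```ruby
require 'faraday'
require 'json'

conn = Faraday.new(url: 'https://api.openai.com')

conn.post('/v1/chat/completions') do |req|
  req.headers['Authorization'] = "Bearer #{ENV.fetch('OPENAI_API_KEY')}"
  req.headers['Content-Type']  = 'application/json'
  req.body = {
    model: 'gpt-4o-mini', stream: true,
    messages: [{ role: 'user', content: 'Hi' }]
  }.to_json
  # Each chunk is a batch of server-sent events, parsed line by line
  # exactly as the removed process_streaming_line did.
  req.options.on_data = proc do |chunk, _received_bytes|
    chunk.each_line do |line|
      next unless line.start_with?('data: ')

      data = line[6..].strip
      next if data == '[DONE]'

      begin
        print JSON.parse(data).dig('choices', 0, 'delta', 'content')
      rescue JSON::ParserError
        nil # partial SSE frame; skip
      end
    end
  end
end
```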