soka 0.0.1.beta2 → 0.0.1
This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/lib/soka/llms/anthropic.rb +27 -32
- data/lib/soka/llms/openai.rb +0 -10
- data/lib/soka/version.rb +1 -1
- metadata +1 -3
- data/lib/soka/llms/concerns/response_parser.rb +0 -47
- data/lib/soka/llms/concerns/streaming_handler.rb +0 -78
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 76bb26ac543ab962b1b37acf5e5d081f974664dba0a888dc3237aa387fed6a88
+  data.tar.gz: e2ea18c046c68ad39ab56cf380594de750ac6c9ee847fe9102b3fe4d9b419d12
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e5c57a6ef0dd80d47ab47518a260a3e5690edb937a8dc3a3772d9152a91be3ba1a719a84836e6e82716223bd508a6dcad9967179937bdbffa9c6ce9de3900c2c
+  data.tar.gz: ac4f48b822f91935d9284f3a6681fb26daf3b5a4d870c417b05c10e072ae10dc117c3f57fa856e69241ddeace5841000be5340d6ae96977e30932a1c46cc7126
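The checksums above are digests of the two inner members of the .gem package: a .gem file is a plain tar archive whose entries include metadata.gz and data.tar.gz. A minimal verification sketch in Ruby, assuming the artifact has already been fetched and unpacked (for example with `gem fetch soka -v 0.0.1` followed by `tar -xf soka-0.0.1.gem`):

require 'digest'

# Compare the extracted inner files against the SHA256 values
# listed in checksums.yaml above (SHA512 works the same way).
%w[metadata.gz data.tar.gz].each do |name|
  puts "#{name}: #{Digest::SHA256.file(name).hexdigest}"
end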
data/CHANGELOG.md CHANGED
@@ -7,6 +7,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.0.1] - 2025-07-29
+
+### Code Refactoring
+- refactor(llms): remove streaming support and response parser module (陳均均, 2025-07-29)
+
 ## [0.0.1.beta2] - 2025-07-29
 
 ### Chores
data/lib/soka/llms/anthropic.rb CHANGED
@@ -4,14 +4,12 @@ module Soka
   module LLMs
     # Anthropic (Claude) LLM provider implementation
     class Anthropic < Base
-      include Concerns::ResponseParser
-
       ENV_KEY = 'ANTHROPIC_API_KEY'
 
       private
 
       def default_model
-        'claude-4-
+        'claude-sonnet-4-0'
       end
 
       def base_url
@@ -45,34 +43,6 @@ module Soka
         handle_error(e)
       end
 
-      def supports_streaming?
-        true
-      end
-
-      def streaming_chat(messages, **params, &)
-        request_params = build_streaming_params(messages, params)
-        execute_streaming_request(request_params, &)
-      rescue Faraday::Error => e
-        handle_error(e)
-      end
-
-      def build_streaming_params(messages, params)
-        request_params = build_request_params(messages, params)
-        request_params[:stream] = true
-        request_params
-      end
-
-      def execute_streaming_request(request_params, &)
-        connection.post('/v1/messages') do |req|
-          req.headers['x-api-key'] = api_key
-          req.headers['anthropic-version'] = options[:anthropic_version]
-          req.body = request_params
-          req.options.on_data = proc do |chunk, _overall_received_bytes|
-            process_stream_chunk(chunk, &)
-          end
-        end
-      end
-
       private
 
       def build_request_params(messages, params)
@@ -118,7 +88,32 @@ module Soka
         end
       end
 
-
+      def parse_response(response)
+        body = response.body
+        validate_response_status(response.status, body)
+        build_result_from_response(body)
+      end
+
+      def validate_response_status(status, body)
+        return if status == 200
+
+        error_message = body.dig('error', 'message') || 'Unknown error'
+        raise LLMError, "Anthropic API error: #{error_message}"
+      end
+
+      def build_result_from_response(body)
+        content = body.dig('content', 0, 'text')
+        raise LLMError, 'No content in response' unless content
+
+        Result.new(
+          model: body['model'],
+          content: content,
+          input_tokens: body.dig('usage', 'input_tokens'),
+          output_tokens: body.dig('usage', 'output_tokens'),
+          finish_reason: body['stop_reason'],
+          raw_response: body
+        )
+      end
     end
   end
 end
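The parsing that previously lived in Concerns::ResponseParser is now inlined above and returns a structured Result instead of a bare content string. A small illustration of the response-body shape the new parse_response expects; the key names come from the diff, while the sample values are invented:

body = {
  'model'       => 'claude-sonnet-4-0',
  'content'     => [{ 'type' => 'text', 'text' => 'Hello!' }],
  'usage'       => { 'input_tokens' => 12, 'output_tokens' => 5 },
  'stop_reason' => 'end_turn'
}

body.dig('content', 0, 'text')     # => "Hello!"
body.dig('usage', 'output_tokens') # => 5
body['stop_reason']                # => "end_turn"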
data/lib/soka/llms/openai.rb CHANGED
@@ -4,8 +4,6 @@ module Soka
   module LLMs
     # OpenAI (GPT) LLM provider implementation
     class OpenAI < Base
-      include Concerns::StreamingHandler
-
       ENV_KEY = 'OPENAI_API_KEY'
 
       private
@@ -44,12 +42,6 @@ module Soka
         handle_error(e)
       end
 
-      def supports_streaming?
-        true
-      end
-
-      # Streaming methods are in StreamingHandler module
-
       private
 
       def build_request_params(messages, params)
@@ -90,8 +82,6 @@ module Soka
           raw_response: body
         )
       end
-
-      # Stream processing methods are in StreamingHandler module
     end
   end
 end
data/lib/soka/version.rb CHANGED
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: soka
 version: !ruby/object:Gem::Version
-  version: 0.0.1.beta2
+  version: 0.0.1
 platform: ruby
 authors:
 - jiunjiun
@@ -121,8 +121,6 @@ files:
 - lib/soka/llm.rb
 - lib/soka/llms/anthropic.rb
 - lib/soka/llms/base.rb
-- lib/soka/llms/concerns/response_parser.rb
-- lib/soka/llms/concerns/streaming_handler.rb
 - lib/soka/llms/gemini.rb
 - lib/soka/llms/openai.rb
 - lib/soka/memory.rb
data/lib/soka/llms/concerns/response_parser.rb DELETED
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-module Soka
-  module LLMs
-    module Concerns
-      # Module for parsing Anthropic API responses
-      module ResponseParser
-        private
-
-        # Parse API response
-        # @param response [Faraday::Response] The HTTP response
-        # @return [String] The parsed content
-        # @raise [LLMError] If response indicates an error
-        def parse_response(response)
-          handle_error(response) unless response.success?
-
-          data = JSON.parse(response.body)
-          extract_content(data)
-        end
-
-        # Extract content from response data
-        # @param data [Hash] The parsed response data
-        # @return [String] The extracted content
-        # @raise [LLMError] If content is missing
-        def extract_content(data)
-          content = data.dig('content', 0, 'text')
-          raise LLMError, 'No content in response' unless content
-
-          content
-        end
-
-        # Handle API errors
-        # @param response [Faraday::Response] The HTTP response
-        # @raise [LLMError] Always raises with error details
-        def handle_error(response)
-          error_data = begin
-            JSON.parse(response.body)
-          rescue StandardError
-            {}
-          end
-          error_message = error_data.dig('error', 'message') || "HTTP #{response.status}"
-          raise LLMError, "Anthropic API error: #{error_message}"
-        end
-      end
-    end
-  end
-end
data/lib/soka/llms/concerns/streaming_handler.rb DELETED
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-module Soka
-  module LLMs
-    module Concerns
-      # Module for handling streaming responses from OpenAI
-      module StreamingHandler
-        private
-
-        # Stream chat completion
-        # @param messages [Array<Hash>] The messages to send
-        # @param params [Hash] Additional parameters
-        # @yield [String] Yields each chunk of the response
-        # @return [String] The complete response
-        def stream_chat(messages, **params, &)
-          return regular_chat(messages, **params) unless block_given?
-
-          complete_response = +''
-          request_params = build_request_params(messages, **params, stream: true)
-
-          response = connection.post('/v1/chat/completions') do |req|
-            req.body = request_params.to_json
-          end
-
-          handle_streaming_response(response, complete_response, &)
-        end
-
-        # Handle streaming response
-        # @param response [Faraday::Response] The HTTP response
-        # @param complete_response [String] Buffer for complete response
-        # @yield [String] Yields each chunk
-        # @return [String] The complete response
-        def handle_streaming_response(response, complete_response)
-          response.body.each_line do |line|
-            chunk = process_streaming_line(line)
-            next unless chunk
-
-            complete_response << chunk
-            yield chunk
-          end
-          complete_response
-        end
-
-        # Process a single streaming line
-        # @param line [String] The line to process
-        # @return [String, nil] The parsed chunk or nil
-        def process_streaming_line(line)
-          return nil if line.strip.empty? || !line.start_with?('data: ')
-
-          data = line[6..].strip
-          return nil if data == '[DONE]'
-
-          parse_streaming_chunk(data)
-        end
-
-        # Parse a streaming chunk
-        # @param data [String] The chunk data
-        # @return [String, nil] The parsed content
-        def parse_streaming_chunk(data)
-          parsed = JSON.parse(data)
-          parsed.dig('choices', 0, 'delta', 'content')
-        rescue JSON::ParserError
-          nil
-        end
-
-        # Perform regular (non-streaming) chat
-        # @param messages [Array<Hash>] The messages
-        # @param params [Hash] Additional parameters
-        # @return [String] The response content
-        def regular_chat(messages, **params)
-          request_params = build_request_params(messages, **params)
-          response = connection.post('/v1/chat/completions', request_params.to_json)
-          parse_response(response)
-        end
-      end
-    end
-  end
-end
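Taken together with the provider changes above, 0.0.1 removes streaming end to end: neither provider advertises supports_streaming? any longer, and the block-based chunk callbacks are gone. A hedged caller-side migration sketch; the chat entry point, constructor keyword, and message shape are assumptions inferred from the non-streaming paths in these diffs, not a confirmed public API:

# Before (0.0.1.beta2, hypothetical caller):
#   llm.stream_chat(messages) { |chunk| print chunk }
#
# After (0.0.1): only the complete Result is available.
llm = Soka::LLMs::OpenAI.new(api_key: ENV['OPENAI_API_KEY'])
result = llm.chat([{ role: 'user', content: 'Hello' }])
puts result.content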