zuno 0.1.3 → 0.1.4
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
- checksums.yaml +4 -4
- data/lib/providers/openai.rb +32 -8
- data/lib/zuno/chat.rb +9 -8
- data/lib/zuno/configuration.rb +0 -1
- data/lib/zuno/version.rb +1 -1
- metadata +16 -6
- data/lib/providers/anthropic.rb +0 -33
- data/lib/providers/groq_cloud.rb +0 -66
- data/lib/zuno/transcription.rb +0 -11
- data/lib/zuno/translation.rb +0 -11
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6809336cae0fd3e0ff6510407f7a29ca852f833dc7f567e05f246b958982a968
+  data.tar.gz: 007fe0df2ef864d4dc8ce8afe903a733c85e0da41921fea7ecb53f5c1da56263
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 373a15dfb01d69283621579bc58a58561bceb30da4c309014632cb1e5404f659e7ba1f5d206f70f7650838a488b9483ecb02ecd36523d35f79e9d12fe43d9e89
+  data.tar.gz: ade9ff32f23eb1ccb60660c3372076e6c6f8b92d4ca64bebf0e617381d1b2ee4204de6bfc29eb54dd53765d0235bfc544e78107d09a086b8439bfed72f05f3f1
data/lib/providers/openai.rb
CHANGED
@@ -1,4 +1,7 @@
 require 'ostruct'
+require 'faraday'
+require 'byebug'
+require 'event_stream_parser'

 module Providers
   class OpenAI
@@ -12,22 +15,43 @@ module Providers
       @api_key = Zuno.configuration.openai_api_key
     end

-    def chat_completion(messages, model, options = {})
+    def chat_completion(messages, model, options = {}, raw_response)
       response = @connection.post("/v1/chat/completions") do |req|
         req.headers["Content-Type"] = "application/json"
         req.headers["Authorization"] = "Bearer #{@api_key}"
         req.body = {
           model: model,
-          messages: messages
+          messages: messages,
         }.merge(options).to_json
+
+        if options[:stream]
+          parser = EventStreamParser::Parser.new
+          req.options.on_data = Proc.new do |chunk, size|
+            if raw_response
+              yield chunk
+            else
+              parser.feed(chunk) do |type, data, id, reconnection_time|
+                return if data == "[DONE]"
+                content = JSON.parse(data)["choices"][0]["delta"]["content"]
+                yield OpenStruct.new(content: content) if content
+              end
+            end
+          end
+        end
       end

-
-
-
-
-
-
+      unless options[:stream]
+        if raw_response
+          return response.body
+        else
+          if response.body["error"]
+            raise response.body["error"]["message"]
+          elsif response.body["choices"][0]["message"]["content"]
+            OpenStruct.new(content: response.body["choices"][0]["message"]["content"])
+          elsif response.body["choices"][0]["message"]["tool_calls"]
+            OpenStruct.new(tool_calls: response.body["choices"][0]["message"]["tool_calls"])
+          end
+        end
     end
   end
 end
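When stream: true is passed, the new code registers a Faraday on_data callback and feeds each raw chunk through event_stream_parser, yielding an OpenStruct per content delta instead of waiting for the full response body. Below is a minimal, self-contained sketch of that decoding step; the sample SSE payload is invented for illustration and is not part of the gem:

    require 'json'
    require 'event_stream_parser'

    # Illustrative server-sent-events payload shaped like OpenAI's streaming chunks.
    sample_chunk = <<~SSE
      data: {"choices":[{"delta":{"content":"Hello"}}]}

      data: [DONE]

    SSE

    parser = EventStreamParser::Parser.new

    # Same pattern as the diff above: feed raw bytes into the parser, pull the
    # delta content out of each event, and stop at the [DONE] sentinel.
    parser.feed(sample_chunk) do |type, data, id, reconnection_time|
      next if data == "[DONE]"

      content = JSON.parse(data)["choices"][0]["delta"]["content"]
      print content if content
    end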
data/lib/zuno/chat.rb
CHANGED
@@ -1,18 +1,23 @@
 # frozen_string_literal: true

 require "providers/openai"
-require "providers/anthropic"

 module Zuno
   OPENAI_MODELS = %w[gpt-3.5-turbo gpt-4-turbo gpt-4-turbo-preview gpt-4o gpt-4o-mini].freeze
-  ANTHROPIC_MODELS = %w[claude-3-5-sonnet-20240620 claude-3-opus-20240229 claude-3-sonnet-20240229 claude-3-haiku-20240307].freeze
-  GROQ_CLOUD_MODELS = %w[llama3-8b-8192 llama3-70b-8192 mixtral-8x7b-32768 gemma-7b-it gemma2-9b-it].freeze

   class << self
     def chat(messages:, model: nil, **options)
       model ||= Zuno.configuration.chat_completion_model
       provider = provider_for_model(model)
-
+      raw_response = options.delete(:raw_response) || false
+
+      if options[:stream]
+        provider.chat_completion(messages, model, options, raw_response) do |chunk|
+          yield chunk if block_given?
+        end
+      else
+        provider.chat_completion(messages, model, options, raw_response)
+      end
     end

     private
@@ -20,8 +25,6 @@ module Zuno
     def provider_for_model(model)
       case model
       when *OPENAI_MODELS then Providers::OpenAI.new
-      when *ANTHROPIC_MODELS then Providers::Anthropic.new
-      when *GROQ_CLOUD_MODELS then Providers::GroqCloud.new
       else
         raise ArgumentError, "Unsupported model: #{model}"
       end
@@ -30,8 +33,6 @@ module Zuno
     def model_providers_mapping
       @model_providers_mapping ||= {
         **OPENAI_MODELS.to_h { |model| [model, Providers::OpenAI.new] },
-        **ANTHROPIC_MODELS.to_h { |model| [model, Providers::Anthropic.new] },
-        **GROQ_CLOUD_MODELS.to_h { |model| [model, Providers::GroqCloud.new] }
       }
     end
   end
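With this change Zuno.chat keeps its blocking form and adds a streaming form plus a raw_response option that bypasses the OpenStruct wrapping. A usage sketch based on the signatures above; the Zuno.configure block and the message/model values are assumptions for illustration, not taken from this diff:

    require 'zuno'

    # Assumed configuration API; only Zuno.configuration.openai_api_key is
    # referenced in this diff.
    Zuno.configure do |config|
      config.openai_api_key = ENV["OPENAI_API_KEY"]
    end

    messages = [{ role: "user", content: "Say hello" }]

    # Blocking call: returns an OpenStruct responding to #content (or #tool_calls).
    response = Zuno.chat(messages: messages, model: "gpt-4o-mini")
    puts response.content

    # Streaming call: each parsed delta is yielded to the block as it arrives.
    Zuno.chat(messages: messages, model: "gpt-4o-mini", stream: true) do |chunk|
      print chunk.content
    end

    # raw_response: true returns the provider's response body without wrapping.
    raw = Zuno.chat(messages: messages, model: "gpt-4o-mini", raw_response: true)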
data/lib/zuno/configuration.rb
CHANGED
data/lib/zuno/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: zuno
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.4
 platform: ruby
 authors:
 - John Paul
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-09-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: httparty
@@ -38,6 +38,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: event_stream_parser
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: AI toolkit for Ruby
 email:
 - johnarpaul@gmail.com
@@ -45,14 +59,10 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- lib/providers/anthropic.rb
-- lib/providers/groq_cloud.rb
 - lib/providers/openai.rb
 - lib/zuno.rb
 - lib/zuno/chat.rb
 - lib/zuno/configuration.rb
-- lib/zuno/transcription.rb
-- lib/zuno/translation.rb
 - lib/zuno/version.rb
 homepage: https://github.com/dqnamo
 licenses:
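The dependency section gains event_stream_parser as a runtime dependency, which backs the new streaming parser in the OpenAI provider. In the gemspec (not included in this diff) that would correspond roughly to the following sketch; field values are copied from the metadata above where available, the rest is illustrative:

    # zuno.gemspec (illustrative sketch, not the actual file)
    Gem::Specification.new do |spec|
      spec.name    = "zuno"
      spec.version = "0.1.4"
      spec.authors = ["John Paul"]
      spec.summary = "AI toolkit for Ruby"
      spec.files   = Dir["lib/**/*.rb"]

      spec.add_dependency "httparty"
      spec.add_dependency "event_stream_parser"
    end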
data/lib/providers/anthropic.rb
DELETED
@@ -1,33 +0,0 @@
-require 'ostruct'
-
-module Providers
-  class Anthropic
-    def initialize
-      @connection = Faraday.new(url: "https://api.anthropic.com") do |faraday|
-        faraday.request :json
-        faraday.response :json
-        faraday.adapter Faraday.default_adapter
-      end
-
-      @api_key = Zuno.configuration.anthropic_api_key
-    end
-
-    def chat_completion(messages, model, options = {})
-      response = @connection.post("/v1/messages") do |req|
-        req.headers["x-api-key"] = @api_key
-        req.headers["Content-Type"] = "application/json"
-        req.headers["anthropic-version"] = "2023-06-01"
-        req.body = {
-          model: model,
-          messages: messages,
-        }.merge(options).to_json
-      end
-
-      if response.body["error"]
-        raise response.body["error"]["message"]
-      else
-        OpenStruct.new(response: response.body["content"][0]["text"])
-      end
-    end
-  end
-end
data/lib/providers/groq_cloud.rb
DELETED
@@ -1,66 +0,0 @@
-require 'httparty'
-require 'open-uri'
-require 'ostruct'
-
-module Providers
-  class GroqCloud
-    include HTTParty
-    base_uri 'https://api.groq.com'
-
-    def initialize
-      @api_key = Zuno.configuration.groq_cloud_api_key
-      self.class.headers 'Authorization' => "Bearer #{@api_key}"
-    end
-
-    def transcribe(audio, language = "en")
-      response = self.class.post(
-        '/openai/v1/audio/transcriptions',
-        multipart: true,
-        body: {
-          file: audio,
-          model: 'whisper-large-v3',
-          temperature: 0,
-          response_format: 'json',
-          language: language
-        }
-      )
-
-      OpenStruct.new(text: JSON.parse(response.body)["text"])
-    end
-
-    def translate(audio)
-      response = self.class.post(
-        '/openai/v1/audio/translations',
-        multipart: true,
-        body: {
-          file: audio,
-          model: 'whisper-large-v3',
-          temperature: 0,
-          response_format: 'json',
-        }
-      )
-
-      OpenStruct.new(text: JSON.parse(response.body)["text"])
-    end
-
-    def chat_completion(messages, model, options = {})
-      response = self.class.post(
-        '/openai/v1/chat/completions',
-        body: {
-          model: model,
-          messages: messages
-        }.merge(options).to_json
-      )
-
-      parsed_response = JSON.parse(response.body)
-
-      if parsed_response["error"]
-        raise parsed_response["error"]["message"]
-      elsif parsed_response["choices"][0]["message"]["content"]
-        OpenStruct.new(response: parsed_response["choices"][0]["message"]["content"])
-      elsif parsed_response["choices"][0]["message"]["tool_calls"]
-        OpenStruct.new(tool_calls: parsed_response["choices"][0]["message"]["tool_calls"])
-      end
-    end
-  end
-end
data/lib/zuno/transcription.rb
DELETED