gen-ai 0.4.0.alpha.3 → 0.4.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +23 -8
- data/lib/gen_ai/api/client.rb +13 -7
- data/lib/gen_ai/api/format/anthropic.rb +21 -0
- data/lib/gen_ai/api/format/gemini.rb +10 -3
- data/lib/gen_ai/chat/anthropic.rb +31 -0
- data/lib/gen_ai/chat/base.rb +6 -3
- data/lib/gen_ai/chat/gemini.rb +0 -3
- data/lib/gen_ai/language/anthropic.rb +52 -0
- data/lib/gen_ai/language/gemini.rb +2 -1
- data/lib/gen_ai/language/open_ai.rb +1 -1
- data/lib/gen_ai/version.rb +1 -1
- metadata +7 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 43e7d6407d84beec138ffb2a176910fd1cbe25b0f67cc1cf04f7cd462a615617
|
4
|
+
data.tar.gz: 3ed1891455618eb80e9410a455d954ab547092d3f3aaa3c2302857917456f9b8
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: d690ce67be4296396f0f07e0c0c8873c19c3477191f23a2043b95ef4d771f57da159e96c26c599da09c5b5ca5d65769b2156c7a625771b5ac7dab310c9ced46b
|
7
|
+
data.tar.gz: 355662f601a9bf54f45afa1e026004cee58561d5581bb621f74da11a479081566e2260f1db4a969a126f68e6b825a7ad6ee339233ec5b2bd81438110cb290b4e
|
data/README.md
CHANGED
@@ -32,10 +32,12 @@ require 'gen_ai'
|
|
32
32
|
|
33
33
|
Language models capabilities
|
34
34
|
|
35
|
-
| Provider
|
36
|
-
|
|
37
|
-
| **OpenAI**
|
38
|
-
| **Google Palm2**
|
35
|
+
| Provider | Embedding | Completion | Conversation | Sentiment | Summarization |
|
36
|
+
| ----------------- | :-------: | :--------: | :----------: | :-------: | :-----------: |
|
37
|
+
| **OpenAI** | ✅ | ✅ | ✅ | 🛠️ | 🛠️ |
|
38
|
+
| **Google Palm2** | ✅ | ✅ | ✅ | 🛠️ | 🛠️ |
|
39
|
+
| **Google Gemini** | ❌ | 🛠️ | ✅ | 🛠️ | 🛠️ |
|
40
|
+
| **Anthropic** | ❌ | ✅ | ✅ | 🛠️ | 🛠️ |
|
39
41
|
|
40
42
|
Image generation model capabilities
|
41
43
|
|
@@ -86,21 +88,34 @@ result.values
|
|
86
88
|
|
87
89
|
```
|
88
90
|
|
89
|
-
|
91
|
+
### Chat
|
92
|
+
Have a **conversation** with a Large Language Model and build your own AI chatbot.
|
93
|
+
|
94
|
+
Setting a context for the conversation is optional, but it helps the model to understand the topic of the conversation.
|
90
95
|
|
91
96
|
```ruby
|
92
|
-
|
97
|
+
chat = GenAI::Chat.new(:open_ai, ENV['OPEN_AI_TOKEN'])
|
98
|
+
chat.start(context: "You are a chat bot named Erl")
|
99
|
+
chat.message("Hi, what's your name")
|
93
100
|
# => #<GenAI::Result:0x0000000106ff3d20...>
|
94
101
|
|
95
102
|
result.value
|
96
|
-
# => "
|
103
|
+
# => "I am a chatbot and you can call me Erl. How can I help you?""
|
104
|
+
|
105
|
+
```
|
97
106
|
|
107
|
+
|
108
|
+
Provide a history of the conversation to the model to help it understand the context of the conversation.
|
109
|
+
|
110
|
+
```ruby
|
98
111
|
history = [
|
99
112
|
{role: 'user', content: 'What is the capital of Great Britain?'},
|
100
113
|
{role: 'assistant', content: 'London'},
|
101
114
|
]
|
102
115
|
|
103
|
-
|
116
|
+
chat = GenAI::Chat.new(:open_ai, ENV['OPEN_AI_TOKEN'])
|
117
|
+
result = chat.start(history: history)
|
118
|
+
result = chat.message("what about France?")
|
104
119
|
# => #<GenAI::Result:0x00000001033c3bc0...>
|
105
120
|
|
106
121
|
result.value
|
data/lib/gen_ai/api/client.rb
CHANGED
@@ -6,9 +6,10 @@ require 'faraday/multipart'
|
|
6
6
|
module GenAI
|
7
7
|
module Api
|
8
8
|
class Client
|
9
|
-
def initialize(url:, token:)
|
9
|
+
def initialize(url:, token:, headers: {})
|
10
10
|
@url = url
|
11
11
|
@token = token
|
12
|
+
@headers = headers
|
12
13
|
end
|
13
14
|
|
14
15
|
def post(path, body, options = {})
|
@@ -28,19 +29,24 @@ module GenAI
|
|
28
29
|
|
29
30
|
private
|
30
31
|
|
31
|
-
attr_reader :url, :token
|
32
|
+
attr_reader :url, :token, :headers
|
32
33
|
|
33
34
|
def connection(multipart: false)
|
34
|
-
Faraday.new(url: url, headers:
|
35
|
-
'Accept' => 'application/json',
|
36
|
-
'Content-Type' => multipart ? 'multipart/form-data' : 'application/json',
|
37
|
-
'Authorization' => "Bearer #{token}"
|
38
|
-
}) do |conn|
|
35
|
+
Faraday.new(url: url, headers: build_headers(token, headers, multipart)) do |conn|
|
39
36
|
conn.request :multipart if multipart
|
40
37
|
conn.request :url_encoded
|
41
38
|
end
|
42
39
|
end
|
43
40
|
|
41
|
+
def build_headers(token, headers, multipart)
|
42
|
+
hash = {
|
43
|
+
'Accept' => 'application/json',
|
44
|
+
'Content-Type' => multipart ? 'multipart/form-data' : 'application/json'
|
45
|
+
}
|
46
|
+
hash['Authorization'] = "Bearer #{token}" if token
|
47
|
+
hash.merge(headers)
|
48
|
+
end
|
49
|
+
|
44
50
|
def handle_response
|
45
51
|
response = yield
|
46
52
|
|
@@ -0,0 +1,21 @@
|
|
1
|
+
# frozen_string_literal: true

module GenAI
  module Api
    module Format
      # Request/response formatting helpers for the Anthropic API.
      module Anthropic
        # Normalizes chat messages to symbol-keyed hashes, the shape the
        # Anthropic request builder expects.
        def format_messages(messages)
          messages.map { |message| message.deep_symbolize_keys }
        end

        # Pulls the generated text out of an Anthropic API response.
        # Legacy /v1/complete responses have type == 'completion' and carry a
        # single string; /v1/messages responses carry a 'content' array of
        # { 'text' => ... } items.
        def extract_completions(response)
          return [response['completion'].strip] if response['type'] == 'completion'

          response['content'].map { |item| item['text'] }
        end
      end
    end
  end
end
|
@@ -4,12 +4,19 @@ module GenAI
|
|
4
4
|
module Api
|
5
5
|
module Format
|
6
6
|
module Gemini
|
7
|
+
USER_ROLE = 'user'
|
8
|
+
ASSISTANT_ROLE = 'model'
|
9
|
+
|
7
10
|
def format_messages(messages)
|
8
|
-
messages.map { |message| transform_message(message) }
|
11
|
+
messages.map { |message| transform_message(message.deep_symbolize_keys) }
|
9
12
|
end
|
10
13
|
|
11
14
|
def transform_message(message)
|
12
|
-
|
15
|
+
if message.keys == %i[role content]
|
16
|
+
{ role: role_for(message), parts: [text: message[:content]] }
|
17
|
+
else
|
18
|
+
message
|
19
|
+
end
|
13
20
|
end
|
14
21
|
|
15
22
|
def extract_completions(response)
|
@@ -19,7 +26,7 @@ module GenAI
|
|
19
26
|
private
|
20
27
|
|
21
28
|
def role_for(message)
|
22
|
-
message[:role] == 'user' ?
|
29
|
+
message[:role] == 'user' ? USER_ROLE : ASSISTANT_ROLE
|
23
30
|
end
|
24
31
|
end
|
25
32
|
end
|
@@ -0,0 +1,31 @@
|
|
1
|
+
# frozen_string_literal: true

module GenAI
  class Chat
    class Anthropic < Base
      SYSTEM_ROLE = 'system'

      private

      # Anthropic has no system message inside the history: the context is
      # sent as a top-level :system request option instead, so it is stashed
      # in @default_options (merged into each request by Base#message).
      def build_history(messages, context, examples)
        @default_options[:system] = context
        examples + messages
      end

      # Messages already carry the role Anthropic expects; use it verbatim.
      def role(message)
        message[:role]
      end

      # The { role:, content: } shape is accepted by the API as-is.
      def transform_message(message)
        message
      end

      # Folds an additional utterance into the most recent history entry.
      def append_to_message(message)
        last = @history.last
        last[:content] = "#{last[:content]}\n#{message}"
      end
    end
  end
end
|
data/lib/gen_ai/chat/base.rb
CHANGED
@@ -6,23 +6,26 @@ module GenAI
|
|
6
6
|
USER_ROLE = 'user'
|
7
7
|
ASSISTANT_ROLE = 'assistant'
|
8
8
|
|
9
|
+
attr_reader :history, :default_options
|
10
|
+
|
9
11
|
# Sets up an empty conversation and the provider-backed language model.
#
# provider - Symbol identifying the LLM provider (e.g. :open_ai).
# token    - API credential forwarded to GenAI::Language.
# options  - provider-specific options hash.
def initialize(provider:, token:, options: {})
  @history = []
  # Per-conversation options (e.g. :system context) merged into every request.
  @default_options = {}
  @model = GenAI::Language.new(provider, token, options: options)
end
|
13
16
|
|
14
17
|
# Begins a conversation. Prior history and few-shot examples are normalized
# to symbol-keyed hashes before being handed to the provider-specific
# build_history implementation.
#
# history  - Array of { role:, content: } hashes from an earlier exchange.
# context  - optional system-style instruction for the model.
# examples - Array of example messages seeding the conversation.
def start(history: [], context: nil, examples: [])
  @history = build_history(history.map(&:deep_symbolize_keys), context, examples.map(&:deep_symbolize_keys))
end
|
17
20
|
|
18
21
|
# Sends a user message, records both sides of the exchange in @history,
# and returns the provider's GenAI::Result.
def message(message, options = {})
  # NOTE(review): a lone non-system opening entry absorbs the first user
  # message instead of opening a new turn — presumably a seeded context
  # entry from start; confirm against the provider build_history impls.
  merge_into_opening = @history.size == 1 && @history.first[:role] != 'system'

  if merge_into_opening
    append_to_message(message)
  else
    append_to_history({ role: USER_ROLE, content: message })
  end

  # default_options can hold a nil :system entry; compact strips it out.
  request_options = default_options.merge(options).compact
  response = @model.chat(@history.dup, request_options)

  append_to_history({ role: ASSISTANT_ROLE, content: response.value })
  response
end
|
data/lib/gen_ai/chat/gemini.rb
CHANGED
@@ -0,0 +1,52 @@
|
|
1
|
+
# frozen_string_literal: true

require 'faraday'

module GenAI
  class Language
    class Anthropic < Base
      include GenAI::Api::Format::Anthropic

      BASE_API_URL = 'https://api.anthropic.com'
      ANTHROPIC_VERSION = '2023-06-01'
      ANTHROPIC_BETA = 'messages-2023-12-15'
      COMPLETION_MODEL = 'claude-2.1'
      DEFAULT_MAX_TOKENS = 1024

      # token   - Anthropic API key (sent via the x-api-key header).
      # options - accepted for interface parity with the other providers;
      #           currently unused.
      def initialize(token:, options: {})
        @token = token
        build_client(token)
      end

      # Single-turn completion via the legacy /v1/complete endpoint.
      #
      # options may carry :model / :max_tokens_to_sample overrides plus any
      # extra Anthropic parameters. FIX: work on a copy so the caller's hash
      # is not mutated by delete, and report the model actually used (the
      # old code always reported COMPLETION_MODEL even for overrides).
      def complete(prompt, options = {})
        opts = options.dup
        model = opts.delete(:model) || COMPLETION_MODEL

        response = client.post '/v1/complete', {
          prompt: "\n\nHuman: #{prompt}\n\nAssistant:",
          model: model,
          max_tokens_to_sample: opts.delete(:max_tokens_to_sample) || DEFAULT_MAX_TOKENS
        }.merge(opts)

        build_result(model: model, raw: response, parsed: extract_completions(response))
      end

      # Multi-turn chat via the /v1/messages endpoint. Same copy-then-delete
      # and model-reporting fixes as #complete.
      def chat(messages, options = {})
        opts = options.dup
        model = opts.delete(:model) || COMPLETION_MODEL

        response = client.post '/v1/messages', {
          messages: format_messages(messages),
          model: model,
          max_tokens: opts.delete(:max_tokens) || DEFAULT_MAX_TOKENS
        }.merge(opts)

        build_result(model: model, raw: response, parsed: extract_completions(response))
      end

      private

      # Anthropic authenticates with an x-api-key header rather than a
      # Bearer token, so the client is built with token: nil and explicit
      # headers (including the required anthropic-version / beta headers).
      def build_client(token)
        @client = GenAI::Api::Client.new(url: BASE_API_URL, token: nil, headers: {
          'anthropic-beta' => ANTHROPIC_BETA,
          'anthropic-version' => ANTHROPIC_VERSION,
          'x-api-key' => token
        })
      end
    end
  end
end
|
@@ -2,6 +2,7 @@
|
|
2
2
|
|
3
3
|
require 'faraday'
|
4
4
|
|
5
|
+
require 'pry'
|
5
6
|
module GenAI
|
6
7
|
class Language
|
7
8
|
class Gemini < Base
|
@@ -18,7 +19,7 @@ module GenAI
|
|
18
19
|
|
19
20
|
def chat(messages, options = {})
|
20
21
|
response = client.post "/v1beta/models/gemini-pro:generateContent?key=#{@token}", {
|
21
|
-
contents: messages
|
22
|
+
contents: format_messages(messages),
|
22
23
|
generationConfig: options.except(:model)
|
23
24
|
}
|
24
25
|
|
data/lib/gen_ai/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: gen-ai
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.4.0
|
4
|
+
version: 0.4.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Alex Chaplinsky
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-01-
|
11
|
+
date: 2024-01-07 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: activesupport
|
@@ -113,10 +113,12 @@ files:
|
|
113
113
|
- lib/gen/ai.rb
|
114
114
|
- lib/gen_ai.rb
|
115
115
|
- lib/gen_ai/api/client.rb
|
116
|
+
- lib/gen_ai/api/format/anthropic.rb
|
116
117
|
- lib/gen_ai/api/format/gemini.rb
|
117
118
|
- lib/gen_ai/api/format/open_ai.rb
|
118
119
|
- lib/gen_ai/base.rb
|
119
120
|
- lib/gen_ai/chat.rb
|
121
|
+
- lib/gen_ai/chat/anthropic.rb
|
120
122
|
- lib/gen_ai/chat/base.rb
|
121
123
|
- lib/gen_ai/chat/gemini.rb
|
122
124
|
- lib/gen_ai/chat/google_palm.rb
|
@@ -127,6 +129,7 @@ files:
|
|
127
129
|
- lib/gen_ai/image/open_ai.rb
|
128
130
|
- lib/gen_ai/image/stability_ai.rb
|
129
131
|
- lib/gen_ai/language.rb
|
132
|
+
- lib/gen_ai/language/anthropic.rb
|
130
133
|
- lib/gen_ai/language/base.rb
|
131
134
|
- lib/gen_ai/language/gemini.rb
|
132
135
|
- lib/gen_ai/language/google_palm.rb
|
@@ -153,9 +156,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
153
156
|
version: 2.7.0
|
154
157
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
155
158
|
requirements:
|
156
|
-
- - "
|
159
|
+
- - ">="
|
157
160
|
- !ruby/object:Gem::Version
|
158
|
-
version:
|
161
|
+
version: '0'
|
159
162
|
requirements: []
|
160
163
|
rubygems_version: 3.3.7
|
161
164
|
signing_key:
|