omniai-google 1.6.1 → 1.6.3
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/omniai/google/chat.rb +4 -1
- data/lib/omniai/google/client.rb +2 -2
- data/lib/omniai/google/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: b2626eef567a04e92b73eaf64a39dc274c2651b479810cd62a94be1a920cf25f
|
4
|
+
data.tar.gz: 8c968e5ca2328ce77e604f91c36607c22ef6572bf2e7633505316fdd6bab93db
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: c104b92ca09ae4c29285b2ac35e7badd23d2c54e4cb7189b0f1fb2fa1d0ad0cac4770e6c8ceeaaf5468225db52913458e6d4b6b94a0054f5c58735b01a78911d
|
7
|
+
data.tar.gz: a1037ddc9ccc58f84dfba6a094d0c7e1481dbba103e4e4d9f16dfbfb21def59fee61e4c463e96e3807c0763db4920f84934133b66090c9a8d575ac20c4225913
|
data/lib/omniai/google/chat.rb
CHANGED
@@ -23,6 +23,8 @@ module OmniAI
|
|
23
23
|
GEMINI_FLASH = GEMINI_1_5_FLASH
|
24
24
|
end
|
25
25
|
|
26
|
+
DEFAULT_MODEL = Model::GEMINI_PRO
|
27
|
+
|
26
28
|
TEXT_SERIALIZER = lambda do |content, *|
|
27
29
|
{ text: content.text }
|
28
30
|
end
|
@@ -34,9 +36,10 @@ module OmniAI
|
|
34
36
|
# MESSAGE_SERIALIZER.call(message)
|
35
37
|
MESSAGE_SERIALIZER = lambda do |message, context:|
|
36
38
|
parts = message.content.is_a?(String) ? [Text.new(message.content)] : message.content
|
39
|
+
role = message.system? ? Role::USER : message.role
|
37
40
|
|
38
41
|
{
|
39
|
-
role
|
42
|
+
role:,
|
40
43
|
parts: parts.map { |part| part.serialize(context:) },
|
41
44
|
}
|
42
45
|
end
|
data/lib/omniai/google/client.rb
CHANGED
@@ -45,7 +45,7 @@ module OmniAI
|
|
45
45
|
|
46
46
|
# @raise [OmniAI::Error]
|
47
47
|
#
|
48
|
-
# @param messages [String]
|
48
|
+
# @param messages [String] optional
|
49
49
|
# @param model [String] optional
|
50
50
|
# @param format [Symbol] optional :text or :json
|
51
51
|
# @param temperature [Float, nil] optional
|
@@ -56,7 +56,7 @@ module OmniAI
|
|
56
56
|
# @yieldparam prompt [OmniAI::Chat::Prompt]
|
57
57
|
#
|
58
58
|
# @return [OmniAI::Chat::Completion]
|
59
|
-
def chat(messages, model: Chat::Model::GEMINI_PRO, temperature: nil, format: nil, stream: nil, tools: nil, &)
|
59
|
+
def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil, &)
|
60
60
|
Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self, &)
|
61
61
|
end
|
62
62
|
end
|