omniai-google 1.4.0 → 1.6.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 1dc0ec04a882a2e35a5d7ff7b613bde89641e57a67ed4a68cdb84ab72d04a982
- data.tar.gz: a72573e3a57e13b6bf56e33c4133ea4a9ae0c4c3d79688979a8f166345508a6f
+ metadata.gz: 3a64561af96984da60b42099f415cdb90d738757f6108af74b035e1a2e5d34ce
+ data.tar.gz: 837b82e0ec5e45b3300858591f1a60ffd2783d16d0c5b1ad8280dd9585d944de
  SHA512:
- metadata.gz: 3cd82fccac3f31b37f9505569ff411d91c9c2a9429fde7308c7b5b34abe922bd4734613f355ed616ff27ff49502c17714d380d0676a2f6f63493fa4baf65c0bc
- data.tar.gz: c1ac419641eebf177c72a425ac695e5fad10d09cd943998c5952d29ace751234a135eeb009e3000db47869de07db675bbadcddec6cd225ce851641635ffc2550
+ metadata.gz: abb56f1dc0d7ddb3efcd79ffcd132c45c85e3ca5fc43f72bfc8fd94b535f0bf76bb32946e1dca7e55cb180bc8d7492ecc116f16cb7fd8b454825f38395c92f22
+ data.tar.gz: 9baa74ce37c0a0c7a20b9f36e15d205a89df9556042d17e999f622c86e505f1f5ace1b143091f346120045102e62a1b4bf34d522d4c90b179aec8773f34b8c78
data/Gemfile CHANGED
@@ -5,7 +5,6 @@ source 'https://rubygems.org'
  gemspec
 
  gem 'rake'
-
  gem 'rspec'
  gem 'rspec_junit_formatter'
  gem 'rubocop'
@@ -13,3 +12,4 @@ gem 'rubocop-rake'
  gem 'rubocop-rspec'
  gem 'simplecov'
  gem 'webmock'
+ gem 'yard'
data/lib/omniai/google/chat/response/chunk.rb ADDED
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module Google
+     class Chat
+       module Response
+         # A chunk given when streaming.
+         class Chunk < OmniAI::Chat::Response::Chunk
+           # @return [Array<OmniAI::Chat::Choice>]
+           def choices
+             @choices ||= [].tap do |choices|
+               @data['candidates'].each do |candidate|
+                 candidate['content']['parts'].each do |part|
+                   choices << OmniAI::Chat::Response::DeltaChoice.new(data: {
+                     'index' => candidate['index'],
+                     'delta' => {
+                       'role' => candidate['content']['role'],
+                       'content' => part['text'],
+                     },
+                   })
+                 end
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
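For orientation, a minimal sketch of the streamed candidate payload that Response::Chunk#choices expects; the values below are illustrative, not taken from a real Gemini response.

# Hypothetical streamed chunk payload; shape inferred from #choices above.
data = {
  'candidates' => [
    {
      'index' => 0,
      'content' => { 'role' => 'model', 'parts' => [{ 'text' => 'Hello' }] },
    },
  ],
}

chunk = OmniAI::Google::Chat::Response::Chunk.new(data: data)
chunk.choices # => one OmniAI::Chat::Response::DeltaChoice per part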
data/lib/omniai/google/chat/response/completion.rb ADDED
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module Google
+     class Chat
+       module Response
+         # A completion returned by the API.
+         class Completion < OmniAI::Chat::Response::Completion
+           # @return [Array<OmniAI::Chat::Choice>]
+           def choices
+             @choices ||= [].tap do |entries|
+               @data['candidates'].each do |candidate|
+                 candidate['content']['parts'].each do |part|
+                   entries << OmniAI::Chat::Response::MessageChoice.new(data: {
+                     'index' => candidate['index'],
+                     'message' => {
+                       'role' => candidate['content']['role'],
+                       'content' => part['text'],
+                     },
+                   })
+                 end
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/omniai/google/chat/response/stream.rb ADDED
@@ -0,0 +1,21 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module Google
+     class Chat
+       module Response
+         # A stream given when streaming.
+         class Stream < OmniAI::Chat::Response::Stream
+           # @yield [OmniAI::Chat::Chunk]
+           def stream!(&)
+             @response.body.each do |chunk|
+               @parser.feed(chunk) do |_, data|
+                 yield(Chunk.new(data: JSON.parse(data)))
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/omniai/google/chat.rb CHANGED
@@ -23,6 +23,46 @@ module OmniAI
          GEMINI_FLASH = GEMINI_1_5_FLASH
        end
 
+       TEXT_SERIALIZER = lambda do |content, *|
+         { text: content.text }
+       end
+
+       # @param [Message]
+       # @return [Hash]
+       # @example
+       #   message = Message.new(...)
+       #   MESSAGE_SERIALIZER.call(message)
+       MESSAGE_SERIALIZER = lambda do |message, context:|
+         parts = message.content.is_a?(String) ? [Text.new(message.content)] : message.content
+
+         {
+           role: message.role,
+           parts: parts.map { |part| part.serialize(context:) },
+         }
+       end
+
+       # @param [Media]
+       # @return [Hash]
+       # @example
+       #   media = Media.new(...)
+       #   MEDIA_SERIALIZER.call(media)
+       MEDIA_SERIALIZER = lambda do |media, *|
+         {
+           inlineData: {
+             mimeType: media.type,
+             data: media.data,
+           },
+         }
+       end
+
+       # @return [Context]
+       CONTEXT = Context.build do |context|
+         context.serializers[:message] = MESSAGE_SERIALIZER
+         context.serializers[:text] = TEXT_SERIALIZER
+         context.serializers[:file] = MEDIA_SERIALIZER
+         context.serializers[:url] = MEDIA_SERIALIZER
+       end
+
        protected
 
        # @return [HTTP::Response]
@@ -40,10 +80,20 @@ module OmniAI
        def payload
          OmniAI::Google.config.chat_options.merge({
            contents:,
+           tools:,
            generationConfig: generation_config,
          }).compact
        end
 
+       # @return [Array<Hash>, nil]
+       def tools
+         return unless @tools
+
+         [
+           function_declarations: @tools&.map(&:prepare),
+         ]
+       end
+
        # @return [Hash]
        def generation_config
          return unless @temperature
@@ -57,9 +107,7 @@ module OmniAI
        #
        # @return [Array<Hash>]
        def contents
-         messages.map do |message|
-           { role: message[:role], parts: [{ text: message[:content] }] }
-         end
+         @prompt.serialize(context: CONTEXT)
        end
 
        # @return [String]
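A minimal sketch of the hashes the new serializer context emits; the values are illustrative and only the shapes follow from the lambdas above.

# MESSAGE_SERIALIZER + TEXT_SERIALIZER for a plain text user message:
{ role: 'user', parts: [{ text: 'Hello' }] }

# MEDIA_SERIALIZER for a :file or :url part (mimeType and data come from Media#type and Media#data):
{ inlineData: { mimeType: 'image/png', data: '<encoded bytes>' } }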
data/lib/omniai/google/client.rb CHANGED
@@ -50,10 +50,11 @@ module OmniAI
      # @param format [Symbol] optional :text or :json
      # @param temperature [Float, nil] optional
      # @param stream [Proc, nil] optional
+     # @param tools [Array<OmniAI::Chat::Tool>, nil] optional
      #
      # @return [OmniAI::Chat::Completion]
-     def chat(messages, model: Chat::Model::GEMINI_PRO, temperature: nil, format: nil, stream: nil)
-       Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
+     def chat(messages, model: Chat::Model::GEMINI_PRO, temperature: nil, format: nil, stream: nil, tools: nil)
+       Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self)
      end
    end
  end
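A hedged usage sketch of the new tools: keyword. The client construction and weather_tool are assumptions for illustration, not part of this diff; per Chat#tools above, a tool only needs to respond to #prepare so it can be placed into the function_declarations payload.

# Hypothetical usage, assuming credentials are already set via OmniAI::Google.config
# and `weather_tool` is an OmniAI chat tool object responding to #prepare.
client = OmniAI::Google::Client.new

completion = client.chat('What is the weather in Berlin?',
  model: OmniAI::Google::Chat::Model::GEMINI_FLASH,
  tools: [weather_tool])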
data/lib/omniai/google/version.rb CHANGED
@@ -2,6 +2,6 @@
 
  module OmniAI
    module Google
-     VERSION = '1.4.0'
+     VERSION = '1.6.0'
    end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: omniai-google
  version: !ruby/object:Gem::Version
-   version: 1.4.0
+   version: 1.6.0
  platform: ruby
  authors:
  - Kevin Sylvestre
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-07-04 00:00:00.000000000 Z
+ date: 2024-07-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: event_stream_parser
@@ -63,9 +63,9 @@ files:
  - README.md
  - lib/omniai/google.rb
  - lib/omniai/google/chat.rb
- - lib/omniai/google/chat/chunk.rb
- - lib/omniai/google/chat/completion.rb
- - lib/omniai/google/chat/stream.rb
+ - lib/omniai/google/chat/response/chunk.rb
+ - lib/omniai/google/chat/response/completion.rb
+ - lib/omniai/google/chat/response/stream.rb
  - lib/omniai/google/client.rb
  - lib/omniai/google/config.rb
  - lib/omniai/google/version.rb
@@ -91,7 +91,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
    version: '0'
  requirements: []
- rubygems_version: 3.5.3
+ rubygems_version: 3.5.14
  signing_key:
  specification_version: 4
  summary: A generalized framework for interacting with Google
data/lib/omniai/google/chat/chunk.rb DELETED
@@ -1,24 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
-   module Google
-     class Chat
-       # A chunk given when streaming.
-       class Chunk < OmniAI::Chat::Chunk
-         # @return [Array<OmniAI::Chat::Choice>]
-         def choices
-           @choices ||= [].tap do |choices|
-             @data['candidates'].each do |candidate|
-               candidate['content']['parts'].each do |part|
-                 choices << OmniAI::Chat::DeltaChoice.for(data: {
-                   'index' => candidate['index'],
-                   'delta' => { 'role' => candidate['content']['role'], 'content' => part['text'] },
-                 })
-               end
-             end
-           end
-         end
-       end
-     end
-   end
- end
data/lib/omniai/google/chat/completion.rb DELETED
@@ -1,24 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
-   module Google
-     class Chat
-       # A completion returned by the API.
-       class Completion < OmniAI::Chat::Completion
-         # @return [Array<OmniAI::Chat::Choice>]
-         def choices
-           @choices ||= [].tap do |entries|
-             @data['candidates'].each do |candidate|
-               candidate['content']['parts'].each do |part|
-                 entries << OmniAI::Chat::MessageChoice.for(data: {
-                   'index' => candidate['index'],
-                   'message' => { 'role' => candidate['content']['role'], 'content' => part['text'] },
-                 })
-               end
-             end
-           end
-         end
-       end
-     end
-   end
- end
data/lib/omniai/google/chat/stream.rb DELETED
@@ -1,19 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
-   module Google
-     class Chat
-       # A stream given when streaming.
-       class Stream < OmniAI::Chat::Stream
-         # @yield [OmniAI::Chat::Chunk]
-         def stream!(&)
-           @response.body.each do |chunk|
-             @parser.feed(chunk) do |_, data|
-               yield(Chunk.new(data: JSON.parse(data)))
-             end
-           end
-         end
-       end
-     end
-   end
- end