omniai-google 2.2.2 → 2.3.1

This diff shows the content changes between publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e870633ad811d978c7d3e70313d96d646a2ec03bd2ca989c24ebf84dfe9a1bbc
- data.tar.gz: 5bdd175f1a6ec4299a76f02d00c66e6e8615f05408c3c77edbad2aca096c67f9
+ metadata.gz: 904b9944d8f92b2dd343568a5185c0adf8151d84e57698a85542187f88e5b469
+ data.tar.gz: fbcac301f2f80c16b3ad2d1c3e1d812ea52c9ea7f5ba5ca0ba22c2f2e7b5a594
  SHA512:
- metadata.gz: bf8f59a91eff99a9b266f9d487b580999322e27107bc0591c12fe070ffe61b1a177452c3504f3f20a2cdcf38aec642a121a4212715c5f6ba594701b5c8f4ec94
- data.tar.gz: 8ea4b0be462ce5df1e502323d2230df7652e83bc57afe8706517fe1c9188680c318ffcfc90a9e6728bd0f238339ab4aaba642dd05c0cd8d31ee7d3b365cadb09
+ metadata.gz: ac86234994438b8197e88ce447479aab12aaddef77e2d50e04c58560af2d83c0f18af34512de2e19524ae383203194dd42fb4264510bd01aed26a60bc8e77cf8
+ data.tar.gz: 9a212401e92a2ab193fab62db2b01da7f7012c42ba4e1f95a5446b921a57946c816347248138f98c0a8f387cbe2fcef359fc0a8189363388aff0c2ffd76aa2d8
data/README.md CHANGED
@@ -35,9 +35,7 @@ A client may also be passed the following options:
 
  Vertex AI and Google AI offer different options for interacting w/ Google's AI APIs. Checkout the [Vertex AI and Google AI differences](https://cloud.google.com/vertex-ai/generative-ai/docs/overview#how-gemini-vertex-different-gemini-aistudio) to determine which option best fits your requirements.
 
- #### Configuration w/ Gemini
-
- **w/ `api_key`**
+ #### Configuration w/ Google AI
 
  If using Gemini simply provide an `api_key`:
 
@@ -47,7 +45,7 @@ OmniAI::Google.configure do |config|
  end
  ```
 
- #### Configuration w/ Vertex
+ #### Configuration w/ Vertex AI
 
  If using Vertex supply the `credentials`, `host`, `location_id` and `project_id`:
 
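For orientation, here is a minimal sketch of the two configuration paths the updated README describes. The option names (`api_key`, `credentials`, `host`, `location_id`, `project_id`) come from the README excerpts above; the key, host, region, and project values below are placeholders and are not taken from this diff.

```ruby
require "pathname"
require "omniai/google"

# Google AI (Gemini API): an API key is sufficient.
OmniAI::Google.configure do |config|
  config.api_key = ENV["GOOGLE_API_KEY"] # placeholder
end

# Vertex AI: service-account credentials plus host / location / project.
OmniAI::Google.configure do |config|
  config.credentials = Pathname("./service-account.json")    # a File, Hash, or JSON String also works (see Credentials.parse below)
  config.host = "https://us-east4-aiplatform.googleapis.com" # placeholder region
  config.location_id = "us-east4"                            # placeholder
  config.project_id = "my-project"                           # placeholder
end
```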
@@ -50,7 +50,7 @@ module OmniAI
 
  @project_id = project_id
  @location_id = location_id
- @credentials = credentials
+ @credentials = Credentials.parse(credentials)
  @version = version
  end
 
@@ -109,6 +109,11 @@ module OmniAI
  !@credentials.nil?
  end
 
+ # @return [Boolean]
+ def vertex?
+ @host.include?("aiplatform.googleapis.com")
+ end
+
  private
 
  # @return [String] e.g. "Bearer ..."
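The new `Client#vertex?` predicate infers the mode from the configured host rather than from a separate flag: any host containing `aiplatform.googleapis.com` is treated as Vertex AI. A hypothetical illustration, assuming the client accepts a `host:` option (suggested by the `@host` it reads; the hosts below are placeholders):

```ruby
client = OmniAI::Google::Client.new(host: "https://us-east4-aiplatform.googleapis.com")
client.vertex? # => true: the host contains "aiplatform.googleapis.com"

client = OmniAI::Google::Client.new(host: "https://generativelanguage.googleapis.com")
client.vertex? # => false: requests keep the Google AI (Gemini API) behaviour
```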
@@ -51,7 +51,7 @@ module OmniAI
  Credentials.detect
  end
 
- # @param value [String, File, Google::Auth::ServiceAccountCredentials, nil]
+ # @param value [String, File, Hash, Google::Auth::ServiceAccountCredentials, nil]
  def credentials=(value)
  @credentials = Credentials.parse(value)
  end
@@ -17,14 +17,15 @@ module OmniAI
  end
  end
 
- # @param value [Google::Auth::ServiceAccountCredentials, IO, Pathname, String nil]
+ # @param value [Google::Auth::ServiceAccountCredentials, IO, Pathname, String, Hash, nil]
  # @return [Google::Auth::ServiceAccountCredentials]
  def self.parse(value)
  case value
- when ::Google::Auth::ServiceAccountCredentials then value
  when IO, StringIO then ::Google::Auth::ServiceAccountCredentials.make_creds(json_key_io: value, scope: SCOPE)
+ when Hash then parse(JSON.generate(value))
  when Pathname then parse(File.open(value))
  when String then parse(StringIO.new(value))
+ else value
  end
  end
  end
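The reworked `Credentials.parse` now also accepts a `Hash`: it is re-serialized with `JSON.generate` and then flows through the existing `String` → `StringIO` → `make_creds` branches, while already-built credential objects simply fall through the new `else` clause. A brief sketch of what this enables (the key file path is a placeholder):

```ruby
require "json"

# A decoded service-account key can now be assigned directly as a Hash.
key = JSON.parse(File.read("service-account.json")) # placeholder path

OmniAI::Google.configure do |config|
  config.credentials = key # Hash -> JSON.generate -> StringIO -> make_creds
end
```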
@@ -12,6 +12,7 @@ module OmniAI
  class Embed < OmniAI::Embed
  module Model
  TEXT_EMBEDDING_004 = "text-embedding-004"
+ TEXT_EMBEDDING_005 = "text-embedding-005"
  TEXT_MULTILINGUAL_EMBEDDING_002 = "text-multilingual-embedding-002"
  EMBEDDING = TEXT_EMBEDDING_004
  MULTILINGUAL_EMBEDDING = TEXT_MULTILINGUAL_EMBEDDING_002
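The `Model` module gains `text-embedding-005`; `EMBEDDING` (the default) still points at `text-embedding-004`. A hedged usage sketch, assuming the usual OmniAI `client.embed` entry point:

```ruby
client.embed(
  "The quick brown fox jumps over the lazy dog",
  model: OmniAI::Google::Embed::Model::TEXT_EMBEDDING_005
)
```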
@@ -19,24 +20,48 @@ module OmniAI
 
  DEFAULT_MODEL = Model::EMBEDDING
 
- EMBEDDINGS_DESERIALIZER = proc do |data, *|
+ DEFAULT_EMBEDDINGS_DESERIALIZER = proc do |data, *|
  data["embeddings"].map { |embedding| embedding["values"] }
  end
 
+ VERTEX_EMBEDDINGS_DESERIALIZER = proc do |data, *|
+ data["predictions"].map { |prediction| prediction["embeddings"]["values"] }
+ end
+
+ # @return [Context]
+ DEFAULT_CONTEXT = Context.build do |context|
+ context.deserializers[:embeddings] = DEFAULT_EMBEDDINGS_DESERIALIZER
+ end
+
  # @return [Context]
- CONTEXT = Context.build do |context|
- context.deserializers[:embeddings] = EMBEDDINGS_DESERIALIZER
+ VERTEX_CONTEXT = Context.build do |context|
+ context.deserializers[:embeddings] = VERTEX_EMBEDDINGS_DESERIALIZER
  end
 
  protected
 
+ # @return [Boolean]
+ def vertex?
+ @client.vertex?
+ end
+
+ # @return [Context]
+ def context
+ vertex? ? VERTEX_CONTEXT : DEFAULT_CONTEXT
+ end
+
  # @param response [HTTP::Response]
  # @return [Response]
  def parse!(response:)
- Response.new(data: response.parse, context: CONTEXT)
+ Response.new(data: response.parse, context:)
+ end
+
+ # @return [Array[Hash]]
+ def instances
+ arrayify(@input).map { |content| { content: } }
  end
 
- # @return [Array<Hash<{ text: String }>]
+ # @return [Array[Hash]]
  def requests
  arrayify(@input).map do |text|
  {
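The split into two deserializers mirrors the differently shaped responses of the two endpoints: `batchEmbedContents` (Gemini API) returns a top-level `embeddings` array of `{ "values": [...] }` objects, while Vertex AI's `predict` nests each vector under `predictions[].embeddings.values`. A standalone illustration with made-up numbers:

```ruby
gemini_data = { "embeddings" => [{ "values" => [0.1, 0.2, 0.3] }] }
vertex_data = { "predictions" => [{ "embeddings" => { "values" => [0.1, 0.2, 0.3] } }] }

OmniAI::Google::Embed::DEFAULT_EMBEDDINGS_DESERIALIZER.call(gemini_data) # => [[0.1, 0.2, 0.3]]
OmniAI::Google::Embed::VERTEX_EMBEDDINGS_DESERIALIZER.call(vertex_data)  # => [[0.1, 0.2, 0.3]]
```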
@@ -48,14 +73,26 @@ module OmniAI
 
  # @return [Hash]
  def payload
- { requests: }
+ vertex? ? { instances: } : { requests: }
+ end
+
+ # @return [Hash]
+ def params
+ { key: (@client.api_key unless @client.credentials?) }.compact
  end
 
  # @return [String]
  def path
- "/#{@client.path}/models/#{@model}:batchEmbedContents?key=#{@client.api_key}"
+ "/#{@client.path}/models/#{@model}:#{procedure}"
+ end
+
+ # @return [String]
+ def procedure
+ vertex? ? "predict" : "batchEmbedContents"
  end
 
+ # @param input [Object]
+ # @return [Array]
  def arrayify(input)
  input.is_a?(Array) ? input : [input]
  end
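Taken together, `payload`, `params`, `path`, and `procedure` route the same `Embed` request to two different endpoints: with `vertex?` true the call becomes a `predict` request carrying `instances`, otherwise it remains a `batchEmbedContents` request carrying `requests`, and the `key` query parameter is only sent when no credentials are configured. Roughly (paths abbreviated, model illustrative):

```ruby
# Vertex AI  (credentials configured, vertex? == true):
#   POST .../models/text-embedding-004:predict
#   body:   { instances: [{ content: "..." }] }
#
# Google AI  (api_key configured, vertex? == false):
#   POST .../models/text-embedding-004:batchEmbedContents?key=...
#   body:   { requests: [...] }
```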
@@ -2,6 +2,6 @@
 
  module OmniAI
  module Google
- VERSION = "2.2.2"
+ VERSION = "2.3.1"
  end
  end
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: omniai-google
  version: !ruby/object:Gem::Version
- version: 2.2.2
+ version: 2.3.1
  platform: ruby
  authors:
  - Kevin Sylvestre
  bindir: exe
  cert_chain: []
- date: 2025-04-03 00:00:00.000000000 Z
+ date: 2025-04-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: event_stream_parser
@@ -43,14 +43,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.2'
+ version: '2.3'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.2'
+ version: '2.3'
  - !ruby/object:Gem::Dependency
  name: zeitwerk
  requirement: !ruby/object:Gem::Requirement