gemini_cache 0.0.1 → 0.0.3
- checksums.yaml +4 -4
- data/lib/gemini_cache.rb +12 -4
- metadata +1 -5
- data/lib/geminiext/cache.rb +0 -127
- data/lib/geminiext/messages.rb.disabled +0 -34
- data/lib/geminiext/model.rb.disabled +0 -27
- data/lib/geminiext/response_extender.rb.disabled +0 -44
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '04685d49a72d97ba748888a2c1a90ffae49dd52d5bdea582e5cc93c5278ee05d'
+  data.tar.gz: 1fbf73ef5d37c9eeffee36e2667faf3846a326a217c4ae3dcc9bd6f781bec0ed
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9397ae965759c13a8b4f23cd26e17fbd66807cf6ef09ec299b5ff6695fdbc32f74c9858574f727da489d7039d761be2d900220c39d8d3d9a6cfb22360eb6bcdb
+  data.tar.gz: efa123d59d17f71e2193b82de461b0648d5a887377175331993b9bc48ef17872b0987f9a9622311c40062a6639f3c72e792d69b5a2345568f4985601eb0de624
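The published checksums above can be checked against a locally downloaded copy of the gem. A minimal sketch using Ruby's standard Digest library, assuming the .gem archive has already been unpacked so that data.tar.gz sits in the current directory (the path is illustrative):

require 'digest'

# Compare the SHA256 of the unpacked archive member against the value
# published in checksums.yaml for gemini_cache 0.0.3.
expected = '1fbf73ef5d37c9eeffee36e2667faf3846a326a217c4ae3dcc9bd6f781bec0ed'
actual   = Digest::SHA256.file('data.tar.gz').hexdigest

puts(actual == expected ? 'data.tar.gz checksum OK' : 'checksum mismatch!')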
data/lib/gemini_cache.rb
CHANGED
@@ -31,8 +31,8 @@ module GeminiCache
     raise 'Nome do cache ou display name é obrigatório' if name.nil? && display_name.nil?
     raise 'Nome do cache e display name não podem ser informados juntos' if !name.nil? && !display_name.nil?
 
-    return GeminiCache
-    return GeminiCache
+    return GeminiCache.list.find { |item| item['name'].eql? name } if !name.nil?
+    return GeminiCache.list.find { |item| item['displayName'].eql? display_name } if !display_name.nil?
   end
 
   def self.list
@@ -48,8 +48,8 @@ module GeminiCache
     return [] if JSON.parse(response.body).empty?
 
     JSON.parse(response.body)['cachedContents'].map do |item|
-      def item.delete = GeminiCache
-      def item.set_ttl(ttl = 120) = GeminiCache
+      def item.delete = GeminiCache.delete(name: self['name'])
+      def item.set_ttl(ttl = 120) = GeminiCache.update(name: self['name'], content: { ttl: "#{ttl}s" })
 
       def item.generate_content(contents:)
         conn = Faraday.new(
@@ -122,4 +122,12 @@ module GeminiCache
   rescue Faraday::Error => e
     raise "Erro na requisição: #{e.message}"
   end
+
+  def self.delete_all
+    GeminiCache.list.each { |item| item.delete }
+  end
+
+  class << self
+    alias clear delete_all
+  end
 end
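Taken together, the changes to this file make the lookup actually filter GeminiCache.list by name or displayName, wire the per-item delete and set_ttl helpers to real API calls, and add a delete_all class method with a clear alias. A usage sketch, under the assumption that the lookup method in the first hunk is GeminiCache.get (it mirrors the GeminiExt::Cache.get deleted below) and that GEMINI_API_KEY is set; the display name is illustrative:

require 'gemini_cache'

# Look a cache up by API name or by display name (exactly one of the two).
cache = GeminiCache.get(display_name: 'reference-doc')

cache.set_ttl(600)      # extend this cache's TTL to 600 seconds via update
cache.delete            # remove just this cached content

GeminiCache.delete_all  # remove every cached content
GeminiCache.clear       # same operation through the new alias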
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: gemini_cache
 version: !ruby/object:Gem::Version
-  version: 0.0.1
+  version: 0.0.3
 platform: ruby
 authors:
 - Gedean Dias
@@ -31,10 +31,6 @@ extensions: []
 extra_rdoc_files: []
 files:
 - lib/gemini_cache.rb
-- lib/geminiext/cache.rb
-- lib/geminiext/messages.rb.disabled
-- lib/geminiext/model.rb.disabled
-- lib/geminiext/response_extender.rb.disabled
 homepage: https://github.com/gedean/gemini_cache
 licenses:
 - MIT
data/lib/geminiext/cache.rb
DELETED
@@ -1,127 +0,0 @@
-require 'faraday'
-require 'json'
-
-module GeminiExt
-  module Cache
-    def self.create(contents:, display_name:, model: 'gemini-1.5-flash-8b', ttl: 600)
-      content = {
-        model: "models/#{model}",
-        display_name:,
-        contents:,
-        ttl: "#{ttl}s"
-      }.to_json
-
-      conn = Faraday.new(
-        url: 'https://generativelanguage.googleapis.com',
-        headers: { 'Content-Type' => 'application/json' }
-      )
-
-      response = conn.post('/v1beta/cachedContents') do |req|
-        req.params['key'] = ENV.fetch('GEMINI_API_KEY')
-        req.body = content
-      end
-
-      return JSON.parse(response.body) if response.status == 200
-
-      raise "Erro ao criar cache: #{response.status} - #{response.body}"
-    rescue Faraday::Error => e
-      raise "Erro na requisição: #{e.message}"
-    end
-
-    def self.get(name: nil, display_name: nil)
-      raise 'Nome do cache ou display name é obrigatório' if name.nil? && display_name.nil?
-      raise 'Nome do cache e display name não podem ser informados juntos' if !name.nil? && !display_name.nil?
-
-      return GeminiExt::Cache.list.find { |item| item['name'].eql? name } if !name.nil?
-      return GeminiExt::Cache.list.find { |item| item['displayName'].eql? display_name } if !display_name.nil?
-    end
-
-    def self.list
-      conn = Faraday.new(
-        url: 'https://generativelanguage.googleapis.com',
-        headers: { 'Content-Type' => 'application/json' }
-      )
-
-      response = conn.get("/v1beta/cachedContents") do |req|
-        req.params['key'] = ENV.fetch('GEMINI_API_KEY')
-      end
-
-      return [] if JSON.parse(response.body).empty?
-
-      JSON.parse(response.body)['cachedContents'].map do |item|
-        def item.delete = GeminiExt::Cache.delete(name: self['name'])
-        def item.set_ttl(ttl = 120) = GeminiExt::Cache.update(name: self['name'], content: { ttl: "#{ttl}s" })
-
-        def item.generate_content(contents:)
-          conn = Faraday.new(
-            url: 'https://generativelanguage.googleapis.com',
-            headers: { 'Content-Type' => 'application/json' }
-          ) do |f|
-            f.options.timeout = 300 # timeout em segundos para a requisição completa
-            f.options.open_timeout = 300 # timeout em segundos para abrir a conexão
-          end
-
-          response = conn.post("/v1beta/models/#{self['model'].split('/').last}:generateContent") do |req|
-            req.params['key'] = ENV.fetch('GEMINI_API_KEY')
-            req.body = {
-              cached_content: self['name'],
-              contents:
-            }.to_json
-          end
-
-          if response.status == 200
-            resp = JSON.parse(response.body)
-            def resp.content = dig('candidates', 0, 'content', 'parts', 0, 'text')
-            return resp
-          end
-
-          raise "Erro ao gerar conteúdo: #{response.body}"
-        rescue Faraday::Error => e
-          raise "Erro na requisição: #{e.message}"
-        end
-
-        def item.single_prompt(prompt: ) = generate_content(contents: [{ parts: [{ text: prompt }], role: 'user' }])
-
-        item
-      end
-
-    rescue Faraday::Error => e
-      raise "Erro na requisição: #{e.message}"
-    end
-
-    def self.update(name:, content:)
-      conn = Faraday.new(
-        url: 'https://generativelanguage.googleapis.com',
-        headers: { 'Content-Type' => 'application/json' }
-      )
-
-      response = conn.patch("/v1beta/#{name}") do |req|
-        req.params['key'] = ENV.fetch('GEMINI_API_KEY')
-        req.body = content.to_json
-      end
-
-      return JSON.parse(response.body) if response.status == 200
-
-      raise "Erro ao atualizar cache: #{response.body}"
-    rescue Faraday::Error => e
-      raise "Erro na requisição: #{e.message}"
-    end
-
-    def self.delete(name:)
-      conn = Faraday.new(
-        url: 'https://generativelanguage.googleapis.com',
-        headers: { 'Content-Type' => 'application/json' }
-      )
-
-      response = conn.delete("/v1beta/#{name}") do |req|
-        req.params['key'] = ENV.fetch('GEMINI_API_KEY')
-      end
-
-      return true if response.status == 200
-
-      raise "Erro ao deletar cache: #{response.body}"
-    rescue Faraday::Error => e
-      raise "Erro na requisição: #{e.message}"
-    end
-  end
-end
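This removal retires the GeminiExt::Cache namespace; the surviving data/lib/gemini_cache.rb above carries the same per-item helpers (delete, set_ttl, generate_content). A hedged sketch of the equivalent flow against the new GeminiCache module, assuming it also exposes a create method with the same signature as the deleted GeminiExt::Cache.create and still defines single_prompt on listed items:

require 'gemini_cache'

# Create a cached content entry (assumed to POST /v1beta/cachedContents as the
# deleted GeminiExt::Cache.create did), then fetch it back so the item carries
# the singleton helpers added in list.
GeminiCache.create(
  contents: [{ parts: [{ text: 'A long reference document to cache.' }], role: 'user' }],
  display_name: 'reference-doc',   # illustrative
  ttl: 600
)

cache  = GeminiCache.get(display_name: 'reference-doc')
answer = cache.single_prompt(prompt: 'Summarise the cached document in one sentence.')
puts answer.content   # generate_content defines a `content` reader on the parsed response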
data/lib/geminiext/messages.rb.disabled
DELETED
@@ -1,34 +0,0 @@
-module geminiext
-  class Messages < Array
-    def initialize messages = nil
-      super parse_messages(messages)
-    end
-
-    def add(message) = concat(parse_messages(message))
-
-    private
-    def parse_messages(messages)
-      return [] if messages.nil?
-
-      messages = [messages] unless messages.is_a?(Array)
-
-      # if first element is ok, then do not parse the rest
-      return messages if messages.first in { role: String | Symbol, content: String | Array | Hash}
-
-      messages.flat_map do |msg|
-        if msg.is_a?(Hash)
-          if msg.keys.size == 1
-            role, content = msg.first
-            { role: role.to_s, content: content }
-          elsif msg.key?(:role) && msg.key?(:content)
-            { role: msg[:role].to_s, content: msg[:content] }
-          else
-            msg.map { |role, content| { role: role.to_s, content: content } }
-          end
-        else
-          raise ArgumentError, "Invalid message format: #{msg}"
-        end
-      end
-    end
-  end
-end
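For reference, this deleted (and already .disabled) Messages class normalised shorthand hashes into role/content pairs. The sketch below traces what its parse_messages did for the three hash shapes it handled; note that, as written, `module geminiext` (lowercase) does not parse as a Ruby module name, consistent with the .disabled suffix.

shorthand = { user: 'Hello' }
# => normalised to [{ role: 'user', content: 'Hello' }] (the single key becomes the role)

multi = { system: 'Be terse', user: 'Hello' }
# => fanned out to [{ role: 'system', content: 'Be terse' }, { role: 'user', content: 'Hello' }]

explicit = { role: :user, content: 'Hello' }
# => already matches the role/content pattern, so it is passed through unchanged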
data/lib/geminiext/model.rb.disabled
DELETED
@@ -1,27 +0,0 @@
-module geminiext
-  module Model
-    GPT_BASIC_MODEL = ENV.fetch('OPENAI_GPT_BASIC_MODEL', 'gpt-4o-mini')
-    GPT_ADVANCED_MODEL = ENV.fetch('OPENAI_GPT_ADVANCED_MODEL', 'gpt-4o')
-    GPT_ADVANCED_MODEL_LATEST = ENV.fetch('OPENAI_GPT_ADVANCED_MODEL_LATEST', 'chatgpt-4o-latest')
-
-    O1_BASIC_MODEL = ENV.fetch('OPENAI_O1_BASIC_MODEL', 'o1-mini')
-    O1_ADVANCED_MODEL = ENV.fetch('OPENAI_O1_ADVANCED_MODEL', 'o1-preview')
-
-    def self.select(model)
-      case model
-      when :gpt_basic
-        GPT_BASIC_MODEL
-      when :gpt_advanced
-        GPT_ADVANCED_MODEL
-      when :gpt_advanced_latest
-        GPT_ADVANCED_MODEL_LATEST
-      when :o1_basic
-        O1_BASIC_MODEL
-      when :o1_advanced
-        O1_ADVANCED_MODEL
-      else
-        model
-      end
-    end
-  end
-end
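The removed Model helper mapped shorthand symbols to OpenAI model names, with environment variables as overrides and unknown values passed straight through. A sketch of how select resolved with no OPENAI_* overrides set (shown as comments, since the enclosing lowercase module name prevents referencing the constant directly):

# Model.select(:gpt_basic)           # => 'gpt-4o-mini'
# Model.select(:gpt_advanced)        # => 'gpt-4o'
# Model.select(:gpt_advanced_latest) # => 'chatgpt-4o-latest'
# Model.select(:o1_basic)            # => 'o1-mini'
# Model.select(:o1_advanced)         # => 'o1-preview'
# Model.select('custom-model')       # => 'custom-model' (fallback: the argument itself)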
data/lib/geminiext/response_extender.rb.disabled
DELETED
@@ -1,44 +0,0 @@
-module ResponseExtender
-  def chat_params = self[:chat_params]
-
-  def message = dig('choices', 0, 'message')
-
-  def content = dig('choices', 0, 'message', 'content')
-  def content? = !content.nil?
-
-  def tool_calls = dig('choices', 0, 'message', 'tool_calls')
-  def tool_calls? = !tool_calls.nil?
-
-  def functions
-    return if tool_calls.nil?
-
-    functions = tool_calls.filter { |tool| tool['type'].eql? 'function' }
-    return if functions.empty?
-
-    functions_list = []
-    functions.map.with_index do |function, function_index|
-      function_info = tool_calls.dig(function_index, 'function')
-      function_def = { id: function['id'], name: function_info['name'], arguments: Oj.load(function_info['arguments'], symbol_keys: true) }
-
-      def function_def.run(context:)
-        {
-          tool_call_id: self[:id],
-          role: :tool,
-          name: self[:name],
-          content: context.send(self[:name], **self[:arguments])
-        }
-      end
-
-      functions_list << function_def
-    end
-
-    functions_list
-  end
-
-  def functions_run_all(context:)
-    raise 'No functions to run' if functions.nil?
-    functions.map { |function| function.run(context:) }
-  end
-
-  def functions? = !functions.nil?
-end
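ResponseExtender was meant to be mixed into a parsed chat-completion hash with Object#extend, layering content, tool-call and function-running helpers over Hash#dig. A minimal sketch of that usage with a hand-written response hash standing in for a real API payload; it assumes a local copy of the module is still loaded, and Oj is only needed once tool calls are present:

require 'json'

# A hand-written stand-in for a parsed chat-completion response body.
response = JSON.parse('{"choices":[{"message":{"role":"assistant","content":"Hello there"}}]}')

response.extend(ResponseExtender)

response.content      # => "Hello there"
response.content?     # => true
response.tool_calls?  # => false (no tool_calls key in this payload)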