geminize 0.1.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.standard.yml +3 -0
- data/.yardopts +14 -0
- data/CHANGELOG.md +24 -0
- data/CODE_OF_CONDUCT.md +132 -0
- data/CONTRIBUTING.md +109 -0
- data/LICENSE.txt +21 -0
- data/README.md +423 -0
- data/Rakefile +10 -0
- data/examples/README.md +75 -0
- data/examples/configuration.rb +58 -0
- data/examples/embeddings.rb +195 -0
- data/examples/multimodal.rb +126 -0
- data/examples/rails_chat/README.md +69 -0
- data/examples/rails_chat/app/controllers/chat_controller.rb +26 -0
- data/examples/rails_chat/app/views/chat/index.html.erb +112 -0
- data/examples/rails_chat/config/routes.rb +8 -0
- data/examples/rails_initializer.rb +46 -0
- data/examples/system_instructions.rb +101 -0
- data/lib/geminize/chat.rb +98 -0
- data/lib/geminize/client.rb +318 -0
- data/lib/geminize/configuration.rb +98 -0
- data/lib/geminize/conversation_repository.rb +161 -0
- data/lib/geminize/conversation_service.rb +126 -0
- data/lib/geminize/embeddings.rb +145 -0
- data/lib/geminize/error_mapper.rb +96 -0
- data/lib/geminize/error_parser.rb +120 -0
- data/lib/geminize/errors.rb +185 -0
- data/lib/geminize/middleware/error_handler.rb +72 -0
- data/lib/geminize/model_info.rb +91 -0
- data/lib/geminize/models/chat_request.rb +186 -0
- data/lib/geminize/models/chat_response.rb +118 -0
- data/lib/geminize/models/content_request.rb +530 -0
- data/lib/geminize/models/content_response.rb +99 -0
- data/lib/geminize/models/conversation.rb +156 -0
- data/lib/geminize/models/embedding_request.rb +222 -0
- data/lib/geminize/models/embedding_response.rb +1064 -0
- data/lib/geminize/models/memory.rb +88 -0
- data/lib/geminize/models/message.rb +140 -0
- data/lib/geminize/models/model.rb +171 -0
- data/lib/geminize/models/model_list.rb +124 -0
- data/lib/geminize/models/stream_response.rb +99 -0
- data/lib/geminize/rails/app/controllers/concerns/geminize/controller.rb +105 -0
- data/lib/geminize/rails/app/helpers/geminize_helper.rb +125 -0
- data/lib/geminize/rails/controller_additions.rb +41 -0
- data/lib/geminize/rails/engine.rb +29 -0
- data/lib/geminize/rails/helper_additions.rb +37 -0
- data/lib/geminize/rails.rb +50 -0
- data/lib/geminize/railtie.rb +33 -0
- data/lib/geminize/request_builder.rb +57 -0
- data/lib/geminize/text_generation.rb +285 -0
- data/lib/geminize/validators.rb +150 -0
- data/lib/geminize/vector_utils.rb +164 -0
- data/lib/geminize/version.rb +5 -0
- data/lib/geminize.rb +527 -0
- data/lib/generators/geminize/install_generator.rb +22 -0
- data/lib/generators/geminize/templates/README +31 -0
- data/lib/generators/geminize/templates/initializer.rb +38 -0
- data/sig/geminize.rbs +4 -0
- metadata +218 -0
@@ -0,0 +1,98 @@ data/lib/geminize/chat.rb

# frozen_string_literal: true

module Geminize
  # Class for chat functionality
  class Chat
    # @return [Geminize::Client] The client instance
    attr_reader :client

    # @return [Models::Conversation] The current conversation
    attr_reader :conversation

    # Initialize a new chat instance
    # @param conversation [Geminize::Models::Conversation, nil] The conversation to use
    # @param client [Geminize::Client, nil] The client to use (optional)
    # @param options [Hash] Additional options
    def initialize(conversation = nil, client = nil, options = {})
      @conversation = conversation || Models::Conversation.new
      @client = client || Client.new(options)
      @options = options
    end

    # Send a user message and get a model response
    # @param content [String] The content of the user message
    # @param model_name [String, nil] The model to use (optional)
    # @param params [Hash] Additional generation parameters
    # @option params [Float] :temperature Controls randomness (0.0-1.0)
    # @option params [Integer] :max_tokens Maximum tokens to generate
    # @option params [Float] :top_p Top-p value for nucleus sampling (0.0-1.0)
    # @option params [Integer] :top_k Top-k value for sampling
    # @option params [Array<String>] :stop_sequences Stop sequences to end generation
    # @option params [String] :system_instruction System instruction to guide model behavior
    # @return [Models::ChatResponse] The chat response
    # @raise [Geminize::GeminizeError] If the request fails
    def send_message(content, model_name = nil, params = {})
      # Add user message to conversation
      @conversation.add_user_message(content)

      # Create the chat request
      request_params = params.dup

      # Only include system_instruction in params if explicitly provided or set in conversation
      if params[:system_instruction] || @conversation.system_instruction
        request_params[:system_instruction] = params[:system_instruction] || @conversation.system_instruction
      end

      chat_request = Models::ChatRequest.new(
        content,
        model_name || Geminize.configuration.default_model,
        nil, # user_id
        request_params
      )

      # Generate the response using the conversation history
      response = generate_response(chat_request)

      # Extract and add the model's response to the conversation
      if response.has_text?
        @conversation.add_model_message(response.text)
      end

      response
    end

    # Set a system instruction for the conversation
    # @param instruction [String] The system instruction
    # @return [self] The chat instance
    def set_system_instruction(instruction)
      @conversation.system_instruction = instruction
      self
    end

    # Generate a response based on the current conversation
    # @param chat_request [Models::ChatRequest] The chat request
    # @return [Models::ChatResponse] The chat response
    # @raise [Geminize::GeminizeError] If the request fails
    def generate_response(chat_request)
      model_name = chat_request.model_name
      endpoint = RequestBuilder.build_text_generation_endpoint(model_name)

      # Create payload with conversation history
      payload = RequestBuilder.build_chat_request(chat_request, @conversation.messages_as_hashes)

      # Send request to API
      response_data = @client.post(endpoint, payload)
      Models::ChatResponse.from_hash(response_data)
    end

    # Create a new conversation
    # @param title [String, nil] Optional title for the conversation
    # @param system_instruction [String, nil] Optional system instruction
    # @return [Chat] A new chat instance with a fresh conversation
    def self.new_conversation(title = nil, system_instruction = nil)
      # Create a conversation with explicit parameters
      conversation = Models::Conversation.new(nil, title, nil, nil, system_instruction)
      new(conversation)
    end
  end
end
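Taken together, `Chat.new_conversation`, `send_message`, and the accumulated conversation history give a simple multi-turn flow. The sketch below is illustrative only: the method names come from the class above, but the prompts, the conversation title, and the generation parameters are made up, and it assumes `GEMINI_API_KEY` is set in the environment (the Configuration class later in this diff reads that variable by default).

```ruby
require "geminize"

# A minimal multi-turn sketch under the assumptions stated above.
chat = Geminize::Chat.new_conversation(
  "Trip planning",                        # optional conversation title
  "You are a concise travel assistant."   # optional system instruction
)

# send_message appends the user message, calls the API, and records the model's
# reply in the conversation, so later turns carry the full history.
response = chat.send_message("Suggest three day trips from Lisbon.", nil, {temperature: 0.7})
puts response.text

follow_up = chat.send_message("Which of those is the cheapest?")
puts follow_up.text
```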
@@ -0,0 +1,318 @@ data/lib/geminize/client.rb

# frozen_string_literal: true

require "faraday"
require "faraday/retry"
require "json"
require "logger"

module Geminize
  # Client for making HTTP requests to the Gemini API
  class Client
    # @return [Faraday::Connection] The Faraday connection
    attr_reader :connection

    # @return [Boolean] Flag indicating if a streaming operation is in progress
    attr_reader :streaming_in_progress

    # @return [Boolean] Flag indicating if a streaming operation should be cancelled
    attr_reader :cancel_streaming

    # Initialize a new client
    # @param options [Hash] Additional options to override the defaults
    # @option options [String] :api_key API key for Gemini API
    # @option options [String] :api_version API version to use
    # @option options [Integer] :timeout Request timeout in seconds
    # @option options [Integer] :open_timeout Connection open timeout in seconds
    # @option options [Integer] :streaming_timeout Timeout for streaming requests in seconds
    # @option options [Logger] :logger Custom logger instance (default: nil)
    def initialize(options = {})
      @config = Geminize.configuration
      @options = options
      @connection = build_connection
      @streaming_in_progress = false
      @cancel_streaming = false
    end

    # Make a GET request to the specified endpoint
    # @param endpoint [String] The API endpoint path
    # @param params [Hash] Optional query parameters
    # @param headers [Hash] Optional headers
    # @return [Hash] The response body parsed as JSON
    def get(endpoint, params = {}, headers = {})
      response = connection.get(
        build_url(endpoint),
        add_api_key(params),
        default_headers.merge(headers)
      )
      parse_response(response)
    end

    # Make a POST request to the specified endpoint
    # @param endpoint [String] The API endpoint path
    # @param payload [Hash] The request body
    # @param params [Hash] Optional query parameters
    # @param headers [Hash] Optional headers
    # @return [Hash] The response body parsed as JSON
    def post(endpoint, payload = {}, params = {}, headers = {})
      response = connection.post(
        build_url(endpoint),
        payload.to_json,
        default_headers.merge(headers).merge({"Content-Type" => "application/json"})
      ) do |req|
        req.params.merge!(add_api_key(params))
      end
      parse_response(response)
    end

    # Make a streaming POST request to the specified endpoint
    # @param endpoint [String] The API endpoint path
    # @param payload [Hash] The request body
    # @param params [Hash] Optional query parameters
    # @param headers [Hash] Optional headers
    # @yield [chunk] Yields each chunk of the streaming response
    # @yieldparam chunk [String, Hash] A chunk of the response (raw text or parsed JSON)
    # @return [void]
    # @raise [Geminize::StreamingError] If the streaming request fails
    # @raise [Geminize::StreamingInterruptedError] If the connection is interrupted
    # @raise [Geminize::StreamingTimeoutError] If the streaming connection times out
    # @raise [Geminize::InvalidStreamFormatError] If the stream format is invalid
    def post_stream(endpoint, payload = {}, params = {}, headers = {}, &block)
      raise ArgumentError, "A block is required for streaming requests" unless block_given?

      # Check if another streaming operation is in progress
      if @streaming_in_progress
        raise StreamingError.new("Another streaming operation is already in progress")
      end

      @streaming_in_progress = true
      @cancel_streaming = false

      # Ensure we have alt=sse parameter for the API to get server-sent events
      params = params.merge(alt: "sse")

      # Create a separate connection for streaming
      streaming_connection = build_streaming_connection

      # Initialize buffer for SSE processing
      @buffer = ""

      # Track if we've received any data
      received_data = false

      begin
        # Make the streaming request
        streaming_connection.post(
          build_url(endpoint),
          payload.to_json,
          default_headers.merge(headers).merge({
            "Content-Type" => "application/json",
            "Accept" => "text/event-stream" # Request SSE format explicitly
          })
        ) do |req|
          req.params.merge!(add_api_key(params))

          # Configure buffer management and chunked transfer reception
          req.options.on_data = proc do |chunk, size, env|
            # Check if cancellation is requested
            if @cancel_streaming
              env[:request].http_connection.close
              raise StreamingInterruptedError.new("Streaming was cancelled by the client")
            end

            received_data = true

            # Skip empty chunks
            next if chunk.strip.empty?

            # Use a buffer for handling partial SSE messages
            @buffer += chunk

            # Process complete SSE messages in buffer
            process_buffer(&block)
          end
        end
      rescue Faraday::ConnectionFailed => e
        # Connection was established but interrupted
        if received_data
          raise StreamingInterruptedError.new("Streaming connection interrupted: #{e.message}")
        else
          raise RequestError.new("Failed to establish streaming connection: #{e.message}", "CONNECTION_ERROR", nil)
        end
      rescue Faraday::TimeoutError => e
        raise StreamingTimeoutError.new("Streaming operation timed out: #{e.message}")
      rescue JSON::ParserError => e
        raise InvalidStreamFormatError.new("Could not parse streaming response: #{e.message}")
      rescue => e
        # Generic error handler
        error_message = "Streaming error: #{e.message}"
        raise StreamingError.new(error_message, nil, nil)
      ensure
        # Always clean up resources
        @buffer = nil
        @streaming_in_progress = false
        @cancel_streaming = false

        # Reset the connection to free resources
        begin
          streaming_connection&.close if streaming_connection&.respond_to?(:close)
        rescue => e
          # Just log the error if there's a problem closing the connection
          @options[:logger]&.warn("Error closing streaming connection: #{e.message}")
        end
      end
    end

    # Set the cancel_streaming flag to cancel an in-progress streaming operation
    # @param value [Boolean] Value to set
    # @return [Boolean] The new value
    def cancel_streaming=(value)
      # Only set if a streaming operation is in progress
      return unless @streaming_in_progress

      @cancel_streaming = value
    end

    private

    # Process the buffer for complete SSE messages
    # @yield [data] Yields each parsed SSE data chunk
    # @return [void]
    def process_buffer
      # Split the buffer by double newlines, which separate SSE messages
      messages = @buffer.split(/\r\n\r\n|\n\n|\r\r/)

      # The last element might be incomplete, so keep it in the buffer
      @buffer = messages.pop || ""

      # Process each complete message
      messages.each do |message|
        # Skip empty messages
        next if message.strip.empty?

        # Extract data lines
        data_lines = []
        message.each_line do |line|
          if line.start_with?("data: ")
            data_lines << line[6..]
          end
        end

        # Skip if no data lines found
        next if data_lines.empty?

        # Join data lines for multi-line data
        data = data_lines.join("")

        # Skip "[DONE]" marker
        next if data.strip == "[DONE]"

        begin
          # Try to parse as JSON
          parsed_data = JSON.parse(data)

          # Yield parsed data regardless of type
          yield parsed_data
        rescue JSON::ParserError
          # If not valid JSON, yield as raw text
          yield data
        end
      end
    end

    # Build the Faraday connection with the configured URL and default headers
    # @return [Faraday::Connection]
    def build_connection
      Faraday.new(url: @config.api_base_url) do |conn|
        conn.options.timeout = @options[:timeout] || @config.timeout
        conn.options.open_timeout = @options[:open_timeout] || @config.open_timeout

        # Add JSON response parsing
        conn.response :json, content_type: /\bjson$/

        # Add our custom error handling middleware
        conn.response :geminize_error_handler

        # Add retry middleware
        conn.request :retry, {
          max: 3,
          interval: 0.05,
          interval_randomness: 0.5,
          backoff_factor: 2,
          retry_statuses: [429, 503]
        }

        # Add logging if enabled
        if @config.log_requests || @options[:logger]
          logger = @options[:logger] || Logger.new($stdout)
          conn.response :logger, logger, bodies: true
        end
      end
    end

    # Build the Faraday connection optimized for streaming with the configured URL
    # @return [Faraday::Connection]
    def build_streaming_connection
      Faraday.new(url: @config.api_base_url) do |conn|
        # Set longer timeouts for streaming connections which may stay open longer
        conn.options.timeout = (@options[:streaming_timeout] || @config.streaming_timeout || 300)
        conn.options.open_timeout = (@options[:open_timeout] || @config.open_timeout)

        # Disable response parsing middleware for raw streaming
        conn.adapter :net_http do |http|
          # Configure Net::HTTP for streaming
          http.read_timeout = (@options[:streaming_timeout] || @config.streaming_timeout || 300)
          http.keep_alive_timeout = 60
          http.max_retries = 0 # Disable retries for streaming connections
        end

        # Error handling for streaming connections
        conn.response :geminize_error_handler

        # Add logging if enabled
        if @config.log_requests || @options[:logger]
          logger = @options[:logger] || Logger.new($stdout)
          conn.response :logger, logger, bodies: false
        end
      end
    end

    # Build the complete URL including API version
    # @param endpoint [String] The API endpoint path
    # @return [String] The complete URL path
    def build_url(endpoint)
      version = @options[:api_version] || @config.api_version
      "#{version}/#{endpoint}"
    end

    # Default headers for all requests
    # @return [Hash] Default headers
    def default_headers
      {
        "Accept" => "application/json"
      }
    end

    # Add API key to request parameters
    # @param params [Hash] Original parameters
    # @return [Hash] Parameters with API key added
    def add_api_key(params)
      api_key = @options[:api_key] || @config.api_key
      params.merge(key: api_key)
    end

    # Parse the response body as JSON
    # @param response [Faraday::Response] The response object
    # @return [Hash] The parsed JSON
    def parse_response(response)
      return {} if response.body.to_s.empty?

      if response.body.is_a?(Hash)
        response.body
      else
        JSON.parse(response.body)
      end
    rescue JSON::ParserError => e
      raise Geminize::RequestError.new("Invalid JSON response: #{e.message}", "INVALID_JSON", nil)
    end
  end
end
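Callers that want to skip the higher-level wrappers can use the client directly: `post` returns the parsed JSON body, while `post_stream` yields SSE chunks to a block. The sketch below is a rough illustration rather than the gem's documented API surface: the endpoint strings and payload shape follow the public Gemini REST conventions, not anything shown in this diff (the gem normally builds endpoints through `RequestBuilder`), so treat them as assumptions.

```ruby
require "geminize"

# Low-level client sketch; endpoint paths and payload shape are assumptions
# based on the public Gemini REST API, not taken from this gem.
client = Geminize::Client.new(api_key: ENV["GEMINI_API_KEY"], timeout: 15)

payload = {
  contents: [{parts: [{text: "Say hello in Portuguese."}]}]
}

# Plain request: returns the parsed JSON response body as a Hash.
result = client.post("models/gemini-2.0-flash:generateContent", payload)
puts result.dig("candidates", 0, "content", "parts", 0, "text")

# Streaming request: each SSE chunk is yielded as parsed JSON, or as raw text
# when a chunk is not valid JSON. A block is required.
client.post_stream("models/gemini-2.0-flash:streamGenerateContent", payload) do |chunk|
  print chunk.dig("candidates", 0, "content", "parts", 0, "text") if chunk.is_a?(Hash)
end
```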
@@ -0,0 +1,98 @@ data/lib/geminize/configuration.rb

# frozen_string_literal: true

require "singleton"

module Geminize
  # Handles configuration options for the Geminize gem
  class Configuration
    include Singleton

    # Base URL for the Google Gemini API
    API_BASE_URL = "https://generativelanguage.googleapis.com"

    # Default API version
    DEFAULT_API_VERSION = "v1beta"

    # Default model
    DEFAULT_MODEL = "gemini-2.0-flash"

    # Default embedding model
    DEFAULT_EMBEDDING_MODEL = "embedding-001"

    # Default timeout values (in seconds)
    DEFAULT_TIMEOUT = 30
    DEFAULT_OPEN_TIMEOUT = 10
    DEFAULT_STREAMING_TIMEOUT = 300
    DEFAULT_ON_DATA_TIMEOUT = 60

    # API key for accessing the Gemini API
    # @return [String, nil]
    attr_accessor :api_key

    # API version to use
    # @return [String]
    attr_accessor :api_version

    # Default model to use if not specified in requests
    # @return [String]
    attr_accessor :default_model

    # Default embedding model to use if not specified in embedding requests
    # @return [String]
    attr_accessor :default_embedding_model

    # Request timeout in seconds
    # @return [Integer]
    attr_accessor :timeout

    # Connection open timeout in seconds
    # @return [Integer]
    attr_accessor :open_timeout

    # Streaming request timeout in seconds
    # @return [Integer]
    attr_accessor :streaming_timeout

    # Timeout between data chunks in streaming responses
    # @return [Integer]
    attr_accessor :on_data_timeout

    # @return [Boolean]
    attr_accessor :log_requests

    # Initialize with default configuration values
    def initialize
      reset!
    end

    # Reset configuration to default values
    # @return [void]
    def reset!
      @api_key = ENV["GEMINI_API_KEY"]
      @api_version = DEFAULT_API_VERSION
      @default_model = DEFAULT_MODEL
      @default_embedding_model = DEFAULT_EMBEDDING_MODEL
      @timeout = DEFAULT_TIMEOUT
      @open_timeout = DEFAULT_OPEN_TIMEOUT
      @streaming_timeout = DEFAULT_STREAMING_TIMEOUT
      @on_data_timeout = DEFAULT_ON_DATA_TIMEOUT
      @log_requests = false
    end

    # Get the base URL for the Gemini API
    # @return [String]
    def api_base_url
      API_BASE_URL
    end

    # Validates the current configuration
    # @return [Boolean]
    # @raise [ConfigurationError] if the configuration is invalid
    def validate!
      raise ConfigurationError, "API key must be set" if @api_key.nil? || @api_key.empty?
      raise ConfigurationError, "API version must be set" if @api_version.nil? || @api_version.empty?

      true
    end
  end
end
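A minimal configuration sketch follows. It assumes `Geminize.configuration` returns the singleton above, which is consistent with its use in `Client` and `Chat` in this diff; a `Geminize.configure` block may also exist in `lib/geminize.rb`, but that file's contents are not shown here, so the sketch sets attributes directly.

```ruby
require "geminize"

# Configuration sketch: adjust the shared singleton before making requests.
# GEMINI_API_KEY is already picked up from the environment by reset!, so the
# explicit assignment below is only needed when sourcing the key elsewhere.
config = Geminize.configuration
config.api_key = ENV.fetch("GEMINI_API_KEY")
config.default_model = "gemini-2.0-flash"
config.timeout = 20
config.log_requests = true

# Raises Geminize::ConfigurationError if the API key or API version is missing.
config.validate!
```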
@@ -0,0 +1,161 @@ data/lib/geminize/conversation_repository.rb

# frozen_string_literal: true

require "fileutils"
require "json"

module Geminize
  # Interface for conversation repositories
  class ConversationRepository
    # Save a conversation
    # @param conversation [Models::Conversation] The conversation to save
    # @return [Boolean] True if the save was successful
    def save(conversation)
      raise NotImplementedError, "Subclasses must implement #save"
    end

    # Load a conversation by ID
    # @param id [String] The ID of the conversation to load
    # @return [Models::Conversation, nil] The loaded conversation or nil if not found
    def load(id)
      raise NotImplementedError, "Subclasses must implement #load"
    end

    # Delete a conversation by ID
    # @param id [String] The ID of the conversation to delete
    # @return [Boolean] True if the deletion was successful
    def delete(id)
      raise NotImplementedError, "Subclasses must implement #delete"
    end

    # List all available conversations
    # @return [Array<Hash>] An array of conversation metadata
    def list
      raise NotImplementedError, "Subclasses must implement #list"
    end
  end

  # File-based implementation of the ConversationRepository
  class FileConversationRepository < ConversationRepository
    # @return [String] The directory where conversations are stored
    attr_reader :storage_dir

    # Initialize a new file-based repository
    # @param directory [String] The directory to store conversations in
    def initialize(directory = nil)
      @storage_dir = directory || File.join(Dir.home, ".geminize", "conversations")
      FileUtils.mkdir_p(@storage_dir) unless Dir.exist?(@storage_dir)
    end

    # Save a conversation to disk
    # @param conversation [Models::Conversation] The conversation to save
    # @return [Boolean] True if the save was successful
    def save(conversation)
      return false unless conversation

      begin
        file_path = file_path_for(conversation.id)
        File.write(file_path, conversation.to_json)
        true
      rescue
        false
      end
    end

    # Load a conversation from disk by ID
    # @param id [String] The ID of the conversation to load
    # @return [Models::Conversation, nil] The loaded conversation or nil if not found
    def load(id)
      file_path = file_path_for(id)
      return nil unless File.exist?(file_path)

      begin
        json = File.read(file_path)
        Models::Conversation.from_json(json)
      rescue
        nil
      end
    end

    # Delete a conversation from disk by ID
    # @param id [String] The ID of the conversation to delete
    # @return [Boolean] True if the deletion was successful
    def delete(id)
      file_path = file_path_for(id)
      return false unless File.exist?(file_path)

      begin
        File.delete(file_path)
        true
      rescue
        false
      end
    end

    # List all available conversations
    # @return [Array<Models::Conversation>] An array of conversations
    def list
      Dir.glob(File.join(@storage_dir, "*.json")).map do |file_path|
        json = File.read(file_path)
        Models::Conversation.from_json(json)
      rescue
        nil # Skip files that can't be parsed
      end.compact.sort_by { |conversation| conversation.updated_at }.reverse
    rescue
      []
    end

    private

    # Get the file path for a conversation ID
    # @param id [String] The conversation ID
    # @return [String] The file path
    def file_path_for(id)
      # Ensure the ID is safe for a filename
      safe_id = id.to_s.gsub(/[^a-zA-Z0-9_-]/, "_")
      File.join(@storage_dir, "#{safe_id}.json")
    end
  end

  # In-memory implementation of the ConversationRepository (for testing)
  class MemoryConversationRepository < ConversationRepository
    # Initialize a new memory repository
    def initialize
      @conversations = {}
    end

    # Save a conversation to memory
    # @param conversation [Models::Conversation] The conversation to save
    # @return [Boolean] True if the save was successful
    def save(conversation)
      return false unless conversation

      @conversations[conversation.id] = conversation
      true
    end

    # Load a conversation from memory by ID
    # @param id [String] The ID of the conversation to load
    # @return [Models::Conversation, nil] The loaded conversation or nil if not found
    def load(id)
      @conversations[id]
    end

    # Delete a conversation from memory by ID
    # @param id [String] The ID of the conversation to delete
    # @return [Boolean] True if the deletion was successful
    def delete(id)
      if @conversations.key?(id)
        @conversations.delete(id)
        true
      else
        false
      end
    end

    # List all available conversations
    # @return [Array<Models::Conversation>] An array of conversations
    def list
      @conversations.values.sort_by(&:updated_at).reverse
    end
  end
end
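The repository classes make conversation persistence a small amount of glue code. A usage sketch under stated assumptions: the directory argument, conversation title, and prompt are arbitrary examples, and the `send_message` call requires a configured API key; the method names themselves come from the classes in this diff.

```ruby
require "geminize"

# Persist a chat's conversation to disk and reload it later. The directory
# argument is optional and defaults to ~/.geminize/conversations; the tmp path
# here is just for illustration.
repo = Geminize::FileConversationRepository.new("/tmp/geminize_conversations")

chat = Geminize::Chat.new_conversation("Support thread")
chat.send_message("How do I reset my password?") # requires a configured API key
repo.save(chat.conversation)

# Later: load the conversation by ID and resume with the saved history.
restored = repo.load(chat.conversation.id)
resumed_chat = Geminize::Chat.new(restored) if restored

# The in-memory variant exposes the same interface and suits tests.
memory_repo = Geminize::MemoryConversationRepository.new
memory_repo.save(chat.conversation)
puts memory_repo.list.length
```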