gemini_craft 0.1.3 → 0.2.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +292 -4
- data/README.md +652 -38
- data/lib/gemini_craft/cache.rb +102 -18
- data/lib/gemini_craft/client.rb +280 -75
- data/lib/gemini_craft/configuration.rb +24 -1
- data/lib/gemini_craft/error.rb +27 -0
- data/lib/gemini_craft/version.rb +1 -1
- data/lib/gemini_craft.rb +36 -3
- metadata +6 -3
data/lib/gemini_craft/cache.rb
CHANGED
@@ -3,7 +3,7 @@
 require "digest"

 module GeminiCraft
-  #
+  # Enhanced in-memory cache for API responses with automatic cleanup
   class Cache
     # Initialize the cache
     # @param config [GeminiCraft::Configuration] Configuration object
@@ -11,45 +11,129 @@ module GeminiCraft
       @config = config
       @store = {}
       @timestamps = {}
+      @access_times = {}
+      @mutex = Mutex.new
+      @cleanup_thread = nil
+
+      start_cleanup_thread if @config.cache_enabled
     end

     # Get a value from the cache
     # @param key [String] Cache key
     # @return [String, nil] Cached value or nil if not found/expired
     def get(key)
-
+      @mutex.synchronize do
+        return nil unless @store.key?(key)

-
-
-
-
-
-      end
+        # Check if the entry has expired
+        if expired?(key)
+          remove_entry(key)
+          return nil
+        end

-
+        # Update access time for LRU
+        @access_times[key] = Time.now.to_i
+        @store[key]
+      end
     end

     # Set a value in the cache
     # @param key [String] Cache key
     # @param value [String] Value to cache
     def set(key, value)
-      @
-
+      @mutex.synchronize do
+        @store[key] = value
+        current_time = Time.now.to_i
+        @timestamps[key] = current_time
+        @access_times[key] = current_time
+
+        # Perform cleanup if cache is getting large
+        cleanup_if_needed
+      end
     end

     # Clear the entire cache
     def clear
-      @
-
+      @mutex.synchronize do
+        @store.clear
+        @timestamps.clear
+        @access_times.clear
+      end
+    end
+
+    # Get cache statistics
+    # @return [Hash] Cache statistics
+    def stats
+      @mutex.synchronize do
+        {
+          size: @store.size,
+          oldest_entry: @timestamps.values.min,
+          newest_entry: @timestamps.values.max,
+          total_keys: @timestamps.keys
+        }
+      end
     end

     # Remove expired entries from the cache
     def cleanup
-
-
-
-
-
+      @mutex.synchronize do
+        current_time = Time.now.to_i
+        expired_keys = []
+
+        @timestamps.each do |key, timestamp|
+          expired_keys << key if current_time - timestamp > @config.cache_ttl
+        end
+
+        expired_keys.each { |key| remove_entry(key) }
+        expired_keys.size
+      end
+    end
+
+    # Stop the cleanup thread (for testing)
+    def stop_cleanup_thread
+      return unless @cleanup_thread
+
+      @cleanup_thread.kill
+      @cleanup_thread = nil
+    end
+
+    private
+
+    def expired?(key)
+      return false unless @timestamps[key]
+
+      Time.now.to_i - @timestamps[key] > @config.cache_ttl
+    end
+
+    def remove_entry(key)
+      @store.delete(key)
+      @timestamps.delete(key)
+      @access_times.delete(key)
+    end
+
+    def cleanup_if_needed
+      return if @store.size < 100 # Lower threshold for testing
+
+      # Remove expired entries first
+      cleanup
+
+      # If still too large, remove least recently used entries
+      return if @store.size < 100
+
+      # Remove 50% of entries (LRU)
+      lru_count = @store.size / 2
+      lru_keys = @access_times.sort_by { |_, time| time }.first(lru_count).map(&:first)
+      lru_keys.each { |key| remove_entry(key) }
+    end
+
+    def start_cleanup_thread
+      @cleanup_thread = Thread.new do
+        loop do
+          sleep(@config.cache_ttl / 2) # Cleanup every half TTL period
+          cleanup
+        rescue StandardError => e
+          # Log error if logger is available, otherwise silently continue
+          Rails.logger.warn "[GeminiCraft::Cache] Cleanup error: #{e.message}" if defined?(Rails) && Rails.logger
         end
       end
     end
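The rewritten cache wraps every read and write in a Mutex, tracks per-key access times for LRU eviction, exposes stats and cleanup, and starts a background sweeper thread when caching is enabled. A minimal usage sketch; the GeminiCraft.configure / GeminiCraft.configuration helpers are assumed from the gem's top-level module (gemini_craft.rb, also changed in this release but not shown in this diff):

# Sketch only: exercising the Cache API added in 0.2.0.
require "gemini_craft"

GeminiCraft.configure do |config|          # assumed top-level configure block
  config.api_key       = ENV["GEMINI_API_KEY"]
  config.cache_enabled = true
  config.cache_ttl     = 300 # seconds
end

cache = GeminiCraft::Cache.new(GeminiCraft.configuration)
cache.set("greeting", "Hello from cache")
cache.get("greeting")     # => "Hello from cache"; also refreshes the key's LRU access time
cache.stats               # => { size: 1, oldest_entry: ..., newest_entry: ..., total_keys: [...] }
cache.cleanup             # => number of expired entries removed
cache.stop_cleanup_thread # stops the background sweeper (handy in tests)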
data/lib/gemini_craft/client.rb
CHANGED
@@ -1,3 +1,5 @@
+# lib/gemini_craft/client.rb - FIXED STREAMING
+
 # frozen_string_literal: true

 require "faraday"
@@ -5,54 +7,165 @@ require "faraday/retry"
 require "json"

 module GeminiCraft
-  # Client for interacting with the Gemini API
   class Client
-    attr_reader :config, :cache
+    attr_reader :config, :cache, :logger

-    # Initialize a new client
     def initialize
       @config = GeminiCraft.configuration
       @config.validate!
       @cache = Cache.new(@config)
+      @logger = setup_logger
     end

-
-
-
-
-    # @return [String] The generated content
-    def generate_content(text, system_instruction = nil, options = {})
-      # Create cache key from the request parameters
-      cache_key = generate_cache_key(text, system_instruction, options)
+    def generate_content(text, system_instruction = nil, options = {}, stream: false)
+      log(:info, "Generating content", { model: @config.model, stream: stream })
+
+      cache_key = generate_cache_key(text, system_instruction, options) unless stream

-
-
+      if !stream && @config.cache_enabled && (cached_response = @cache.get(cache_key))
+        log(:debug, "Cache hit", { cache_key: cache_key })
         return cached_response
       end

-
+      payload = build_payload(text, system_instruction, options, stream: stream)
+
+      if stream
+        generate_streaming_content(payload)
+      else
+        generate_standard_content(payload, cache_key)
+      end
+    rescue StandardError => e
+      log(:error, "Content generation failed", { error: e.message })
+      raise
+    end
+
+    def generate_with_functions(text, functions, system_instruction = nil, options = {})
+      log(:info, "Generating content with functions", { function_count: functions.size })
+
       payload = build_payload(text, system_instruction, options)
+      payload[:tools] = [{ function_declarations: functions }]

-      # Send request to API
       response = make_request("models/#{@config.model}:generateContent", payload)
+      process_function_response(response)
+    end
+
+    private

-
+    def setup_logger
+      return @config.logger if @config.logger
+      return Logger.new(IO::NULL) unless defined?(Rails)
+
+      Rails.logger
+    rescue StandardError
+      Logger.new(IO::NULL)
+    end
+
+    def log(level, message, metadata = {})
+      return unless @logger.respond_to?(level)
+
+      log_message = "[GeminiCraft] #{message}"
+      log_message += " #{metadata.inspect}" unless metadata.empty?
+
+      @logger.send(level, log_message)
+    end
+
+    def generate_standard_content(payload, cache_key)
+      response = make_request("models/#{@config.model}:generateContent", payload)
       content = extract_content(response)

-
-
+      if @config.cache_enabled && cache_key
+        @cache.set(cache_key, content)
+        log(:debug, "Response cached", { cache_key: cache_key })
+      end

       content
     end

-
+    def generate_streaming_content(payload)
+      Enumerator.new do |yielder|
+        # Remove stream flag from payload
+        streaming_payload = payload.dup
+        streaming_payload.delete(:stream)
+
+        # Use streamGenerateContent endpoint with alt=sse
+        streaming_connection.post("models/#{@config.model}:streamGenerateContent") do |req|
+          req.params["key"] = @config.api_key
+          req.params["alt"] = "sse"
+          req.headers["Content-Type"] = "application/json"
+          req.headers["Accept"] = "text/event-stream"
+          req.body = JSON.generate(streaming_payload)
+
+          # Process each chunk as it arrives
+          req.options.on_data = proc do |chunk, _overall_received_bytes, _env|
+            process_streaming_chunk(chunk, yielder)
+          end
+        end
+      end
+    end
+
+    def process_streaming_chunk(chunk, yielder)
+      StreamingProcessor.new(self).process_chunk(chunk) { |content| yielder << content }
+    rescue StandardError => e
+      log(:error, "Streaming error", { error: e.message })
+      raise StreamingError, "Streaming failed: #{e.message}"
+    end
+
+    def handle_streaming_response(response)
+      # Handle final streaming response if there are any errors
+      return if [200, 204].include?(response.status)
+
+      error_body = response.body.empty? ? "Unknown streaming error" : response.body
+      raise APIError, "Streaming request failed (#{response.status}): #{error_body}"
+    end
+
+    def process_function_response(response)
+      FunctionResponseProcessor.new.process(response)
+    end

-
-
-
-
-
-
+    def build_payload(text, system_instruction, options, stream: false)
+      PayloadBuilder.new.build(text, system_instruction, options, stream: stream)
+    end
+
+    def make_request(endpoint, payload)
+      log(:debug, "Making API request", { endpoint: endpoint })
+
+      response = connection.post(endpoint) do |req|
+        req.params["key"] = @config.api_key
+        req.headers["Content-Type"] = "application/json"
+        req.body = JSON.generate(payload)
+      end
+
+      ResponseHandler.new(self).handle_response(response)
+    rescue Faraday::TimeoutError => e
+      raise TimeoutError, "Request timed out: #{e.message}"
+    rescue Faraday::ConnectionFailed => e
+      raise ConnectionError, "Connection failed: #{e.message}"
+    rescue Faraday::Error => e
+      raise APIError, "API request failed: #{e.message}"
+    end
+
+    def connection
+      @connection ||= ConnectionBuilder.new(@config).build_connection
+    end
+
+    # FIXED: Separate connection for streaming to handle SSE properly
+    def streaming_connection
+      @streaming_connection ||= StreamingConnectionBuilder.new(@config).build_connection
+    end
+
+    def extract_content(response)
+      ContentExtractor.new.extract(response)
+    rescue StandardError => e
+      raise ResponseError, "Failed to extract content from response: #{e.message}"
+    end
+
+    def generate_cache_key(text, system_instruction, options)
+      CacheKeyGenerator.new(@config.model).generate(text, system_instruction, options)
+    end
+  end
+
+  class PayloadBuilder
+    def build(text, system_instruction, options, stream: false)
       payload = {
         contents: [
           {
@@ -65,7 +178,6 @@ module GeminiCraft
         ]
       }

-      # Add system instruction if provided
       if system_instruction
         payload[:system_instruction] = {
           parts: [
@@ -76,68 +188,161 @@ module GeminiCraft
         }
       end

-      #
-      payload
+      # Don't include stream flag in payload for SSE streaming
+      # payload[:stream] = true if stream

+      payload.merge!(options) if options && !options.empty?
       payload
     end
+  end

-
-
-
-
-
-    def
-
-
-
-
+  class StreamingProcessor
+    def initialize(client)
+      @client = client
+    end
+
+    def process_chunk(chunk)
+      lines = chunk.split(/\r?\n/)
+
+      lines.each do |line|
+        next unless line.start_with?("data: ")
+
+        json_data = line[6..].strip
+        next if json_data.empty? || json_data == "[DONE]"
+
+        begin
+          data = JSON.parse(json_data)
+          content = extract_streaming_content(data)
+          yield(content) unless content.empty?
+        rescue JSON::ParserError
+          @client.send(:log, :debug, "Skipping invalid JSON chunk", { chunk: json_data[0..50] })
+        end
       end
+    end

-
-
-
+    private
+
+    def extract_streaming_content(data)
+      candidates = data["candidates"]
+      return "" if candidates.nil? || candidates.empty?
+
+      candidate = candidates.first
+      content = candidate["content"]
+      return "" if content.nil?
+
+      parts = content["parts"]
+      return "" if parts.nil? || parts.empty?
+
+      parts.first["text"] || ""
+    rescue StandardError
+      ""
     end
+  end

-
-
-
-
-
-
-
-
-
+  class FunctionResponseProcessor
+    def process(response)
+      candidates = response["candidates"]
+      return { content: "", function_calls: [] } if candidates.nil? || candidates.empty?
+
+      candidate = candidates.first
+      content_parts = candidate.dig("content", "parts") || []
+
+      text_parts = []
+      function_calls = []
+
+      content_parts.each do |part|
+        if part["text"]
+          text_parts << part["text"]
+        elsif part["functionCall"]
+          function_calls << {
+            name: part["functionCall"]["name"],
+            args: part["functionCall"]["args"] || {}
+          }
+        end
       end
+
+      {
+        content: text_parts.join(" "),
+        function_calls: function_calls
+      }
+    end
+  end
+
+  class ResponseHandler
+    def initialize(client)
+      @client = client
     end

-    # Handle and parse the API response
-    # @param response [Faraday::Response] The API response
-    # @return [Hash] Parsed response body
-    # @raise [GeminiCraft::APIError] If the API returns an error
     def handle_response(response)
       case response.status
       when 200
         JSON.parse(response.body)
-      when 400
-
-
-
-
-
-
+      when 400
+        handle_client_error(response, "Bad Request")
+      when 401
+        raise AuthenticationError, "Invalid API key or authentication failed"
+      when 403
+        raise AuthorizationError, "Access forbidden - check your API permissions"
+      when 404
+        raise NotFoundError, "Model or endpoint not found"
+      when 429
+        raise RateLimitError, "Rate limit exceeded - please slow down your requests"
       when 500..599
-        raise
+        raise ServerError, "API server error (#{response.status}): The server encountered an error"
       else
         raise APIError, "Unknown API error (#{response.status})"
       end
     end

-
-
-
-
-
+    private
+
+    def handle_client_error(response, error_type)
+      error_body = begin
+        JSON.parse(response.body)
+      rescue StandardError
+        { "error" => { "message" => response.body } }
+      end
+
+      message = error_body.dig("error", "message") || "Unknown error"
+      raise ClientError, "#{error_type} (#{response.status}): #{message}"
+    end
+  end
+
+  class ConnectionBuilder
+    def initialize(config)
+      @config = config
+    end
+
+    def build_connection
+      Faraday.new(url: @config.api_base_url) do |faraday|
+        faraday.options.timeout = @config.timeout
+        faraday.options.open_timeout = 10
+        faraday.adapter Faraday.default_adapter
+        faraday.request :retry, max: @config.max_retries, interval: 0.5
+      end
+    end
+  end
+
+  # FIXED: Separate connection builder for streaming
+  class StreamingConnectionBuilder
+    def initialize(config)
+      @config = config
+    end
+
+    def build_connection
+      Faraday.new(url: @config.api_base_url) do |faraday|
+        faraday.options.timeout = @config.timeout * 3 # Longer timeout for streaming
+        faraday.options.open_timeout = 15
+        faraday.adapter Faraday.default_adapter
+
+        # No retry for streaming connections
+        # Streaming should handle failures gracefully
+      end
+    end
+  end
+
+  class ContentExtractor
+    def extract(response)
       candidates = response["candidates"]
       return "" if candidates.nil? || candidates.empty?

@@ -152,21 +357,21 @@ module GeminiCraft
     rescue StandardError => e
       raise ResponseError, "Failed to extract content from response: #{e.message}"
     end
+  end

-
-
-
-
-
-    def
+  class CacheKeyGenerator
+    def initialize(model)
+      @model = model
+    end
+
+    def generate(text, system_instruction, options)
       key_parts = [
-        @
+        @model,
         text,
         system_instruction,
         options.to_s
       ]

-      # Create a deterministic string from the key parts
       Digest::SHA256.hexdigest(key_parts.join("--"))
     end
   end
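Client#generate_content now takes a stream: keyword; with stream: true it returns an Enumerator that yields text chunks parsed from the SSE stream, while generate_with_functions returns a Hash with :content and :function_calls. A hedged consumption sketch based only on the signatures shown above; the function declaration is a hypothetical example, not part of the gem:

# Sketch only: consuming the streaming and function-calling paths added in 0.2.0.
client = GeminiCraft::Client.new

# stream: true returns an Enumerator of text chunks from the SSE stream
client.generate_content("Write a haiku about Ruby", nil, {}, stream: true).each do |chunk|
  print chunk
end

# generate_with_functions returns { content: String, function_calls: Array }
functions = [
  { # hypothetical declaration in the Gemini function_declarations format
    name: "get_weather",
    description: "Look up the current weather for a city",
    parameters: { type: "object", properties: { city: { type: "string" } } }
  }
]
result = client.generate_with_functions("What's the weather in Paris?", functions)
result[:function_calls].each { |call| puts "#{call[:name]}(#{call[:args].inspect})" }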
data/lib/gemini_craft/configuration.rb
CHANGED
@@ -3,7 +3,8 @@
 module GeminiCraft
   # Configuration for the GeminiCraft gem
   class Configuration
-    attr_accessor :api_key, :api_base_url, :model, :timeout, :cache_enabled, :cache_ttl, :max_retries
+    attr_accessor :api_key, :api_base_url, :model, :timeout, :cache_enabled, :cache_ttl, :max_retries,
+                  :logger, :log_level, :streaming_enabled, :connection_pool_size, :keep_alive_timeout

     # Initialize a new configuration with default values
     def initialize
@@ -14,6 +15,11 @@ module GeminiCraft
       @cache_enabled = false
       @cache_ttl = 3600 # 1 hour in seconds
       @max_retries = 3
+      @logger = nil
+      @log_level = :info
+      @streaming_enabled = false
+      @connection_pool_size = 5
+      @keep_alive_timeout = 30
     end

     # Validate that the configuration has required parameters
@@ -21,6 +27,23 @@ module GeminiCraft
     def validate!
       raise ConfigurationError, "API key must be configured" unless api_key
       raise ConfigurationError, "Model must be configured" unless model
+
+      validate_log_level!
+      validate_timeouts!
+    end
+
+    private
+
+    def validate_log_level!
+      valid_levels = %i[debug info warn error fatal]
+      return if valid_levels.include?(log_level)
+
+      raise ConfigurationError, "Invalid log level: #{log_level}. Must be one of: #{valid_levels.join(", ")}"
+    end
+
+    def validate_timeouts!
+      raise ConfigurationError, "Timeout must be positive" if timeout <= 0
+      raise ConfigurationError, "Cache TTL must be positive" if cache_ttl <= 0
     end
   end

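Configuration gains logger, log_level, streaming_enabled, connection_pool_size, and keep_alive_timeout, and validate! now also checks the log level and timeouts. A sketch of setting the new options; the GeminiCraft.configure block is assumed from gemini_craft.rb (not shown here) and the model name is only an example value:

# Sketch only: configuring the options introduced in 0.2.0.
require "logger"

GeminiCraft.configure do |config|          # assumed top-level configure block
  config.api_key              = ENV["GEMINI_API_KEY"]
  config.model                = "gemini-2.0-flash"   # example model name
  config.timeout              = 30
  config.logger               = Logger.new($stdout)  # new in 0.2.0
  config.log_level            = :debug               # must be one of %i[debug info warn error fatal]
  config.streaming_enabled    = true
  config.connection_pool_size = 5
  config.keep_alive_timeout   = 30
end

GeminiCraft.configuration.validate! # raises ConfigurationError on missing key/model or bad values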
data/lib/gemini_craft/error.rb
CHANGED
@@ -9,4 +9,31 @@ module GeminiCraft

   # Error raised when the response cannot be processed
   class ResponseError < Error; end
+
+  # Error raised when authentication fails
+  class AuthenticationError < APIError; end
+
+  # Error raised when authorization fails
+  class AuthorizationError < APIError; end
+
+  # Error raised when the requested resource is not found
+  class NotFoundError < APIError; end
+
+  # Error raised when rate limits are exceeded
+  class RateLimitError < APIError; end
+
+  # Error raised for client-side errors (4xx)
+  class ClientError < APIError; end
+
+  # Error raised for server-side errors (5xx)
+  class ServerError < APIError; end
+
+  # Error raised when requests timeout
+  class TimeoutError < APIError; end
+
+  # Error raised when connection fails
+  class ConnectionError < APIError; end
+
+  # Error raised when streaming fails
+  class StreamingError < APIError; end
 end
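All of the new error classes inherit from GeminiCraft::APIError, so callers can handle specific failures (rate limits, auth, timeouts) while keeping a single APIError fallback. A sketch of how calling code might branch on them:

# Sketch only: branching on the finer-grained 0.2.0 error hierarchy defined above.
begin
  client = GeminiCraft::Client.new
  puts client.generate_content("Summarize the 0.2.0 changes")
rescue GeminiCraft::RateLimitError
  sleep 5
  retry
rescue GeminiCraft::AuthenticationError, GeminiCraft::AuthorizationError => e
  warn "Check your API key and permissions: #{e.message}"
rescue GeminiCraft::TimeoutError, GeminiCraft::ConnectionError => e
  warn "Network problem talking to Gemini: #{e.message}"
rescue GeminiCraft::APIError => e
  warn "Gemini API error: #{e.message}"
end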
data/lib/gemini_craft/version.rb
CHANGED