factorix 0.5.1 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +45 -0
- data/README.md +1 -1
- data/exe/factorix +17 -0
- data/lib/factorix/api/mod_download_api.rb +11 -6
- data/lib/factorix/api/mod_info.rb +2 -2
- data/lib/factorix/api/mod_management_api.rb +1 -1
- data/lib/factorix/api/mod_portal_api.rb +6 -49
- data/lib/factorix/api_credential.rb +1 -1
- data/lib/factorix/cache/base.rb +116 -0
- data/lib/factorix/cache/entry.rb +25 -0
- data/lib/factorix/cache/file_system.rb +137 -57
- data/lib/factorix/cache/redis.rb +287 -0
- data/lib/factorix/cache/s3.rb +388 -0
- data/lib/factorix/cli/commands/backup_support.rb +1 -1
- data/lib/factorix/cli/commands/base.rb +3 -3
- data/lib/factorix/cli/commands/cache/evict.rb +19 -24
- data/lib/factorix/cli/commands/cache/stat.rb +66 -67
- data/lib/factorix/cli/commands/command_wrapper.rb +5 -5
- data/lib/factorix/cli/commands/completion.rb +1 -2
- data/lib/factorix/cli/commands/confirmable.rb +1 -1
- data/lib/factorix/cli/commands/download_support.rb +2 -7
- data/lib/factorix/cli/commands/mod/check.rb +1 -1
- data/lib/factorix/cli/commands/mod/disable.rb +1 -1
- data/lib/factorix/cli/commands/mod/download.rb +7 -7
- data/lib/factorix/cli/commands/mod/edit.rb +10 -13
- data/lib/factorix/cli/commands/mod/enable.rb +1 -1
- data/lib/factorix/cli/commands/mod/image/add.rb +3 -6
- data/lib/factorix/cli/commands/mod/image/edit.rb +2 -5
- data/lib/factorix/cli/commands/mod/image/list.rb +5 -8
- data/lib/factorix/cli/commands/mod/install.rb +7 -7
- data/lib/factorix/cli/commands/mod/list.rb +7 -7
- data/lib/factorix/cli/commands/mod/search.rb +13 -12
- data/lib/factorix/cli/commands/mod/settings/dump.rb +3 -3
- data/lib/factorix/cli/commands/mod/settings/restore.rb +2 -2
- data/lib/factorix/cli/commands/mod/show.rb +22 -23
- data/lib/factorix/cli/commands/mod/sync.rb +8 -8
- data/lib/factorix/cli/commands/mod/uninstall.rb +1 -1
- data/lib/factorix/cli/commands/mod/update.rb +11 -43
- data/lib/factorix/cli/commands/mod/upload.rb +7 -10
- data/lib/factorix/cli/commands/path.rb +2 -2
- data/lib/factorix/cli/commands/portal_support.rb +27 -0
- data/lib/factorix/cli/commands/version.rb +1 -1
- data/lib/factorix/container.rb +155 -0
- data/lib/factorix/dependency/parser.rb +1 -1
- data/lib/factorix/errors.rb +3 -0
- data/lib/factorix/http/cache_decorator.rb +5 -5
- data/lib/factorix/http/client.rb +3 -3
- data/lib/factorix/info_json.rb +7 -7
- data/lib/factorix/mod_list.rb +2 -2
- data/lib/factorix/mod_settings.rb +2 -2
- data/lib/factorix/portal.rb +3 -2
- data/lib/factorix/runtime/user_configurable.rb +9 -9
- data/lib/factorix/service_credential.rb +3 -3
- data/lib/factorix/transfer/downloader.rb +19 -11
- data/lib/factorix/version.rb +1 -1
- data/lib/factorix.rb +110 -1
- data/sig/factorix/api/mod_download_api.rbs +1 -2
- data/sig/factorix/cache/base.rbs +28 -0
- data/sig/factorix/cache/entry.rbs +14 -0
- data/sig/factorix/cache/file_system.rbs +7 -6
- data/sig/factorix/cache/redis.rbs +36 -0
- data/sig/factorix/cache/s3.rbs +38 -0
- data/sig/factorix/container.rbs +15 -0
- data/sig/factorix/errors.rbs +3 -0
- data/sig/factorix/portal.rbs +1 -1
- data/sig/factorix.rbs +99 -0
- metadata +27 -4
- data/lib/factorix/application.rb +0 -218
- data/sig/factorix/application.rbs +0 -86

data/lib/factorix/cache/s3.rb (new file)
@@ -0,0 +1,388 @@
+# frozen_string_literal: true
+
+begin
+  require "aws-sdk-s3"
+rescue LoadError
+  raise Factorix::Error, "aws-sdk-s3 gem is required for S3 cache backend. Add it to your Gemfile."
+end
+
+require "securerandom"
+
+module Factorix
+  module Cache
+    # S3-based cache storage implementation.
+    #
+    # Stores cache entries in AWS S3 with automatic prefix generation.
+    # TTL is managed via custom metadata on objects.
+    # Supports distributed locking using conditional PUT operations.
+    #
+    # @example Configuration
+    #   Factorix.configure do |config|
+    #     config.cache.download.backend = :s3
+    #     config.cache.download.s3.bucket = "factorix-develop"
+    #     config.cache.download.s3.region = "ap-northeast-1"
+    #     config.cache.download.s3.lock_timeout = 30
+    #   end
+    class S3 < Base
+      # @!parse
+      #   # @return [Dry::Logger::Dispatcher]
+      #   attr_reader :logger
+      include Import[:logger]
+
+      # Default timeout for distributed lock acquisition in seconds.
+      DEFAULT_LOCK_TIMEOUT = 30
+      public_constant :DEFAULT_LOCK_TIMEOUT
+
+      # TTL for distributed locks in seconds.
+      LOCK_TTL = 30
+      private_constant :LOCK_TTL
+
+      # Metadata key for storing expiration timestamp.
+      EXPIRES_AT_KEY = "expires-at"
+      private_constant :EXPIRES_AT_KEY
+
+      # Metadata key for storing logical key.
+      LOGICAL_KEY_KEY = "logical-key"
+      private_constant :LOGICAL_KEY_KEY
+
+      # Initialize a new S3 cache storage.
+      #
+      # @param bucket [String] S3 bucket name (required)
+      # @param region [String, nil] AWS region (defaults to AWS_REGION env or SDK default)
+      # @param cache_type [String, Symbol] Cache type for prefix (e.g., :api, :download)
+      # @param lock_timeout [Integer] Timeout for lock acquisition in seconds
+      # @param ttl [Integer, nil] time-to-live in seconds (nil for unlimited)
+      def initialize(bucket:, cache_type:, region: nil, lock_timeout: DEFAULT_LOCK_TIMEOUT, **)
+        super(**)
+        @client = Aws::S3::Client.new(**{region:}.compact)
+        @bucket = bucket
+        @prefix = "cache/#{cache_type}/"
+        @lock_timeout = lock_timeout
+        logger.info("Initializing S3 cache", bucket: @bucket, prefix: @prefix, ttl: @ttl, lock_timeout: @lock_timeout)
+      end
+
+      # Check if a cache entry exists and is not expired.
+      #
+      # @param key [String] logical cache key
+      # @return [Boolean] true if the cache entry exists and is valid
+      def exist?(key)
+        head_object(key)
+        !expired?(key)
+      rescue Aws::S3::Errors::NotFound
+        false
+      end
+
+      # Read a cached entry.
+      #
+      # @param key [String] logical cache key
+      # @return [String, nil] cached content or nil if not found/expired
+      def read(key)
+        return nil if expired?(key)
+
+        resp = @client.get_object(bucket: @bucket, key: storage_key(key))
+        resp.body.read
+      rescue Aws::S3::Errors::NoSuchKey
+        nil
+      end
+
+      # Write cached content to a file.
+      #
+      # @param key [String] logical cache key
+      # @param output [Pathname] path to write the cached content
+      # @return [Boolean] true if written successfully, false if not found/expired
+      def write_to(key, output)
+        return false if expired?(key)
+
+        @client.get_object(bucket: @bucket, key: storage_key(key), response_target: output.to_s)
+        logger.debug("Cache hit", key:)
+        true
+      rescue Aws::S3::Errors::NoSuchKey
+        false
+      end
+
+      # Store data in the cache.
+      #
+      # @param key [String] logical cache key
+      # @param src [Pathname] path to the source file
+      # @return [Boolean] true if stored successfully
+      def store(key, src)
+        metadata = {LOGICAL_KEY_KEY => key}
+        metadata[EXPIRES_AT_KEY] = (Time.now.to_i + @ttl).to_s if @ttl
+
+        @client.put_object(
+          bucket: @bucket,
+          key: storage_key(key),
+          body: src.binread,
+          metadata:
+        )
+
+        logger.debug("Stored in cache", key:, size_bytes: src.size)
+        true
+      end
+
+      # Delete a cache entry.
+      #
+      # @param key [String] logical cache key
+      # @return [Boolean] true if deleted, false if not found
+      def delete(key)
+        return false unless exist_without_expiry_check?(key)
+
+        @client.delete_object(bucket: @bucket, key: storage_key(key))
+        logger.debug("Deleted from cache", key:)
+        true
+      end
+
+      # Clear all cache entries in this prefix.
+      #
+      # @return [void]
+      def clear
+        logger.info("Clearing S3 cache prefix", bucket: @bucket, prefix: @prefix)
+        count = 0
+
+        list_all_objects do |objects|
+          keys_to_delete = objects.filter_map {|obj| {key: obj.key} unless obj.key.end_with?(".lock") }
+          next if keys_to_delete.empty?
+
+          @client.delete_objects(bucket: @bucket, delete: {objects: keys_to_delete})
+          count += keys_to_delete.size
+        end
+
+        logger.info("Cache cleared", objects_removed: count)
+      end
+
+      # Get the age of a cache entry in seconds.
+      #
+      # @param key [String] logical cache key
+      # @return [Float, nil] age in seconds, or nil if entry doesn't exist
+      def age(key)
+        resp = head_object(key)
+        Time.now - resp.last_modified
+      rescue Aws::S3::Errors::NotFound
+        nil
+      end
+
+      # Check if a cache entry has expired based on TTL.
+      #
+      # @param key [String] logical cache key
+      # @return [Boolean] true if expired, false otherwise
+      def expired?(key)
+        return false if @ttl.nil?
+
+        resp = head_object(key)
+        value = resp.metadata[EXPIRES_AT_KEY]
+        return false if value.nil?
+
+        Time.now.to_i > Integer(value, 10)
+      rescue Aws::S3::Errors::NotFound
+        true
+      end
+
+      # Get the size of a cached entry in bytes.
+      #
+      # @param key [String] logical cache key
+      # @return [Integer, nil] size in bytes, or nil if entry doesn't exist/expired
+      def size(key)
+        return nil if expired?(key)
+
+        resp = head_object(key)
+        resp.content_length
+      rescue Aws::S3::Errors::NotFound
+        nil
+      end
+
+      # Execute a block with a distributed lock.
+      # Uses conditional PUT for lock acquisition.
+      #
+      # @param key [String] logical cache key
+      # @yield block to execute with lock held
+      # @raise [LockTimeoutError] if lock cannot be acquired within timeout
+      def with_lock(key)
+        lkey = lock_key(key)
+        lock_value = SecureRandom.uuid
+        deadline = Time.now + @lock_timeout
+
+        loop do
+          if try_acquire_lock(lkey, lock_value)
+            logger.debug("Acquired lock", key:)
+            break
+          end
+
+          cleanup_stale_lock(lkey)
+          raise LockTimeoutError, "Failed to acquire lock for key: #{key}" if Time.now > deadline
+
+          sleep 0.1
+        end
+
+        begin
+          yield
+        ensure
+          @client.delete_object(bucket: @bucket, key: lkey)
+          logger.debug("Released lock", key:)
+        end
+      end
+
+      # Enumerate cache entries.
+      #
+      # @yield [key, entry] logical key and Entry object
+      # @yieldparam key [String] logical cache key
+      # @yieldparam entry [Entry] cache entry metadata
+      # @return [Enumerator] if no block given
+      def each
+        return enum_for(__method__) unless block_given?
+
+        list_all_objects do |objects|
+          objects.each do |obj|
+            next if obj.key.end_with?(".lock")
+
+            logical_key, entry = build_entry_with_metadata(obj)
+            next if logical_key.nil? # Skip entries without logical key metadata
+
+            yield logical_key, entry
+          end
+        end
+      end
+
+      # Return backend-specific information.
+      #
+      # @return [Hash] backend configuration
+      def backend_info
+        {
+          type: "s3",
+          bucket: @bucket,
+          prefix: @prefix,
+          lock_timeout: @lock_timeout
+        }
+      end
+
+      # Generate a hashed internal key for the given logical key.
+      # Uses SHA1 to create a unique, deterministic key.
+      # Use Digest(:SHA1) instead of Digest::SHA1 for thread-safety (Ruby 2.2+)
+      #
+      # @param logical_key [String] logical key to hash
+      # @return [String] SHA1 hash of the logical key
+      private def storage_key_for(logical_key) = Digest(:SHA1).hexdigest(logical_key)
+
+      # Generate storage key for the given logical key.
+      #
+      # @param logical_key [String] logical key
+      # @return [String] prefixed hashed storage key
+      private def storage_key(logical_key) = "#{@prefix}#{storage_key_for(logical_key)}"
+
+      # Generate lock key for the given logical key.
+      #
+      # @param logical_key [String] logical key
+      # @return [String] lock key
+      private def lock_key(logical_key) = "#{@prefix}#{storage_key_for(logical_key)}.lock"
+
+      # Get object metadata.
+      #
+      # @param key [String] logical key
+      # @return [Aws::S3::Types::HeadObjectOutput] object metadata
+      private def head_object(key)
+        @client.head_object(bucket: @bucket, key: storage_key(key))
+      end
+
+      # Check if object exists without expiry check.
+      #
+      # @param key [String] logical key
+      # @return [Boolean] true if exists
+      private def exist_without_expiry_check?(key)
+        head_object(key)
+        true
+      rescue Aws::S3::Errors::NotFound
+        false
+      end
+
+      # Try to acquire a distributed lock.
+      #
+      # @param lkey [String] lock key
+      # @param lock_value [String] unique lock value
+      # @return [Boolean] true if lock acquired
+      private def try_acquire_lock(lkey, lock_value)
+        lock_body = "#{lock_value}:#{Time.now.to_i + LOCK_TTL}"
+        @client.put_object(
+          bucket: @bucket,
+          key: lkey,
+          body: lock_body,
+          if_none_match: "*"
+        )
+        true
+      rescue Aws::S3::Errors::PreconditionFailed
+        false
+      end
+
+      # Clean up stale lock if expired.
+      #
+      # @param lkey [String] lock key
+      private def cleanup_stale_lock(lkey)
+        resp = @client.get_object(bucket: @bucket, key: lkey)
+        lock_data = resp.body.read
+        _lock_value, expires_at = lock_data.split(":")
+
+        if expires_at && Time.now.to_i > Integer(expires_at, 10)
+          @client.delete_object(bucket: @bucket, key: lkey)
+          logger.debug("Cleaned up stale lock", key: lkey)
+        end
+      rescue Aws::S3::Errors::NoSuchKey
+        # Lock doesn't exist, nothing to clean up
+      end
+
+      # List all objects in the prefix with pagination.
+      #
+      # @yield [Array<Aws::S3::Types::Object>] batch of objects
+      private def list_all_objects
+        continuation_token = nil
+
+        loop do
+          resp = @client.list_objects_v2(
+            bucket: @bucket,
+            prefix: @prefix,
+            continuation_token:
+          )
+
+          yield resp.contents if resp.contents.any?
+
+          break unless resp.is_truncated
+
+          continuation_token = resp.next_continuation_token
+        end
+      end
+
+      # Build an Entry from an S3 object, fetching metadata to get logical key.
+      #
+      # @param obj [Aws::S3::Types::Object] S3 object
+      # @return [Array(String, Entry), Array(nil, nil)] logical key and entry, or nils if metadata missing
+      private def build_entry_with_metadata(obj)
+        resp = @client.head_object(bucket: @bucket, key: obj.key)
+        logical_key = resp.metadata[LOGICAL_KEY_KEY]
+        return [nil, nil] if logical_key.nil?
+
+        age = Time.now - obj.last_modified
+        expired = check_expired_from_head_response(resp)
+
+        entry = Entry.new(
+          size: obj.size,
+          age:,
+          expired:
+        )
+
+        [logical_key, entry]
+      rescue Aws::S3::Errors::NotFound
+        [nil, nil]
+      end
+
+      # Check if object is expired from head_object response.
+      #
+      # @param resp [Aws::S3::Types::HeadObjectOutput] head_object response
+      # @return [Boolean] true if expired
+      private def check_expired_from_head_response(resp)
+        return false if @ttl.nil?
+
+        value = resp.metadata[EXPIRES_AT_KEY]
+        return false if value.nil?
+
+        Time.now.to_i > Integer(value, 10)
+      end
+    end
+  end
+end
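
The new class subclasses Cache::Base (added in this release alongside the Redis backend and the file-system rework), so switching backends is a configuration change rather than a code change. A minimal usage sketch follows; the configuration keys come from the @example comment above, while the :download_cache container key and the key/file names are assumptions for illustration, not confirmed API:

    require "factorix"
    require "pathname"

    Factorix.configure do |config|
      config.cache.download.backend = :s3
      config.cache.download.s3.bucket = "factorix-develop"
      config.cache.download.s3.region = "ap-northeast-1"
      config.cache.download.s3.lock_timeout = 30
    end

    # Assumed registration name, following the :"#{name}_cache" pattern used by
    # the cache evict command further down in this diff.
    cache = Factorix::Container.resolve(:download_cache)
    key = "https://mods.factorio.com/download/example"  # illustrative logical key

    # with_lock serializes writers via a conditional PUT (if_none_match: "*") on a
    # .lock object; other processes retry every 0.1s until lock_timeout elapses,
    # then raise LockTimeoutError.
    cache.with_lock(key) do
      cache.store(key, Pathname("example.zip")) unless cache.exist?(key)
    end

    cache.write_to(key, Pathname("copy.zip"))  # => true on a cache hit
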

data/lib/factorix/cli/commands/backup_support.rb
@@ -15,7 +15,7 @@ module Factorix
 # @param base [Class] the class prepending this module
 def self.prepended(base)
   base.class_eval do
-    option :backup_extension,
+    option :backup_extension, default: ".bak", desc: "Backup file extension"
   end
 end


data/lib/factorix/cli/commands/base.rb
@@ -68,8 +68,8 @@ module Factorix
 def self.backup_support! = prepend BackupSupport

 # Common options available to all commands
-option :config_path,
-option :log_level,
+option :config_path, aliases: ["-c"], desc: "Path to configuration file"
+option :log_level, values: %w[debug info warn error fatal], desc: "Set log level"
 option :quiet, type: :flag, default: false, aliases: ["-q"], desc: "Suppress non-essential output"

 private def say(message, prefix: "")
@@ -78,7 +78,7 @@ module Factorix
   resolved_prefix = EMOJI_PREFIXES.fetch(prefix) { prefix.to_s }
   output = resolved_prefix.empty? ? message : "#{resolved_prefix} #{message}"
   style = STYLES.fetch(prefix, PLAIN)
-  puts style[output]
+  out.puts style[output]
 end

 private def quiet?
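
The cache evict hunks that follow drop the direct file-system traversal (mtime and TTL arithmetic against cache files) in favour of backend-agnostic iteration: each configured cache is resolved from the container and reports its entries as Cache::Entry objects with size, age, and expired state, so eviction behaves the same for the file-system, Redis, and S3 backends. An illustrative sketch of the resulting predicate; the Struct is a stand-in rather than the gem's Entry class (data/lib/factorix/cache/entry.rb), and the attribute names are read off the hunks below:

    # Stand-in for Factorix::Cache::Entry, for illustration only.
    Entry = Struct.new(:size, :age, :expired, keyword_init: true) do
      def expired? = expired
    end

    # Mirrors the new should_evict? logic: --all wins, --expired defers to the
    # entry itself, --older-than compares the entry's age to the parsed threshold.
    def evict?(entry, all:, expired:, older_than_seconds:)
      return true if all

      expired ? entry.expired? : entry.age > older_than_seconds
    end

    entry = Entry.new(size: 1024, age: 10 * 86_400, expired: true)
    evict?(entry, all: false, expired: true, older_than_seconds: nil)         # => true
    evict?(entry, all: false, expired: false, older_than_seconds: 7 * 86_400) # => true
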

data/lib/factorix/cli/commands/cache/evict.rb
@@ -36,7 +36,7 @@ module Factorix

 option :all, type: :flag, default: false, desc: "Remove all entries"
 option :expired, type: :flag, default: false, desc: "Remove expired entries only"
-option :older_than,
+option :older_than, default: nil, desc: "Remove entries older than AGE (e.g., 30s, 5m, 2h, 7d)"

 # Execute the cache evict command
 #
@@ -48,7 +48,6 @@ module Factorix
 def call(caches: nil, all: false, expired: false, older_than: nil, **)
   validate_options!(all, expired, older_than)

-  @now = Time.now
   @older_than_seconds = parse_age(older_than) if older_than

   cache_names = resolve_cache_names(caches)
@@ -95,7 +94,7 @@ module Factorix
 # @return [Array<Symbol>] resolved cache names
 # @raise [InvalidArgumentError] if unknown cache name specified
 private def resolve_cache_names(caches)
-  all_caches =
+  all_caches = Factorix.config.cache.values.keys

   return all_caches if caches.nil? || caches.empty?

@@ -114,25 +113,26 @@ module Factorix
 # @param expired [Boolean] remove expired entries only
 # @return [Hash] eviction result with :count and :size
 private def evict_cache(name, all:, expired:)
-
-  cache_dir = config.dir
-  ttl = config.ttl
-
-  return {count: 0, size: 0} unless cache_dir.exist?
+  cache = Container.resolve(:"#{name}_cache")

   count = 0
   size = 0

-
-
-
+  # Collect keys to evict (we can't modify during iteration)
+  to_evict = []
+  cache.each do |key, entry|
+    next unless should_evict?(entry, all:, expired:)
+
+    to_evict << [key, entry.size]
+  end

-
+  # Perform eviction
+  to_evict.each do |key, entry_size|
+    next unless cache.delete(key)

-    size += path.size
-    path.delete
     count += 1
-
+    size += entry_size
+    logger.debug("Evicted cache entry", key:)
   end

   logger.info("Evicted cache entries", cache: name, count:, size:)
@@ -141,23 +141,18 @@ module Factorix

 # Determine if a cache entry should be evicted
 #
-# @param
-# @param ttl [Integer, nil] cache TTL
+# @param entry [Cache::Entry] cache entry
 # @param all [Boolean] remove all entries
 # @param expired [Boolean] remove expired entries only
 # @return [Boolean] true if entry should be evicted
-private def should_evict?(
+private def should_evict?(entry, all:, expired:)
   return true if all

-  age_seconds = @now - path.mtime
-
   if expired
-
-
-    age_seconds > ttl
+    entry.expired?
   else
     # --older-than
-
+    entry.age > @older_than_seconds
   end
 end