factorix 0.6.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +36 -0
  3. data/README.md +3 -0
  4. data/completion/_factorix.bash +15 -1
  5. data/completion/_factorix.fish +15 -7
  6. data/completion/_factorix.zsh +22 -0
  7. data/exe/factorix +17 -0
  8. data/lib/factorix/api/game_download_api.rb +154 -0
  9. data/lib/factorix/api/mod_download_api.rb +10 -5
  10. data/lib/factorix/api/mod_info.rb +1 -1
  11. data/lib/factorix/api/mod_portal_api.rb +6 -49
  12. data/lib/factorix/cache/base.rb +116 -0
  13. data/lib/factorix/cache/entry.rb +25 -0
  14. data/lib/factorix/cache/file_system.rb +137 -57
  15. data/lib/factorix/cache/redis.rb +287 -0
  16. data/lib/factorix/cache/s3.rb +388 -0
  17. data/lib/factorix/cli/commands/cache/evict.rb +17 -22
  18. data/lib/factorix/cli/commands/cache/stat.rb +57 -58
  19. data/lib/factorix/cli/commands/download.rb +150 -0
  20. data/lib/factorix/cli/commands/download_support.rb +1 -6
  21. data/lib/factorix/cli/commands/mod/download.rb +2 -3
  22. data/lib/factorix/cli/commands/mod/edit.rb +1 -4
  23. data/lib/factorix/cli/commands/mod/image/add.rb +1 -4
  24. data/lib/factorix/cli/commands/mod/image/edit.rb +1 -4
  25. data/lib/factorix/cli/commands/mod/image/list.rb +1 -4
  26. data/lib/factorix/cli/commands/mod/install.rb +2 -3
  27. data/lib/factorix/cli/commands/mod/list.rb +3 -3
  28. data/lib/factorix/cli/commands/mod/search.rb +2 -3
  29. data/lib/factorix/cli/commands/mod/show.rb +2 -3
  30. data/lib/factorix/cli/commands/mod/sync.rb +2 -3
  31. data/lib/factorix/cli/commands/mod/update.rb +6 -39
  32. data/lib/factorix/cli/commands/mod/upload.rb +1 -4
  33. data/lib/factorix/cli/commands/portal_support.rb +27 -0
  34. data/lib/factorix/cli.rb +1 -0
  35. data/lib/factorix/container.rb +32 -13
  36. data/lib/factorix/dependency/graph/builder.rb +2 -2
  37. data/lib/factorix/dependency/graph.rb +2 -2
  38. data/lib/factorix/dependency/validation_result.rb +3 -3
  39. data/lib/factorix/errors.rb +3 -0
  40. data/lib/factorix/http/cache_decorator.rb +14 -7
  41. data/lib/factorix/http/cached_response.rb +4 -1
  42. data/lib/factorix/http/client.rb +13 -3
  43. data/lib/factorix/http/response.rb +4 -1
  44. data/lib/factorix/http/retry_decorator.rb +11 -0
  45. data/lib/factorix/info_json.rb +5 -5
  46. data/lib/factorix/portal.rb +3 -2
  47. data/lib/factorix/save_file.rb +2 -2
  48. data/lib/factorix/transfer/downloader.rb +19 -11
  49. data/lib/factorix/version.rb +1 -1
  50. data/lib/factorix.rb +46 -53
  51. data/sig/factorix/api/mod_download_api.rbs +1 -2
  52. data/sig/factorix/cache/base.rbs +28 -0
  53. data/sig/factorix/cache/entry.rbs +14 -0
  54. data/sig/factorix/cache/file_system.rbs +7 -6
  55. data/sig/factorix/cache/redis.rbs +36 -0
  56. data/sig/factorix/cache/s3.rbs +38 -0
  57. data/sig/factorix/errors.rbs +3 -0
  58. data/sig/factorix/portal.rbs +1 -1
  59. metadata +27 -2
@@ -0,0 +1,388 @@
# frozen_string_literal: true

begin
  require "aws-sdk-s3"
rescue LoadError
  raise Factorix::Error, "aws-sdk-s3 gem is required for S3 cache backend. Add it to your Gemfile."
end

# Digest(:SHA1) is used below; require it explicitly instead of relying on
# another file having loaded it first.
require "digest"
require "securerandom"

module Factorix
  module Cache
    # S3-based cache storage implementation.
    #
    # Stores cache entries in AWS S3 with automatic prefix generation.
    # TTL is managed via custom metadata on objects.
    # Supports distributed locking using conditional PUT operations.
    #
    # @example Configuration
    #   Factorix.configure do |config|
    #     config.cache.download.backend = :s3
    #     config.cache.download.s3.bucket = "factorix-develop"
    #     config.cache.download.s3.region = "ap-northeast-1"
    #     config.cache.download.s3.lock_timeout = 30
    #   end
    class S3 < Base
      # @!parse
      #   # @return [Dry::Logger::Dispatcher]
      #   attr_reader :logger
      include Import[:logger]

      # Default timeout for distributed lock acquisition in seconds.
      DEFAULT_LOCK_TIMEOUT = 30
      public_constant :DEFAULT_LOCK_TIMEOUT

      # TTL for distributed locks in seconds.
      LOCK_TTL = 30
      private_constant :LOCK_TTL

      # Metadata key for storing expiration timestamp.
      EXPIRES_AT_KEY = "expires-at"
      private_constant :EXPIRES_AT_KEY

      # Metadata key for storing logical key.
      LOGICAL_KEY_KEY = "logical-key"
      private_constant :LOGICAL_KEY_KEY

      # Initialize a new S3 cache storage.
      #
      # @param bucket [String] S3 bucket name (required)
      # @param region [String, nil] AWS region (defaults to AWS_REGION env or SDK default)
      # @param cache_type [String, Symbol] Cache type for prefix (e.g., :api, :download)
      # @param lock_timeout [Integer] Timeout for lock acquisition in seconds
      # @param ttl [Integer, nil] time-to-live in seconds (nil for unlimited)
      def initialize(bucket:, cache_type:, region: nil, lock_timeout: DEFAULT_LOCK_TIMEOUT, **)
        super(**)
        # Only pass :region when explicitly given so the SDK default chain applies.
        @client = Aws::S3::Client.new(**{region:}.compact)
        @bucket = bucket
        @prefix = "cache/#{cache_type}/"
        @lock_timeout = lock_timeout
        logger.info("Initializing S3 cache", bucket: @bucket, prefix: @prefix, ttl: @ttl, lock_timeout: @lock_timeout)
      end

      # Check if a cache entry exists and is not expired.
      #
      # @param key [String] logical cache key
      # @return [Boolean] true if the cache entry exists and is valid
      def exist?(key)
        # One HEAD request answers both existence and expiry; the previous
        # implementation issued a second HEAD via expired?.
        !check_expired_from_head_response(head_object(key))
      rescue Aws::S3::Errors::NotFound
        false
      end

      # Read a cached entry.
      #
      # @param key [String] logical cache key
      # @return [String, nil] cached content or nil if not found/expired
      def read(key)
        return nil if expired?(key)

        resp = @client.get_object(bucket: @bucket, key: storage_key(key))
        resp.body.read
      rescue Aws::S3::Errors::NoSuchKey
        nil
      end

      # Write cached content to a file.
      #
      # @param key [String] logical cache key
      # @param output [Pathname] path to write the cached content
      # @return [Boolean] true if written successfully, false if not found/expired
      def write_to(key, output)
        return false if expired?(key)

        # response_target streams directly to disk without buffering in memory.
        @client.get_object(bucket: @bucket, key: storage_key(key), response_target: output.to_s)
        logger.debug("Cache hit", key:)
        true
      rescue Aws::S3::Errors::NoSuchKey
        false
      end

      # Store data in the cache.
      #
      # @param key [String] logical cache key
      # @param src [Pathname] path to the source file
      # @return [Boolean] true if stored successfully
      def store(key, src)
        metadata = {LOGICAL_KEY_KEY => key}
        metadata[EXPIRES_AT_KEY] = (Time.now.to_i + @ttl).to_s if @ttl

        # Stream the file as an IO instead of slurping it into memory with
        # binread — cached downloads can be large.
        src.open("rb") do |io|
          @client.put_object(
            bucket: @bucket,
            key: storage_key(key),
            body: io,
            metadata:
          )
        end

        logger.debug("Stored in cache", key:, size_bytes: src.size)
        true
      end

      # Delete a cache entry.
      #
      # @param key [String] logical cache key
      # @return [Boolean] true if deleted, false if not found
      def delete(key)
        # S3 delete_object succeeds even for missing keys, so check first
        # to report false for entries that were never stored.
        return false unless exist_without_expiry_check?(key)

        @client.delete_object(bucket: @bucket, key: storage_key(key))
        logger.debug("Deleted from cache", key:)
        true
      end

      # Clear all cache entries in this prefix.
      #
      # @return [void]
      def clear
        logger.info("Clearing S3 cache prefix", bucket: @bucket, prefix: @prefix)
        count = 0

        list_all_objects do |objects|
          # Keep lock objects so in-flight with_lock sections stay protected.
          keys_to_delete = objects.filter_map {|obj| {key: obj.key} unless obj.key.end_with?(".lock") }
          next if keys_to_delete.empty?

          @client.delete_objects(bucket: @bucket, delete: {objects: keys_to_delete})
          count += keys_to_delete.size
        end

        logger.info("Cache cleared", objects_removed: count)
      end

      # Get the age of a cache entry in seconds.
      #
      # @param key [String] logical cache key
      # @return [Float, nil] age in seconds, or nil if entry doesn't exist
      def age(key)
        resp = head_object(key)
        Time.now - resp.last_modified
      rescue Aws::S3::Errors::NotFound
        nil
      end

      # Check if a cache entry has expired based on TTL.
      #
      # @param key [String] logical cache key
      # @return [Boolean] true if expired, false otherwise
      def expired?(key)
        # Without a TTL nothing ever expires; skip the HEAD request entirely.
        return false if @ttl.nil?

        check_expired_from_head_response(head_object(key))
      rescue Aws::S3::Errors::NotFound
        true
      end

      # Get the size of a cached entry in bytes.
      #
      # @param key [String] logical cache key
      # @return [Integer, nil] size in bytes, or nil if entry doesn't exist/expired
      def size(key)
        # Reuse one HEAD response for both the expiry check and the size,
        # instead of issuing two HEAD requests.
        resp = head_object(key)
        return nil if check_expired_from_head_response(resp)

        resp.content_length
      rescue Aws::S3::Errors::NotFound
        nil
      end

      # Execute a block with a distributed lock.
      # Uses conditional PUT for lock acquisition.
      #
      # @param key [String] logical cache key
      # @yield block to execute with lock held
      # @raise [LockTimeoutError] if lock cannot be acquired within timeout
      def with_lock(key)
        lkey = lock_key(key)
        lock_value = SecureRandom.uuid
        deadline = Time.now + @lock_timeout

        loop do
          if try_acquire_lock(lkey, lock_value)
            logger.debug("Acquired lock", key:)
            break
          end

          cleanup_stale_lock(lkey)
          raise LockTimeoutError, "Failed to acquire lock for key: #{key}" if Time.now > deadline

          sleep 0.1
        end

        begin
          yield
        ensure
          @client.delete_object(bucket: @bucket, key: lkey)
          logger.debug("Released lock", key:)
        end
      end

      # Enumerate cache entries.
      #
      # @yield [key, entry] logical key and Entry object
      # @yieldparam key [String] logical cache key
      # @yieldparam entry [Entry] cache entry metadata
      # @return [Enumerator] if no block given
      def each
        return enum_for(__method__) unless block_given?

        list_all_objects do |objects|
          objects.each do |obj|
            next if obj.key.end_with?(".lock")

            logical_key, entry = build_entry_with_metadata(obj)
            next if logical_key.nil? # Skip entries without logical key metadata

            yield logical_key, entry
          end
        end
      end

      # Return backend-specific information.
      #
      # @return [Hash] backend configuration
      def backend_info
        {
          type: "s3",
          bucket: @bucket,
          prefix: @prefix,
          lock_timeout: @lock_timeout
        }
      end

      # Generate a hashed internal key for the given logical key.
      # Uses SHA1 to create a unique, deterministic key.
      # Use Digest(:SHA1) instead of Digest::SHA1 for thread-safety (Ruby 2.2+)
      #
      # @param logical_key [String] logical key to hash
      # @return [String] SHA1 hash of the logical key
      private def storage_key_for(logical_key) = Digest(:SHA1).hexdigest(logical_key)

      # Generate storage key for the given logical key.
      #
      # @param logical_key [String] logical key
      # @return [String] prefixed hashed storage key
      private def storage_key(logical_key) = "#{@prefix}#{storage_key_for(logical_key)}"

      # Generate lock key for the given logical key.
      #
      # @param logical_key [String] logical key
      # @return [String] lock key
      private def lock_key(logical_key) = "#{@prefix}#{storage_key_for(logical_key)}.lock"

      # Get object metadata.
      #
      # @param key [String] logical key
      # @return [Aws::S3::Types::HeadObjectOutput] object metadata
      private def head_object(key)
        @client.head_object(bucket: @bucket, key: storage_key(key))
      end

      # Check if object exists without expiry check.
      #
      # @param key [String] logical key
      # @return [Boolean] true if exists
      private def exist_without_expiry_check?(key)
        head_object(key)
        true
      rescue Aws::S3::Errors::NotFound
        false
      end

      # Try to acquire a distributed lock.
      #
      # @param lkey [String] lock key
      # @param lock_value [String] unique lock value
      # @return [Boolean] true if lock acquired
      private def try_acquire_lock(lkey, lock_value)
        lock_body = "#{lock_value}:#{Time.now.to_i + LOCK_TTL}"
        # if_none_match: "*" makes the PUT conditional on the key not existing,
        # so exactly one writer wins (S3 conditional writes).
        @client.put_object(
          bucket: @bucket,
          key: lkey,
          body: lock_body,
          if_none_match: "*"
        )
        true
      rescue Aws::S3::Errors::PreconditionFailed
        false
      end

      # Clean up stale lock if expired.
      #
      # @param lkey [String] lock key
      private def cleanup_stale_lock(lkey)
        resp = @client.get_object(bucket: @bucket, key: lkey)
        lock_data = resp.body.read
        _lock_value, expires_at = lock_data.split(":")

        if expires_at && Time.now.to_i > Integer(expires_at, 10)
          @client.delete_object(bucket: @bucket, key: lkey)
          logger.debug("Cleaned up stale lock", key: lkey)
        end
      rescue Aws::S3::Errors::NoSuchKey
        # Lock doesn't exist, nothing to clean up
      end

      # List all objects in the prefix with pagination.
      #
      # @yield [Array<Aws::S3::Types::Object>] batch of objects
      private def list_all_objects
        continuation_token = nil

        loop do
          resp = @client.list_objects_v2(
            bucket: @bucket,
            prefix: @prefix,
            continuation_token:
          )

          yield resp.contents if resp.contents.any?

          break unless resp.is_truncated

          continuation_token = resp.next_continuation_token
        end
      end

      # Build an Entry from an S3 object, fetching metadata to get logical key.
      #
      # @param obj [Aws::S3::Types::Object] S3 object
      # @return [Array(String, Entry), Array(nil, nil)] logical key and entry, or nils if metadata missing
      private def build_entry_with_metadata(obj)
        resp = @client.head_object(bucket: @bucket, key: obj.key)
        logical_key = resp.metadata[LOGICAL_KEY_KEY]
        return [nil, nil] if logical_key.nil?

        age = Time.now - obj.last_modified
        expired = check_expired_from_head_response(resp)

        entry = Entry[
          size: obj.size,
          age:,
          expired:
        ]

        [logical_key, entry]
      rescue Aws::S3::Errors::NotFound
        [nil, nil]
      end

      # Check if object is expired from head_object response.
      #
      # @param resp [Aws::S3::Types::HeadObjectOutput] head_object response
      # @return [Boolean] true if expired
      private def check_expired_from_head_response(resp)
        return false if @ttl.nil?

        value = resp.metadata[EXPIRES_AT_KEY]
        return false if value.nil?

        Time.now.to_i > Integer(value, 10)
      end
    end
  end
end
@@ -48,7 +48,6 @@ module Factorix
48
48
  def call(caches: nil, all: false, expired: false, older_than: nil, **)
49
49
  validate_options!(all, expired, older_than)
50
50
 
51
- @now = Time.now
52
51
  @older_than_seconds = parse_age(older_than) if older_than
53
52
 
54
53
  cache_names = resolve_cache_names(caches)
@@ -114,25 +113,26 @@ module Factorix
114
113
  # @param expired [Boolean] remove expired entries only
115
114
  # @return [Hash] eviction result with :count and :size
116
115
  private def evict_cache(name, all:, expired:)
117
- config = Factorix.config.cache.public_send(name)
118
- cache_dir = config.dir
119
- ttl = config.ttl
120
-
121
- return {count: 0, size: 0} unless cache_dir.exist?
116
+ cache = Container.resolve(:"#{name}_cache")
122
117
 
123
118
  count = 0
124
119
  size = 0
125
120
 
126
- cache_dir.glob("**/*").each do |path|
127
- next unless path.file?
128
- next if path.extname == ".lock"
121
+ # Collect keys to evict (we can't modify during iteration)
122
+ to_evict = []
123
+ cache.each do |key, entry|
124
+ next unless should_evict?(entry, all:, expired:)
125
+
126
+ to_evict << [key, entry.size]
127
+ end
129
128
 
130
- next unless should_evict?(path, ttl, all:, expired:)
129
+ # Perform eviction
130
+ to_evict.each do |key, entry_size|
131
+ next unless cache.delete(key)
131
132
 
132
- size += path.size
133
- path.delete
134
133
  count += 1
135
- logger.debug("Evicted cache entry", path: path.to_s)
134
+ size += entry_size
135
+ logger.debug("Evicted cache entry", key:)
136
136
  end
137
137
 
138
138
  logger.info("Evicted cache entries", cache: name, count:, size:)
@@ -141,23 +141,18 @@ module Factorix
141
141
 
142
142
  # Determine if a cache entry should be evicted
143
143
  #
144
- # @param path [Pathname] path to cache entry
145
- # @param ttl [Integer, nil] cache TTL
144
+ # @param entry [Cache::Entry] cache entry
146
145
  # @param all [Boolean] remove all entries
147
146
  # @param expired [Boolean] remove expired entries only
148
147
  # @return [Boolean] true if entry should be evicted
149
- private def should_evict?(path, ttl, all:, expired:)
148
+ private def should_evict?(entry, all:, expired:)
150
149
  return true if all
151
150
 
152
- age_seconds = @now - path.mtime
153
-
154
151
  if expired
155
- return false if ttl.nil? # No TTL means never expires
156
-
157
- age_seconds > ttl
152
+ entry.expired?
158
153
  else
159
154
  # --older-than
160
- age_seconds > @older_than_seconds
155
+ entry.age > @older_than_seconds
161
156
  end
162
157
  end
163
158
 
@@ -1,5 +1,6 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ require "dry/inflector"
3
4
  require "json"
4
5
 
5
6
  module Factorix
@@ -8,13 +9,12 @@ module Factorix
8
9
  module Cache
9
10
  # Display cache statistics
10
11
  #
11
- # This command outout.puts statistics for all cache stores
12
+ # This command outputs statistics for all cache stores
12
13
  # in a human-readable or JSON format.
13
14
  #
14
15
  # @example
15
16
  # $ factorix cache stat
16
17
  # download:
17
- # Directory: ~/.cache/factorix/download
18
18
  # TTL: unlimited
19
19
  # Entries: 42 / 42 (100.0% valid)
20
20
  # ...
@@ -41,7 +41,6 @@ module Factorix
41
41
  def call(json:, **)
42
42
  logger.debug("Collecting cache statistics")
43
43
 
44
- @now = Time.now
45
44
  cache_names = Factorix.config.cache.values.keys
46
45
  stats = cache_names.to_h {|name| [name, collect_stats(name)] }
47
46
 
@@ -52,52 +51,42 @@ module Factorix
52
51
  end
53
52
  end
54
53
 
54
+ # Collect statistics for a cache
55
+ #
56
+ # @param name [Symbol] cache name
57
+ # @return [Hash] cache statistics
55
58
  private def collect_stats(name)
59
+ cache = Container.resolve(:"#{name}_cache")
56
60
  config = Factorix.config.cache.public_send(name)
57
- cache_dir = config.dir
58
61
 
59
- entries = scan_entries(cache_dir, config.ttl)
62
+ entries = scan_entries(cache)
60
63
 
61
64
  {
62
- directory: cache_dir.to_s,
63
65
  ttl: config.ttl,
64
- max_file_size: config.max_file_size,
65
- compression_threshold: config.compression_threshold,
66
66
  entries: build_entry_stats(entries),
67
67
  size: build_size_stats(entries),
68
68
  age: build_age_stats(entries),
69
- stale_locks: count_stale_locks(cache_dir)
69
+ backend_info: cache.backend_info
70
70
  }
71
71
  end
72
72
 
73
- # Scan cache directory and collect entry information
73
+ # Collect cache entries using the cache interface
74
74
  #
75
- # @param cache_dir [Pathname] cache directory path
76
- # @param ttl [Integer, nil] time-to-live in seconds
77
- # @return [Array<Hash>] array of entry info hashes
78
- private def scan_entries(cache_dir, ttl)
79
- return [] unless cache_dir.exist?
80
-
75
+ # @param cache [Cache::Base] cache instance
76
+ # @return [Array<Cache::Entry>] array of cache entries
77
+ private def scan_entries(cache)
81
78
  entries = []
82
- cache_dir.glob("**/*").each do |path|
83
- next unless path.file?
84
- next if path.extname == ".lock"
85
-
86
- age_seconds = @now - path.mtime
87
- expired = ttl ? age_seconds > ttl : false
88
-
89
- entries << {size: path.size, age: age_seconds, expired:}
90
- end
79
+ cache.each {|_key, entry| entries << entry }
91
80
  entries
92
81
  end
93
82
 
94
83
  # Build entry count statistics
95
84
  #
96
- # @param entries [Array<Hash>] entry info array
85
+ # @param entries [Array<Cache::Entry>] entry array
97
86
  # @return [Hash] entry statistics
98
87
  private def build_entry_stats(entries)
99
88
  total = entries.size
100
- valid = entries.count {|e| !e[:expired] }
89
+ valid = entries.count {|e| !e.expired? }
101
90
  expired = total - valid
102
91
 
103
92
  {total:, valid:, expired:}
@@ -105,37 +94,26 @@ module Factorix
105
94
 
106
95
  # Build size statistics
107
96
  #
108
- # @param entries [Array<Hash>] entry info array
97
+ # @param entries [Array<Cache::Entry>] entry array
109
98
  # @return [Hash] size statistics
110
99
  private def build_size_stats(entries)
111
100
  return {total: 0, avg: 0, min: 0, max: 0} if entries.empty?
112
101
 
113
- sizes = entries.map {|e| e[:size] }
102
+ sizes = entries.map(&:size)
114
103
  {total: sizes.sum, avg: sizes.sum / sizes.size, min: sizes.min, max: sizes.max}
115
104
  end
116
105
 
117
106
  # Build age statistics
118
107
  #
119
- # @param entries [Array<Hash>] entry info array
108
+ # @param entries [Array<Cache::Entry>] entry array
120
109
  # @return [Hash] age statistics
121
110
  private def build_age_stats(entries)
122
111
  return {oldest: nil, newest: nil, avg: nil} if entries.empty?
123
112
 
124
- ages = entries.map {|e| e[:age] }
113
+ ages = entries.map(&:age)
125
114
  {oldest: ages.max, newest: ages.min, avg: ages.sum / ages.size}
126
115
  end
127
116
 
128
- # Count stale lock files
129
- #
130
- # @param cache_dir [Pathname] cache directory path
131
- # @return [Integer] number of stale lock files
132
- private def count_stale_locks(cache_dir)
133
- return 0 unless cache_dir.exist?
134
-
135
- lock_lifetime = Factorix::Cache::FileSystem::LOCK_FILE_LIFETIME
136
- cache_dir.glob("**/*.lock").count {|path| @now - path.mtime > lock_lifetime }
137
- end
138
-
139
117
  # Output statistics in text format (ccache-style)
140
118
  #
141
119
  # @param stats [Hash] statistics for all caches
@@ -153,10 +131,7 @@ module Factorix
153
131
  # @param data [Hash] cache statistics
154
132
  # @return [void]
155
133
  private def output_cache_stats(data)
156
- out.puts " Directory: #{data[:directory]}"
157
134
  out.puts " TTL: #{format_ttl(data[:ttl])}"
158
- out.puts " Max file size: #{format_size(data[:max_file_size])}"
159
- out.puts " Compression: #{format_compression(data[:compression_threshold])}"
160
135
 
161
136
  entries = data[:entries]
162
137
  valid_pct = entries[:total] > 0 ? (Float(entries[:valid]) / entries[:total] * 100) : 0.0
@@ -172,7 +147,43 @@ module Factorix
172
147
  out.puts " Age: -"
173
148
  end
174
149
 
175
- out.puts " Stale locks: #{data[:stale_locks]}"
150
+ output_backend_info(data[:backend_info])
151
+ end
152
+
153
+ INFLECTOR = Dry::Inflector.new do |inflections|
154
+ inflections.acronym("URL")
155
+ end
156
+ private_constant :INFLECTOR
157
+
158
+ # Output backend-specific information
159
+ #
160
+ # @param info [Hash] backend-specific information
161
+ # @return [void]
162
+ private def output_backend_info(info)
163
+ return if info.empty?
164
+
165
+ out.puts " Backend:"
166
+ info.each do |key, value|
167
+ label = INFLECTOR.humanize(key)
168
+ formatted_value = format_backend_value(key, value)
169
+ out.puts " %-20s %s" % [label + ":", formatted_value]
170
+ end
171
+ end
172
+
173
+ # Format a backend info value for display
174
+ #
175
+ # @param key [Symbol] the key name
176
+ # @param value [Object] the value to format
177
+ # @return [String] formatted value
178
+ private def format_backend_value(key, value)
179
+ case key
180
+ when :max_file_size, :compression_threshold
181
+ value.nil? ? "unlimited" : format_size(value)
182
+ when :lock_timeout
183
+ format_duration(value)
184
+ else
185
+ value.to_s
186
+ end
176
187
  end
177
188
 
178
189
  # Format TTL value for display
@@ -182,18 +193,6 @@ module Factorix
182
193
  private def format_ttl(ttl)
183
194
  ttl.nil? ? "unlimited" : format_duration(ttl)
184
195
  end
185
-
186
- # Format compression threshold for display
187
- #
188
- # @param threshold [Integer, nil] compression threshold in bytes
189
- # @return [String] formatted compression setting
190
- private def format_compression(threshold)
191
- case threshold
192
- when nil then "disabled"
193
- when 0 then "enabled (always)"
194
- else "enabled (>= #{format_size(threshold)})"
195
- end
196
- end
197
196
  end
198
197
  end
199
198
  end