factorix 0.6.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +26 -0
  3. data/exe/factorix +17 -0
  4. data/lib/factorix/api/mod_download_api.rb +10 -5
  5. data/lib/factorix/api/mod_portal_api.rb +6 -49
  6. data/lib/factorix/cache/base.rb +116 -0
  7. data/lib/factorix/cache/entry.rb +25 -0
  8. data/lib/factorix/cache/file_system.rb +137 -57
  9. data/lib/factorix/cache/redis.rb +287 -0
  10. data/lib/factorix/cache/s3.rb +388 -0
  11. data/lib/factorix/cli/commands/cache/evict.rb +17 -22
  12. data/lib/factorix/cli/commands/cache/stat.rb +57 -58
  13. data/lib/factorix/cli/commands/download_support.rb +1 -6
  14. data/lib/factorix/cli/commands/mod/download.rb +2 -3
  15. data/lib/factorix/cli/commands/mod/edit.rb +1 -4
  16. data/lib/factorix/cli/commands/mod/image/add.rb +1 -4
  17. data/lib/factorix/cli/commands/mod/image/edit.rb +1 -4
  18. data/lib/factorix/cli/commands/mod/image/list.rb +1 -4
  19. data/lib/factorix/cli/commands/mod/install.rb +2 -3
  20. data/lib/factorix/cli/commands/mod/search.rb +2 -3
  21. data/lib/factorix/cli/commands/mod/show.rb +2 -3
  22. data/lib/factorix/cli/commands/mod/sync.rb +2 -3
  23. data/lib/factorix/cli/commands/mod/update.rb +6 -39
  24. data/lib/factorix/cli/commands/mod/upload.rb +1 -4
  25. data/lib/factorix/cli/commands/portal_support.rb +27 -0
  26. data/lib/factorix/container.rb +27 -13
  27. data/lib/factorix/errors.rb +3 -0
  28. data/lib/factorix/http/cache_decorator.rb +5 -5
  29. data/lib/factorix/info_json.rb +5 -5
  30. data/lib/factorix/portal.rb +3 -2
  31. data/lib/factorix/transfer/downloader.rb +19 -11
  32. data/lib/factorix/version.rb +1 -1
  33. data/lib/factorix.rb +45 -53
  34. data/sig/factorix/api/mod_download_api.rbs +1 -2
  35. data/sig/factorix/cache/base.rbs +28 -0
  36. data/sig/factorix/cache/entry.rbs +14 -0
  37. data/sig/factorix/cache/file_system.rbs +7 -6
  38. data/sig/factorix/cache/redis.rbs +36 -0
  39. data/sig/factorix/cache/s3.rbs +38 -0
  40. data/sig/factorix/errors.rbs +3 -0
  41. data/sig/factorix/portal.rbs +1 -1
  42. metadata +25 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 1eb581275b20274cebfa09669eb5f0111ed00f358d5a68947333b5990eb04d56
4
- data.tar.gz: 2a57a400c94476e860c2647af1e8fb3f845b231b1e377e265bfcc1d1de11d7ef
3
+ metadata.gz: f1d6b99d769a238f64676cfd19aef4d0f8852d2dc38df210eb753a8bc202f67f
4
+ data.tar.gz: c51b472d2eb47e7d0d39c4789112da30faf5fbf7e225da330d19e989bce86f43
5
5
  SHA512:
6
- metadata.gz: 9134480ff3cd2675216f6fc9407bb0719467c2488a43449c727294f5871289cb955b6b5a20257314e11d562a60e2eea8edf81bf72c8df739055675142dff8ad5
7
- data.tar.gz: 15630f358ac2dbe3ae85b3a0798fdea532b7d9cba43352ffc982c74cf6e5eea1a264329f9543c26100618c7d3e45c2b6cc8a69bf5aca203333a2c77157b68571
6
+ metadata.gz: 0e9bf68578d26f4b08e77028b1da59ff8d972f807822be5fa19ca1eb951e89412a3314a3db5fde1fcd821f399ee6c9e4e8c28e37d2d0c5e3d000046c5e663d2f
7
+ data.tar.gz: 81222393a0ab0a6b6e2cbbccac52dec243d4806ef688a1240a01367142094e368175caba564d4842ebd627ebdef98e322f8217b14a3424e8e20c7bbeddc248a4
data/CHANGELOG.md CHANGED
@@ -1,5 +1,31 @@
1
1
  ## [Unreleased]
2
2
 
3
+ ## [0.7.0] - 2026-01-24
4
+
5
+ ### Added
6
+
7
+ - Add pluggable cache backend architecture with Redis and S3 support (#18, #19)
8
+ - Configure backend per cache type: `config.cache.<type>.backend = :file_system | :redis | :s3`
9
+ - **Redis backend** (`Cache::Redis`): requires `redis` gem (~> 5)
10
+ - Distributed locking via Lua script for atomic lock release
11
+ - Auto-namespaced keys: `factorix-cache:{cache_type}:{key}`
12
+ - **S3 backend** (`Cache::S3`): requires `aws-sdk-s3` gem
13
+ - Distributed locking via conditional PUT (`if_none_match: "*"`)
14
+ - TTL managed via S3 custom metadata, age from native `Last-Modified`
15
+ - `cache stat` command displays backend-specific information (directory, URL, bucket, etc.)
16
+
17
+ ### Changed
18
+
19
+ - Refactor `Cache::FileSystem` to use `cache_type:` parameter instead of `root:` (#25)
20
+ - Aligns interface with other backends for consistent initialization
21
+ - Cache directory is now auto-computed from `Container[:runtime].factorix_cache_dir / cache_type`
22
+
23
+ ### Removed
24
+
25
+ - Remove deprecated `Factorix::Application` compatibility class
26
+ - Use `Factorix::Container` for DI (`[]`, `resolve`, `register`)
27
+ - Use `Factorix.config` and `Factorix.configure` for configuration
28
+
3
29
  ## [0.6.0] - 2026-01-18
4
30
 
5
31
  ### Changed
data/exe/factorix CHANGED
@@ -8,6 +8,23 @@ require "zip"
8
8
  # Suppress warnings about invalid dates in ZIP files
9
9
  Zip.warn_invalid_date = false
10
10
 
11
+ # Load config early, before dry-cli instantiates commands.
12
+ # This is necessary because command classes use Import which resolves
13
+ # dependencies at instantiation time (before CommandWrapper#call).
14
+ # Without this, cache backends would use default config instead of user config.
15
+ #
16
+ # Note: --config-path option is handled separately in CommandWrapper and
17
+ # will override settings if specified.
18
+ config_path_index = ARGV.index("--config-path") || ARGV.index("-c")
19
+ if config_path_index && ARGV[config_path_index + 1]
20
+ Factorix.load_config(Pathname(ARGV[config_path_index + 1]))
21
+ elsif ENV["FACTORIX_CONFIG"]
22
+ Factorix.load_config(Pathname(ENV.fetch("FACTORIX_CONFIG")))
23
+ else
24
+ default_config = Factorix::Container[:runtime].factorix_config_path
25
+ Factorix.load_config(default_config) if default_config.exist?
26
+ end
27
+
11
28
  begin
12
29
  Dry::CLI.new(Factorix::CLI).call
13
30
  exit 0
@@ -12,11 +12,9 @@ module Factorix
12
12
  # when FACTORIO_USERNAME/FACTORIO_TOKEN environment variables are not set.
13
13
  # It's resolved lazily via reader method instead.
14
14
  # @!parse
15
- # # @return [Transfer::Downloader]
16
- # attr_reader :downloader
17
15
  # # @return [Dry::Logger::Dispatcher]
18
16
  # attr_reader :logger
19
- include Import[:downloader, :logger]
17
+ include Import[:logger]
20
18
 
21
19
  BASE_URL = "https://mods.factorio.com"
22
20
  private_constant :BASE_URL
@@ -34,17 +32,24 @@ module Factorix
34
32
  # @param download_url [String] relative download URL from API response (e.g., "/download/mod-name/...")
35
33
  # @param output [Pathname] output file path
36
34
  # @param expected_sha1 [String, nil] expected SHA1 digest for verification (optional)
35
+ # @param handler [Object, nil] event handler for download progress (optional)
37
36
  # @return [void]
38
37
  # @raise [ArgumentError] if download_url is not a relative path starting with "/"
39
38
  # @raise [DigestMismatchError] if SHA1 verification fails
40
- def download(download_url, output, expected_sha1: nil)
39
+ def download(download_url, output, expected_sha1: nil, handler: nil)
41
40
  unless download_url.start_with?("/")
42
41
  logger.error("Invalid download_url", url: download_url)
43
42
  raise ArgumentError, "download_url must be a relative path starting with '/'"
44
43
  end
45
44
 
46
45
  uri = build_download_uri(download_url)
47
- downloader.download(uri, output, expected_sha1:)
46
+ downloader = Container[:downloader]
47
+ downloader.subscribe(handler) if handler
48
+ begin
49
+ downloader.download(uri, output, expected_sha1:)
50
+ ensure
51
+ downloader.unsubscribe(handler) if handler
52
+ end
48
53
  end
49
54
 
50
55
  private def service_credential
@@ -2,7 +2,6 @@
2
2
 
3
3
  require "erb"
4
4
  require "json"
5
- require "tempfile"
6
5
  require "uri"
7
6
 
8
7
  module Factorix
@@ -86,13 +85,13 @@ module Factorix
86
85
 
87
86
  # Invalidate get_mod cache
88
87
  mod_uri = build_uri("/api/mods/#{encoded_name}")
89
- mod_key = cache.key_for(mod_uri.to_s)
90
- cache.with_lock(mod_key) { cache.delete(mod_key) }
88
+ mod_cache_key = mod_uri.to_s
89
+ cache.with_lock(mod_cache_key) { cache.delete(mod_cache_key) }
91
90
 
92
91
  # Invalidate get_mod_full cache
93
92
  full_uri = build_uri("/api/mods/#{encoded_name}/full")
94
- full_key = cache.key_for(full_uri.to_s)
95
- cache.with_lock(full_key) { cache.delete(full_key) }
93
+ full_cache_key = full_uri.to_s
94
+ cache.with_lock(full_cache_key) { cache.delete(full_cache_key) }
96
95
 
97
96
  logger.debug("Invalidated cache for MOD", mod: mod_name)
98
97
  end
@@ -101,55 +100,13 @@ module Factorix
101
100
  URI.join(BASE_URL, path).tap {|uri| uri.query = URI.encode_www_form(params.sort.to_h) unless params.empty? }
102
101
  end
103
102
 
104
- # Fetch data with cache support
103
+ # Fetch data from API (caching is handled by CacheDecorator in api_http_client)
105
104
  #
106
105
  # @param uri [URI::HTTPS] URI to fetch
107
106
  # @return [Hash{Symbol => untyped}] parsed JSON response with symbolized keys
108
107
  private def fetch_with_cache(uri)
109
- key = cache.key_for(uri.to_s)
110
-
111
- cached = cache.read(key, encoding: "UTF-8")
112
- if cached
113
- logger.debug("API cache hit", uri: uri.to_s)
114
- return JSON.parse(cached, symbolize_names: true)
115
- end
116
-
117
- logger.debug("API cache miss", uri: uri.to_s)
118
- response_body = fetch_from_api(uri)
119
-
120
- store_in_cache(key, response_body)
121
-
122
- JSON.parse(response_body, symbolize_names: true)
123
- end
124
-
125
- # Fetch data from API via HTTP
126
- #
127
- # @param uri [URI::HTTPS] URI to fetch
128
- # @return [String] response body
129
- # @raise [HTTPClientError] for 4xx errors
130
- # @raise [HTTPServerError] for 5xx errors
131
- private def fetch_from_api(uri)
132
- logger.info("Fetching from API", uri: uri.to_s)
133
108
  response = client.get(uri)
134
- logger.info("API response", code: response.code, size_bytes: response.body.bytesize)
135
- response.body
136
- end
137
-
138
- # Store response body in cache via temporary file
139
- #
140
- # @param key [String] cache key
141
- # @param data [String] response body
142
- # @return [void]
143
- private def store_in_cache(key, data)
144
- temp_file = Tempfile.new("cache")
145
- begin
146
- temp_file.write(data)
147
- temp_file.close
148
- cache.store(key, Pathname(temp_file.path))
149
- logger.debug("Stored API response in cache", key:)
150
- ensure
151
- temp_file.unlink
152
- end
109
+ JSON.parse((+response.body).force_encoding(Encoding::UTF_8), symbolize_names: true)
153
110
  end
154
111
 
155
112
  # Validate page_size parameter
@@ -0,0 +1,116 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Factorix
4
+ module Cache
5
+ # Abstract base class for cache backends.
6
+ #
7
+ # All cache backends (FileSystem, S3, Redis) inherit from this class
8
+ # and implement the abstract methods defined here.
9
+ #
10
+ # @abstract Subclasses must implement all abstract methods.
11
+ class Base
12
+ # @return [Integer, nil] time-to-live in seconds (nil for unlimited)
13
+ attr_reader :ttl
14
+
15
+ # Initialize a new cache backend.
16
+ #
17
+ # @param ttl [Integer, nil] time-to-live in seconds (nil for unlimited)
18
+ def initialize(ttl: nil)
19
+ @ttl = ttl
20
+ end
21
+
22
+ # Check if a cache entry exists and is not expired.
23
+ #
24
+ # @param key [String] logical cache key
25
+ # @return [Boolean] true if the cache entry exists and is valid
26
+ # @abstract
27
+ def exist?(key) = raise NotImplementedError, "#{self.class}#exist? must be implemented"
28
+
29
+ # Read a cached entry as a string.
30
+ #
31
+ # @param key [String] logical cache key
32
+ # @return [String, nil] cached content or nil if not found/expired
33
+ # @abstract
34
+ def read(key) = raise NotImplementedError, "#{self.class}#read must be implemented"
35
+
36
+ # Write cached content to a file.
37
+ #
38
+ # Unlike {#read} which returns content as a String, this method writes
39
+ # directly to a file path, which is more memory-efficient for large files.
40
+ #
41
+ # @param key [String] logical cache key
42
+ # @param output [Pathname] path to write the cached content
43
+ # @return [Boolean] true if written successfully, false if not found/expired
44
+ # @abstract
45
+ def write_to(key, output) = raise NotImplementedError, "#{self.class}#write_to must be implemented"
46
+
47
+ # Store data in the cache.
48
+ #
49
+ # @param key [String] logical cache key
50
+ # @param src [Pathname] path to the source file
51
+ # @return [Boolean] true if stored successfully
52
+ # @abstract
53
+ def store(key, src) = raise NotImplementedError, "#{self.class}#store must be implemented"
54
+
55
+ # Delete a cache entry.
56
+ #
57
+ # @param key [String] logical cache key
58
+ # @return [Boolean] true if deleted, false if not found
59
+ # @abstract
60
+ def delete(key) = raise NotImplementedError, "#{self.class}#delete must be implemented"
61
+
62
+ # Clear all cache entries.
63
+ #
64
+ # @return [void]
65
+ # @abstract
66
+ def clear = raise NotImplementedError, "#{self.class}#clear must be implemented"
67
+
68
+ # Execute a block with an exclusive lock on the cache entry.
69
+ #
70
+ # @param key [String] logical cache key
71
+ # @yield block to execute with lock held
72
+ # @abstract
73
+ def with_lock(key) = raise NotImplementedError, "#{self.class}#with_lock must be implemented"
74
+
75
+ # Get the age of a cache entry in seconds.
76
+ #
77
+ # @param key [String] logical cache key
78
+ # @return [Float, nil] age in seconds, or nil if entry doesn't exist
79
+ # @abstract
80
+ def age(key) = raise NotImplementedError, "#{self.class}#age must be implemented"
81
+
82
+ # Check if a cache entry has expired based on TTL.
83
+ #
84
+ # @param key [String] logical cache key
85
+ # @return [Boolean] true if expired, false otherwise
86
+ # @abstract
87
+ def expired?(key) = raise NotImplementedError, "#{self.class}#expired? must be implemented"
88
+
89
+ # Get the size of a cached entry in bytes.
90
+ #
91
+ # @param key [String] logical cache key
92
+ # @return [Integer, nil] size in bytes, or nil if entry doesn't exist/expired
93
+ # @abstract
94
+ def size(key) = raise NotImplementedError, "#{self.class}#size must be implemented"
95
+
96
+ # Enumerate cache entries.
97
+ #
98
+ # Yields [key, entry] pairs similar to Hash#each.
99
+ #
100
+ # @yield [key, entry] logical key and Entry object
101
+ # @yieldparam key [String] logical cache key
102
+ # @yieldparam entry [Entry] cache entry metadata
103
+ # @return [Enumerator] if no block given
104
+ # @abstract
105
+ def each = raise NotImplementedError, "#{self.class}#each must be implemented"
106
+
107
+ # Return backend-specific information.
108
+ #
109
+ # Subclasses should override this method to provide configuration details
110
+ # specific to their backend implementation.
111
+ #
112
+ # @return [Hash] backend-specific information (empty by default)
113
+ def backend_info = {}
114
+ end
115
+ end
116
+ end
@@ -0,0 +1,25 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Factorix
4
+ module Cache
5
+ Entry = Data.define(:size, :age, :expired)
6
+
7
+ # Represents a cache entry for enumeration operations.
8
+ #
9
+ # Used by {Base#each} to yield entry metadata alongside keys.
10
+ # Note: The key is NOT included in Entry; it is yielded separately.
11
+ #
12
+ # @!attribute [r] size
13
+ # @return [Integer] entry size in bytes
14
+ # @!attribute [r] age
15
+ # @return [Float] age in seconds since creation/modification
16
+ class Entry
17
+ private :expired
18
+
19
+ # Check if the cache entry has expired.
20
+ #
21
+ # @return [Boolean] true if entry has exceeded TTL
22
+ def expired? = expired
23
+ end
24
+ end
25
+ end
@@ -2,6 +2,7 @@
2
2
 
3
3
  require "digest"
4
4
  require "fileutils"
5
+ require "json"
5
6
  require "pathname"
6
7
  require "zlib"
7
8
 
@@ -12,7 +13,12 @@ module Factorix
12
13
  # Uses a two-level directory structure to store cached files,
13
14
  # with file locking to handle concurrent access and TTL support
14
15
  # for cache expiration.
15
- class FileSystem
16
+ #
17
+ # Cache entries consist of:
18
+ # - Data file: the cached content (optionally compressed)
19
+ # - Metadata file (.metadata): JSON containing the logical key
20
+ # - Lock file (.lock): used for concurrent access control
21
+ class FileSystem < Base
16
22
  # @!parse
17
23
  # # @return [Dry::Logger::Dispatcher]
18
24
  # attr_reader :logger
@@ -29,52 +35,46 @@ module Factorix
29
35
  private_constant :ZLIB_CMF_BYTE
30
36
 
31
37
  # Initialize a new file system cache storage.
32
- # Creates the cache directory if it doesn't exist
38
+ # Creates the cache directory if it doesn't exist.
39
+ # Cache directory is auto-calculated as: factorix_cache_dir / cache_type
33
40
  #
34
- # @param cache_dir [Pathname] path to the cache directory
35
- # @param ttl [Integer, nil] time-to-live in seconds (nil for unlimited)
41
+ # @param cache_type [Symbol] cache type for directory name (e.g., :api, :download)
36
42
  # @param max_file_size [Integer, nil] maximum file size in bytes (nil for unlimited)
37
43
  # @param compression_threshold [Integer, nil] compress data larger than this size in bytes
38
44
  # (nil: no compression, 0: always compress, N: compress if >= N bytes)
39
- def initialize(cache_dir, ttl: nil, max_file_size: nil, compression_threshold: nil, logger: nil)
40
- super(logger:)
41
- @cache_dir = cache_dir
42
- @ttl = ttl
45
+ # @param ttl [Integer, nil] time-to-live in seconds (nil for unlimited)
46
+ def initialize(cache_type:, max_file_size: nil, compression_threshold: nil, **)
47
+ super(**)
48
+ @cache_dir = Container[:runtime].factorix_cache_dir / cache_type.to_s
43
49
  @max_file_size = max_file_size
44
50
  @compression_threshold = compression_threshold
45
51
  @cache_dir.mkpath
46
- logger.info("Initializing cache", dir: @cache_dir.to_s, ttl: @ttl, max_size: @max_file_size, compression_threshold: @compression_threshold)
52
+ logger.info("Initializing cache", root: @cache_dir.to_s, ttl: @ttl, max_size: @max_file_size, compression_threshold: @compression_threshold)
47
53
  end
48
54
 
49
- # Generate a cache key for the given URL string.
50
- # Uses SHA1 to create a unique, deterministic key
51
- #
52
- # @param url_string [String] URL string to generate key for
53
- # @return [String] cache key
54
- # Use Digest(:SHA1) instead of Digest::SHA1 for thread-safety (Ruby 2.2+)
55
- def key_for(url_string) = Digest(:SHA1).hexdigest(url_string)
56
-
57
55
  # Check if a cache entry exists and is not expired.
58
56
  # A cache entry is considered to exist if its file exists and is not expired
59
57
  #
60
- # @param key [String] cache key to check
58
+ # @param key [String] logical cache key
61
59
  # @return [Boolean] true if the cache entry exists and is valid, false otherwise
62
60
  def exist?(key)
63
- return false unless cache_path_for(key).exist?
61
+ internal_key = storage_key_for(key)
62
+ return false unless cache_path_for(internal_key).exist?
64
63
  return true if @ttl.nil?
65
64
 
66
65
  !expired?(key)
67
66
  end
68
67
 
69
- # Fetch a cached file and copy it to the output path.
68
+ # Write cached content to a file.
70
69
  # If the cache entry doesn't exist or is expired, returns false without modifying the output path.
71
70
  # Automatically decompresses zlib-compressed cache entries.
72
71
  #
73
- # @param key [String] cache key to fetch
74
- # @param output [Pathname] path to copy the cached file to
75
- # @return [Boolean] true if the cache entry was found and copied, false otherwise
76
- def fetch(key, output)
77
- path = cache_path_for(key)
72
+ # @param key [String] logical cache key
73
+ # @param output [Pathname] path to write the cached content to
74
+ # @return [Boolean] true if written successfully, false if not found/expired
75
+ def write_to(key, output)
76
+ internal_key = storage_key_for(key)
77
+ path = cache_path_for(internal_key)
78
78
  unless path.exist?
79
79
  logger.debug("Cache miss", key:)
80
80
  return false
@@ -96,21 +96,21 @@ module Factorix
96
96
  true
97
97
  end
98
98
 
99
- # Read a cached file as a string.
99
+ # Read a cached file as a binary string.
100
100
  # If the cache entry doesn't exist or is expired, returns nil.
101
101
  # Automatically decompresses zlib-compressed cache entries.
102
102
  #
103
- # @param key [String] cache key to read
104
- # @param encoding [Encoding, String] encoding to use (default: ASCII-8BIT for binary)
103
+ # @param key [String] logical cache key
105
104
  # @return [String, nil] cached content or nil if not found/expired
106
- def read(key, encoding: Encoding::ASCII_8BIT)
107
- path = cache_path_for(key)
105
+ def read(key)
106
+ internal_key = storage_key_for(key)
107
+ path = cache_path_for(internal_key)
108
108
  return nil unless path.exist?
109
109
  return nil if expired?(key)
110
110
 
111
111
  data = path.binread
112
112
  data = Zlib.inflate(data) if zlib_compressed?(data)
113
- data.force_encoding(encoding)
113
+ data
114
114
  end
115
115
 
116
116
  # Store a file in the cache.
@@ -118,7 +118,7 @@ module Factorix
118
118
  # Optionally compresses data based on compression_threshold setting.
119
119
  # If the (possibly compressed) size exceeds max_file_size, skips caching and returns false.
120
120
  #
121
- # @param key [String] cache key to store under
121
+ # @param key [String] logical cache key
122
122
  # @param src [Pathname] path of the file to store
123
123
  # @return [Boolean] true if cached successfully, false if skipped due to size limit
124
124
  def store(key, src)
@@ -135,22 +135,30 @@ module Factorix
135
135
  return false
136
136
  end
137
137
 
138
- path = cache_path_for(key)
138
+ internal_key = storage_key_for(key)
139
+ path = cache_path_for(internal_key)
140
+ metadata_path = metadata_path_for(internal_key)
141
+
139
142
  path.dirname.mkpath
140
143
  path.binwrite(data)
144
+ metadata_path.write(JSON.generate({logical_key: key}))
141
145
  logger.debug("Stored in cache", key:, size_bytes: data.bytesize)
142
146
  true
143
147
  end
144
148
 
145
149
  # Delete a specific cache entry.
146
150
  #
147
- # @param key [String] cache key to delete
151
+ # @param key [String] logical cache key
148
152
  # @return [Boolean] true if the entry was deleted, false if it didn't exist
149
153
  def delete(key)
150
- path = cache_path_for(key)
154
+ internal_key = storage_key_for(key)
155
+ path = cache_path_for(internal_key)
156
+ metadata_path = metadata_path_for(internal_key)
157
+
151
158
  return false unless path.exist?
152
159
 
153
160
  path.delete
161
+ metadata_path.delete if metadata_path.exist?
154
162
  logger.debug("Deleted from cache", key:)
155
163
  true
156
164
  end
@@ -160,13 +168,14 @@ module Factorix
160
168
  #
161
169
  # @return [void]
162
170
  def clear
163
- logger.info("Clearing cache directory", dir: @cache_dir.to_s)
171
+ logger.info("Clearing cache directory", root: @cache_dir.to_s)
164
172
  count = 0
165
173
  @cache_dir.glob("**/*").each do |path|
166
- if path.file?
167
- path.delete
168
- count += 1
169
- end
174
+ next unless path.file?
175
+ next if path.extname == ".lock"
176
+
177
+ path.delete
178
+ count += 1
170
179
  end
171
180
  logger.info("Cache cleared", files_removed: count)
172
181
  end
@@ -174,10 +183,11 @@ module Factorix
174
183
  # Get the age of a cache entry in seconds.
175
184
  # Returns nil if the entry doesn't exist.
176
185
  #
177
- # @param key [String] cache key
186
+ # @param key [String] logical cache key
178
187
  # @return [Float, nil] age in seconds, or nil if entry doesn't exist
179
188
  def age(key)
180
- path = cache_path_for(key)
189
+ internal_key = storage_key_for(key)
190
+ path = cache_path_for(internal_key)
181
191
  return nil unless path.exist?
182
192
 
183
193
  Time.now - path.mtime
@@ -186,7 +196,7 @@ module Factorix
186
196
  # Check if a cache entry has expired based on TTL.
187
197
  # Returns false if TTL is not set (unlimited) or if entry doesn't exist.
188
198
  #
189
- # @param key [String] cache key
199
+ # @param key [String] logical cache key
190
200
  # @return [Boolean] true if expired, false otherwise
191
201
  def expired?(key)
192
202
  return false if @ttl.nil?
@@ -200,10 +210,11 @@ module Factorix
200
210
  # Get the size of a cached file in bytes.
201
211
  # Returns nil if the entry doesn't exist or is expired.
202
212
  #
203
- # @param key [String] cache key
213
+ # @param key [String] logical cache key
204
214
  # @return [Integer, nil] file size in bytes, or nil if entry doesn't exist/expired
205
215
  def size(key)
206
- path = cache_path_for(key)
216
+ internal_key = storage_key_for(key)
217
+ path = cache_path_for(internal_key)
207
218
  return nil unless path.exist?
208
219
  return nil if expired?(key)
209
220
 
@@ -211,13 +222,14 @@ module Factorix
211
222
  end
212
223
 
213
224
  # Executes the given block with a file lock.
214
- # Uses flock for process-safe file locking and automatically removes stale locks
225
+ # Uses flock for process-safe file locking and automatically removes stale locks.
215
226
  #
216
- # @param key [String] cache key to lock
227
+ # @param key [String] logical cache key
217
228
  # @yield Executes the block with exclusive file lock
218
229
  # @return [void]
219
230
  def with_lock(key)
220
- lock_path = lock_path_for(key)
231
+ internal_key = storage_key_for(key)
232
+ lock_path = lock_path_for(internal_key)
221
233
  cleanup_stale_lock(lock_path)
222
234
 
223
235
  lock_path.dirname.mkpath
@@ -240,23 +252,83 @@ module Factorix
240
252
  end
241
253
  end
242
254
 
243
- # Get the cache file path for the given key.
255
+ # Enumerate cache entries.
256
+ #
257
+ # Yields [key, entry] pairs similar to Hash#each.
258
+ # Skips entries without metadata files (legacy entries).
259
+ #
260
+ # @yield [key, entry] logical key and Entry object
261
+ # @yieldparam key [String] logical cache key
262
+ # @yieldparam entry [Entry] cache entry metadata
263
+ # @return [Enumerator] if no block given
264
+ def each
265
+ return enum_for(__method__) unless block_given?
266
+
267
+ @cache_dir.glob("**/*").each do |path|
268
+ next unless path.file?
269
+ next if path.extname == ".metadata" || path.extname == ".lock"
270
+
271
+ metadata_path = Pathname("#{path}.metadata")
272
+ next unless metadata_path.exist?
273
+
274
+ logical_key = JSON.parse(metadata_path.read)["logical_key"]
275
+ age = Time.now - path.mtime
276
+ entry = Entry.new(
277
+ size: path.size,
278
+ age:,
279
+ expired: @ttl ? age > @ttl : false
280
+ )
281
+
282
+ yield logical_key, entry
283
+ end
284
+ end
285
+
286
+ # Return backend-specific information.
287
+ #
288
+ # @return [Hash] backend configuration and status
289
+ def backend_info
290
+ {
291
+ type: "file_system",
292
+ directory: @cache_dir.to_s,
293
+ max_file_size: @max_file_size,
294
+ compression_threshold: @compression_threshold,
295
+ stale_locks: count_stale_locks
296
+ }
297
+ end
298
+
299
+ # Generate a storage key for the given logical key.
300
+ # Uses SHA1 to create a unique, deterministic key.
301
+ # Use Digest(:SHA1) instead of Digest::SHA1 for thread-safety (Ruby 2.2+)
302
+ #
303
+ # @param logical_key [String] logical key to generate storage key for
304
+ # @return [String] storage key (SHA1 hash)
305
+ private def storage_key_for(logical_key) = Digest(:SHA1).hexdigest(logical_key)
306
+
307
+ # Get the cache file path for the given internal key.
244
308
  # Uses a two-level directory structure to avoid too many files in one directory
245
309
  #
246
- # @param key [String] cache key
310
+ # @param internal_key [String] internal storage key
247
311
  # @return [Pathname] path to the cache file
248
- private def cache_path_for(key)
249
- prefix = key[0, 2]
250
- @cache_dir.join(prefix, key[2..])
312
+ private def cache_path_for(internal_key)
313
+ prefix = internal_key[0, 2]
314
+ @cache_dir.join(prefix, internal_key[2..])
315
+ end
316
+
317
+ # Get the metadata file path for the given internal key.
318
+ #
319
+ # @param internal_key [String] internal storage key
320
+ # @return [Pathname] path to the metadata file
321
+ private def metadata_path_for(internal_key)
322
+ Pathname("#{cache_path_for(internal_key)}.metadata")
251
323
  end
252
324
 
253
- # Get the lock file path for the given key.
325
+ # Get the lock file path for the given internal key.
254
326
  # Lock files are stored alongside cache files with a .lock extension
255
327
  #
256
- # @param key [String] cache key
328
+ # @param internal_key [String] internal storage key
257
329
  # @return [Pathname] path to the lock file
258
- private def lock_path_for(key)
259
- cache_path_for(key).sub_ext(".lock")
330
+ private def lock_path_for(internal_key)
331
+ cache_path_for(internal_key).sub_ext(".lock")
260
332
  end
261
333
 
262
334
  # Check if data should be compressed based on compression_threshold setting.
@@ -302,6 +374,14 @@ module Factorix
302
374
  nil
303
375
  end
304
376
  end
377
+
378
+ # Count stale lock files in the cache directory.
379
+ #
380
+ # @return [Integer] number of stale lock files
381
+ private def count_stale_locks
382
+ cutoff = Time.now - LOCK_FILE_LIFETIME
383
+ @cache_dir.glob("**/*.lock").count {|path| path.mtime < cutoff }
384
+ end
305
385
  end
306
386
  end
307
387
  end