fmu-settings 0.5.2__tar.gz → 0.5.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fmu-settings might be problematic.

Files changed (47)
  1. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/PKG-INFO +1 -1
  2. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_fmu_dir.py +23 -3
  3. fmu_settings-0.5.4/src/fmu/settings/_resources/cache_manager.py +149 -0
  4. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_resources/lock_manager.py +33 -17
  5. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_resources/pydantic_resource_manager.py +11 -2
  6. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_version.py +3 -3
  7. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/PKG-INFO +1 -1
  8. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/SOURCES.txt +2 -0
  9. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_fmu_dir.py +19 -0
  10. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_global_config.py +20 -20
  11. fmu_settings-0.5.4/tests/test_resources/test_cache_manager.py +116 -0
  12. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_resources/test_lock_manager.py +32 -28
  13. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_resources/test_resource_managers.py +100 -0
  14. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/.coveragerc +0 -0
  15. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/.github/pull_request_template.md +0 -0
  16. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/.github/workflows/ci.yml +0 -0
  17. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/.github/workflows/codeql.yml +0 -0
  18. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/.github/workflows/publish.yml +0 -0
  19. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/.gitignore +0 -0
  20. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/CONTRIBUTING.md +0 -0
  21. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/LICENSE +0 -0
  22. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/README.md +0 -0
  23. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/SECURITY.md +0 -0
  24. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/pyproject.toml +0 -0
  25. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/setup.cfg +0 -0
  26. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/__init__.py +0 -0
  27. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/__init__.py +0 -0
  28. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_global_config.py +0 -0
  29. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_init.py +0 -0
  30. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_logging.py +0 -0
  31. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_resources/__init__.py +0 -0
  32. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_resources/config_managers.py +0 -0
  33. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/models/__init__.py +0 -0
  34. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/models/_enums.py +0 -0
  35. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/models/_mappings.py +0 -0
  36. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/models/lock_info.py +0 -0
  37. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/models/project_config.py +0 -0
  38. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/models/user_config.py +0 -0
  39. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/py.typed +0 -0
  40. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/types.py +0 -0
  41. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/dependency_links.txt +0 -0
  42. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/requires.txt +0 -0
  43. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/top_level.txt +0 -0
  44. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/conftest.py +0 -0
  45. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_init.py +0 -0
  46. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_resources/test_project_config.py +0 -0
  47. {fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_resources/test_user_config.py +0 -0
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fmu-settings
-Version: 0.5.2
+Version: 0.5.4
 Summary: A library for managing FMU settings
 Author-email: Equinor <fg-fmu_atlas@equinor.com>
 License: GPL-3.0
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_fmu_dir.py

@@ -1,9 +1,10 @@
 """Main interface for working with .fmu directory."""

 from pathlib import Path
-from typing import Any, Final, Self, TypeAlias, cast
+from typing import TYPE_CHECKING, Any, Final, Self, TypeAlias, cast

 from ._logging import null_logger
+from ._resources.cache_manager import CacheManager
 from ._resources.config_managers import (
     ProjectConfigManager,
     UserConfigManager,
@@ -22,6 +23,7 @@ class FMUDirectoryBase:

     config: FMUConfigManager
     _lock: LockManager
+    _cache_manager: CacheManager

     def __init__(self: Self, base_path: str | Path) -> None:
         """Initializes access to a .fmu directory.
@@ -38,6 +40,7 @@ class FMUDirectoryBase:
         self.base_path = Path(base_path).resolve()
         logger.debug(f"Initializing FMUDirectory from '{base_path}'")
         self._lock = LockManager(self)
+        self._cache_manager = CacheManager(self, max_revisions=5)

         fmu_dir = self.base_path / ".fmu"
         if fmu_dir.exists():
@@ -57,6 +60,21 @@ class FMUDirectoryBase:
         """Returns the path to the .fmu directory."""
         return self._path

+    @property
+    def cache(self: Self) -> CacheManager:
+        """Access the cache manager."""
+        return self._cache_manager
+
+    @property
+    def cache_max_revisions(self: Self) -> int:
+        """Current retention limit for revision snapshots."""
+        return self._cache_manager.max_revisions
+
+    @cache_max_revisions.setter
+    def cache_max_revisions(self: Self, value: int) -> None:
+        """Update the retention limit for revision snapshots."""
+        self._cache_manager.max_revisions = value
+
     def get_config_value(self: Self, key: str, default: Any = None) -> Any:
         """Gets a configuration value by key.

@@ -214,7 +232,8 @@ class FMUDirectoryBase:


 class ProjectFMUDirectory(FMUDirectoryBase):
-    config: ProjectConfigManager
+    if TYPE_CHECKING:
+        config: ProjectConfigManager

     def __init__(self, base_path: str | Path) -> None:
         """Initializes a project-based .fmu directory."""
@@ -287,7 +306,8 @@ class ProjectFMUDirectory(FMUDirectoryBase):


 class UserFMUDirectory(FMUDirectoryBase):
-    config: UserConfigManager
+    if TYPE_CHECKING:
+        config: UserConfigManager

     def __init__(self) -> None:
         """Initializes a project-based .fmu directory."""
fmu_settings-0.5.4/src/fmu/settings/_resources/cache_manager.py (new file)

@@ -0,0 +1,149 @@
+"""Utilities for storing revision snapshots of .fmu files."""
+
+from __future__ import annotations
+
+from datetime import UTC, datetime
+from pathlib import Path
+from typing import TYPE_CHECKING, Final, Self
+from uuid import uuid4
+
+from fmu.settings._logging import null_logger
+
+if TYPE_CHECKING:
+    from fmu.settings._fmu_dir import FMUDirectoryBase
+
+logger: Final = null_logger(__name__)
+
+_CACHEDIR_TAG_CONTENT: Final = (
+    "Signature: 8a477f597d28d172789f06886806bc55\n"
+    "# This directory contains cached FMU files.\n"
+    "# For information about cache directory tags, see:\n"
+    "# https://bford.info/cachedir/spec.html"
+)
+
+
+class CacheManager:
+    """Stores complete file revisions under the `.fmu/cache` tree."""
+
+    def __init__(
+        self: Self,
+        fmu_dir: FMUDirectoryBase,
+        max_revisions: int = 5,
+    ) -> None:
+        """Initialize the cache manager.
+
+        Args:
+            fmu_dir: The FMUDirectory instance.
+            max_revisions: Maximum number of revisions to retain. Default is 5.
+        """
+        self._fmu_dir = fmu_dir
+        self._cache_root = Path("cache")
+        self._max_revisions = max(0, max_revisions)
+
+    @property
+    def max_revisions(self: Self) -> int:
+        """Maximum number of revisions retained per resource."""
+        return self._max_revisions
+
+    @max_revisions.setter
+    def max_revisions(self: Self, value: int) -> None:
+        """Update the per-resource revision retention."""
+        self._max_revisions = max(0, value)
+
+    def store_revision(
+        self: Self,
+        resource_file_path: Path | str,
+        content: str,
+        encoding: str = "utf-8",
+    ) -> Path | None:
+        """Write a full snapshot of the resource file to the cache directory.
+
+        Args:
+            resource_file_path: Relative path within the ``.fmu`` directory (e.g.,
+                ``config.json``) of the resource file being cached.
+            content: Serialized payload to store.
+            encoding: Encoding used when persisting the snapshot. Defaults to UTF-8.
+
+        Returns:
+            Absolute filesystem path to the stored snapshot, or ``None`` if caching is
+            disabled (``max_revisions`` equals zero).
+        """
+        if self.max_revisions == 0:
+            return None
+
+        resource_file_path = Path(resource_file_path)
+        cache_dir = self._ensure_resource_cache_dir(resource_file_path)
+        snapshot_name = self._snapshot_filename(resource_file_path)
+        snapshot_path = cache_dir / snapshot_name
+
+        cache_relative = self._cache_root / resource_file_path.stem
+        self._fmu_dir.write_text_file(
+            cache_relative / snapshot_name, content, encoding=encoding
+        )
+        logger.debug("Stored revision snapshot at %s", snapshot_path)
+
+        self._trim(cache_dir)
+        return snapshot_path
+
+    def list_revisions(self: Self, resource_file_path: Path | str) -> list[Path]:
+        """List existing snapshots for a resource file, sorted oldest to newest.
+
+        Args:
+            resource_file_path: Relative path within the ``.fmu`` directory (e.g.,
+                ``config.json``) whose cache entries should be listed.
+
+        Returns:
+            A list of absolute `Path` objects sorted oldest to newest.
+        """
+        resource_file_path = Path(resource_file_path)
+        cache_relative = self._cache_root / resource_file_path.stem
+        if not self._fmu_dir.file_exists(cache_relative):
+            return []
+        cache_dir = self._fmu_dir.get_file_path(cache_relative)
+
+        revisions = [p for p in cache_dir.iterdir() if p.is_file()]
+        revisions.sort(key=lambda path: path.name)
+        return revisions
+
+    def _ensure_resource_cache_dir(self: Self, resource_file_path: Path) -> Path:
+        """Create (if needed) and return the cache directory for resource file."""
+        self._cache_root_path(create=True)
+        resource_cache_dir_relative = self._cache_root / resource_file_path.stem
+        return self._fmu_dir.ensure_directory(resource_cache_dir_relative)
+
+    def _cache_root_path(self: Self, create: bool) -> Path:
+        """Resolve the cache root, creating it and the cachedir tag if requested."""
+        if create:
+            cache_root = self._fmu_dir.ensure_directory(self._cache_root)
+            self._ensure_cachedir_tag()
+            return cache_root
+
+        return self._fmu_dir.get_file_path(self._cache_root)
+
+    def _ensure_cachedir_tag(self: Self) -> None:
+        """Ensure the cache root complies with the Cachedir specification."""
+        tag_path_relative = self._cache_root / "CACHEDIR.TAG"
+        if self._fmu_dir.file_exists(tag_path_relative):
+            return
+        self._fmu_dir.write_text_file(tag_path_relative, _CACHEDIR_TAG_CONTENT)
+
+    def _snapshot_filename(self: Self, resource_file_path: Path) -> str:
+        """Generate a timestamped filename for the next snapshot."""
+        timestamp = datetime.now(UTC).strftime("%Y%m%dT%H%M%S.%fZ")
+        suffix = resource_file_path.suffix or ".txt"
+        token = uuid4().hex[:8]
+        return f"{timestamp}-{token}{suffix}"
+
+    def _trim(self: Self, cache_dir: Path) -> None:
+        """Remove the oldest snapshots until the retention limit is respected."""
+        revisions = [p for p in cache_dir.iterdir() if p.is_file()]
+        if len(revisions) <= self.max_revisions:
+            return
+
+        revisions.sort(key=lambda path: path.name)
+        excess = len(revisions) - self.max_revisions
+        for old_revision in revisions[:excess]:
+            try:
+                old_revision.unlink()
+            except FileNotFoundError:
+                continue
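Editor's note: the snippet below is a hedged usage sketch, not code from the package. It shows how the revision-cache surface added in 0.5.4 might be exercised through ProjectFMUDirectory; the project path is hypothetical and is assumed to already contain an initialized .fmu directory.

    from fmu.settings._fmu_dir import ProjectFMUDirectory

    project_path = "/path/to/project"  # hypothetical; assumed to already hold a .fmu directory
    fmu_dir = ProjectFMUDirectory(project_path)

    # Retention is per resource; setting it to 0 disables snapshotting entirely.
    fmu_dir.cache_max_revisions = 3

    # store_revision() writes a timestamped copy under .fmu/cache/<stem>/ and
    # returns its absolute path (or None when max_revisions is 0).
    snapshot = fmu_dir.cache.store_revision("config.json", '{"version": "0.5.4"}')
    print(snapshot)

    # list_revisions() returns the stored snapshots, oldest to newest.
    for revision in fmu_dir.cache.list_revisions("config.json"):
        print(revision.name)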
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_resources/lock_manager.py

@@ -32,9 +32,15 @@ class LockError(Exception):
     """Raised when the lock cannot be acquired."""


+class LockNotFoundError(FileNotFoundError):
+    """Raised when the lock cannot be found."""
+
+
 class LockManager(PydanticResourceManager[LockInfo]):
     """Manages the .lock file."""

+    cache_enabled: bool = False
+
     def __init__(
         self: Self,
         fmu_dir: FMUDirectoryBase,
@@ -82,7 +88,7 @@ class LockManager(PydanticResourceManager[LockInfo]):
             return

         if not wait:
-            lock_info = self._safe_load()
+            lock_info = self.safe_load()
            if lock_info:
                raise LockError(
                    f"Lock file is held by {lock_info.user}@{lock_info.hostname} "
@@ -152,12 +158,16 @@ class LockManager(PydanticResourceManager[LockInfo]):
            with contextlib.suppress(OSError):
                temp_path.unlink()

-    def is_locked(self: Self) -> bool:
+    def is_locked(self: Self, *, propagate_errors: bool = False) -> bool:
         """Returns whether or not the lock is locked by anyone.

         This does a force load on the lock file.
         """
-        lock_info = self._safe_load(force=True)
+        lock_info = (
+            self.load(force=True, store_cache=False)
+            if propagate_errors
+            else self.safe_load(force=True, store_cache=False)
+        )
         if not lock_info:
             return False
         return time.time() < lock_info.expires_at
@@ -166,15 +176,16 @@ class LockManager(PydanticResourceManager[LockInfo]):
         """Returns whether or not the lock is currently acquired by this instance."""
         if self._cache is None or self._acquired_at is None:
             return False
-        return self._is_mine(self._cache) and not self._is_stale()
+
+        current_lock = self.safe_load(force=True, store_cache=False)
+        if current_lock is None:
+            return False
+
+        return self._is_mine(current_lock) and not self._is_stale()

     def ensure_can_write(self: Self) -> None:
         """Raise PermissionError if another process currently holds the lock."""
-        try:
-            lock_info = self.load(force=True, store_cache=False)
-        except Exception:
-            lock_info = None
-
+        lock_info = self.safe_load(force=True, store_cache=False)
         if (
             self.exists
             and lock_info is not None
@@ -196,9 +207,9 @@ class LockManager(PydanticResourceManager[LockInfo]):
         if not self.exists:
             if self.is_acquired():
                 self.release()
-            raise LockError("Cannot refresh: lock file does not exist")
+            raise LockNotFoundError("Cannot refresh: lock file does not exist")

-        lock_info = self._safe_load()
+        lock_info = self.safe_load()
         if not lock_info or not self._is_mine(lock_info):
             raise LockError(
                 "Cannot refresh: lock file is held by another process or host."
@@ -210,7 +221,7 @@ class LockManager(PydanticResourceManager[LockInfo]):
     def release(self: Self) -> None:
         """Release the lock."""
         if self.exists:
-            lock_info = self._safe_load()
+            lock_info = self.safe_load()
            if lock_info and self._is_mine(lock_info):
                with contextlib.suppress(ValueError):
                    self.path.unlink()
@@ -218,12 +229,15 @@ class LockManager(PydanticResourceManager[LockInfo]):
         self._acquired_at = None
         self._cache = None

-    def save(self: Self, data: LockInfo) -> None:
+    def save(
+        self: Self,
+        data: LockInfo,
+    ) -> None:
         """Save the lockfile in an NFS-atomic manner.

         This overrides save() from the Pydantic resource manager.
         """
-        lock_info = self._safe_load()
+        lock_info = self.safe_load()
         if not lock_info or not self._is_mine(lock_info):
             raise LockError(
                 "Failed to save lock: lock file is held by another process or host."
@@ -250,20 +264,22 @@ class LockManager(PydanticResourceManager[LockInfo]):
             and lock_info.acquired_at == self._acquired_at
         )

-    def _safe_load(self: Self, force: bool = False) -> LockInfo | None:
+    def safe_load(
+        self: Self, force: bool = False, store_cache: bool = False
+    ) -> LockInfo | None:
         """Load lock info, returning None if corrupted.

         Because this file does not exist in a static state, wrap around loading it.
         """
         try:
-            return self.load(force=force)
+            return self.load(force=force, store_cache=store_cache)
         except Exception:
             return None

     def _is_stale(self: Self, lock_info: LockInfo | None = None) -> bool:
         """Check if existing lock is stale (expired or process dead)."""
         if lock_info is None:
-            lock_info = self._safe_load()
+            lock_info = self.safe_load()

         if not lock_info:
             return True
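Editor's note: a minimal sketch (not from the package) of the reworked lock API. It assumes fmu_dir is an already-constructed ProjectFMUDirectory and, like the package's own tests, reaches the LockManager through the private _lock attribute.

    lock = fmu_dir._lock

    lock.acquire()
    print(lock.is_acquired())  # in 0.5.4 this re-reads the lock file from disk

    # is_locked() still treats an unreadable lock file as "not locked" by default,
    # but propagate_errors=True now lets the underlying parse error surface.
    print(lock.is_locked())
    try:
        lock.is_locked(propagate_errors=True)
    except ValueError:
        print("lock file could not be parsed")

    info = lock.safe_load(force=True)  # formerly _safe_load(); None if unreadable
    if info is not None:
        print(f"held by {info.user}@{info.hostname}")

    lock.release()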
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_resources/pydantic_resource_manager.py

@@ -22,6 +22,8 @@ MutablePydanticResource = TypeVar("MutablePydanticResource", bound=ResettableBas
 class PydanticResourceManager(Generic[PydanticResource]):
     """Base class for managing resources represented by Pydantic models."""

+    cache_enabled: bool = True
+
     def __init__(
         self: Self, fmu_dir: FMUDirectoryBase, model_class: type[PydanticResource]
     ) -> None:
@@ -99,15 +101,22 @@ class PydanticResourceManager(Generic[PydanticResource]):

         return self._cache

-    def save(self: Self, model: PydanticResource) -> None:
+    def save(
+        self: Self,
+        model: PydanticResource,
+    ) -> None:
         """Save the Pydantic model to disk.

         Args:
-            model: Validated Pydantic model instance
+            model: Validated Pydantic model instance.
         """
         self.fmu_dir._lock.ensure_can_write()
         json_data = model.model_dump_json(by_alias=True, indent=2)
         self.fmu_dir.write_text_file(self.relative_path, json_data)
+
+        if self.cache_enabled and self.exists:
+            self.fmu_dir.cache.store_revision(self.relative_path, json_data)
+
         self._cache = model

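Editor's note: a hedged sketch of how a PydanticResourceManager subclass might opt out of the new revision caching, mirroring LockManager's cache_enabled = False. The relative_path property and constructor signature below are assumptions inferred from the base-class save() shown above; the package's real subclass contract may differ.

    from pathlib import Path

    from pydantic import BaseModel

    from fmu.settings._fmu_dir import FMUDirectoryBase
    from fmu.settings._resources.pydantic_resource_manager import PydanticResourceManager


    class Note(BaseModel):
        """A hypothetical resource model."""

        text: str


    class NoteManager(PydanticResourceManager[Note]):
        """Hypothetical manager that skips .fmu/cache snapshots on save()."""

        cache_enabled = False  # save() then writes only the resource file itself

        def __init__(self, fmu_dir: FMUDirectoryBase) -> None:
            super().__init__(fmu_dir, Note)

        @property
        def relative_path(self) -> Path:  # assumed hook used by the base save()
            return Path("note.json")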
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu/settings/_version.py

@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID

-__version__ = version = '0.5.2'
-__version_tuple__ = version_tuple = (0, 5, 2)
+__version__ = version = '0.5.4'
+__version_tuple__ = version_tuple = (0, 5, 4)

-__commit_id__ = commit_id = 'g4490dbb9d'
+__commit_id__ = commit_id = 'g118a9d2da'
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fmu-settings
-Version: 0.5.2
+Version: 0.5.4
 Summary: A library for managing FMU settings
 Author-email: Equinor <fg-fmu_atlas@equinor.com>
 License: GPL-3.0
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/src/fmu_settings.egg-info/SOURCES.txt

@@ -19,6 +19,7 @@ src/fmu/settings/_version.py
 src/fmu/settings/py.typed
 src/fmu/settings/types.py
 src/fmu/settings/_resources/__init__.py
+src/fmu/settings/_resources/cache_manager.py
 src/fmu/settings/_resources/config_managers.py
 src/fmu/settings/_resources/lock_manager.py
 src/fmu/settings/_resources/pydantic_resource_manager.py
@@ -37,6 +38,7 @@ tests/conftest.py
 tests/test_fmu_dir.py
 tests/test_global_config.py
 tests/test_init.py
+tests/test_resources/test_cache_manager.py
 tests/test_resources/test_lock_manager.py
 tests/test_resources/test_project_config.py
 tests/test_resources/test_resource_managers.py
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_fmu_dir.py

@@ -110,6 +110,25 @@ def test_find_nearest_not_found(tmp_path: Path, monkeypatch: MonkeyPatch) -> Non
         ProjectFMUDirectory.find_nearest(tmp_path)


+def test_cache_property_returns_cached_manager(fmu_dir: ProjectFMUDirectory) -> None:
+    """Cache manager should be memoized and ready for use."""
+    cache = fmu_dir.cache
+
+    assert cache is fmu_dir.cache
+    assert fmu_dir._cache_manager is cache
+    assert cache.max_revisions == 5  # noqa: PLR2004
+
+
+def test_set_cache_max_revisions_updates_manager(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Changing retention should update the existing cache manager."""
+    cache = fmu_dir.cache
+    fmu_dir.cache_max_revisions = 7
+
+    assert cache.max_revisions == 7  # noqa: PLR2004
+
+
 def test_get_config_value(fmu_dir: ProjectFMUDirectory) -> None:
     """Tests get_config_value retrieves correctly from the config."""
     assert fmu_dir.get_config_value("version") == __version__
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_global_config.py

@@ -56,8 +56,8 @@ def test_validate_global_config_strict_model(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'model'."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        model=fields.Model(name=name, revision=""),
+    cfg = generate_strict_valid_globalconfiguration(
+        model=fields.Model(name=name, revision=""),  # type: ignore
     )
     if valid:
         validate_global_configuration_strictly(cfg)  # Does not raise
@@ -75,8 +75,8 @@ def test_validate_global_config_strict_access(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'access'."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        asset=fields.Asset(name=name),
+    cfg = generate_strict_valid_globalconfiguration(
+        asset=fields.Asset(name=name),  # type: ignore
     )
     if valid:
         validate_global_configuration_strictly(cfg)  # Does not raise
@@ -95,8 +95,8 @@ def test_validate_global_config_strict_smda_country_uuid(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.country' uuids."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        country_items=[
+    cfg = generate_strict_valid_globalconfiguration(
+        country_items=[  # type: ignore
             fields.CountryItem(identifier="bar", uuid=uuid),
             fields.CountryItem(identifier="foo", uuid=uuid4()),
         ],
@@ -118,8 +118,8 @@ def test_validate_global_config_strict_smda_discovery_identifier(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.discovery' identifiers."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        discovery_items=[
+    cfg = generate_strict_valid_globalconfiguration(
+        discovery_items=[  # type: ignore
             fields.DiscoveryItem(short_identifier=identifier, uuid=uuid4()),
             fields.DiscoveryItem(short_identifier="foo", uuid=uuid4()),
         ],
@@ -141,8 +141,8 @@ def test_validate_global_config_strict_smda_discovery_uuid(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.discovery' uuids."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        discovery_items=[
+    cfg = generate_strict_valid_globalconfiguration(
+        discovery_items=[  # type: ignore
             fields.DiscoveryItem(short_identifier="bar", uuid=uuid),
             fields.DiscoveryItem(short_identifier="foo", uuid=uuid4()),
         ],
@@ -164,8 +164,8 @@ def test_validate_global_config_strict_smda_field_identifier(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.discovery' identifiers."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        field_items=[
+    cfg = generate_strict_valid_globalconfiguration(
+        field_items=[  # type: ignore
             fields.FieldItem(identifier=identifier, uuid=uuid4()),
             fields.FieldItem(identifier="foo", uuid=uuid4()),
         ],
@@ -187,8 +187,8 @@ def test_validate_global_config_strict_smda_field_uuid(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.discovery' uuids."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        field_items=[
+    cfg = generate_strict_valid_globalconfiguration(
+        field_items=[  # type: ignore
             fields.FieldItem(identifier="bar", uuid=uuid),
             fields.FieldItem(identifier="foo", uuid=uuid4()),
         ],
@@ -210,8 +210,8 @@ def test_validate_global_config_strict_coordinate_system(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.coordinate_system'."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        coordinate_system=fields.CoordinateSystem(identifier="", uuid=uuid),
+    cfg = generate_strict_valid_globalconfiguration(
+        coordinate_system=fields.CoordinateSystem(identifier="", uuid=uuid),  # type: ignore
     )
     if valid:
         validate_global_configuration_strictly(cfg)  # Does not raise
@@ -230,8 +230,8 @@ def test_validate_global_config_strict_stratigraphic_column_uuids(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.stratigraphic_column' uuid."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        stratigraphic_column=fields.StratigraphicColumn(identifier="", uuid=uuid),
+    cfg = generate_strict_valid_globalconfiguration(
+        stratigraphic_column=fields.StratigraphicColumn(identifier="", uuid=uuid),  # type: ignore
     )
     if valid:
         validate_global_configuration_strictly(cfg)  # Does not raise
@@ -250,8 +250,8 @@ def test_validate_global_config_strict_stratigraphic_column_names(
     generate_strict_valid_globalconfiguration: Callable[[], GlobalConfiguration],
 ) -> None:
     """Tests strict validation on 'smda.stratigraphic_column' identifiers."""
-    cfg = generate_strict_valid_globalconfiguration(  # type: ignore
-        stratigraphic_column=fields.StratigraphicColumn(
+    cfg = generate_strict_valid_globalconfiguration(
+        stratigraphic_column=fields.StratigraphicColumn(  # type: ignore
             identifier=identifier, uuid=uuid4()
         ),
     )
fmu_settings-0.5.4/tests/test_resources/test_cache_manager.py (new file)

@@ -0,0 +1,116 @@
+"""Tests for the cache manager utilities."""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from fmu.settings._resources.cache_manager import _CACHEDIR_TAG_CONTENT, CacheManager
+
+if TYPE_CHECKING:
+    import pytest
+
+    from fmu.settings._fmu_dir import ProjectFMUDirectory
+
+
+def _read_snapshot_names(config_cache: Path) -> list[str]:
+    return sorted(p.name for p in config_cache.iterdir() if p.is_file())
+
+
+def test_cache_manager_list_revisions_without_directory(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Listing revisions on a missing cache dir yields an empty list."""
+    manager = CacheManager(fmu_dir)
+    assert manager.list_revisions("foo.json") == []
+
+
+def test_cache_manager_list_revisions_with_existing_snapshots(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Listing revisions returns sorted snapshot paths."""
+    manager = CacheManager(fmu_dir)
+    manager.store_revision("foo.json", "one")
+    manager.store_revision("foo.json", "two")
+    revisions = manager.list_revisions("foo.json")
+    assert [path.name for path in revisions] == sorted(path.name for path in revisions)
+    assert len(revisions) == 2  # noqa: PLR2004
+
+
+def test_cache_manager_honours_existing_cachedir_tag(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Existing cachedir tags are preserved when storing revisions."""
+    cache_root = fmu_dir.path / "cache"
+    cache_root.mkdir(exist_ok=True)
+    tag_path = cache_root / "CACHEDIR.TAG"
+    tag_path.write_text("custom tag", encoding="utf-8")
+
+    manager = CacheManager(fmu_dir)
+    manager.store_revision("foo.json", '{"foo": "bar"}')
+
+    assert tag_path.read_text(encoding="utf-8") == "custom tag"
+
+
+def test_cache_manager_cache_root_helpers_create_tag(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Cache root helpers return consistent paths and create cachedir tags."""
+    manager = CacheManager(fmu_dir)
+    root = manager._cache_root_path(create=False)
+    assert root == fmu_dir.get_file_path("cache")
+
+    created = manager._cache_root_path(create=True)
+    assert created == root
+
+    tag_path = created / "CACHEDIR.TAG"
+    assert tag_path.is_file()
+    assert tag_path.read_text(encoding="utf-8") == _CACHEDIR_TAG_CONTENT
+
+
+def test_cache_manager_uses_default_extension_for_suffixless_paths(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Files without suffix get '.txt' snapshots."""
+    manager = CacheManager(fmu_dir)
+    snapshot = manager.store_revision("logs/entry", "payload")
+    assert snapshot is not None
+    assert snapshot.suffix == ".txt"
+    assert snapshot.read_text(encoding="utf-8") == "payload"
+
+
+def test_cache_manager_trim_handles_missing_files(
+    fmu_dir: ProjectFMUDirectory,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Trimming gracefully handles concurrent removals."""
+    manager = CacheManager(fmu_dir, max_revisions=1)
+    manager.store_revision("foo.json", "first")
+
+    original_unlink = Path.unlink
+
+    def flaky_unlink(self: Path, *, missing_ok: bool = False) -> None:
+        if self.name.endswith(".json") and not getattr(flaky_unlink, "raised", False):
+            flaky_unlink.raised = True  # type: ignore[attr-defined]
+            original_unlink(self, missing_ok=missing_ok)
+            raise FileNotFoundError
+        original_unlink(self, missing_ok=missing_ok)
+
+    monkeypatch.setattr(Path, "unlink", flaky_unlink)
+
+    manager.store_revision("foo.json", "second")
+
+    config_cache = fmu_dir.path / "cache" / "foo"
+    assert getattr(flaky_unlink, "raised", False) is True
+    assert len(_read_snapshot_names(config_cache)) == 1
+
+
+def test_cache_manager_max_revisions_zero_skips_storage(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Storing with zero retention should return None and create nothing."""
+    manager = CacheManager(fmu_dir, max_revisions=0)
+    result = manager.store_revision("foo.json", "data")
+    assert result is None
+    cache_dir = fmu_dir.path / "cache" / "foo"
+    assert not cache_dir.exists()
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_resources/test_lock_manager.py

@@ -19,6 +19,7 @@ from fmu.settings._resources.lock_manager import (
     DEFAULT_LOCK_TIMEOUT,
     LockError,
     LockManager,
+    LockNotFoundError,
 )
 from fmu.settings.models.lock_info import LockInfo

@@ -287,7 +288,7 @@ def test_is_stale_load_fails(fmu_dir: ProjectFMUDirectory) -> None:
     """Tests is_stale if loading the lock file fails."""
     lock = LockManager(fmu_dir)
     lock.acquire()
-    with patch.object(lock, "_safe_load", return_value=None):
+    with patch.object(lock, "safe_load", return_value=None):
         assert lock._is_stale() is True


@@ -414,6 +415,18 @@ def test_is_locked_by_other_process(
     assert lock.is_locked() is False


+def test_is_locked_propagate_errors(
+    fmu_dir: ProjectFMUDirectory, monkeypatch: MonkeyPatch
+) -> None:
+    """Tests that load with propagate errors raises."""
+    lock = LockManager(fmu_dir)
+    lock.path.write_text("a")
+    assert lock.is_locked() is False
+
+    with pytest.raises(ValueError, match="Invalid JSON"):
+        assert lock.is_locked(propagate_errors=True) is False
+
+
 def test_is_acquired_expected(
     fmu_dir: ProjectFMUDirectory, monkeypatch: MonkeyPatch
 ) -> None:
@@ -493,15 +506,21 @@ def test_refresh_works_as_expected(
 def test_refresh_without_lock_file(
     fmu_dir: ProjectFMUDirectory, monkeypatch: MonkeyPatch
 ) -> None:
-    """Tests refresh when lock file is not present."""
-    lock = LockManager(fmu_dir)
-    with pytest.raises(LockError, match="does not exist"):
-        lock.refresh()
+    """Tests that if a user deletes anothers lock it's invalidated on a refresh."""
+    with pytest.raises(LockNotFoundError, match="does not exist"):
+        fmu_dir._lock.refresh()

-    lock.acquire()
-    lock.path.unlink()  # It was deleted or something.
-    with pytest.raises(LockError, match="does not exist"):
-        lock.refresh()
+    fmu_dir._lock.acquire()
+    assert fmu_dir._lock.is_acquired() is True
+
+    # Someone deletes the lock
+    fmu_dir._lock.path.unlink()
+
+    with pytest.raises(
+        LockNotFoundError, match="Cannot refresh: lock file does not exist"
+    ):
+        fmu_dir._lock.refresh()
+    assert fmu_dir._lock.is_acquired() is False


 def test_refresh_without_owning_lock(
@@ -533,11 +552,11 @@ def test_safe_load(fmu_dir: ProjectFMUDirectory, monkeypatch: MonkeyPatch) -> No
     lock = LockManager(fmu_dir)
     lock.acquire()
     assert lock._cache is not None
-    assert lock._safe_load() == lock._cache
+    assert lock.safe_load() == lock._cache

     lock.release()
     lock.path.write_text("a")
-    assert lock._safe_load() is None
+    assert lock.safe_load() is None


 def test_save_expected(fmu_dir: ProjectFMUDirectory, monkeypatch: MonkeyPatch) -> None:
@@ -597,7 +616,7 @@ def test_ensure_can_write_invalid_lock(fmu_dir: ProjectFMUDirectory) -> None:
     """Tests ensure_can_write ignores unreadable lock info."""
     lock = LockManager(fmu_dir)
     lock.path.write_text("garbage")
-    with patch.object(lock, "_safe_load", return_value=None):
+    with patch.object(lock, "safe_load", return_value=None):
         lock.ensure_can_write()


@@ -622,7 +641,7 @@ def test_ensure_can_write_stale_lock(fmu_dir: ProjectFMUDirectory) -> None:
     )
     lock.path.write_text(lock_info.model_dump_json(indent=2))
     with (
-        patch.object(lock, "_safe_load", return_value=lock_info),
+        patch.object(lock, "safe_load", return_value=lock_info),
         patch.object(lock, "is_acquired", return_value=False),
         patch.object(lock, "_is_stale", return_value=True),
     ):
@@ -649,18 +668,3 @@ def test_ensure_can_write_foreign_lock(fmu_dir: ProjectFMUDirectory) -> None:
         pytest.raises(PermissionError, match="Cannot write to .fmu directory"),
     ):
         lock.ensure_can_write()
-
-
-def test_manual_delete_invalidates_lock_file_on_refresh(
-    fmu_dir: ProjectFMUDirectory,
-) -> None:
-    """Tests that if a user deletes anothers lock it's invalidated on a refresh."""
-    fmu_dir._lock.acquire()
-    assert fmu_dir._lock.is_acquired() is True
-
-    # Someone deletes the lock
-    fmu_dir._lock.path.unlink()
-
-    with pytest.raises(LockError, match="Cannot refresh: lock file does not exist"):
-        fmu_dir._lock.refresh()
-    assert fmu_dir._lock.is_acquired() is False
{fmu_settings-0.5.2 → fmu_settings-0.5.4}/tests/test_resources/test_resource_managers.py

@@ -1,6 +1,7 @@
 """Tests for fmu.settings.resources.managers."""

 import json
+import shutil
 from pathlib import Path
 from typing import Self
 from unittest.mock import patch
@@ -9,6 +10,7 @@ import pytest
 from pydantic import BaseModel

 from fmu.settings._fmu_dir import ProjectFMUDirectory
+from fmu.settings._resources.cache_manager import CacheManager
 from fmu.settings._resources.lock_manager import LockManager
 from fmu.settings._resources.pydantic_resource_manager import PydanticResourceManager

@@ -184,3 +186,101 @@ def test_pydantic_resource_manager_loads_invalid_model(
         ValueError, match=r"Invalid content in resource file[\s\S]*input_value=0"
     ):
         a.load(force=True)
+
+
+def test_pydantic_resource_manager_save_does_not_cache_when_disabled(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Saving without cache enabled should not create cache artifacts."""
+    original_default = AManager.cache_enabled
+    AManager.cache_enabled = False
+    cache_root = fmu_dir.path / "cache"
+    try:
+        if cache_root.exists():
+            shutil.rmtree(cache_root)
+        a = AManager(fmu_dir)
+        a.save(A(foo="bar"))
+    finally:
+        AManager.cache_enabled = original_default
+
+    assert not cache_root.exists()
+
+
+def test_pydantic_resource_manager_save_stores_revision_when_enabled(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Saving with cache enabled should persist a revision snapshot."""
+    a = AManager(fmu_dir)
+    model = A(foo="bar")
+    a.save(model)
+
+    cache_root = fmu_dir.path / "cache"
+    assert cache_root.is_dir()
+    tag_path = cache_root / "CACHEDIR.TAG"
+    assert tag_path.read_text(encoding="utf-8").startswith(
+        "Signature: 8a477f597d28d172789f06886806bc55"
+    )
+
+    config_cache = cache_root / "foo"
+    snapshots = list(config_cache.iterdir())
+    assert len(snapshots) == 1
+    snapshot = snapshots[0]
+    assert snapshot.suffix == ".json"
+    assert json.loads(snapshot.read_text(encoding="utf-8")) == model.model_dump()
+
+
+def test_pydantic_resource_manager_revision_cache_trims_excess(
+    fmu_dir: ProjectFMUDirectory, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Revision caching should retain only the configured number of snapshots."""
+    original_limit = fmu_dir.cache_max_revisions
+    fmu_dir.cache_max_revisions = 2
+    try:
+        sequence = iter(["rev1.json", "rev2.json", "rev3.json"])
+        monkeypatch.setattr(
+            CacheManager,
+            "_snapshot_filename",
+            lambda self, config_file_path: next(sequence),
+        )
+
+        a = AManager(fmu_dir)
+        a.save(A(foo="one"))
+        a.save(A(foo="two"))
+        a.save(A(foo="three"))
+    finally:
+        fmu_dir.cache_max_revisions = original_limit
+
+    config_cache = fmu_dir.path / "cache" / "foo"
+    snapshots = sorted(p.name for p in config_cache.iterdir())
+    assert snapshots == ["rev2.json", "rev3.json"]
+
+    assert (
+        json.loads((config_cache / "rev3.json").read_text(encoding="utf-8"))["foo"]
+        == "three"
+    )
+
+
+def test_pydantic_resource_manager_respects_retention_setting(
+    fmu_dir: ProjectFMUDirectory,
+) -> None:
+    """Saving uses the cache manager retention setting."""
+    original_limit = fmu_dir.cache_max_revisions
+    fmu_dir.cache_max_revisions = 3
+    try:
+        a = AManager(fmu_dir)
+        a.save(A(foo="one"))
+        a.save(A(foo="two"))
+        a.save(A(foo="three"))
+        a.save(A(foo="four"))
+    finally:
+        fmu_dir.cache_max_revisions = original_limit
+
+    config_cache = fmu_dir.path / "cache" / "foo"
+    snapshots = sorted(p.name for p in config_cache.iterdir())
+    assert len(snapshots) == 3  # noqa: PLR2004
+
+    contents = [
+        json.loads((config_cache / name).read_text(encoding="utf-8"))["foo"]
+        for name in snapshots
+    ]
+    assert contents == ["two", "three", "four"]