fmu-settings 0.5.2__py3-none-any.whl → 0.14.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fmu/settings/_fmu_dir.py +251 -19
- fmu/settings/_init.py +19 -32
- fmu/settings/_readme_texts.py +34 -0
- fmu/settings/_resources/cache_manager.py +185 -0
- fmu/settings/_resources/changelog_manager.py +157 -0
- fmu/settings/_resources/config_managers.py +17 -0
- fmu/settings/_resources/lock_manager.py +33 -17
- fmu/settings/_resources/log_manager.py +98 -0
- fmu/settings/_resources/pydantic_resource_manager.py +173 -27
- fmu/settings/_resources/user_session_log_manager.py +47 -0
- fmu/settings/_version.py +2 -2
- fmu/settings/models/_enums.py +18 -0
- fmu/settings/models/change_info.py +37 -0
- fmu/settings/models/event_info.py +15 -0
- fmu/settings/models/log.py +63 -0
- fmu/settings/models/project_config.py +58 -4
- fmu/settings/models/user_config.py +5 -0
- {fmu_settings-0.5.2.dist-info → fmu_settings-0.14.1.dist-info}/METADATA +3 -1
- fmu_settings-0.14.1.dist-info/RECORD +32 -0
- fmu_settings-0.5.2.dist-info/RECORD +0 -24
- {fmu_settings-0.5.2.dist-info → fmu_settings-0.14.1.dist-info}/WHEEL +0 -0
- {fmu_settings-0.5.2.dist-info → fmu_settings-0.14.1.dist-info}/licenses/LICENSE +0 -0
- {fmu_settings-0.5.2.dist-info → fmu_settings-0.14.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import socket
|
|
5
|
+
from datetime import UTC, datetime
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import TYPE_CHECKING, Any, Self
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
|
|
11
|
+
from fmu.settings._resources.log_manager import LogManager
|
|
12
|
+
from fmu.settings.models._enums import ChangeType, FilterType
|
|
13
|
+
from fmu.settings.models.change_info import ChangeInfo
|
|
14
|
+
from fmu.settings.models.log import Filter, Log, LogFileName
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
# Avoid circular dependency for type hint in __init__ only
|
|
18
|
+
from fmu.settings._fmu_dir import (
|
|
19
|
+
FMUDirectoryBase,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class ChangelogManager(LogManager[ChangeInfo]):
    """Manages the .fmu changelog file."""

    def __init__(self: Self, fmu_dir: FMUDirectoryBase) -> None:
        """Initializes the Change log resource manager."""
        super().__init__(fmu_dir, Log[ChangeInfo])

    @property
    def relative_path(self: Self) -> Path:
        """Returns the relative path to the log file."""
        return Path("logs") / LogFileName.changelog

    @staticmethod
    def _stringify(value: Any) -> str:
        """Render a value for a changelog message, dumping Pydantic models."""
        return str(value.model_dump()) if isinstance(value, BaseModel) else str(value)

    def log_update_to_changelog(
        self: Self,
        updates: dict[str, Any],
        old_resource_dict: dict[str, Any],
        relative_path: Path,
    ) -> None:
        """Logs the update of a resource to the changelog.

        Each key in ``updates`` produces one entry: an ``update`` entry when
        the key was present in ``old_resource_dict``, otherwise an ``add``
        entry. Dot-notation keys are resolved through nested dictionaries.
        """
        # Sentinel distinguishes "key absent" from a legitimate None value.
        _MISSING_KEY = object()
        for key, new_value in updates.items():
            if "." in key:
                old_value = self._get_dot_notation_key(
                    resource_dict=old_resource_dict, key=key, default=_MISSING_KEY
                )
            else:
                old_value = old_resource_dict.get(key, _MISSING_KEY)

            # Identity check: sentinel membership must never go through __eq__,
            # which arbitrary (e.g. Pydantic) values may override.
            if old_value is not _MISSING_KEY:
                change_type = ChangeType.update
                change_string = (
                    f"Updated field '{key}'. Old value: {self._stringify(old_value)}"
                    f" -> New value: {self._stringify(new_value)}"
                )
            else:
                change_type = ChangeType.add
                change_string = (
                    f"Added field '{key}'. New value: {self._stringify(new_value)}"
                )

            change_entry = ChangeInfo(
                timestamp=datetime.now(UTC),
                change_type=change_type,
                user=os.getenv("USER", "unknown"),
                path=self.fmu_dir.path,
                change=change_string,
                hostname=socket.gethostname(),
                file=str(relative_path),
                key=key,
            )
            self.add_log_entry(change_entry)

    def log_merge_to_changelog(
        self: Self, source_path: Path, incoming_path: Path, merged_resources: list[str]
    ) -> None:
        """Logs a change entry with merge details to the changelog."""
        resources_string = ", ".join(f"'{resource}'" for resource in merged_resources)
        change_string = (
            f"Merged resources {resources_string} from "
            f"'{incoming_path}' into '{source_path}'."
        )
        self.add_log_entry(
            ChangeInfo(
                timestamp=datetime.now(UTC),
                change_type=ChangeType.merge,
                user=os.getenv("USER", "unknown"),
                path=source_path,
                change=change_string,
                hostname=socket.gethostname(),
                file=resources_string,
                key=".fmu",
            )
        )

    def _get_latest_change_timestamp(self: Self) -> datetime:
        """Get the timestamp of the latest change entry in the changelog.

        Raises IndexError if the changelog file exists but holds no entries.
        """
        return self.load()[-1].timestamp

    def get_changelog_diff(
        self: Self, incoming_changelog: ChangelogManager
    ) -> Log[ChangeInfo]:
        """Get new entries from the incoming changelog.

        All log entries from the incoming changelog newer than the
        log entries in the current changelog are returned.

        Raises:
            FileNotFoundError: If either changelog resource does not exist.
        """
        # Guard clause: both changelog files must exist before diffing.
        if not (self.exists and incoming_changelog.exists):
            raise FileNotFoundError(
                "Changelog resources to diff must exist in both directories: "
                f"Current changelog resource exists: {self.exists}. "
                f"Incoming changelog resource exists: {incoming_changelog.exists}."
            )
        starting_point = self._get_latest_change_timestamp()
        return incoming_changelog.filter_log(
            Filter(
                field_name="timestamp",
                filter_value=str(starting_point),
                filter_type=FilterType.date,
                operator=">",
            )
        )

    def merge_changelog(
        self: Self, incoming_changelog: ChangelogManager
    ) -> Log[ChangeInfo]:
        """Add new entries from the incoming changelog to the current changelog.

        All log entries from the incoming changelog newer than the
        log entries in the current changelog are added.
        """
        new_log_entries = self.get_changelog_diff(incoming_changelog)
        self.merge_changes(new_log_entries.root)
        return self.load()

    def merge_changes(self: Self, change: list[ChangeInfo]) -> Log[ChangeInfo]:
        """Merge a list of changes into the current changelog.

        All log entries in the change object are added to the changelog,
        and the reloaded log is returned.
        """
        for entry in change:
            self.add_log_entry(entry)
        return self.load()
|
|
@@ -2,6 +2,8 @@
|
|
|
2
2
|
|
|
3
3
|
from __future__ import annotations
|
|
4
4
|
|
|
5
|
+
import getpass
|
|
6
|
+
from datetime import UTC, datetime
|
|
5
7
|
from pathlib import Path
|
|
6
8
|
from typing import TYPE_CHECKING, Final, Self
|
|
7
9
|
|
|
@@ -35,6 +37,14 @@ class ProjectConfigManager(MutablePydanticResourceManager[ProjectConfig]):
|
|
|
35
37
|
"""Returns the relative path to the config file."""
|
|
36
38
|
return Path("config.json")
|
|
37
39
|
|
|
40
|
+
def save(self: Self, model: ProjectConfig) -> None:
    """Persist the ProjectConfig, stamping who changed it and when."""
    stamped = model.model_dump()
    stamped.update(
        last_modified_at=datetime.now(UTC),
        last_modified_by=getpass.getuser(),
    )
    # Re-validate so the stamped dict round-trips through the model.
    super().save(ProjectConfig.model_validate(stamped))
|
|
47
|
+
|
|
38
48
|
|
|
39
49
|
class UserConfigManager(MutablePydanticResourceManager[UserConfig]):
|
|
40
50
|
"""Manages the .fmu configuration file in a user's home directory."""
|
|
@@ -47,3 +57,10 @@ class UserConfigManager(MutablePydanticResourceManager[UserConfig]):
|
|
|
47
57
|
def relative_path(self: Self) -> Path:
|
|
48
58
|
"""Returns the relative path to the config file."""
|
|
49
59
|
return Path("config.json")
|
|
60
|
+
|
|
61
|
+
def save(self: Self, model: UserConfig) -> None:
    """Persist the UserConfig, refreshing its last_modified_at stamp."""
    data = model.model_dump()
    data["last_modified_at"] = datetime.now(UTC)
    # Re-validate so the stamped dict round-trips through the model.
    super().save(UserConfig.model_validate(data))
|
|
@@ -32,9 +32,15 @@ class LockError(Exception):
|
|
|
32
32
|
"""Raised when the lock cannot be acquired."""
|
|
33
33
|
|
|
34
34
|
|
|
35
|
+
class LockNotFoundError(FileNotFoundError):
    """Raised when a lock file that was expected to exist is missing."""
|
|
37
|
+
|
|
38
|
+
|
|
35
39
|
class LockManager(PydanticResourceManager[LockInfo]):
|
|
36
40
|
"""Manages the .lock file."""
|
|
37
41
|
|
|
42
|
+
automatic_caching: bool = False
|
|
43
|
+
|
|
38
44
|
def __init__(
|
|
39
45
|
self: Self,
|
|
40
46
|
fmu_dir: FMUDirectoryBase,
|
|
@@ -82,7 +88,7 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
82
88
|
return
|
|
83
89
|
|
|
84
90
|
if not wait:
|
|
85
|
-
lock_info = self.
|
|
91
|
+
lock_info = self.safe_load()
|
|
86
92
|
if lock_info:
|
|
87
93
|
raise LockError(
|
|
88
94
|
f"Lock file is held by {lock_info.user}@{lock_info.hostname} "
|
|
@@ -152,12 +158,16 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
152
158
|
with contextlib.suppress(OSError):
|
|
153
159
|
temp_path.unlink()
|
|
154
160
|
|
|
155
|
-
def is_locked(self: Self) -> bool:
|
|
161
|
+
def is_locked(self: Self, *, propagate_errors: bool = False) -> bool:
    """Report whether anyone currently holds an unexpired lock.

    Always force-reloads the lock file from disk without caching. With
    ``propagate_errors`` set, load failures raise; otherwise an unreadable
    lock file is treated as unlocked.
    """
    if propagate_errors:
        lock_info = self.load(force=True, store_cache=False)
    else:
        lock_info = self.safe_load(force=True, store_cache=False)
    if not lock_info:
        return False
    # Locked only while the recorded expiry lies in the future.
    return time.time() < lock_info.expires_at
|
|
@@ -166,15 +176,16 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
166
176
|
"""Returns whether or not the lock is currently acquired by this instance."""
|
|
167
177
|
if self._cache is None or self._acquired_at is None:
|
|
168
178
|
return False
|
|
169
|
-
|
|
179
|
+
|
|
180
|
+
current_lock = self.safe_load(force=True, store_cache=False)
|
|
181
|
+
if current_lock is None:
|
|
182
|
+
return False
|
|
183
|
+
|
|
184
|
+
return self._is_mine(current_lock) and not self._is_stale()
|
|
170
185
|
|
|
171
186
|
def ensure_can_write(self: Self) -> None:
|
|
172
187
|
"""Raise PermissionError if another process currently holds the lock."""
|
|
173
|
-
|
|
174
|
-
lock_info = self.load(force=True, store_cache=False)
|
|
175
|
-
except Exception:
|
|
176
|
-
lock_info = None
|
|
177
|
-
|
|
188
|
+
lock_info = self.safe_load(force=True, store_cache=False)
|
|
178
189
|
if (
|
|
179
190
|
self.exists
|
|
180
191
|
and lock_info is not None
|
|
@@ -196,9 +207,9 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
196
207
|
if not self.exists:
|
|
197
208
|
if self.is_acquired():
|
|
198
209
|
self.release()
|
|
199
|
-
raise
|
|
210
|
+
raise LockNotFoundError("Cannot refresh: lock file does not exist")
|
|
200
211
|
|
|
201
|
-
lock_info = self.
|
|
212
|
+
lock_info = self.safe_load()
|
|
202
213
|
if not lock_info or not self._is_mine(lock_info):
|
|
203
214
|
raise LockError(
|
|
204
215
|
"Cannot refresh: lock file is held by another process or host."
|
|
@@ -210,7 +221,7 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
210
221
|
def release(self: Self) -> None:
|
|
211
222
|
"""Release the lock."""
|
|
212
223
|
if self.exists:
|
|
213
|
-
lock_info = self.
|
|
224
|
+
lock_info = self.safe_load()
|
|
214
225
|
if lock_info and self._is_mine(lock_info):
|
|
215
226
|
with contextlib.suppress(ValueError):
|
|
216
227
|
self.path.unlink()
|
|
@@ -218,12 +229,15 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
218
229
|
self._acquired_at = None
|
|
219
230
|
self._cache = None
|
|
220
231
|
|
|
221
|
-
def save(
|
|
232
|
+
def save(
|
|
233
|
+
self: Self,
|
|
234
|
+
data: LockInfo,
|
|
235
|
+
) -> None:
|
|
222
236
|
"""Save the lockfile in an NFS-atomic manner.
|
|
223
237
|
|
|
224
238
|
This overrides save() from the Pydantic resource manager.
|
|
225
239
|
"""
|
|
226
|
-
lock_info = self.
|
|
240
|
+
lock_info = self.safe_load()
|
|
227
241
|
if not lock_info or not self._is_mine(lock_info):
|
|
228
242
|
raise LockError(
|
|
229
243
|
"Failed to save lock: lock file is held by another process or host."
|
|
@@ -250,20 +264,22 @@ class LockManager(PydanticResourceManager[LockInfo]):
|
|
|
250
264
|
and lock_info.acquired_at == self._acquired_at
|
|
251
265
|
)
|
|
252
266
|
|
|
253
|
-
def
|
|
267
|
+
def safe_load(
    self: Self, force: bool = False, store_cache: bool = False
) -> LockInfo | None:
    """Load the lock info, returning None when it cannot be read.

    The lock file can change or disappear at any moment, so every load
    failure is deliberately treated as "no usable lock".
    """
    try:
        lock_info = self.load(force=force, store_cache=store_cache)
    except Exception:
        return None
    return lock_info
|
|
262
278
|
|
|
263
279
|
def _is_stale(self: Self, lock_info: LockInfo | None = None) -> bool:
|
|
264
280
|
"""Check if existing lock is stale (expired or process dead)."""
|
|
265
281
|
if lock_info is None:
|
|
266
|
-
lock_info = self.
|
|
282
|
+
lock_info = self.safe_load()
|
|
267
283
|
|
|
268
284
|
if not lock_info:
|
|
269
285
|
return True
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, Generic, Self
|
|
4
|
+
|
|
5
|
+
import pandas as pd
|
|
6
|
+
from pydantic import ValidationError
|
|
7
|
+
|
|
8
|
+
from fmu.settings._resources.pydantic_resource_manager import PydanticResourceManager
|
|
9
|
+
from fmu.settings.models._enums import FilterType
|
|
10
|
+
from fmu.settings.models.log import Filter, Log, LogEntryType
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
# Avoid circular dependency for type hint in __init__ only
|
|
14
|
+
from fmu.settings._fmu_dir import (
|
|
15
|
+
FMUDirectoryBase,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class LogManager(PydanticResourceManager[Log[LogEntryType]], Generic[LogEntryType]):
    """Manages the .fmu log files."""

    # Logs are append-heavy; skip the resource manager's automatic caching.
    automatic_caching: bool = False

    def __init__(
        self: Self, fmu_dir: FMUDirectoryBase, model_class: type[Log[LogEntryType]]
    ) -> None:
        """Initializes the log resource manager."""
        # Lazily-built DataFrame view of the log, invalidated on every write.
        self._cached_dataframe: pd.DataFrame | None = None
        super().__init__(fmu_dir, model_class)

    def add_log_entry(self: Self, log_entry: LogEntryType) -> None:
        """Adds a log entry to the log resource.

        Raises:
            ValueError: If the entry fails validation against the log's
                entry model.
        """
        try:
            # Round-trip through model_validate to normalize the entry.
            validated_entry = log_entry.model_validate(log_entry.model_dump())
        except ValidationError as e:
            raise ValueError(
                f"Invalid log entry added to '{self.model_class.__name__}' with "
                f"value '{log_entry}': {e}"
            ) from e
        # Load/save failures propagate unchanged; only entry validation is
        # reported as an invalid-entry error.
        log_model: Log[LogEntryType] = (
            self.load() if self.exists else self.model_class([])
        )
        log_model.add_entry(validated_entry)
        self.save(log_model)
        # The cached DataFrame no longer reflects the log on disk.
        self._cached_dataframe = None

    def filter_log(self: Self, filter: Filter) -> Log[LogEntryType]:
        """Filters the log resource with the provided filter.

        Raises:
            ValueError: If the operator is unsupported, or an ordering
                operator is applied to a text field.
        """
        if self._cached_dataframe is None:
            log_model: Log[LogEntryType] = self.load()
            self._cached_dataframe = pd.DataFrame(
                [entry.model_dump() for entry in log_model]
            )
        df_log = self._cached_dataframe

        # Text fields only support (in)equality comparisons.
        if filter.filter_type == FilterType.text and filter.operator not in {
            "==",
            "!=",
        }:
            raise ValueError(
                f"Invalid filter operator {filter.operator} applied to "
                f"'{FilterType.text}' field {filter.field_name} when filtering "
                f"log resource {self.model_class.__name__} "
                f"with value {filter.filter_value}."
            )

        # One comparator per supported operator; applied to the pandas column.
        comparators = {
            "==": lambda column, value: column == value,
            "!=": lambda column, value: column != value,
            "<=": lambda column, value: column <= value,
            "<": lambda column, value: column < value,
            ">=": lambda column, value: column >= value,
            ">": lambda column, value: column > value,
        }
        compare = comparators.get(filter.operator)
        if compare is None:
            raise ValueError(
                "Invalid filter operator applied when "
                f"filtering log resource {self.model_class.__name__} "
            )
        filtered_df = df_log[
            compare(df_log[filter.field_name], filter.parse_filter_value())
        ]
        return self.model_class.model_validate(filtered_df.to_dict("records"))
|