ert 17.1.9__py3-none-any.whl → 18.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _ert/events.py +19 -2
- ert/__main__.py +8 -7
- ert/analysis/_update_commons.py +12 -3
- ert/cli/main.py +6 -3
- ert/cli/monitor.py +7 -0
- ert/config/__init__.py +13 -3
- ert/config/_create_observation_dataframes.py +60 -12
- ert/config/_observations.py +14 -1
- ert/config/_read_summary.py +8 -6
- ert/config/ensemble_config.py +6 -14
- ert/config/ert_config.py +19 -13
- ert/config/{everest_objective_config.py → everest_response.py} +23 -12
- ert/config/ext_param_config.py +133 -1
- ert/config/field.py +12 -8
- ert/config/forward_model_step.py +108 -6
- ert/config/gen_data_config.py +2 -6
- ert/config/gen_kw_config.py +0 -9
- ert/config/known_response_types.py +14 -0
- ert/config/parameter_config.py +0 -17
- ert/config/parsing/config_keywords.py +1 -0
- ert/config/parsing/config_schema.py +12 -0
- ert/config/parsing/config_schema_deprecations.py +11 -0
- ert/config/parsing/config_schema_item.py +1 -1
- ert/config/queue_config.py +4 -4
- ert/config/response_config.py +0 -7
- ert/config/rft_config.py +230 -0
- ert/config/summary_config.py +2 -6
- ert/config/violations.py +0 -0
- ert/config/workflow_fixtures.py +2 -1
- ert/dark_storage/client/__init__.py +2 -2
- ert/dark_storage/client/_session.py +4 -4
- ert/dark_storage/client/client.py +2 -2
- ert/dark_storage/compute/misfits.py +7 -6
- ert/dark_storage/endpoints/compute/misfits.py +2 -2
- ert/dark_storage/endpoints/observations.py +4 -4
- ert/dark_storage/endpoints/responses.py +15 -1
- ert/ensemble_evaluator/__init__.py +8 -1
- ert/ensemble_evaluator/evaluator.py +81 -29
- ert/ensemble_evaluator/event.py +6 -0
- ert/ensemble_evaluator/snapshot.py +3 -1
- ert/ensemble_evaluator/state.py +1 -0
- ert/field_utils/__init__.py +8 -0
- ert/field_utils/field_utils.py +211 -1
- ert/gui/ertwidgets/__init__.py +23 -16
- ert/gui/ertwidgets/analysismoduleedit.py +2 -2
- ert/gui/ertwidgets/checklist.py +1 -1
- ert/gui/ertwidgets/create_experiment_dialog.py +3 -1
- ert/gui/ertwidgets/ensembleselector.py +2 -2
- ert/gui/ertwidgets/models/__init__.py +2 -0
- ert/gui/ertwidgets/models/activerealizationsmodel.py +2 -1
- ert/gui/ertwidgets/models/path_model.py +1 -1
- ert/gui/ertwidgets/models/targetensemblemodel.py +2 -1
- ert/gui/ertwidgets/models/text_model.py +1 -1
- ert/gui/ertwidgets/searchbox.py +13 -4
- ert/gui/{suggestor → ertwidgets/suggestor}/_suggestor_message.py +13 -4
- ert/gui/main.py +11 -6
- ert/gui/main_window.py +1 -2
- ert/gui/simulation/ensemble_experiment_panel.py +1 -1
- ert/gui/simulation/ensemble_information_filter_panel.py +1 -1
- ert/gui/simulation/ensemble_smoother_panel.py +1 -1
- ert/gui/simulation/evaluate_ensemble_panel.py +1 -1
- ert/gui/simulation/experiment_panel.py +1 -1
- ert/gui/simulation/manual_update_panel.py +31 -8
- ert/gui/simulation/multiple_data_assimilation_panel.py +12 -8
- ert/gui/simulation/run_dialog.py +25 -4
- ert/gui/simulation/single_test_run_panel.py +2 -2
- ert/gui/summarypanel.py +1 -1
- ert/gui/tools/load_results/load_results_panel.py +1 -1
- ert/gui/tools/manage_experiments/storage_info_widget.py +7 -7
- ert/gui/tools/manage_experiments/storage_widget.py +1 -2
- ert/gui/tools/plot/plot_api.py +13 -10
- ert/gui/tools/plot/plot_window.py +12 -0
- ert/gui/tools/plot/plottery/plot_config.py +2 -0
- ert/gui/tools/plot/plottery/plot_context.py +14 -0
- ert/gui/tools/plot/plottery/plots/ensemble.py +9 -2
- ert/gui/tools/plot/plottery/plots/statistics.py +59 -19
- ert/mode_definitions.py +2 -0
- ert/plugins/__init__.py +0 -1
- ert/plugins/hook_implementations/workflows/gen_data_rft_export.py +10 -2
- ert/plugins/hook_specifications/__init__.py +0 -2
- ert/plugins/hook_specifications/jobs.py +0 -9
- ert/plugins/plugin_manager.py +2 -33
- ert/resources/shell_scripts/delete_directory.py +2 -2
- ert/run_models/__init__.py +18 -5
- ert/run_models/_create_run_path.py +33 -21
- ert/run_models/ensemble_experiment.py +10 -4
- ert/run_models/ensemble_information_filter.py +8 -1
- ert/run_models/ensemble_smoother.py +9 -3
- ert/run_models/evaluate_ensemble.py +8 -6
- ert/run_models/event.py +7 -3
- ert/run_models/everest_run_model.py +155 -44
- ert/run_models/initial_ensemble_run_model.py +23 -22
- ert/run_models/manual_update.py +4 -2
- ert/run_models/manual_update_enif.py +37 -0
- ert/run_models/model_factory.py +81 -22
- ert/run_models/multiple_data_assimilation.py +21 -10
- ert/run_models/run_model.py +54 -34
- ert/run_models/single_test_run.py +7 -4
- ert/run_models/update_run_model.py +4 -2
- ert/runpaths.py +5 -6
- ert/sample_prior.py +9 -4
- ert/scheduler/driver.py +37 -0
- ert/scheduler/event.py +3 -1
- ert/scheduler/job.py +23 -13
- ert/scheduler/lsf_driver.py +6 -2
- ert/scheduler/openpbs_driver.py +7 -1
- ert/scheduler/scheduler.py +5 -0
- ert/scheduler/slurm_driver.py +6 -2
- ert/services/__init__.py +2 -2
- ert/services/_base_service.py +31 -15
- ert/services/ert_server.py +317 -0
- ert/shared/_doc_utils/ert_jobs.py +1 -4
- ert/shared/storage/connection.py +3 -3
- ert/shared/version.py +3 -3
- ert/storage/local_ensemble.py +25 -5
- ert/storage/local_experiment.py +6 -14
- ert/storage/local_storage.py +35 -30
- ert/storage/migration/to18.py +12 -0
- ert/storage/migration/to8.py +4 -4
- ert/substitutions.py +12 -28
- ert/validation/active_range.py +7 -7
- ert/validation/rangestring.py +16 -16
- {ert-17.1.9.dist-info → ert-18.0.0.dist-info}/METADATA +8 -7
- {ert-17.1.9.dist-info → ert-18.0.0.dist-info}/RECORD +160 -159
- everest/api/everest_data_api.py +1 -14
- everest/bin/config_branch_script.py +3 -6
- everest/bin/everconfigdump_script.py +1 -9
- everest/bin/everest_script.py +21 -11
- everest/bin/kill_script.py +2 -2
- everest/bin/monitor_script.py +2 -2
- everest/bin/utils.py +6 -3
- everest/config/__init__.py +4 -1
- everest/config/control_config.py +61 -2
- everest/config/control_variable_config.py +2 -1
- everest/config/everest_config.py +38 -16
- everest/config/forward_model_config.py +5 -3
- everest/config/install_data_config.py +7 -5
- everest/config/install_job_config.py +7 -3
- everest/config/install_template_config.py +3 -3
- everest/config/optimization_config.py +19 -6
- everest/config/output_constraint_config.py +8 -2
- everest/config/server_config.py +6 -49
- everest/config/utils.py +25 -105
- everest/config/validation_utils.py +10 -10
- everest/config_file_loader.py +13 -2
- everest/detached/everserver.py +7 -8
- everest/everest_storage.py +6 -10
- everest/gui/everest_client.py +0 -1
- everest/gui/main_window.py +2 -2
- everest/optimizer/everest2ropt.py +59 -32
- everest/optimizer/opt_model_transforms.py +12 -13
- everest/optimizer/utils.py +0 -29
- everest/strings.py +0 -5
- ert/config/everest_constraints_config.py +0 -95
- ert/services/storage_service.py +0 -127
- everest/config/sampler_config.py +0 -103
- everest/simulator/__init__.py +0 -88
- everest/simulator/everest_to_ert.py +0 -51
- /ert/gui/{suggestor → ertwidgets/suggestor}/__init__.py +0 -0
- /ert/gui/{suggestor → ertwidgets/suggestor}/_colors.py +0 -0
- /ert/gui/{suggestor → ertwidgets/suggestor}/suggestor.py +0 -0
- {ert-17.1.9.dist-info → ert-18.0.0.dist-info}/WHEEL +0 -0
- {ert-17.1.9.dist-info → ert-18.0.0.dist-info}/entry_points.txt +0 -0
- {ert-17.1.9.dist-info → ert-18.0.0.dist-info}/licenses/COPYING +0 -0
- {ert-17.1.9.dist-info → ert-18.0.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import os
|
|
6
|
+
import sys
|
|
7
|
+
import threading
|
|
8
|
+
from collections.abc import Mapping
|
|
9
|
+
from inspect import Traceback
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from tempfile import NamedTemporaryFile
|
|
12
|
+
from time import sleep
|
|
13
|
+
from typing import Any, cast
|
|
14
|
+
|
|
15
|
+
import requests
|
|
16
|
+
|
|
17
|
+
from ert.dark_storage.client import Client, ErtClientConnectionInfo
|
|
18
|
+
from ert.services._base_service import ErtServerConnectionInfo, _Proc
|
|
19
|
+
from ert.trace import get_traceparent
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ErtServerContext:
    """Context manager that wraps an :class:`ErtServer` and shuts it down on exit."""

    def __init__(self, service: ErtServer) -> None:
        # The managed server instance; its lifetime ends when the context exits.
        self._service = service

    def __enter__(self) -> ErtServer:
        return self._service

    def __exit__(
        self,
        exc_type: type[BaseException],
        exc_value: BaseException,
        traceback: Traceback,
    ) -> bool:
        # Always shut the server down. Returning True only when no exception
        # occurred means exceptions are never suppressed by this context.
        self._service.shutdown()
        return exc_type is None
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class ErtServer:
    """Handle for the ert storage server process.

    Either attaches to an already-running server (when ``connection_info`` is
    supplied, e.g. loaded from ``storage_server.json``) or prepares a ``_Proc``
    that launches ``_storage_main.py`` as a subprocess.  Connection info
    arrives asynchronously via
    :meth:`on_connection_info_received_from_server_process`; readers block on
    an event until it is available.
    """

    # Process-wide singleton: set by start_server() and cleared by shutdown().
    _instance: ErtServer | None = None

    def __init__(
        self,
        storage_path: str,
        timeout: int = 120,
        parent_pid: int | None = None,
        connection_info: ErtServerConnectionInfo | Exception | None = None,
        verbose: bool = False,
        logging_config: str | None = None,  # Only used from everserver
    ) -> None:
        # Defensive: callers may pass timeout=None despite the annotation.
        if timeout is None:
            timeout = 120

        self._storage_path = storage_path
        self._connection_info: ErtServerConnectionInfo | Exception | None = (
            connection_info
        )
        # Set once connection info (or a startup error) has been received.
        self._on_connection_info_received_event = threading.Event()
        self._timeout = timeout
        # Cache for the first URL that answers the healthcheck (see fetch_url).
        self._url: str | None = None

        if self._connection_info is not None:
            # This means that server is already running
            if isinstance(connection_info, Mapping) and "urls" not in connection_info:
                raise KeyError("No URLs found in connection info")

            self._on_connection_info_received_event.set()
            self._thread_that_starts_server_process = None
            return

        # No connection info supplied: build the command line for a fresh
        # storage server subprocess (actually started via start_server()).
        run_storage_main_cmd = [
            sys.executable,
            str(Path(__file__).parent / "_storage_main.py"),
            "--project",
            storage_path,
        ]

        if logging_config is not None:
            run_storage_main_cmd += ["--logging-config", logging_config]

        # Propagate the current trace context to the subprocess, if any.
        traceparent = get_traceparent()
        if traceparent is not None:
            run_storage_main_cmd += ["--traceparent", traceparent]

        if parent_pid is not None:
            run_storage_main_cmd += ["--parent_pid", str(parent_pid)]

        if verbose:
            run_storage_main_cmd.append("--verbose")

        self._thread_that_starts_server_process = _Proc(
            service_name="storage",
            exec_args=run_storage_main_cmd,
            timeout=120,
            on_connection_info_received=self.on_connection_info_received_from_server_process,
            project=Path(self._storage_path),
        )

    def fetch_auth(self) -> tuple[str, Any]:
        """
        Returns a tuple of username and password, compatible with requests' `auth`
        kwarg.

        Blocks while the server is starting.
        """
        return (
            "__token__",
            cast(dict[str, Any], self.fetch_connection_info())["authtoken"],
        )

    @classmethod
    def init_service(
        cls,
        project: Path,
        timeout: int = 0,
        logging_config: str | None = None,
    ) -> ErtServerContext:
        """Connect to an existing server for *project*, or start a new one.

        Tries a non-blocking connect first (timeout=0); on failure a new
        server subprocess is started with the given *timeout*.

        Raises:
            PermissionError: if the storage location cannot be accessed.
        """
        try:
            service = cls.connect(
                project=project or Path.cwd(), timeout=0, logging_config=logging_config
            )
            # Check the server is up and running
            _ = service.fetch_url()
            return ErtServerContext(service)
        except (TimeoutError, json.JSONDecodeError, KeyError) as e:
            logging.getLogger(__name__).warning(
                "Failed locating existing storage service due to "
                f"{type(e).__name__}: {e}, starting new service"
            )
            return cls.start_server(
                project=project, timeout=timeout, logging_config=logging_config
            )
        except PermissionError as pe:
            logging.getLogger(__name__).error(
                f"{type(pe).__name__}: {pe}, cannot connect to storage service "
                f"due to permission issues."
            )
            raise pe

    def fetch_url(self) -> str:
        """Returns the url. Blocks while the server is starting"""
        if self._url is not None:
            return self._url

        # Probe each advertised URL until one answers the healthcheck with 200.
        for url in self.fetch_connection_info()["urls"]:
            con_info = self.fetch_connection_info()
            try:
                resp = requests.get(
                    f"{url}/healthcheck",
                    auth=self.fetch_auth(),
                    verify=con_info["cert"],
                )
                logging.getLogger(__name__).info(
                    f"Connecting to {url} got status: "
                    f"{resp.status_code}, {resp.headers}, {resp.reason}, {resp.text}"
                )
                if resp.status_code == 200:
                    # Cache the working URL so subsequent calls skip probing.
                    self._url = url
                    return str(url)

            except requests.ConnectionError as ce:
                logging.getLogger(__name__).info(
                    f"Could not connect to {url}, but will try something else. "
                    f"Error: {ce}"
                )
        raise TimeoutError(
            "None of the URLs provided for the ert storage server worked."
        )

    @classmethod
    def session(cls, project: os.PathLike[str], timeout: int | None = None) -> Client:
        """
        Start a HTTP transaction with the server
        """
        inst = cls.connect(timeout=timeout, project=project)
        info = inst.fetch_connection_info()
        return Client(
            conn_info=ErtClientConnectionInfo(
                base_url=inst.fetch_url(),
                auth_token=inst.fetch_auth()[1],
                cert=info["cert"],
            )
        )

    @property
    def logger(self) -> logging.Logger:
        # Dedicated logger for storage-related messages.
        return logging.getLogger("ert.shared.storage")

    def shutdown(self) -> int:
        """Shutdown the server."""
        # Nothing to do when attached to an externally managed server.
        if self._thread_that_starts_server_process is None:
            return -1

        # Clear the singleton before stopping so a new server may be started.
        self.__class__._instance = None
        error_code = self._thread_that_starts_server_process.shutdown()
        self._thread_that_starts_server_process = None

        return error_code

    @classmethod
    def connect(
        cls,
        *,
        project: os.PathLike[str],
        timeout: int | None = None,
        logging_config: str | None = None,
    ) -> ErtServer:
        """Attach to a running server by reading ``storage_server.json``.

        Polls *project* once per second, up to *timeout* seconds (default
        240), for a non-empty ``storage_server.json`` and builds an ErtServer
        from its contents.  If this process already started a server via
        start_server(), that singleton is reused instead.

        Raises:
            TimeoutError: if the file never appears.
            PermissionError: if the file cannot be read.
        """
        if cls._instance is not None:
            cls._instance.wait_until_ready()
            assert isinstance(cls._instance, cls)
            return cls._instance

        path = Path(project)

        # Wait for storage_server.json file to appear
        try:
            if timeout is None:
                timeout = 240
            t = -1
            while t < timeout:
                storage_server_path = path / "storage_server.json"
                # Non-empty check guards against reading a half-written file.
                if (
                    storage_server_path.exists()
                    and storage_server_path.stat().st_size > 0
                ):
                    with (path / "storage_server.json").open() as f:
                        storage_server_content = json.load(f)

                    return ErtServer(
                        storage_path=str(path),
                        connection_info=storage_server_content,
                        logging_config=logging_config,
                    )

                sleep(1)
                t += 1

            raise TimeoutError("Server not started")
        except PermissionError as pe:
            logging.getLogger(__name__).error(
                f"{type(pe).__name__}: {pe}, cannot connect to ert server service "
                f"due to permission issues."
            )
            raise pe

    @classmethod
    def start_server(
        cls,
        project: Path,
        parent_pid: int | None = None,
        verbose: bool = False,
        timeout: int | None = None,
        logging_config: str | None = None,
    ) -> ErtServerContext:
        """Start a new server subprocess for *project* and register it as
        the process-wide singleton.

        Raises:
            RuntimeError: if a server is already running in this process.
        """
        if cls._instance is not None:
            raise RuntimeError("Server already running")
        cls._instance = obj = cls(
            storage_path=str(project),
            parent_pid=parent_pid,
            verbose=verbose,
            timeout=timeout or 120,
            logging_config=logging_config,
        )
        if obj._thread_that_starts_server_process is not None:
            obj._thread_that_starts_server_process.start()
        return ErtServerContext(obj)

    def on_connection_info_received_from_server_process(
        self, info: ErtServerConnectionInfo | Exception | None
    ) -> None:
        """Callback invoked by ``_Proc`` with the subprocess' connection info.

        Stores *info*, persists it to ``storage_server.json`` when it is a
        mapping, then wakes up any threads blocked in wait_until_ready().
        *info* may also be an Exception describing a startup failure.
        """
        if self._connection_info is not None:
            raise ValueError("Connection information already set")
        if info is None:
            raise ValueError
        self._connection_info = info

        if self._storage_path is not None:
            if not Path(self._storage_path).exists():
                raise RuntimeError(f"No storage exists at : {self._storage_path}")
            path = f"{self._storage_path}/storage_server.json"
        else:
            path = "storage_server.json"

        if isinstance(info, Mapping):
            # Write to a temp file in the same directory and rename into place
            # so readers never observe a partially written storage_server.json.
            with NamedTemporaryFile(dir=f"{self._storage_path}", delete=False) as f:
                f.write(json.dumps(info, indent=4).encode("utf-8"))
                f.flush()
                os.rename(f.name, path)

        # Signal waiters only after the info is stored and persisted.
        self._on_connection_info_received_event.set()

    def wait_until_ready(self, timeout: int | None = None) -> bool:
        """Block until connection info arrives; True if it is usable info
        (i.e. not None and not a startup Exception)."""
        if timeout is None:
            timeout = self._timeout

        if self._on_connection_info_received_event.wait(timeout):
            return not (
                self._connection_info is None
                or isinstance(self._connection_info, Exception)
            )
        # NOTE(review): the event is set whenever _connection_info is stored,
        # so reaching this branch with a TimeoutError stored looks unlikely —
        # confirm whether this critical log is reachable.
        if isinstance(self._connection_info, TimeoutError):
            self.logger.critical(f"startup exceeded defined timeout {timeout}s")
        return False  # Timeout reached

    def fetch_connection_info(self) -> ErtServerConnectionInfo:
        """Return the connection info, blocking while the server starts.

        Raises:
            Exception: whatever startup error the subprocess reported.
            TimeoutError: if the server did not become ready in time.
            ValueError: if readiness was signalled without any info.
        """
        is_ready = self.wait_until_ready(self._timeout)
        if isinstance(self._connection_info, Exception):
            raise self._connection_info
        if not is_ready:
            raise TimeoutError
        if self._connection_info is None:
            raise ValueError("conn_info is None")
        return self._connection_info

    def wait(self) -> None:
        # Block until the server-starting thread (and thus the subprocess
        # supervision it performs) has finished.
        if self._thread_that_starts_server_process is not None:
            self._thread_that_starts_server_process.join()
|
|
@@ -180,10 +180,7 @@ class _ErtDocumentation(SphinxDirective):
|
|
|
180
180
|
|
|
181
181
|
class ErtForwardModelDocumentation(_ErtDocumentation):
|
|
182
182
|
pm = ErtPluginManager()
|
|
183
|
-
_JOBS: ClassVar[dict[str, Any]] =
|
|
184
|
-
**pm.get_documentation_for_jobs(),
|
|
185
|
-
**pm.get_documentation_for_forward_model_steps(),
|
|
186
|
-
}
|
|
183
|
+
_JOBS: ClassVar[dict[str, Any]] = pm.get_documentation_for_forward_model_steps()
|
|
187
184
|
|
|
188
185
|
def run(self) -> list[nodes.section]:
|
|
189
186
|
return self._generate_job_documentation_without_title(
|
ert/shared/storage/connection.py
CHANGED
|
@@ -3,15 +3,15 @@ from __future__ import annotations
|
|
|
3
3
|
import os
|
|
4
4
|
from typing import Any
|
|
5
5
|
|
|
6
|
-
from ert.services import
|
|
6
|
+
from ert.services import ErtServer
|
|
7
7
|
|
|
8
8
|
|
|
9
9
|
def get_info(
|
|
10
10
|
project_id: os.PathLike[str],
|
|
11
11
|
) -> dict[str, str | tuple[str, Any]]:
|
|
12
|
-
client =
|
|
12
|
+
client = ErtServer.connect(project=project_id)
|
|
13
13
|
return {
|
|
14
14
|
"baseurl": client.fetch_url(),
|
|
15
15
|
"auth": client.fetch_auth(),
|
|
16
|
-
"cert": client.
|
|
16
|
+
"cert": client.fetch_connection_info()["cert"],
|
|
17
17
|
}
|
ert/shared/version.py
CHANGED
|
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
|
|
|
28
28
|
commit_id: COMMIT_ID
|
|
29
29
|
__commit_id__: COMMIT_ID
|
|
30
30
|
|
|
31
|
-
__version__ = version = '
|
|
32
|
-
__version_tuple__ = version_tuple = (
|
|
31
|
+
__version__ = version = '18.0.0'
|
|
32
|
+
__version_tuple__ = version_tuple = (18, 0, 0)
|
|
33
33
|
|
|
34
|
-
__commit_id__ = commit_id = '
|
|
34
|
+
__commit_id__ = commit_id = 'g8fc6e0edc'
|
ert/storage/local_ensemble.py
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
import asyncio
|
|
4
|
+
import contextlib
|
|
4
5
|
import logging
|
|
5
6
|
import os
|
|
6
7
|
import time
|
|
@@ -660,10 +661,29 @@ class LocalEnsemble(BaseMode):
|
|
|
660
661
|
iens_active_index: npt.NDArray[np.int_],
|
|
661
662
|
) -> None:
|
|
662
663
|
config_node = self.experiment.parameter_configuration[param_group]
|
|
664
|
+
complete_df: pl.DataFrame | None = None
|
|
665
|
+
with contextlib.suppress(KeyError):
|
|
666
|
+
complete_df = self._load_parameters_lazy(SCALAR_FILENAME).collect(
|
|
667
|
+
engine="streaming"
|
|
668
|
+
)
|
|
663
669
|
for real, ds in config_node.create_storage_datasets(
|
|
664
670
|
parameters, iens_active_index
|
|
665
671
|
):
|
|
666
|
-
|
|
672
|
+
if isinstance(ds, pl.DataFrame):
|
|
673
|
+
if complete_df is None:
|
|
674
|
+
complete_df = ds
|
|
675
|
+
else:
|
|
676
|
+
complete_df = (
|
|
677
|
+
complete_df.join(ds, on="realization", how="left")
|
|
678
|
+
.unique(subset=["realization"], keep="first")
|
|
679
|
+
.sort("realization")
|
|
680
|
+
)
|
|
681
|
+
else:
|
|
682
|
+
self.save_parameters(ds, config_node.name, real)
|
|
683
|
+
|
|
684
|
+
group_path = self.mount_point / f"{_escape_filename(SCALAR_FILENAME)}.parquet"
|
|
685
|
+
if complete_df is not None:
|
|
686
|
+
self._storage._to_parquet_transaction(group_path, complete_df)
|
|
667
687
|
|
|
668
688
|
def load_scalars(
|
|
669
689
|
self, group: str | None = None, realizations: npt.NDArray[np.int_] | None = None
|
|
@@ -1329,7 +1349,7 @@ async def _write_responses_to_storage(
|
|
|
1329
1349
|
for config in response_configs:
|
|
1330
1350
|
try:
|
|
1331
1351
|
start_time = time.perf_counter()
|
|
1332
|
-
logger.debug(f"Starting to load response: {config.
|
|
1352
|
+
logger.debug(f"Starting to load response: {config.type}")
|
|
1333
1353
|
try:
|
|
1334
1354
|
if isinstance(config, SummaryConfig) and realization == 0:
|
|
1335
1355
|
_log_grid_contents(
|
|
@@ -1344,14 +1364,14 @@ async def _write_responses_to_storage(
|
|
|
1344
1364
|
continue
|
|
1345
1365
|
await asyncio.sleep(0)
|
|
1346
1366
|
logger.debug(
|
|
1347
|
-
f"Loaded {config.
|
|
1367
|
+
f"Loaded {config.type}",
|
|
1348
1368
|
extra={"Time": f"{(time.perf_counter() - start_time):.4f}s"},
|
|
1349
1369
|
)
|
|
1350
1370
|
start_time = time.perf_counter()
|
|
1351
|
-
ensemble.save_response(config.
|
|
1371
|
+
ensemble.save_response(config.type, ds, realization)
|
|
1352
1372
|
await asyncio.sleep(0)
|
|
1353
1373
|
logger.debug(
|
|
1354
|
-
f"Saved {config.
|
|
1374
|
+
f"Saved {config.type} to storage",
|
|
1355
1375
|
extra={"Time": f"{(time.perf_counter() - start_time):.4f}s"},
|
|
1356
1376
|
)
|
|
1357
1377
|
except Exception as err:
|
ert/storage/local_experiment.py
CHANGED
|
@@ -16,14 +16,11 @@ from pydantic import BaseModel, Field, TypeAdapter
|
|
|
16
16
|
from surfio import IrapSurface
|
|
17
17
|
|
|
18
18
|
from ert.config import (
|
|
19
|
-
EverestConstraintsConfig,
|
|
20
|
-
EverestObjectivesConfig,
|
|
21
19
|
ExtParamConfig,
|
|
22
|
-
GenDataConfig,
|
|
23
20
|
GenKwConfig,
|
|
21
|
+
KnownResponseTypes,
|
|
24
22
|
ParameterConfig,
|
|
25
23
|
ResponseConfig,
|
|
26
|
-
SummaryConfig,
|
|
27
24
|
SurfaceConfig,
|
|
28
25
|
)
|
|
29
26
|
from ert.config import Field as FieldConfig
|
|
@@ -62,10 +59,7 @@ class _Index(BaseModel):
|
|
|
62
59
|
|
|
63
60
|
_responses_adapter = TypeAdapter( # type: ignore
|
|
64
61
|
Annotated[
|
|
65
|
-
|
|
66
|
-
| SummaryConfig
|
|
67
|
-
| EverestConstraintsConfig
|
|
68
|
-
| EverestObjectivesConfig,
|
|
62
|
+
KnownResponseTypes,
|
|
69
63
|
Field(discriminator="type"),
|
|
70
64
|
]
|
|
71
65
|
)
|
|
@@ -199,9 +193,7 @@ class LocalExperiment(BaseMode):
|
|
|
199
193
|
|
|
200
194
|
response_data = {}
|
|
201
195
|
for response in responses or []:
|
|
202
|
-
response_data.update(
|
|
203
|
-
{response.response_type: response.model_dump(mode="json")}
|
|
204
|
-
)
|
|
196
|
+
response_data.update({response.type: response.model_dump(mode="json")})
|
|
205
197
|
storage._write_transaction(
|
|
206
198
|
path / cls._responses_file,
|
|
207
199
|
json.dumps(response_data, default=str, indent=2).encode("utf-8"),
|
|
@@ -418,7 +410,7 @@ class LocalExperiment(BaseMode):
|
|
|
418
410
|
|
|
419
411
|
for data in self.response_info.values():
|
|
420
412
|
response_instance = _responses_adapter.validate_python(data)
|
|
421
|
-
responses[response_instance.
|
|
413
|
+
responses[response_instance.type] = response_instance
|
|
422
414
|
|
|
423
415
|
return responses
|
|
424
416
|
|
|
@@ -451,7 +443,7 @@ class LocalExperiment(BaseMode):
|
|
|
451
443
|
mapping = {}
|
|
452
444
|
for config in self.response_configuration.values():
|
|
453
445
|
for key in config.keys if config.has_finalized_keys else []:
|
|
454
|
-
mapping[key] = config.
|
|
446
|
+
mapping[key] = config.type
|
|
455
447
|
|
|
456
448
|
return mapping
|
|
457
449
|
|
|
@@ -503,7 +495,7 @@ class LocalExperiment(BaseMode):
|
|
|
503
495
|
self._path / self._responses_file,
|
|
504
496
|
json.dumps(
|
|
505
497
|
{
|
|
506
|
-
c.
|
|
498
|
+
c.type: c.model_dump(mode="json")
|
|
507
499
|
for c in responses_configuration.values()
|
|
508
500
|
},
|
|
509
501
|
default=str,
|
ert/storage/local_storage.py
CHANGED
|
@@ -30,7 +30,7 @@ from .realization_storage_state import RealizationStorageState
|
|
|
30
30
|
|
|
31
31
|
logger = logging.getLogger(__name__)
|
|
32
32
|
|
|
33
|
-
_LOCAL_STORAGE_VERSION =
|
|
33
|
+
_LOCAL_STORAGE_VERSION = 18
|
|
34
34
|
|
|
35
35
|
|
|
36
36
|
class _Migrations(BaseModel):
|
|
@@ -64,8 +64,6 @@ class LocalStorage(BaseMode):
|
|
|
64
64
|
self,
|
|
65
65
|
path: str | os.PathLike[str],
|
|
66
66
|
mode: Mode,
|
|
67
|
-
*,
|
|
68
|
-
ignore_migration_check: bool = False,
|
|
69
67
|
) -> None:
|
|
70
68
|
"""
|
|
71
69
|
Initializes the LocalStorage instance.
|
|
@@ -76,19 +74,20 @@ class LocalStorage(BaseMode):
|
|
|
76
74
|
The file system path to the storage.
|
|
77
75
|
mode : Mode
|
|
78
76
|
The access mode for the storage (read/write).
|
|
79
|
-
ignore_migration_check : bool
|
|
80
|
-
If True, skips migration checks during initialization.
|
|
81
77
|
"""
|
|
82
78
|
|
|
83
|
-
super().__init__(mode)
|
|
84
79
|
self.path = Path(path).absolute()
|
|
80
|
+
super().__init__(mode)
|
|
81
|
+
|
|
82
|
+
if mode.can_write:
|
|
83
|
+
self._acquire_lock()
|
|
85
84
|
|
|
86
85
|
self._experiments: dict[UUID, LocalExperiment]
|
|
87
86
|
self._ensembles: dict[UUID, LocalEnsemble]
|
|
88
87
|
self._index: _Index
|
|
89
88
|
|
|
90
89
|
try:
|
|
91
|
-
version = _storage_version(self.path)
|
|
90
|
+
self.version = _storage_version(self.path)
|
|
92
91
|
except FileNotFoundError as err:
|
|
93
92
|
# No index json, will have a problem if other components of storage exists
|
|
94
93
|
errors = []
|
|
@@ -100,26 +99,36 @@ class LocalStorage(BaseMode):
|
|
|
100
99
|
errors.append(f"ensemble path: {self.path / self.ENSEMBLES_PATH}")
|
|
101
100
|
if errors:
|
|
102
101
|
raise ValueError(f"No index.json, but found: {errors}") from err
|
|
103
|
-
version = _LOCAL_STORAGE_VERSION
|
|
102
|
+
self.version = _LOCAL_STORAGE_VERSION
|
|
103
|
+
|
|
104
|
+
if self.check_migration_needed():
|
|
105
|
+
self.perform_migration()
|
|
104
106
|
|
|
105
|
-
|
|
107
|
+
self.refresh()
|
|
108
|
+
if mode.can_write:
|
|
109
|
+
self._save_index()
|
|
110
|
+
|
|
111
|
+
def check_migration_needed(self) -> bool:
|
|
112
|
+
if self.version > _LOCAL_STORAGE_VERSION:
|
|
106
113
|
raise RuntimeError(
|
|
107
|
-
f"Cannot open storage '{self.path}': Storage version {version} "
|
|
114
|
+
f"Cannot open storage '{self.path}': Storage version {self.version} "
|
|
108
115
|
f"is newer than the current version {_LOCAL_STORAGE_VERSION}, "
|
|
109
116
|
"upgrade ert to continue, or run with a different ENSPATH"
|
|
110
117
|
)
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
self.
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
118
|
+
|
|
119
|
+
return self.version < _LOCAL_STORAGE_VERSION
|
|
120
|
+
|
|
121
|
+
def perform_migration(self) -> None:
|
|
122
|
+
if self.check_migration_needed():
|
|
123
|
+
if self.can_write:
|
|
124
|
+
self._migrate(self.version)
|
|
125
|
+
self._save_index()
|
|
126
|
+
else:
|
|
127
|
+
raise RuntimeError(
|
|
128
|
+
f"Cannot open storage '{self.path}' in read-only mode: "
|
|
129
|
+
f"Storage version {self.version} is too old. "
|
|
130
|
+
f"Run ert to initiate migration."
|
|
131
|
+
)
|
|
123
132
|
|
|
124
133
|
def refresh(self) -> None:
|
|
125
134
|
"""
|
|
@@ -283,22 +292,16 @@ class LocalStorage(BaseMode):
|
|
|
283
292
|
def close(self) -> None:
|
|
284
293
|
"""
|
|
285
294
|
Closes the storage, releasing any acquired locks and saving the index.
|
|
286
|
-
|
|
287
295
|
This method should be called to cleanly close the storage, especially
|
|
288
296
|
when it was opened in write mode. Failing to call this method may leave
|
|
289
297
|
a lock file behind, which would interfere with subsequent access to
|
|
290
298
|
the storage.
|
|
291
299
|
"""
|
|
292
|
-
|
|
293
300
|
self._ensembles.clear()
|
|
294
301
|
self._experiments.clear()
|
|
295
302
|
|
|
296
|
-
if not self.can_write:
|
|
297
|
-
return
|
|
298
|
-
|
|
299
|
-
self._save_index()
|
|
300
|
-
|
|
301
303
|
if self.can_write:
|
|
304
|
+
self._save_index()
|
|
302
305
|
self._release_lock()
|
|
303
306
|
|
|
304
307
|
def _release_lock(self) -> None:
|
|
@@ -499,6 +502,7 @@ class LocalStorage(BaseMode):
|
|
|
499
502
|
to15,
|
|
500
503
|
to16,
|
|
501
504
|
to17,
|
|
505
|
+
to18,
|
|
502
506
|
)
|
|
503
507
|
|
|
504
508
|
try:
|
|
@@ -545,6 +549,7 @@ class LocalStorage(BaseMode):
|
|
|
545
549
|
14: to15,
|
|
546
550
|
15: to16,
|
|
547
551
|
16: to17,
|
|
552
|
+
17: to18,
|
|
548
553
|
}
|
|
549
554
|
for from_version in range(version, _LOCAL_STORAGE_VERSION):
|
|
550
555
|
migrations[from_version].migrate(self.path)
|
|
@@ -562,7 +567,7 @@ class LocalStorage(BaseMode):
|
|
|
562
567
|
Get a unique experiment name
|
|
563
568
|
|
|
564
569
|
If an experiment with the given name exists an _0 is appended
|
|
565
|
-
or _n+1 where n is the
|
|
570
|
+
or _n+1 where n is the largest postfix found for the given experiment name
|
|
566
571
|
"""
|
|
567
572
|
if not experiment_name:
|
|
568
573
|
return self.get_unique_experiment_name("default")
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
from pathlib import Path

# Human-readable changelog entry for storage schema version 18.  The literals
# are implicitly concatenated, so each needs a trailing space, and the version
# mentioned must match this migration's target (18, not 17 — the previous
# text was copied from to17.py).
info = (
    "Added localization attributes to summary observations. "
    "Added RFT observations. "
    "No change to current storage, only additions. "
    "Bumping to 18 to indicate appended schema in storage."
)


def migrate(path: Path) -> None:
    """No-op migration: version 18 only appends to the schema, so existing
    storage at *path* needs no rewriting."""
    pass
|
ert/storage/migration/to8.py
CHANGED
|
@@ -138,14 +138,14 @@ def _migrate_observations_to_grouped_parquet(path: Path) -> None:
|
|
|
138
138
|
|
|
139
139
|
for response_type in ["gen_data", "summary"]:
|
|
140
140
|
infos = [
|
|
141
|
-
|
|
141
|
+
info_ for info_ in obs_ds_infos if info_.response_type == response_type
|
|
142
142
|
]
|
|
143
143
|
if len(infos) > 0:
|
|
144
|
-
concatd_df = pl.concat([
|
|
144
|
+
concatd_df = pl.concat([info_.polars_df for info_ in infos])
|
|
145
145
|
concatd_df.write_parquet(experiment / "observations" / response_type)
|
|
146
146
|
|
|
147
|
-
for
|
|
148
|
-
os.remove(
|
|
147
|
+
for info_ in infos:
|
|
148
|
+
os.remove(info_.original_ds_path)
|
|
149
149
|
|
|
150
150
|
|
|
151
151
|
def migrate(path: Path) -> None:
|