ert 19.0.0__py3-none-any.whl → 19.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. ert/__main__.py +94 -63
  2. ert/analysis/_es_update.py +11 -14
  3. ert/config/_create_observation_dataframes.py +12 -228
  4. ert/config/_observations.py +164 -152
  5. ert/config/_read_summary.py +4 -5
  6. ert/config/ert_config.py +1 -56
  7. ert/config/observation_config_migrations.py +793 -0
  8. ert/config/rft_config.py +1 -1
  9. ert/dark_storage/compute/misfits.py +0 -42
  10. ert/dark_storage/endpoints/__init__.py +0 -2
  11. ert/dark_storage/endpoints/experiments.py +0 -3
  12. ert/dark_storage/json_schema/experiment.py +0 -1
  13. ert/field_utils/grdecl_io.py +9 -26
  14. ert/gui/main_window.py +2 -0
  15. ert/gui/tools/manage_experiments/export_dialog.py +4 -0
  16. ert/gui/tools/manage_experiments/storage_info_widget.py +1 -5
  17. ert/gui/tools/plot/plot_api.py +10 -10
  18. ert/gui/tools/plot/plot_widget.py +12 -14
  19. ert/gui/tools/plot/plot_window.py +1 -10
  20. ert/services/__init__.py +7 -3
  21. ert/services/_storage_main.py +59 -22
  22. ert/services/ert_server.py +186 -24
  23. ert/shared/version.py +3 -3
  24. ert/storage/local_ensemble.py +3 -107
  25. ert/storage/local_experiment.py +0 -16
  26. ert/storage/local_storage.py +1 -3
  27. ert/utils/__init__.py +20 -0
  28. {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/METADATA +2 -2
  29. {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/RECORD +40 -47
  30. {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/WHEEL +1 -1
  31. everest/bin/everest_script.py +5 -5
  32. everest/bin/kill_script.py +2 -2
  33. everest/bin/monitor_script.py +2 -2
  34. everest/bin/utils.py +4 -4
  35. everest/detached/everserver.py +6 -6
  36. everest/gui/main_window.py +2 -2
  37. everest/util/__init__.py +1 -19
  38. ert/dark_storage/compute/__init__.py +0 -0
  39. ert/dark_storage/endpoints/compute/__init__.py +0 -0
  40. ert/dark_storage/endpoints/compute/misfits.py +0 -95
  41. ert/services/_base_service.py +0 -387
  42. ert/services/webviz_ert_service.py +0 -20
  43. ert/shared/storage/command.py +0 -38
  44. ert/shared/storage/extraction.py +0 -42
  45. ert/storage/migration/to23.py +0 -49
  46. {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/entry_points.txt +0 -0
  47. {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/licenses/COPYING +0 -0
  48. {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,187 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import contextlib
4
+ import io
3
5
  import json
4
6
  import logging
5
7
  import os
8
+ import signal
6
9
  import sys
7
10
  import threading
8
11
  import types
9
- from collections.abc import Mapping
12
+ from collections.abc import Callable, Mapping, Sequence
10
13
  from pathlib import Path
14
+ from select import PIPE_BUF, select
15
+ from subprocess import Popen, TimeoutExpired
11
16
  from tempfile import NamedTemporaryFile
12
17
  from time import sleep
13
- from typing import Any, cast
18
+ from typing import Any, TypedDict, cast
14
19
 
15
20
  import requests
16
21
 
17
22
  from ert.dark_storage.client import Client, ErtClientConnectionInfo
18
- from ert.services._base_service import ErtServerConnectionInfo, _Proc
19
23
  from ert.trace import get_traceparent
20
24
 
25
+ SERVICE_CONF_PATHS: set[str] = set()
26
+
27
+
28
+ class ErtServerConnectionInfo(TypedDict):
29
+ urls: list[str]
30
+ authtoken: str
31
+ host: str
32
+ port: str
33
+ cert: str
34
+ auth: str
35
+
36
+
37
+ class ErtServerExit(OSError):
38
+ pass
39
+
40
+
41
+ def cleanup_service_files(signum: int, frame: types.FrameType | None) -> None:
42
+ for file_path in SERVICE_CONF_PATHS:
43
+ file = Path(file_path)
44
+ if file.exists():
45
+ file.unlink()
46
+ raise ErtServerExit(f"Signal {signum} received.")
47
+
48
+
49
+ if threading.current_thread() is threading.main_thread():
50
+ signal.signal(signal.SIGTERM, cleanup_service_files)
51
+ signal.signal(signal.SIGINT, cleanup_service_files)
52
+
53
+
54
+ class ServerBootFail(RuntimeError):
55
+ pass
56
+
57
+
58
+ class _Proc(threading.Thread):
59
+ def __init__(
60
+ self,
61
+ service_name: str,
62
+ exec_args: Sequence[str],
63
+ timeout: int,
64
+ on_connection_info_received: Callable[
65
+ [ErtServerConnectionInfo | Exception | None], None
66
+ ],
67
+ project: Path,
68
+ ) -> None:
69
+ super().__init__()
70
+
71
+ self._shutdown = threading.Event()
72
+
73
+ self._service_name = service_name
74
+ self._exec_args = exec_args
75
+ self._timeout = timeout
76
+ self._propagate_connection_info_from_childproc = on_connection_info_received
77
+ self._service_config_path = project / f"{self._service_name}_server.json"
78
+
79
+ fd_read, fd_write = os.pipe()
80
+ self._comm_pipe = os.fdopen(fd_read)
81
+
82
+ env = os.environ.copy()
83
+ env["ERT_COMM_FD"] = str(fd_write)
84
+
85
+ SERVICE_CONF_PATHS.add(str(self._service_config_path))
86
+
87
+ # The process is waited for in _do_shutdown()
88
+ self._childproc = Popen(
89
+ self._exec_args,
90
+ pass_fds=(fd_write,),
91
+ env=env,
92
+ close_fds=True,
93
+ )
94
+ os.close(fd_write)
95
+
96
+ def run(self) -> None:
97
+ comm = self._read_connection_info_from_process(self._childproc)
98
+
99
+ if comm is None:
100
+ self._propagate_connection_info_from_childproc(TimeoutError())
101
+ return # _read_conn_info() has already cleaned up in this case
102
+
103
+ conn_info: ErtServerConnectionInfo | Exception | None = None
104
+ try:
105
+ conn_info = json.loads(comm)
106
+ except json.JSONDecodeError:
107
+ conn_info = ServerBootFail()
108
+ except Exception as exc:
109
+ conn_info = exc
110
+
111
+ try:
112
+ self._propagate_connection_info_from_childproc(conn_info)
113
+
114
+ while True:
115
+ if self._childproc.poll() is not None:
116
+ break
117
+ if self._shutdown.wait(1):
118
+ self._do_shutdown()
119
+ break
120
+
121
+ except Exception as e:
122
+ print(str(e))
123
+ self.logger.exception(e)
124
+
125
+ finally:
126
+ self._ensure_connection_info_file_is_deleted()
127
+
128
+ def shutdown(self) -> int:
129
+ """Shutdown the server."""
130
+ self._shutdown.set()
131
+ self.join()
132
+
133
+ return self._childproc.returncode
134
+
135
+ def _read_connection_info_from_process(self, proc: Popen[bytes]) -> str | None:
136
+ comm_buf = io.StringIO()
137
+ first_iter = True
138
+ while first_iter or proc.poll() is None:
139
+ first_iter = False
140
+ ready = select([self._comm_pipe], [], [], self._timeout)
141
+
142
+ # Timeout reached, exit with a failure
143
+ if ready == ([], [], []):
144
+ self._do_shutdown()
145
+ self._ensure_connection_info_file_is_deleted()
146
+ return None
147
+
148
+ x = self._comm_pipe.read(PIPE_BUF)
149
+ if not x: # EOF
150
+ break
151
+ comm_buf.write(x)
152
+ return comm_buf.getvalue()
153
+
154
+ def _do_shutdown(self) -> None:
155
+ if self._childproc is None:
156
+ return
157
+ try:
158
+ self._childproc.terminate()
159
+ self._childproc.wait(10) # Give it 10s to shut down cleanly..
160
+ except TimeoutExpired:
161
+ try:
162
+ self._childproc.kill() # ... then kick it harder...
163
+ self._childproc.wait(self._timeout) # ... and wait again
164
+ except TimeoutExpired:
165
+ self.logger.error(
166
+ f"waiting for child-process exceeded timeout {self._timeout}s"
167
+ )
168
+
169
+ def _ensure_connection_info_file_is_deleted(self) -> None:
170
+ """
171
+ Ensure that the JSON connection information file is deleted
172
+ """
173
+ with contextlib.suppress(OSError):
174
+ if self._service_config_path.exists():
175
+ self._service_config_path.unlink()
176
+
177
+ @property
178
+ def logger(self) -> logging.Logger:
179
+ return logging.getLogger("ert.shared.storage")
180
+
181
+
182
+ _ERT_SERVER_CONNECTION_INFO_FILE = "storage_server.json"
183
+ _ERT_SERVER_EXECUTABLE_FILE = str(Path(__file__).parent / "_storage_main.py")
184
+
21
185
 
22
186
  class ErtServerContext:
23
187
  def __init__(self, service: ErtServer) -> None:
@@ -70,7 +234,7 @@ class ErtServer:
70
234
 
71
235
  run_storage_main_cmd = [
72
236
  sys.executable,
73
- str(Path(__file__).parent / "_storage_main.py"),
237
+ _ERT_SERVER_EXECUTABLE_FILE,
74
238
  "--project",
75
239
  storage_path,
76
240
  ]
@@ -91,7 +255,7 @@ class ErtServer:
91
255
  self._thread_that_starts_server_process = _Proc(
92
256
  service_name="storage",
93
257
  exec_args=run_storage_main_cmd,
94
- timeout=120,
258
+ timeout=timeout,
95
259
  on_connection_info_received=self.on_connection_info_received_from_server_process,
96
260
  project=Path(self._storage_path),
97
261
  )
@@ -167,21 +331,6 @@ class ErtServer:
167
331
  "None of the URLs provided for the ert storage server worked."
168
332
  )
169
333
 
170
- @classmethod
171
- def session(cls, project: os.PathLike[str], timeout: int | None = None) -> Client:
172
- """
173
- Start a HTTP transaction with the server
174
- """
175
- inst = cls.connect(timeout=timeout, project=project)
176
- info = inst.fetch_connection_info()
177
- return Client(
178
- conn_info=ErtClientConnectionInfo(
179
- base_url=inst.fetch_url(),
180
- auth_token=inst.fetch_auth()[1],
181
- cert=info["cert"],
182
- )
183
- )
184
-
185
334
  @property
186
335
  def logger(self) -> logging.Logger:
187
336
  return logging.getLogger("ert.shared.storage")
@@ -218,12 +367,12 @@ class ErtServer:
218
367
  timeout = 240
219
368
  t = -1
220
369
  while t < timeout:
221
- storage_server_path = path / "storage_server.json"
370
+ storage_server_path = path / _ERT_SERVER_CONNECTION_INFO_FILE
222
371
  if (
223
372
  storage_server_path.exists()
224
373
  and storage_server_path.stat().st_size > 0
225
374
  ):
226
- with (path / "storage_server.json").open() as f:
375
+ with (path / _ERT_SERVER_CONNECTION_INFO_FILE).open() as f:
227
376
  storage_server_content = json.load(f)
228
377
 
229
378
  return ErtServer(
@@ -277,9 +426,9 @@ class ErtServer:
277
426
  if self._storage_path is not None:
278
427
  if not Path(self._storage_path).exists():
279
428
  raise RuntimeError(f"No storage exists at : {self._storage_path}")
280
- path = f"{self._storage_path}/storage_server.json"
429
+ path = f"{self._storage_path}/{_ERT_SERVER_CONNECTION_INFO_FILE}"
281
430
  else:
282
- path = "storage_server.json"
431
+ path = _ERT_SERVER_CONNECTION_INFO_FILE
283
432
 
284
433
  if isinstance(info, Mapping):
285
434
  with NamedTemporaryFile(dir=f"{self._storage_path}", delete=False) as f:
@@ -315,3 +464,16 @@ class ErtServer:
315
464
  def wait(self) -> None:
316
465
  if self._thread_that_starts_server_process is not None:
317
466
  self._thread_that_starts_server_process.join()
467
+
468
+
469
+ def create_ertserver_client(project: Path, timeout: int | None = None) -> Client:
470
+ """Read connection info from file in path and create HTTP client."""
471
+ connection = ErtServer.connect(timeout=timeout, project=project)
472
+ info = connection.fetch_connection_info()
473
+ return Client(
474
+ conn_info=ErtClientConnectionInfo(
475
+ base_url=connection.fetch_url(),
476
+ auth_token=connection.fetch_auth()[1],
477
+ cert=info["cert"],
478
+ )
479
+ )
ert/shared/version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '19.0.0'
32
- __version_tuple__ = version_tuple = (19, 0, 0)
31
+ __version__ = version = '19.0.0rc1'
32
+ __version_tuple__ = version_tuple = (19, 0, 0, 'rc1')
33
33
 
34
- __commit_id__ = commit_id = 'g2518c4485'
34
+ __commit_id__ = commit_id = 'g6995ffc2f'
@@ -15,7 +15,6 @@ from typing import TYPE_CHECKING
15
15
  from uuid import UUID
16
16
 
17
17
  import numpy as np
18
- import pandas as pd
19
18
  import polars as pl
20
19
  import resfo
21
20
  import xarray as xr
@@ -592,8 +591,8 @@ class LocalEnsemble(BaseMode):
592
591
  pl.col(col)
593
592
  .map_elements(
594
593
  self.experiment.parameter_configuration[col].transform_data(),
595
- return_dtype=df[col].dtype,
596
594
  )
595
+ .cast(df[col].dtype)
597
596
  .alias(col)
598
597
  for col in df.columns
599
598
  if col != "realization"
@@ -679,9 +678,6 @@ class LocalEnsemble(BaseMode):
679
678
  if complete_df is None:
680
679
  complete_df = ds
681
680
  else:
682
- complete_df = complete_df.drop(
683
- [c for c in ds.columns if c != "realization"], strict=False
684
- )
685
681
  complete_df = (
686
682
  complete_df.join(ds, on="realization", how="left")
687
683
  .unique(subset=["realization"], keep="first")
@@ -1057,7 +1053,7 @@ class LocalEnsemble(BaseMode):
1057
1053
  pl.col(col).is_in(observed_values.implode())
1058
1054
  )
1059
1055
 
1060
- pivoted = responses.collect(engine="streaming").pivot( # noqa: PD010
1056
+ pivoted = responses.collect(engine="streaming").pivot(
1061
1057
  on="realization",
1062
1058
  index=["response_key", *response_cls.primary_key],
1063
1059
  values="values",
@@ -1099,11 +1095,6 @@ class LocalEnsemble(BaseMode):
1099
1095
  on=["response_key", *response_cls.primary_key],
1100
1096
  )
1101
1097
 
1102
- # Do not drop primary keys which
1103
- # overlap with localization attributes
1104
- primary_keys_to_drop = set(response_cls.primary_key).difference(
1105
- {"north", "east", "radius"}
1106
- )
1107
1098
  joined = (
1108
1099
  joined.with_columns(
1109
1100
  pl.concat_str(
@@ -1113,7 +1104,7 @@ class LocalEnsemble(BaseMode):
1113
1104
  # Avoid potential collisions w/ primary key
1114
1105
  )
1115
1106
  )
1116
- .drop(primary_keys_to_drop)
1107
+ .drop(response_cls.primary_key)
1117
1108
  .rename({"__tmp_index_key__": "index"})
1118
1109
  )
1119
1110
 
@@ -1129,9 +1120,6 @@ class LocalEnsemble(BaseMode):
1129
1120
  "observation_key",
1130
1121
  "observations",
1131
1122
  "std",
1132
- "east",
1133
- "north",
1134
- "radius",
1135
1123
  ]
1136
1124
  )
1137
1125
 
@@ -1190,98 +1178,6 @@ class LocalEnsemble(BaseMode):
1190
1178
  self._path / "index.json", self._index.model_dump_json().encode("utf-8")
1191
1179
  )
1192
1180
 
1193
- @property
1194
- def all_parameters_and_gen_data(self) -> pl.DataFrame | None:
1195
- """
1196
- Only for Everest wrt objectives/constraints,
1197
- disregards summary data and primary key values
1198
- """
1199
- param_dfs = []
1200
- for param_group in self.experiment.parameter_configuration:
1201
- params_pd = self.load_parameters(param_group)["values"].to_pandas()
1202
-
1203
- assert isinstance(params_pd, pd.DataFrame)
1204
- params_pd = params_pd.reset_index()
1205
- param_df = pl.from_pandas(params_pd)
1206
-
1207
- param_columns = [c for c in param_df.columns if c != "realizations"]
1208
- param_df = param_df.rename(
1209
- {
1210
- **{
1211
- c: param_group + "." + c.replace("\0", ".")
1212
- for c in param_columns
1213
- },
1214
- "realizations": "realization",
1215
- }
1216
- )
1217
- param_df = param_df.cast(
1218
- {
1219
- "realization": pl.UInt16,
1220
- **{c: pl.Float64 for c in param_df.columns if c != "realization"},
1221
- }
1222
- )
1223
- param_dfs.append(param_df)
1224
-
1225
- responses = self.load_responses(
1226
- "gen_data", tuple(self.get_realization_list_with_responses())
1227
- )
1228
-
1229
- if responses is None:
1230
- return pl.concat(param_dfs)
1231
-
1232
- params_wide = pl.concat(
1233
- [
1234
- (
1235
- pdf.sort("realization").drop("realization")
1236
- if i > 0
1237
- else pdf.sort("realization")
1238
- )
1239
- for i, pdf in enumerate(param_dfs)
1240
- ],
1241
- how="horizontal",
1242
- )
1243
-
1244
- responses_wide = responses["realization", "response_key", "values"].pivot( # noqa: PD010
1245
- on="response_key", values="values"
1246
- )
1247
-
1248
- # If responses are missing for some realizations, this _left_ join will
1249
- # put null (polars) which maps to nan when doing .to_numpy() into the
1250
- # response columns for those realizations
1251
- params_and_responses = params_wide.join(
1252
- responses_wide, on="realization", how="left"
1253
- ).with_columns(pl.lit(self.iteration).alias("batch"))
1254
-
1255
- assert self.everest_realization_info is not None
1256
-
1257
- model_realization_mapping = {
1258
- k: v["model_realization"] for k, v in self.everest_realization_info.items()
1259
- }
1260
- perturbation_mapping = {
1261
- k: v["perturbation"] for k, v in self.everest_realization_info.items()
1262
- }
1263
-
1264
- params_and_responses = params_and_responses.with_columns(
1265
- pl.col("realization")
1266
- .replace(model_realization_mapping)
1267
- .alias("model_realization"),
1268
- pl.col("realization")
1269
- .cast(pl.Int32)
1270
- .replace(perturbation_mapping)
1271
- .alias("perturbation"),
1272
- )
1273
-
1274
- column_order = [
1275
- "batch",
1276
- "model_realization",
1277
- "perturbation",
1278
- "realization",
1279
- *[c for c in responses_wide.columns if c != "realization"],
1280
- *[c for c in params_wide.columns if c != "realization"],
1281
- ]
1282
-
1283
- return params_and_responses[column_order]
1284
-
1285
1181
 
1286
1182
  async def _read_parameters(
1287
1183
  run_path: str,
@@ -508,19 +508,3 @@ class LocalExperiment(BaseMode):
508
508
 
509
509
  if self.response_type_to_response_keys is not None:
510
510
  del self.response_type_to_response_keys
511
-
512
- @property
513
- def all_parameters_and_gen_data(self) -> pl.DataFrame | None:
514
- if not self.ensembles:
515
- return None
516
-
517
- ensemble_dfs = [
518
- e.all_parameters_and_gen_data
519
- for e in self.ensembles
520
- if e.all_parameters_and_gen_data is not None
521
- ]
522
-
523
- if not ensemble_dfs:
524
- return None
525
-
526
- return pl.concat(ensemble_dfs)
@@ -31,7 +31,7 @@ from .realization_storage_state import RealizationStorageState
31
31
 
32
32
  logger = logging.getLogger(__name__)
33
33
 
34
- _LOCAL_STORAGE_VERSION = 23
34
+ _LOCAL_STORAGE_VERSION = 22
35
35
 
36
36
 
37
37
  class _Migrations(BaseModel):
@@ -518,7 +518,6 @@ class LocalStorage(BaseMode):
518
518
  to20,
519
519
  to21,
520
520
  to22,
521
- to23,
522
521
  )
523
522
 
524
523
  try:
@@ -570,7 +569,6 @@ class LocalStorage(BaseMode):
570
569
  19: to20,
571
570
  20: to21,
572
571
  21: to22,
573
- 22: to23,
574
572
  }
575
573
  for from_version in range(version, _LOCAL_STORAGE_VERSION):
576
574
  migrations[from_version].migrate(self.path)
ert/utils/__init__.py CHANGED
@@ -1,9 +1,13 @@
1
1
  import logging
2
2
  import time
3
3
  from collections.abc import Callable
4
+ from datetime import UTC, datetime
4
5
  from functools import wraps
6
+ from pathlib import Path
5
7
  from typing import ParamSpec, TypeVar
6
8
 
9
+ from _ert.utils import file_safe_timestamp
10
+
7
11
  P = ParamSpec("P")
8
12
  R = TypeVar("R")
9
13
 
@@ -30,3 +34,19 @@ def log_duration(
30
34
  return wrapper
31
35
 
32
36
  return decorator
37
+
38
+
39
+ def makedirs_if_needed(path: Path, roll_if_exists: bool = False) -> None:
40
+ if path.is_dir():
41
+ if not roll_if_exists:
42
+ return
43
+ _roll_dir(path) # exists and should be rolled
44
+ path.mkdir(parents=True, exist_ok=False)
45
+
46
+
47
+ def _roll_dir(old_name: Path) -> None:
48
+ old_name = old_name.resolve()
49
+ timestamp = file_safe_timestamp(datetime.now(UTC).isoformat())
50
+ new_name = f"{old_name}__{timestamp}"
51
+ old_name.rename(new_name)
52
+ logging.getLogger().info(f"renamed {old_name} to {new_name}")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ert
3
- Version: 19.0.0
3
+ Version: 19.0.0rc1
4
4
  Summary: Ensemble based Reservoir Tool (ERT)
5
5
  Author-email: Equinor ASA <fg_sib-scout@equinor.com>
6
6
  License-Expression: GPL-3.0-only
@@ -47,7 +47,7 @@ Requires-Dist: orjson
47
47
  Requires-Dist: packaging
48
48
  Requires-Dist: pandas
49
49
  Requires-Dist: pluggy>=1.3.0
50
- Requires-Dist: polars<1.35,>=1.32.3
50
+ Requires-Dist: polars!=1.35,>=1.32.3
51
51
  Requires-Dist: progressbar2
52
52
  Requires-Dist: psutil
53
53
  Requires-Dist: pyarrow