ert 19.0.0rc1__py3-none-any.whl → 19.0.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. ert/__main__.py +63 -94
  2. ert/analysis/_es_update.py +14 -11
  3. ert/config/_create_observation_dataframes.py +262 -23
  4. ert/config/_observations.py +153 -181
  5. ert/config/_read_summary.py +5 -4
  6. ert/config/ert_config.py +56 -1
  7. ert/config/parsing/observations_parser.py +0 -6
  8. ert/config/rft_config.py +1 -1
  9. ert/dark_storage/compute/__init__.py +0 -0
  10. ert/dark_storage/compute/misfits.py +42 -0
  11. ert/dark_storage/endpoints/__init__.py +2 -0
  12. ert/dark_storage/endpoints/compute/__init__.py +0 -0
  13. ert/dark_storage/endpoints/compute/misfits.py +95 -0
  14. ert/dark_storage/endpoints/experiments.py +3 -0
  15. ert/dark_storage/json_schema/experiment.py +1 -0
  16. ert/gui/main_window.py +0 -2
  17. ert/gui/tools/manage_experiments/export_dialog.py +0 -4
  18. ert/gui/tools/manage_experiments/storage_info_widget.py +5 -1
  19. ert/gui/tools/plot/plot_api.py +10 -10
  20. ert/gui/tools/plot/plot_widget.py +0 -5
  21. ert/gui/tools/plot/plot_window.py +1 -1
  22. ert/services/__init__.py +3 -7
  23. ert/services/_base_service.py +387 -0
  24. ert/services/_storage_main.py +22 -59
  25. ert/services/ert_server.py +24 -186
  26. ert/services/webviz_ert_service.py +20 -0
  27. ert/shared/storage/command.py +38 -0
  28. ert/shared/storage/extraction.py +42 -0
  29. ert/shared/version.py +3 -3
  30. ert/storage/local_ensemble.py +95 -2
  31. ert/storage/local_experiment.py +16 -0
  32. ert/storage/local_storage.py +1 -3
  33. ert/utils/__init__.py +0 -20
  34. {ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/METADATA +2 -2
  35. {ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/RECORD +46 -41
  36. {ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/WHEEL +1 -1
  37. everest/bin/everest_script.py +5 -5
  38. everest/bin/kill_script.py +2 -2
  39. everest/bin/monitor_script.py +2 -2
  40. everest/bin/utils.py +4 -4
  41. everest/detached/everserver.py +6 -6
  42. everest/gui/main_window.py +2 -2
  43. everest/util/__init__.py +19 -1
  44. ert/config/observation_config_migrations.py +0 -793
  45. ert/storage/migration/to22.py +0 -18
  46. {ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/entry_points.txt +0 -0
  47. {ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/licenses/COPYING +0 -0
  48. {ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/top_level.txt +0 -0
ert/services/ert_server.py CHANGED
@@ -1,187 +1,23 @@
  from __future__ import annotations

- import contextlib
- import io
  import json
  import logging
  import os
- import signal
  import sys
  import threading
  import types
- from collections.abc import Callable, Mapping, Sequence
+ from collections.abc import Mapping
  from pathlib import Path
- from select import PIPE_BUF, select
- from subprocess import Popen, TimeoutExpired
  from tempfile import NamedTemporaryFile
  from time import sleep
- from typing import Any, TypedDict, cast
+ from typing import Any, cast

  import requests

  from ert.dark_storage.client import Client, ErtClientConnectionInfo
+ from ert.services._base_service import ErtServerConnectionInfo, _Proc
  from ert.trace import get_traceparent

- SERVICE_CONF_PATHS: set[str] = set()
-
-
- class ErtServerConnectionInfo(TypedDict):
-     urls: list[str]
-     authtoken: str
-     host: str
-     port: str
-     cert: str
-     auth: str
-
-
- class ErtServerExit(OSError):
-     pass
-
-
- def cleanup_service_files(signum: int, frame: types.FrameType | None) -> None:
-     for file_path in SERVICE_CONF_PATHS:
-         file = Path(file_path)
-         if file.exists():
-             file.unlink()
-     raise ErtServerExit(f"Signal {signum} received.")
-
-
- if threading.current_thread() is threading.main_thread():
-     signal.signal(signal.SIGTERM, cleanup_service_files)
-     signal.signal(signal.SIGINT, cleanup_service_files)
-
-
- class ServerBootFail(RuntimeError):
-     pass
-
-
- class _Proc(threading.Thread):
-     def __init__(
-         self,
-         service_name: str,
-         exec_args: Sequence[str],
-         timeout: int,
-         on_connection_info_received: Callable[
-             [ErtServerConnectionInfo | Exception | None], None
-         ],
-         project: Path,
-     ) -> None:
-         super().__init__()
-
-         self._shutdown = threading.Event()
-
-         self._service_name = service_name
-         self._exec_args = exec_args
-         self._timeout = timeout
-         self._propagate_connection_info_from_childproc = on_connection_info_received
-         self._service_config_path = project / f"{self._service_name}_server.json"
-
-         fd_read, fd_write = os.pipe()
-         self._comm_pipe = os.fdopen(fd_read)
-
-         env = os.environ.copy()
-         env["ERT_COMM_FD"] = str(fd_write)
-
-         SERVICE_CONF_PATHS.add(str(self._service_config_path))
-
-         # The process is waited for in _do_shutdown()
-         self._childproc = Popen(
-             self._exec_args,
-             pass_fds=(fd_write,),
-             env=env,
-             close_fds=True,
-         )
-         os.close(fd_write)
-
-     def run(self) -> None:
-         comm = self._read_connection_info_from_process(self._childproc)
-
-         if comm is None:
-             self._propagate_connection_info_from_childproc(TimeoutError())
-             return  # _read_conn_info() has already cleaned up in this case
-
-         conn_info: ErtServerConnectionInfo | Exception | None = None
-         try:
-             conn_info = json.loads(comm)
-         except json.JSONDecodeError:
-             conn_info = ServerBootFail()
-         except Exception as exc:
-             conn_info = exc
-
-         try:
-             self._propagate_connection_info_from_childproc(conn_info)
-
-             while True:
-                 if self._childproc.poll() is not None:
-                     break
-                 if self._shutdown.wait(1):
-                     self._do_shutdown()
-                     break
-
-         except Exception as e:
-             print(str(e))
-             self.logger.exception(e)
-
-         finally:
-             self._ensure_connection_info_file_is_deleted()
-
-     def shutdown(self) -> int:
-         """Shutdown the server."""
-         self._shutdown.set()
-         self.join()
-
-         return self._childproc.returncode
-
-     def _read_connection_info_from_process(self, proc: Popen[bytes]) -> str | None:
-         comm_buf = io.StringIO()
-         first_iter = True
-         while first_iter or proc.poll() is None:
-             first_iter = False
-             ready = select([self._comm_pipe], [], [], self._timeout)
-
-             # Timeout reached, exit with a failure
-             if ready == ([], [], []):
-                 self._do_shutdown()
-                 self._ensure_connection_info_file_is_deleted()
-                 return None
-
-             x = self._comm_pipe.read(PIPE_BUF)
-             if not x:  # EOF
-                 break
-             comm_buf.write(x)
-         return comm_buf.getvalue()
-
-     def _do_shutdown(self) -> None:
-         if self._childproc is None:
-             return
-         try:
-             self._childproc.terminate()
-             self._childproc.wait(10)  # Give it 10s to shut down cleanly..
-         except TimeoutExpired:
-             try:
-                 self._childproc.kill()  # ... then kick it harder...
-                 self._childproc.wait(self._timeout)  # ... and wait again
-             except TimeoutExpired:
-                 self.logger.error(
-                     f"waiting for child-process exceeded timeout {self._timeout}s"
-                 )
-
-     def _ensure_connection_info_file_is_deleted(self) -> None:
-         """
-         Ensure that the JSON connection information file is deleted
-         """
-         with contextlib.suppress(OSError):
-             if self._service_config_path.exists():
-                 self._service_config_path.unlink()
-
-     @property
-     def logger(self) -> logging.Logger:
-         return logging.getLogger("ert.shared.storage")
-
-
- _ERT_SERVER_CONNECTION_INFO_FILE = "storage_server.json"
- _ERT_SERVER_EXECUTABLE_FILE = str(Path(__file__).parent / "_storage_main.py")
-

  class ErtServerContext:
      def __init__(self, service: ErtServer) -> None:
@@ -234,7 +70,7 @@ class ErtServer:

          run_storage_main_cmd = [
              sys.executable,
-             _ERT_SERVER_EXECUTABLE_FILE,
+             str(Path(__file__).parent / "_storage_main.py"),
              "--project",
              storage_path,
          ]
@@ -255,7 +91,7 @@ class ErtServer:
          self._thread_that_starts_server_process = _Proc(
              service_name="storage",
              exec_args=run_storage_main_cmd,
-             timeout=timeout,
+             timeout=120,
              on_connection_info_received=self.on_connection_info_received_from_server_process,
              project=Path(self._storage_path),
          )
@@ -331,6 +167,21 @@ class ErtServer:
              "None of the URLs provided for the ert storage server worked."
          )

+     @classmethod
+     def session(cls, project: os.PathLike[str], timeout: int | None = None) -> Client:
+         """
+         Start a HTTP transaction with the server
+         """
+         inst = cls.connect(timeout=timeout, project=project)
+         info = inst.fetch_connection_info()
+         return Client(
+             conn_info=ErtClientConnectionInfo(
+                 base_url=inst.fetch_url(),
+                 auth_token=inst.fetch_auth()[1],
+                 cert=info["cert"],
+             )
+         )
+
      @property
      def logger(self) -> logging.Logger:
          return logging.getLogger("ert.shared.storage")
@@ -367,12 +218,12 @@ class ErtServer:
          timeout = 240
          t = -1
          while t < timeout:
-             storage_server_path = path / _ERT_SERVER_CONNECTION_INFO_FILE
+             storage_server_path = path / "storage_server.json"
              if (
                  storage_server_path.exists()
                  and storage_server_path.stat().st_size > 0
              ):
-                 with (path / _ERT_SERVER_CONNECTION_INFO_FILE).open() as f:
+                 with (path / "storage_server.json").open() as f:
                      storage_server_content = json.load(f)

                  return ErtServer(
@@ -426,9 +277,9 @@ class ErtServer:
          if self._storage_path is not None:
              if not Path(self._storage_path).exists():
                  raise RuntimeError(f"No storage exists at : {self._storage_path}")
-             path = f"{self._storage_path}/{_ERT_SERVER_CONNECTION_INFO_FILE}"
+             path = f"{self._storage_path}/storage_server.json"
          else:
-             path = _ERT_SERVER_CONNECTION_INFO_FILE
+             path = "storage_server.json"

          if isinstance(info, Mapping):
              with NamedTemporaryFile(dir=f"{self._storage_path}", delete=False) as f:
@@ -464,16 +315,3 @@ class ErtServer:
      def wait(self) -> None:
          if self._thread_that_starts_server_process is not None:
              self._thread_that_starts_server_process.join()
-
-
- def create_ertserver_client(project: Path, timeout: int | None = None) -> Client:
-     """Read connection info from file in path and create HTTP client."""
-     connection = ErtServer.connect(timeout=timeout, project=project)
-     info = connection.fetch_connection_info()
-     return Client(
-         conn_info=ErtClientConnectionInfo(
-             base_url=connection.fetch_url(),
-             auth_token=connection.fetch_auth()[1],
-             cert=info["cert"],
-         )
-     )
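Note: this diff removes the module-level create_ertserver_client() helper and replaces it with the ErtServer.session() classmethod added above. A minimal usage sketch, assuming ErtServer is importable from ert.services.ert_server and that the project directory already contains a storage_server.json written by the server process; the paths below are hypothetical:

# Hedged sketch, not part of the diff: connect to a running ert storage
# server via the new rc3 API. Import path and storage location are
# assumptions based on the file list above.
from pathlib import Path

from ert.services.ert_server import ErtServer

storage_path = Path("my_project/storage")  # hypothetical project directory

# rc1 style (removed in this diff):
#   client = create_ertserver_client(project=storage_path, timeout=60)

# rc3 style: session() wraps connect() + fetch_connection_info() and
# returns a ready-to-use dark_storage Client.
client = ErtServer.session(project=storage_path, timeout=60)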
ert/services/webviz_ert_service.py ADDED
@@ -0,0 +1,20 @@
+ import sys
+ from typing import Any
+
+ from ert.services._base_service import BaseService
+
+
+ class WebvizErt(BaseService):
+     service_name = "webviz-ert"
+
+     def __init__(self, **kwargs: Any) -> None:
+         exec_args = [sys.executable, "-m", "webviz_ert"]
+         if kwargs.get("experimental_mode"):
+             exec_args.append("--experimental-mode")
+         if kwargs.get("verbose"):
+             exec_args.append("--verbose")
+         exec_args.extend(["--title", str(kwargs.get("title"))])
+         project = kwargs.get("project")
+         exec_args.extend(["--project_identifier", str(project)])
+
+         super().__init__(exec_args, project=project)
ert/shared/storage/command.py ADDED
@@ -0,0 +1,38 @@
+ import os
+ from argparse import ArgumentParser
+
+
+ def add_parser_options(ap: ArgumentParser) -> None:
+     ap.add_argument(
+         "config",
+         type=str,
+         help=("ERT config file to start the server from "),
+         nargs="?",  # optional
+     )
+     ap.add_argument(
+         "--project",
+         "-p",
+         type=str,
+         help="Path to directory in which to create storage_server.json",
+         default=os.getcwd(),
+     )
+     ap.add_argument(
+         "--traceparent",
+         type=str,
+         help="Trace parent id to be used by the storage root span",
+         default=None,
+     )
+     ap.add_argument(
+         "--parent_pid",
+         type=int,
+         help="The parent process id",
+         default=os.getppid(),
+     )
+     ap.add_argument(
+         "--host", type=str, default=os.environ.get("ERT_STORAGE_HOST", "127.0.0.1")
+     )
+     ap.add_argument("--logging-config", type=str, default=None)
+     ap.add_argument(
+         "--verbose", action="store_true", help="Show verbose output.", default=False
+     )
+     ap.add_argument("--debug", action="store_true", default=False)
ert/shared/storage/extraction.py ADDED
@@ -0,0 +1,42 @@
+ from __future__ import annotations
+
+ from collections.abc import Mapping
+
+ from ert.config import GenKwConfig
+ from ert.storage import Experiment
+
+ _PRIOR_NAME_MAP = {
+     "NORMAL": "normal",
+     "LOGNORMAL": "lognormal",
+     "TRIANGULAR": "trig",
+     "TRUNCATED_NORMAL": "ert_truncnormal",
+     "CONST": "const",
+     "UNIFORM": "uniform",
+     "LOGUNIF": "loguniform",
+     "DUNIF": "ert_duniform",
+     "RAW": "stdnormal",
+     "ERRF": "ert_erf",
+     "DERRF": "ert_derf",
+ }
+
+
+ def create_priors(
+     experiment: Experiment,
+ ) -> Mapping[str, dict[str, str | float]]:
+     priors_dict = {}
+
+     for param in experiment.parameter_configuration.values():
+         if isinstance(param, GenKwConfig):
+             prior: dict[str, str | float] = {
+                 "function": _PRIOR_NAME_MAP[param.distribution.name.upper()],
+                 **param.distribution.model_dump(exclude={"name"}),
+             }
+             # webviz-ert expects some variables names
+             if param.distribution.name == "triangular":
+                 mapping = {"min": "_xmin", "max": "xmax", "mode": "xmode"}
+             else:
+                 mapping = {"min": "_min", "max": "_max"}
+             prior = {mapping.get(k, k): v for k, v in prior.items()}
+             priors_dict[f"{param.group}:{param.name}"] = prior
+
+     return priors_dict
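For reference, the renaming step in create_priors maps ert distribution names and min/max keys into the naming webviz-ert expects. An illustrative, self-contained sketch of just that transformation, using a plain dict in place of the real GenKwConfig distribution model (the helper name and sample values are hypothetical):

# Hedged sketch, not part of the package diff.
_NAME_MAP_EXCERPT = {"UNIFORM": "uniform", "TRIANGULAR": "trig"}  # excerpt of _PRIOR_NAME_MAP


def to_webviz_prior(name: str, params: dict[str, float]) -> dict[str, str | float]:
    # Same key-renaming logic as create_priors above.
    prior: dict[str, str | float] = {"function": _NAME_MAP_EXCERPT[name.upper()], **params}
    if name == "triangular":
        mapping = {"min": "_xmin", "max": "xmax", "mode": "xmode"}
    else:
        mapping = {"min": "_min", "max": "_max"}
    return {mapping.get(k, k): v for k, v in prior.items()}


# -> {'function': 'uniform', '_min': 0.0, '_max': 1.0}
print(to_webviz_prior("uniform", {"min": 0.0, "max": 1.0}))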
ert/shared/version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '19.0.0rc1'
- __version_tuple__ = version_tuple = (19, 0, 0, 'rc1')
+ __version__ = version = '19.0.0rc3'
+ __version_tuple__ = version_tuple = (19, 0, 0, 'rc3')

- __commit_id__ = commit_id = 'g6995ffc2f'
+ __commit_id__ = commit_id = 'g40bbd3ce0'
ert/storage/local_ensemble.py CHANGED
@@ -15,6 +15,7 @@ from typing import TYPE_CHECKING
  from uuid import UUID

  import numpy as np
+ import pandas as pd
  import polars as pl
  import resfo
  import xarray as xr
@@ -591,8 +592,8 @@ class LocalEnsemble(BaseMode):
              pl.col(col)
              .map_elements(
                  self.experiment.parameter_configuration[col].transform_data(),
+                 return_dtype=df[col].dtype,
              )
-             .cast(df[col].dtype)
              .alias(col)
              for col in df.columns
              if col != "realization"
@@ -1053,7 +1054,7 @@ class LocalEnsemble(BaseMode):
              pl.col(col).is_in(observed_values.implode())
          )

-         pivoted = responses.collect(engine="streaming").pivot(
+         pivoted = responses.collect(engine="streaming").pivot(  # noqa: PD010
              on="realization",
              index=["response_key", *response_cls.primary_key],
              values="values",
@@ -1178,6 +1179,98 @@ class LocalEnsemble(BaseMode):
              self._path / "index.json", self._index.model_dump_json().encode("utf-8")
          )

+     @property
+     def all_parameters_and_gen_data(self) -> pl.DataFrame | None:
+         """
+         Only for Everest wrt objectives/constraints,
+         disregards summary data and primary key values
+         """
+         param_dfs = []
+         for param_group in self.experiment.parameter_configuration:
+             params_pd = self.load_parameters(param_group)["values"].to_pandas()
+
+             assert isinstance(params_pd, pd.DataFrame)
+             params_pd = params_pd.reset_index()
+             param_df = pl.from_pandas(params_pd)
+
+             param_columns = [c for c in param_df.columns if c != "realizations"]
+             param_df = param_df.rename(
+                 {
+                     **{
+                         c: param_group + "." + c.replace("\0", ".")
+                         for c in param_columns
+                     },
+                     "realizations": "realization",
+                 }
+             )
+             param_df = param_df.cast(
+                 {
+                     "realization": pl.UInt16,
+                     **{c: pl.Float64 for c in param_df.columns if c != "realization"},
+                 }
+             )
+             param_dfs.append(param_df)
+
+         responses = self.load_responses(
+             "gen_data", tuple(self.get_realization_list_with_responses())
+         )
+
+         if responses is None:
+             return pl.concat(param_dfs)
+
+         params_wide = pl.concat(
+             [
+                 (
+                     pdf.sort("realization").drop("realization")
+                     if i > 0
+                     else pdf.sort("realization")
+                 )
+                 for i, pdf in enumerate(param_dfs)
+             ],
+             how="horizontal",
+         )
+
+         responses_wide = responses["realization", "response_key", "values"].pivot(  # noqa: PD010
+             on="response_key", values="values"
+         )
+
+         # If responses are missing for some realizations, this _left_ join will
+         # put null (polars) which maps to nan when doing .to_numpy() into the
+         # response columns for those realizations
+         params_and_responses = params_wide.join(
+             responses_wide, on="realization", how="left"
+         ).with_columns(pl.lit(self.iteration).alias("batch"))
+
+         assert self.everest_realization_info is not None
+
+         model_realization_mapping = {
+             k: v["model_realization"] for k, v in self.everest_realization_info.items()
+         }
+         perturbation_mapping = {
+             k: v["perturbation"] for k, v in self.everest_realization_info.items()
+         }
+
+         params_and_responses = params_and_responses.with_columns(
+             pl.col("realization")
+             .replace(model_realization_mapping)
+             .alias("model_realization"),
+             pl.col("realization")
+             .cast(pl.Int32)
+             .replace(perturbation_mapping)
+             .alias("perturbation"),
+         )
+
+         column_order = [
+             "batch",
+             "model_realization",
+             "perturbation",
+             "realization",
+             *[c for c in responses_wide.columns if c != "realization"],
+             *[c for c in params_wide.columns if c != "realization"],
+         ]
+
+         return params_and_responses[column_order]
+

  async def _read_parameters(
      run_path: str,
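The new all_parameters_and_gen_data property above pivots gen_data responses into wide form and left-joins them onto the parameter frame, so realizations without responses end up with null response columns (as the in-code comment notes). A self-contained sketch of that pivot + left-join pattern, with made-up data and column names:

# Hedged sketch, not part of the package diff.
import polars as pl

params_wide = pl.DataFrame({"realization": [0, 1, 2], "group.x": [0.1, 0.2, 0.3]})
responses = pl.DataFrame(
    {"realization": [0, 1], "response_key": ["obj", "obj"], "values": [1.0, 2.0]}
)

responses_wide = responses.pivot(on="response_key", values="values")
joined = params_wide.join(responses_wide, on="realization", how="left")
print(joined)  # realization 2 gets a null in the "obj" column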
ert/storage/local_experiment.py CHANGED
@@ -508,3 +508,19 @@ class LocalExperiment(BaseMode):

          if self.response_type_to_response_keys is not None:
              del self.response_type_to_response_keys
+
+     @property
+     def all_parameters_and_gen_data(self) -> pl.DataFrame | None:
+         if not self.ensembles:
+             return None
+
+         ensemble_dfs = [
+             e.all_parameters_and_gen_data
+             for e in self.ensembles
+             if e.all_parameters_and_gen_data is not None
+         ]
+
+         if not ensemble_dfs:
+             return None
+
+         return pl.concat(ensemble_dfs)
ert/storage/local_storage.py CHANGED
@@ -31,7 +31,7 @@ from .realization_storage_state import RealizationStorageState

  logger = logging.getLogger(__name__)

- _LOCAL_STORAGE_VERSION = 22
+ _LOCAL_STORAGE_VERSION = 21


  class _Migrations(BaseModel):
@@ -517,7 +517,6 @@ class LocalStorage(BaseMode):
              to19,
              to20,
              to21,
-             to22,
          )

          try:
@@ -568,7 +567,6 @@ class LocalStorage(BaseMode):
              18: to19,
              19: to20,
              20: to21,
-             21: to22,
          }
          for from_version in range(version, _LOCAL_STORAGE_VERSION):
              migrations[from_version].migrate(self.path)
ert/utils/__init__.py CHANGED
@@ -1,13 +1,9 @@
  import logging
  import time
  from collections.abc import Callable
- from datetime import UTC, datetime
  from functools import wraps
- from pathlib import Path
  from typing import ParamSpec, TypeVar

- from _ert.utils import file_safe_timestamp
-
  P = ParamSpec("P")
  R = TypeVar("R")

@@ -34,19 +30,3 @@ def log_duration(
          return wrapper

      return decorator
-
-
- def makedirs_if_needed(path: Path, roll_if_exists: bool = False) -> None:
-     if path.is_dir():
-         if not roll_if_exists:
-             return
-         _roll_dir(path)  # exists and should be rolled
-     path.mkdir(parents=True, exist_ok=False)
-
-
- def _roll_dir(old_name: Path) -> None:
-     old_name = old_name.resolve()
-     timestamp = file_safe_timestamp(datetime.now(UTC).isoformat())
-     new_name = f"{old_name}__{timestamp}"
-     old_name.rename(new_name)
-     logging.getLogger().info(f"renamed {old_name} to {new_name}")
{ert-19.0.0rc1.dist-info → ert-19.0.0rc3.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ert
- Version: 19.0.0rc1
+ Version: 19.0.0rc3
  Summary: Ensemble based Reservoir Tool (ERT)
  Author-email: Equinor ASA <fg_sib-scout@equinor.com>
  License-Expression: GPL-3.0-only
@@ -47,7 +47,7 @@ Requires-Dist: orjson
  Requires-Dist: packaging
  Requires-Dist: pandas
  Requires-Dist: pluggy>=1.3.0
- Requires-Dist: polars!=1.35,>=1.32.3
+ Requires-Dist: polars<1.35,>=1.32.3
  Requires-Dist: progressbar2
  Requires-Dist: psutil
  Requires-Dist: pyarrow