ert 18.0.9__py3-none-any.whl → 19.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. _ert/forward_model_runner/client.py +6 -2
  2. ert/__main__.py +20 -6
  3. ert/cli/main.py +7 -3
  4. ert/config/__init__.py +3 -4
  5. ert/config/_create_observation_dataframes.py +85 -59
  6. ert/config/_get_num_cpu.py +1 -1
  7. ert/config/_observations.py +106 -31
  8. ert/config/distribution.py +1 -1
  9. ert/config/ensemble_config.py +3 -3
  10. ert/config/ert_config.py +50 -0
  11. ert/config/{ext_param_config.py → everest_control.py} +8 -12
  12. ert/config/everest_response.py +3 -5
  13. ert/config/field.py +76 -14
  14. ert/config/forward_model_step.py +12 -9
  15. ert/config/gen_data_config.py +3 -4
  16. ert/config/gen_kw_config.py +2 -12
  17. ert/config/parameter_config.py +1 -16
  18. ert/config/parsing/_option_dict.py +10 -2
  19. ert/config/parsing/config_keywords.py +1 -0
  20. ert/config/parsing/config_schema.py +8 -0
  21. ert/config/parsing/config_schema_deprecations.py +3 -3
  22. ert/config/parsing/config_schema_item.py +12 -3
  23. ert/config/parsing/context_values.py +3 -3
  24. ert/config/parsing/file_context_token.py +1 -1
  25. ert/config/parsing/observations_parser.py +12 -2
  26. ert/config/parsing/queue_system.py +9 -0
  27. ert/config/queue_config.py +0 -1
  28. ert/config/response_config.py +0 -1
  29. ert/config/rft_config.py +78 -33
  30. ert/config/summary_config.py +1 -2
  31. ert/config/surface_config.py +59 -16
  32. ert/dark_storage/common.py +1 -1
  33. ert/dark_storage/compute/misfits.py +4 -1
  34. ert/dark_storage/endpoints/compute/misfits.py +4 -2
  35. ert/dark_storage/endpoints/experiment_server.py +12 -9
  36. ert/dark_storage/endpoints/experiments.py +2 -2
  37. ert/dark_storage/endpoints/observations.py +14 -4
  38. ert/dark_storage/endpoints/parameters.py +2 -18
  39. ert/dark_storage/endpoints/responses.py +10 -5
  40. ert/dark_storage/json_schema/experiment.py +1 -1
  41. ert/data/_measured_data.py +6 -5
  42. ert/ensemble_evaluator/config.py +2 -1
  43. ert/field_utils/field_utils.py +1 -1
  44. ert/field_utils/roff_io.py +1 -1
  45. ert/gui/__init__.py +5 -2
  46. ert/gui/ertnotifier.py +1 -1
  47. ert/gui/ertwidgets/pathchooser.py +0 -3
  48. ert/gui/ertwidgets/suggestor/suggestor.py +63 -30
  49. ert/gui/main.py +27 -5
  50. ert/gui/main_window.py +0 -5
  51. ert/gui/simulation/experiment_panel.py +12 -3
  52. ert/gui/simulation/run_dialog.py +2 -16
  53. ert/gui/tools/manage_experiments/export_dialog.py +136 -0
  54. ert/gui/tools/manage_experiments/storage_info_widget.py +133 -28
  55. ert/gui/tools/plot/plot_api.py +24 -15
  56. ert/gui/tools/plot/plot_widget.py +19 -4
  57. ert/gui/tools/plot/plot_window.py +35 -18
  58. ert/gui/tools/plot/plottery/plots/__init__.py +2 -0
  59. ert/gui/tools/plot/plottery/plots/cesp.py +3 -1
  60. ert/gui/tools/plot/plottery/plots/distribution.py +6 -1
  61. ert/gui/tools/plot/plottery/plots/ensemble.py +3 -1
  62. ert/gui/tools/plot/plottery/plots/gaussian_kde.py +12 -2
  63. ert/gui/tools/plot/plottery/plots/histogram.py +3 -1
  64. ert/gui/tools/plot/plottery/plots/misfits.py +436 -0
  65. ert/gui/tools/plot/plottery/plots/observations.py +18 -4
  66. ert/gui/tools/plot/plottery/plots/statistics.py +3 -1
  67. ert/gui/tools/plot/plottery/plots/std_dev.py +3 -1
  68. ert/plugins/hook_implementations/workflows/csv_export.py +2 -3
  69. ert/plugins/plugin_manager.py +4 -0
  70. ert/resources/forward_models/run_reservoirsimulator.py +8 -3
  71. ert/run_models/_create_run_path.py +3 -3
  72. ert/run_models/everest_run_model.py +13 -11
  73. ert/run_models/initial_ensemble_run_model.py +2 -2
  74. ert/run_models/run_model.py +9 -0
  75. ert/services/_base_service.py +6 -5
  76. ert/services/ert_server.py +4 -4
  77. ert/shared/_doc_utils/__init__.py +4 -2
  78. ert/shared/net_utils.py +43 -18
  79. ert/shared/version.py +3 -3
  80. ert/storage/__init__.py +2 -0
  81. ert/storage/local_ensemble.py +25 -8
  82. ert/storage/local_experiment.py +2 -2
  83. ert/storage/local_storage.py +45 -25
  84. ert/storage/migration/to11.py +1 -1
  85. ert/storage/migration/to18.py +0 -1
  86. ert/storage/migration/to19.py +34 -0
  87. ert/storage/migration/to20.py +23 -0
  88. ert/storage/migration/to21.py +25 -0
  89. ert/storage/migration/to22.py +18 -0
  90. ert/storage/migration/to23.py +49 -0
  91. ert/workflow_runner.py +2 -1
  92. {ert-18.0.9.dist-info → ert-19.0.0.dist-info}/METADATA +1 -1
  93. {ert-18.0.9.dist-info → ert-19.0.0.dist-info}/RECORD +111 -109
  94. {ert-18.0.9.dist-info → ert-19.0.0.dist-info}/WHEEL +1 -1
  95. everest/bin/everlint_script.py +0 -2
  96. everest/bin/utils.py +2 -1
  97. everest/bin/visualization_script.py +4 -11
  98. everest/config/control_config.py +4 -4
  99. everest/config/control_variable_config.py +2 -2
  100. everest/config/everest_config.py +9 -0
  101. everest/config/utils.py +2 -2
  102. everest/config/validation_utils.py +7 -1
  103. everest/config_file_loader.py +0 -2
  104. everest/detached/client.py +3 -3
  105. everest/everest_storage.py +0 -2
  106. everest/gui/everest_client.py +2 -2
  107. everest/optimizer/everest2ropt.py +4 -4
  108. everest/optimizer/opt_model_transforms.py +2 -2
  109. ert/config/violations.py +0 -0
  110. ert/gui/tools/export/__init__.py +0 -3
  111. ert/gui/tools/export/export_panel.py +0 -83
  112. ert/gui/tools/export/export_tool.py +0 -69
  113. ert/gui/tools/export/exporter.py +0 -36
  114. {ert-18.0.9.dist-info → ert-19.0.0.dist-info}/entry_points.txt +0 -0
  115. {ert-18.0.9.dist-info → ert-19.0.0.dist-info}/licenses/COPYING +0 -0
  116. {ert-18.0.9.dist-info → ert-19.0.0.dist-info}/top_level.txt +0 -0
@@ -31,8 +31,8 @@ from typing_extensions import TypedDict
31
31
 
32
32
  from ert.config import (
33
33
  EverestConstraintsConfig,
34
+ EverestControl,
34
35
  EverestObjectivesConfig,
35
- ExtParamConfig,
36
36
  GenDataConfig,
37
37
  HookRuntime,
38
38
  KnownQueueOptionsAdapter,
@@ -535,19 +535,19 @@ class EverestRunModel(RunModel, EverestRunModelConfig):
535
535
  )
536
536
 
537
537
  @property
538
- def ext_param_configs(self) -> list[ExtParamConfig]:
539
- ext_params = [
538
+ def _everest_control_configs(self) -> list[EverestControl]:
539
+ controls = [
540
540
  c for c in self.parameter_configuration if c.type == "everest_parameters"
541
541
  ]
542
542
 
543
- # There will and must always be one extparam config for an
543
+ # There will and must always be one EverestControl config for an
544
544
  # Everest optimization.
545
- return cast(list[ExtParamConfig], ext_params)
545
+ return cast(list[EverestControl], controls)
546
546
 
547
547
  @cached_property
548
548
  def _transforms(self) -> EverestOptModelTransforms:
549
549
  return get_optimization_domain_transforms(
550
- self.ext_param_configs,
550
+ self._everest_control_configs,
551
551
  self.objectives_config,
552
552
  self.input_constraints,
553
553
  self.output_constraints_config,
@@ -693,7 +693,9 @@ class EverestRunModel(RunModel, EverestRunModelConfig):
693
693
  )
694
694
 
695
695
  formatted_control_names = [
696
- name for config in self.ext_param_configs for name in config.input_keys
696
+ name
697
+ for config in self._everest_control_configs
698
+ for name in config.input_keys
697
699
  ]
698
700
  self._ever_storage.init(
699
701
  formatted_control_names=formatted_control_names,
@@ -762,7 +764,7 @@ class EverestRunModel(RunModel, EverestRunModelConfig):
762
764
 
763
765
  def _create_optimizer(self) -> tuple[BasicOptimizer, list[float]]:
764
766
  enopt_config, initial_guesses = everest2ropt(
765
- cast(list[ExtParamConfig], self.parameter_configuration),
767
+ cast(list[EverestControl], self.parameter_configuration),
766
768
  self.objectives_config,
767
769
  self.input_constraints,
768
770
  self.output_constraints_config,
@@ -832,13 +834,13 @@ class EverestRunModel(RunModel, EverestRunModelConfig):
832
834
  for sim_id in range(sim_to_control_vector.shape[0]):
833
835
  sim_controls = sim_to_control_vector[sim_id]
834
836
  offset = 0
835
- for ext_param_config in self.ext_param_configs:
836
- n_param_keys = len(ext_param_config.parameter_keys)
837
+ for control_config in self._everest_control_configs:
838
+ n_param_keys = len(control_config.parameter_keys)
837
839
 
838
840
  # Save controls to ensemble
839
841
  ensemble.save_parameters_numpy(
840
842
  sim_controls[offset : (offset + n_param_keys)].reshape(-1, 1),
841
- ext_param_config.name,
843
+ control_config.name,
842
844
  np.array([sim_id]),
843
845
  )
844
846
  offset += n_param_keys
@@ -6,7 +6,7 @@ from polars.datatypes import DataTypeClass
6
6
  from pydantic import BaseModel, Field, field_validator
7
7
 
8
8
  from ert.config import (
9
- ExtParamConfig,
9
+ EverestControl,
10
10
  GenKwConfig,
11
11
  KnownResponseTypes,
12
12
  SurfaceConfig,
@@ -56,7 +56,7 @@ class InitialEnsembleRunModelConfig(RunModelConfig):
56
56
  design_matrix: DictEncodedDataFrame | None
57
57
  parameter_configuration: list[
58
58
  Annotated[
59
- (GenKwConfig | SurfaceConfig | FieldConfig | ExtParamConfig),
59
+ (GenKwConfig | SurfaceConfig | FieldConfig | EverestControl),
60
60
  Field(discriminator="type"),
61
61
  ]
62
62
  ]
@@ -63,6 +63,7 @@ from ert.mode_definitions import MODULE_MODE
63
63
  from ert.runpaths import Runpaths
64
64
  from ert.storage import (
65
65
  Ensemble,
66
+ LocalStorage,
66
67
  Storage,
67
68
  open_storage,
68
69
  )
@@ -208,6 +209,10 @@ class RunModel(RunModelConfig, ABC):
208
209
  def model_post_init(self, ctx: Any) -> None:
209
210
  self._initial_realizations_mask = self.active_realizations.copy()
210
211
  self._completed_realizations_mask = [False] * len(self.active_realizations)
212
+
213
+ if LocalStorage.check_migration_needed(Path(self.storage_path)):
214
+ LocalStorage.perform_migration(Path(self.storage_path))
215
+
211
216
  self._storage = open_storage(self.storage_path, mode="w")
212
217
  self._rng = np.random.default_rng(self.random_seed)
213
218
  self._start_iteration = self.start_iteration
@@ -813,6 +818,10 @@ class RunModel(RunModelConfig, ABC):
813
818
  if self._max_parallelism_violation.amount > 0 and isinstance(
814
819
  self._max_parallelism_violation.message, str
815
820
  ):
821
+ logger.info(
822
+ "Warning displayed to user for NUM_CPU misconfiguration:\n"
823
+ f"{self._max_parallelism_violation.message}"
824
+ )
816
825
  warnings.warn(
817
826
  self._max_parallelism_violation.message,
818
827
  PostSimulationWarning,
@@ -12,6 +12,7 @@ import os
12
12
  import signal
13
13
  import sys
14
14
  import threading
15
+ import types
15
16
  from collections.abc import Callable, Mapping, Sequence
16
17
  from logging import Logger, getLogger
17
18
  from pathlib import Path
@@ -23,7 +24,7 @@ from types import FrameType
23
24
  from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, TypeVar
24
25
 
25
26
  if TYPE_CHECKING:
26
- from inspect import Traceback
27
+ pass
27
28
 
28
29
  T = TypeVar("T", bound="BaseService")
29
30
 
@@ -87,9 +88,9 @@ class _Context(Generic[T]):
87
88
 
88
89
  def __exit__(
89
90
  self,
90
- exc_type: type[BaseException],
91
- exc_value: BaseException,
92
- traceback: Traceback,
91
+ exc_type: type[BaseException] | None,
92
+ exc_value: BaseException | None,
93
+ traceback: types.TracebackType | None,
93
94
  ) -> bool:
94
95
  self._service.shutdown()
95
96
  return exc_type is None
@@ -277,7 +278,7 @@ class BaseService:
277
278
  )
278
279
 
279
280
  @classmethod
280
- def start_server(cls: type[T], *args: Any, **kwargs: Any) -> _Context[T]:
281
+ def start_server(cls, *args: Any, **kwargs: Any) -> _Context[Self]:
281
282
  if cls._instance is not None:
282
283
  raise RuntimeError("Server already running")
283
284
  cls._instance = obj = cls(*args, **kwargs)
@@ -5,8 +5,8 @@ import logging
5
5
  import os
6
6
  import sys
7
7
  import threading
8
+ import types
8
9
  from collections.abc import Mapping
9
- from inspect import Traceback
10
10
  from pathlib import Path
11
11
  from tempfile import NamedTemporaryFile
12
12
  from time import sleep
@@ -28,9 +28,9 @@ class ErtServerContext:
28
28
 
29
29
  def __exit__(
30
30
  self,
31
- exc_type: type[BaseException],
32
- exc_value: BaseException,
33
- traceback: Traceback,
31
+ exc_type: type[BaseException] | None,
32
+ exc_value: BaseException | None,
33
+ traceback: types.TracebackType | None,
34
34
  ) -> bool:
35
35
  self._service.shutdown()
36
36
  return exc_type is None
@@ -8,12 +8,14 @@ from docutils.statemachine import StringList
8
8
  from sphinx.util.nodes import nested_parse_with_titles
9
9
 
10
10
 
11
- def _parse_raw_rst(rst_string: str, node: nodes.Node, state: Any) -> None:
11
+ def _parse_raw_rst(rst_string: str, node: nodes.Element, state: Any) -> None:
12
12
  string_list = docutils.statemachine.StringList(list(rst_string.split("\n")))
13
13
  _parse_string_list(string_list, node, state)
14
14
 
15
15
 
16
- def _parse_string_list(string_list: StringList, node: nodes.Node, state: Any) -> None:
16
+ def _parse_string_list(
17
+ string_list: StringList, node: nodes.Element, state: Any
18
+ ) -> None:
17
19
  nested_parse_with_titles(state, string_list, node)
18
20
 
19
21
 
ert/shared/net_utils.py CHANGED
@@ -1,8 +1,10 @@
1
+ import ipaddress
1
2
  import logging
2
3
  import random
3
4
  import socket
4
5
  from functools import lru_cache
5
6
 
7
+ import psutil
6
8
  from dns import exception, resolver, reversename
7
9
 
8
10
 
@@ -50,6 +52,7 @@ def get_machine_name() -> str:
50
52
  def find_available_socket(
51
53
  host: str | None = None,
52
54
  port_range: range = range(51820, 51840 + 1),
55
+ prioritize_private_ip_address: bool = False,
53
56
  ) -> socket.socket:
54
57
  """
55
58
  The default and recommended approach here is to return a bound socket to the
@@ -71,7 +74,9 @@ def find_available_socket(
71
74
 
72
75
  See e.g. implementation and comments in EvaluatorServerConfig
73
76
  """
74
- current_host = host if host is not None else get_ip_address()
77
+ current_host = (
78
+ host if host is not None else get_ip_address(prioritize_private_ip_address)
79
+ )
75
80
 
76
81
  if port_range.start == port_range.stop:
77
82
  ports = list(range(port_range.start, port_range.stop + 1))
@@ -135,20 +140,40 @@ def get_family(host: str) -> socket.AddressFamily:
135
140
  return socket.AF_INET6
136
141
 
137
142
 
138
- # See https://stackoverflow.com/a/28950776
139
- def get_ip_address() -> str:
140
- try:
141
- s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
142
- try:
143
- s.settimeout(0)
144
- # try pinging a reserved, internal address in order
145
- # to determine IP representing the default route
146
- s.connect(("10.255.255.255", 1))
147
- address = s.getsockname()[0]
148
- finally:
149
- s.close()
150
- except BaseException:
151
- logger.warning("Cannot determine ip-address. Falling back to localhost.")
152
- address = "127.0.0.1"
153
- logger.debug(f"ip-address: {address}")
154
- return address
143
+ def get_ip_address(prioritize_private: bool = False) -> str:
144
+ """
145
+ Get the first (private or public) IPv4 address of the current machine on the LAN.
146
+ Default behaviour returns the first public IP if found, then private, then loopback.
147
+
148
+ Parameters:
149
+ prioritize_private (bool): If True, private IP addresses are prioritized
150
+
151
+ Returns:
152
+ str: The selected IP address as a string.
153
+ """
154
+ loopback = ""
155
+ public = ""
156
+ private = ""
157
+ interfaces = psutil.net_if_addrs()
158
+ for addresses in interfaces.values():
159
+ for address in addresses:
160
+ if address.family.name == "AF_INET":
161
+ ip = address.address
162
+ if ipaddress.ip_address(ip).is_loopback and not loopback:
163
+ loopback = ip
164
+ elif ipaddress.ip_address(ip).is_private and not private:
165
+ private = ip
166
+ elif not public:
167
+ public = ip
168
+
169
+ # Select first non-empty value, based on prioritization
170
+ if prioritize_private:
171
+ selected_ip = private or public or loopback
172
+ else:
173
+ selected_ip = public or private or loopback
174
+
175
+ if selected_ip:
176
+ return selected_ip
177
+ else:
178
+ logger.warning("Cannot determine ip-address. Falling back to 127.0.0.1")
179
+ return "127.0.0.1"
ert/shared/version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '18.0.9'
32
- __version_tuple__ = version_tuple = (18, 0, 9)
31
+ __version__ = version = '19.0.0'
32
+ __version_tuple__ = version_tuple = (19, 0, 0)
33
33
 
34
- __commit_id__ = commit_id = 'ga202455d4'
34
+ __commit_id__ = commit_id = 'g2518c4485'
ert/storage/__init__.py CHANGED
@@ -39,6 +39,8 @@ class ErtStoragePermissionError(ErtStorageException):
39
39
  def open_storage(
40
40
  path: str | os.PathLike[str], mode: ModeLiteral | Mode = "r"
41
41
  ) -> Storage:
42
+ _ = LocalStorage.check_migration_needed(Path(path))
43
+
42
44
  try:
43
45
  return LocalStorage(Path(path), Mode(mode))
44
46
  except PermissionError as err:
@@ -562,12 +562,18 @@ class LocalEnsemble(BaseMode):
562
562
  df = pl.scan_parquet(group_path)
563
563
  return df
564
564
 
565
- def _load_scalar_keys(
565
+ def load_scalar_keys(
566
566
  self,
567
- keys: list[str],
567
+ keys: list[str] | None = None,
568
568
  realizations: int | npt.NDArray[np.int_] | None = None,
569
569
  transformed: bool = False,
570
570
  ) -> pl.DataFrame:
571
+ if keys is None:
572
+ keys = self.experiment.parameter_keys
573
+ elif set(keys) - set(self.experiment.parameter_keys):
574
+ missing = set(keys) - set(self.experiment.parameter_keys)
575
+ raise KeyError(f"Parameters not registered to the experiment: {missing}")
576
+
571
577
  df_lazy = self._load_parameters_lazy(SCALAR_FILENAME)
572
578
  df_lazy = df_lazy.select(["realization", *keys])
573
579
  if realizations is not None:
@@ -620,7 +626,7 @@ class LocalEnsemble(BaseMode):
620
626
  cardinality = next(cfg.cardinality for cfg in cfgs)
621
627
 
622
628
  if cardinality == ParameterCardinality.multiple_configs_per_ensemble_dataset:
623
- return self._load_scalar_keys(
629
+ return self.load_scalar_keys(
624
630
  [cfg.name for cfg in cfgs], realizations, transformed
625
631
  )
626
632
  return self._load_dataset(
@@ -647,7 +653,7 @@ class LocalEnsemble(BaseMode):
647
653
  ]
648
654
  if keys:
649
655
  return (
650
- self._load_scalar_keys(keys, realizations)
656
+ self.load_scalar_keys(keys, realizations)
651
657
  .drop("realization")
652
658
  .to_numpy()
653
659
  .T.copy()
@@ -673,6 +679,9 @@ class LocalEnsemble(BaseMode):
673
679
  if complete_df is None:
674
680
  complete_df = ds
675
681
  else:
682
+ complete_df = complete_df.drop(
683
+ [c for c in ds.columns if c != "realization"], strict=False
684
+ )
676
685
  complete_df = (
677
686
  complete_df.join(ds, on="realization", how="left")
678
687
  .unique(subset=["realization"], keep="first")
@@ -947,7 +956,7 @@ class LocalEnsemble(BaseMode):
947
956
  data = self.load_parameters(parameter_group)
948
957
  if isinstance(data, pl.DataFrame):
949
958
  return data.drop("realization").std().to_numpy().reshape(-1)
950
- return data.std("realizations")["values"].values
959
+ return data.std("realizations")["values"].to_numpy()
951
960
 
952
961
  def get_parameter_state(
953
962
  self, realization: int
@@ -1048,7 +1057,7 @@ class LocalEnsemble(BaseMode):
1048
1057
  pl.col(col).is_in(observed_values.implode())
1049
1058
  )
1050
1059
 
1051
- pivoted = responses.collect(engine="streaming").pivot(
1060
+ pivoted = responses.collect(engine="streaming").pivot( # noqa: PD010
1052
1061
  on="realization",
1053
1062
  index=["response_key", *response_cls.primary_key],
1054
1063
  values="values",
@@ -1090,6 +1099,11 @@ class LocalEnsemble(BaseMode):
1090
1099
  on=["response_key", *response_cls.primary_key],
1091
1100
  )
1092
1101
 
1102
+ # Do not drop primary keys which
1103
+ # overlap with localization attributes
1104
+ primary_keys_to_drop = set(response_cls.primary_key).difference(
1105
+ {"north", "east", "radius"}
1106
+ )
1093
1107
  joined = (
1094
1108
  joined.with_columns(
1095
1109
  pl.concat_str(
@@ -1099,7 +1113,7 @@ class LocalEnsemble(BaseMode):
1099
1113
  # Avoid potential collisions w/ primary key
1100
1114
  )
1101
1115
  )
1102
- .drop(response_cls.primary_key)
1116
+ .drop(primary_keys_to_drop)
1103
1117
  .rename({"__tmp_index_key__": "index"})
1104
1118
  )
1105
1119
 
@@ -1115,6 +1129,9 @@ class LocalEnsemble(BaseMode):
1115
1129
  "observation_key",
1116
1130
  "observations",
1117
1131
  "std",
1132
+ "east",
1133
+ "north",
1134
+ "radius",
1118
1135
  ]
1119
1136
  )
1120
1137
 
@@ -1224,7 +1241,7 @@ class LocalEnsemble(BaseMode):
1224
1241
  how="horizontal",
1225
1242
  )
1226
1243
 
1227
- responses_wide = responses["realization", "response_key", "values"].pivot(
1244
+ responses_wide = responses["realization", "response_key", "values"].pivot( # noqa: PD010
1228
1245
  on="response_key", values="values"
1229
1246
  )
1230
1247
 
@@ -16,7 +16,7 @@ from pydantic import BaseModel, Field, TypeAdapter
16
16
  from surfio import IrapSurface
17
17
 
18
18
  from ert.config import (
19
- ExtParamConfig,
19
+ EverestControl,
20
20
  GenKwConfig,
21
21
  KnownResponseTypes,
22
22
  ParameterConfig,
@@ -66,7 +66,7 @@ _responses_adapter = TypeAdapter( # type: ignore
66
66
 
67
67
  _parameters_adapter = TypeAdapter( # type: ignore
68
68
  Annotated[
69
- (GenKwConfig | SurfaceConfig | FieldConfig | ExtParamConfig),
69
+ (GenKwConfig | SurfaceConfig | FieldConfig | EverestControl),
70
70
  Field(discriminator="type"),
71
71
  ]
72
72
  )
@@ -5,14 +5,14 @@ import logging
5
5
  import os
6
6
  import re
7
7
  import shutil
8
+ import types
8
9
  from collections.abc import Generator, MutableSequence
9
10
  from datetime import datetime
10
11
  from functools import cached_property
11
12
  from pathlib import Path
12
13
  from tempfile import NamedTemporaryFile
13
14
  from textwrap import dedent
14
- from types import TracebackType
15
- from typing import Any
15
+ from typing import Any, Self
16
16
  from uuid import UUID, uuid4
17
17
 
18
18
  import polars as pl
@@ -20,6 +20,7 @@ import xarray as xr
20
20
  from filelock import FileLock, Timeout
21
21
  from pydantic import BaseModel, Field
22
22
 
23
+ import ert.storage
23
24
  from ert.config import ErtConfig, ParameterConfig, ResponseConfig
24
25
  from ert.shared import __version__
25
26
 
@@ -30,7 +31,7 @@ from .realization_storage_state import RealizationStorageState
30
31
 
31
32
  logger = logging.getLogger(__name__)
32
33
 
33
- _LOCAL_STORAGE_VERSION = 18
34
+ _LOCAL_STORAGE_VERSION = 23
34
35
 
35
36
 
36
37
  class _Migrations(BaseModel):
@@ -64,6 +65,7 @@ class LocalStorage(BaseMode):
64
65
  self,
65
66
  path: str | os.PathLike[str],
66
67
  mode: Mode,
68
+ stage_for_migration: bool = False,
67
69
  ) -> None:
68
70
  """
69
71
  Initializes the LocalStorage instance.
@@ -74,6 +76,8 @@ class LocalStorage(BaseMode):
74
76
  The file system path to the storage.
75
77
  mode : Mode
76
78
  The access mode for the storage (read/write).
79
+ stage_for_migration : bool
80
+ Whether to avoid reloading storage to allow migration
77
81
  """
78
82
 
79
83
  self.path = Path(path).absolute()
@@ -82,9 +86,9 @@ class LocalStorage(BaseMode):
82
86
  if mode.can_write:
83
87
  self._acquire_lock()
84
88
 
85
- self._experiments: dict[UUID, LocalExperiment]
86
- self._ensembles: dict[UUID, LocalEnsemble]
87
- self._index: _Index
89
+ self._experiments: dict[UUID, LocalExperiment] = {}
90
+ self._ensembles: dict[UUID, LocalEnsemble] = {}
91
+ self._index: _Index = _Index()
88
92
 
89
93
  try:
90
94
  self.version = _storage_version(self.path)
@@ -101,19 +105,18 @@ class LocalStorage(BaseMode):
101
105
  raise ValueError(f"No index.json, but found: {errors}") from err
102
106
  self.version = _LOCAL_STORAGE_VERSION
103
107
 
104
- if self.check_migration_needed(Path(self.path)):
105
- if not self.can_write:
106
- raise RuntimeError(
107
- f"Cannot open storage '{self.path}' in read-only mode: "
108
- f"Storage version {self.version} is too old. "
109
- f"Run ert to initiate migration."
110
- )
111
- else:
112
- self._migrate(self.version)
108
+ if self.check_migration_needed(Path(self.path)) and not self.can_write:
109
+ raise RuntimeError(
110
+ f"Cannot open storage '{self.path}' in read-only mode: "
111
+ f"Storage version {self.version} is too old. "
112
+ f"Run ert to initiate migration."
113
+ )
113
114
 
114
- self.refresh()
115
- if mode.can_write:
116
- self._save_index()
115
+ if not stage_for_migration:
116
+ self.reload()
117
+
118
+ if mode.can_write:
119
+ self._save_index()
117
120
 
118
121
  @staticmethod
119
122
  def check_migration_needed(storage_dir: Path) -> bool:
@@ -123,7 +126,7 @@ class LocalStorage(BaseMode):
123
126
  version = _LOCAL_STORAGE_VERSION
124
127
 
125
128
  if version > _LOCAL_STORAGE_VERSION:
126
- raise RuntimeError(
129
+ raise ert.storage.ErtStorageException(
127
130
  f"Cannot open storage '{storage_dir.absolute()}': Storage version "
128
131
  f"{version} is newer than the current version {_LOCAL_STORAGE_VERSION}"
129
132
  f", upgrade ert to continue, or run with a different ENSPATH"
@@ -131,7 +134,14 @@ class LocalStorage(BaseMode):
131
134
 
132
135
  return version < _LOCAL_STORAGE_VERSION
133
136
 
134
- def refresh(self) -> None:
137
+ @staticmethod
138
+ def perform_migration(path: Path) -> None:
139
+ if LocalStorage.check_migration_needed(path):
140
+ with LocalStorage(path, Mode("w"), True) as storage:
141
+ storage._migrate(storage.version)
142
+ storage.reload()
143
+
144
+ def reload(self) -> None:
135
145
  """
136
146
  Reloads the index, experiments, and ensembles from the storage.
137
147
 
@@ -267,14 +277,14 @@ class LocalStorage(BaseMode):
267
277
  def _swap_path(self) -> Path:
268
278
  return self.path / self.SWAP_PATH
269
279
 
270
- def __enter__(self) -> LocalStorage:
280
+ def __enter__(self) -> Self:
271
281
  return self
272
282
 
273
283
  def __exit__(
274
284
  self,
275
- exception: Exception,
276
- exception_type: type[Exception],
277
- traceback: TracebackType,
285
+ exception: type[BaseException] | None,
286
+ exception_type: BaseException | None,
287
+ traceback: types.TracebackType | None,
278
288
  ) -> None:
279
289
  self.close()
280
290
 
@@ -504,6 +514,11 @@ class LocalStorage(BaseMode):
504
514
  to16,
505
515
  to17,
506
516
  to18,
517
+ to19,
518
+ to20,
519
+ to21,
520
+ to22,
521
+ to23,
507
522
  )
508
523
 
509
524
  try:
@@ -533,7 +548,7 @@ class LocalStorage(BaseMode):
533
548
 
534
549
  self._index = self._load_index()
535
550
 
536
- logger.info("storage backed up for version less than 6")
551
+ logger.info("Storage backed up for version less than 5")
537
552
  print(self._legacy_storage_migration_message(bkup_path, "14.6.*"))
538
553
  return None
539
554
  elif version < _LOCAL_STORAGE_VERSION:
@@ -551,6 +566,11 @@ class LocalStorage(BaseMode):
551
566
  15: to16,
552
567
  16: to17,
553
568
  17: to18,
569
+ 18: to19,
570
+ 19: to20,
571
+ 20: to21,
572
+ 21: to22,
573
+ 22: to23,
554
574
  }
555
575
  for from_version in range(version, _LOCAL_STORAGE_VERSION):
556
576
  migrations[from_version].migrate(self.path)
@@ -44,7 +44,7 @@ def migrate(path: Path) -> None:
44
44
  array = ds.isel(realizations=0, drop=True)["values"]
45
45
  realization = int(real_dir.name.split("-")[1])
46
46
 
47
- def parse_value(value: float | int | str) -> float | int | str:
47
+ def parse_value(value: float | str) -> float | int | str:
48
48
  if isinstance(value, float | int):
49
49
  return value
50
50
  try:
@@ -4,7 +4,6 @@ info = (
4
4
  "Added localization attributes to summary observations."
5
5
  "Added RFT observations."
6
6
  "No change to current storage, only additions. "
7
- "Bumping to 17 to indicate appended schema in storage."
8
7
  )
9
8
 
10
9
 
@@ -0,0 +1,34 @@
1
+ import json
2
+ from pathlib import Path
3
+ from typing import Any
4
+
5
+ info = "Add dimensionality attribute to parameters"
6
+
7
+
8
+ def migrate_param(parameters_json: dict[str, Any]) -> dict[str, Any]:
9
+ new_configs = {}
10
+ for param_config in parameters_json.values():
11
+ if param_config["type"] == "surface":
12
+ param_config["dimensionality"] = 2
13
+ elif param_config["type"] == "field":
14
+ param_config["dimensionality"] = 3
15
+ else:
16
+ param_config["dimensionality"] = 1
17
+
18
+ new_configs[param_config["name"]] = param_config
19
+ return new_configs
20
+
21
+
22
+ def migrate_parameters_for_experiment(experiment: Path) -> None:
23
+ with open(experiment / "parameter.json", encoding="utf-8") as fin:
24
+ parameters_json = json.load(fin)
25
+
26
+ new_parameter_configs = migrate_param(parameters_json)
27
+ Path(experiment / "parameter.json").write_text(
28
+ json.dumps(new_parameter_configs, indent=2), encoding="utf-8"
29
+ )
30
+
31
+
32
+ def migrate(path: Path) -> None:
33
+ for experiment in path.glob("experiments/*"):
34
+ migrate_parameters_for_experiment(experiment)