ert 19.0.0__py3-none-any.whl → 19.0.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ert/__main__.py +94 -63
- ert/analysis/_es_update.py +11 -14
- ert/config/_create_observation_dataframes.py +12 -228
- ert/config/_observations.py +164 -152
- ert/config/_read_summary.py +4 -5
- ert/config/ert_config.py +1 -56
- ert/config/observation_config_migrations.py +793 -0
- ert/config/rft_config.py +1 -1
- ert/dark_storage/compute/misfits.py +0 -42
- ert/dark_storage/endpoints/__init__.py +0 -2
- ert/dark_storage/endpoints/experiments.py +0 -3
- ert/dark_storage/json_schema/experiment.py +0 -1
- ert/field_utils/grdecl_io.py +9 -26
- ert/gui/main_window.py +2 -0
- ert/gui/tools/manage_experiments/export_dialog.py +4 -0
- ert/gui/tools/manage_experiments/storage_info_widget.py +1 -5
- ert/gui/tools/plot/plot_api.py +10 -10
- ert/gui/tools/plot/plot_widget.py +12 -14
- ert/gui/tools/plot/plot_window.py +1 -10
- ert/services/__init__.py +7 -3
- ert/services/_storage_main.py +59 -22
- ert/services/ert_server.py +186 -24
- ert/shared/version.py +3 -3
- ert/storage/local_ensemble.py +3 -107
- ert/storage/local_experiment.py +0 -16
- ert/storage/local_storage.py +1 -3
- ert/utils/__init__.py +20 -0
- {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/METADATA +2 -2
- {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/RECORD +40 -47
- {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/WHEEL +1 -1
- everest/bin/everest_script.py +5 -5
- everest/bin/kill_script.py +2 -2
- everest/bin/monitor_script.py +2 -2
- everest/bin/utils.py +4 -4
- everest/detached/everserver.py +6 -6
- everest/gui/main_window.py +2 -2
- everest/util/__init__.py +1 -19
- ert/dark_storage/compute/__init__.py +0 -0
- ert/dark_storage/endpoints/compute/__init__.py +0 -0
- ert/dark_storage/endpoints/compute/misfits.py +0 -95
- ert/services/_base_service.py +0 -387
- ert/services/webviz_ert_service.py +0 -20
- ert/shared/storage/command.py +0 -38
- ert/shared/storage/extraction.py +0 -42
- ert/storage/migration/to23.py +0 -49
- {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/entry_points.txt +0 -0
- {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/licenses/COPYING +0 -0
- {ert-19.0.0.dist-info → ert-19.0.0rc1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,793 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from datetime import datetime, timedelta
|
|
6
|
+
from itertools import starmap
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, Self, cast
|
|
9
|
+
|
|
10
|
+
import numpy as np
|
|
11
|
+
import polars as pl
|
|
12
|
+
from resfo_utilities import history_key
|
|
13
|
+
|
|
14
|
+
from ._create_observation_dataframes import (
|
|
15
|
+
_handle_error_mode,
|
|
16
|
+
_parse_date,
|
|
17
|
+
)
|
|
18
|
+
from ._observations import (
|
|
19
|
+
ErrorModes,
|
|
20
|
+
ObservationError,
|
|
21
|
+
_missing_value_error,
|
|
22
|
+
_unknown_key_error,
|
|
23
|
+
validate_error_mode,
|
|
24
|
+
validate_float,
|
|
25
|
+
validate_int,
|
|
26
|
+
validate_positive_float,
|
|
27
|
+
validate_positive_int,
|
|
28
|
+
)
|
|
29
|
+
from .ert_config import ErtConfig, logger
|
|
30
|
+
from .parsing import (
|
|
31
|
+
HistorySource,
|
|
32
|
+
ObservationConfigError,
|
|
33
|
+
ObservationDict,
|
|
34
|
+
read_file,
|
|
35
|
+
)
|
|
36
|
+
from .parsing.config_errors import ConfigValidationError, ConfigWarning
|
|
37
|
+
from .refcase import Refcase
|
|
38
|
+
|
|
39
|
+
DEFAULT_TIME_DELTA = timedelta(seconds=30)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
class Segment(ObservationError):
    """A SEGMENT sub-declaration of a HISTORY_OBSERVATION.

    Overrides the observation's error settings (``error``, ``error_min``,
    ``error_mode`` — inherited from ``ObservationError`` and filled in by
    ``_validate_segment_dict``) for the report-step range [start, stop].
    """

    name: str
    # Report-step indices delimiting the segment, from the START/STOP
    # keywords; bounds are truncated later in _handle_history_observation.
    start: int
    stop: int
48
|
+
|
|
49
|
+
def _validate_segment_dict(name_token: str, inp: dict[str, Any]) -> Segment:
    """Validate the keyword dict of a SEGMENT sub-declaration.

    START and STOP are mandatory; ERROR, ERROR_MIN and ERROR_MODE default
    to 0.1, 0.1 and RELMIN respectively.  Raises on unknown keywords and
    on missing START/STOP.
    """
    bounds: dict[str, int | None] = {"START": None, "STOP": None}
    mode = ErrorModes.RELMIN
    err = 0.1
    err_min = 0.1

    for keyword, raw_value in inp.items():
        if keyword in ("START", "STOP"):
            bounds[keyword] = validate_int(raw_value, keyword)
        elif keyword == "ERROR":
            err = validate_positive_float(raw_value, keyword)
        elif keyword == "ERROR_MIN":
            err_min = validate_positive_float(raw_value, keyword)
        elif keyword == "ERROR_MODE":
            mode = validate_error_mode(raw_value)
        else:
            raise _unknown_key_error(keyword, name_token)

    # START is reported before STOP, matching the keyword order above.
    for required in ("START", "STOP"):
        if bounds[required] is None:
            raise _missing_value_error(name_token, required)

    return Segment(
        name=name_token,
        start=bounds["START"],
        stop=bounds["STOP"],
        error_mode=mode,
        error=err,
        error_min=err_min,
    )
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@dataclass
class HistoryObservation(ObservationError):
    """Parsed HISTORY_OBSERVATION declaration.

    Error settings (error, error_min, error_mode) come from the
    ObservationError base; per-range overrides live in ``segments``.
    """

    name: str
    segments: list[Segment]

    @property
    def key(self) -> str:
        """The :term:`summary key` to be fetched from :ref:`refcase`."""
        # For history observations the key is also the name, ie.
        # "HISTORY_OBSERVATION FOPR" means to add the values from
        # the summary vector FOPRH in refcase as observations.
        return self.name

    @classmethod
    def from_obs_dict(
        cls, directory: str, observation_dict: ObservationDict
    ) -> list[Self]:
        """Build a HistoryObservation from a parsed observation dict.

        ``directory`` is unused here; it is kept so all observation
        classes share the same ``from_obs_dict`` signature.
        Raises on unknown keywords.  Defaults: ERROR=0.1, ERROR_MIN=0.1,
        ERROR_MODE=RELMIN.
        """
        error_mode = ErrorModes.RELMIN
        error = 0.1
        error_min = 0.1
        segments = []
        for key, value in observation_dict.items():
            match key:
                case "type" | "name":
                    pass
                case "ERROR":
                    error = validate_positive_float(value, key)
                case "ERROR_MIN":
                    error_min = validate_positive_float(value, key)
                case "ERROR_MODE":
                    error_mode = validate_error_mode(value)
                case "segments":
                    # value is an iterable of (name, dict) pairs.
                    segments = list(starmap(_validate_segment_dict, value))
                case _:
                    raise _unknown_key_error(str(key), observation_dict["name"])

        # A single declaration always yields exactly one observation; a
        # list is returned for interface symmetry with the other classes.
        instance = cls(
            name=observation_dict["name"],
            error_mode=error_mode,
            error=error,
            error_min=error_min,
            segments=segments,
        )
        return [instance]
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
@dataclass(frozen=True)
class TextEdit:
    """
    Represents a replacement of a declaration block in a file.

    Line numbers are 1-based and inclusive.
    """

    # First line of the replaced span (1-based, inclusive).
    start_line: int
    # Last line of the replaced span (1-based, inclusive).
    end_line: int
    # Replacement text; may span several newline-separated lines.
    replacement: str
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def extract_declaration_block(lines: list[str], start_line: int) -> tuple[int, int]:
    """Locate the full span of an observation declaration.

    ``start_line`` is 1-based; the returned ``(first, last)`` pair holds
    0-based indices into ``lines``, both inclusive.  ``--`` starts a
    line comment and is stripped before brace matching.  Raises
    ValueError when an opening brace is never balanced.
    """
    first = start_line - 1
    head = lines[first].split("--", 1)[0]

    # A declaration terminated on its first line with no opening brace
    # spans exactly that one line.
    if ";" in head and "{" not in head:
        return first, first

    depth = 0
    opened = False
    for last in range(first, len(lines)):
        code = lines[last].split("--", 1)[0]
        opened = opened or "{" in code
        depth += code.count("{") - code.count("}")
        if opened and depth == 0:
            return first, last

    raise ValueError(f"Unterminated declaration at line {start_line}")
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
@dataclass(frozen=True)
class _SummaryFromHistoryChange:
    """Replacement of one HISTORY_OBSERVATION block by a set of
    SUMMARY_OBSERVATION declarations."""

    source_observation: HistoryObservation
    summary_obs_declarations: list[str]
    lines: list[str]

    def edits(self) -> list[TextEdit]:
        """Return the single TextEdit replacing the original block."""
        # The observation name carries its source line number
        # (context-annotated string from the parser).
        first, last = extract_declaration_block(
            self.lines,
            self.source_observation.name.line,  # type: ignore
        )
        body = "\n\n".join(self.summary_obs_declarations)
        return [
            TextEdit(
                start_line=first + 1,
                end_line=last + 1,
                replacement=body + "\n",
            )
        ]
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
@dataclass(frozen=True)
class _GeneralObservationChange:
    """Rewrite of one GENERAL_OBSERVATION block so that it uses RESTART
    instead of DATE/DAYS/HOURS."""

    source_observation: LegacyGeneralObservation
    declaration: str
    restart: int
    lines: list[str]

    def edits(self) -> list[TextEdit]:
        """Return the single TextEdit replacing the original declaration."""
        # The observation name carries its source line number
        # (context-annotated string from the parser).
        span = extract_declaration_block(
            self.lines,
            self.source_observation.name.line,  # type: ignore
        )
        first, last = span
        return [
            TextEdit(
                start_line=first + 1,
                end_line=last + 1,
                replacement=self.declaration + "\n",
            )
        ]
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
@dataclass(frozen=True)
class _SummaryObservationChange:
    """Rewrite of one SUMMARY_OBSERVATION block so that it uses DATE
    instead of RESTART/DAYS/HOURS."""

    source_observation: LegacySummaryObservation
    declaration: str
    date: datetime
    lines: list[str]

    def edits(self) -> list[TextEdit]:
        """Return the single TextEdit replacing the original declaration."""
        # The observation name carries its source line number
        # (context-annotated string from the parser).
        span = extract_declaration_block(
            self.lines,
            self.source_observation.name.line,  # type: ignore
        )
        first, last = span
        return [
            TextEdit(
                start_line=first + 1,
                end_line=last + 1,
                replacement=self.declaration + "\n",
            )
        ]
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
@dataclass(frozen=True)
class _TimeMapAndRefcaseRemovalInfo:
    """All pending edits needed to strip REFCASE/TIME_MAP dependence
    from one observation config file."""

    obs_config_path: str
    refcase_path: str | None
    time_map_path: str | None
    history_changes: list[_SummaryFromHistoryChange]
    general_obs_changes: list[_GeneralObservationChange]
    summary_obs_changes: list[_SummaryObservationChange]

    def is_empty(self) -> bool:
        """True when no declaration needs rewriting."""
        return not (
            self.history_changes
            or self.general_obs_changes
            or self.summary_obs_changes
        )

    def collect_edits(self) -> list[TextEdit]:
        """Flatten every pending change into a single list of TextEdits."""
        all_changes = (
            *self.history_changes,
            *self.general_obs_changes,
            *self.summary_obs_changes,
        )
        return [edit for change in all_changes for edit in change.edits()]

    def apply_to_file(self, path: Path) -> None:
        """Rewrite *path* in place, applying every collected edit."""
        # Apply bottom-up so earlier line numbers stay valid after each
        # splice.
        pending = sorted(
            self.collect_edits(), key=lambda e: e.start_line, reverse=True
        )

        content = path.read_text(encoding="utf-8").splitlines()
        for edit in pending:
            # 1-based inclusive range -> 0-based half-open slice.
            content[edit.start_line - 1 : edit.end_line] = (
                edit.replacement.splitlines()
            )

        path.write_text("\n".join(content) + "\n", encoding="utf-8")
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
@dataclass
class LegacyObservationDate:
    """Deprecated time specifiers of a legacy observation.

    At most one specifier is used; resolution priority is
    restart > date > days > hours (see _get_restart and _get_time).
    DAYS and HOURS are offsets from the first entry of the time map.
    """

    days: float | None = None
    hours: float | None = None
    date: str | None = None
    restart: int | None = None
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
@dataclass
class _LegacySummaryValues:
    """Value, key and optional localization of a SUMMARY_OBSERVATION."""

    name: str
    value: float
    key: str  #: The :term:`summary key` in the summary response
    # Optional spatial localization (LOCATION_X/Y/RANGE keywords).
    location_x: float | None = None
    location_y: float | None = None
    location_range: float | None = None
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
@dataclass
class LegacySummaryObservation(
    LegacyObservationDate, _LegacySummaryValues, ObservationError
):
    """Parsed SUMMARY_OBSERVATION declaration using deprecated time
    specifiers (RESTART/DAYS/HOURS, or DATE)."""

    @classmethod
    def from_obs_dict(
        cls, directory: str, observation_dict: ObservationDict
    ) -> list[Self]:
        """Build a LegacySummaryObservation from a parsed observation dict.

        VALUE, KEY and ERROR are mandatory (reported in that order).
        Defaults: ERROR_MIN=0.1, ERROR_MODE=ABS.  ``directory`` is unused
        here; it is kept so all observation classes share the same
        ``from_obs_dict`` signature.
        """
        # NOTE: default ERROR_MODE here is ABS, unlike the RELMIN default
        # of HISTORY_OBSERVATION above.
        error_mode = ErrorModes.ABS
        summary_key = None

        date_dict: LegacyObservationDate = LegacyObservationDate()
        float_values: dict[str, float] = {"ERROR_MIN": 0.1}
        localization_values: dict[str, float] = {}
        for key, value in observation_dict.items():
            match key:
                case "type" | "name":
                    pass
                case "RESTART":
                    date_dict.restart = validate_positive_int(value, key)
                case "ERROR" | "ERROR_MIN":
                    float_values[str(key)] = validate_positive_float(value, key)
                case "DAYS" | "HOURS":
                    # Stored on the date helper as .days / .hours.
                    setattr(
                        date_dict, str(key).lower(), validate_positive_float(value, key)
                    )
                case "VALUE":
                    float_values[str(key)] = validate_float(value, key)
                case "ERROR_MODE":
                    error_mode = validate_error_mode(value)
                case "KEY":
                    summary_key = value
                case "DATE":
                    date_dict.date = value
                case "LOCATION_X":
                    localization_values["x"] = validate_float(value, key)
                case "LOCATION_Y":
                    localization_values["y"] = validate_float(value, key)
                case "LOCATION_RANGE":
                    localization_values["range"] = validate_float(value, key)
                case _:
                    raise _unknown_key_error(str(key), observation_dict["name"])
        if "VALUE" not in float_values:
            raise _missing_value_error(observation_dict["name"], "VALUE")
        if summary_key is None:
            raise _missing_value_error(observation_dict["name"], "KEY")
        if "ERROR" not in float_values:
            raise _missing_value_error(observation_dict["name"], "ERROR")

        return [
            cls(
                name=observation_dict["name"],
                error_mode=error_mode,
                error=float_values["ERROR"],
                error_min=float_values["ERROR_MIN"],
                key=summary_key,
                value=float_values["VALUE"],
                location_x=localization_values.get("x"),
                location_y=localization_values.get("y"),
                location_range=localization_values.get("range"),
                # Spread the collected days/hours/date/restart fields.
                **date_dict.__dict__,
            )
        ]
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
@dataclass
class _LegacyGeneralObservation:
    """Raw field container for a GENERAL_OBSERVATION declaration."""

    name: str
    # Name of the response (DATA keyword) this observation applies to.
    data: str
    value: float | None = None
    error: float | None = None
    index_list: str | None = None
    # Paths are resolved relative to the config directory in
    # LegacyGeneralObservation.from_obs_dict.
    index_file: str | None = None
    obs_file: str | None = None
    # Deprecated time specifiers (same semantics as LegacyObservationDate).
    days: float | None = None
    hours: float | None = None
    date: str | None = None
    restart: int | None = None
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
@dataclass
class LegacyGeneralObservation(_LegacyGeneralObservation):
    """Parsed GENERAL_OBSERVATION declaration using deprecated time
    specifiers (DATE/DAYS/HOURS)."""

    @classmethod
    def from_obs_dict(
        cls, directory: str, observation_dict: ObservationDict
    ) -> list[Self]:
        """Build a LegacyGeneralObservation from a parsed observation dict.

        DATA is mandatory.  OBS_FILE/INDEX_FILE paths are resolved
        against ``directory`` and must exist.  VALUE requires ERROR.
        """
        try:
            data = observation_dict["DATA"]
        except KeyError as err:
            raise _missing_value_error(observation_dict["name"], "DATA") from err

        output = cls(name=observation_dict["name"], data=data)
        for key, value in observation_dict.items():
            match key:
                case "type" | "name":
                    pass
                case "RESTART":
                    output.restart = validate_positive_int(value, key)
                case "VALUE":
                    output.value = validate_float(value, key)
                case "ERROR" | "DAYS" | "HOURS":
                    # Stored on the matching lowercase attribute.
                    setattr(
                        output, str(key).lower(), validate_positive_float(value, key)
                    )
                case "DATE" | "INDEX_LIST":
                    setattr(output, str(key).lower(), value)
                case "OBS_FILE" | "INDEX_FILE":
                    # Narrow the key type for the type checker.
                    assert not isinstance(key, tuple)
                    filename = value
                    if not os.path.isabs(filename):
                        filename = os.path.join(directory, filename)
                    if not os.path.exists(filename):
                        raise ObservationConfigError.with_context(
                            "The following keywords did not"
                            f" resolve to a valid path:\n {key}",
                            value,
                        )
                    setattr(output, str(key).lower(), filename)
                case "DATA":
                    # Already consumed above; kept so DATA is not treated
                    # as an unknown keyword.
                    output.data = value
                case _:
                    raise _unknown_key_error(str(key), observation_dict["name"])
        if output.value is not None and output.error is None:
            raise ObservationConfigError.with_context(
                f"For GENERAL_OBSERVATION {observation_dict['name']}, with"
                f" VALUE = {output.value}, ERROR must also be given.",
                observation_dict["name"],
            )
        return [output]
|
|
431
|
+
|
|
432
|
+
|
|
433
|
+
def remove_refcase_and_time_map_dependence_from_obs_config(
    config_path: str,
) -> _TimeMapAndRefcaseRemovalInfo | None:
    """
    Parses an ERT configuration to find observation declarations that depend on
    REFCASE or TIME_MAP and generates a set of proposed changes to remove these
    dependencies.

    The function reads the main ERT config and identifies the observation
    configuration file. It then processes three types of observation declarations:

    1. HISTORY_OBSERVATION: These are converted into one or more
       SUMMARY_OBSERVATION declarations, with dates and values extracted
       from the REFCASE.
    2. GENERAL_OBSERVATION: Declarations using the DATE keyword are updated
       to use the corresponding RESTART (report step) from the REFCASE or TIME_MAP.
    3. SUMMARY_OBSERVATION: Declarations using the RESTART (report step) keyword
       are updated to use the corresponding DATE from the REFCASE or TIME_MAP.

    All proposed modifications are collected and returned in a
    _TimeMapAndRefcaseRemovalInfo object, which can be used to apply the
    changes to the observation file. This function does not modify any files itself.

    Returns None when the configuration declares no OBS_CONFIG.
    """
    user_config_contents = read_file(config_path)
    config_dict = ErtConfig._config_dict_from_contents(
        user_config_contents,
        config_path,
    )

    refcase = (
        Refcase.from_config_dict(config_dict) if "REFCASE" in config_dict else None
    )

    obs_config_file, obs_config_entries = config_dict.get("OBS_CONFIG", (None, None))
    if obs_config_file is None:
        return None

    time_map = None
    time_map_path = None
    if time_map_args := config_dict.get("TIME_MAP"):
        time_map_file, time_map_contents = time_map_args
        # FIX: previously time_map_path was never assigned, so the returned
        # _TimeMapAndRefcaseRemovalInfo.time_map_path was always None even
        # when a TIME_MAP was configured.
        time_map_path = time_map_file
        try:
            time_map = _read_time_map(time_map_contents)
        except ValueError as err:
            raise ConfigValidationError.with_context(
                f"Could not read timemap file {time_map_file}: {err}",
                time_map_file,
            ) from err

    obs_config_lines = read_file(str(obs_config_file)).splitlines()
    config_dir = Path(obs_config_file).parent

    history_source = config_dict.get("HISTORY_SOURCE", HistorySource.REFCASE_HISTORY)
    # REFCASE dates take precedence over the TIME_MAP when both are present.
    obs_time_list: list[datetime] = []
    if refcase is not None:
        obs_time_list = refcase.all_dates
    elif time_map is not None:
        obs_time_list = time_map

    # Create observation objects from the configuration
    history_observations: list[HistoryObservation] = [
        obs
        for obs_dict in obs_config_entries
        if obs_dict.get("type") == "HISTORY_OBSERVATION"
        for obs in HistoryObservation.from_obs_dict("", obs_dict)
    ]

    # Only observations that use a deprecated time specifier need rewriting.
    genobs_deprecated_keys = {"DATE", "DAYS", "HOURS"}
    general_observations: list[LegacyGeneralObservation] = [
        obs
        for obs_dict in obs_config_entries
        if obs_dict.get("type") == "GENERAL_OBSERVATION"
        and (len(genobs_deprecated_keys.intersection(set(obs_dict))) > 0)
        for obs in LegacyGeneralObservation.from_obs_dict(str(config_dir), obs_dict)
    ]

    summary_deprecated_keys = {"RESTART", "DAYS", "HOURS"}
    summary_observations: list[LegacySummaryObservation] = [
        obs
        for obs_dict in obs_config_entries
        if obs_dict.get("type") == "SUMMARY_OBSERVATION"
        and (len(summary_deprecated_keys.intersection(set(obs_dict))) > 0)
        for obs in LegacySummaryObservation.from_obs_dict(str(config_dir), obs_dict)
    ]

    # Process history observations, which generate summary observation
    # declarations (one per refcase date).
    history_changes = []
    for history_obs in history_observations:
        history_obs_df = _handle_history_observation(
            refcase, history_obs, history_obs.name, history_source, len(obs_time_list)
        )
        declarations = []
        for obs_row in history_obs_df.to_dicts():
            declaration = (
                f"SUMMARY_OBSERVATION "
                f"{obs_row['observation_key']} {{\n"
                f" VALUE = {obs_row['observations']};\n"
                f" ERROR = {obs_row['std']};\n"
                f" DATE = {obs_row['time'].strftime('%Y-%m-%d')};\n"
                f" KEY = {obs_row['observation_key']};\n"
                "};"
            )
            declarations.append(declaration)

        history_changes.append(
            _SummaryFromHistoryChange(
                source_observation=history_obs,
                summary_obs_declarations=declarations,
                lines=obs_config_lines,
            )
        )

    # Process general observations: replace DATE/DAYS/HOURS by RESTART.
    general_obs_changes = []
    for gen_obs in general_observations:
        restart = _get_restart(
            cast(LegacyObservationDate, gen_obs),
            gen_obs.name,
            obs_time_list,
            refcase is not None,
        )

        index_list_or_file_declaration = ""
        if gen_obs.index_list is not None:
            index_list_or_file_declaration = f" INDEX_LIST = {gen_obs.index_list};\n"
        elif gen_obs.index_file is not None:
            index_list_or_file_declaration = f" INDEX_FILE = {gen_obs.index_file};\n"

        # OBS_FILE wins over VALUE/ERROR when both are present.
        obs_file_or_value_declaration = ""
        if gen_obs.value is not None:
            obs_file_or_value_declaration = f" VALUE = {gen_obs.value};\n"
            obs_file_or_value_declaration += f" ERROR = {gen_obs.error};\n"
        if gen_obs.obs_file is not None:
            obs_file_or_value_declaration = (
                f" OBS_FILE = {Path(gen_obs.obs_file).relative_to(config_dir)};\n"
            )

        declaration = (
            f"GENERAL_OBSERVATION {gen_obs.name} {{\n"
            f" DATA = {gen_obs.data};\n"
            f"{index_list_or_file_declaration}"
            f" RESTART = {restart};\n"
            f"{obs_file_or_value_declaration}"
            "};"
        )
        general_obs_changes.append(
            _GeneralObservationChange(
                source_observation=gen_obs,
                declaration=declaration,
                restart=restart,
                lines=obs_config_lines,
            )
        )

    # Process summary observations: replace RESTART/DAYS/HOURS by DATE.
    summary_obs_changes = []
    for smry_obs in summary_observations:
        restart = _get_restart(
            smry_obs, smry_obs.name, obs_time_list, refcase is not None
        )
        date = obs_time_list[restart]

        declaration = (
            f"SUMMARY_OBSERVATION {smry_obs.name} {{\n"
            f" VALUE = {smry_obs.value};\n"
            f" ERROR = {smry_obs.error};\n"
            f" DATE = {date.strftime('%Y-%m-%d')};\n"
            f" KEY = {smry_obs.key};\n"
            + (
                f" LOCATION_X={smry_obs.location_x};\n"
                if smry_obs.location_x is not None
                else ""
            )
            + (
                f" LOCATION_Y={smry_obs.location_y};\n"
                if smry_obs.location_y is not None
                else ""
            )
            + (
                f" LOCATION_RANGE={smry_obs.location_range};\n"
                if smry_obs.location_range is not None
                else ""
            )
            + "};"
        )
        summary_obs_changes.append(
            _SummaryObservationChange(
                source_observation=smry_obs,
                declaration=declaration,
                date=date,
                lines=obs_config_lines,
            )
        )

    return _TimeMapAndRefcaseRemovalInfo(
        obs_config_path=str(obs_config_file),
        refcase_path=config_dict.get("REFCASE", None),
        time_map_path=time_map_path,
        history_changes=history_changes,
        general_obs_changes=general_obs_changes,
        summary_obs_changes=summary_obs_changes,
    )
|
|
635
|
+
|
|
636
|
+
|
|
637
|
+
def _find_nearest(
    time_map: list[datetime],
    time: datetime,
    threshold: timedelta = DEFAULT_TIME_DELTA,
) -> int:
    """Index of the time_map entry closest to *time*.

    Only entries strictly closer than *threshold* qualify; ties resolve
    to the earliest index.  Raises IndexError when no entry is within
    the threshold.
    """
    within_threshold = [
        (abs(time - stamp), index)
        for index, stamp in enumerate(time_map)
        if abs(time - stamp) < threshold
    ]
    if not within_threshold:
        raise IndexError(f"{time} is not in the time map")
    # Tuple comparison: smallest difference first, then smallest index.
    _, best_index = min(within_threshold)
    return best_index
|
|
652
|
+
|
|
653
|
+
|
|
654
|
+
def _get_time(
    date_dict: LegacyObservationDate, start_time: datetime, context: Any = None
) -> tuple[datetime, str]:
    """Resolve a DATE/DAYS/HOURS specifier to an absolute time.

    Returns the resolved datetime plus a human-readable description of
    the specifier used (for error messages).  DATE takes priority over
    DAYS, which takes priority over HOURS.  Raises ObservationConfigError
    when none of the three is set.
    """
    date_spec = date_dict.date
    if date_spec is not None:
        return _parse_date(date_spec), f"DATE={date_spec}"

    day_offset = date_dict.days
    if day_offset is not None:
        return start_time + timedelta(days=day_offset), f"DAYS={day_offset}"

    hour_offset = date_dict.hours
    if hour_offset is not None:
        return start_time + timedelta(hours=hour_offset), f"HOURS={hour_offset}"

    raise ObservationConfigError.with_context("Missing time specifier", context=context)
|
|
666
|
+
|
|
667
|
+
|
|
668
|
+
def _get_restart(
    date_dict: LegacyObservationDate,
    obs_name: str,
    time_map: list[datetime],
    has_refcase: bool,
) -> int:
    """Resolve an observation's report step (restart number).

    An explicit RESTART wins; otherwise the DATE/DAYS/HOURS specifier is
    resolved via _get_time and matched against *time_map* with
    _find_nearest.  Raises ObservationConfigError when the time map is
    empty or the resolved time is not found in it.
    """
    if date_dict.restart is not None:
        return date_dict.restart
    if not time_map:
        raise ObservationConfigError.with_context(
            f"Missing REFCASE or TIME_MAP for observations: {obs_name}",
            obs_name,
        )

    time, date_str = _get_time(date_dict, time_map[0], context=obs_name)

    try:
        return _find_nearest(time_map, time)
    except IndexError as err:
        # FIX: both parenthetical hints previously had mismatched
        # parentheses (REFCASE branch closed one it never opened; the
        # TIME_MAP branch never closed the one it opened).
        raise ObservationConfigError.with_context(
            f"Could not find {time} ({date_str}) in "
            f"the time map for observations {obs_name}. "
            + (
                "(The time map is set from the REFCASE keyword. Either "
                "the REFCASE has an incorrect/missing date, or the observation "
                "is given an incorrect date.)"
                if has_refcase
                else "(The time map is set from the TIME_MAP "
                "keyword. Either the time map file has an "
                "incorrect/missing date, or the observation is given an "
                "incorrect date.)"
            ),
            obs_name,
        ) from err
|
|
702
|
+
|
|
703
|
+
|
|
704
|
+
def _handle_history_observation(
    refcase: Refcase | None,
    history_observation: HistoryObservation,
    summary_key: str,
    history_type: HistorySource,
    time_len: int,
) -> pl.DataFrame:
    """Expand a HISTORY_OBSERVATION into per-timestep observation rows.

    Observed values come from the refcase vector for *summary_key* (the
    history vector via ``history_key`` when *history_type* is
    REFCASE_HISTORY); uncertainties are derived with _handle_error_mode
    and then overridden per SEGMENT.  Returns a polars frame with one row
    per refcase date.  Raises ObservationConfigError when refcase is
    missing, the key is absent from it, or any resulting uncertainty
    is <= 0.
    """
    if refcase is None:
        raise ObservationConfigError.with_context(
            "REFCASE is required for HISTORY_OBSERVATION", summary_key
        )

    if history_type == HistorySource.REFCASE_HISTORY:
        # e.g. FOPR -> FOPRH: read the history variant of the vector.
        local_key = history_key(summary_key)
    else:
        local_key = summary_key
    if local_key not in refcase.keys:
        raise ObservationConfigError.with_context(
            f"Key {local_key!r} is not present in refcase", summary_key
        )
    values = refcase.values[refcase.keys.index(local_key)]
    # Baseline uncertainty from the observation's own error settings.
    std_dev = _handle_error_mode(values, history_observation)
    for segment in history_observation.segments:
        start = segment.start
        stop = segment.stop
        # Clamp segment bounds into [0, time_len - 1], warning on each
        # adjustment; a start beyond stop collapses to a single step.
        if start < 0:
            ConfigWarning.warn(
                f"Segment {segment.name} out of bounds."
                " Truncating start of segment to 0.",
                segment.name,
            )
            start = 0
        if stop >= time_len:
            ConfigWarning.warn(
                f"Segment {segment.name} out of bounds. Truncating"
                f" end of segment to {time_len - 1}.",
                segment.name,
            )
            stop = time_len - 1
        if start > stop:
            ConfigWarning.warn(
                f"Segment {segment.name} start after stop. Truncating"
                f" end of segment to {start}.",
                segment.name,
            )
            stop = start
        if np.size(std_dev[start:stop]) == 0:
            ConfigWarning.warn(
                f"Segment {segment.name} does not"
                " contain any time steps. The interval "
                f"[{start}, {stop}) does not intersect with steps in the"
                "time map.",
                segment.name,
            )
        # NOTE(review): the slice excludes index `stop`, matching the
        # half-open "[start, stop)" wording above — confirm a segment's
        # STOP step is meant to keep the baseline uncertainty.
        std_dev[start:stop] = _handle_error_mode(values[start:stop], segment)
    dates_series = pl.Series(refcase.dates).dt.cast_time_unit("ms")
    if (std_dev <= 0).any():
        raise ObservationConfigError.with_context(
            "Observation uncertainty must be strictly > 0", summary_key
        ) from None

    return pl.DataFrame(
        {
            "response_key": summary_key,
            "observation_key": summary_key,
            "time": dates_series,
            "observations": pl.Series(values, dtype=pl.Float32),
            "std": pl.Series(std_dev, dtype=pl.Float32),
            # No localization for history-derived observations.
            "location_x": pl.Series([None] * len(values), dtype=pl.Float32),
            "location_y": pl.Series([None] * len(values), dtype=pl.Float32),
            "location_range": pl.Series([None] * len(values), dtype=pl.Float32),
        }
    )
|
|
777
|
+
|
|
778
|
+
|
|
779
|
+
def _read_time_map(file_contents: str) -> list[datetime]:
    """Parse the contents of a TIME_MAP file into a list of datetimes.

    Each non-blank line holds one date, preferably in ISO format
    (YYYY-MM-DD); the legacy DD/MM/YYYY format is accepted with a
    deprecation warning.  Blank lines are skipped (previously they
    crashed with an unhelpful strptime ValueError).  Raises ValueError
    on any unparsable entry.
    """

    def str_to_datetime(date_str: str) -> datetime:
        try:
            return datetime.fromisoformat(date_str)
        except ValueError:
            logger.warning(
                "DD/MM/YYYY date format is deprecated"
                ", please use ISO date format YYYY-MM-DD."
            )
            return datetime.strptime(date_str, "%d/%m/%Y")

    return [
        str_to_datetime(stripped)
        for line in file_contents.splitlines()
        if (stripped := line.strip())
    ]
|