ert 19.0.0rc4__py3-none-any.whl → 20.0.0b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82):
  1. ert/__main__.py +94 -63
  2. ert/analysis/_es_update.py +11 -14
  3. ert/config/__init__.py +3 -2
  4. ert/config/_create_observation_dataframes.py +51 -375
  5. ert/config/_observations.py +483 -200
  6. ert/config/_read_summary.py +4 -5
  7. ert/config/ert_config.py +53 -80
  8. ert/config/everest_control.py +40 -39
  9. ert/config/everest_response.py +1 -13
  10. ert/config/field.py +0 -72
  11. ert/config/forward_model_step.py +17 -1
  12. ert/config/gen_data_config.py +14 -17
  13. ert/config/observation_config_migrations.py +821 -0
  14. ert/config/parameter_config.py +18 -28
  15. ert/config/parsing/__init__.py +0 -1
  16. ert/config/parsing/_parse_zonemap.py +45 -0
  17. ert/config/parsing/config_keywords.py +1 -1
  18. ert/config/parsing/config_schema.py +2 -8
  19. ert/config/parsing/observations_parser.py +2 -0
  20. ert/config/response_config.py +5 -23
  21. ert/config/rft_config.py +44 -19
  22. ert/config/summary_config.py +1 -13
  23. ert/config/surface_config.py +0 -57
  24. ert/dark_storage/compute/misfits.py +0 -42
  25. ert/dark_storage/endpoints/__init__.py +0 -2
  26. ert/dark_storage/endpoints/experiments.py +2 -5
  27. ert/dark_storage/json_schema/experiment.py +1 -2
  28. ert/field_utils/__init__.py +0 -2
  29. ert/field_utils/field_utils.py +1 -117
  30. ert/gui/ertwidgets/listeditbox.py +9 -1
  31. ert/gui/ertwidgets/models/ertsummary.py +20 -6
  32. ert/gui/ertwidgets/pathchooser.py +9 -1
  33. ert/gui/ertwidgets/stringbox.py +11 -3
  34. ert/gui/ertwidgets/textbox.py +10 -3
  35. ert/gui/ertwidgets/validationsupport.py +19 -1
  36. ert/gui/main_window.py +11 -6
  37. ert/gui/simulation/experiment_panel.py +1 -1
  38. ert/gui/simulation/run_dialog.py +11 -1
  39. ert/gui/tools/manage_experiments/export_dialog.py +4 -0
  40. ert/gui/tools/manage_experiments/manage_experiments_panel.py +1 -0
  41. ert/gui/tools/manage_experiments/storage_info_widget.py +5 -2
  42. ert/gui/tools/manage_experiments/storage_widget.py +18 -3
  43. ert/gui/tools/plot/data_type_proxy_model.py +1 -1
  44. ert/gui/tools/plot/plot_api.py +35 -27
  45. ert/gui/tools/plot/plot_widget.py +5 -0
  46. ert/gui/tools/plot/plot_window.py +4 -7
  47. ert/run_models/ensemble_experiment.py +1 -3
  48. ert/run_models/ensemble_smoother.py +1 -3
  49. ert/run_models/everest_run_model.py +12 -13
  50. ert/run_models/initial_ensemble_run_model.py +19 -22
  51. ert/run_models/model_factory.py +7 -7
  52. ert/run_models/multiple_data_assimilation.py +1 -3
  53. ert/sample_prior.py +12 -14
  54. ert/services/__init__.py +7 -3
  55. ert/services/_storage_main.py +59 -22
  56. ert/services/ert_server.py +186 -24
  57. ert/shared/version.py +3 -3
  58. ert/storage/local_ensemble.py +46 -115
  59. ert/storage/local_experiment.py +0 -16
  60. ert/utils/__init__.py +20 -0
  61. ert/warnings/specific_warning_handler.py +3 -2
  62. {ert-19.0.0rc4.dist-info → ert-20.0.0b0.dist-info}/METADATA +4 -51
  63. {ert-19.0.0rc4.dist-info → ert-20.0.0b0.dist-info}/RECORD +75 -80
  64. everest/bin/everest_script.py +5 -5
  65. everest/bin/kill_script.py +2 -2
  66. everest/bin/monitor_script.py +2 -2
  67. everest/bin/utils.py +4 -4
  68. everest/detached/everserver.py +6 -6
  69. everest/gui/everest_client.py +0 -6
  70. everest/gui/main_window.py +2 -2
  71. everest/util/__init__.py +1 -19
  72. ert/dark_storage/compute/__init__.py +0 -0
  73. ert/dark_storage/endpoints/compute/__init__.py +0 -0
  74. ert/dark_storage/endpoints/compute/misfits.py +0 -95
  75. ert/services/_base_service.py +0 -387
  76. ert/services/webviz_ert_service.py +0 -20
  77. ert/shared/storage/command.py +0 -38
  78. ert/shared/storage/extraction.py +0 -42
  79. {ert-19.0.0rc4.dist-info → ert-20.0.0b0.dist-info}/WHEEL +0 -0
  80. {ert-19.0.0rc4.dist-info → ert-20.0.0b0.dist-info}/entry_points.txt +0 -0
  81. {ert-19.0.0rc4.dist-info → ert-20.0.0b0.dist-info}/licenses/COPYING +0 -0
  82. {ert-19.0.0rc4.dist-info → ert-20.0.0b0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,821 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from dataclasses import dataclass
5
+ from datetime import datetime, timedelta
6
+ from itertools import starmap
7
+ from pathlib import Path
8
+ from typing import Any, Self, assert_never, cast
9
+
10
+ import numpy as np
11
+ import numpy.typing as npt
12
+ import polars as pl
13
+ from resfo_utilities import history_key
14
+
15
+ from ._observations import (
16
+ ErrorModes,
17
+ _missing_value_error,
18
+ _parse_date,
19
+ _unknown_key_error,
20
+ validate_error_mode,
21
+ validate_float,
22
+ validate_int,
23
+ validate_positive_float,
24
+ validate_positive_int,
25
+ )
26
+ from .ert_config import ErtConfig, logger
27
+ from .parsing import (
28
+ HistorySource,
29
+ ObservationConfigError,
30
+ ObservationDict,
31
+ read_file,
32
+ )
33
+ from .parsing.config_errors import ConfigValidationError, ConfigWarning
34
+ from .refcase import Refcase
35
+
36
+ DEFAULT_TIME_DELTA = timedelta(seconds=30)
37
+
38
+
39
@dataclass
class ObservationError:
    """Error specification shared by the legacy observation types.

    Describes how the observation uncertainty (standard deviation) is
    derived from the observed values; consumed by
    ``_legacy_handle_error_mode``.
    """

    error_mode: ErrorModes  # ABS, REL or RELMIN (see _legacy_handle_error_mode)
    error: float  # absolute error (ABS) or relative factor (REL/RELMIN)
    error_min: float  # lower bound on the error; only used by RELMIN
44
+
45
+
46
@dataclass
class Segment(ObservationError):
    """A SEGMENT block inside a HISTORY_OBSERVATION: a report-step range
    with its own error settings overriding the observation-wide ones."""

    name: str  # the segment's name token from the config
    start: int  # first report step of the range
    stop: int  # end of the range; applied as a half-open slice [start, stop)
52
+
53
def _validate_segment_dict(name_token: str, inp: dict[str, Any]) -> Segment:
    """Parse one SEGMENT block into a :class:`Segment`.

    START and STOP are mandatory; the error settings default to RELMIN
    with error=0.1 and error_min=0.1.  Any unknown key raises.
    """
    kwargs: dict[str, Any] = {
        "start": None,
        "stop": None,
        "error_mode": ErrorModes.RELMIN,
        "error": 0.1,
        "error_min": 0.1,
    }
    for key, value in inp.items():
        if key == "START":
            kwargs["start"] = validate_int(value, key)
        elif key == "STOP":
            kwargs["stop"] = validate_int(value, key)
        elif key == "ERROR":
            kwargs["error"] = validate_positive_float(
                value, key, strictly_positive=True
            )
        elif key == "ERROR_MIN":
            kwargs["error_min"] = validate_positive_float(
                value, key, strictly_positive=True
            )
        elif key == "ERROR_MODE":
            kwargs["error_mode"] = validate_error_mode(value)
        else:
            raise _unknown_key_error(key, name_token)

    if kwargs["start"] is None:
        raise _missing_value_error(name_token, "START")
    if kwargs["stop"] is None:
        raise _missing_value_error(name_token, "STOP")
    return Segment(name=name_token, **kwargs)
86
+
87
+
88
@dataclass
class HistoryObservation(ObservationError):
    """A legacy HISTORY_OBSERVATION declaration.

    Observed values are not given inline; they are taken from the
    matching history vector in the refcase (see ``key``).
    """

    name: str  # the summary key the observation was declared for
    segments: list[Segment]  # optional SEGMENT blocks with local error settings

    @property
    def key(self) -> str:
        """The :term:`summary key` to be fetched from :ref:`refcase`."""
        # For history observations the key is also the name, ie.
        # "HISTORY_OBSERVATION FOPR" means to add the values from
        # the summary vector FOPRH in refcase as observations.
        return self.name

    @classmethod
    def from_obs_dict(
        cls, directory: str, observation_dict: ObservationDict
    ) -> list[Self]:
        """Build a :class:`HistoryObservation` from a parsed observation dict.

        ``directory`` is unused here but keeps signature parity with the
        other observation types' ``from_obs_dict``.  Error settings
        default to RELMIN with error=0.1 / error_min=0.1.  Unknown keys
        raise an ObservationConfigError.
        """
        error_mode = ErrorModes.RELMIN
        error = 0.1
        error_min = 0.1
        segments = []
        for key, value in observation_dict.items():
            match key:
                case "type" | "name":
                    pass
                case "ERROR":
                    error = validate_positive_float(value, key, strictly_positive=True)
                case "ERROR_MIN":
                    error_min = validate_positive_float(
                        value, key, strictly_positive=True
                    )
                case "ERROR_MODE":
                    error_mode = validate_error_mode(value)
                case "segments":
                    # value is an iterable of (name_token, dict) pairs
                    segments = list(starmap(_validate_segment_dict, value))
                case _:
                    raise _unknown_key_error(str(key), observation_dict["name"])

        instance = cls(
            name=observation_dict["name"],
            error_mode=error_mode,
            error=error,
            error_min=error_min,
            segments=segments,
        )
        return [instance]
134
+
135
+
136
@dataclass(frozen=True)
class TextEdit:
    """
    Represents a replacement of a declaration block in a file.
    Line numbers are 1-based and inclusive.
    """

    start_line: int  # first line to replace (1-based, inclusive)
    end_line: int  # last line to replace (1-based, inclusive)
    replacement: str  # new text for the range; may span several lines
146
+
147
+
148
def extract_declaration_block(lines: list[str], start_line: int) -> tuple[int, int]:
    """Locate the full extent of an observation declaration.

    ``start_line`` is 1-based; the returned ``(first, last)`` pair is
    0-based and inclusive.  ``--`` starts a comment, so everything after
    it on a line is ignored while scanning for ``{``, ``}`` and ``;``.

    Raises ValueError when the braces never close.
    """
    first = start_line - 1
    head = lines[first].split("--", 1)[0]

    # A declaration terminated by ';' without any '{' fits on one line.
    if "{" not in head and ";" in head:
        return first, first

    # Otherwise scan forward, tracking brace nesting until it closes.
    depth = 0
    seen_brace = False
    for last in range(first, len(lines)):
        code = lines[last].split("--", 1)[0]
        if "{" in code:
            seen_brace = True
        depth += code.count("{") - code.count("}")
        if seen_brace and depth == 0:
            return first, last

    raise ValueError(f"Unterminated declaration at line {start_line}")
172
+
173
+
174
@dataclass(frozen=True)
class _SummaryFromHistoryChange:
    """Proposed rewrite of one HISTORY_OBSERVATION into explicit
    SUMMARY_OBSERVATION declarations."""

    source_observation: HistoryObservation  # the declaration being replaced
    summary_obs_declarations: list[str]  # generated replacement declarations
    lines: list[str]  # full contents of the observation config file

    def edits(self) -> list[TextEdit]:
        """Return the single edit replacing the original declaration
        block with the generated SUMMARY_OBSERVATION declarations."""
        # NOTE(review): `name` presumably carries a `.line` attribute from
        # the parser's context tracking (hence the type: ignore) — confirm.
        start, end = extract_declaration_block(
            self.lines,
            self.source_observation.name.line,  # type: ignore
        )

        replacement = "\n\n".join(self.summary_obs_declarations) + "\n"

        return [
            TextEdit(
                # extract_declaration_block returns 0-based indices,
                # TextEdit uses 1-based line numbers.
                start_line=start + 1,
                end_line=end + 1,
                replacement=replacement,
            )
        ]
195
+
196
+
197
@dataclass(frozen=True)
class _GeneralObservationChange:
    """Proposed rewrite of one GENERAL_OBSERVATION so it uses an explicit
    RESTART instead of DATE/DAYS/HOURS."""

    source_observation: LegacyGeneralObservation  # the declaration being replaced
    declaration: str  # the full replacement declaration text
    restart: int  # resolved report step used in the new declaration
    lines: list[str]  # full contents of the observation config file

    def edits(self) -> list[TextEdit]:
        """Return the single edit replacing the original declaration
        block with the new RESTART-based declaration."""
        # NOTE(review): `name` presumably carries a `.line` attribute from
        # the parser's context tracking (hence the type: ignore) — confirm.
        start, end = extract_declaration_block(
            self.lines,
            self.source_observation.name.line,  # type: ignore
        )

        return [
            TextEdit(
                # extract_declaration_block returns 0-based indices,
                # TextEdit uses 1-based line numbers.
                start_line=start + 1,
                end_line=end + 1,
                replacement=self.declaration + "\n",
            )
        ]
217
+
218
+
219
@dataclass(frozen=True)
class _SummaryObservationChange:
    """Proposed rewrite of one SUMMARY_OBSERVATION so it uses an explicit
    DATE instead of RESTART/DAYS/HOURS."""

    source_observation: LegacySummaryObservation  # the declaration being replaced
    declaration: str  # the full replacement declaration text
    date: datetime  # resolved date used in the new declaration
    lines: list[str]  # full contents of the observation config file

    def edits(self) -> list[TextEdit]:
        """Return the single edit replacing the original declaration
        block with the new DATE-based declaration."""
        # NOTE(review): `name` presumably carries a `.line` attribute from
        # the parser's context tracking (hence the type: ignore) — confirm.
        start, end = extract_declaration_block(
            self.lines,
            self.source_observation.name.line,  # type: ignore
        )

        return [
            TextEdit(
                # extract_declaration_block returns 0-based indices,
                # TextEdit uses 1-based line numbers.
                start_line=start + 1,
                end_line=end + 1,
                replacement=self.declaration + "\n",
            )
        ]
239
+
240
+
241
+ @dataclass(frozen=True)
242
+ class _TimeMapAndRefcaseRemovalInfo:
243
+ obs_config_path: str
244
+ refcase_path: str | None
245
+ time_map_path: str | None
246
+ history_changes: list[_SummaryFromHistoryChange]
247
+ general_obs_changes: list[_GeneralObservationChange]
248
+ summary_obs_changes: list[_SummaryObservationChange]
249
+
250
+ def is_empty(self) -> bool:
251
+ return (
252
+ len(self.history_changes)
253
+ + len(self.general_obs_changes)
254
+ + len(self.summary_obs_changes)
255
+ ) == 0
256
+
257
+ def collect_edits(self) -> list[TextEdit]:
258
+ edits: list[TextEdit] = []
259
+
260
+ for history_change in self.history_changes:
261
+ edits.extend(history_change.edits())
262
+
263
+ for gen_change in self.general_obs_changes:
264
+ edits.extend(gen_change.edits())
265
+
266
+ for summary_change in self.summary_obs_changes:
267
+ edits.extend(summary_change.edits())
268
+
269
+ return edits
270
+
271
+ def apply_to_file(self, path: Path) -> None:
272
+ edits = self.collect_edits()
273
+
274
+ # Sort edits bottom-up, so that line numbers remain valid for subsequent edits
275
+ edits.sort(key=lambda e: e.start_line, reverse=True)
276
+
277
+ lines = path.read_text(encoding="utf-8").splitlines()
278
+
279
+ for edit in edits:
280
+ # Line numbers are 1-based, inclusive. Convert to 0-based slice.
281
+ start_index = edit.start_line - 1
282
+ end_index = edit.end_line
283
+
284
+ replacement_lines = edit.replacement.splitlines()
285
+ lines[start_index:end_index] = replacement_lines
286
+
287
+ path.write_text("\n".join(lines) + "\n", encoding="utf-8")
288
+
289
+
290
@dataclass
class LegacyObservationDate:
    """The legacy, mutually alternative ways of giving observation time.

    ``_get_time`` resolves DAYS/HOURS relative to the first entry of the
    time map; ``_get_restart`` prefers an explicit ``restart``.
    """

    days: float | None = None  # days offset from the start of the time map
    hours: float | None = None  # hours offset from the start of the time map
    date: str | None = None  # literal date string (ISO, or legacy DD/MM/YYYY)
    restart: int | None = None  # explicit report step
296
+
297
+
298
@dataclass
class _LegacySummaryValues:
    """Value-related fields of a legacy SUMMARY_OBSERVATION."""

    name: str  # the observation's name token
    value: float  # the observed value (VALUE)
    key: str  #: The :term:`summary key` in the summary response
    location_x: float | None = None  # optional LOCATION_X for localization
    location_y: float | None = None  # optional LOCATION_Y for localization
    location_range: float | None = None  # optional LOCATION_RANGE for localization
306
+
307
+
308
@dataclass
class LegacySummaryObservation(
    LegacyObservationDate, _LegacySummaryValues, ObservationError
):
    """A legacy SUMMARY_OBSERVATION declaration.

    All fields are inherited from the three parent dataclasses:
    error settings, value/key/location, and the time specifier.
    """

    @classmethod
    def from_obs_dict(
        cls, directory: str, observation_dict: ObservationDict
    ) -> list[Self]:
        """Build a :class:`LegacySummaryObservation` from a parsed dict.

        VALUE, KEY and ERROR are mandatory; ERROR_MIN defaults to 0.1 and
        the error mode to ABS.  Unknown keys raise.  ``directory`` is
        unused but keeps signature parity with the other observation
        types' ``from_obs_dict``.
        """
        error_mode = ErrorModes.ABS
        summary_key = None

        date_dict: LegacyObservationDate = LegacyObservationDate()
        float_values: dict[str, float] = {"ERROR_MIN": 0.1}
        localization_values: dict[str, float] = {}
        for key, value in observation_dict.items():
            match key:
                case "type" | "name":
                    pass
                case "RESTART":
                    date_dict.restart = validate_positive_int(value, key)
                case "ERROR" | "ERROR_MIN":
                    float_values[str(key)] = validate_positive_float(
                        value, key, strictly_positive=True
                    )
                case "DAYS" | "HOURS":
                    setattr(
                        date_dict, str(key).lower(), validate_positive_float(value, key)
                    )
                case "VALUE":
                    float_values[str(key)] = validate_float(value, key)
                case "ERROR_MODE":
                    error_mode = validate_error_mode(value)
                case "KEY":
                    summary_key = value
                case "DATE":
                    date_dict.date = value
                case "LOCATION_X":
                    localization_values["x"] = validate_float(value, key)
                case "LOCATION_Y":
                    localization_values["y"] = validate_float(value, key)
                case "LOCATION_RANGE":
                    localization_values["range"] = validate_float(value, key)
                case _:
                    raise _unknown_key_error(str(key), observation_dict["name"])
        if "VALUE" not in float_values:
            raise _missing_value_error(observation_dict["name"], "VALUE")
        if summary_key is None:
            raise _missing_value_error(observation_dict["name"], "KEY")
        if "ERROR" not in float_values:
            raise _missing_value_error(observation_dict["name"], "ERROR")

        return [
            cls(
                name=observation_dict["name"],
                error_mode=error_mode,
                error=float_values["ERROR"],
                error_min=float_values["ERROR_MIN"],
                key=summary_key,
                value=float_values["VALUE"],
                location_x=localization_values.get("x"),
                location_y=localization_values.get("y"),
                location_range=localization_values.get("range"),
                # carry over the DATE/DAYS/HOURS/RESTART parsed above
                **date_dict.__dict__,
            )
        ]
373
+
374
+
375
@dataclass
class _LegacyGeneralObservation:
    """Fields of a legacy GENERAL_OBSERVATION declaration."""

    name: str  # the observation's name token
    data: str  # the DATA keyword: the response this observes
    value: float | None = None  # inline observed value (alternative to OBS_FILE)
    error: float | None = None  # error for an inline VALUE
    index_list: str | None = None  # INDEX_LIST, if given
    index_file: str | None = None  # resolved INDEX_FILE path, if given
    obs_file: str | None = None  # resolved OBS_FILE path, if given
    days: float | None = None  # legacy time specifier: days offset
    hours: float | None = None  # legacy time specifier: hours offset
    date: str | None = None  # legacy time specifier: literal date string
    restart: int | None = None  # explicit report step
388
+
389
+
390
@dataclass
class LegacyGeneralObservation(_LegacyGeneralObservation):
    """A legacy GENERAL_OBSERVATION declaration (fields on the parent)."""

    @classmethod
    def from_obs_dict(
        cls, directory: str, observation_dict: ObservationDict
    ) -> list[Self]:
        """Build a :class:`LegacyGeneralObservation` from a parsed dict.

        DATA is mandatory, and an inline VALUE requires ERROR.  OBS_FILE
        and INDEX_FILE paths are resolved relative to ``directory`` and
        must exist.  Unknown keys raise.
        """
        try:
            data = observation_dict["DATA"]
        except KeyError as err:
            raise _missing_value_error(observation_dict["name"], "DATA") from err

        output = cls(name=observation_dict["name"], data=data)
        for key, value in observation_dict.items():
            match key:
                case "type" | "name":
                    pass
                case "RESTART":
                    output.restart = validate_positive_int(value, key)
                case "VALUE":
                    output.value = validate_float(value, key)
                case "ERROR" | "DAYS" | "HOURS":
                    setattr(
                        output, str(key).lower(), validate_positive_float(value, key)
                    )
                case "DATE" | "INDEX_LIST":
                    setattr(output, str(key).lower(), value)
                case "OBS_FILE" | "INDEX_FILE":
                    assert not isinstance(key, tuple)
                    filename = value
                    # Relative paths are resolved against the config directory
                    if not os.path.isabs(filename):
                        filename = os.path.join(directory, filename)
                    if not os.path.exists(filename):
                        raise ObservationConfigError.with_context(
                            "The following keywords did not"
                            f" resolve to a valid path:\n {key}",
                            value,
                        )
                    setattr(output, str(key).lower(), filename)
                case "DATA":
                    output.data = value
                case _:
                    raise _unknown_key_error(str(key), observation_dict["name"])
        if output.value is not None and output.error is None:
            raise ObservationConfigError.with_context(
                f"For GENERAL_OBSERVATION {observation_dict['name']}, with"
                f" VALUE = {output.value}, ERROR must also be given.",
                observation_dict["name"],
            )
        return [output]
439
+
440
+
441
def remove_refcase_and_time_map_dependence_from_obs_config(
    config_path: str,
) -> _TimeMapAndRefcaseRemovalInfo | None:
    """
    Parses an ERT configuration to find observation declarations that depend on
    REFCASE or TIME_MAP and generates a set of proposed changes to remove these
    dependencies.

    The function reads the main ERT config and identifies the observation
    configuration file. It then processes three types of observation declarations:

    1. HISTORY_OBSERVATION: These are converted into one or more
       SUMMARY_OBSERVATION declarations, with dates and values extracted
       from the REFCASE.
    2. GENERAL_OBSERVATION: Declarations using the DATE keyword are updated
       to use the corresponding RESTART (report step) from the REFCASE or TIME_MAP.
    3. SUMMARY_OBSERVATION: Declarations using the RESTART (report step) keyword
       are updated to use the corresponding DATE from the REFCASE or TIME_MAP.

    All proposed modifications are collected and returned in a
    _TimeMapAndRefcaseRemovalInfo object, which can be used to apply the
    changes to the observation file. This function does not modify any files
    itself. Returns None when the config has no OBS_CONFIG.
    """
    user_config_contents = read_file(config_path)
    config_dict = ErtConfig._config_dict_from_contents(
        user_config_contents,
        config_path,
    )

    refcase = (
        Refcase.from_config_dict(config_dict) if "REFCASE" in config_dict else None
    )

    obs_config_file, obs_config_entries = config_dict.get("OBS_CONFIG", (None, None))
    if obs_config_file is None:
        return None

    time_map = None
    time_map_path = None
    if time_map_args := config_dict.get("TIME_MAP"):
        time_map_file, time_map_contents = time_map_args
        # Record the TIME_MAP path for the returned info object.  (Fix: this
        # was previously never assigned, so time_map_path was always None.)
        time_map_path = str(time_map_file)
        try:
            time_map = _read_time_map(time_map_contents)
        except ValueError as err:
            raise ConfigValidationError.with_context(
                f"Could not read timemap file {time_map_file}: {err}",
                time_map_file,
            ) from err

    obs_config_lines = read_file(str(obs_config_file)).splitlines()
    config_dir = Path(obs_config_file).parent

    history_source = config_dict.get("HISTORY_SOURCE", HistorySource.REFCASE_HISTORY)
    # The time map used for DATE/DAYS/HOURS -> RESTART resolution; REFCASE
    # takes precedence over TIME_MAP when both are present.
    obs_time_list: list[datetime] = []
    if refcase is not None:
        obs_time_list = refcase.all_dates
    elif time_map is not None:
        obs_time_list = time_map

    # Create observation objects from the configuration
    history_observations: list[HistoryObservation] = [
        obs
        for obs_dict in obs_config_entries
        if obs_dict.get("type") == "HISTORY_OBSERVATION"
        for obs in HistoryObservation.from_obs_dict("", obs_dict)
    ]

    # Only general observations that use a deprecated time specifier need
    # rewriting; the rest are left untouched.
    genobs_deprecated_keys = {"DATE", "DAYS", "HOURS"}
    general_observations: list[LegacyGeneralObservation] = [
        obs
        for obs_dict in obs_config_entries
        if obs_dict.get("type") == "GENERAL_OBSERVATION"
        and (len(genobs_deprecated_keys.intersection(set(obs_dict))) > 0)
        for obs in LegacyGeneralObservation.from_obs_dict(str(config_dir), obs_dict)
    ]

    summary_deprecated_keys = {"RESTART", "DAYS", "HOURS"}
    summary_observations: list[LegacySummaryObservation] = [
        obs
        for obs_dict in obs_config_entries
        if obs_dict.get("type") == "SUMMARY_OBSERVATION"
        and (len(summary_deprecated_keys.intersection(set(obs_dict))) > 0)
        for obs in LegacySummaryObservation.from_obs_dict(str(config_dir), obs_dict)
    ]
    # Process history observations, which generate summary observation declarations
    history_changes = []
    for history_obs in history_observations:
        history_obs_df = _handle_history_observation(
            refcase, history_obs, history_obs.name, history_source, len(obs_time_list)
        )
        declarations = []
        for obs_row in history_obs_df.to_dicts():
            declaration = (
                f"SUMMARY_OBSERVATION "
                f"{obs_row['observation_key']} {{\n"
                f"    VALUE = {obs_row['observations']};\n"
                f"    ERROR = {obs_row['std']};\n"
                f"    DATE  = {obs_row['time'].strftime('%Y-%m-%d')};\n"
                f"    KEY   = {obs_row['observation_key']};\n"
                "};"
            )
            declarations.append(declaration)

        history_changes.append(
            _SummaryFromHistoryChange(
                source_observation=history_obs,
                summary_obs_declarations=declarations,
                lines=obs_config_lines,
            )
        )

    # Process general observations
    general_obs_changes = []
    for gen_obs in general_observations:
        restart = _get_restart(
            cast(LegacyObservationDate, gen_obs),
            gen_obs.name,
            obs_time_list,
            refcase is not None,
        )

        index_list_or_file_declaration = ""
        if gen_obs.index_list is not None:
            index_list_or_file_declaration = f"    INDEX_LIST = {gen_obs.index_list};\n"
        elif gen_obs.index_file is not None:
            index_list_or_file_declaration = f"    INDEX_FILE = {gen_obs.index_file};\n"

        # Inline VALUE/ERROR and OBS_FILE are alternatives; OBS_FILE wins
        # when both are present, and its path is made config-relative.
        obs_file_or_value_declaration = ""
        if gen_obs.value is not None:
            obs_file_or_value_declaration = f"    VALUE = {gen_obs.value};\n"
            obs_file_or_value_declaration += f"    ERROR = {gen_obs.error};\n"
        if gen_obs.obs_file is not None:
            obs_file_or_value_declaration = (
                f"    OBS_FILE = {Path(gen_obs.obs_file).relative_to(config_dir)};\n"
            )

        declaration = (
            f"GENERAL_OBSERVATION {gen_obs.name} {{\n"
            f"    DATA       = {gen_obs.data};\n"
            f"{index_list_or_file_declaration}"
            f"    RESTART    = {restart};\n"
            f"{obs_file_or_value_declaration}"
            "};"
        )
        general_obs_changes.append(
            _GeneralObservationChange(
                source_observation=gen_obs,
                declaration=declaration,
                restart=restart,
                lines=obs_config_lines,
            )
        )

    # Process summary observations
    summary_obs_changes = []
    for smry_obs in summary_observations:
        restart = _get_restart(
            smry_obs, smry_obs.name, obs_time_list, refcase is not None
        )
        date = obs_time_list[restart]

        declaration = (
            f"SUMMARY_OBSERVATION {smry_obs.name} {{\n"
            f"    VALUE = {smry_obs.value};\n"
            f"    ERROR = {smry_obs.error};\n"
            f"    DATE  = {date.strftime('%Y-%m-%d')};\n"
            f"    KEY   = {smry_obs.key};\n"
            + (
                f"    LOCATION_X={smry_obs.location_x};\n"
                if smry_obs.location_x is not None
                else ""
            )
            + (
                f"    LOCATION_Y={smry_obs.location_y};\n"
                if smry_obs.location_y is not None
                else ""
            )
            + (
                f"    LOCATION_RANGE={smry_obs.location_range};\n"
                if smry_obs.location_range is not None
                else ""
            )
            + "};"
        )
        summary_obs_changes.append(
            _SummaryObservationChange(
                source_observation=smry_obs,
                declaration=declaration,
                date=date,
                lines=obs_config_lines,
            )
        )

    return _TimeMapAndRefcaseRemovalInfo(
        obs_config_path=str(obs_config_file),
        refcase_path=config_dict.get("REFCASE", None),
        time_map_path=time_map_path,
        history_changes=history_changes,
        general_obs_changes=general_obs_changes,
        summary_obs_changes=summary_obs_changes,
    )
643
+
644
+
645
def _find_nearest(
    time_map: list[datetime],
    time: datetime,
    threshold: timedelta = DEFAULT_TIME_DELTA,
) -> int:
    """Index of the ``time_map`` entry closest to ``time``.

    Only entries strictly closer than ``threshold`` are considered.
    On ties the earliest index wins.  Raises IndexError when no entry
    qualifies.
    """
    best_index = -1
    best_diff: timedelta | None = None
    for index, candidate in enumerate(time_map):
        delta = abs(time - candidate)
        if delta >= threshold:
            continue
        if best_diff is None or delta < best_diff:
            best_diff = delta
            best_index = index
    if best_diff is None:
        raise IndexError(f"{time} is not in the time map")
    return best_index
660
+
661
+
662
def _get_time(
    date_dict: LegacyObservationDate, start_time: datetime, context: Any = None
) -> tuple[datetime, str]:
    """Resolve the observation time from DATE, DAYS or HOURS.

    The specifiers are tried in that priority order; the returned pair is
    the resolved datetime plus a human-readable label of which specifier
    produced it.  Raises ObservationConfigError when none is set.
    """
    if date_dict.date is not None:
        return _parse_date(date_dict.date), f"DATE={date_dict.date}"
    if date_dict.days is not None:
        return (
            start_time + timedelta(days=date_dict.days),
            f"DAYS={date_dict.days}",
        )
    if date_dict.hours is not None:
        return (
            start_time + timedelta(hours=date_dict.hours),
            f"HOURS={date_dict.hours}",
        )
    raise ObservationConfigError.with_context("Missing time specifier", context=context)
674
+
675
+
676
def _get_restart(
    date_dict: LegacyObservationDate,
    obs_name: str,
    time_map: list[datetime],
    has_refcase: bool,
) -> int:
    """Map a legacy time specifier to a report step (index into ``time_map``).

    An explicit RESTART wins; otherwise DATE/DAYS/HOURS are resolved via
    ``_get_time`` and looked up with ``_find_nearest``.  The error raised
    on a failed lookup hints at whether the time map came from REFCASE or
    from TIME_MAP.
    """
    if date_dict.restart is not None:
        return date_dict.restart
    if not time_map:
        raise ObservationConfigError.with_context(
            f"Missing REFCASE or TIME_MAP for observations: {obs_name}",
            obs_name,
        )

    time, date_str = _get_time(date_dict, time_map[0], context=obs_name)

    try:
        return _find_nearest(time_map, time)
    except IndexError as err:
        refcase_hint = (
            "The time map is set from the REFCASE keyword. Either "
            "the REFCASE has an incorrect/missing date, or the observation "
            "is given an incorrect date.)"
        )
        time_map_hint = (
            "(The time map is set from the TIME_MAP "
            "keyword. Either the time map file has an "
            "incorrect/missing date, or the observation is given an "
            "incorrect date."
        )
        raise ObservationConfigError.with_context(
            f"Could not find {time} ({date_str}) in "
            f"the time map for observations {obs_name}. "
            + (refcase_hint if has_refcase else time_map_hint),
            obs_name,
        ) from err
710
+
711
+
712
def _legacy_handle_error_mode(
    values: npt.ArrayLike,
    error_dict: ObservationError,
) -> npt.NDArray[np.double]:
    """Compute per-value standard deviations for the given error mode.

    ABS yields a constant error, REL scales with the absolute value, and
    RELMIN is REL clamped from below by ``error_min``.
    """
    values = np.asarray(values)
    mode = error_dict.error_mode
    if mode == ErrorModes.ABS:
        return np.full(values.shape, error_dict.error)
    if mode == ErrorModes.REL:
        return np.abs(values) * error_dict.error
    if mode == ErrorModes.RELMIN:
        relative = np.abs(values) * error_dict.error
        floor = np.full(values.shape, error_dict.error_min)
        return np.maximum(relative, floor)
    # Exhaustiveness guard: unreachable for a valid ErrorModes value.
    assert_never(mode)
729
+
730
+
731
def _handle_history_observation(
    refcase: Refcase | None,
    history_observation: HistoryObservation,
    summary_key: str,
    history_type: HistorySource,
    time_len: int,
) -> pl.DataFrame:
    """Build the observation dataframe for one HISTORY_OBSERVATION.

    Values are read from the refcase vector named ``summary_key`` (or its
    history variant when ``history_type`` is REFCASE_HISTORY); standard
    deviations come from the observation's error settings, overridden per
    SEGMENT where given.  Segment bounds are clamped into [0, time_len)
    with a warning.

    Raises ObservationConfigError when no refcase is available, the key
    is missing from it, or any resulting uncertainty is non-positive.
    """
    if refcase is None:
        raise ObservationConfigError.with_context(
            "REFCASE is required for HISTORY_OBSERVATION", summary_key
        )

    # REFCASE_HISTORY means read e.g. FOPRH for observation FOPR.
    if history_type == HistorySource.REFCASE_HISTORY:
        local_key = history_key(summary_key)
    else:
        local_key = summary_key
    if local_key not in refcase.keys:
        raise ObservationConfigError.with_context(
            f"Key {local_key!r} is not present in refcase", summary_key
        )
    values = refcase.values[refcase.keys.index(local_key)]
    std_dev = _legacy_handle_error_mode(values, history_observation)
    for segment in history_observation.segments:
        start = segment.start
        stop = segment.stop
        # Clamp out-of-range or inverted segments, warning each time.
        if start < 0:
            ConfigWarning.warn(
                f"Segment {segment.name} out of bounds."
                " Truncating start of segment to 0.",
                segment.name,
            )
            start = 0
        if stop >= time_len:
            ConfigWarning.warn(
                f"Segment {segment.name} out of bounds. Truncating"
                f" end of segment to {time_len - 1}.",
                segment.name,
            )
            stop = time_len - 1
        if start > stop:
            ConfigWarning.warn(
                f"Segment {segment.name} start after stop. Truncating"
                f" end of segment to {start}.",
                segment.name,
            )
            stop = start
        if np.size(std_dev[start:stop]) == 0:
            ConfigWarning.warn(
                f"Segment {segment.name} does not"
                " contain any time steps. The interval "
                f"[{start}, {stop}) does not intersect with steps in the"
                "time map.",
                segment.name,
            )
        # Segment error settings override the defaults on [start, stop).
        std_dev[start:stop] = _legacy_handle_error_mode(values[start:stop], segment)
    dates_series = pl.Series(refcase.dates).dt.cast_time_unit("ms")
    if (std_dev <= 0).any():
        raise ObservationConfigError.with_context(
            "Observation uncertainty must be given a strictly positive value",
            summary_key,
        ) from None

    return pl.DataFrame(
        {
            "response_key": summary_key,
            "observation_key": summary_key,
            "time": dates_series,
            "observations": pl.Series(values, dtype=pl.Float32),
            "std": pl.Series(std_dev, dtype=pl.Float32),
            "location_x": pl.Series([None] * len(values), dtype=pl.Float32),
            "location_y": pl.Series([None] * len(values), dtype=pl.Float32),
            "location_range": pl.Series([None] * len(values), dtype=pl.Float32),
        }
    )
805
+
806
+
807
+ def _read_time_map(file_contents: str) -> list[datetime]:
808
+ def str_to_datetime(date_str: str) -> datetime:
809
+ try:
810
+ return datetime.fromisoformat(date_str)
811
+ except ValueError:
812
+ logger.warning(
813
+ "DD/MM/YYYY date format is deprecated"
814
+ ", please use ISO date format YYYY-MM-DD."
815
+ )
816
+ return datetime.strptime(date_str, "%d/%m/%Y")
817
+
818
+ dates = []
819
+ for line in file_contents.splitlines():
820
+ dates.append(str_to_datetime(line.strip()))
821
+ return dates