ert 16.0.9__py3-none-any.whl → 19.0.0rc2__py3-none-any.whl
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- _ert/events.py +19 -2
- _ert/forward_model_runner/client.py +6 -2
- _ert/forward_model_runner/fm_dispatch.py +9 -6
- _ert/forward_model_runner/reporting/event.py +1 -0
- _ert/forward_model_runner/runner.py +1 -2
- _ert/utils.py +12 -0
- ert/__main__.py +58 -38
- ert/analysis/_enif_update.py +8 -4
- ert/analysis/_es_update.py +19 -6
- ert/analysis/_update_commons.py +16 -6
- ert/base_model_context.py +1 -1
- ert/cli/main.py +17 -12
- ert/cli/monitor.py +7 -0
- ert/config/__init__.py +17 -6
- ert/config/_create_observation_dataframes.py +118 -21
- ert/config/_get_num_cpu.py +1 -1
- ert/config/_observations.py +91 -2
- ert/config/_read_summary.py +74 -328
- ert/config/design_matrix.py +62 -23
- ert/config/distribution.py +1 -1
- ert/config/ensemble_config.py +9 -17
- ert/config/ert_config.py +155 -58
- ert/config/everest_control.py +234 -0
- ert/config/{everest_constraints_config.py → everest_response.py} +27 -15
- ert/config/field.py +99 -90
- ert/config/forward_model_step.py +122 -17
- ert/config/gen_data_config.py +5 -10
- ert/config/gen_kw_config.py +11 -41
- ert/config/known_response_types.py +14 -0
- ert/config/parameter_config.py +1 -33
- ert/config/parsing/_option_dict.py +10 -2
- ert/config/parsing/config_errors.py +1 -1
- ert/config/parsing/config_keywords.py +2 -1
- ert/config/parsing/config_schema.py +23 -11
- ert/config/parsing/config_schema_deprecations.py +3 -3
- ert/config/parsing/config_schema_item.py +26 -11
- ert/config/parsing/context_values.py +3 -3
- ert/config/parsing/file_context_token.py +1 -1
- ert/config/parsing/observations_parser.py +6 -2
- ert/config/parsing/queue_system.py +9 -0
- ert/config/parsing/schema_item_type.py +1 -0
- ert/config/queue_config.py +42 -50
- ert/config/response_config.py +0 -8
- ert/config/rft_config.py +275 -0
- ert/config/summary_config.py +3 -8
- ert/config/surface_config.py +73 -26
- ert/config/workflow_fixtures.py +2 -1
- ert/config/workflow_job.py +135 -54
- ert/dark_storage/client/__init__.py +2 -2
- ert/dark_storage/client/_session.py +4 -4
- ert/dark_storage/client/client.py +2 -2
- ert/dark_storage/common.py +12 -3
- ert/dark_storage/compute/misfits.py +11 -7
- ert/dark_storage/endpoints/compute/misfits.py +6 -4
- ert/dark_storage/endpoints/ensembles.py +4 -0
- ert/dark_storage/endpoints/experiment_server.py +30 -24
- ert/dark_storage/endpoints/experiments.py +2 -2
- ert/dark_storage/endpoints/observations.py +8 -6
- ert/dark_storage/endpoints/parameters.py +4 -12
- ert/dark_storage/endpoints/responses.py +24 -5
- ert/dark_storage/json_schema/ensemble.py +3 -0
- ert/dark_storage/json_schema/experiment.py +1 -1
- ert/data/_measured_data.py +6 -5
- ert/ensemble_evaluator/__init__.py +8 -1
- ert/ensemble_evaluator/config.py +2 -1
- ert/ensemble_evaluator/evaluator.py +81 -29
- ert/ensemble_evaluator/event.py +6 -0
- ert/ensemble_evaluator/snapshot.py +3 -1
- ert/ensemble_evaluator/state.py +1 -0
- ert/field_utils/__init__.py +8 -0
- ert/field_utils/field_utils.py +228 -15
- ert/field_utils/grdecl_io.py +1 -1
- ert/field_utils/roff_io.py +1 -1
- ert/gui/__init__.py +5 -2
- ert/gui/ertnotifier.py +1 -1
- ert/gui/ertwidgets/__init__.py +23 -16
- ert/gui/ertwidgets/analysismoduleedit.py +2 -2
- ert/gui/ertwidgets/checklist.py +1 -1
- ert/gui/ertwidgets/closabledialog.py +2 -0
- ert/gui/ertwidgets/copyablelabel.py +2 -0
- ert/gui/ertwidgets/create_experiment_dialog.py +3 -1
- ert/gui/ertwidgets/ensembleselector.py +2 -2
- ert/gui/ertwidgets/listeditbox.py +2 -0
- ert/gui/ertwidgets/models/__init__.py +2 -0
- ert/gui/ertwidgets/models/activerealizationsmodel.py +5 -1
- ert/gui/ertwidgets/models/path_model.py +1 -1
- ert/gui/ertwidgets/models/targetensemblemodel.py +5 -1
- ert/gui/ertwidgets/models/text_model.py +4 -1
- ert/gui/ertwidgets/pathchooser.py +0 -3
- ert/gui/ertwidgets/searchbox.py +17 -4
- ert/gui/ertwidgets/stringbox.py +2 -0
- ert/gui/{suggestor → ertwidgets/suggestor}/_suggestor_message.py +13 -4
- ert/gui/{suggestor → ertwidgets/suggestor}/suggestor.py +63 -30
- ert/gui/main.py +41 -13
- ert/gui/main_window.py +3 -7
- ert/gui/model/fm_step_list.py +3 -0
- ert/gui/model/real_list.py +1 -0
- ert/gui/model/snapshot.py +1 -0
- ert/gui/simulation/combobox_with_description.py +3 -0
- ert/gui/simulation/ensemble_experiment_panel.py +8 -2
- ert/gui/simulation/ensemble_information_filter_panel.py +7 -2
- ert/gui/simulation/ensemble_smoother_panel.py +8 -2
- ert/gui/simulation/evaluate_ensemble_panel.py +17 -7
- ert/gui/simulation/experiment_panel.py +18 -6
- ert/gui/simulation/manual_update_panel.py +35 -10
- ert/gui/simulation/multiple_data_assimilation_panel.py +13 -9
- ert/gui/simulation/run_dialog.py +47 -20
- ert/gui/simulation/single_test_run_panel.py +6 -3
- ert/gui/simulation/view/progress_widget.py +2 -0
- ert/gui/simulation/view/realization.py +5 -1
- ert/gui/simulation/view/update.py +2 -0
- ert/gui/summarypanel.py +20 -1
- ert/gui/tools/event_viewer/panel.py +3 -4
- ert/gui/tools/event_viewer/tool.py +2 -0
- ert/gui/tools/load_results/load_results_panel.py +1 -1
- ert/gui/tools/load_results/load_results_tool.py +2 -0
- ert/gui/tools/manage_experiments/export_dialog.py +136 -0
- ert/gui/tools/manage_experiments/manage_experiments_panel.py +2 -0
- ert/gui/tools/manage_experiments/storage_info_widget.py +121 -16
- ert/gui/tools/manage_experiments/storage_widget.py +4 -3
- ert/gui/tools/plot/customize/color_chooser.py +5 -2
- ert/gui/tools/plot/customize/customize_plot_dialog.py +2 -0
- ert/gui/tools/plot/customize/default_customization_view.py +4 -0
- ert/gui/tools/plot/customize/limits_customization_view.py +3 -0
- ert/gui/tools/plot/customize/statistics_customization_view.py +3 -0
- ert/gui/tools/plot/customize/style_chooser.py +2 -0
- ert/gui/tools/plot/customize/style_customization_view.py +3 -0
- ert/gui/tools/plot/data_type_keys_widget.py +2 -0
- ert/gui/tools/plot/data_type_proxy_model.py +3 -0
- ert/gui/tools/plot/plot_api.py +50 -28
- ert/gui/tools/plot/plot_ensemble_selection_widget.py +17 -10
- ert/gui/tools/plot/plot_widget.py +15 -2
- ert/gui/tools/plot/plot_window.py +41 -19
- ert/gui/tools/plot/plottery/plot_config.py +2 -0
- ert/gui/tools/plot/plottery/plot_context.py +14 -0
- ert/gui/tools/plot/plottery/plots/__init__.py +2 -0
- ert/gui/tools/plot/plottery/plots/cesp.py +3 -1
- ert/gui/tools/plot/plottery/plots/distribution.py +6 -1
- ert/gui/tools/plot/plottery/plots/ensemble.py +13 -5
- ert/gui/tools/plot/plottery/plots/gaussian_kde.py +12 -2
- ert/gui/tools/plot/plottery/plots/histogram.py +3 -1
- ert/gui/tools/plot/plottery/plots/misfits.py +436 -0
- ert/gui/tools/plot/plottery/plots/observations.py +18 -4
- ert/gui/tools/plot/plottery/plots/statistics.py +62 -20
- ert/gui/tools/plot/plottery/plots/std_dev.py +3 -1
- ert/gui/tools/plot/widgets/clearable_line_edit.py +9 -0
- ert/gui/tools/plot/widgets/filter_popup.py +2 -0
- ert/gui/tools/plot/widgets/filterable_kw_list_model.py +3 -0
- ert/gui/tools/plugins/plugin.py +1 -1
- ert/gui/tools/plugins/plugins_tool.py +2 -0
- ert/gui/tools/plugins/process_job_dialog.py +3 -0
- ert/gui/tools/workflows/workflow_dialog.py +2 -0
- ert/gui/tools/workflows/workflows_tool.py +2 -0
- ert/libres_facade.py +5 -7
- ert/logging/__init__.py +4 -1
- ert/mode_definitions.py +2 -0
- ert/plugins/__init__.py +4 -6
- ert/plugins/hook_implementations/workflows/csv_export.py +2 -3
- ert/plugins/hook_implementations/workflows/gen_data_rft_export.py +10 -2
- ert/plugins/hook_specifications/__init__.py +0 -10
- ert/plugins/hook_specifications/jobs.py +0 -9
- ert/plugins/plugin_manager.py +53 -124
- ert/resources/forward_models/run_reservoirsimulator.py +8 -4
- ert/resources/forward_models/template_render.py +10 -10
- ert/resources/shell_scripts/delete_directory.py +2 -2
- ert/run_models/__init__.py +24 -6
- ert/run_models/_create_run_path.py +133 -38
- ert/run_models/ensemble_experiment.py +10 -4
- ert/run_models/ensemble_information_filter.py +8 -1
- ert/run_models/ensemble_smoother.py +9 -3
- ert/run_models/evaluate_ensemble.py +8 -6
- ert/run_models/event.py +7 -3
- ert/run_models/everest_run_model.py +337 -113
- ert/run_models/initial_ensemble_run_model.py +25 -24
- ert/run_models/manual_update.py +6 -3
- ert/run_models/manual_update_enif.py +37 -0
- ert/run_models/model_factory.py +78 -18
- ert/run_models/multiple_data_assimilation.py +22 -11
- ert/run_models/run_model.py +72 -73
- ert/run_models/single_test_run.py +7 -4
- ert/run_models/update_run_model.py +4 -2
- ert/runpaths.py +5 -6
- ert/sample_prior.py +9 -4
- ert/scheduler/__init__.py +10 -5
- ert/scheduler/driver.py +40 -0
- ert/scheduler/event.py +3 -1
- ert/scheduler/job.py +23 -13
- ert/scheduler/lsf_driver.py +15 -5
- ert/scheduler/openpbs_driver.py +10 -4
- ert/scheduler/scheduler.py +5 -0
- ert/scheduler/slurm_driver.py +20 -5
- ert/services/__init__.py +2 -2
- ert/services/_base_service.py +37 -20
- ert/services/_storage_main.py +20 -18
- ert/services/ert_server.py +317 -0
- ert/shared/_doc_utils/__init__.py +4 -2
- ert/shared/_doc_utils/ert_jobs.py +1 -4
- ert/shared/net_utils.py +43 -18
- ert/shared/storage/connection.py +3 -3
- ert/shared/version.py +3 -3
- ert/storage/__init__.py +14 -1
- ert/storage/local_ensemble.py +44 -13
- ert/storage/local_experiment.py +54 -34
- ert/storage/local_storage.py +90 -58
- ert/storage/migration/to10.py +3 -2
- ert/storage/migration/to11.py +9 -10
- ert/storage/migration/to12.py +19 -20
- ert/storage/migration/to13.py +28 -27
- ert/storage/migration/to14.py +3 -3
- ert/storage/migration/to15.py +25 -0
- ert/storage/migration/to16.py +38 -0
- ert/storage/migration/to17.py +42 -0
- ert/storage/migration/to18.py +11 -0
- ert/storage/migration/to19.py +34 -0
- ert/storage/migration/to20.py +23 -0
- ert/storage/migration/to21.py +25 -0
- ert/storage/migration/to6.py +3 -2
- ert/storage/migration/to7.py +12 -13
- ert/storage/migration/to8.py +9 -11
- ert/storage/migration/to9.py +5 -4
- ert/storage/realization_storage_state.py +7 -7
- ert/substitutions.py +12 -28
- ert/validation/active_range.py +7 -7
- ert/validation/ensemble_realizations_argument.py +4 -2
- ert/validation/rangestring.py +16 -16
- ert/workflow_runner.py +6 -3
- {ert-16.0.9.dist-info → ert-19.0.0rc2.dist-info}/METADATA +21 -15
- ert-19.0.0rc2.dist-info/RECORD +524 -0
- {ert-16.0.9.dist-info → ert-19.0.0rc2.dist-info}/WHEEL +1 -1
- everest/api/everest_data_api.py +14 -1
- everest/assets/everest_logo.svg +406 -0
- everest/bin/config_branch_script.py +30 -14
- everest/bin/everconfigdump_script.py +2 -10
- everest/bin/everest_script.py +53 -33
- everest/bin/everlint_script.py +3 -5
- everest/bin/kill_script.py +7 -5
- everest/bin/main.py +11 -24
- everest/bin/monitor_script.py +64 -35
- everest/bin/utils.py +58 -43
- everest/bin/visualization_script.py +23 -13
- everest/config/__init__.py +4 -1
- everest/config/control_config.py +81 -6
- everest/config/control_variable_config.py +4 -3
- everest/config/everest_config.py +102 -79
- everest/config/forward_model_config.py +5 -3
- everest/config/install_data_config.py +7 -5
- everest/config/install_job_config.py +45 -3
- everest/config/install_template_config.py +3 -3
- everest/config/optimization_config.py +19 -6
- everest/config/output_constraint_config.py +8 -2
- everest/config/server_config.py +6 -55
- everest/config/simulator_config.py +62 -17
- everest/config/utils.py +25 -105
- everest/config/validation_utils.py +34 -15
- everest/config_file_loader.py +30 -21
- everest/detached/__init__.py +0 -6
- everest/detached/client.py +7 -52
- everest/detached/everserver.py +19 -45
- everest/everest_storage.py +24 -40
- everest/gui/everest_client.py +2 -3
- everest/gui/main_window.py +2 -2
- everest/optimizer/everest2ropt.py +68 -42
- everest/optimizer/opt_model_transforms.py +15 -20
- everest/optimizer/utils.py +0 -29
- everest/plugins/hook_specs.py +0 -24
- everest/strings.py +1 -6
- everest/util/__init__.py +3 -1
- ert/config/everest_objective_config.py +0 -95
- ert/config/ext_param_config.py +0 -107
- ert/gui/tools/export/__init__.py +0 -3
- ert/gui/tools/export/export_panel.py +0 -83
- ert/gui/tools/export/export_tool.py +0 -67
- ert/gui/tools/export/exporter.py +0 -36
- ert/plugins/hook_specifications/ecl_config.py +0 -29
- ert/services/storage_service.py +0 -127
- ert/summary_key_type.py +0 -234
- ert-16.0.9.dist-info/RECORD +0 -521
- everest/bin/everexport_script.py +0 -53
- everest/config/sampler_config.py +0 -103
- everest/simulator/__init__.py +0 -88
- everest/simulator/everest_to_ert.py +0 -252
- /ert/gui/{suggestor → ertwidgets/suggestor}/__init__.py +0 -0
- /ert/gui/{suggestor → ertwidgets/suggestor}/_colors.py +0 -0
- {ert-16.0.9.dist-info → ert-19.0.0rc2.dist-info}/entry_points.txt +0 -0
- {ert-16.0.9.dist-info → ert-19.0.0rc2.dist-info}/licenses/COPYING +0 -0
- {ert-16.0.9.dist-info → ert-19.0.0rc2.dist-info}/top_level.txt +0 -0
ert/config/_read_summary.py
CHANGED
```diff
@@ -7,20 +7,21 @@ for specification of the file format
 from __future__ import annotations
 
 import fnmatch
-import os
-import os.path
 import re
+import warnings
 from collections.abc import Callable, Sequence
 from datetime import datetime, timedelta
 from enum import Enum, auto
-from
+from functools import lru_cache
 
 import numpy as np
 import numpy.typing as npt
-import
-
-
-
+from resfo_utilities import (
+    InvalidSummaryError,
+    InvalidSummaryKeyError,
+    SummaryReader,
+    make_summary_key,
+)
 
 from .response_config import InvalidResponseFile
 
@@ -48,119 +49,27 @@ def read_summary(
     data/CASE.SMSPEC.
 
     """
-
+    if summary_basename.lower().endswith(".data"):
+        # For backwards compatability, it is
+        # allowed to give REFCASE and ECLBASE both
+        # with and without .DATA extensions
+        summary_basename = summary_basename[:-5]
+    summary = SummaryReader(case_path=summary_basename)
     try:
         date_index, start_date, date_units, keys, indices = _read_spec(
-
+            summary, select_keys
         )
         fetched, time_map = _read_summary(
             summary, start_date, date_units, indices, date_index
         )
-    except
+    except InvalidSummaryError as err:
         raise InvalidResponseFile(
             f"Failed to read summary files {summary_basename}: {err}"
         ) from err
     return (start_date, keys, time_map, fetched)
 
 
-
-    keyword: str,
-    number: int | None = None,
-    name: str | None = None,
-    nx: int | None = None,
-    ny: int | None = None,
-    lgr_name: str | None = None,
-    li: int | None = None,
-    lj: int | None = None,
-    lk: int | None = None,
-) -> str:
-    """Converts values found in the smspec file to the summary_key format.
-
-    Ert and some other applications use a colon separated format to specify
-    summary parameters. For instance:
-
-    >>> make_summary_key(keyword="WOPR", name="WELL1")
-    'WOPR:WELL1'
-    >>> make_summary_key(keyword="BOPR", number=4, nx=2, ny=2)
-    'BOPR:2,2,1'
-    """
-    try:
-        sum_type = SummaryKeyType.from_variable(keyword)
-    except Exception as err:
-        raise InvalidResponseFile(
-            f"Could not read summary keyword '{keyword}': {err}"
-        ) from err
-
-    match sum_type:
-        case SummaryKeyType.FIELD | SummaryKeyType.OTHER:
-            return keyword
-        case SummaryKeyType.REGION | SummaryKeyType.AQUIFER:
-            return f"{keyword}:{number}"
-        case SummaryKeyType.BLOCK:
-            nx, ny = _check_if_missing("block", "dimens", nx, ny)
-            (number,) = _check_if_missing("block", "nums", number)
-            i, j, k = _cell_index(number - 1, nx, ny)
-            return f"{keyword}:{i},{j},{k}"
-        case SummaryKeyType.GROUP | SummaryKeyType.WELL:
-            return f"{keyword}:{name}"
-        case SummaryKeyType.SEGMENT:
-            return f"{keyword}:{name}:{number}"
-        case SummaryKeyType.COMPLETION:
-            nx, ny = _check_if_missing("completion", "dimens", nx, ny)
-            (number,) = _check_if_missing("completion", "nums", number)
-            i, j, k = _cell_index(number - 1, nx, ny)
-            return f"{keyword}:{name}:{i},{j},{k}"
-        case SummaryKeyType.INTER_REGION:
-            (number,) = _check_if_missing("inter region", "nums", number)
-            r1 = number % 32768
-            r2 = ((number - r1) // 32768) - 10
-            return f"{keyword}:{r1}-{r2}"
-        case SummaryKeyType.LOCAL_WELL:
-            (name,) = _check_if_missing("local well", "WGNAMES", name)
-            (lgr_name,) = _check_if_missing("local well", "LGRS", lgr_name)
-            return f"{keyword}:{lgr_name}:{name}"
-        case SummaryKeyType.LOCAL_BLOCK:
-            li, lj, lk = _check_if_missing("local block", "NUMLX", li, lj, lk)
-            (lgr_name,) = _check_if_missing("local block", "LGRS", lgr_name)
-            return f"{keyword}:{lgr_name}:{li},{lj},{lk}"
-        case SummaryKeyType.LOCAL_COMPLETION:
-            li, lj, lk = _check_if_missing("local completion", "NUMLX", li, lj, lk)
-            (name,) = _check_if_missing("local completion", "WGNAMES", name)
-            (lgr_name,) = _check_if_missing("local completion", "LGRS", lgr_name)
-            return f"{keyword}:{lgr_name}:{name}:{li},{lj},{lk}"
-        case SummaryKeyType.NETWORK:
-            (name,) = _check_if_missing("network", "WGNAMES", name)
-            return f"{keyword}:{name}"
-        case default:
-            assert_never(default)
-
-
-__all__ = ["make_summary_key", "read_summary"]
-
-
-def _cell_index(
-    array_index: int, nx: PositiveInt, ny: PositiveInt
-) -> tuple[int, int, int]:
-    k = array_index // (nx * ny)
-    array_index -= k * (nx * ny)
-    j = array_index // nx
-    array_index -= j * nx
-
-    return array_index + 1, j + 1, k + 1
-
-
-T = TypeVar("T")
-
-
-def _check_if_missing(
-    keyword_name: str, missing_key: str, *test_vars: T | None
-) -> list[T]:
-    if any(v is None for v in test_vars):
-        raise InvalidResponseFile(
-            f"Found {keyword_name} keyword in summary "
-            f"specification without {missing_key} keyword"
-        )
-    return test_vars  # type: ignore
+__all__ = ["read_summary"]
 
 
 class DateUnit(Enum):
@@ -175,97 +84,30 @@ class DateUnit(Enum):
         raise InvalidResponseFile(f"Unknown date unit {val}")
 
 
-
-    """
-    >>> _is_base_with_extension("ECLBASE", "ECLBASE.SMSPEC", ["smspec"])
-    True
-    >>> _is_base_with_extension("ECLBASE", "BASE.SMSPEC", ["smspec"])
-    False
-    >>> _is_base_with_extension("ECLBASE", "BASE.FUNSMRY", ["smspec"])
-    False
-    >>> _is_base_with_extension("ECLBASE", "ECLBASE.smspec", ["smspec"])
-    True
-    >>> _is_base_with_extension("ECLBASE.tar.gz", "ECLBASE.tar.gz.smspec", ["smspec"])
-    True
-    """
-    if "." not in path:
-        return False
-    splitted = path.split(".")
-    return ".".join(splitted[0:-1]) == base and splitted[-1].lower() in exts
-
-
-def _is_unsmry(base: str, path: str) -> bool:
-    return _is_base_with_extension(base, path, ["unsmry", "funsmry"])
-
-
-def _is_smspec(base: str, path: str) -> bool:
-    return _is_base_with_extension(base, path, ["smspec", "fsmspec"])
-
-
-def _find_file_matching(
-    kind: str, case: str, predicate: Callable[[str, str], bool]
-) -> str:
-    directory, base = os.path.split(case)
-    candidates = list(
-        filter(lambda x: predicate(base, x), os.listdir(directory or "."))
-    )
-    if not candidates:
-        raise FileNotFoundError(f"Could not find any {kind} matching case path {case}")
-    if len(candidates) > 1:
-        raise FileNotFoundError(
-            f"Ambiguous reference to {kind} in {case}, could be any of {candidates}"
-        )
-    return os.path.join(directory, candidates[0])
-
-
-def _get_summary_filenames(filepath: str) -> tuple[str, str]:
-    if filepath.lower().endswith(".data"):
-        # For backwards compatability, it is
-        # allowed to give REFCASE and ECLBASE both
-        # with and without .DATA extensions
-        filepath = filepath[:-5]
-    summary = _find_file_matching("unified summary file", filepath, _is_unsmry)
-    spec = _find_file_matching("smspec file", filepath, _is_smspec)
-    return summary, spec
-
-
-def _key2str(key: bytes | str) -> str:
-    ret = key.decode() if isinstance(key, bytes) else key
-    assert isinstance(ret, str)
-    return ret.strip()
-
-
-def _check_vals(
-    kw: str, spec: str, vals: npt.NDArray[Any] | resfo.MESS
-) -> npt.NDArray[Any]:
-    if vals is resfo.MESS or isinstance(vals, resfo.MESS):
-        raise InvalidResponseFile(f"{kw.strip()} in {spec} has incorrect type MESS")
-    return vals
-
-
+@lru_cache
 def _fetch_keys_to_matcher(fetch_keys: Sequence[str]) -> Callable[[str], bool]:
     """
     Transform the list of keys (with * used as repeated wildcard) into
     a matcher.
 
-    >>> match = _fetch_keys_to_matcher(
+    >>> match = _fetch_keys_to_matcher(("",))
    >>> match("FOPR")
     False
 
-    >>> match = _fetch_keys_to_matcher(
+    >>> match = _fetch_keys_to_matcher(("*",))
     >>> match("FOPR"), match("FO*")
     (True, True)
 
 
-    >>> match = _fetch_keys_to_matcher(
+    >>> match = _fetch_keys_to_matcher(("F*PR",))
     >>> match("WOPR"), match("FOPR"), match("FGPR"), match("SOIL")
     (False, True, True, False)
 
-    >>> match = _fetch_keys_to_matcher(
+    >>> match = _fetch_keys_to_matcher(("WGOR:*",))
     >>> match("FOPR"), match("WGOR:OP1"), match("WGOR:OP2"), match("WGOR")
     (False, True, True, False)
 
-    >>> match = _fetch_keys_to_matcher(
+    >>> match = _fetch_keys_to_matcher(("FOPR", "FGPR"))
     >>> match("FOPR"), match("FGPR"), match("WGOR:OP2"), match("WGOR")
     (True, True, False, False)
     """
@@ -276,119 +118,52 @@ def _fetch_keys_to_matcher(fetch_keys: Sequence[str]) -> Callable[[str], bool]:
 
 
 def _read_spec(
-
+    summary: SummaryReader, fetch_keys: Sequence[str]
 ) -> tuple[int, datetime, DateUnit, list[str], npt.NDArray[np.int64]]:
-    date =
-
-
-
-    wgnames = None
-
-    arrays: dict[str, npt.NDArray[Any] | None] = dict.fromkeys(
-        [
-            "NUMS ",
-            "KEYWORDS",
-            "NUMLX ",
-            "NUMLY ",
-            "NUMLZ ",
-            "LGRS ",
-            "UNITS ",
-        ],
-        None,
-    )
-    if spec.lower().endswith("fsmspec"):
-        mode = "rt"
-        assumed_format = resfo.Format.FORMATTED
+    date = summary.start_date
+    dims = summary.dimensions
+    if dims is not None:
+        nx, ny = dims[0:2]
     else:
-
-
-
-    with open(spec, mode) as fp:
-        for entry in resfo.lazy_read(fp, assumed_format):
-            if all(p is not None for p in [date, n, nx, ny, *arrays.values()]):
-                break
-            kw = entry.read_keyword()
-            if kw in arrays:
-                arrays[kw] = _check_vals(kw, spec, entry.read_array())
-            if kw in {"WGNAMES ", "NAMES "}:
-                wgnames = _check_vals(kw, spec, entry.read_array())
-            if kw == "DIMENS ":
-                vals = _check_vals(kw, spec, entry.read_array())
-                size = len(vals)
-                n = vals[0] if size > 0 else None
-                nx = vals[1] if size > 1 else None
-                ny = vals[2] if size > 2 else None
-            if kw == "STARTDAT":
-                vals = _check_vals(kw, spec, entry.read_array())
-                size = len(vals)
-                day = vals[0] if size > 0 else 0
-                month = vals[1] if size > 1 else 0
-                year = vals[2] if size > 2 else 0
-                hour = vals[3] if size > 3 else 0
-                minute = vals[4] if size > 4 else 0
-                microsecond = vals[5] if size > 5 else 0
-                try:
-                    date = datetime(
-                        day=day,
-                        month=month,
-                        year=year,
-                        hour=hour,
-                        minute=minute,
-                        second=microsecond // 10**6,
-                        # Due to https://github.com/equinor/ert/issues/6952
-                        # microseconds have to be ignored to avoid overflow
-                        # in netcdf3 files
-                        # microsecond=self.micro_seconds % 10**6,
-                    )
-                except Exception as err:
-                    raise InvalidResponseFile(
-                        f"SMSPEC {spec} contains invalid STARTDAT: {err}"
-                    ) from err
-    keywords = arrays["KEYWORDS"]
-    nums = arrays["NUMS "]
-    numlx = arrays["NUMLX "]
-    numly = arrays["NUMLY "]
-    numlz = arrays["NUMLZ "]
-    lgr_names = arrays["LGRS "]
+        nx, ny = None, None
+
+    keywords = summary.summary_keywords
 
     if date is None:
-        raise InvalidResponseFile(
-
-
-    if n is None:
-        n = len(keywords)
+        raise InvalidResponseFile(
+            f"Keyword STARTDAT missing in {summary.smspec_filename}"
+        )
 
     indices: list[int] = []
     keys: list[str] = []
     index_mapping: dict[str, int] = {}
     date_index = None
+    date_unit_str = None
+
+    should_load_key = _fetch_keys_to_matcher(tuple(fetch_keys))
+
+    for i, kw in enumerate(keywords):
+        try:
+            key = make_summary_key(
+                kw.summary_variable,
+                kw.number,
+                kw.name,
+                nx,
+                ny,
+                kw.lgr_name,
+                kw.li,
+                kw.lj,
+                kw.lk,
+            )
+            if kw.summary_variable == "TIME":
+                date_index = i
+                date_unit_str = kw.unit
+        except InvalidSummaryKeyError as err:
+            warnings.warn(
+                f"Found {err} in summary specification, key not loaded", stacklevel=2
+            )
+            continue
 
-    should_load_key = _fetch_keys_to_matcher(fetch_keys)
-
-    def optional_get(arr: npt.NDArray[Any] | None, idx: int) -> Any:
-        if arr is None:
-            return None
-        if len(arr) <= idx:
-            return None
-        return arr[idx]
-
-    for i in range(n):
-        keyword = _key2str(keywords[i])
-        if keyword == "TIME":
-            date_index = i
-
-        name = optional_get(wgnames, i)
-        if name is not None:
-            name = _key2str(name)
-        num = optional_get(nums, i)
-        lgr_name = optional_get(lgr_names, i)
-        if lgr_name is not None:
-            lgr_name = _key2str(lgr_name)
-        li = optional_get(numlx, i)
-        lj = optional_get(numly, i)
-        lk = optional_get(numlz, i)
-
-        key = make_summary_key(keyword, num, name, nx, ny, lgr_name, li, lj, lk)
         if should_load_key(key):
             if key in index_mapping:
                 # only keep the index of the last occurrence of a key
@@ -406,19 +181,19 @@ def _read_spec(
 
     indices_array = np.array(indices, dtype=np.int64)[rearranged]
 
-    units = arrays["UNITS "]
-    if units is None:
-        raise InvalidResponseFile(f"Keyword units missing in {spec}")
     if date_index is None:
-        raise InvalidResponseFile(
-
-
+        raise InvalidResponseFile(
+            f"KEYWORDS did not contain TIME in {summary.smspec_filename}"
+        )
+    if date_unit_str is None:
+        raise InvalidResponseFile(f"Unit missing for TIME in {summary.smspec_filename}")
 
-    unit_key = _key2str(units[date_index])
     try:
-        date_unit = DateUnit[
+        date_unit = DateUnit[date_unit_str]
     except KeyError:
-        raise InvalidResponseFile(
+        raise InvalidResponseFile(
+            f"Unknown date unit in {summary.smspec_filename}: {date_unit_str}"
+        ) from None
 
     return (
         date_index,
@@ -443,49 +218,20 @@ def _round_to_seconds(dt: datetime) -> datetime:
 
 
 def _read_summary(
-    summary:
+    summary: SummaryReader,
     start_date: datetime,
     unit: DateUnit,
     indices: npt.NDArray[np.int64],
     date_index: int,
 ) -> tuple[npt.NDArray[np.float32], list[datetime]]:
-    if summary.lower().endswith("funsmry"):
-        mode = "rt"
-        assumed_format = resfo.Format.FORMATTED
-    else:
-        mode = "rb"
-        assumed_format = resfo.Format.UNFORMATTED
-
-    last_params = None
     values: list[npt.NDArray[np.float32]] = []
     dates: list[datetime] = []
-
-
-
-        if last_params is not None:
-            vals = _check_vals("PARAMS", summary, last_params.read_array())
-            values.append(vals[indices])
-
+    try:
+        for v in summary.values(report_step_only=True):
+            values.append(v[indices])
             dates.append(
-                _round_to_seconds(
-                    start_date + unit.make_delta(float(vals[date_index]))
-                ),
+                _round_to_seconds(start_date + unit.make_delta(float(v[date_index]))),
            )
-
-            # times have to be rounded to whole seconds to avoid overflow
-            # in netcdf3 files
-            # dates.append(start_date + unit.make_delta(float(vals[date_index])))
-            last_params = None
-
-    try:
-        with open(summary, mode) as fp:
-            for entry in resfo.lazy_read(fp, assumed_format):
-                kw = entry.read_keyword()
-                if kw == "PARAMS ":
-                    last_params = entry
-                if kw == "SEQHDR ":
-                    read_params()
-            read_params()
+        return np.array(values, dtype=np.float32).T, dates
     except ValueError as e:
         raise InvalidResponseFile(f"Unable to read summary data from {summary}") from e
-    return np.array(values, dtype=np.float32).T, dates
```
ert/config/design_matrix.py
CHANGED
```diff
@@ -25,6 +25,7 @@ class DesignMatrix:
     xls_filename: Path
     design_sheet: str
     default_sheet: str | None
+    priority_source: str = "design_matrix"
 
     def __post_init__(self) -> None:
         try:
@@ -33,6 +34,9 @@ class DesignMatrix:
                 self.design_matrix_df,
                 self.parameter_configurations,
             ) = self.read_and_validate_design_matrix()
+            self.parameter_priority = {
+                cfg.name: self.priority_source for cfg in self.parameter_configurations
+            }
         except (ValueError, AttributeError) as exc:
             raise ConfigValidationError.with_context(
                 f"Error reading design matrix {self.xls_filename}"
@@ -45,8 +49,21 @@ class DesignMatrix:
     def from_config_list(cls, config_list: list[str | dict[str, str]]) -> DesignMatrix:
         filename = Path(cast(str, config_list[0]))
         options = cast(dict[str, str], config_list[1])
+        valid_options = ["DESIGN_SHEET", "DEFAULT_SHEET", "PRIORITY"]
+        option_errors = [
+            ErrorInfo(
+                f"Option {option} is not a valid DESIGN_MATRIX option. "
+                f"Valid options are {', '.join(valid_options)}."
+            ).set_context(config_list)
+            for option in options
+            if option not in valid_options
+        ]
+
+        if option_errors:
+            raise ConfigValidationError.from_collected(option_errors)
         design_sheet = options.get("DESIGN_SHEET", "DesignSheet")
         default_sheet = options.get("DEFAULT_SHEET", None)
+        priority_source = options.get("PRIORITY", DataSource.DESIGN_MATRIX)
         errors = []
         if filename.suffix not in {
             ".xlsx",
@@ -63,6 +80,13 @@ class DesignMatrix:
                     "DESIGN_SHEET and DEFAULT_SHEET can not point to the same sheet."
                 ).set_context(config_list)
             )
+        if priority_source not in {DataSource.DESIGN_MATRIX, DataSource.SAMPLED}:
+            errors.append(
+                ErrorInfo(
+                    f"PRIORITY must be either '{DataSource.DESIGN_MATRIX}'"
+                    f" or '{DataSource.SAMPLED}' priority is '{priority_source}'"
+                ).set_context(config_list)
+            )
         if errors:
             raise ConfigValidationError.from_collected(errors)
         assert design_sheet is not None
@@ -70,6 +94,7 @@ class DesignMatrix:
             xls_filename=filename,
             design_sheet=design_sheet,
            default_sheet=default_sheet,
+            priority_source=priority_source,
        )
 
     def merge_with_other(self, dm_other: DesignMatrix) -> None:
@@ -87,24 +112,17 @@ class DesignMatrix:
         common_keys = set(
             self.design_matrix_df.select(pl.exclude("realization")).columns
         ) & set(dm_other.design_matrix_df.columns)
-        non_identical_cols = set()
         if common_keys:
-
-
-
-
-
-
-
-
-                f"Design Matrices '{self.xls_filename.name} "
-                f"({self.design_sheet} {self.default_sheet or ''})' and "
-                f"'{dm_other.xls_filename.name} ({dm_other.design_sheet} "
-                f"{dm_other.default_sheet or ''})' "
-                "contains non identical columns with the same name: "
-                f"{non_identical_cols}!"
-            )
+            errors.append(
+                ErrorInfo(
+                    f"Design Matrices '{self.xls_filename.name} "
+                    f"({self.design_sheet} {self.default_sheet or ''})' and "
+                    f"'{dm_other.xls_filename.name} ({dm_other.design_sheet} "
+                    f"{dm_other.default_sheet or ''})' "
+                    "contains columns with the same name: "
+                    f"{common_keys}!"
                )
+            )
 
         if errors:
             raise ConfigValidationError.from_collected(errors)
@@ -113,9 +131,7 @@ class DesignMatrix:
         self.design_matrix_df = pl.concat(
             [
                 self.design_matrix_df,
-                dm_other.design_matrix_df.select(
-                    pl.exclude([*list(common_keys), "realization"])
-                ),
+                dm_other.design_matrix_df.select(pl.exclude(["realization"])),
             ],
             how="horizontal",
         )
@@ -133,6 +149,7 @@ class DesignMatrix:
             for cfg in dm_other.parameter_configurations
             if cfg.name not in common_keys
         )
+        self.parameter_priority.update(dm_other.parameter_priority)
 
     def merge_with_existing_parameters(
         self, existing_parameters: list[ParameterConfig]
@@ -154,11 +171,33 @@ class DesignMatrix:
 
         for param_cfg in existing_parameters:
             if isinstance(param_cfg, GenKwConfig) and param_cfg.name in design_cfgs:
-                param_cfg.input_source = DataSource.DESIGN_MATRIX
-                param_cfg.update = False
-                param_cfg.distribution = RawSettings()
                 del design_cfgs[param_cfg.name]
-
+                input_source = DataSource(
+                    self.parameter_priority.get(
+                        param_cfg.name, DataSource.DESIGN_MATRIX.value
+                    )
+                )
+                new_param_configs += [
+                    GenKwConfig(
+                        name=param_cfg.name,
+                        update=(
+                            input_source == DataSource.SAMPLED and param_cfg.update
+                        ),
+                        distribution=(
+                            RawSettings()
+                            if input_source == DataSource.DESIGN_MATRIX
+                            else param_cfg.distribution
+                        ),
+                        group=(
+                            DESIGN_MATRIX_GROUP
+                            if input_source == DataSource.DESIGN_MATRIX
+                            else param_cfg.group
+                        ),
+                        input_source=input_source,
+                    ),
+                ]
+            else:
+                new_param_configs += [param_cfg]
         if design_cfgs.values():
            new_param_configs += list(design_cfgs.values())
         return new_param_configs
```
ert/config/distribution.py
CHANGED
```diff
@@ -17,7 +17,7 @@ class TransSettingsValidation(BaseModel):
     model_config = {"extra": "forbid"}
 
     @classmethod
-    def create(cls
+    def create(cls, *args: Any, **kwargs: Any) -> Self:
         return cls(*args, **kwargs)
 
     @classmethod
```