webviz-subsurface 0.2.39__py3-none-any.whl → 0.2.41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webviz_subsurface/_figures/timeseries_figure.py +1 -1
- webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py +3 -1
- webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py +3 -1
- webviz_subsurface/_providers/ensemble_table_provider/ensemble_table_provider_impl_arrow.py +3 -1
- webviz_subsurface/_utils/dataframe_utils.py +1 -1
- webviz_subsurface/_version.py +34 -0
- webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py +5 -5
- webviz_subsurface/plugins/_co2_migration/__init__.py +1 -0
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_plugin.py +86 -46
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/callbacks.py +53 -30
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/co2volume.py +283 -40
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/color_tables.py +1 -1
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/containment_data_provider.py +6 -4
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/containment_info.py +6 -0
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/ensemble_well_picks.py +1 -1
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/generic.py +59 -6
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/initialization.py +73 -10
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/polygon_handler.py +1 -1
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/summary_graphs.py +20 -18
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/surface_publishing.py +18 -20
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/unsmry_data_provider.py +8 -8
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/views/mainview/mainview.py +98 -44
- webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/views/mainview/settings.py +7 -5
- webviz_subsurface/plugins/_disk_usage.py +19 -8
- webviz_subsurface/plugins/_line_plotter_fmu/controllers/build_figure.py +4 -4
- webviz_subsurface/plugins/_map_viewer_fmu/layout.py +2 -1
- webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py +1 -1
- webviz_subsurface/plugins/_parameter_analysis/_utils/_parameters_model.py +5 -5
- webviz_subsurface/plugins/_property_statistics/property_statistics.py +1 -1
- webviz_subsurface/plugins/_relative_permeability.py +6 -6
- webviz_subsurface/plugins/_reservoir_simulation_timeseries_regional.py +12 -12
- webviz_subsurface/plugins/_running_time_analysis_fmu.py +6 -1
- webviz_subsurface/plugins/_seismic_misfit.py +2 -3
- webviz_subsurface/plugins/_simulation_time_series/_views/_subplot_view/_utils/vector_statistics.py +4 -4
- webviz_subsurface/plugins/_structural_uncertainty/views/intersection_and_map.py +1 -1
- webviz_subsurface/plugins/_swatinit_qc/_business_logic.py +1 -1
- webviz_subsurface-0.2.41.dist-info/METADATA +822 -0
- {webviz_subsurface-0.2.39.dist-info → webviz_subsurface-0.2.41.dist-info}/RECORD +51 -102
- {webviz_subsurface-0.2.39.dist-info → webviz_subsurface-0.2.41.dist-info}/WHEEL +1 -1
- {webviz_subsurface-0.2.39.dist-info → webviz_subsurface-0.2.41.dist-info}/entry_points.txt +1 -1
- {webviz_subsurface-0.2.39.dist-info → webviz_subsurface-0.2.41.dist-info}/top_level.txt +0 -1
- tests/integration_tests/test_parameter_filter.py +0 -28
- tests/integration_tests/test_surface_selector.py +0 -53
- tests/unit_tests/abbreviations_tests/test_reservoir_simulation.py +0 -94
- tests/unit_tests/data_input/__init__.py +0 -0
- tests/unit_tests/data_input/test_calc_from_cumulatives.py +0 -178
- tests/unit_tests/data_input/test_image_processing.py +0 -11
- tests/unit_tests/mocks/__init__.py +0 -0
- tests/unit_tests/mocks/ensemble_summary_provider_dummy.py +0 -67
- tests/unit_tests/model_tests/__init__.py +0 -0
- tests/unit_tests/model_tests/test_ensemble_model.py +0 -176
- tests/unit_tests/model_tests/test_ensemble_set_model.py +0 -105
- tests/unit_tests/model_tests/test_gruptree_model.py +0 -89
- tests/unit_tests/model_tests/test_property_statistics_model.py +0 -42
- tests/unit_tests/model_tests/test_surface_set_model.py +0 -48
- tests/unit_tests/model_tests/test_well_attributes_model.py +0 -110
- tests/unit_tests/model_tests/test_well_set_model.py +0 -70
- tests/unit_tests/plugin_tests/__init__.py +0 -0
- tests/unit_tests/plugin_tests/test_grouptree.py +0 -175
- tests/unit_tests/plugin_tests/test_simulation_time_series/__init__.py +0 -0
- tests/unit_tests/plugin_tests/test_simulation_time_series/mocks/__init__.py +0 -0
- tests/unit_tests/plugin_tests/test_simulation_time_series/mocks/derived_vectors_accessor_ensemble_summary_provider_mock.py +0 -60
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/__init__.py +0 -0
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_create_vector_traces_utils.py +0 -530
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_dataframe_utils.py +0 -119
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_datetime_utils.py +0 -51
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_delta_ensemble_utils.py +0 -222
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_derived_delta_ensemble_vectors_accessor_impl.py +0 -319
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_derived_ensemble_vectors_accessor_impl.py +0 -271
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_derived_ensemble_vectors_accessor_utils.py +0 -78
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_derived_vector_accessor.py +0 -57
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_ensemble_summary_provider_set_utils.py +0 -213
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_from_timeseries_cumulatives.py +0 -322
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_history_vectors.py +0 -201
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_trace_line_shape.py +0 -56
- tests/unit_tests/plugin_tests/test_simulation_time_series/test_utils/test_vector_statistics.py +0 -171
- tests/unit_tests/plugin_tests/test_tornado_data.py +0 -130
- tests/unit_tests/plugin_tests/test_well_completions.py +0 -158
- tests/unit_tests/provider_tests/__init__.py +0 -0
- tests/unit_tests/provider_tests/test_ensemble_summary_provider.py +0 -255
- tests/unit_tests/provider_tests/test_ensemble_summary_provider_impl_arrow_lazy.py +0 -388
- tests/unit_tests/provider_tests/test_ensemble_summary_provider_impl_arrow_presampled.py +0 -160
- tests/unit_tests/provider_tests/test_ensemble_summary_provider_resampling.py +0 -320
- tests/unit_tests/provider_tests/test_ensemble_table_provider.py +0 -190
- tests/unit_tests/utils_tests/__init__.py +0 -0
- tests/unit_tests/utils_tests/test_dataframe_utils.py +0 -281
- tests/unit_tests/utils_tests/test_ensemble_summary_provider_set/__init__.py +0 -0
- tests/unit_tests/utils_tests/test_ensemble_summary_provider_set/test_ensemble_summary_provider_set.py +0 -306
- tests/unit_tests/utils_tests/test_formatting.py +0 -10
- tests/unit_tests/utils_tests/test_simulation_timeseries.py +0 -51
- webviz_subsurface/plugins/_co2_leakage/__init__.py +0 -1
- webviz_subsurface/plugins/_co2_leakage/_utilities/__init__.py +0 -0
- webviz_subsurface/plugins/_co2_leakage/views/__init__.py +0 -0
- webviz_subsurface/plugins/_co2_leakage/views/mainview/__init__.py +0 -0
- webviz_subsurface-0.2.39.dist-info/METADATA +0 -147
- /webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_error.py +0 -0
- /webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_types.py +0 -0
- {tests/integration_tests → webviz_subsurface/plugins/_co2_migration/_utilities}/__init__.py +0 -0
- /webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/_misc.py +0 -0
- /webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/fault_polygons_handler.py +0 -0
- /webviz_subsurface/plugins/{_co2_leakage → _co2_migration}/_utilities/plume_extent.py +0 -0
- {tests/unit_tests → webviz_subsurface/plugins/_co2_migration/views}/__init__.py +0 -0
- {tests/unit_tests/abbreviations_tests → webviz_subsurface/plugins/_co2_migration/views/mainview}/__init__.py +0 -0
- {webviz_subsurface-0.2.39.dist-info → webviz_subsurface-0.2.41.dist-info/licenses}/LICENSE +0 -0
- {webviz_subsurface-0.2.39.dist-info → webviz_subsurface-0.2.41.dist-info/licenses}/LICENSE.chromedriver +0 -0
|
@@ -1,160 +0,0 @@
|
|
|
1
|
-
from datetime import date, datetime
|
|
2
|
-
from pathlib import Path
|
|
3
|
-
|
|
4
|
-
import pandas as pd
|
|
5
|
-
import pytest
|
|
6
|
-
from _pytest.fixtures import SubRequest
|
|
7
|
-
|
|
8
|
-
from webviz_subsurface._providers.ensemble_summary_provider._provider_impl_arrow_presampled import (
|
|
9
|
-
ProviderImplArrowPresampled,
|
|
10
|
-
)
|
|
11
|
-
from webviz_subsurface._providers.ensemble_summary_provider.ensemble_summary_provider import (
|
|
12
|
-
EnsembleSummaryProvider,
|
|
13
|
-
)
|
|
14
|
-
|
|
15
|
-
# fmt: off
|
|
16
|
-
INPUT_DATA_DATETIME = [
|
|
17
|
-
["DATE", "REAL", "A", "C", "Z"],
|
|
18
|
-
[datetime(2021, 12, 20, 23, 59), 0, 10.0, 1.0, 0.0 ],
|
|
19
|
-
[datetime(2021, 12, 20, 23, 59), 1, 12.0, 1.0, 0.0 ],
|
|
20
|
-
[datetime(2021, 12, 21, 22, 58), 1, 13.0, 1.0, 0.0 ],
|
|
21
|
-
]
|
|
22
|
-
|
|
23
|
-
INPUT_DATA_AFTER_2262 = [
|
|
24
|
-
["DATE", "REAL", "A", "C", "Z"],
|
|
25
|
-
[datetime(2500, 12, 20, 23, 59), 0, 10.0, 1.0, 0.0 ],
|
|
26
|
-
[datetime(2500, 12, 20, 23, 59), 1, 12.0, 1.0, 0.0 ],
|
|
27
|
-
[datetime(2500, 12, 21, 22, 58), 1, 13.0, 1.0, 0.0 ],
|
|
28
|
-
]
|
|
29
|
-
|
|
30
|
-
INPUT_DATA_DATE = [
|
|
31
|
-
["DATE", "REAL", "A", "C", "Z"],
|
|
32
|
-
[date(2022, 12, 20), 0, 10.0, 1.0, 0.0 ],
|
|
33
|
-
[date(2022, 12, 20), 1, 12.0, 1.0, 0.0 ],
|
|
34
|
-
[date(2022, 12, 21), 1, 13.0, 1.0, 0.0 ],
|
|
35
|
-
]
|
|
36
|
-
|
|
37
|
-
INPUT_DATA_STR = [
|
|
38
|
-
["DATE", "REAL", "A", "C", "Z"],
|
|
39
|
-
["2023-12-20", 0, 10.0, 1.0, 0.0 ],
|
|
40
|
-
["2023-12-20", 1, 12.0, 1.0, 0.0 ],
|
|
41
|
-
["2023-12-21", 1, 13.0, 1.0, 0.0 ],
|
|
42
|
-
]
|
|
43
|
-
# fmt: on
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
@pytest.fixture(
|
|
47
|
-
name="provider",
|
|
48
|
-
params=[
|
|
49
|
-
INPUT_DATA_DATETIME,
|
|
50
|
-
INPUT_DATA_AFTER_2262,
|
|
51
|
-
INPUT_DATA_DATE,
|
|
52
|
-
INPUT_DATA_STR,
|
|
53
|
-
],
|
|
54
|
-
)
|
|
55
|
-
def fixture_provider(request: SubRequest, tmp_path: Path) -> EnsembleSummaryProvider:
|
|
56
|
-
input_py = request.param
|
|
57
|
-
storage_dir = tmp_path
|
|
58
|
-
|
|
59
|
-
input_df = pd.DataFrame(input_py[1:], columns=input_py[0])
|
|
60
|
-
|
|
61
|
-
ProviderImplArrowPresampled.write_backing_store_from_ensemble_dataframe(
|
|
62
|
-
storage_dir, "dummy_key", input_df
|
|
63
|
-
)
|
|
64
|
-
new_provider = ProviderImplArrowPresampled.from_backing_store(
|
|
65
|
-
storage_dir, "dummy_key"
|
|
66
|
-
)
|
|
67
|
-
|
|
68
|
-
if not new_provider:
|
|
69
|
-
raise ValueError("Failed to create EnsembleSummaryProvider")
|
|
70
|
-
|
|
71
|
-
return new_provider
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
def test_get_vector_names(provider: EnsembleSummaryProvider) -> None:
|
|
75
|
-
all_vecnames = provider.vector_names()
|
|
76
|
-
assert len(all_vecnames) == 3
|
|
77
|
-
assert all_vecnames == ["A", "C", "Z"]
|
|
78
|
-
|
|
79
|
-
non_const_vec_names = provider.vector_names_filtered_by_value(
|
|
80
|
-
exclude_constant_values=True
|
|
81
|
-
)
|
|
82
|
-
assert len(non_const_vec_names) == 1
|
|
83
|
-
assert non_const_vec_names == ["A"]
|
|
84
|
-
|
|
85
|
-
non_zero_vec_names = provider.vector_names_filtered_by_value(
|
|
86
|
-
exclude_all_values_zero=True
|
|
87
|
-
)
|
|
88
|
-
assert len(non_zero_vec_names) == 2
|
|
89
|
-
assert non_zero_vec_names == ["A", "C"]
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
def test_get_realizations(provider: EnsembleSummaryProvider) -> None:
|
|
93
|
-
all_realizations = provider.realizations()
|
|
94
|
-
assert len(all_realizations) == 2
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
def test_get_dates(provider: EnsembleSummaryProvider) -> None:
|
|
98
|
-
intersection_of_dates = provider.dates(resampling_frequency=None)
|
|
99
|
-
assert len(intersection_of_dates) == 1
|
|
100
|
-
assert isinstance(intersection_of_dates[0], datetime)
|
|
101
|
-
|
|
102
|
-
r0_dates = provider.dates(resampling_frequency=None, realizations=[0])
|
|
103
|
-
r1_dates = provider.dates(resampling_frequency=None, realizations=[1])
|
|
104
|
-
assert len(r0_dates) == 1
|
|
105
|
-
assert len(r1_dates) == 2
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
def test_get_vectors(provider: EnsembleSummaryProvider) -> None:
|
|
109
|
-
all_vecnames = provider.vector_names()
|
|
110
|
-
assert len(all_vecnames) == 3
|
|
111
|
-
|
|
112
|
-
vecdf = provider.get_vectors_df(["A"], resampling_frequency=None)
|
|
113
|
-
assert vecdf.shape == (3, 3)
|
|
114
|
-
assert vecdf.columns.tolist() == ["DATE", "REAL", "A"]
|
|
115
|
-
|
|
116
|
-
sampleddate = vecdf["DATE"][0]
|
|
117
|
-
assert isinstance(sampleddate, datetime)
|
|
118
|
-
|
|
119
|
-
vecdf = provider.get_vectors_df(["A"], resampling_frequency=None, realizations=[1])
|
|
120
|
-
assert vecdf.shape == (2, 3)
|
|
121
|
-
assert vecdf.columns.tolist() == ["DATE", "REAL", "A"]
|
|
122
|
-
|
|
123
|
-
vecdf = provider.get_vectors_df(
|
|
124
|
-
["C", "A"], resampling_frequency=None, realizations=[0]
|
|
125
|
-
)
|
|
126
|
-
assert vecdf.shape == (1, 4)
|
|
127
|
-
assert vecdf.columns.tolist() == ["DATE", "REAL", "C", "A"]
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
def test_get_vectors_for_date(provider: EnsembleSummaryProvider) -> None:
|
|
131
|
-
intersection_of_dates = provider.dates(resampling_frequency=None)
|
|
132
|
-
assert len(intersection_of_dates) == 1
|
|
133
|
-
|
|
134
|
-
date_to_get = intersection_of_dates[0]
|
|
135
|
-
assert isinstance(date_to_get, datetime)
|
|
136
|
-
|
|
137
|
-
vecdf = provider.get_vectors_for_date_df(date_to_get, ["A"])
|
|
138
|
-
assert vecdf.shape == (2, 2)
|
|
139
|
-
assert vecdf.columns.tolist() == ["REAL", "A"]
|
|
140
|
-
|
|
141
|
-
date_to_get = intersection_of_dates[0]
|
|
142
|
-
vecdf = provider.get_vectors_for_date_df(date_to_get, ["A", "C"], [0])
|
|
143
|
-
assert vecdf.shape == (1, 3)
|
|
144
|
-
assert vecdf.columns.tolist() == ["REAL", "A", "C"]
|
|
145
|
-
|
|
146
|
-
date_to_get = intersection_of_dates[0]
|
|
147
|
-
vecdf = provider.get_vectors_for_date_df(date_to_get, ["A", "Z"], [0])
|
|
148
|
-
assert vecdf.shape == (1, 3)
|
|
149
|
-
assert vecdf.columns.tolist() == ["REAL", "A", "Z"]
|
|
150
|
-
|
|
151
|
-
real1_dates = provider.dates(resampling_frequency=None, realizations=[1])
|
|
152
|
-
assert len(real1_dates) == 2
|
|
153
|
-
date_to_get = real1_dates[0]
|
|
154
|
-
vecdf = provider.get_vectors_for_date_df(date_to_get, ["A", "Z"])
|
|
155
|
-
assert vecdf.shape == (2, 3)
|
|
156
|
-
assert vecdf.columns.tolist() == ["REAL", "A", "Z"]
|
|
157
|
-
date_to_get = real1_dates[1]
|
|
158
|
-
vecdf = provider.get_vectors_for_date_df(date_to_get, ["A", "Z"])
|
|
159
|
-
assert vecdf.shape == (1, 3)
|
|
160
|
-
assert vecdf.columns.tolist() == ["REAL", "A", "Z"]
|
|
@@ -1,320 +0,0 @@
|
|
|
1
|
-
import numpy as np
|
|
2
|
-
import pyarrow as pa
|
|
3
|
-
|
|
4
|
-
from webviz_subsurface._providers.ensemble_summary_provider._resampling import (
|
|
5
|
-
Frequency,
|
|
6
|
-
generate_normalized_sample_dates,
|
|
7
|
-
interpolate_backfill,
|
|
8
|
-
sample_segmented_multi_real_table_at_date,
|
|
9
|
-
)
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
def _create_table_from_row_data(
|
|
13
|
-
per_row_input_data: list, schema: pa.Schema
|
|
14
|
-
) -> pa.Table:
|
|
15
|
-
# Turn rows into columns
|
|
16
|
-
columns_with_header = list(zip(*per_row_input_data))
|
|
17
|
-
|
|
18
|
-
input_dict = {}
|
|
19
|
-
for col in columns_with_header:
|
|
20
|
-
colname = col[0]
|
|
21
|
-
coldata = col[1:]
|
|
22
|
-
input_dict[colname] = coldata
|
|
23
|
-
|
|
24
|
-
table = pa.Table.from_pydict(input_dict, schema=schema)
|
|
25
|
-
|
|
26
|
-
return table
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
def test_generate_sample_dates_daily() -> None:
|
|
30
|
-
dates = generate_normalized_sample_dates(
|
|
31
|
-
np.datetime64("2020-12-30"), np.datetime64("2021-01-05"), Frequency.DAILY
|
|
32
|
-
)
|
|
33
|
-
assert len(dates) == 7
|
|
34
|
-
assert dates[0] == np.datetime64("2020-12-30")
|
|
35
|
-
assert dates[-1] == np.datetime64("2021-01-05")
|
|
36
|
-
|
|
37
|
-
dates = generate_normalized_sample_dates(
|
|
38
|
-
np.datetime64("2020-12-30T01:30"),
|
|
39
|
-
np.datetime64("2021-01-05T02:30"),
|
|
40
|
-
Frequency.DAILY,
|
|
41
|
-
)
|
|
42
|
-
assert len(dates) == 8
|
|
43
|
-
assert dates[0] == np.datetime64("2020-12-30")
|
|
44
|
-
assert dates[-1] == np.datetime64("2021-01-06")
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
def test_generate_sample_dates_weekly() -> None:
|
|
48
|
-
# Mondays
|
|
49
|
-
# 2020-12-21
|
|
50
|
-
# 2020-12-28
|
|
51
|
-
# 2021-01-04
|
|
52
|
-
# 2021-01-11
|
|
53
|
-
|
|
54
|
-
dates = generate_normalized_sample_dates(
|
|
55
|
-
np.datetime64("2020-12-28"), np.datetime64("2021-01-11"), Frequency.WEEKLY
|
|
56
|
-
)
|
|
57
|
-
assert len(dates) == 3
|
|
58
|
-
assert dates[0] == np.datetime64("2020-12-28")
|
|
59
|
-
assert dates[-1] == np.datetime64("2021-01-11")
|
|
60
|
-
|
|
61
|
-
dates = generate_normalized_sample_dates(
|
|
62
|
-
np.datetime64("2020-12-27T00:01"),
|
|
63
|
-
np.datetime64("2021-01-05T02:30"),
|
|
64
|
-
Frequency.WEEKLY,
|
|
65
|
-
)
|
|
66
|
-
assert len(dates) == 4
|
|
67
|
-
assert dates[0] == np.datetime64("2020-12-21")
|
|
68
|
-
assert dates[-1] == np.datetime64("2021-01-11")
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
def test_generate_sample_dates_monthly() -> None:
|
|
72
|
-
dates = generate_normalized_sample_dates(
|
|
73
|
-
np.datetime64("2020-12-01"), np.datetime64("2021-01-01"), Frequency.MONTHLY
|
|
74
|
-
)
|
|
75
|
-
assert len(dates) == 2
|
|
76
|
-
assert dates[0] == np.datetime64("2020-12-01")
|
|
77
|
-
assert dates[-1] == np.datetime64("2021-01-01")
|
|
78
|
-
|
|
79
|
-
dates = generate_normalized_sample_dates(
|
|
80
|
-
np.datetime64("2020-12-30"),
|
|
81
|
-
np.datetime64("2022-01-01T01:01"),
|
|
82
|
-
Frequency.MONTHLY,
|
|
83
|
-
)
|
|
84
|
-
assert len(dates) == 15
|
|
85
|
-
assert dates[0] == np.datetime64("2020-12-01")
|
|
86
|
-
assert dates[-1] == np.datetime64("2022-02-01")
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
def test_generate_sample_dates_yearly() -> None:
|
|
90
|
-
dates = generate_normalized_sample_dates(
|
|
91
|
-
np.datetime64("2020-01-01"), np.datetime64("2020-01-02"), Frequency.YEARLY
|
|
92
|
-
)
|
|
93
|
-
assert len(dates) == 2
|
|
94
|
-
assert dates[0] == np.datetime64("2020-01-01")
|
|
95
|
-
assert dates[-1] == np.datetime64("2021-01-01")
|
|
96
|
-
|
|
97
|
-
dates = generate_normalized_sample_dates(
|
|
98
|
-
np.datetime64("2020-01-01"), np.datetime64("2022-01-01"), Frequency.YEARLY
|
|
99
|
-
)
|
|
100
|
-
assert len(dates) == 3
|
|
101
|
-
assert dates[0] == np.datetime64("2020-01-01")
|
|
102
|
-
assert dates[-1] == np.datetime64("2022-01-01")
|
|
103
|
-
|
|
104
|
-
dates = generate_normalized_sample_dates(
|
|
105
|
-
np.datetime64("2020-12-30"),
|
|
106
|
-
np.datetime64("2022-01-01T01:01"),
|
|
107
|
-
Frequency.YEARLY,
|
|
108
|
-
)
|
|
109
|
-
assert len(dates) == 4
|
|
110
|
-
assert dates[0] == np.datetime64("2020-01-01")
|
|
111
|
-
assert dates[-1] == np.datetime64("2023-01-01")
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
def test_interpolate_backfill() -> None:
|
|
115
|
-
raw_x = np.array([0, 2, 4, 6])
|
|
116
|
-
raw_y = np.array([0, 20, 40, 60])
|
|
117
|
-
|
|
118
|
-
x = np.array([0, 2, 4, 6])
|
|
119
|
-
y = interpolate_backfill(x, raw_x, raw_y, -99, 99)
|
|
120
|
-
assert (y == raw_y).all()
|
|
121
|
-
|
|
122
|
-
x = np.array([-1, 1, 5, 7])
|
|
123
|
-
expected_y = np.array([-99, 20, 60, 99])
|
|
124
|
-
y = interpolate_backfill(x, raw_x, raw_y, -99, 99)
|
|
125
|
-
assert (y == expected_y).all()
|
|
126
|
-
|
|
127
|
-
x = np.array([-2, -1, 0, 3, 3, 6, 7, 8])
|
|
128
|
-
expected_y = np.array([-99, -99, 0, 40, 40, 60, 99, 99])
|
|
129
|
-
y = interpolate_backfill(x, raw_x, raw_y, -99, 99)
|
|
130
|
-
assert (y == expected_y).all()
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
def test_sample_segmented_multi_real_table_at_date_with_single_real() -> None:
|
|
134
|
-
# pylint: disable=too-many-statements
|
|
135
|
-
# fmt:off
|
|
136
|
-
input_data = [
|
|
137
|
-
["DATE", "REAL", "T", "R"],
|
|
138
|
-
[np.datetime64("2020-01-01", "ms"), 1, 10.0, 1],
|
|
139
|
-
[np.datetime64("2020-01-04", "ms"), 1, 40.0, 4],
|
|
140
|
-
[np.datetime64("2020-01-06", "ms"), 1, 60.0, 6],
|
|
141
|
-
]
|
|
142
|
-
# fmt:on
|
|
143
|
-
|
|
144
|
-
schema = pa.schema(
|
|
145
|
-
[
|
|
146
|
-
pa.field("DATE", pa.timestamp("ms")),
|
|
147
|
-
pa.field("REAL", pa.int64()),
|
|
148
|
-
pa.field("T", pa.float32(), metadata={b"is_rate": b"False"}),
|
|
149
|
-
pa.field("R", pa.float32(), metadata={b"is_rate": b"True"}),
|
|
150
|
-
]
|
|
151
|
-
)
|
|
152
|
-
|
|
153
|
-
table = _create_table_from_row_data(per_row_input_data=input_data, schema=schema)
|
|
154
|
-
|
|
155
|
-
# Exact hit, first actual date
|
|
156
|
-
sampledate = np.datetime64("2020-01-01", "ms")
|
|
157
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
158
|
-
assert res.num_rows == 1
|
|
159
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
160
|
-
assert res["REAL"][0].as_py() == 1
|
|
161
|
-
assert res["T"][0].as_py() == 10
|
|
162
|
-
assert res["R"][0].as_py() == 1
|
|
163
|
-
|
|
164
|
-
# Exact hit, last actual date
|
|
165
|
-
sampledate = np.datetime64("2020-01-06", "ms")
|
|
166
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
167
|
-
assert res.num_rows == 1
|
|
168
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
169
|
-
assert res["REAL"][0].as_py() == 1
|
|
170
|
-
assert res["T"][0].as_py() == 60
|
|
171
|
-
assert res["R"][0].as_py() == 6
|
|
172
|
-
|
|
173
|
-
# Exact hit, middle date
|
|
174
|
-
sampledate = np.datetime64("2020-01-04", "ms")
|
|
175
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
176
|
-
assert res.num_rows == 1
|
|
177
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
178
|
-
assert res["REAL"][0].as_py() == 1
|
|
179
|
-
assert res["T"][0].as_py() == 40
|
|
180
|
-
assert res["R"][0].as_py() == 4
|
|
181
|
-
|
|
182
|
-
# Before first date
|
|
183
|
-
sampledate = np.datetime64("2019-01-01", "ms")
|
|
184
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
185
|
-
assert res.num_rows == 1
|
|
186
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
187
|
-
assert res["REAL"][0].as_py() == 1
|
|
188
|
-
assert res["T"][0].as_py() == 10
|
|
189
|
-
assert res["R"][0].as_py() == 0
|
|
190
|
-
|
|
191
|
-
# After last date
|
|
192
|
-
sampledate = np.datetime64("2020-01-10", "ms")
|
|
193
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
194
|
-
assert res.num_rows == 1
|
|
195
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
196
|
-
assert res["REAL"][0].as_py() == 1
|
|
197
|
-
assert res["T"][0].as_py() == 60
|
|
198
|
-
assert res["R"][0].as_py() == 0
|
|
199
|
-
|
|
200
|
-
# Interpolated
|
|
201
|
-
sampledate = np.datetime64("2020-01-02", "ms")
|
|
202
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
203
|
-
assert res.num_rows == 1
|
|
204
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
205
|
-
assert res["REAL"][0].as_py() == 1
|
|
206
|
-
assert res["T"][0].as_py() == 20
|
|
207
|
-
assert res["R"][0].as_py() == 4
|
|
208
|
-
|
|
209
|
-
# Interpolated
|
|
210
|
-
sampledate = np.datetime64("2020-01-03", "ms")
|
|
211
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
212
|
-
assert res.num_rows == 1
|
|
213
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
214
|
-
assert res["REAL"][0].as_py() == 1
|
|
215
|
-
assert res["T"][0].as_py() == 30
|
|
216
|
-
assert res["R"][0].as_py() == 4
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
def test_sample_segmented_multi_real_table_at_date() -> None:
|
|
220
|
-
# pylint: disable=too-many-statements
|
|
221
|
-
# fmt:off
|
|
222
|
-
input_data = [
|
|
223
|
-
["DATE", "REAL", "T", "R"],
|
|
224
|
-
[np.datetime64("2020-01-01", "ms"), 0, 10.0, 1],
|
|
225
|
-
[np.datetime64("2020-01-04", "ms"), 0, 40.0, 4],
|
|
226
|
-
[np.datetime64("2020-01-06", "ms"), 0, 60.0, 6],
|
|
227
|
-
[np.datetime64("2020-01-02", "ms"), 1, 2000.0, 200],
|
|
228
|
-
[np.datetime64("2020-01-05", "ms"), 1, 5000.0, 500],
|
|
229
|
-
[np.datetime64("2020-01-07", "ms"), 1, 7000.0, 700],
|
|
230
|
-
]
|
|
231
|
-
# fmt:on
|
|
232
|
-
|
|
233
|
-
schema = pa.schema(
|
|
234
|
-
[
|
|
235
|
-
pa.field("DATE", pa.timestamp("ms")),
|
|
236
|
-
pa.field("REAL", pa.int64()),
|
|
237
|
-
pa.field("T", pa.float32(), metadata={b"is_rate": b"False"}),
|
|
238
|
-
pa.field("R", pa.float32(), metadata={b"is_rate": b"True"}),
|
|
239
|
-
]
|
|
240
|
-
)
|
|
241
|
-
|
|
242
|
-
table = _create_table_from_row_data(per_row_input_data=input_data, schema=schema)
|
|
243
|
-
|
|
244
|
-
# Exact hit on first date in R=0
|
|
245
|
-
sampledate = np.datetime64("2020-01-01", "ms")
|
|
246
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
247
|
-
assert res.num_rows == 2
|
|
248
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
249
|
-
assert res["DATE"].to_numpy()[1] == sampledate
|
|
250
|
-
assert res["REAL"][0].as_py() == 0
|
|
251
|
-
assert res["REAL"][1].as_py() == 1
|
|
252
|
-
assert res["T"][0].as_py() == 10
|
|
253
|
-
assert res["T"][1].as_py() == 2000
|
|
254
|
-
assert res["R"][0].as_py() == 1
|
|
255
|
-
assert res["R"][1].as_py() == 0
|
|
256
|
-
|
|
257
|
-
# Exact hit on first date in R=1
|
|
258
|
-
sampledate = np.datetime64("2020-01-02", "ms")
|
|
259
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
260
|
-
assert res.num_rows == 2
|
|
261
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
262
|
-
assert res["DATE"].to_numpy()[1] == sampledate
|
|
263
|
-
assert res["REAL"][0].as_py() == 0
|
|
264
|
-
assert res["REAL"][1].as_py() == 1
|
|
265
|
-
assert res["T"][0].as_py() == 20
|
|
266
|
-
assert res["T"][1].as_py() == 2000
|
|
267
|
-
assert res["R"][0].as_py() == 4
|
|
268
|
-
assert res["R"][1].as_py() == 200
|
|
269
|
-
|
|
270
|
-
# Exact hit on last actual date in R=0
|
|
271
|
-
sampledate = np.datetime64("2020-01-06", "ms")
|
|
272
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
273
|
-
assert res.num_rows == 2
|
|
274
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
275
|
-
assert res["DATE"].to_numpy()[1] == sampledate
|
|
276
|
-
assert res["REAL"][0].as_py() == 0
|
|
277
|
-
assert res["REAL"][1].as_py() == 1
|
|
278
|
-
assert res["T"][0].as_py() == 60
|
|
279
|
-
assert res["T"][1].as_py() == 6000
|
|
280
|
-
assert res["R"][0].as_py() == 6
|
|
281
|
-
assert res["R"][1].as_py() == 700
|
|
282
|
-
|
|
283
|
-
# Exact hit on last actual date in R=1
|
|
284
|
-
sampledate = np.datetime64("2020-01-07", "ms")
|
|
285
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
286
|
-
assert res.num_rows == 2
|
|
287
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
288
|
-
assert res["DATE"].to_numpy()[1] == sampledate
|
|
289
|
-
assert res["REAL"][0].as_py() == 0
|
|
290
|
-
assert res["REAL"][1].as_py() == 1
|
|
291
|
-
assert res["T"][0].as_py() == 60
|
|
292
|
-
assert res["T"][1].as_py() == 7000
|
|
293
|
-
assert res["R"][0].as_py() == 0
|
|
294
|
-
assert res["R"][1].as_py() == 700
|
|
295
|
-
|
|
296
|
-
# Interpolated
|
|
297
|
-
sampledate = np.datetime64("2020-01-02", "ms")
|
|
298
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
299
|
-
assert res.num_rows == 2
|
|
300
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
301
|
-
assert res["DATE"].to_numpy()[1] == sampledate
|
|
302
|
-
assert res["REAL"][0].as_py() == 0
|
|
303
|
-
assert res["REAL"][1].as_py() == 1
|
|
304
|
-
assert res["T"][0].as_py() == 20
|
|
305
|
-
assert res["T"][1].as_py() == 2000
|
|
306
|
-
assert res["R"][0].as_py() == 4
|
|
307
|
-
assert res["R"][1].as_py() == 200
|
|
308
|
-
|
|
309
|
-
# Interpolated
|
|
310
|
-
sampledate = np.datetime64("2020-01-03", "ms")
|
|
311
|
-
res = sample_segmented_multi_real_table_at_date(table, sampledate)
|
|
312
|
-
assert res.num_rows == 2
|
|
313
|
-
assert res["DATE"].to_numpy()[0] == sampledate
|
|
314
|
-
assert res["DATE"].to_numpy()[1] == sampledate
|
|
315
|
-
assert res["REAL"][0].as_py() == 0
|
|
316
|
-
assert res["REAL"][1].as_py() == 1
|
|
317
|
-
assert res["T"][0].as_py() == 30
|
|
318
|
-
assert res["T"][1].as_py() == 3000
|
|
319
|
-
assert res["R"][0].as_py() == 4
|
|
320
|
-
assert res["R"][1].as_py() == 500
|
|
@@ -1,190 +0,0 @@
|
|
|
1
|
-
from pathlib import Path
|
|
2
|
-
from typing import Dict, Optional
|
|
3
|
-
|
|
4
|
-
import pandas as pd
|
|
5
|
-
|
|
6
|
-
from webviz_subsurface._providers import (
|
|
7
|
-
ColumnMetadata,
|
|
8
|
-
EnsembleTableProvider,
|
|
9
|
-
EnsembleTableProviderFactory,
|
|
10
|
-
)
|
|
11
|
-
from webviz_subsurface._providers.ensemble_table_provider import (
|
|
12
|
-
EnsembleTableProviderImplArrow,
|
|
13
|
-
)
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
def _create_synthetic_table_provider(
|
|
17
|
-
storage_dir: Path,
|
|
18
|
-
) -> EnsembleTableProvider:
|
|
19
|
-
# fmt: off
|
|
20
|
-
input_data = [
|
|
21
|
-
["REAL", "A", "B", "STR" ],
|
|
22
|
-
[ 0, 1.0, 11.0, "aa" ],
|
|
23
|
-
[ 0, 2.0, 12.0, "bb" ],
|
|
24
|
-
[ 0, 3.0, 13.0, "cc" ],
|
|
25
|
-
[ 1, 4.0, 14.0, "dd" ],
|
|
26
|
-
[ 1, 5.0, 15.0, "ee" ],
|
|
27
|
-
[ 1, 6.0, 16.0, "ff" ],
|
|
28
|
-
[ 1, 7.0, 17.0, "gg" ],
|
|
29
|
-
]
|
|
30
|
-
# fmt: on
|
|
31
|
-
|
|
32
|
-
input_df = pd.DataFrame(input_data[1:], columns=input_data[0])
|
|
33
|
-
|
|
34
|
-
provider: Optional[EnsembleTableProvider]
|
|
35
|
-
|
|
36
|
-
EnsembleTableProviderImplArrow.write_backing_store_from_ensemble_dataframe(
|
|
37
|
-
storage_dir, "dummy_key", input_df
|
|
38
|
-
)
|
|
39
|
-
provider = EnsembleTableProviderImplArrow.from_backing_store(
|
|
40
|
-
storage_dir, "dummy_key"
|
|
41
|
-
)
|
|
42
|
-
|
|
43
|
-
if not provider:
|
|
44
|
-
raise ValueError("Failed to create EnsembleTableProvider")
|
|
45
|
-
|
|
46
|
-
return provider
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
def test_synthetic_get_column_data(testdata_folder: Path) -> None:
|
|
50
|
-
model = _create_synthetic_table_provider(testdata_folder)
|
|
51
|
-
assert model.column_names() == ["A", "B", "STR"]
|
|
52
|
-
assert model.realizations() == [0, 1]
|
|
53
|
-
|
|
54
|
-
df = model.get_column_data(["A"])
|
|
55
|
-
assert df.shape == (7, 2)
|
|
56
|
-
assert df.columns.tolist() == ["REAL", "A"]
|
|
57
|
-
|
|
58
|
-
df = model.get_column_data(["STR"], [1])
|
|
59
|
-
assert df.shape == (4, 2)
|
|
60
|
-
assert df.columns.tolist() == ["REAL", "STR"]
|
|
61
|
-
|
|
62
|
-
assert model.column_metadata("REAL") is None
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
def test_create_from_aggregated_csv_file_smry_csv(
    testdata_folder: Path, tmp_path: Path
) -> None:
    """Import an aggregated summary CSV and check columns, realizations and data."""
    factory = EnsembleTableProviderFactory(tmp_path, allow_storage_writes=True)
    provider = factory.create_from_ensemble_csv_file(
        testdata_folder / "reek_test_data" / "aggregated_data" / "smry.csv"
    )

    column_names = provider.column_names()
    assert len(column_names) == 17
    assert column_names[0] == "DATE"
    assert column_names[16] == "YEARS"

    assert len(provider.realizations()) == 40

    # No realization filter: REAL plus requested column, all 40 realizations.
    frame = provider.get_column_data(["YEARS"])
    assert len(frame.columns) == 2
    assert list(frame.columns) == ["REAL", "YEARS"]
    assert frame["REAL"].nunique() == 40

    # Filtering on three realizations keeps exactly those three.
    frame = provider.get_column_data(["YEARS"], [0, 39, 10])
    assert len(frame.columns) == 2
    assert list(frame.columns) == ["REAL", "YEARS"]
    assert frame["REAL"].nunique() == 3

    # CSV files carry no per-column metadata.
    meta: Optional[ColumnMetadata] = provider.column_metadata("FOPR")
    assert meta is None
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
def test_create_from_per_realization_csv_file(
    testdata_folder: Path, tmp_path: Path
) -> None:
    """Build a provider from per-realization CSV files and inspect one realization."""
    factory = EnsembleTableProviderFactory(tmp_path, allow_storage_writes=True)
    provider = factory.create_from_per_realization_csv_file(
        str(testdata_folder / "01_drogon_ahm/realization-*/iter-0"),
        "share/results/tables/rft.csv",
    )

    assert len(provider.column_names()) == 13
    assert len(provider.realizations()) == 100

    # Realization 2 has 218 RFT rows with connection indices 1..24.
    frame = provider.get_column_data(["CONIDX"], [2])
    assert frame.shape == (218, 2)
    assert list(frame.columns) == ["REAL", "CONIDX"]
    assert frame["REAL"].unique() == [2]
    assert frame["CONIDX"].nunique() == 24
    assert sorted(frame["CONIDX"].unique()) == list(range(1, 25))

    # CSV files carry no per-column metadata.
    meta: Optional[ColumnMetadata] = provider.column_metadata("CONIDX")
    assert meta is None
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
def test_create_from_per_realization_arrow_file(
    testdata_folder: Path, tmp_path: Path
) -> None:
    """Arrow-backed ensembles expose both the data and per-column metadata."""
    factory = EnsembleTableProviderFactory(tmp_path, allow_storage_writes=True)
    provider = factory.create_from_per_realization_arrow_file(
        str(testdata_folder / "01_drogon_ahm/realization-*/iter-0"),
        "share/results/unsmry/*arrow",
    )

    frame = provider.get_column_data(provider.column_names())
    assert frame.shape[0] == 25284
    assert "FOPT" in frame.columns
    assert frame["REAL"].nunique() == 100

    # Unlike CSV, arrow files retain column metadata such as units.
    meta: Optional[ColumnMetadata] = provider.column_metadata("FOPR")
    assert meta is not None
    assert meta.unit == "SM3/DAY"
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
def test_create_from_per_realization_parameter_file(
    testdata_folder: Path, tmp_path: Path
) -> None:
    """Build a provider from per-realization parameter files and spot-check it."""
    factory = EnsembleTableProviderFactory(tmp_path, allow_storage_writes=True)
    provider = factory.create_from_per_realization_parameter_file(
        str(testdata_folder / "01_drogon_ahm/realization-*/iter-0")
    )

    frame = provider.get_column_data(provider.column_names())
    assert "GLOBVAR:FAULT_SEAL_SCALING" in frame.columns
    assert frame["REAL"].nunique() == 100

    # Parameter files carry no per-column metadata.
    meta: Optional[ColumnMetadata] = provider.column_metadata(
        "GLOBVAR:FAULT_SEAL_SCALING"
    )
    assert meta is None
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
def test_create_provider_set_from_aggregated_csv_file(tmp_path: Path) -> None:
    """This tests importing a csv file with an ensemble column with multiple
    ensembles. It will return a dictionary of providers, one for each ensemble.
    """
    factory = EnsembleTableProviderFactory(tmp_path, allow_storage_writes=True)
    provider_set: Dict[
        str, EnsembleTableProvider
    ] = factory.create_provider_set_from_aggregated_csv_file("tests/data/volumes.csv")
    # One provider per distinct ensemble in the source file.
    assert set(provider_set.keys()) == {"iter-0", "iter-1"}

    # Keys are not used in the loop body, so iterate values directly
    # (was: `for _, provider in provider_set.items()` plus debug prints).
    for provider in provider_set.values():
        valdf = provider.get_column_data(provider.column_names())
        assert set(valdf["REAL"].unique()) == {0, 1}
        assert {
            "ZONE",
            "REGION",
            "BULK_OIL",
            "PORE_OIL",
            "HCPV_OIL",
            "STOIIP_OIL",
            "SOURCE",
        }.issubset(set(provider.column_names()))

        # No metadata in csv files
        meta: Optional[ColumnMetadata] = provider.column_metadata("ZONE")
        assert meta is None
|
|
File without changes
|