foxes-1.3-py3-none-any.whl → foxes-1.5-py3-none-any.whl
- docs/source/conf.py +3 -3
- examples/abl_states/run.py +2 -2
- examples/compare_rotors_pwakes/run.py +1 -1
- examples/compare_wakes/run.py +1 -2
- examples/dyn_wakes/run.py +29 -6
- examples/field_data_nc/run.py +1 -1
- examples/induction/run.py +3 -3
- examples/multi_height/run.py +1 -1
- examples/power_mask/run.py +2 -2
- examples/quickstart/run.py +0 -1
- examples/random_timeseries/run.py +3 -4
- examples/scan_row/run.py +3 -3
- examples/sequential/run.py +33 -10
- examples/single_state/run.py +3 -4
- examples/states_lookup_table/run.py +3 -3
- examples/streamline_wakes/run.py +29 -6
- examples/tab_file/run.py +3 -3
- examples/timelines/run.py +29 -5
- examples/timeseries/run.py +3 -3
- examples/timeseries_slurm/run.py +3 -3
- examples/wind_rose/run.py +3 -3
- examples/yawed_wake/run.py +19 -9
- foxes/__init__.py +21 -17
- foxes/algorithms/__init__.py +6 -6
- foxes/algorithms/downwind/__init__.py +2 -2
- foxes/algorithms/downwind/downwind.py +49 -17
- foxes/algorithms/downwind/models/__init__.py +6 -6
- foxes/algorithms/downwind/models/farm_wakes_calc.py +11 -9
- foxes/algorithms/downwind/models/init_farm_data.py +58 -29
- foxes/algorithms/downwind/models/point_wakes_calc.py +7 -13
- foxes/algorithms/downwind/models/set_amb_farm_results.py +1 -1
- foxes/algorithms/downwind/models/set_amb_point_results.py +6 -6
- foxes/algorithms/iterative/__init__.py +7 -3
- foxes/algorithms/iterative/iterative.py +1 -2
- foxes/algorithms/iterative/models/__init__.py +7 -3
- foxes/algorithms/iterative/models/farm_wakes_calc.py +9 -5
- foxes/algorithms/sequential/__init__.py +3 -3
- foxes/algorithms/sequential/models/__init__.py +2 -2
- foxes/algorithms/sequential/sequential.py +3 -4
- foxes/config/__init__.py +5 -1
- foxes/constants.py +16 -0
- foxes/core/__init__.py +45 -22
- foxes/core/algorithm.py +5 -6
- foxes/core/data.py +94 -22
- foxes/core/data_calc_model.py +4 -2
- foxes/core/engine.py +42 -53
- foxes/core/farm_controller.py +2 -2
- foxes/core/farm_data_model.py +16 -13
- foxes/core/ground_model.py +4 -13
- foxes/core/model.py +24 -6
- foxes/core/partial_wakes_model.py +147 -10
- foxes/core/point_data_model.py +21 -17
- foxes/core/rotor_model.py +4 -3
- foxes/core/states.py +2 -3
- foxes/core/turbine.py +2 -1
- foxes/core/wake_deflection.py +130 -0
- foxes/core/wake_model.py +222 -9
- foxes/core/wake_superposition.py +122 -4
- foxes/core/wind_farm.py +6 -6
- foxes/data/__init__.py +7 -2
- foxes/data/states/point_cloud_100.nc +0 -0
- foxes/data/states/weibull_cloud_4.nc +0 -0
- foxes/data/states/weibull_grid.nc +0 -0
- foxes/data/states/weibull_sectors_12.csv +13 -0
- foxes/data/states/weibull_sectors_12.nc +0 -0
- foxes/engines/__init__.py +14 -15
- foxes/engines/dask.py +42 -20
- foxes/engines/default.py +2 -2
- foxes/engines/numpy.py +11 -13
- foxes/engines/pool.py +20 -11
- foxes/engines/single.py +8 -6
- foxes/input/__init__.py +3 -3
- foxes/input/farm_layout/__init__.py +9 -8
- foxes/input/farm_layout/from_arrays.py +68 -0
- foxes/input/farm_layout/from_csv.py +1 -1
- foxes/input/farm_layout/ring.py +0 -1
- foxes/input/states/__init__.py +28 -12
- foxes/input/states/create/__init__.py +3 -2
- foxes/input/states/dataset_states.py +710 -0
- foxes/input/states/field_data.py +531 -0
- foxes/input/states/multi_height.py +11 -6
- foxes/input/states/one_point_flow.py +1 -4
- foxes/input/states/point_cloud_data.py +618 -0
- foxes/input/states/scan.py +2 -0
- foxes/input/states/single.py +3 -1
- foxes/input/states/states_table.py +23 -30
- foxes/input/states/weibull_sectors.py +330 -0
- foxes/input/states/wrg_states.py +8 -6
- foxes/input/yaml/__init__.py +9 -3
- foxes/input/yaml/dict.py +42 -41
- foxes/input/yaml/windio/__init__.py +10 -5
- foxes/input/yaml/windio/read_attributes.py +42 -29
- foxes/input/yaml/windio/read_farm.py +17 -15
- foxes/input/yaml/windio/read_fields.py +4 -2
- foxes/input/yaml/windio/read_outputs.py +25 -15
- foxes/input/yaml/windio/read_site.py +172 -11
- foxes/input/yaml/windio/windio.py +23 -11
- foxes/input/yaml/yaml.py +1 -0
- foxes/models/__init__.py +15 -14
- foxes/models/axial_induction/__init__.py +2 -2
- foxes/models/farm_controllers/__init__.py +1 -1
- foxes/models/farm_models/__init__.py +1 -1
- foxes/models/ground_models/__init__.py +3 -2
- foxes/models/ground_models/wake_mirror.py +3 -3
- foxes/models/model_book.py +190 -63
- foxes/models/partial_wakes/__init__.py +6 -6
- foxes/models/partial_wakes/axiwake.py +30 -5
- foxes/models/partial_wakes/centre.py +47 -0
- foxes/models/partial_wakes/rotor_points.py +41 -11
- foxes/models/partial_wakes/segregated.py +2 -25
- foxes/models/partial_wakes/top_hat.py +27 -2
- foxes/models/point_models/__init__.py +4 -4
- foxes/models/rotor_models/__init__.py +4 -3
- foxes/models/rotor_models/centre.py +1 -1
- foxes/models/rotor_models/direct_infusion.py +241 -0
- foxes/models/turbine_models/__init__.py +11 -11
- foxes/models/turbine_models/calculator.py +16 -3
- foxes/models/turbine_models/kTI_model.py +1 -0
- foxes/models/turbine_models/lookup_table.py +2 -0
- foxes/models/turbine_models/power_mask.py +1 -0
- foxes/models/turbine_models/rotor_centre_calc.py +2 -0
- foxes/models/turbine_models/sector_management.py +1 -0
- foxes/models/turbine_models/set_farm_vars.py +3 -9
- foxes/models/turbine_models/table_factors.py +2 -0
- foxes/models/turbine_models/thrust2ct.py +1 -0
- foxes/models/turbine_models/yaw2yawm.py +2 -0
- foxes/models/turbine_models/yawm2yaw.py +2 -0
- foxes/models/turbine_types/PCt_file.py +2 -6
- foxes/models/turbine_types/PCt_from_two.py +1 -2
- foxes/models/turbine_types/__init__.py +10 -9
- foxes/models/turbine_types/calculator_type.py +123 -0
- foxes/models/turbine_types/null_type.py +1 -0
- foxes/models/turbine_types/wsrho2PCt_from_two.py +2 -0
- foxes/models/turbine_types/wsti2PCt_from_two.py +3 -1
- foxes/models/vertical_profiles/__init__.py +7 -7
- foxes/models/wake_deflections/__init__.py +3 -0
- foxes/models/{wake_frames/yawed_wakes.py → wake_deflections/bastankhah2016.py} +32 -111
- foxes/models/wake_deflections/jimenez.py +277 -0
- foxes/models/wake_deflections/no_deflection.py +94 -0
- foxes/models/wake_frames/__init__.py +6 -7
- foxes/models/wake_frames/dynamic_wakes.py +12 -3
- foxes/models/wake_frames/rotor_wd.py +3 -1
- foxes/models/wake_frames/seq_dynamic_wakes.py +41 -7
- foxes/models/wake_frames/streamlines.py +8 -6
- foxes/models/wake_frames/timelines.py +9 -3
- foxes/models/wake_models/__init__.py +7 -7
- foxes/models/wake_models/dist_sliced.py +50 -84
- foxes/models/wake_models/gaussian.py +20 -0
- foxes/models/wake_models/induction/__init__.py +5 -5
- foxes/models/wake_models/induction/rankine_half_body.py +30 -71
- foxes/models/wake_models/induction/rathmann.py +65 -64
- foxes/models/wake_models/induction/self_similar.py +65 -68
- foxes/models/wake_models/induction/self_similar2020.py +0 -3
- foxes/models/wake_models/induction/vortex_sheet.py +71 -75
- foxes/models/wake_models/ti/__init__.py +2 -2
- foxes/models/wake_models/ti/crespo_hernandez.py +5 -3
- foxes/models/wake_models/ti/iec_ti.py +6 -4
- foxes/models/wake_models/top_hat.py +58 -7
- foxes/models/wake_models/wind/__init__.py +6 -4
- foxes/models/wake_models/wind/bastankhah14.py +25 -7
- foxes/models/wake_models/wind/bastankhah16.py +35 -3
- foxes/models/wake_models/wind/jensen.py +15 -2
- foxes/models/wake_models/wind/turbopark.py +28 -2
- foxes/models/wake_superpositions/__init__.py +18 -9
- foxes/models/wake_superpositions/ti_linear.py +4 -4
- foxes/models/wake_superpositions/ti_max.py +4 -4
- foxes/models/wake_superpositions/ti_pow.py +4 -4
- foxes/models/wake_superpositions/ti_quadratic.py +4 -4
- foxes/models/wake_superpositions/wind_vector.py +257 -0
- foxes/models/wake_superpositions/ws_linear.py +9 -10
- foxes/models/wake_superpositions/ws_max.py +8 -8
- foxes/models/wake_superpositions/ws_pow.py +8 -8
- foxes/models/wake_superpositions/ws_product.py +4 -4
- foxes/models/wake_superpositions/ws_quadratic.py +8 -8
- foxes/output/__init__.py +21 -19
- foxes/output/farm_layout.py +4 -2
- foxes/output/farm_results_eval.py +19 -16
- foxes/output/flow_plots_2d/__init__.py +2 -2
- foxes/output/flow_plots_2d/flow_plots.py +18 -0
- foxes/output/flow_plots_2d/get_fig.py +5 -2
- foxes/output/output.py +6 -1
- foxes/output/results_writer.py +1 -1
- foxes/output/rose_plot.py +13 -3
- foxes/output/rotor_point_plots.py +3 -0
- foxes/output/seq_plugins/__init__.py +2 -2
- foxes/output/seq_plugins/seq_flow_ani_plugin.py +0 -3
- foxes/output/seq_plugins/seq_wake_debug_plugin.py +0 -1
- foxes/output/state_turbine_map.py +3 -0
- foxes/output/turbine_type_curves.py +10 -8
- foxes/utils/__init__.py +37 -19
- foxes/utils/abl/__init__.py +4 -4
- foxes/utils/cubic_roots.py +1 -1
- foxes/utils/data_book.py +4 -3
- foxes/utils/dict.py +49 -37
- foxes/utils/exec_python.py +5 -5
- foxes/utils/factory.py +3 -5
- foxes/utils/geom2d/__init__.py +7 -5
- foxes/utils/geopandas_utils.py +2 -2
- foxes/utils/pandas_utils.py +4 -3
- foxes/utils/tab_files.py +0 -1
- foxes/utils/weibull.py +28 -0
- foxes/utils/wrg_utils.py +3 -1
- foxes/utils/xarray_utils.py +9 -2
- foxes/variables.py +67 -9
- {foxes-1.3.dist-info → foxes-1.5.dist-info}/METADATA +34 -63
- foxes-1.5.dist-info/RECORD +328 -0
- {foxes-1.3.dist-info → foxes-1.5.dist-info}/WHEEL +1 -1
- tests/1_verification/flappy_0_6/PCt_files/flappy/run.py +2 -3
- tests/1_verification/flappy_0_6/PCt_files/test_PCt_files.py +1 -1
- tests/1_verification/flappy_0_6/abl_states/flappy/run.py +0 -1
- tests/1_verification/flappy_0_6/partial_top_hat/flappy/run.py +0 -1
- tests/1_verification/flappy_0_6/partial_top_hat/test_partial_top_hat.py +0 -2
- tests/1_verification/flappy_0_6/row_Jensen_linear_centre/test_row_Jensen_linear_centre.py +0 -1
- tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/test_row_Jensen_linear_tophat.py +0 -1
- tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/test_row_Jensen_linear_tophat_IECTI_2005.py +0 -1
- tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/test_row_Jensen_linear_tophat_IECTI_2019.py +0 -1
- tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/test_row_Jensen_quadratic_centre.py +0 -1
- tests/1_verification/flappy_0_6_2/grid_rotors/flappy/run.py +0 -2
- tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/test_row_Bastankhah_Crespo.py +0 -1
- tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/flappy/run.py +0 -1
- tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/test_row_Bastankhah_linear_centre.py +0 -1
- foxes/input/states/field_data_nc.py +0 -847
- foxes/output/round.py +0 -10
- foxes/utils/pandas_helpers.py +0 -178
- foxes-1.3.dist-info/RECORD +0 -313
- {foxes-1.3.dist-info → foxes-1.5.dist-info}/entry_points.txt +0 -0
- {foxes-1.3.dist-info → foxes-1.5.dist-info/licenses}/LICENSE +0 -0
- {foxes-1.3.dist-info → foxes-1.5.dist-info}/top_level.txt +0 -0
foxes/input/states/dataset_states.py (new file, 710 added lines)

@@ -0,0 +1,710 @@
import numpy as np
import pandas as pd
import xarray as xr
from copy import copy, deepcopy

from foxes.core import States, get_engine
from foxes.utils import import_module
from foxes.data import STATES, StaticData
import foxes.variables as FV
import foxes.constants as FC
from foxes.config import config, get_input_path


def _read_nc_file(
    fpath,
    coords,
    vars,
    nc_engine,
    sel,
    isel,
    minimal,
):
    """Helper function for nc file reading"""
    data = xr.open_dataset(fpath, engine=nc_engine)
    for c in coords:
        if c is not None and c not in data.sizes:
            raise KeyError(
                f"Missing coordinate '{c}' in file {fpath}, got: {list(data.sizes.keys())}"
            )
    if minimal:
        return data[coords[0]].to_numpy()
    else:
        data = data[vars]
        data.attrs = {}
        if isel is not None and len(isel):
            data = data.isel(**isel)
        if sel is not None and len(sel):
            data = data.sel(**sel)
        assert min(data.sizes.values()) > 0, (
            f"States: No data in file {fpath}, isel={isel}, sel={sel}, resulting sizes={data.sizes}"
        )
        return data


class DatasetStates(States):
    """
    Abstract base class for heterogeneous ambient states that
    are based on data from NetCDF files or an xarray Dataset.

    Attributes
    ----------
    data_source: str or xarray.Dataset
        The data or the file search pattern, should end with
        suffix '.nc'. One or many files.
    ovars: list of str
        The output variables
    var2ncvar: dict
        Mapping from variable names to variable names
        in the nc file
    fixed_vars: dict
        Uniform values for output variables, instead
        of reading from data
    load_mode: str
        The load mode, choices: preload, lazy, fly.
        preload loads all data during initialization,
        lazy lazy-loads the data using dask, and fly
        reads only states index and weights during initialization
        and then opens the relevant files again within
        the chunk calculation
    time_format: str
        The datetime parsing format string
    sel: dict, optional
        Subset selection via xr.Dataset.sel()
    isel: dict, optional
        Subset selection via xr.Dataset.isel()
    weight_factor: float
        The factor to multiply the weights with

    :group: input.states

    """

    def __init__(
        self,
        data_source,
        output_vars,
        var2ncvar={},
        fixed_vars={},
        load_mode="preload",
        time_format="%Y-%m-%d_%H:%M:%S",
        sel=None,
        isel=None,
        weight_factor=None,
        **kwargs,
    ):
        """
        Constructor.

        Parameters
        ----------
        data_source: str or xarray.Dataset
            The data or the file search pattern, should end with
            suffix '.nc'. One or many files.
        output_vars: list of str
            The output variables
        var2ncvar: dict, optional
            Mapping from variable names to variable names
            in the nc file
        fixed_vars: dict, optional
            Uniform values for output variables, instead
            of reading from data
        load_mode: str
            The load mode, choices: preload, lazy, fly.
            preload loads all data during initialization,
            lazy lazy-loads the data using dask, and fly
            reads only states index and weights during initialization
            and then opens the relevant files again within
            the chunk calculation
        time_format: str
            The datetime parsing format string
        sel: dict, optional
            Subset selection via xr.Dataset.sel()
        isel: dict, optional
            Subset selection via xr.Dataset.isel()
        weight_factor: float, optional
            The factor to multiply the weights with
        kwargs: dict, optional
            Additional arguments for the base class

        """
        super().__init__(**kwargs)

        self.ovars = list(output_vars)
        self.fixed_vars = fixed_vars
        self.load_mode = load_mode
        self.var2ncvar = var2ncvar
        self.time_format = time_format
        self.sel = sel
        self.isel = isel
        self.weight_factor = weight_factor

        self._N = None
        self._inds = None
        self.__data_source = data_source

    @property
    def data_source(self):
        """
        The data source

        Returns
        -------
        s: object
            The data source

        """
        if self.load_mode in ["preload", "fly"] and self.running:
            raise ValueError(
                f"States '{self.name}': Cannot access data_source while running for load mode '{self.load_mode}'"
            )
        return self.__data_source

    def _read_ds(self, ds, cmap, variables, verbosity=0):
        """
        Helper function for _get_data, extracts data from the original Dataset.

        Parameters
        ----------
        ds: xarray.Dataset
            The Dataset to read data from
        cmap: dict
            A mapping from foxes variable names to Dataset dimension names
        variables: list of str
            The variables to extract from the Dataset
        verbosity: int
            The verbosity level, 0 = silent

        Returns
        -------
        coords: dict
            keys: Foxes variable names, values: 1D coordinate value arrays
        data: dict
            The extracted data, keys are variable names,
            values are tuples (dims, data_array)
            where dims is a tuple of dimension names and
            data_array is a numpy.ndarray with the data values

        """
        data = {}
        for v in variables:
            w = self.var2ncvar.get(v, v)
            if w in ds.data_vars:
                d = ds[w]
                i = [d.dims.index(c) for c in cmap.values() if c in d.dims]
                assert len(i) == len(d.dims), (
                    f"States '{self.name}': Variable '{w}' has dimensions {d.dims}, but not all of them are in the coordinate mapping {cmap}"
                )
                dms = tuple([v for v, c in cmap.items() if c in d.dims])
                j = list(range(len(i)))
                if i == j:
                    data[v] = (dms, d.to_numpy())
                elif len(i) == 2:
                    data[v] = (dms, np.swapaxes(d.to_numpy(), 0, 1))
                else:
                    data[v] = (dms, np.moveaxis(d.to_numpy(), i, j))
            else:
                raise KeyError(
                    f"States '{self.name}': Variable '{w}' not found in data source '{self.data_source}', available variables: {list(ds.data_vars)}"
                )

        coords = {v: ds[c].to_numpy() for v, c in cmap.items() if c in ds.coords}

        if verbosity > 1:
            if len(coords):
                print(f"\n{self.name}: Coordinate ranges")
                for c, d in coords.items():
                    print(f" {c}: {np.min(d)} --> {np.max(d)}")
            print(f"\n{self.name}: Data ranges")
            for v, d in data.items():
                nn = np.sum(np.isnan(d))
                print(
                    f" {v}: {np.nanmin(d)} --> {np.nanmax(d)}, nans: {nn} ({100 * nn / len(d.flat):.2f}%)"
                )

        return coords, data

    def _get_data(self, ds, cmap, variables, verbosity=0):
        """
        Gets the data from the Dataset and prepares it for calculations.

        Parameters
        ----------
        ds: xarray.Dataset
            The Dataset to read data from
        cmap: dict
            A mapping from foxes variable names to Dataset dimension names
        variables: list of str
            The variables to extract from the Dataset
        verbosity: int
            The verbosity level, 0 = silent

        Returns
        -------
        coords: dict
            keys: Foxes variable names, values: 1D coordinate value arrays
        data: dict
            The extracted data, keys are dimension tuples,
            values are tuples (DATA key, variables, data_array)
            where DATA key is the name in the mdata object,
            variables is a list of variable names, and
            data_array is a numpy.ndarray with the data values,
            the last dimension corresponds to the variables
        weights: numpy.ndarray or None
            The weights array, if only state dependent, otherwise
            weights are among data. Shape: (n_states,)

        """
        coords, data0 = self._read_ds(ds, cmap, variables, verbosity=verbosity)

        weights = None
        if FV.WEIGHT in variables:
            assert FV.WEIGHT in data0, (
                f"States '{self.name}': Missing weights variable '{FV.WEIGHT}' in data, found {sorted(list(data0.keys()))}"
            )
            if self.weight_factor is not None:
                data0[FV.WEIGHT][1] *= self.weight_factor
            if data0[FV.WEIGHT][0] == (FC.STATE,):
                weights = data0.pop(FV.WEIGHT)[1]

        data = {}  # dim: [DATA key, variables, data array]
        for v, (dims, d) in data0.items():
            if dims not in data:
                i = len(data)
                data[dims] = [self.var(f"data{i}"), [], []]
            data[dims][1].append(v)
            data[dims][2].append(d)
        for dims in data.keys():
            data[dims][2] = np.stack(data[dims][2], axis=-1)
        data = {
            tuple(list(dims) + [f"vars{i}"]): d
            for i, (dims, d) in enumerate(data.items())
        }
        return coords, data, weights

    def _preload(self, algo, cmap, bounds_extra_space, verbosity=0):
        """Helper function for preloading data."""

        assert FC.STATE in cmap, (
            f"States '{self.name}': States coordinate '{FC.STATE}' not in cmap {cmap}"
        )
        states_coord = cmap[FC.STATE]

        if not isinstance(self.data_source, xr.Dataset):
            # check static data:
            fpath = get_input_path(self.data_source)
            if "*" not in str(self.data_source):
                if not fpath.is_file():
                    fpath = StaticData().get_file_path(
                        STATES, fpath.name, check_raw=False
                    )

            # find bounds:
            if bounds_extra_space is not None:
                assert FV.X in cmap, (
                    f"States '{self.name}': x coordinate '{FV.X}' not in cmap {cmap}"
                )
                assert FV.Y in cmap, (
                    f"States '{self.name}': y coordinate '{FV.Y}' not in cmap {cmap}"
                )

                # if bounds and self.x_coord is not None and self.x_coord not in self.sel:
                xy_min, xy_max = algo.farm.get_xy_bounds(
                    extra_space=bounds_extra_space, algo=algo
                )
                if verbosity > 0:
                    print(
                        f"States '{self.name}': Restricting to bounds {xy_min} - {xy_max}"
                    )
                if self.sel is None:
                    self.sel = {}
                self.sel.update(
                    {
                        cmap[FV.X]: slice(xy_min[0], xy_max[1]),
                        cmap[FV.Y]: slice(xy_min[1], xy_max[1]),
                    }
                )

            # read files:
            if verbosity > 0:
                if self.load_mode == "preload":
                    print(
                        f"States '{self.name}': Reading data from '{self.data_source}'"
                    )
                elif self.load_mode == "lazy":
                    print(
                        f"States '{self.name}': Reading header from '{self.data_source}'"
                    )
                else:
                    print(
                        f"States '{self.name}': Reading states from '{self.data_source}'"
                    )

            files = sorted(list(fpath.resolve().parent.glob(fpath.name)))
            coords = list(cmap.values())
            vars = [self.var2ncvar.get(v, v) for v in self.variables]
            self.__data_source = get_engine().map(
                _read_nc_file,
                files,
                coords=coords,
                vars=vars,
                nc_engine=config.nc_engine,
                isel=self.isel,
                sel=self.sel,
                minimal=self.load_mode == "fly",
            )

            if self.load_mode in ["preload", "lazy"]:
                if self.load_mode == "lazy":
                    try:
                        self.__data_source = [ds.chunk() for ds in self.__data_source]
                    except (ModuleNotFoundError, ValueError) as e:
                        import_module("dask")
                        raise e
                if len(self.__data_source) == 1:
                    self.__data_source = self.__data_source[0]
                else:
                    self.__data_source = xr.concat(
                        self.__data_source,
                        dim=states_coord,
                        coords="minimal",
                        data_vars="minimal",
                        compat="equals",
                        join="exact",
                        combine_attrs="drop",
                    )
                if self.load_mode == "preload":
                    self.__data_source.load()
                self._inds = self.__data_source[states_coord].to_numpy()
                self._N = len(self._inds)

            elif self.load_mode == "fly":
                self._inds = self.__data_source
                self.__data_source = fpath
                self._files_maxi = {f: len(inds) for f, inds in zip(files, self._inds)}
                self._inds = np.concatenate(self._inds, axis=0)
                self._N = len(self._inds)

            else:
                raise KeyError(
                    f"States '{self.name}': Unknown load_mode '{self.load_mode}', choices: preload, lazy, fly"
                )

            if self.time_format is not None:
                self._inds = pd.to_datetime(
                    self._inds, format=self.time_format
                ).to_numpy()

        # given data is already Dataset:
        else:
            self._inds = self.data_source[states_coord].to_numpy()
            self._N = len(self._inds)

        return self.__data_source

    def load_data(
        self,
        algo,
        cmap,
        variables,
        bounds_extra_space=None,
        verbosity=0,
    ):
        """
        Load and/or create all model data that is subject to chunking.

        Such data should not be stored under self, for memory reasons. The
        data returned here will automatically be chunked and then provided
        as part of the mdata object during calculations.

        Parameters
        ----------
        algo: foxes.core.Algorithm
            The calculation algorithm
        cmap: dict
            A mapping from foxes variable names to Dataset dimension names
        variables: list of str
            The variables to extract from the Dataset
        bounds_extra_space: float, optional
            The extra space in meters to add to the horizontal wind farm bounds
        verbosity: int
            The verbosity level, 0 = silent

        Returns
        -------
        idata: dict
            The dict has exactly two entries: `data_vars`,
            a dict with entries `name_str -> (dim_tuple, data_ndarray)`;
            and `coords`, a dict with entries `dim_name_str -> dim_array`

        """
        # preload data:
        self._preload(algo, cmap, bounds_extra_space, verbosity=verbosity)

        idata = super().load_data(algo, verbosity)

        if self.load_mode == "preload":
            self._coords, data, w = self._get_data(
                self.data_source, cmap, variables, verbosity
            )

            if FC.STATE in self._coords:
                idata["coords"][FC.STATE] = self._coords.pop(FC.STATE)
            else:
                del idata["coords"][FC.STATE]
            if w is not None:
                idata["data_vars"][FV.WEIGHT] = ((FC.STATE,), w)

            vmap = {FC.STATE: FC.STATE}
            self._data_state_keys = []
            self._data_nostate = {}
            for dims, d in data.items():
                dms = tuple([vmap.get(c, self.var(c)) for c in dims])
                if FC.STATE in dims:
                    self._data_state_keys.append(d[0])
                    idata["coords"][dms[-1]] = d[1]
                    idata["data_vars"][d[0]] = (dms, d[2])
                else:
                    self._data_nostate[dims] = (d[1], d[2])
            del data

        return idata

    def set_running(
        self,
        algo,
        data_stash,
        sel=None,
        isel=None,
        verbosity=0,
    ):
        """
        Sets this model status to running, and moves
        all large data to stash.

        The stashed data will be returned by the
        unset_running() function after running calculations.

        Parameters
        ----------
        algo: foxes.core.Algorithm
            The calculation algorithm
        data_stash: dict
            Large data stash, this function adds data here.
            Key: model name. Value: dict, large model data
        sel: dict, optional
            The subset selection dictionary
        isel: dict, optional
            The index subset selection dictionary
        verbosity: int
            The verbosity level, 0 = silent

        """
        super().set_running(algo, data_stash, sel, isel, verbosity)

        data_stash[self.name] = dict(
            inds=self._inds,
        )
        del self._inds

        if self.load_mode == "preload":
            data_stash[self.name]["data_source"] = self.__data_source
            del self.__data_source

    def unset_running(
        self,
        algo,
        data_stash,
        sel=None,
        isel=None,
        verbosity=0,
    ):
        """
        Sets this model status to not running, recovering large data
        from stash

        Parameters
        ----------
        algo: foxes.core.Algorithm
            The calculation algorithm
        data_stash: dict
            Large data stash, this function adds data here.
            Key: model name. Value: dict, large model data
        sel: dict, optional
            The subset selection dictionary
        isel: dict, optional
            The index subset selection dictionary
        verbosity: int
            The verbosity level, 0 = silent

        """
        super().unset_running(algo, data_stash, sel, isel, verbosity)

        data = data_stash[self.name]
        self._inds = data.pop("inds")

        if self.load_mode == "preload":
            self.__data_source = data.pop("data_source")

    def output_point_vars(self, algo):
        """
        The variables which are being modified by the model.

        Parameters
        ----------
        algo: foxes.core.Algorithm
            The calculation algorithm

        Returns
        -------
        output_vars: list of str
            The output variable names

        """
        return self.ovars

    def size(self):
        """
        The total number of states.

        Returns
        -------
        int:
            The total number of states

        """
        return self._N

    def index(self):
        """
        The index list

        Returns
        -------
        indices: array_like
            The index labels of states, or None for default integers

        """
        if self.running:
            raise ValueError(f"States '{self.name}': Cannot access index while running")
        return self._inds

    def get_calc_data(self, mdata, cmap, variables):
        """
        Gathers data for calculations.

        Call this function from the calculate function of the
        derived class.

        Parameters
        ----------
        mdata: foxes.core.MData
            The mdata object
        cmap: dict
            A mapping from foxes variable names to Dataset dimension names
        variables: list of str
            The variables to extract from the Dataset

        Returns
        -------
        coords: dict
            keys: Foxes variable names, values: 1D coordinate value arrays
        data: dict
            The extracted data, keys are dimension tuples,
            values are tuples (DATA key, variables, data_array)
            where DATA key is the name in the mdata object,
            variables is a list of variable names, and
            data_array is a numpy.ndarray with the data values,
            the last dimension corresponds to the variables
        weights: numpy.ndarray or None
            The weights array, if only state dependent, otherwise
            weights are among data. Shape: (n_states,)

        """
        # prepare
        assert FC.STATE in cmap, (
            f"States '{self.name}': States coordinate '{FC.STATE}' not in cmap {cmap}"
        )
        states_coord = cmap[FC.STATE]
        n_states = mdata.n_states

        # case preload
        if self.load_mode == "preload":
            coords = self._coords
            weights = mdata[FV.WEIGHT] if FV.WEIGHT in mdata else None
            data = deepcopy(self._data_nostate)
            for DATA in self._data_state_keys:
                dims = mdata.dims[DATA]
                vrs = mdata[dims[-1]].tolist()
                dms = tuple(
                    [self.unvar(c) if c != FC.STATE else FC.STATE for c in dims[:-1]]
                    + [dims[-1]]
                )
                data[dms] = (vrs, mdata[DATA].copy())

        # case lazy
        elif self.load_mode == "lazy":
            i0 = mdata.states_i0(counter=True)
            s = slice(i0, i0 + n_states)
            ds = self.data_source.isel({states_coord: s}).load()
            coords, data, weights = self._get_data(ds, cmap, variables, verbosity=0)
            data = {dims: (d[1], d[2]) for dims, d in data.items()}
            del ds

        # case fly
        elif self.load_mode == "fly":
            vars = [self.var2ncvar.get(v, v) for v in variables]
            i0 = mdata.states_i0(counter=True)
            i1 = i0 + n_states
            j0 = 0
            data = []
            for fpath, n in self._files_maxi.items():
                if i0 < j0:
                    break
                else:
                    j1 = j0 + n
                    if i0 < j1:
                        a = i0 - j0
                        b = min(i1, j1) - j0
                        isel = copy(self.isel) if self.isel is not None else {}
                        isel[states_coord] = slice(a, b)

                        data.append(
                            _read_nc_file(
                                fpath,
                                coords=list(cmap.values()),
                                vars=vars,
                                nc_engine=config.nc_engine,
                                isel=isel,
                                sel=self.sel,
                                minimal=False,
                            )
                        )

                        i0 += b - a
                    j0 = j1

            assert i0 == i1, (
                f"States '{self.name}': Missing states for load_mode '{self.load_mode}': (i0, i1) = {(i0, i1)}"
            )
            if len(data) == 1:
                data = data[0]
            else:
                data = xr.concat(
                    data,
                    dim=states_coord,
                    data_vars="minimal",
                    coords="minimal",
                    compat="override",
                    join="exact",
                    combine_attrs="drop",
                )
            coords, data, weights = self._get_data(data, cmap, variables, verbosity=0)
            data = {dims: (d[1], d[2]) for dims, d in data.items()}

        else:
            raise KeyError(
                f"States '{self.name}': Unknown load_mode '{self.load_mode}', choices: preload, lazy, fly"
            )

        return coords, data, weights