foxes-0.8.2-py3-none-any.whl → foxes-1.1.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (215)
  1. docs/source/conf.py +353 -0
  2. examples/abl_states/run.py +160 -0
  3. examples/compare_rotors_pwakes/run.py +217 -0
  4. examples/compare_wakes/run.py +241 -0
  5. examples/dyn_wakes/run.py +311 -0
  6. examples/field_data_nc/run.py +121 -0
  7. examples/induction/run.py +201 -0
  8. examples/multi_height/run.py +113 -0
  9. examples/power_mask/run.py +249 -0
  10. examples/random_timeseries/run.py +210 -0
  11. examples/scan_row/run.py +193 -0
  12. examples/sector_management/run.py +162 -0
  13. examples/sequential/run.py +209 -0
  14. examples/single_state/run.py +201 -0
  15. examples/states_lookup_table/run.py +137 -0
  16. examples/streamline_wakes/run.py +138 -0
  17. examples/tab_file/run.py +142 -0
  18. examples/timelines/run.py +267 -0
  19. examples/timeseries/run.py +190 -0
  20. examples/timeseries_slurm/run.py +185 -0
  21. examples/wind_rose/run.py +141 -0
  22. examples/windio/run.py +29 -0
  23. examples/yawed_wake/run.py +196 -0
  24. foxes/__init__.py +4 -8
  25. foxes/algorithms/__init__.py +1 -1
  26. foxes/algorithms/downwind/downwind.py +247 -111
  27. foxes/algorithms/downwind/models/farm_wakes_calc.py +12 -7
  28. foxes/algorithms/downwind/models/init_farm_data.py +2 -2
  29. foxes/algorithms/downwind/models/point_wakes_calc.py +6 -7
  30. foxes/algorithms/downwind/models/reorder_farm_output.py +1 -2
  31. foxes/algorithms/downwind/models/set_amb_farm_results.py +1 -1
  32. foxes/algorithms/downwind/models/set_amb_point_results.py +5 -3
  33. foxes/algorithms/iterative/iterative.py +74 -34
  34. foxes/algorithms/iterative/models/farm_wakes_calc.py +12 -7
  35. foxes/algorithms/iterative/models/urelax.py +3 -3
  36. foxes/algorithms/sequential/models/plugin.py +5 -5
  37. foxes/algorithms/sequential/models/seq_state.py +1 -1
  38. foxes/algorithms/sequential/sequential.py +126 -255
  39. foxes/constants.py +22 -7
  40. foxes/core/__init__.py +1 -0
  41. foxes/core/algorithm.py +632 -147
  42. foxes/core/data.py +252 -20
  43. foxes/core/data_calc_model.py +15 -291
  44. foxes/core/engine.py +640 -0
  45. foxes/core/farm_controller.py +38 -10
  46. foxes/core/farm_data_model.py +16 -1
  47. foxes/core/ground_model.py +2 -2
  48. foxes/core/model.py +249 -182
  49. foxes/core/partial_wakes_model.py +1 -1
  50. foxes/core/point_data_model.py +17 -2
  51. foxes/core/rotor_model.py +27 -21
  52. foxes/core/states.py +17 -1
  53. foxes/core/turbine_type.py +28 -0
  54. foxes/core/wake_frame.py +30 -34
  55. foxes/core/wake_model.py +5 -5
  56. foxes/core/wake_superposition.py +1 -1
  57. foxes/data/windio/windio_5turbines_timeseries.yaml +31 -15
  58. foxes/engines/__init__.py +17 -0
  59. foxes/engines/dask.py +982 -0
  60. foxes/engines/default.py +75 -0
  61. foxes/engines/futures.py +72 -0
  62. foxes/engines/mpi.py +38 -0
  63. foxes/engines/multiprocess.py +71 -0
  64. foxes/engines/numpy.py +167 -0
  65. foxes/engines/pool.py +249 -0
  66. foxes/engines/ray.py +79 -0
  67. foxes/engines/single.py +141 -0
  68. foxes/input/farm_layout/__init__.py +1 -0
  69. foxes/input/farm_layout/from_csv.py +4 -0
  70. foxes/input/farm_layout/from_json.py +2 -2
  71. foxes/input/farm_layout/grid.py +2 -2
  72. foxes/input/farm_layout/ring.py +65 -0
  73. foxes/input/farm_layout/row.py +2 -2
  74. foxes/input/states/__init__.py +7 -0
  75. foxes/input/states/create/random_abl_states.py +1 -1
  76. foxes/input/states/field_data_nc.py +158 -33
  77. foxes/input/states/multi_height.py +128 -14
  78. foxes/input/states/one_point_flow.py +577 -0
  79. foxes/input/states/scan_ws.py +74 -3
  80. foxes/input/states/single.py +1 -1
  81. foxes/input/states/slice_data_nc.py +681 -0
  82. foxes/input/states/states_table.py +204 -35
  83. foxes/input/windio/__init__.py +2 -2
  84. foxes/input/windio/get_states.py +44 -23
  85. foxes/input/windio/read_attributes.py +48 -17
  86. foxes/input/windio/read_farm.py +116 -102
  87. foxes/input/windio/read_fields.py +16 -6
  88. foxes/input/windio/read_outputs.py +71 -24
  89. foxes/input/windio/runner.py +31 -17
  90. foxes/input/windio/windio.py +41 -23
  91. foxes/models/farm_models/turbine2farm.py +1 -1
  92. foxes/models/ground_models/wake_mirror.py +10 -6
  93. foxes/models/model_book.py +58 -20
  94. foxes/models/partial_wakes/axiwake.py +3 -3
  95. foxes/models/partial_wakes/rotor_points.py +3 -3
  96. foxes/models/partial_wakes/top_hat.py +2 -2
  97. foxes/models/point_models/set_uniform_data.py +1 -1
  98. foxes/models/point_models/tke2ti.py +1 -1
  99. foxes/models/point_models/wake_deltas.py +1 -1
  100. foxes/models/rotor_models/centre.py +4 -0
  101. foxes/models/rotor_models/grid.py +24 -25
  102. foxes/models/rotor_models/levels.py +4 -5
  103. foxes/models/turbine_models/calculator.py +4 -6
  104. foxes/models/turbine_models/kTI_model.py +22 -6
  105. foxes/models/turbine_models/lookup_table.py +30 -4
  106. foxes/models/turbine_models/rotor_centre_calc.py +4 -3
  107. foxes/models/turbine_models/set_farm_vars.py +103 -34
  108. foxes/models/turbine_types/PCt_file.py +27 -3
  109. foxes/models/turbine_types/PCt_from_two.py +27 -3
  110. foxes/models/turbine_types/TBL_file.py +80 -0
  111. foxes/models/turbine_types/__init__.py +2 -0
  112. foxes/models/turbine_types/lookup.py +316 -0
  113. foxes/models/turbine_types/null_type.py +51 -1
  114. foxes/models/turbine_types/wsrho2PCt_from_two.py +29 -5
  115. foxes/models/turbine_types/wsti2PCt_from_two.py +31 -7
  116. foxes/models/vertical_profiles/__init__.py +1 -1
  117. foxes/models/vertical_profiles/data_profile.py +1 -1
  118. foxes/models/wake_frames/__init__.py +1 -0
  119. foxes/models/wake_frames/dynamic_wakes.py +424 -0
  120. foxes/models/wake_frames/farm_order.py +25 -5
  121. foxes/models/wake_frames/rotor_wd.py +6 -4
  122. foxes/models/wake_frames/seq_dynamic_wakes.py +61 -74
  123. foxes/models/wake_frames/streamlines.py +21 -22
  124. foxes/models/wake_frames/timelines.py +330 -129
  125. foxes/models/wake_frames/yawed_wakes.py +7 -4
  126. foxes/models/wake_models/dist_sliced.py +2 -4
  127. foxes/models/wake_models/induction/rankine_half_body.py +5 -5
  128. foxes/models/wake_models/induction/rathmann.py +78 -24
  129. foxes/models/wake_models/induction/self_similar.py +78 -28
  130. foxes/models/wake_models/induction/vortex_sheet.py +86 -48
  131. foxes/models/wake_models/ti/crespo_hernandez.py +6 -4
  132. foxes/models/wake_models/ti/iec_ti.py +40 -21
  133. foxes/models/wake_models/top_hat.py +1 -1
  134. foxes/models/wake_models/wind/bastankhah14.py +8 -6
  135. foxes/models/wake_models/wind/bastankhah16.py +17 -16
  136. foxes/models/wake_models/wind/jensen.py +4 -3
  137. foxes/models/wake_models/wind/turbopark.py +16 -13
  138. foxes/models/wake_superpositions/ti_linear.py +1 -1
  139. foxes/models/wake_superpositions/ti_max.py +1 -1
  140. foxes/models/wake_superpositions/ti_pow.py +1 -1
  141. foxes/models/wake_superpositions/ti_quadratic.py +1 -1
  142. foxes/models/wake_superpositions/ws_linear.py +8 -7
  143. foxes/models/wake_superpositions/ws_max.py +8 -7
  144. foxes/models/wake_superpositions/ws_pow.py +8 -7
  145. foxes/models/wake_superpositions/ws_product.py +5 -5
  146. foxes/models/wake_superpositions/ws_quadratic.py +8 -7
  147. foxes/output/__init__.py +4 -1
  148. foxes/output/farm_layout.py +16 -12
  149. foxes/output/farm_results_eval.py +1 -1
  150. foxes/output/flow_plots_2d/__init__.py +0 -1
  151. foxes/output/flow_plots_2d/flow_plots.py +70 -30
  152. foxes/output/grids.py +92 -22
  153. foxes/output/results_writer.py +2 -2
  154. foxes/output/rose_plot.py +3 -3
  155. foxes/output/seq_plugins/__init__.py +2 -0
  156. foxes/output/{flow_plots_2d → seq_plugins}/seq_flow_ani_plugin.py +64 -22
  157. foxes/output/seq_plugins/seq_wake_debug_plugin.py +145 -0
  158. foxes/output/slice_data.py +131 -111
  159. foxes/output/state_turbine_map.py +19 -14
  160. foxes/output/state_turbine_table.py +19 -19
  161. foxes/utils/__init__.py +1 -1
  162. foxes/utils/abl/neutral.py +2 -2
  163. foxes/utils/abl/stable.py +2 -2
  164. foxes/utils/abl/unstable.py +2 -2
  165. foxes/utils/data_book.py +1 -1
  166. foxes/utils/dev_utils.py +42 -0
  167. foxes/utils/dict.py +24 -1
  168. foxes/utils/exec_python.py +1 -1
  169. foxes/utils/factory.py +176 -53
  170. foxes/utils/geom2d/circle.py +1 -1
  171. foxes/utils/geom2d/polygon.py +1 -1
  172. foxes/utils/geopandas_utils.py +2 -2
  173. foxes/utils/load.py +2 -2
  174. foxes/utils/pandas_helpers.py +3 -2
  175. foxes/utils/wind_dir.py +0 -2
  176. foxes/utils/xarray_utils.py +24 -14
  177. foxes/variables.py +39 -2
  178. {foxes-0.8.2.dist-info → foxes-1.1.0.2.dist-info}/METADATA +75 -33
  179. foxes-1.1.0.2.dist-info/RECORD +309 -0
  180. {foxes-0.8.2.dist-info → foxes-1.1.0.2.dist-info}/WHEEL +1 -1
  181. foxes-1.1.0.2.dist-info/top_level.txt +4 -0
  182. tests/0_consistency/iterative/test_iterative.py +92 -0
  183. tests/0_consistency/partial_wakes/test_partial_wakes.py +90 -0
  184. tests/1_verification/flappy_0_6/PCt_files/flappy/run.py +85 -0
  185. tests/1_verification/flappy_0_6/PCt_files/test_PCt_files.py +103 -0
  186. tests/1_verification/flappy_0_6/abl_states/flappy/run.py +85 -0
  187. tests/1_verification/flappy_0_6/abl_states/test_abl_states.py +87 -0
  188. tests/1_verification/flappy_0_6/partial_top_hat/flappy/run.py +82 -0
  189. tests/1_verification/flappy_0_6/partial_top_hat/test_partial_top_hat.py +82 -0
  190. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/flappy/run.py +92 -0
  191. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/test_row_Jensen_linear_centre.py +93 -0
  192. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/flappy/run.py +92 -0
  193. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/test_row_Jensen_linear_tophat.py +96 -0
  194. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/flappy/run.py +94 -0
  195. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/test_row_Jensen_linear_tophat_IECTI_2005.py +122 -0
  196. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/flappy/run.py +94 -0
  197. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/test_row_Jensen_linear_tophat_IECTI_2019.py +122 -0
  198. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/flappy/run.py +92 -0
  199. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/test_row_Jensen_quadratic_centre.py +93 -0
  200. tests/1_verification/flappy_0_6_2/grid_rotors/flappy/run.py +85 -0
  201. tests/1_verification/flappy_0_6_2/grid_rotors/test_grid_rotors.py +130 -0
  202. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/flappy/run.py +96 -0
  203. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/test_row_Bastankhah_Crespo.py +116 -0
  204. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/flappy/run.py +93 -0
  205. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/test_row_Bastankhah_linear_centre.py +99 -0
  206. tests/3_examples/test_examples.py +34 -0
  207. foxes/VERSION +0 -1
  208. foxes/output/flow_plots_2d.py +0 -0
  209. foxes/utils/geopandas_helpers.py +0 -294
  210. foxes/utils/runners/__init__.py +0 -1
  211. foxes/utils/runners/runners.py +0 -280
  212. foxes-0.8.2.dist-info/RECORD +0 -247
  213. foxes-0.8.2.dist-info/top_level.txt +0 -1
  214. foxes-0.8.2.dist-info/zip-safe +0 -1
  215. {foxes-0.8.2.dist-info → foxes-1.1.0.2.dist-info}/LICENSE +0 -0
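
The headline change in this release is the new foxes/engines/ subpackage (entries 58-67), which replaces the removed foxes/utils/runners/ machinery (entries 210-211). Judging from the constructors and __enter__/__exit__ methods in the dask.py diff below, engines are context managers that configure how chunked calculations run. The following sketch is a hypothetical usage illustration: the class name and its dask_config/progress_bar parameters appear in the diff, while the import path and the with-block usage are assumptions not confirmed by this diff alone.

# Hypothetical usage sketch (not part of the diff):
from foxes.engines import DaskEngine  # assumed export of engines/__init__.py

# DaskBaseEngine defines __enter__/__exit__, so engines can act as context
# managers; dask_config and progress_bar are constructor options shown below.
with DaskEngine(dask_config={"scheduler": "threads"}, progress_bar=False):
    ...  # run foxes calculations while the engine is active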
foxes/engines/dask.py ADDED
@@ -0,0 +1,982 @@
+import numpy as np
+import xarray as xr
+from copy import deepcopy
+from tqdm import tqdm
+
+from foxes.core import Engine, MData, FData, TData
+from foxes.utils import import_module
+import foxes.variables as FV
+import foxes.constants as FC
+
+dask = None
+distributed = None
+
+
+def delayed(func):
+    """A dummy decorator"""
+    return func
+
+
+def load_dask():
+    """On-demand loading of the dask package"""
+    global dask, ProgressBar, delayed
+    if dask is None:
+        dask = import_module("dask", hint="pip install dask")
+        ProgressBar = import_module(
+            "dask.diagnostics", hint="pip install dask"
+        ).ProgressBar
+        delayed = dask.delayed
+
+
+def load_distributed():
+    """On-demand loading of the distributed package"""
+    global distributed
+    if distributed is None:
+        distributed = import_module("distributed", hint="pip install distributed")
+
+
+class DaskBaseEngine(Engine):
+    """
+    Abstract base class for foxes calculations with dask.
+
+    Parameters
+    ----------
+    dask_config: dict
+        The dask configuration parameters
+    progress_bar: bool
+        Flag for showing progress bar
+
+    :group: engines
+
+    """
+
+    def __init__(
+        self,
+        *args,
+        dask_config={},
+        progress_bar=True,
+        **kwargs,
+    ):
+        """
+        Constructor.
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Additional parameters for the base class
+        dask_config: dict, optional
+            The dask configuration parameters
+        progress_bar: bool
+            Flag for showing progress bar
+        kwargs: dict, optional
+            Additional parameters for the base class
+
+        """
+        super().__init__(*args, **kwargs)
+
+        load_dask()
+
+        self.dask_config = dask_config
+        self.progress_bar = progress_bar
+
+    def __enter__(self):
+        if self.progress_bar:
+            self._pbar = ProgressBar(minimum=2)
+            self._pbar.__enter__()
+        return super().__enter__()
+
+    def __exit__(self, *args):
+        if self.progress_bar:
+            self._pbar.__exit__(*args)
+        super().__exit__(*args)
+
+    def initialize(self):
+        """
+        Initializes the engine.
+        """
+        dask.config.set(**self.dask_config)
+        super().initialize()
+
+    def chunk_data(self, data):
+        """
+        Applies the selected chunking
+
+        Parameters
+        ----------
+        data: xarray.Dataset
+            The data to be chunked
+
+        Returns
+        -------
+        data: xarray.Dataset
+            The chunked data
+
+        """
+        cks = {}
+        cks[FC.STATE] = min(data.sizes[FC.STATE], self.chunk_size_states)
+        if FC.TARGET in data.sizes:
+            cks[FC.TARGET] = min(data.sizes[FC.TARGET], self.chunk_size_points)
+
+        if len(set(cks.keys()).intersection(data.coords.keys())):
+            return data.chunk({v: d for v, d in cks.items() if v in data.coords})
+        else:
+            return data
+
+    def finalize(self, *exit_args, **exit_kwargs):
+        """
+        Finalizes the engine.
+
+        Parameters
+        ----------
+        exit_args: tuple, optional
+            Arguments from the exit function
+        exit_kwargs: dict, optional
+            Arguments from the exit function
+
+        """
+        dask.config.refresh()
+        super().finalize(*exit_args, **exit_kwargs)
+
+
+def _run_as_ufunc(
+    state_inds,
+    *ldata,
+    algo,
+    dvars,
+    lvars,
+    ldims,
+    evars,
+    edims,
+    edata,
+    loop_dims,
+    out_vars,
+    out_coords,
+    calc_pars,
+    init_vars,
+    ensure_variables,
+    calculate,
+):
+    """
+    Wrapper that mediates between apply_ufunc and `calculate`.
+    """
+    n_prev = len(init_vars)
+    if n_prev:
+        prev = ldata[:n_prev]
+        ldata = ldata[n_prev:]
+
+    # reconstruct original data:
+    data = []
+    for i, hvars in enumerate(dvars):
+        v2l = {v: lvars.index(v) for v in hvars if v in lvars}
+        v2e = {v: evars.index(v) for v in hvars if v in evars}
+
+        hdata = {v: ldata[v2l[v]] if v in v2l else edata[v2e[v]] for v in hvars}
+        hdims = {v: ldims[v2l[v]] if v in v2l else edims[v2e[v]] for v in hvars}
+
+        if i == 0:
+            data.append(
+                MData(
+                    data=hdata, dims=hdims, loop_dims=loop_dims, states_i0=state_inds[0]
+                )
+            )
+        elif i == 1:
+            data.append(
+                FData(
+                    data=hdata, dims=hdims, loop_dims=loop_dims, states_i0=state_inds[0]
+                )
+            )
+        elif i == 2:
+            data.append(
+                TData(
+                    data=hdata, dims=hdims, loop_dims=loop_dims, states_i0=state_inds[0]
+                )
+            )
+        else:
+            raise NotImplementedError(
+                f"Not more than 3 data sets implemented, found {len(dvars)}"
+            )
+
+        del hdata, hdims, v2l, v2e
+
+    # deduce output shape:
+    oshape = []
+    for li, l in enumerate(out_coords):
+        for i, dims in enumerate(ldims):
+            if l in dims:
+                oshape.append(ldata[i].shape[dims.index(l)])
+                break
+        if len(oshape) != li + 1:
+            raise ValueError(f"Failed to find loop dimension")
+
+    # add zero output data arrays:
+    odims = {v: tuple(out_coords) for v in out_vars}
+    odata = {
+        v: (
+            np.full(oshape, np.nan, dtype=FC.DTYPE)
+            if v not in init_vars
+            else prev[init_vars.index(v)].copy()
+        )
+        for v in out_vars
+        if v not in data[-1]
+    }
+
+    if len(data) == 1:
+        data.append(FData(odata, odims, loop_dims, states_i0=state_inds[0]))
+    else:
+        odata.update(data[-1])
+        odims.update(data[-1].dims)
+        if len(data) == 2:
+            data[-1] = FData(odata, odims, loop_dims, states_i0=state_inds[0])
+        else:
+            data[-1] = TData(odata, odims, loop_dims, states_i0=state_inds[0])
+    del odims, odata
+
+    # link chunk state indices from mdata to fdata and tdata:
+    if FC.STATE in data[0]:
+        for d in data[1:]:
+            d[FC.STATE] = data[0][FC.STATE]
+
+    # link weights from mdata to fdata:
+    if FV.WEIGHT in data[0]:
+        data[1][FV.WEIGHT] = data[0][FV.WEIGHT]
+        data[1].dims[FV.WEIGHT] = data[0].dims[FV.WEIGHT]
+
+    # run model calculation:
+    ensure_variables(algo, *data)
+    results = calculate(algo, *data, **calc_pars)
+
+    # replace missing results by first input data with matching shape:
+    missing = set(out_vars).difference(results.keys())
+    if len(missing):
+        found = set()
+        for v in missing:
+            for dta in data:
+                if v in dta and dta[v].shape == tuple(oshape):
+                    results[v] = dta[v]
+                    found.add(v)
+                    break
+        missing -= found
+        if len(missing):
+            raise ValueError(
+                f"Missing results {list(missing)}, expected shape {oshape}"
+            )
+    del data
+
+    # create output:
+    n_vars = len(out_vars)
+    data = np.zeros(oshape + [n_vars], dtype=FC.DTYPE)
+    for v in out_vars:
+        data[..., out_vars.index(v)] = results[v]
+
+    return data
+
+
+class XArrayEngine(DaskBaseEngine):
+    """
+    The engine for foxes calculations via xarray.apply_ufunc.
+
+    :group: engines
+
+    """
+
+    def run_calculation(
+        self,
+        algo,
+        model,
+        model_data=None,
+        farm_data=None,
+        point_data=None,
+        out_vars=[],
+        chunk_store={},
+        sel=None,
+        isel=None,
+        persist=True,
+        iterative=False,
+        **calc_pars,
+    ):
+        """
+        Runs the model calculation
+
+        Parameters
+        ----------
+        algo: foxes.core.Algorithm
+            The algorithm object
+        model: foxes.core.DataCalcModel
+            The model whose calculate function
+            should be run
+        model_data: xarray.Dataset
+            The initial model data
+        farm_data: xarray.Dataset
+            The initial farm data
+        point_data: xarray.Dataset
+            The initial point data
+        out_vars: list of str, optional
+            Names of the output variables
+        chunk_store: foxes.utils.Dict
+            The chunk store
+        sel: dict, optional
+            Selection of coordinate subsets
+        isel: dict, optional
+            Selection of coordinate subsets index values
+        persist: bool
+            Flag for persisting xarray Dataset objects
+        iterative: bool
+            Flag for use within the iterative algorithm
+        calc_pars: dict, optional
+            Additional parameters for the model.calculate()
+
+        Returns
+        -------
+        results: xarray.Dataset
+            The model results
+
+        """
+        # subset selection:
+        model_data, farm_data, point_data = self.select_subsets(
+            model_data, farm_data, point_data, sel=sel, isel=isel
+        )
+
+        # basic checks:
+        super().run_calculation(algo, model, model_data, farm_data, point_data)
+
+        # find chunk sizes, if not given:
+        chunk_size_states0 = self.chunk_size_states
+        chunk_size_points0 = self.chunk_size_points
+        n_states = model_data.sizes[FC.STATE]
+        n_targets = point_data.sizes[FC.TARGET] if point_data is not None else 0
+        chunk_sizes_states, chunk_sizes_targets = self.calc_chunk_sizes(
+            n_states, n_targets
+        )
+        self.chunk_size_states = np.min(chunk_sizes_states)
+        self.chunk_size_points = np.min(chunk_sizes_targets)
+        self.print(
+            f"{type(self).__name__}: Selecting chunk_size_states = {self.chunk_size_states}, chunk_size_points = {self.chunk_size_points}"
+        )  # , level=2)
+
+        # prepare:
+        algo.reset_chunk_store(chunk_store)
+        out_coords = model.output_coords()
+        loop_dims = [d for d in self.loop_dims if d in out_coords]
+        loopd = set(loop_dims)
+
+        # extract loop-var dependent and independent data:
+        ldata = []
+        lvars = []
+        ldims = []
+        edata = []
+        evars = []
+        edims = []
+        dvars = []
+        ivars = []
+        idims = []
+        data = [
+            self.chunk_data(d)
+            for d in [model_data, farm_data, point_data]
+            if d is not None
+        ]
+        for ds in data:
+
+            hvarsl = [v for v, d in ds.items() if len(loopd.intersection(d.dims))]
+            ldata += [ds[v] for v in hvarsl]
+            ldims += [ds[v].dims for v in hvarsl]
+            lvars += hvarsl
+
+            hvarse = [v for v in ds.keys() if v not in hvarsl]
+            edata += [ds[v].values for v in hvarse]
+            edims += [ds[v].dims for v in hvarse]
+            evars += hvarse
+
+            for c, d in ds.coords.items():
+                if c in loopd:
+                    ldata.append(
+                        self.chunk_data(
+                            xr.DataArray(data=d.values, coords={c: d}, dims=[c])
+                        )
+                    )
+                    ldims.append((c,))
+                    lvars.append(c)
+                else:
+                    edata.append(d.values)
+                    edims.append((c,))
+                    evars.append(c)
+
+            dvars.append(list(ds.keys()) + list(ds.coords.keys()))
+
+        # apply persist:
+        if persist:
+            ldata = [d.persist() for d in ldata]
+
+        # setup dask options:
+        dargs = dict(output_sizes={FC.VARS: len(out_vars)})
+        out_core_vars = [d for d in out_coords if d not in loop_dims] + [FC.VARS]
+        if FC.TURBINE in loopd and FC.TURBINE not in ldims.values():
+            dargs["output_sizes"][FC.TURBINE] = algo.n_turbines
+
+        # find states_i0:
+        state_inds = self.chunk_data(
+            xr.DataArray(
+                np.arange(ldata[0].sizes[FC.STATE]),
+                dims=FC.STATE,
+                coords={FC.STATE: ldata[0][FC.STATE].to_numpy()},
+            )
+        )
+
+        # setup arguments for wrapper function:
+        out_coords = loop_dims + list(set(out_core_vars).difference([FC.VARS]))
+        wargs = dict(
+            algo=algo,
+            dvars=dvars,
+            lvars=lvars,
+            ldims=ldims,
+            evars=evars,
+            edims=edims,
+            edata=edata,
+            loop_dims=loop_dims,
+            out_vars=out_vars,
+            out_coords=out_coords,
+            calc_pars=calc_pars,
+            init_vars=ivars,
+            ensure_variables=model.ensure_variables,
+            calculate=model.calculate,
+        )
+
+        # run parallel computation:
+        iidims = [[c for c in d if c not in loopd] for d in idims]
+        icdims = [[c for c in d if c not in loopd] for d in ldims]
+        results = xr.apply_ufunc(
+            _run_as_ufunc,
+            state_inds,
+            *ldata,
+            input_core_dims=[[]] + iidims + icdims,
+            output_core_dims=[out_core_vars],
+            output_dtypes=[FC.DTYPE],
+            dask="parallelized",
+            dask_gufunc_kwargs=dargs,
+            kwargs=wargs,
+        )
+
+        results = results.assign_coords({FC.VARS: out_vars}).to_dataset(dim=FC.VARS)
+
+        # reset:
+        self.chunk_size_states = chunk_size_states0
+        self.chunk_size_points = chunk_size_points0
+
+        # update data by calculation results:
+        return results.compute(num_workers=self.n_procs)
+
+
+@delayed
+def _run_lazy(algo, model, iterative, chunk_store, i0_t0, *data, **cpars):
+    """Helper function for lazy running"""
+    algo.reset_chunk_store(chunk_store)
+    results = model.calculate(algo, *data, **cpars)
+    chunk_store = algo.reset_chunk_store() if iterative else {}
+    cstore = {i0_t0: chunk_store[i0_t0]} if i0_t0 in chunk_store else {}
+    return results, cstore
+
+
+class DaskEngine(DaskBaseEngine):
+    """
+    The dask engine for delayed foxes calculations.
+
+    :group: engines
+
+    """
+
+    def run_calculation(
+        self,
+        algo,
+        model,
+        model_data=None,
+        farm_data=None,
+        point_data=None,
+        out_vars=[],
+        chunk_store={},
+        sel=None,
+        isel=None,
+        iterative=False,
+        **calc_pars,
+    ):
+        """
+        Runs the model calculation
+
+        Parameters
+        ----------
+        algo: foxes.core.Algorithm
+            The algorithm object
+        model: foxes.core.DataCalcModel
+            The model whose calculate function
+            should be run
+        model_data: xarray.Dataset
+            The initial model data
+        farm_data: xarray.Dataset
+            The initial farm data
+        point_data: xarray.Dataset
+            The initial point data
+        out_vars: list of str, optional
+            Names of the output variables
+        chunk_store: foxes.utils.Dict
+            The chunk store
+        sel: dict, optional
+            Selection of coordinate subsets
+        isel: dict, optional
+            Selection of coordinate subsets index values
+        iterative: bool
+            Flag for use within the iterative algorithm
+        calc_pars: dict, optional
+            Additional parameters for the model.calculate()
+
+        Returns
+        -------
+        results: xarray.Dataset
+            The model results
+
+        """
+        # subset selection:
+        model_data, farm_data, point_data = self.select_subsets(
+            model_data, farm_data, point_data, sel=sel, isel=isel
+        )
+
+        # basic checks:
+        super().run_calculation(algo, model, model_data, farm_data, point_data)
+
+        # prepare:
+        n_states = model_data.sizes[FC.STATE]
+        out_coords = model.output_coords()
+        coords = {}
+        if FC.STATE in out_coords and FC.STATE in model_data.coords:
+            coords[FC.STATE] = model_data[FC.STATE].to_numpy()
+        if farm_data is None:
+            farm_data = xr.Dataset()
+        goal_data = farm_data if point_data is None else point_data
+
+        # calculate chunk sizes:
+        n_targets = point_data.sizes[FC.TARGET] if point_data is not None else 0
+        chunk_sizes_states, chunk_sizes_targets = self.calc_chunk_sizes(
+            n_states, n_targets
+        )
+        n_chunks_states = len(chunk_sizes_states)
+        n_chunks_targets = len(chunk_sizes_targets)
+        self.print(
+            f"Selecting n_chunks_states = {n_chunks_states}, n_chunks_targets = {n_chunks_targets}",
+            level=2,
+        )
+
+        # submit chunks:
+        n_chunks_all = n_chunks_states * n_chunks_targets
+        self.print(
+            f"Submitting {n_chunks_all} chunks to {self.n_procs} processes", level=2
+        )
+        pbar = tqdm(total=n_chunks_all) if self.verbosity > 1 else None
+        results = {}
+        i0_states = 0
+        for chunki_states in range(n_chunks_states):
+            i1_states = i0_states + chunk_sizes_states[chunki_states]
+            i0_targets = 0
+            for chunki_points in range(n_chunks_targets):
+                i1_targets = i0_targets + chunk_sizes_targets[chunki_points]
+
+                # get this chunk's data:
+                data = self.get_chunk_input_data(
+                    algo=algo,
+                    model_data=model_data,
+                    farm_data=farm_data,
+                    point_data=point_data,
+                    states_i0_i1=(i0_states, i1_states),
+                    targets_i0_i1=(i0_targets, i1_targets),
+                    out_vars=out_vars,
+                )
+
+                # submit model calculation:
+                results[(chunki_states, chunki_points)] = _run_lazy(
+                    deepcopy(algo),
+                    deepcopy(model),
+                    iterative,
+                    chunk_store,
+                    (i0_states, i0_targets),
+                    *data,
+                    **calc_pars,
+                )
+                del data
+
+                i0_targets = i1_targets
+
+                if pbar is not None:
+                    pbar.update()
+
+            i0_states = i1_states
+
+        del farm_data, point_data, calc_pars
+        if pbar is not None:
+            pbar.close()
+
+        # wait for results:
+        if n_chunks_all > 1 or self.verbosity > 1:
+            self.print(
+                f"Computing {n_chunks_all} chunks using {self.n_procs} processes"
+            )
+        results = dask.compute(results)[0]
+
+        return self.combine_results(
+            algo=algo,
+            results=results,
+            model_data=model_data,
+            out_vars=out_vars,
+            out_coords=out_coords,
+            n_chunks_states=n_chunks_states,
+            n_chunks_targets=n_chunks_targets,
+            goal_data=goal_data,
+            iterative=iterative,
+        )
+
+
+def _run_on_cluster(
+    algo,
+    model,
+    *data,
+    names,
+    dims,
+    mdata_size,
+    fdata_size,
+    loop_dims,
+    iterative,
+    chunk_store,
+    i0_states,
+    i0_targets,
+    cpars,
+):
+    """Helper function for running on a cluster"""
+
+    algo.reset_chunk_store(chunk_store)
+
+    mdata = MData(
+        data={names[i]: data[i] for i in range(mdata_size)},
+        dims={names[i]: dims[i] for i in range(mdata_size)},
+        loop_dims=loop_dims[0],
+        states_i0=i0_states,
+    )
+
+    fdata_end = mdata_size + fdata_size
+    fdata = FData(
+        data={names[i]: data[i].copy() for i in range(mdata_size, fdata_end)},
+        dims={names[i]: dims[i] for i in range(mdata_size, fdata_end)},
+        loop_dims=loop_dims[1],
+        states_i0=i0_states,
+    )
+
+    tdata = None
+    if len(data) > fdata_end:
+        tdata = TData(
+            data={names[i]: data[i].copy() for i in range(fdata_end, len(data))},
+            dims={names[i]: dims[i] for i in range(fdata_end, len(data))},
+            loop_dims=loop_dims[2],
+            states_i0=i0_states,
+        )
+
+    data = [d for d in [mdata, fdata, tdata] if d is not None]
+
+    results = model.calculate(algo, *data, **cpars)
+    chunk_store = algo.reset_chunk_store() if iterative else {}
+
+    k = (i0_states, i0_targets)
+    cstore = {k: chunk_store[k]} if k in chunk_store else {}
+    return results, cstore
+
+
+class LocalClusterEngine(DaskBaseEngine):
+    """
+    The dask engine for foxes calculations on a local cluster.
+
+    Attributes
+    ----------
+    cluster_pars: dict
+        Parameters for the cluster
+    client_pars: dict
+        Parameters for the client of the cluster
+
+    :group: engines
+
+    """
+
+    def __init__(
+        self,
+        *args,
+        cluster_pars={},
+        client_pars={},
+        **kwargs,
+    ):
+        """
+        Constructor.
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Additional parameters for the DaskBaseEngine class
+        cluster_pars: dict
+            Parameters for the cluster
+        client_pars: dict
+            Parameters for the client of the cluster
+        kwargs: dict, optional
+            Additional parameters for the base class
+
+        """
+        super().__init__(*args, **kwargs)
+
+        load_distributed()
+
+        self.cluster_pars = cluster_pars
+        self.client_pars = client_pars
+
+        self.dask_config["scheduler"] = "distributed"
+        self.dask_config["distributed.scheduler.worker-ttl"] = None
+
+        self._cluster = None
+        self._client = None
+
+    def __enter__(self):
+        self.print("Launching local dask cluster..")
+        self._cluster = distributed.LocalCluster(
+            n_workers=self.n_procs, **self.cluster_pars
+        ).__enter__()
+        self._client = distributed.Client(self._cluster, **self.client_pars).__enter__()
+        self.print(self._cluster)
+        self.print(f"Dashboard: {self._client.dashboard_link}\n")
+        return super().__enter__()
+
+    def __exit__(self, *args):
+        self.print(f"Shutting down {type(self._cluster).__name__}")
+        # self._client.retire_workers()
+        # from time import sleep
+        # sleep(1)
+        # self._client.shutdown()
+        self._client.__exit__(*args)
+        self._cluster.__exit__(*args)
+        super().__exit__(*args)
+
+    def __del__(self):
+        if hasattr(self, "_client") and self._client is not None:
+            self._client.__del__()
+        if hasattr(self, "_cluster") and self._cluster is not None:
+            self._cluster.__del__()
+        super().__del__()
+
+    def run_calculation(
+        self,
+        algo,
+        model,
+        model_data=None,
+        farm_data=None,
+        point_data=None,
+        out_vars=[],
+        chunk_store={},
+        sel=None,
+        isel=None,
+        iterative=False,
+        **calc_pars,
+    ):
+        """
+        Runs the model calculation
+
+        Parameters
+        ----------
+        algo: foxes.core.Algorithm
+            The algorithm object
+        model: foxes.core.DataCalcModel
+            The model whose calculate function
+            should be run
+        model_data: xarray.Dataset
+            The initial model data
+        farm_data: xarray.Dataset
+            The initial farm data
+        point_data: xarray.Dataset
+            The initial point data
+        out_vars: list of str, optional
+            Names of the output variables
+        chunk_store: foxes.utils.Dict
+            The chunk store
+        sel: dict, optional
+            Selection of coordinate subsets
+        isel: dict, optional
+            Selection of coordinate subsets index values
+        iterative: bool
+            Flag for use within the iterative algorithm
+        calc_pars: dict, optional
+            Additional parameters for the model.calculate()
+
+        Returns
+        -------
+        results: xarray.Dataset
+            The model results
+
+        """
+        # subset selection:
+        model_data, farm_data, point_data = self.select_subsets(
+            model_data, farm_data, point_data, sel=sel, isel=isel
+        )
+
+        # basic checks:
+        super().run_calculation(algo, model, model_data, farm_data, point_data)
+
+        # prepare:
+        n_states = model_data.sizes[FC.STATE]
+        out_coords = model.output_coords()
+        coords = {}
+        if FC.STATE in out_coords and FC.STATE in model_data.coords:
+            coords[FC.STATE] = model_data[FC.STATE].to_numpy()
+        if farm_data is None:
+            farm_data = xr.Dataset()
+        goal_data = farm_data if point_data is None else point_data
+
+        # calculate chunk sizes:
+        n_targets = point_data.sizes[FC.TARGET] if point_data is not None else 0
+        chunk_sizes_states, chunk_sizes_targets = self.calc_chunk_sizes(
+            n_states, n_targets
+        )
+        n_chunks_states = len(chunk_sizes_states)
+        n_chunks_targets = len(chunk_sizes_targets)
+        self.print(
+            f"Selecting n_chunks_states = {n_chunks_states}, n_chunks_targets = {n_chunks_targets}",
+            level=2,
+        )
+
+        # scatter algo and model:
+        n_chunks_all = n_chunks_states * n_chunks_targets
+        falgo = self._client.scatter(algo, broadcast=True)
+        fmodel = self._client.scatter(model, broadcast=True)
+        cpars = self._client.scatter(calc_pars, broadcast=True)
+        all_data = [falgo, fmodel, cpars]
+
+        # submit chunks:
+        self.print(f"Submitting {n_chunks_all} chunks to {self.n_procs} processes")
+        pbar = tqdm(total=n_chunks_all) if self.verbosity > 0 else None
+        jobs = {}
+        i0_states = 0
+        for chunki_states in range(n_chunks_states):
+            i1_states = i0_states + chunk_sizes_states[chunki_states]
+            i0_targets = 0
+            for chunki_points in range(n_chunks_targets):
+                i1_targets = i0_targets + chunk_sizes_targets[chunki_points]
+
+                # get this chunk's data:
+                data = self.get_chunk_input_data(
+                    algo=algo,
+                    model_data=model_data,
+                    farm_data=farm_data,
+                    point_data=point_data,
+                    states_i0_i1=(i0_states, i1_states),
+                    targets_i0_i1=(i0_targets, i1_targets),
+                    out_vars=out_vars,
+                )
+
+                # scatter data:
+                fut_data = []
+                names = []
+                dims = []
+                ldims = [d.loop_dims for d in data]
+                for dt in data:
+                    for k, d in dt.items():
+                        fut_data.append(self._client.scatter(d, hash=False))
+                        names.append(k)
+                        dims.append(dt.dims[k])
+                names = self._client.scatter(names)
+                dims = self._client.scatter(dims)
+                ldims = self._client.scatter(ldims)
+                all_data += [fut_data, names, dims, ldims]
+
+                # scatter chunk store data:
+                cstore = chunk_store
+                if len(cstore):
+                    cstore = self._client.scatter(cstore, hash=False)
+                    all_data.append(cstore)
+
+                # submit model calculation:
+                jobs[(chunki_states, chunki_points)] = self._client.submit(
+                    _run_on_cluster,
+                    falgo,
+                    fmodel,
+                    *fut_data,
+                    names=names,
+                    dims=dims,
+                    mdata_size=len(data[0]),
+                    fdata_size=len(data[1]),
+                    loop_dims=ldims,
+                    iterative=iterative,
+                    chunk_store=cstore,
+                    i0_states=i0_states,
+                    i0_targets=i0_targets,
+                    cpars=cpars,
+                    retries=10,
+                )
+                del fut_data, cstore
+
+                i0_targets = i1_targets
+
+                if pbar is not None:
+                    pbar.update()
+
+            i0_states = i1_states
+
+        del falgo, fmodel, farm_data, point_data, calc_pars
+        if pbar is not None:
+            pbar.close()
+
+        # wait for results:
+        self.print(f"Computing {n_chunks_all} chunks using {self.n_procs} processes")
+        pbar = (
+            tqdm(total=n_chunks_all)
+            if n_chunks_all > 1 and self.verbosity > 0
+            else None
+        )
+        results = {}
+        for chunki_states in range(n_chunks_states):
+            for chunki_points in range(n_chunks_targets):
+                key = (chunki_states, chunki_points)
+                results[key] = jobs.get(key).result()
+                if pbar is not None:
+                    pbar.update()
+        if pbar is not None:
+            pbar.close()
+
+        results = self.combine_results(
+            algo=algo,
+            results=results,
+            model_data=model_data,
+            out_vars=out_vars,
+            out_coords=out_coords,
+            n_chunks_states=n_chunks_states,
+            n_chunks_targets=n_chunks_targets,
+            goal_data=goal_data,
+            iterative=iterative,
+        ).persist()
+
+        # self._client.cancel(all_data)
+
+        return results
+
+
+class SlurmClusterEngine(LocalClusterEngine):
+    """
+    The dask engine for foxes calculations on a SLURM cluster.
+
+    :group: engines
+
+    """
+
+    def __enter__(self):
+        self.print("Launching dask cluster on HPC using SLURM..")
+        cargs = deepcopy(self.cluster_pars)
+        nodes = cargs.pop("nodes", 1)
+
+        dask_jobqueue = import_module(
+            "dask_jobqueue", hint="pip install setuptools dask-jobqueue"
+        )
+        self._cluster = dask_jobqueue.SLURMCluster(**cargs)
+        self._cluster.scale(jobs=nodes)
+        self._cluster = self._cluster.__enter__()
+        self._client = distributed.Client(self._cluster, **self.client_pars).__enter__()
+
+        self.print(self._cluster)
+        self.print(f"Dashboard: {self._client.dashboard_link}\n")
+        print(self._cluster.job_script())
+
+        return DaskBaseEngine.__enter__(self)
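
The load_dask and load_distributed helpers above defer heavy imports until an engine is actually constructed. As a standalone illustration (not part of the diff), the same lazy-import pattern can be written with plain importlib; the module name somepkg is a placeholder, and importlib stands in for the foxes.utils.import_module helper:

# Standalone sketch of the lazy-import pattern used in this module;
# "somepkg" is a hypothetical module name.
import importlib

somepkg = None  # resolved on first use

def load_somepkg():
    """Import somepkg only when first needed, with an install hint on failure"""
    global somepkg
    if somepkg is None:
        try:
            somepkg = importlib.import_module("somepkg")
        except ImportError as err:
            raise ImportError("somepkg is required: pip install somepkg") from err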