foxes 0.8.1__py3-none-any.whl → 1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of foxes might be problematic.

Files changed (175)
  1. docs/source/conf.py +353 -0
  2. examples/abl_states/run.py +160 -0
  3. examples/compare_rotors_pwakes/run.py +217 -0
  4. examples/compare_wakes/run.py +241 -0
  5. examples/dyn_wakes/run.py +311 -0
  6. examples/field_data_nc/run.py +121 -0
  7. examples/induction_RHB/run.py +201 -0
  8. examples/multi_height/run.py +113 -0
  9. examples/power_mask/run.py +249 -0
  10. examples/random_timeseries/run.py +210 -0
  11. examples/scan_row/run.py +193 -0
  12. examples/sector_management/run.py +162 -0
  13. examples/sequential/run.py +209 -0
  14. examples/single_state/run.py +201 -0
  15. examples/states_lookup_table/run.py +137 -0
  16. examples/streamline_wakes/run.py +138 -0
  17. examples/tab_file/run.py +142 -0
  18. examples/timelines/run.py +267 -0
  19. examples/timeseries/run.py +183 -0
  20. examples/timeseries_slurm/run.py +185 -0
  21. examples/wind_rose/run.py +141 -0
  22. examples/windio/run.py +29 -0
  23. examples/yawed_wake/run.py +196 -0
  24. foxes/__init__.py +4 -8
  25. foxes/algorithms/__init__.py +1 -1
  26. foxes/algorithms/downwind/downwind.py +232 -101
  27. foxes/algorithms/downwind/models/farm_wakes_calc.py +11 -6
  28. foxes/algorithms/downwind/models/init_farm_data.py +1 -1
  29. foxes/algorithms/downwind/models/point_wakes_calc.py +5 -6
  30. foxes/algorithms/downwind/models/reorder_farm_output.py +0 -1
  31. foxes/algorithms/downwind/models/set_amb_point_results.py +4 -2
  32. foxes/algorithms/iterative/iterative.py +73 -33
  33. foxes/algorithms/iterative/models/farm_wakes_calc.py +11 -6
  34. foxes/algorithms/sequential/models/plugin.py +1 -1
  35. foxes/algorithms/sequential/sequential.py +126 -255
  36. foxes/constants.py +17 -2
  37. foxes/core/__init__.py +1 -0
  38. foxes/core/algorithm.py +631 -146
  39. foxes/core/data.py +252 -20
  40. foxes/core/data_calc_model.py +13 -289
  41. foxes/core/engine.py +630 -0
  42. foxes/core/farm_controller.py +37 -9
  43. foxes/core/farm_data_model.py +15 -0
  44. foxes/core/model.py +133 -80
  45. foxes/core/point_data_model.py +15 -0
  46. foxes/core/rotor_model.py +27 -21
  47. foxes/core/states.py +16 -0
  48. foxes/core/turbine_type.py +28 -0
  49. foxes/core/wake_frame.py +22 -4
  50. foxes/core/wake_model.py +2 -3
  51. foxes/data/windio/windio_5turbines_timeseries.yaml +23 -1
  52. foxes/engines/__init__.py +16 -0
  53. foxes/engines/dask.py +975 -0
  54. foxes/engines/default.py +75 -0
  55. foxes/engines/futures.py +72 -0
  56. foxes/engines/mpi.py +38 -0
  57. foxes/engines/multiprocess.py +74 -0
  58. foxes/engines/numpy.py +185 -0
  59. foxes/engines/pool.py +263 -0
  60. foxes/engines/single.py +139 -0
  61. foxes/input/farm_layout/__init__.py +1 -0
  62. foxes/input/farm_layout/from_csv.py +4 -0
  63. foxes/input/farm_layout/from_json.py +1 -1
  64. foxes/input/farm_layout/grid.py +2 -2
  65. foxes/input/farm_layout/ring.py +65 -0
  66. foxes/input/farm_layout/row.py +2 -2
  67. foxes/input/states/__init__.py +6 -0
  68. foxes/input/states/create/random_abl_states.py +1 -1
  69. foxes/input/states/field_data_nc.py +157 -32
  70. foxes/input/states/multi_height.py +127 -13
  71. foxes/input/states/one_point_flow.py +577 -0
  72. foxes/input/states/scan_ws.py +73 -2
  73. foxes/input/states/states_table.py +204 -35
  74. foxes/input/windio/__init__.py +1 -1
  75. foxes/input/windio/get_states.py +44 -23
  76. foxes/input/windio/read_attributes.py +41 -16
  77. foxes/input/windio/read_farm.py +116 -102
  78. foxes/input/windio/read_fields.py +13 -6
  79. foxes/input/windio/read_outputs.py +63 -22
  80. foxes/input/windio/runner.py +31 -17
  81. foxes/input/windio/windio.py +36 -22
  82. foxes/models/ground_models/wake_mirror.py +8 -4
  83. foxes/models/model_book.py +29 -18
  84. foxes/models/partial_wakes/rotor_points.py +3 -3
  85. foxes/models/rotor_models/centre.py +4 -0
  86. foxes/models/rotor_models/grid.py +22 -23
  87. foxes/models/rotor_models/levels.py +4 -5
  88. foxes/models/turbine_models/calculator.py +0 -2
  89. foxes/models/turbine_models/lookup_table.py +27 -2
  90. foxes/models/turbine_models/rotor_centre_calc.py +4 -3
  91. foxes/models/turbine_models/set_farm_vars.py +103 -34
  92. foxes/models/turbine_types/PCt_file.py +24 -0
  93. foxes/models/turbine_types/PCt_from_two.py +24 -0
  94. foxes/models/turbine_types/__init__.py +1 -0
  95. foxes/models/turbine_types/lookup.py +316 -0
  96. foxes/models/turbine_types/null_type.py +50 -0
  97. foxes/models/turbine_types/wsrho2PCt_from_two.py +24 -0
  98. foxes/models/turbine_types/wsti2PCt_from_two.py +24 -0
  99. foxes/models/vertical_profiles/data_profile.py +1 -1
  100. foxes/models/wake_frames/__init__.py +1 -0
  101. foxes/models/wake_frames/dynamic_wakes.py +424 -0
  102. foxes/models/wake_frames/farm_order.py +23 -3
  103. foxes/models/wake_frames/rotor_wd.py +4 -2
  104. foxes/models/wake_frames/seq_dynamic_wakes.py +56 -63
  105. foxes/models/wake_frames/streamlines.py +19 -20
  106. foxes/models/wake_frames/timelines.py +328 -127
  107. foxes/models/wake_frames/yawed_wakes.py +4 -1
  108. foxes/models/wake_models/dist_sliced.py +1 -3
  109. foxes/models/wake_models/induction/rankine_half_body.py +4 -4
  110. foxes/models/wake_models/induction/rathmann.py +2 -2
  111. foxes/models/wake_models/induction/self_similar.py +2 -2
  112. foxes/models/wake_models/induction/vortex_sheet.py +2 -2
  113. foxes/models/wake_models/ti/iec_ti.py +34 -17
  114. foxes/models/wake_models/top_hat.py +1 -1
  115. foxes/models/wake_models/wind/bastankhah14.py +2 -2
  116. foxes/models/wake_models/wind/bastankhah16.py +8 -7
  117. foxes/models/wake_models/wind/jensen.py +1 -1
  118. foxes/models/wake_models/wind/turbopark.py +2 -2
  119. foxes/output/__init__.py +4 -1
  120. foxes/output/farm_layout.py +2 -2
  121. foxes/output/flow_plots_2d/__init__.py +0 -1
  122. foxes/output/flow_plots_2d/flow_plots.py +70 -30
  123. foxes/output/grids.py +91 -21
  124. foxes/output/seq_plugins/__init__.py +2 -0
  125. foxes/output/{flow_plots_2d → seq_plugins}/seq_flow_ani_plugin.py +62 -20
  126. foxes/output/seq_plugins/seq_wake_debug_plugin.py +145 -0
  127. foxes/output/slice_data.py +131 -111
  128. foxes/output/state_turbine_map.py +18 -13
  129. foxes/output/state_turbine_table.py +19 -19
  130. foxes/utils/__init__.py +1 -1
  131. foxes/utils/dev_utils.py +42 -0
  132. foxes/utils/dict.py +1 -1
  133. foxes/utils/factory.py +147 -52
  134. foxes/utils/pandas_helpers.py +4 -3
  135. foxes/utils/wind_dir.py +0 -2
  136. foxes/utils/xarray_utils.py +25 -13
  137. foxes/variables.py +37 -0
  138. {foxes-0.8.1.dist-info → foxes-1.0.dist-info}/METADATA +72 -34
  139. foxes-1.0.dist-info/RECORD +307 -0
  140. {foxes-0.8.1.dist-info → foxes-1.0.dist-info}/WHEEL +1 -1
  141. foxes-1.0.dist-info/top_level.txt +4 -0
  142. tests/0_consistency/iterative/test_iterative.py +92 -0
  143. tests/0_consistency/partial_wakes/test_partial_wakes.py +90 -0
  144. tests/1_verification/flappy_0_6/PCt_files/flappy/run.py +85 -0
  145. tests/1_verification/flappy_0_6/PCt_files/test_PCt_files.py +103 -0
  146. tests/1_verification/flappy_0_6/abl_states/flappy/run.py +85 -0
  147. tests/1_verification/flappy_0_6/abl_states/test_abl_states.py +87 -0
  148. tests/1_verification/flappy_0_6/partial_top_hat/flappy/run.py +82 -0
  149. tests/1_verification/flappy_0_6/partial_top_hat/test_partial_top_hat.py +82 -0
  150. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/flappy/run.py +92 -0
  151. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/test_row_Jensen_linear_centre.py +93 -0
  152. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/flappy/run.py +92 -0
  153. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/test_row_Jensen_linear_tophat.py +96 -0
  154. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/flappy/run.py +94 -0
  155. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/test_row_Jensen_linear_tophat_IECTI_2005.py +122 -0
  156. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/flappy/run.py +94 -0
  157. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/test_row_Jensen_linear_tophat_IECTI_2019.py +122 -0
  158. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/flappy/run.py +92 -0
  159. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/test_row_Jensen_quadratic_centre.py +93 -0
  160. tests/1_verification/flappy_0_6_2/grid_rotors/flappy/run.py +85 -0
  161. tests/1_verification/flappy_0_6_2/grid_rotors/test_grid_rotors.py +130 -0
  162. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/flappy/run.py +96 -0
  163. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/test_row_Bastankhah_Crespo.py +116 -0
  164. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/flappy/run.py +93 -0
  165. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/test_row_Bastankhah_linear_centre.py +99 -0
  166. tests/3_examples/test_examples.py +34 -0
  167. foxes/VERSION +0 -1
  168. foxes/output/flow_plots_2d.py +0 -0
  169. foxes/utils/plotly_helpers.py +0 -19
  170. foxes/utils/runners/__init__.py +0 -1
  171. foxes/utils/runners/runners.py +0 -280
  172. foxes-0.8.1.dist-info/RECORD +0 -248
  173. foxes-0.8.1.dist-info/top_level.txt +0 -1
  174. foxes-0.8.1.dist-info/zip-safe +0 -1
  175. {foxes-0.8.1.dist-info → foxes-1.0.dist-info}/LICENSE +0 -0
foxes/engines/dask.py ADDED
@@ -0,0 +1,975 @@
+ import numpy as np
+ import xarray as xr
+ from copy import deepcopy
+ from tqdm import tqdm
+
+ from foxes.core import Engine, MData, FData, TData
+ from foxes.utils import import_module
+ import foxes.variables as FV
+ import foxes.constants as FC
+
+
+ def delayed(func):
+     """A dummy decorator"""
+     return func
+
+
+ def load_dask():
+     """On-demand loading of the dask package"""
+     global dask, ProgressBar, delayed
+     dask = import_module("dask", hint="pip install dask")
+     ProgressBar = import_module("dask.diagnostics", hint="pip install dask").ProgressBar
+     delayed = dask.delayed
+
+
+ def load_distributed():
+     """On-demand loading of the distributed package"""
+     global distributed
+     distributed = import_module("distributed", hint="pip install distributed")
+
+
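The module keeps dask optional: `delayed` starts out as a no-op decorator and is swapped for `dask.delayed` once `load_dask()` runs, so importing the engine module does not require dask until an engine is instantiated. A minimal sketch of what an import helper of this kind could look like (a hypothetical stand-in for `foxes.utils.import_module`, reconstructed by assumption):

    import importlib

    def import_module(name, hint=None):
        # hypothetical stand-in: import a module by name and raise a
        # helpful error, including an install hint, if it is missing
        try:
            return importlib.import_module(name)
        except ImportError as err:
            msg = f"Failed to import module '{name}'."
            if hint is not None:
                msg += f" Hint: {hint}"
            raise ImportError(msg) from err
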
+ class DaskBaseEngine(Engine):
+     """
+     Abstract base class for foxes calculations with dask.
+
+     Parameters
+     ----------
+     dask_config: dict
+         The dask configuration parameters
+     progress_bar: bool
+         Flag for showing progress bar
+
+     :group: engines
+
+     """
+
+     def __init__(
+         self,
+         *args,
+         dask_config={},
+         progress_bar=True,
+         **kwargs,
+     ):
+         """
+         Constructor.
+
+         Parameters
+         ----------
+         args: tuple, optional
+             Additional parameters for the base class
+         dask_config: dict, optional
+             The dask configuration parameters
+         progress_bar: bool
+             Flag for showing progress bar
+         kwargs: dict, optional
+             Additional parameters for the base class
+
+         """
+         super().__init__(*args, **kwargs)
+
+         load_dask()
+
+         self.dask_config = dask_config
+         self.progress_bar = progress_bar
+
+     def __enter__(self):
+         if self.progress_bar:
+             self._pbar = ProgressBar(minimum=2)
+             self._pbar.__enter__()
+         return super().__enter__()
+
+     def __exit__(self, *args):
+         if self.progress_bar:
+             self._pbar.__exit__(*args)
+         super().__exit__(*args)
+
+     def initialize(self):
+         """
+         Initializes the engine.
+         """
+         dask.config.set(**self.dask_config)
+         super().initialize()
+
+     def chunk_data(self, data):
+         """
+         Applies the selected chunking
+
+         Parameters
+         ----------
+         data: xarray.Dataset
+             The data to be chunked
+
+         Returns
+         -------
+         data: xarray.Dataset
+             The chunked data
+
+         """
+         cks = {}
+         cks[FC.STATE] = min(data.sizes[FC.STATE], self.chunk_size_states)
+         if FC.TARGET in data.sizes:
+             cks[FC.TARGET] = min(data.sizes[FC.TARGET], self.chunk_size_points)
+
+         if len(set(cks.keys()).intersection(data.coords.keys())):
+             return data.chunk({v: d for v, d in cks.items() if v in data.coords})
+         else:
+             return data
+
+     def finalize(self, *exit_args, **exit_kwargs):
+         """
+         Finalizes the engine.
+
+         Parameters
+         ----------
+         exit_args: tuple, optional
+             Arguments from the exit function
+         exit_kwargs: dict, optional
+             Arguments from the exit function
+
+         """
+         dask.config.refresh()
+         super().finalize(*exit_args, **exit_kwargs)
+
+
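Note that `chunk_data` above only re-chunks along dimensions that also appear as coordinates, capping each chunk at the configured state/point sizes. The same effect in plain xarray (illustrative sizes, not foxes defaults; requires dask):

    import numpy as np
    import xarray as xr

    ds = xr.Dataset(
        {"WS": (("state", "target"), np.random.rand(1000, 50))},
        coords={"state": np.arange(1000)},
    )
    # "state" is a coordinate, so it gets chunked; "target" is not, so it stays whole
    chunked = ds.chunk({"state": min(ds.sizes["state"], 200)})
    print(chunked["WS"].chunks)  # ((200, 200, 200, 200, 200), (50,))
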
+ def _run_as_ufunc(
+     state_inds,
+     *ldata,
+     algo,
+     dvars,
+     lvars,
+     ldims,
+     evars,
+     edims,
+     edata,
+     loop_dims,
+     out_vars,
+     out_coords,
+     calc_pars,
+     init_vars,
+     ensure_variables,
+     calculate,
+ ):
+     """
+     Wrapper that mediates between apply_ufunc and `calculate`.
+     """
+     n_prev = len(init_vars)
+     if n_prev:
+         prev = ldata[:n_prev]
+         ldata = ldata[n_prev:]
+
+     # reconstruct original data:
+     data = []
+     for i, hvars in enumerate(dvars):
+         v2l = {v: lvars.index(v) for v in hvars if v in lvars}
+         v2e = {v: evars.index(v) for v in hvars if v in evars}
+
+         hdata = {v: ldata[v2l[v]] if v in v2l else edata[v2e[v]] for v in hvars}
+         hdims = {v: ldims[v2l[v]] if v in v2l else edims[v2e[v]] for v in hvars}
+
+         if i == 0:
+             data.append(
+                 MData(
+                     data=hdata, dims=hdims, loop_dims=loop_dims, states_i0=state_inds[0]
+                 )
+             )
+         elif i == 1:
+             data.append(
+                 FData(
+                     data=hdata, dims=hdims, loop_dims=loop_dims, states_i0=state_inds[0]
+                 )
+             )
+         elif i == 2:
+             data.append(
+                 TData(
+                     data=hdata, dims=hdims, loop_dims=loop_dims, states_i0=state_inds[0]
+                 )
+             )
+         else:
+             raise NotImplementedError(
+                 f"Not more than 3 data sets implemented, found {len(dvars)}"
+             )
+
+         del hdata, hdims, v2l, v2e
+
+     # deduce output shape:
+     oshape = []
+     for li, l in enumerate(out_coords):
+         for i, dims in enumerate(ldims):
+             if l in dims:
+                 oshape.append(ldata[i].shape[dims.index(l)])
+                 break
+         if len(oshape) != li + 1:
+             raise ValueError(f"Failed to find loop dimension")
+
+     # add initial output data arrays, filled with NaN:
+     odims = {v: tuple(out_coords) for v in out_vars}
+     odata = {
+         v: (
+             np.full(oshape, np.nan, dtype=FC.DTYPE)
+             if v not in init_vars
+             else prev[init_vars.index(v)].copy()
+         )
+         for v in out_vars
+         if v not in data[-1]
+     }
+
+     if len(data) == 1:
+         data.append(FData(odata, odims, loop_dims, states_i0=state_inds[0]))
+     else:
+         odata.update(data[-1])
+         odims.update(data[-1].dims)
+         if len(data) == 2:
+             data[-1] = FData(odata, odims, loop_dims, states_i0=state_inds[0])
+         else:
+             data[-1] = TData(odata, odims, loop_dims, states_i0=state_inds[0])
+     del odims, odata
+
+     # link chunk state indices from mdata to fdata and tdata:
+     if FC.STATE in data[0]:
+         for d in data[1:]:
+             d[FC.STATE] = data[0][FC.STATE]
+
+     # link weights from mdata to fdata:
+     if FV.WEIGHT in data[0]:
+         data[1][FV.WEIGHT] = data[0][FV.WEIGHT]
+         data[1].dims[FV.WEIGHT] = data[0].dims[FV.WEIGHT]
+
+     # run model calculation:
+     ensure_variables(algo, *data)
+     results = calculate(algo, *data, **calc_pars)
+
+     # replace missing results by first input data with matching shape:
+     missing = set(out_vars).difference(results.keys())
+     if len(missing):
+         found = set()
+         for v in missing:
+             for dta in data:
+                 if v in dta and dta[v].shape == tuple(oshape):
+                     results[v] = dta[v]
+                     found.add(v)
+                     break
+         missing -= found
+         if len(missing):
+             raise ValueError(
+                 f"Missing results {list(missing)}, expected shape {oshape}"
+             )
+     del data
+
+     # create output:
+     n_vars = len(out_vars)
+     data = np.zeros(oshape + [n_vars], dtype=FC.DTYPE)
+     for v in out_vars:
+         data[..., out_vars.index(v)] = results[v]
+
+     return data
+
+
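`apply_ufunc` expects one numpy array back per output, so `_run_as_ufunc` packs all result variables into a single array with a trailing variables axis; the caller later unpacks it with `to_dataset(dim=FC.VARS)`. The round trip in isolation (toy dimension names, not foxes constants):

    import numpy as np
    import xarray as xr

    oshape = (4, 3)  # e.g. (state, turbine)
    results = {"WS": np.ones(oshape), "P": np.full(oshape, 1500.0)}
    out_vars = ["WS", "P"]

    # pack all variables along one trailing axis, as the wrapper does
    packed = np.stack([results[v] for v in out_vars], axis=-1)

    # unpack back into a Dataset, as done after apply_ufunc returns
    da = xr.DataArray(packed, dims=("state", "turbine", "vars"))
    print(da.assign_coords(vars=out_vars).to_dataset(dim="vars"))
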
+ class XArrayEngine(DaskBaseEngine):
+     """
+     The engine for foxes calculations via xarray.apply_ufunc.
+
+     :group: engines
+
+     """
+
+     def run_calculation(
+         self,
+         algo,
+         model,
+         model_data=None,
+         farm_data=None,
+         point_data=None,
+         out_vars=[],
+         chunk_store={},
+         sel=None,
+         isel=None,
+         persist=True,
+         iterative=False,
+         **calc_pars,
+     ):
+         """
+         Runs the model calculation
+
+         Parameters
+         ----------
+         algo: foxes.core.Algorithm
+             The algorithm object
+         model: foxes.core.DataCalcModel
+             The model whose calculate function
+             should be run
+         model_data: xarray.Dataset
+             The initial model data
+         farm_data: xarray.Dataset
+             The initial farm data
+         point_data: xarray.Dataset
+             The initial point data
+         out_vars: list of str, optional
+             Names of the output variables
+         chunk_store: foxes.utils.Dict
+             The chunk store
+         sel: dict, optional
+             Selection of coordinate subsets
+         isel: dict, optional
+             Selection of coordinate subsets by index values
+         persist: bool
+             Flag for persisting xarray Dataset objects
+         iterative: bool
+             Flag for use within the iterative algorithm
+         calc_pars: dict, optional
+             Additional parameters for the model.calculate()
+
+         Returns
+         -------
+         results: xarray.Dataset
+             The model results
+
+         """
+         # subset selection:
+         model_data, farm_data, point_data = self.select_subsets(
+             model_data, farm_data, point_data, sel=sel, isel=isel
+         )
+
+         # basic checks:
+         super().run_calculation(algo, model, model_data, farm_data, point_data)
+
+         # find chunk sizes, if not given:
+         chunk_size_states0 = self.chunk_size_states
+         chunk_size_points0 = self.chunk_size_points
+         n_states = model_data.sizes[FC.STATE]
+         n_targets = point_data.sizes[FC.TARGET] if point_data is not None else 0
+         __, chunk_sizes_states, chunk_sizes_targets = self.calc_chunk_sizes(
+             n_states, n_targets
+         )
+         self.chunk_size_states = np.min(chunk_sizes_states)
+         self.chunk_size_points = np.min(chunk_sizes_targets)
+         self.print(
+             f"Selecting chunk_size_states = {self.chunk_size_states}, chunk_size_points = {self.chunk_size_points}"
+         )  # , level=2)
+
+         # prepare:
+         algo.reset_chunk_store(chunk_store)
+         out_coords = model.output_coords()
+         loop_dims = [d for d in self.loop_dims if d in out_coords]
+         loopd = set(loop_dims)
+
+         # extract loop-var dependent and independent data:
+         ldata = []
+         lvars = []
+         ldims = []
+         edata = []
+         evars = []
+         edims = []
+         dvars = []
+         ivars = []
+         idims = []
+         data = [
+             self.chunk_data(d)
+             for d in [model_data, farm_data, point_data]
+             if d is not None
+         ]
+         for ds in data:
+
+             hvarsl = [v for v, d in ds.items() if len(loopd.intersection(d.dims))]
+             ldata += [ds[v] for v in hvarsl]
+             ldims += [ds[v].dims for v in hvarsl]
+             lvars += hvarsl
+
+             hvarse = [v for v in ds.keys() if v not in hvarsl]
+             edata += [ds[v].values for v in hvarse]
+             edims += [ds[v].dims for v in hvarse]
+             evars += hvarse
+
+             for c, d in ds.coords.items():
+                 if c in loopd:
+                     ldata.append(
+                         self.chunk_data(
+                             xr.DataArray(data=d.values, coords={c: d}, dims=[c])
+                         )
+                     )
+                     ldims.append((c,))
+                     lvars.append(c)
+                 else:
+                     edata.append(d.values)
+                     edims.append((c,))
+                     evars.append(c)
+
+             dvars.append(list(ds.keys()) + list(ds.coords.keys()))
+
+         # apply persist:
+         if persist:
+             ldata = [d.persist() for d in ldata]
+
+         # setup dask options:
+         dargs = dict(output_sizes={FC.VARS: len(out_vars)})
+         out_core_vars = [d for d in out_coords if d not in loop_dims] + [FC.VARS]
+         if FC.TURBINE in loopd and FC.TURBINE not in ldims.values():
+             dargs["output_sizes"][FC.TURBINE] = algo.n_turbines
+
+         # find states_i0:
+         state_inds = self.chunk_data(
+             xr.DataArray(
+                 np.arange(ldata[0].sizes[FC.STATE]),
+                 dims=FC.STATE,
+                 coords={FC.STATE: ldata[0][FC.STATE].to_numpy()},
+             )
+         )
+
+         # setup arguments for wrapper function:
+         out_coords = loop_dims + list(set(out_core_vars).difference([FC.VARS]))
+         wargs = dict(
+             algo=algo,
+             dvars=dvars,
+             lvars=lvars,
+             ldims=ldims,
+             evars=evars,
+             edims=edims,
+             edata=edata,
+             loop_dims=loop_dims,
+             out_vars=out_vars,
+             out_coords=out_coords,
+             calc_pars=calc_pars,
+             init_vars=ivars,
+             ensure_variables=model.ensure_variables,
+             calculate=model.calculate,
+         )
+
+         # run parallel computation:
+         iidims = [[c for c in d if c not in loopd] for d in idims]
+         icdims = [[c for c in d if c not in loopd] for d in ldims]
+         results = xr.apply_ufunc(
+             _run_as_ufunc,
+             state_inds,
+             *ldata,
+             input_core_dims=[[]] + iidims + icdims,
+             output_core_dims=[out_core_vars],
+             output_dtypes=[FC.DTYPE],
+             dask="parallelized",
+             dask_gufunc_kwargs=dargs,
+             kwargs=wargs,
+         )
+
+         results = results.assign_coords({FC.VARS: out_vars}).to_dataset(dim=FC.VARS)
+
+         # reset:
+         self.chunk_size_states = chunk_size_states0
+         self.chunk_size_points = chunk_size_points0
+
+         # update data by calculation results:
+         return results.compute(num_workers=self.n_procs)
+
+
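The heavy lifting in `XArrayEngine` is delegated to `xr.apply_ufunc` with `dask="parallelized"`, which maps a plain numpy function over every dask chunk and reassembles the results. A stripped-down analogue of the call above:

    import numpy as np
    import xarray as xr

    def wake_deficit(ws):
        # plain numpy function, invoked once per dask chunk
        return 0.9 * ws

    da = xr.DataArray(np.random.rand(1000), dims="state").chunk({"state": 100})
    res = xr.apply_ufunc(
        wake_deficit,
        da,
        dask="parallelized",
        output_dtypes=[np.float64],
    )
    print(res.compute().values[:5])
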
+ @delayed
+ def _run_lazy(algo, model, iterative, chunk_store, i0_t0, *data, **cpars):
+     """Helper function for lazy running"""
+     algo.reset_chunk_store(chunk_store)
+     results = model.calculate(algo, *data, **cpars)
+     chunk_store = algo.reset_chunk_store() if iterative else {}
+     cstore = {i0_t0: chunk_store[i0_t0]} if i0_t0 in chunk_store else {}
+     return results, cstore
+
+
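Because `_run_lazy` is wrapped in `@delayed`, calling it only records a task; nothing runs until `dask.compute` is invoked on the collected results, at which point all chunks execute in parallel. The pattern in miniature:

    import dask
    from dask import delayed

    @delayed
    def square(x):
        return x * x

    # calls return placeholders; the graph executes in one go
    tasks = {i: square(i) for i in range(4)}
    print(dask.compute(tasks)[0])  # {0: 0, 1: 1, 2: 4, 3: 9}
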
+ class DaskEngine(DaskBaseEngine):
+     """
+     The dask engine for delayed foxes calculations.
+
+     :group: engines
+
+     """
+
+     def run_calculation(
+         self,
+         algo,
+         model,
+         model_data=None,
+         farm_data=None,
+         point_data=None,
+         out_vars=[],
+         chunk_store={},
+         sel=None,
+         isel=None,
+         iterative=False,
+         **calc_pars,
+     ):
+         """
+         Runs the model calculation
+
+         Parameters
+         ----------
+         algo: foxes.core.Algorithm
+             The algorithm object
+         model: foxes.core.DataCalcModel
+             The model whose calculate function
+             should be run
+         model_data: xarray.Dataset
+             The initial model data
+         farm_data: xarray.Dataset
+             The initial farm data
+         point_data: xarray.Dataset
+             The initial point data
+         out_vars: list of str, optional
+             Names of the output variables
+         chunk_store: foxes.utils.Dict
+             The chunk store
+         sel: dict, optional
+             Selection of coordinate subsets
+         isel: dict, optional
+             Selection of coordinate subsets by index values
+         iterative: bool
+             Flag for use within the iterative algorithm
+         calc_pars: dict, optional
+             Additional parameters for the model.calculate()
+
+         Returns
+         -------
+         results: xarray.Dataset
+             The model results
+
+         """
+         # subset selection:
+         model_data, farm_data, point_data = self.select_subsets(
+             model_data, farm_data, point_data, sel=sel, isel=isel
+         )
+
+         # basic checks:
+         super().run_calculation(algo, model, model_data, farm_data, point_data)
+
+         # prepare:
+         n_states = model_data.sizes[FC.STATE]
+         out_coords = model.output_coords()
+         coords = {}
+         if FC.STATE in out_coords and FC.STATE in model_data.coords:
+             coords[FC.STATE] = model_data[FC.STATE].to_numpy()
+         if farm_data is None:
+             farm_data = xr.Dataset()
+         goal_data = farm_data if point_data is None else point_data
+
+         # calculate chunk sizes:
+         n_targets = point_data.sizes[FC.TARGET] if point_data is not None else 0
+         chunk_sizes_states, chunk_sizes_targets = self.calc_chunk_sizes(
+             n_states, n_targets
+         )
+         n_chunks_states = len(chunk_sizes_states)
+         n_chunks_targets = len(chunk_sizes_targets)
+         self.print(
+             f"Selecting n_chunks_states = {n_chunks_states}, n_chunks_targets = {n_chunks_targets}",
+             level=2,
+         )
+
+         # submit chunks:
+         n_chunks_all = n_chunks_states * n_chunks_targets
+         self.print(
+             f"Submitting {n_chunks_all} chunks to {self.n_procs} processes", level=2
+         )
+         pbar = tqdm(total=n_chunks_all) if self.verbosity > 1 else None
+         results = {}
+         i0_states = 0
+         for chunki_states in range(n_chunks_states):
+             i1_states = i0_states + chunk_sizes_states[chunki_states]
+             i0_targets = 0
+             for chunki_points in range(n_chunks_targets):
+                 i1_targets = i0_targets + chunk_sizes_targets[chunki_points]
+
+                 # get this chunk's data:
+                 data = self.get_chunk_input_data(
+                     algo=algo,
+                     model_data=model_data,
+                     farm_data=farm_data,
+                     point_data=point_data,
+                     states_i0_i1=(i0_states, i1_states),
+                     targets_i0_i1=(i0_targets, i1_targets),
+                     out_vars=out_vars,
+                 )
+
+                 # submit model calculation:
+                 results[(chunki_states, chunki_points)] = _run_lazy(
+                     deepcopy(algo),
+                     deepcopy(model),
+                     iterative,
+                     chunk_store,
+                     (i0_states, i0_targets),
+                     *data,
+                     **calc_pars,
+                 )
+                 del data
+
+                 i0_targets = i1_targets
+
+                 if pbar is not None:
+                     pbar.update()
+
+             i0_states = i1_states
+
+         del farm_data, point_data, calc_pars
+         if pbar is not None:
+             pbar.close()
+
+         # wait for results:
+         if n_chunks_all > 1 or self.verbosity > 1:
+             self.print(
+                 f"Computing {n_chunks_all} chunks using {self.n_procs} processes"
+             )
+         results = dask.compute(results)[0]
+
+         return self.combine_results(
+             algo=algo,
+             results=results,
+             model_data=model_data,
+             out_vars=out_vars,
+             out_coords=out_coords,
+             n_chunks_states=n_chunks_states,
+             n_chunks_targets=n_chunks_targets,
+             goal_data=goal_data,
+             iterative=iterative,
+         )
+
+
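The nested submission loop above keeps running offsets `i0_states`/`i0_targets`, so every chunk is addressed by its start/end indices without any global index arithmetic. The bookkeeping on its own (chunk sizes are illustrative, not what `calc_chunk_sizes` would return):

    chunk_sizes_states = [4, 4, 2]   # illustrative values
    chunk_sizes_targets = [3, 3]

    i0_s = 0
    for cs in chunk_sizes_states:
        i1_s, i0_t = i0_s + cs, 0
        for ct in chunk_sizes_targets:
            i1_t = i0_t + ct
            print(f"states [{i0_s}, {i1_s}), targets [{i0_t}, {i1_t})")
            i0_t = i1_t
        i0_s = i1_s
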
+ def _run_on_cluster(
+     algo,
+     model,
+     *data,
+     names,
+     dims,
+     mdata_size,
+     fdata_size,
+     loop_dims,
+     iterative,
+     chunk_store,
+     i0_states,
+     i0_targets,
+     cpars,
+ ):
+     """Helper function for running on a cluster"""
+
+     algo.reset_chunk_store(chunk_store)
+
+     mdata = MData(
+         data={names[i]: data[i] for i in range(mdata_size)},
+         dims={names[i]: dims[i] for i in range(mdata_size)},
+         loop_dims=loop_dims[0],
+         states_i0=i0_states,
+     )
+
+     fdata_end = mdata_size + fdata_size
+     fdata = FData(
+         data={names[i]: data[i].copy() for i in range(mdata_size, fdata_end)},
+         dims={names[i]: dims[i] for i in range(mdata_size, fdata_end)},
+         loop_dims=loop_dims[1],
+         states_i0=i0_states,
+     )
+
+     tdata = None
+     if len(data) > fdata_end:
+         tdata = TData(
+             data={names[i]: data[i].copy() for i in range(fdata_end, len(data))},
+             dims={names[i]: dims[i] for i in range(fdata_end, len(data))},
+             loop_dims=loop_dims[2],
+             states_i0=i0_states,
+         )
+
+     data = [d for d in [mdata, fdata, tdata] if d is not None]
+
+     results = model.calculate(algo, *data, **cpars)
+     chunk_store = algo.reset_chunk_store() if iterative else {}
+
+     k = (i0_states, i0_targets)
+     cstore = {k: chunk_store[k]} if k in chunk_store else {}
+     return results, cstore
+
+
+ class LocalClusterEngine(DaskBaseEngine):
+     """
+     The dask engine for foxes calculations on a local cluster.
+
+     Attributes
+     ----------
+     cluster_pars: dict
+         Parameters for the cluster
+     client_pars: dict
+         Parameters for the client of the cluster
+
+     :group: engines
+
+     """
+
+     def __init__(
+         self,
+         *args,
+         cluster_pars={},
+         client_pars={},
+         **kwargs,
+     ):
+         """
+         Constructor.
+
+         Parameters
+         ----------
+         args: tuple, optional
+             Additional parameters for the DaskBaseEngine class
+         cluster_pars: dict
+             Parameters for the cluster
+         client_pars: dict
+             Parameters for the client of the cluster
+         kwargs: dict, optional
+             Additional parameters for the base class
+
+         """
+         super().__init__(*args, **kwargs)
+
+         load_distributed()
+
+         self.cluster_pars = cluster_pars
+         self.client_pars = client_pars
+
+         self.dask_config["scheduler"] = "distributed"
+         self.dask_config["distributed.scheduler.worker-ttl"] = None
+
+         self._cluster = None
+         self._client = None
+
+     def __enter__(self):
+         self.print("Launching local dask cluster..")
+         self._cluster = distributed.LocalCluster(
+             n_workers=self.n_procs, **self.cluster_pars
+         ).__enter__()
+         self._client = distributed.Client(self._cluster, **self.client_pars).__enter__()
+         self.print(self._cluster)
+         self.print(f"Dashboard: {self._client.dashboard_link}\n")
+         return super().__enter__()
+
+     def __exit__(self, *args):
+         self.print(f"Shutting down {type(self._cluster).__name__}")
+         # self._client.retire_workers()
+         # from time import sleep
+         # sleep(1)
+         # self._client.shutdown()
+         self._client.__exit__(*args)
+         self._cluster.__exit__(*args)
+         super().__exit__(*args)
+
+     def __del__(self):
+         if hasattr(self, "_client") and self._client is not None:
+             self._client.__del__()
+         if hasattr(self, "_cluster") and self._cluster is not None:
+             self._cluster.__del__()
+         super().__del__()
+
+     def run_calculation(
+         self,
+         algo,
+         model,
+         model_data=None,
+         farm_data=None,
+         point_data=None,
+         out_vars=[],
+         chunk_store={},
+         sel=None,
+         isel=None,
+         iterative=False,
+         **calc_pars,
+     ):
+         """
+         Runs the model calculation
+
+         Parameters
+         ----------
+         algo: foxes.core.Algorithm
+             The algorithm object
+         model: foxes.core.DataCalcModel
+             The model whose calculate function
+             should be run
+         model_data: xarray.Dataset
+             The initial model data
+         farm_data: xarray.Dataset
+             The initial farm data
+         point_data: xarray.Dataset
+             The initial point data
+         out_vars: list of str, optional
+             Names of the output variables
+         chunk_store: foxes.utils.Dict
+             The chunk store
+         sel: dict, optional
+             Selection of coordinate subsets
+         isel: dict, optional
+             Selection of coordinate subsets by index values
+         iterative: bool
+             Flag for use within the iterative algorithm
+         calc_pars: dict, optional
+             Additional parameters for the model.calculate()
+
+         Returns
+         -------
+         results: xarray.Dataset
+             The model results
+
+         """
+         # subset selection:
+         model_data, farm_data, point_data = self.select_subsets(
+             model_data, farm_data, point_data, sel=sel, isel=isel
+         )
+
+         # basic checks:
+         super().run_calculation(algo, model, model_data, farm_data, point_data)
+
+         # prepare:
+         n_states = model_data.sizes[FC.STATE]
+         out_coords = model.output_coords()
+         coords = {}
+         if FC.STATE in out_coords and FC.STATE in model_data.coords:
+             coords[FC.STATE] = model_data[FC.STATE].to_numpy()
+         if farm_data is None:
+             farm_data = xr.Dataset()
+         goal_data = farm_data if point_data is None else point_data
+
+         # calculate chunk sizes:
+         n_targets = point_data.sizes[FC.TARGET] if point_data is not None else 0
+         chunk_sizes_states, chunk_sizes_targets = self.calc_chunk_sizes(
+             n_states, n_targets
+         )
+         n_chunks_states = len(chunk_sizes_states)
+         n_chunks_targets = len(chunk_sizes_targets)
+         self.print(
+             f"Selecting n_chunks_states = {n_chunks_states}, n_chunks_targets = {n_chunks_targets}",
+             level=2,
+         )
+
+         # scatter algo and model:
+         n_chunks_all = n_chunks_states * n_chunks_targets
+         falgo = self._client.scatter(algo, broadcast=True)
+         fmodel = self._client.scatter(model, broadcast=True)
+         cpars = self._client.scatter(calc_pars, broadcast=True)
+         all_data = [falgo, fmodel, cpars]
+
+         # submit chunks:
+         self.print(f"Submitting {n_chunks_all} chunks to {self.n_procs} processes")
+         pbar = tqdm(total=n_chunks_all) if self.verbosity > 0 else None
+         jobs = {}
+         i0_states = 0
+         for chunki_states in range(n_chunks_states):
+             i1_states = i0_states + chunk_sizes_states[chunki_states]
+             i0_targets = 0
+             for chunki_points in range(n_chunks_targets):
+                 i1_targets = i0_targets + chunk_sizes_targets[chunki_points]
+
+                 # get this chunk's data:
+                 data = self.get_chunk_input_data(
+                     algo=algo,
+                     model_data=model_data,
+                     farm_data=farm_data,
+                     point_data=point_data,
+                     states_i0_i1=(i0_states, i1_states),
+                     targets_i0_i1=(i0_targets, i1_targets),
+                     out_vars=out_vars,
+                 )
+
+                 # scatter data:
+                 fut_data = []
+                 names = []
+                 dims = []
+                 ldims = [d.loop_dims for d in data]
+                 for dt in data:
+                     for k, d in dt.items():
+                         fut_data.append(self._client.scatter(d, hash=False))
+                         names.append(k)
+                         dims.append(dt.dims[k])
+                 names = self._client.scatter(names)
+                 dims = self._client.scatter(dims)
+                 ldims = self._client.scatter(ldims)
+                 all_data += [fut_data, names, dims, ldims]
+
+                 # scatter chunk store data:
+                 cstore = chunk_store
+                 if len(cstore):
+                     cstore = self._client.scatter(cstore, hash=False)
+                     all_data.append(cstore)
+
+                 # submit model calculation:
+                 jobs[(chunki_states, chunki_points)] = self._client.submit(
+                     _run_on_cluster,
+                     falgo,
+                     fmodel,
+                     *fut_data,
+                     names=names,
+                     dims=dims,
+                     mdata_size=len(data[0]),
+                     fdata_size=len(data[1]),
+                     loop_dims=ldims,
+                     iterative=iterative,
+                     chunk_store=cstore,
+                     i0_states=i0_states,
+                     i0_targets=i0_targets,
+                     cpars=cpars,
+                     retries=10,
+                 )
+                 del fut_data, cstore
+
+                 i0_targets = i1_targets
+
+                 if pbar is not None:
+                     pbar.update()
+
+             i0_states = i1_states
+
+         del falgo, fmodel, farm_data, point_data, calc_pars
+         if pbar is not None:
+             pbar.close()
+
+         # wait for results:
+         self.print(f"Computing {n_chunks_all} chunks using {self.n_procs} processes")
+         pbar = (
+             tqdm(total=n_chunks_all)
+             if n_chunks_all > 1 and self.verbosity > 0
+             else None
+         )
+         results = {}
+         for chunki_states in range(n_chunks_states):
+             for chunki_points in range(n_chunks_targets):
+                 key = (chunki_states, chunki_points)
+                 results[key] = jobs.get(key).result()
+                 if pbar is not None:
+                     pbar.update()
+         if pbar is not None:
+             pbar.close()
+
+         results = self.combine_results(
+             algo=algo,
+             results=results,
+             model_data=model_data,
+             out_vars=out_vars,
+             out_coords=out_coords,
+             n_chunks_states=n_chunks_states,
+             n_chunks_targets=n_chunks_targets,
+             goal_data=goal_data,
+             iterative=iterative,
+         ).persist()
+
+         # self._client.cancel(all_data)
+
+         return results
+
+
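`LocalClusterEngine` enters cluster and client as context managers and uses `Client.scatter(..., broadcast=True)` so that large shared objects (`algo`, `model`) travel to each worker once instead of being serialized into every submitted task. The same pattern stand-alone (requires the distributed package; toy `work` function and payload):

    from distributed import Client, LocalCluster

    def work(model, x):
        return model["scale"] * x

    if __name__ == "__main__":
        with LocalCluster(n_workers=2) as cluster, Client(cluster) as client:
            # ship the shared object to all workers once
            fmodel = client.scatter({"scale": 10.0}, broadcast=True)
            futures = [client.submit(work, fmodel, x) for x in range(4)]
            print(client.gather(futures))  # [0.0, 10.0, 20.0, 30.0]
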
+ class SlurmClusterEngine(LocalClusterEngine):
+     """
+     The dask engine for foxes calculations on a SLURM cluster.
+
+     :group: engines
+
+     """
+
+     def __enter__(self):
+         self.print("Launching dask cluster on HPC using SLURM..")
+         cargs = deepcopy(self.cluster_pars)
+         nodes = cargs.pop("nodes", 1)
+
+         dask_jobqueue = import_module(
+             "dask_jobqueue", hint="pip install setuptools dask-jobqueue"
+         )
+         self._cluster = dask_jobqueue.SLURMCluster(**cargs)
+         self._cluster.scale(jobs=nodes)
+         self._cluster = self._cluster.__enter__()
+         self._client = distributed.Client(self._cluster, **self.client_pars).__enter__()
+
+         self.print(self._cluster)
+         self.print(f"Dashboard: {self._client.dashboard_link}\n")
+         print(self._cluster.job_script())
+
+         return DaskBaseEngine.__enter__(self)
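`SlurmClusterEngine` only swaps how the cluster is constructed: a `dask_jobqueue.SLURMCluster` scaled to the requested number of SLURM jobs, with everything else inherited from `LocalClusterEngine`. A minimal stand-alone configuration (queue name and resource values are site-specific assumptions):

    from dask_jobqueue import SLURMCluster
    from distributed import Client

    cluster = SLURMCluster(
        queue="compute",       # assumption: the SLURM partition at your site
        cores=32,
        memory="64GB",
        walltime="01:00:00",
    )
    cluster.scale(jobs=2)      # request two SLURM jobs
    client = Client(cluster)
    print(cluster.job_script())  # inspect the generated sbatch script
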