foxes 0.8.2__py3-none-any.whl → 1.1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of foxes might be problematic.

Files changed (215)
  1. docs/source/conf.py +353 -0
  2. examples/abl_states/run.py +160 -0
  3. examples/compare_rotors_pwakes/run.py +217 -0
  4. examples/compare_wakes/run.py +241 -0
  5. examples/dyn_wakes/run.py +311 -0
  6. examples/field_data_nc/run.py +121 -0
  7. examples/induction/run.py +201 -0
  8. examples/multi_height/run.py +113 -0
  9. examples/power_mask/run.py +249 -0
  10. examples/random_timeseries/run.py +210 -0
  11. examples/scan_row/run.py +193 -0
  12. examples/sector_management/run.py +162 -0
  13. examples/sequential/run.py +209 -0
  14. examples/single_state/run.py +201 -0
  15. examples/states_lookup_table/run.py +137 -0
  16. examples/streamline_wakes/run.py +138 -0
  17. examples/tab_file/run.py +142 -0
  18. examples/timelines/run.py +267 -0
  19. examples/timeseries/run.py +190 -0
  20. examples/timeseries_slurm/run.py +185 -0
  21. examples/wind_rose/run.py +141 -0
  22. examples/windio/run.py +29 -0
  23. examples/yawed_wake/run.py +196 -0
  24. foxes/__init__.py +4 -8
  25. foxes/algorithms/__init__.py +1 -1
  26. foxes/algorithms/downwind/downwind.py +247 -111
  27. foxes/algorithms/downwind/models/farm_wakes_calc.py +12 -7
  28. foxes/algorithms/downwind/models/init_farm_data.py +2 -2
  29. foxes/algorithms/downwind/models/point_wakes_calc.py +6 -7
  30. foxes/algorithms/downwind/models/reorder_farm_output.py +1 -2
  31. foxes/algorithms/downwind/models/set_amb_farm_results.py +1 -1
  32. foxes/algorithms/downwind/models/set_amb_point_results.py +5 -3
  33. foxes/algorithms/iterative/iterative.py +74 -34
  34. foxes/algorithms/iterative/models/farm_wakes_calc.py +12 -7
  35. foxes/algorithms/iterative/models/urelax.py +3 -3
  36. foxes/algorithms/sequential/models/plugin.py +5 -5
  37. foxes/algorithms/sequential/models/seq_state.py +1 -1
  38. foxes/algorithms/sequential/sequential.py +126 -255
  39. foxes/constants.py +22 -7
  40. foxes/core/__init__.py +1 -0
  41. foxes/core/algorithm.py +632 -147
  42. foxes/core/data.py +252 -20
  43. foxes/core/data_calc_model.py +15 -291
  44. foxes/core/engine.py +640 -0
  45. foxes/core/farm_controller.py +38 -10
  46. foxes/core/farm_data_model.py +16 -1
  47. foxes/core/ground_model.py +2 -2
  48. foxes/core/model.py +249 -182
  49. foxes/core/partial_wakes_model.py +1 -1
  50. foxes/core/point_data_model.py +17 -2
  51. foxes/core/rotor_model.py +27 -21
  52. foxes/core/states.py +17 -1
  53. foxes/core/turbine_type.py +28 -0
  54. foxes/core/wake_frame.py +30 -34
  55. foxes/core/wake_model.py +5 -5
  56. foxes/core/wake_superposition.py +1 -1
  57. foxes/data/windio/windio_5turbines_timeseries.yaml +31 -15
  58. foxes/engines/__init__.py +17 -0
  59. foxes/engines/dask.py +982 -0
  60. foxes/engines/default.py +75 -0
  61. foxes/engines/futures.py +72 -0
  62. foxes/engines/mpi.py +38 -0
  63. foxes/engines/multiprocess.py +71 -0
  64. foxes/engines/numpy.py +167 -0
  65. foxes/engines/pool.py +249 -0
  66. foxes/engines/ray.py +79 -0
  67. foxes/engines/single.py +141 -0
  68. foxes/input/farm_layout/__init__.py +1 -0
  69. foxes/input/farm_layout/from_csv.py +4 -0
  70. foxes/input/farm_layout/from_json.py +2 -2
  71. foxes/input/farm_layout/grid.py +2 -2
  72. foxes/input/farm_layout/ring.py +65 -0
  73. foxes/input/farm_layout/row.py +2 -2
  74. foxes/input/states/__init__.py +7 -0
  75. foxes/input/states/create/random_abl_states.py +1 -1
  76. foxes/input/states/field_data_nc.py +158 -33
  77. foxes/input/states/multi_height.py +128 -14
  78. foxes/input/states/one_point_flow.py +577 -0
  79. foxes/input/states/scan_ws.py +74 -3
  80. foxes/input/states/single.py +1 -1
  81. foxes/input/states/slice_data_nc.py +681 -0
  82. foxes/input/states/states_table.py +204 -35
  83. foxes/input/windio/__init__.py +2 -2
  84. foxes/input/windio/get_states.py +44 -23
  85. foxes/input/windio/read_attributes.py +48 -17
  86. foxes/input/windio/read_farm.py +116 -102
  87. foxes/input/windio/read_fields.py +16 -6
  88. foxes/input/windio/read_outputs.py +71 -24
  89. foxes/input/windio/runner.py +31 -17
  90. foxes/input/windio/windio.py +41 -23
  91. foxes/models/farm_models/turbine2farm.py +1 -1
  92. foxes/models/ground_models/wake_mirror.py +10 -6
  93. foxes/models/model_book.py +58 -20
  94. foxes/models/partial_wakes/axiwake.py +3 -3
  95. foxes/models/partial_wakes/rotor_points.py +3 -3
  96. foxes/models/partial_wakes/top_hat.py +2 -2
  97. foxes/models/point_models/set_uniform_data.py +1 -1
  98. foxes/models/point_models/tke2ti.py +1 -1
  99. foxes/models/point_models/wake_deltas.py +1 -1
  100. foxes/models/rotor_models/centre.py +4 -0
  101. foxes/models/rotor_models/grid.py +24 -25
  102. foxes/models/rotor_models/levels.py +4 -5
  103. foxes/models/turbine_models/calculator.py +4 -6
  104. foxes/models/turbine_models/kTI_model.py +22 -6
  105. foxes/models/turbine_models/lookup_table.py +30 -4
  106. foxes/models/turbine_models/rotor_centre_calc.py +4 -3
  107. foxes/models/turbine_models/set_farm_vars.py +103 -34
  108. foxes/models/turbine_types/PCt_file.py +27 -3
  109. foxes/models/turbine_types/PCt_from_two.py +27 -3
  110. foxes/models/turbine_types/TBL_file.py +80 -0
  111. foxes/models/turbine_types/__init__.py +2 -0
  112. foxes/models/turbine_types/lookup.py +316 -0
  113. foxes/models/turbine_types/null_type.py +51 -1
  114. foxes/models/turbine_types/wsrho2PCt_from_two.py +29 -5
  115. foxes/models/turbine_types/wsti2PCt_from_two.py +31 -7
  116. foxes/models/vertical_profiles/__init__.py +1 -1
  117. foxes/models/vertical_profiles/data_profile.py +1 -1
  118. foxes/models/wake_frames/__init__.py +1 -0
  119. foxes/models/wake_frames/dynamic_wakes.py +424 -0
  120. foxes/models/wake_frames/farm_order.py +25 -5
  121. foxes/models/wake_frames/rotor_wd.py +6 -4
  122. foxes/models/wake_frames/seq_dynamic_wakes.py +61 -74
  123. foxes/models/wake_frames/streamlines.py +21 -22
  124. foxes/models/wake_frames/timelines.py +330 -129
  125. foxes/models/wake_frames/yawed_wakes.py +7 -4
  126. foxes/models/wake_models/dist_sliced.py +2 -4
  127. foxes/models/wake_models/induction/rankine_half_body.py +5 -5
  128. foxes/models/wake_models/induction/rathmann.py +78 -24
  129. foxes/models/wake_models/induction/self_similar.py +78 -28
  130. foxes/models/wake_models/induction/vortex_sheet.py +86 -48
  131. foxes/models/wake_models/ti/crespo_hernandez.py +6 -4
  132. foxes/models/wake_models/ti/iec_ti.py +40 -21
  133. foxes/models/wake_models/top_hat.py +1 -1
  134. foxes/models/wake_models/wind/bastankhah14.py +8 -6
  135. foxes/models/wake_models/wind/bastankhah16.py +17 -16
  136. foxes/models/wake_models/wind/jensen.py +4 -3
  137. foxes/models/wake_models/wind/turbopark.py +16 -13
  138. foxes/models/wake_superpositions/ti_linear.py +1 -1
  139. foxes/models/wake_superpositions/ti_max.py +1 -1
  140. foxes/models/wake_superpositions/ti_pow.py +1 -1
  141. foxes/models/wake_superpositions/ti_quadratic.py +1 -1
  142. foxes/models/wake_superpositions/ws_linear.py +8 -7
  143. foxes/models/wake_superpositions/ws_max.py +8 -7
  144. foxes/models/wake_superpositions/ws_pow.py +8 -7
  145. foxes/models/wake_superpositions/ws_product.py +5 -5
  146. foxes/models/wake_superpositions/ws_quadratic.py +8 -7
  147. foxes/output/__init__.py +4 -1
  148. foxes/output/farm_layout.py +16 -12
  149. foxes/output/farm_results_eval.py +1 -1
  150. foxes/output/flow_plots_2d/__init__.py +0 -1
  151. foxes/output/flow_plots_2d/flow_plots.py +70 -30
  152. foxes/output/grids.py +92 -22
  153. foxes/output/results_writer.py +2 -2
  154. foxes/output/rose_plot.py +3 -3
  155. foxes/output/seq_plugins/__init__.py +2 -0
  156. foxes/output/{flow_plots_2d → seq_plugins}/seq_flow_ani_plugin.py +64 -22
  157. foxes/output/seq_plugins/seq_wake_debug_plugin.py +145 -0
  158. foxes/output/slice_data.py +131 -111
  159. foxes/output/state_turbine_map.py +19 -14
  160. foxes/output/state_turbine_table.py +19 -19
  161. foxes/utils/__init__.py +1 -1
  162. foxes/utils/abl/neutral.py +2 -2
  163. foxes/utils/abl/stable.py +2 -2
  164. foxes/utils/abl/unstable.py +2 -2
  165. foxes/utils/data_book.py +1 -1
  166. foxes/utils/dev_utils.py +42 -0
  167. foxes/utils/dict.py +24 -1
  168. foxes/utils/exec_python.py +1 -1
  169. foxes/utils/factory.py +176 -53
  170. foxes/utils/geom2d/circle.py +1 -1
  171. foxes/utils/geom2d/polygon.py +1 -1
  172. foxes/utils/geopandas_utils.py +2 -2
  173. foxes/utils/load.py +2 -2
  174. foxes/utils/pandas_helpers.py +3 -2
  175. foxes/utils/wind_dir.py +0 -2
  176. foxes/utils/xarray_utils.py +24 -14
  177. foxes/variables.py +39 -2
  178. {foxes-0.8.2.dist-info → foxes-1.1.0.2.dist-info}/METADATA +75 -33
  179. foxes-1.1.0.2.dist-info/RECORD +309 -0
  180. {foxes-0.8.2.dist-info → foxes-1.1.0.2.dist-info}/WHEEL +1 -1
  181. foxes-1.1.0.2.dist-info/top_level.txt +4 -0
  182. tests/0_consistency/iterative/test_iterative.py +92 -0
  183. tests/0_consistency/partial_wakes/test_partial_wakes.py +90 -0
  184. tests/1_verification/flappy_0_6/PCt_files/flappy/run.py +85 -0
  185. tests/1_verification/flappy_0_6/PCt_files/test_PCt_files.py +103 -0
  186. tests/1_verification/flappy_0_6/abl_states/flappy/run.py +85 -0
  187. tests/1_verification/flappy_0_6/abl_states/test_abl_states.py +87 -0
  188. tests/1_verification/flappy_0_6/partial_top_hat/flappy/run.py +82 -0
  189. tests/1_verification/flappy_0_6/partial_top_hat/test_partial_top_hat.py +82 -0
  190. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/flappy/run.py +92 -0
  191. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/test_row_Jensen_linear_centre.py +93 -0
  192. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/flappy/run.py +92 -0
  193. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/test_row_Jensen_linear_tophat.py +96 -0
  194. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/flappy/run.py +94 -0
  195. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/test_row_Jensen_linear_tophat_IECTI_2005.py +122 -0
  196. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/flappy/run.py +94 -0
  197. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/test_row_Jensen_linear_tophat_IECTI_2019.py +122 -0
  198. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/flappy/run.py +92 -0
  199. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/test_row_Jensen_quadratic_centre.py +93 -0
  200. tests/1_verification/flappy_0_6_2/grid_rotors/flappy/run.py +85 -0
  201. tests/1_verification/flappy_0_6_2/grid_rotors/test_grid_rotors.py +130 -0
  202. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/flappy/run.py +96 -0
  203. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/test_row_Bastankhah_Crespo.py +116 -0
  204. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/flappy/run.py +93 -0
  205. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/test_row_Bastankhah_linear_centre.py +99 -0
  206. tests/3_examples/test_examples.py +34 -0
  207. foxes/VERSION +0 -1
  208. foxes/output/flow_plots_2d.py +0 -0
  209. foxes/utils/geopandas_helpers.py +0 -294
  210. foxes/utils/runners/__init__.py +0 -1
  211. foxes/utils/runners/runners.py +0 -280
  212. foxes-0.8.2.dist-info/RECORD +0 -247
  213. foxes-0.8.2.dist-info/top_level.txt +0 -1
  214. foxes-0.8.2.dist-info/zip-safe +0 -1
  215. {foxes-0.8.2.dist-info → foxes-1.1.0.2.dist-info}/LICENSE +0 -0
foxes/core/data.py CHANGED
@@ -25,7 +25,14 @@ class Data(Dict):
 
     """
 
-    def __init__(self, data, dims, loop_dims, name="data"):
+    def __init__(
+        self,
+        data,
+        dims,
+        loop_dims,
+        states_i0=None,
+        name="data",
+    ):
         """
         Constructor.
 
@@ -39,6 +46,8 @@ class Data(Dict):
         loop_dims: array_like of str
             List of the loop dimensions during xarray's
             `apply_ufunc` calculations
+        states_i0: int, optional
+            The index of the first state
         name: str
             The data container name
 
@@ -49,6 +58,8 @@ class Data(Dict):
         self.dims = dims
         self.loop_dims = loop_dims
 
+        self.__states_i0 = states_i0
+
         self.sizes = {}
         for v, d in data.items():
             self._run_entry_checks(v, d, dims[v])
@@ -81,7 +92,7 @@ class Data(Dict):
         """
         return self.sizes[FC.TURBINE] if FC.TURBINE in self.sizes else None
 
-    def states_i0(self, counter=False, algo=None):
+    def states_i0(self, counter=False):
         """
         Get the state counter for first state in chunk
 
@@ -89,8 +100,6 @@ class Data(Dict):
         ----------
         counter: bool
             Return the state counter instead of the index
-        algo: foxes.core.Algorithm, optional
-            The algorithm, required for state counter
 
         Returns
         -------
@@ -102,9 +111,9 @@ class Data(Dict):
         if FC.STATE not in self:
             return None
         elif counter:
-            if algo is None:
-                raise KeyError(f"{self.name}: Missing algo for deducing state counter")
-            return np.argwhere(algo.states.index() == self[FC.STATE][0])[0][0]
+            if self.__states_i0 is None:
+                raise KeyError(f"Data '{self.name}': states_i0 requested but not set")
+            return self.__states_i0
         else:
             return self[FC.STATE][0]
 
@@ -129,6 +138,15 @@ class Data(Dict):
 
             self.dims[FV.TXYH] = tuple(list(dims[FV.X]) + [FC.XYH])
 
+        allc = set()
+        for dms in self.dims.values():
+            if dms is not None:
+                allc.update(dms)
+        allc = allc.difference(set(data.keys()))
+        for c in allc.intersection(self.sizes.keys()):
+            data[c] = np.arange(self.sizes[c])
+            dims[c] = (c,)
+
     def _run_entry_checks(self, name, data, dims):
         """Run entry checks on new data"""
         # remove axes of size 1, added by dask for extra loop dimensions:
@@ -165,12 +183,14 @@ class Data(Dict):
         self._run_entry_checks(name, data, dims)
         self._auto_update()
 
-    def get_slice(self, s, dim_map={}, name=None, keep=True):
+    def get_slice(self, variables, s, dim_map={}, name=None):
         """
         Get a slice of data.
 
         Parameters
         ----------
+        variables: list of str
+            The variable list that corresponds to s
         s: slice
             The slice
         dim_map: dict
@@ -178,9 +198,6 @@ class Data(Dict):
             If not found, same dimensions are assumed.
         name: str, optional
             The name of the data object
-        keep: bool
-            Keep non-matching fields as they are, else
-            throw them out
 
         Returns
         -------
@@ -188,20 +205,103 @@ class Data(Dict):
             The new data object, containing slices
 
         """
+        if not isinstance(variables, (list, tuple, np.ndarray)):
+            variables = [variables]
+        if not isinstance(s, (list, tuple, np.ndarray)):
+            s = [s]
+
         data = {}
         dims = {}
         for v in self.keys():
-            try:
-                d = self.dims[v]
-                data[v] = self[v][s]
-                dims[v] = dim_map.get(d, d)
-            except IndexError:
-                if keep:
-                    data[v] = self[v]
-                    dims[v] = self.dims[v]
+            d = self.dims[v]
+            if d is not None:
+                hs = tuple(
+                    [s[variables.index(w)] if w in variables else np.s_[:] for w in d]
+                )
+                data[v] = self[v][hs]
+                dims[v] = (
+                    tuple([dim_map.get(dd, dd) for dd in d]) if len(dim_map) else d
+                )
         if name is None:
             name = self.name
-        return type(self)(data, dims, loop_dims=self.loop_dims, name=name)
+        if FC.STATE in variables and self.__states_i0 is not None:
+            i0 = self.states_i0(counter=True)
+            a = s[variables.index(FC.STATE)]
+            sts = np.arange(i0, i0 + self.n_states)[a]
+            if len(sts) == 1:
+                states_i0 = sts[0]
+            elif np.all(sts == np.arange(sts[0], sts[0] + len(sts))):
+                states_i0 = sts[0]
+            else:
+                raise ValueError(
+                    f"Cannot determine states_i0 for states slices {a}, leading to selection {list(sts)}"
+                )
+        else:
+            states_i0 = None
+        return type(self)(
+            data, dims, loop_dims=self.loop_dims, name=name, states_i0=states_i0
+        )
+
+    @classmethod
+    def from_dataset(cls, ds, *args, callback=None, s_states=None, copy=True, **kwargs):
+        """
+        Create Data object from a dataset
+
+        Parameters
+        ----------
+        ds: xarray.Dataset
+            The dataset
+        args: tuple, optional
+            Additional parameters for the constructor
+        callback: Function, optional
+            Function f(data, dims) that manipulates
+            the data and dims dicts before construction
+        s_states: slice, optional
+            Slice object for states
+        copy: bool
+            Flag for copying data
+        kwargs: dict, optional
+            Additional parameters for the constructor
+
+        Returns
+        -------
+        data: Data
+            The data object
+
+        """
+        data = {}
+        dims = {}
+
+        for c, d in ds.coords.items():
+            if c == FC.STATE:
+                s = np.s_[:] if s_states is None else s_states
+                data[c] = d.to_numpy()[s].copy() if copy else d.to_numpy()[s]
+            else:
+                data[c] = d.to_numpy().copy() if copy else d.to_numpy()
+            dims[c] = d.dims
+
+        n_states = None
+        for v, d in ds.data_vars.items():
+            if FC.STATE in d.dims:
+                if d.dims[0] != FC.STATE:
+                    raise ValueError(
+                        f"Expecting coordinate '{FC.STATE}' at position 0 for data variable '{v}', got {d.dims}"
+                    )
+                n_states = len(d.to_numpy())
+                s = np.s_[:] if s_states is None else s_states
+                data[v] = d.to_numpy()[s].copy() if copy else d.to_numpy()[s]
+            else:
+                data[v] = d.to_numpy().copy() if copy else d.to_numpy()
+            dims[v] = d.dims
+
+        if callback is not None:
+            callback(data, dims)
+
+        if FC.STATE not in data and s_states is not None and n_states is not None:
+            data[FC.STATE] = np.arange(n_states)[s_states]
+            dims[FC.STATE] = (FC.STATE,)
+
+        return cls(*args, data=data, dims=dims, **kwargs)
 
 
 class MData(Data):
@@ -284,6 +384,46 @@ class FData(Data):
                     f"FData '{self.name}': Missing '{x}' in sizes, got {sorted(list(self.sizes.keys()))}"
                 )
 
+    @classmethod
+    def from_dataset(cls, ds, *args, mdata=None, callback=None, **kwargs):
+        """
+        Create Data object from a dataset
+
+        Parameters
+        ----------
+        ds: xarray.Dataset
+            The dataset
+        args: tuple, optional
+            Additional parameters for the constructor
+        mdata: MData, optional
+            The mdata object
+        callback: Function, optional
+            Function f(data, dims) that manipulates
+            the data and dims dicts before construction
+        kwargs: dict, optional
+            Additional parameters for the constructor
+
+        Returns
+        -------
+        data: Data
+            The data object
+
+        """
+        if mdata is None:
+            return super().from_dataset(ds, *args, callback=callback, **kwargs)
+        else:

+            def cb(data, dims):
+                if FC.STATE not in data:
+                    data[FC.STATE] = mdata[FC.STATE]
+                    dims[FC.STATE] = mdata.dims[FC.STATE]
+                data[FV.WEIGHT] = mdata[FV.WEIGHT]
+                dims[FV.WEIGHT] = mdata.dims[FV.WEIGHT]
+                if callback is not None:
+                    callback(data, dims)
+
+            return super().from_dataset(ds, *args, callback=cb, **kwargs)
+
 
 
 class TData(Data):
@@ -427,6 +567,21 @@ class TData(Data):
         """
         return np.einsum("stp...,p->st...", self[variable], self[FC.TWEIGHTS])
 
+    def targets_i0(self):
+        """
+        Get the target counter for first target in chunk
+
+        Returns
+        -------
+        int:
+            The target index for first target in chunk
+
+        """
+        if FC.TARGET not in self:
+            return None
+        else:
+            return self[FC.TARGET][0]
+
     @classmethod
     def from_points(
         cls,
@@ -515,3 +670,80 @@ class TData(Data):
         data[FC.TWEIGHTS] = tweights
         dims[FC.TWEIGHTS] = (FC.TPOINT,)
         return cls(data, dims, [FC.STATE], name=name, **kwargs)
+
+    @classmethod
+    def from_dataset(
+        cls,
+        ds,
+        *args,
+        s_targets=None,
+        mdata=None,
+        callback=None,
+        **kwargs,
+    ):
+        """
+        Create Data object from a dataset
+
+        Parameters
+        ----------
+        ds: xarray.Dataset
+            The dataset
+        args: tuple, optional
+            Additional parameters for the constructor
+        s_targets: slice, optional
+            Slice object for targets
+        mdata: MData, optional
+            The mdata object
+        callback: Function, optional
+            Function f(data, dims) that manipulates
+            the data and dims dicts before construction
+        kwargs: dict, optional
+            Additional parameters for the constructor
+
+        Returns
+        -------
+        data: Data
+            The data object
+
+        """
+        if mdata is None:
+            cb0 = callback
+        else:

+            def cb_mdata(data, dims):
+                if FC.STATE not in data:
+                    data[FC.STATE] = mdata[FC.STATE]
+                    dims[FC.STATE] = mdata.dims[FC.STATE]
+                if callback is not None:
+                    callback(data, dims)
+
+            cb0 = cb_mdata
+
+        if s_targets is None:
+            cb1 = cb0
+        else:

+            def cb_targets(data, dims):
+                if FC.TARGET not in data:
+                    data[FC.TARGET] = np.arange(ds.sizes[FC.TARGET])
+                    dims[FC.TARGET] = (FC.TARGET,)
+                for v, d in data.items():
+                    if FC.TARGET in dims[v]:
+                        if dims[v] == (FC.TARGET,):
+                            data[v] = d[s_targets].copy()
+                        elif len(dims[v]) < 3 or dims[v][:3] != (
+                            FC.STATE,
+                            FC.TARGET,
+                            FC.TPOINT,
+                        ):
+                            raise ValueError(
+                                f"Expecting coordinates '{(FC.STATE, FC.TARGET, FC.TPOINT)}' at positions 0-2 for data variable '{v}', got {dims[v]}"
+                            )
+                        else:
+                            data[v] = d[:, s_targets]
+                if cb0 is not None:
+                    cb0(data, dims)
+
+            cb1 = cb_targets
+
+        return super().from_dataset(ds, *args, callback=cb1, **kwargs)
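
Note for downstream users: the `Data.get_slice` signature changed, so the slice must now be paired with the names of the dimensions it applies to, and the chunk offset is carried by the new `states_i0` constructor argument (or the `from_dataset` classmethods) instead of an `algo` argument. A minimal sketch of the new call pattern, inferred from this diff alone; the example arrays and values below are invented for illustration and not taken from the foxes documentation:

```python
import numpy as np

import foxes.constants as FC
from foxes.core.data import Data

# illustrative chunk with 3 states and 2 turbines
data = {"WS": np.array([[8.0, 7.5], [9.0, 8.2], [6.5, 6.1]])}
dims = {"WS": (FC.STATE, FC.TURBINE)}
d = Data(data, dims, loop_dims=[FC.STATE], states_i0=0)

# foxes 0.8.2:  d.get_slice(np.s_[:2])
# foxes 1.1.x:  the slice is accompanied by the dimension names it refers to
sub = d.get_slice([FC.STATE], np.s_[:2])

# the sliced object keeps track of its own chunk offset,
# so no algorithm object is needed for the state counter anymore
print(sub.n_states, sub.states_i0(counter=True))
```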
foxes/core/data_calc_model.py CHANGED
@@ -1,19 +1,11 @@
-import numpy as np
-import xarray as xr
 from abc import abstractmethod
-from dask.distributed import progress
-from dask.diagnostics import ProgressBar
 
 from .model import Model
-from .data import MData, FData, TData
-from foxes.utils.runners import DaskRunner
-import foxes.constants as FC
-import foxes.variables as FV
 
 
 class DataCalcModel(Model):
     """
-    Abstract base class for models with
+    Abstract base class for models
     that run calculation on xarray Dataset
     data.
 
@@ -28,9 +20,22 @@ class DataCalcModel(Model):
 
     """
 
+    @abstractmethod
+    def output_coords(self):
+        """
+        Gets the coordinates of all output arrays
+
+        Returns
+        -------
+        dims: tuple of str
+            The coordinates of all output arrays
+
+        """
+        pass
+
     @abstractmethod
     def calculate(self, algo, *data, **parameters):
-        """ "
+        """
         The main model calculation.
 
         This function is executed on a single chunk of data,
@@ -53,284 +58,3 @@ class DataCalcModel(Model):
 
         """
         pass
-
-    def _wrap_calc(
-        self,
-        *ldata,
-        algo,
-        dvars,
-        lvars,
-        ldims,
-        evars,
-        edims,
-        edata,
-        loop_dims,
-        out_vars,
-        out_dims,
-        calc_pars,
-        init_vars,
-    ):
-        """
-        Wrapper that mitigates between apply_ufunc and `calculate`.
-        """
-        n_prev = len(init_vars)
-        if n_prev:
-            prev = ldata[:n_prev]
-            ldata = ldata[n_prev:]
-
-        # reconstruct original data:
-        data = []
-        for i, hvars in enumerate(dvars):
-            v2l = {v: lvars.index(v) for v in hvars if v in lvars}
-            v2e = {v: evars.index(v) for v in hvars if v in evars}
-
-            hdata = {v: ldata[v2l[v]] if v in v2l else edata[v2e[v]] for v in hvars}
-            hdims = {v: ldims[v2l[v]] if v in v2l else edims[v2e[v]] for v in hvars}
-
-            if i == 0:
-                data.append(MData(data=hdata, dims=hdims, loop_dims=loop_dims))
-            elif i == 1:
-                data.append(FData(data=hdata, dims=hdims, loop_dims=loop_dims))
-            elif i == 2:
-                data.append(TData(data=hdata, dims=hdims, loop_dims=loop_dims))
-            else:
-                raise NotImplementedError(
-                    f"Not more than 3 data sets implemented, found {len(dvars)}"
-                )
-
-            del hdata, hdims, v2l, v2e
-
-        # deduce output shape:
-        oshape = []
-        for li, l in enumerate(out_dims):
-            for i, dims in enumerate(ldims):
-                if l in dims:
-                    oshape.append(ldata[i].shape[dims.index(l)])
-                    break
-            if len(oshape) != li + 1:
-                raise ValueError(f"Model '{self.name}': Failed to find loop dimension")
-
-        # add zero output data arrays:
-        odims = {v: tuple(out_dims) for v in out_vars}
-        odata = {
-            v: (
-                np.full(oshape, np.nan, dtype=FC.DTYPE)
-                if v not in init_vars
-                else prev[init_vars.index(v)].copy()
-            )
-            for v in out_vars
-            if v not in data[-1]
-        }
-
-        if len(data) == 1:
-            data.append(FData(odata, odims, loop_dims))
-        else:
-            odata.update(data[-1])
-            odims.update(data[-1].dims)
-            if len(data) == 2:
-                data[-1] = FData(odata, odims, loop_dims)
-            else:
-                data[-1] = TData(odata, odims, loop_dims)
-        del odims, odata
-
-        # link chunk state indices from mdata to fdata and tdata:
-        if FC.STATE in data[0]:
-            for d in data[1:]:
-                d[FC.STATE] = data[0][FC.STATE]
-
-        # link weights from mdata to fdata:
-        if FV.WEIGHT in data[0]:
-            data[1][FV.WEIGHT] = data[0][FV.WEIGHT]
-            data[1].dims[FV.WEIGHT] = data[0].dims[FV.WEIGHT]
-
-        # run model calculation:
-        self.ensure_variables(algo, *data)
-        results = self.calculate(algo, *data, **calc_pars)
-
-        # replace missing results by first input data with matching shape:
-        missing = set(out_vars).difference(results.keys())
-        if len(missing):
-            found = set()
-            for v in missing:
-                for dta in data:
-                    if v in dta and dta[v].shape == tuple(oshape):
-                        results[v] = dta[v]
-                        found.add(v)
-                        break
-            missing -= found
-            if len(missing):
-                raise ValueError(
-                    f"Model '{self.name}': Missing results {list(missing)}, expected shape {oshape}"
-                )
-        del data
-
-        # create output:
-        n_vars = len(out_vars)
-        data = np.zeros(oshape + [n_vars], dtype=FC.DTYPE)
-        for v in out_vars:
-            data[..., out_vars.index(v)] = results[v]
-
-        return data
-
-    def run_calculation(
-        self,
-        algo,
-        *data,
-        out_vars,
-        loop_dims,
-        out_core_vars,
-        initial_results=None,
-        sel=None,
-        isel=None,
-        **calc_pars,
-    ):
-        """
-        Starts the model calculation in parallel, via
-        xarray's `apply_ufunc`.
-
-        Typically this function is called by algorithms.
-
-        Parameters
-        ----------
-        algo: foxes.core.Algorithm
-            The calculation algorithm
-        data: tuple of xarray.Dataset
-            The input data
-        out_vars: list of str
-            The calculation output variables
-        loop_dims: array_like of str
-            List of the loop dimensions during xarray's
-            `apply_ufunc` calculations
-        out_core_vars: list of str
-            The core dimensions of the output data, use
-            `FC.VARS` for variables dimension (required)
-        initial_results: xarray.Dataset, optional
-            Initial results
-        sel: dict, optional
-            Selection of loop_dim variable subset values
-        isel: dict, optional
-            Selection of loop_dim variable subset index values
-        calc_pars: dict, optional
-            Additional arguments for the `calculate` function
-
-        Returns
-        -------
-        results: xarray.Dataset
-            The calculation results
-
-        """
-        # check:
-        if not self.initialized:
-            raise ValueError(
-                f"DataCalcModel '{self.name}': run_calculation called for uninitialized model"
-            )
-
-        # prepare:
-        loopd = set(loop_dims)
-
-        # extract loop-var dependent and independent data:
-        ldata = []
-        lvars = []
-        ldims = []
-        edata = []
-        evars = []
-        edims = []
-        dvars = []
-        ivars = []
-        idims = []
-        if initial_results is not None:
-            ds = initial_results
-            hvarsl = [v for v, d in ds.items() if len(loopd.intersection(d.dims))]
-            ldata += [ds[v] for v in hvarsl]
-            idims += [ds[v].dims for v in hvarsl]
-            ivars += hvarsl
-
-        for ds in data:
-            hvarsl = [v for v, d in ds.items() if len(loopd.intersection(d.dims))]
-            ldata += [ds[v] for v in hvarsl]
-            ldims += [ds[v].dims for v in hvarsl]
-            lvars += hvarsl
-
-            hvarse = [v for v in ds.keys() if v not in hvarsl]
-            edata += [ds[v].values for v in hvarse]
-            edims += [ds[v].dims for v in hvarse]
-            evars += hvarse
-
-            for c, d in ds.coords.items():
-                if c in loopd:
-                    ldata.append(xr.DataArray(data=d.values, coords={c: d}, dims=[c]))
-                    ldims.append((c,))
-                    lvars.append(c)
-                else:
-                    edata.append(d.values)
-                    edims.append((c,))
-                    evars.append(c)
-
-            dvars.append(list(ds.keys()) + list(ds.coords.keys()))
-
-        # subset selection:
-        if sel is not None:
-            nldata = []
-            for ds in ldata:
-                s = {k: v for k, v in sel.items() if k in ds.coords}
-                if len(s):
-                    nldata.append(ds.sel(s))
-            ldata = nldata
-            del nldata
-        if isel is not None:
-            nldata = []
-            for ds in ldata:
-                s = {k: v for k, v in isel.items() if k in ds.coords}
-                if len(s):
-                    nldata.append(ds.isel(s))
-            ldata = nldata
-            del nldata
-
-        # setup dask options:
-        dargs = dict(output_sizes={FC.VARS: len(out_vars)})
-        if FC.TURBINE in loopd and FC.TURBINE not in ldims.values():
-            dargs["output_sizes"][FC.TURBINE] = algo.n_turbines
-        if FC.VARS not in out_core_vars:
-            raise ValueError(
-                f"Model '{self.name}': Expecting '{FC.VARS}' in out_core_vars, got {out_core_vars}"
-            )
-
-        # setup arguments for wrapper function:
-        out_dims = loop_dims + list(set(out_core_vars).difference([FC.VARS]))
-        wargs = dict(
-            algo=algo,
-            dvars=dvars,
-            lvars=lvars,
-            ldims=ldims,
-            evars=evars,
-            edims=edims,
-            edata=edata,
-            loop_dims=loop_dims,
-            out_vars=out_vars,
-            out_dims=out_dims,
-            calc_pars=calc_pars,
-            init_vars=ivars,
-        )
-
-        # run parallel computation:
-        iidims = [[c for c in d if c not in loopd] for d in idims]
-        icdims = [[c for c in d if c not in loopd] for d in ldims]
-        results = xr.apply_ufunc(
-            self._wrap_calc,
-            *ldata,
-            input_core_dims=iidims + icdims,
-            output_core_dims=[out_core_vars],
-            output_dtypes=[FC.DTYPE],
-            dask="parallelized",
-            dask_gufunc_kwargs=dargs,
-            kwargs=wargs,
-        )
-
-        # reorganize results Dataset:
-        results = results.assign_coords({FC.VARS: out_vars}).to_dataset(dim=FC.VARS)
-
-        if DaskRunner.is_distributed() and len(ProgressBar.active):
-            progress(results.persist())
-
-        # update data by calculation results:
-        return results.compute()
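
For code that subclasses DataCalcModel, the practical change visible in this file is that the dask/apply_ufunc plumbing (`_wrap_calc`, `run_calculation`) no longer lives on the model class, and a new abstract `output_coords` method must be provided alongside `calculate`. A hypothetical minimal subclass, for illustration only; the class name, the `calculate` body, and the choice of output variable are invented here and not taken from the package:

```python
import numpy as np

import foxes.constants as FC
import foxes.variables as FV
from foxes.core.data_calc_model import DataCalcModel


class ConstantWSModel(DataCalcModel):
    """Illustrative model: writes a constant wind speed per state and turbine."""

    def output_coords(self):
        # required by the new abstract method: coordinates of all output arrays
        return (FC.STATE, FC.TURBINE)

    def calculate(self, algo, mdata, fdata, ws=8.0):
        # runs on a single chunk of data; n_states/n_turbines are the Data
        # container properties shown in the foxes/core/data.py diff above
        out = np.full((fdata.n_states, fdata.n_turbines), ws)
        return {FV.WS: out}
```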