foxes 0.8.2__py3-none-any.whl → 1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of foxes might be problematic.

Files changed (174)
  1. docs/source/conf.py +353 -0
  2. examples/abl_states/run.py +160 -0
  3. examples/compare_rotors_pwakes/run.py +217 -0
  4. examples/compare_wakes/run.py +241 -0
  5. examples/dyn_wakes/run.py +311 -0
  6. examples/field_data_nc/run.py +121 -0
  7. examples/induction_RHB/run.py +201 -0
  8. examples/multi_height/run.py +113 -0
  9. examples/power_mask/run.py +249 -0
  10. examples/random_timeseries/run.py +210 -0
  11. examples/scan_row/run.py +193 -0
  12. examples/sector_management/run.py +162 -0
  13. examples/sequential/run.py +209 -0
  14. examples/single_state/run.py +201 -0
  15. examples/states_lookup_table/run.py +137 -0
  16. examples/streamline_wakes/run.py +138 -0
  17. examples/tab_file/run.py +142 -0
  18. examples/timelines/run.py +267 -0
  19. examples/timeseries/run.py +183 -0
  20. examples/timeseries_slurm/run.py +185 -0
  21. examples/wind_rose/run.py +141 -0
  22. examples/windio/run.py +29 -0
  23. examples/yawed_wake/run.py +196 -0
  24. foxes/__init__.py +4 -8
  25. foxes/algorithms/__init__.py +1 -1
  26. foxes/algorithms/downwind/downwind.py +232 -101
  27. foxes/algorithms/downwind/models/farm_wakes_calc.py +11 -6
  28. foxes/algorithms/downwind/models/init_farm_data.py +1 -1
  29. foxes/algorithms/downwind/models/point_wakes_calc.py +5 -6
  30. foxes/algorithms/downwind/models/reorder_farm_output.py +0 -1
  31. foxes/algorithms/downwind/models/set_amb_point_results.py +4 -2
  32. foxes/algorithms/iterative/iterative.py +73 -33
  33. foxes/algorithms/iterative/models/farm_wakes_calc.py +11 -6
  34. foxes/algorithms/sequential/models/plugin.py +1 -1
  35. foxes/algorithms/sequential/sequential.py +126 -255
  36. foxes/constants.py +17 -2
  37. foxes/core/__init__.py +1 -0
  38. foxes/core/algorithm.py +631 -146
  39. foxes/core/data.py +252 -20
  40. foxes/core/data_calc_model.py +13 -289
  41. foxes/core/engine.py +630 -0
  42. foxes/core/farm_controller.py +37 -9
  43. foxes/core/farm_data_model.py +15 -0
  44. foxes/core/model.py +133 -80
  45. foxes/core/point_data_model.py +15 -0
  46. foxes/core/rotor_model.py +27 -21
  47. foxes/core/states.py +16 -0
  48. foxes/core/turbine_type.py +28 -0
  49. foxes/core/wake_frame.py +22 -4
  50. foxes/core/wake_model.py +2 -3
  51. foxes/data/windio/windio_5turbines_timeseries.yaml +23 -1
  52. foxes/engines/__init__.py +16 -0
  53. foxes/engines/dask.py +975 -0
  54. foxes/engines/default.py +75 -0
  55. foxes/engines/futures.py +72 -0
  56. foxes/engines/mpi.py +38 -0
  57. foxes/engines/multiprocess.py +74 -0
  58. foxes/engines/numpy.py +185 -0
  59. foxes/engines/pool.py +263 -0
  60. foxes/engines/single.py +139 -0
  61. foxes/input/farm_layout/__init__.py +1 -0
  62. foxes/input/farm_layout/from_csv.py +4 -0
  63. foxes/input/farm_layout/from_json.py +1 -1
  64. foxes/input/farm_layout/grid.py +2 -2
  65. foxes/input/farm_layout/ring.py +65 -0
  66. foxes/input/farm_layout/row.py +2 -2
  67. foxes/input/states/__init__.py +6 -0
  68. foxes/input/states/create/random_abl_states.py +1 -1
  69. foxes/input/states/field_data_nc.py +157 -32
  70. foxes/input/states/multi_height.py +127 -13
  71. foxes/input/states/one_point_flow.py +577 -0
  72. foxes/input/states/scan_ws.py +73 -2
  73. foxes/input/states/states_table.py +204 -35
  74. foxes/input/windio/__init__.py +1 -1
  75. foxes/input/windio/get_states.py +44 -23
  76. foxes/input/windio/read_attributes.py +41 -16
  77. foxes/input/windio/read_farm.py +116 -102
  78. foxes/input/windio/read_fields.py +13 -6
  79. foxes/input/windio/read_outputs.py +63 -22
  80. foxes/input/windio/runner.py +31 -17
  81. foxes/input/windio/windio.py +36 -22
  82. foxes/models/ground_models/wake_mirror.py +8 -4
  83. foxes/models/model_book.py +29 -18
  84. foxes/models/partial_wakes/rotor_points.py +3 -3
  85. foxes/models/rotor_models/centre.py +4 -0
  86. foxes/models/rotor_models/grid.py +22 -23
  87. foxes/models/rotor_models/levels.py +4 -5
  88. foxes/models/turbine_models/calculator.py +0 -2
  89. foxes/models/turbine_models/lookup_table.py +27 -2
  90. foxes/models/turbine_models/rotor_centre_calc.py +4 -3
  91. foxes/models/turbine_models/set_farm_vars.py +103 -34
  92. foxes/models/turbine_types/PCt_file.py +24 -0
  93. foxes/models/turbine_types/PCt_from_two.py +24 -0
  94. foxes/models/turbine_types/__init__.py +1 -0
  95. foxes/models/turbine_types/lookup.py +316 -0
  96. foxes/models/turbine_types/null_type.py +50 -0
  97. foxes/models/turbine_types/wsrho2PCt_from_two.py +24 -0
  98. foxes/models/turbine_types/wsti2PCt_from_two.py +24 -0
  99. foxes/models/vertical_profiles/data_profile.py +1 -1
  100. foxes/models/wake_frames/__init__.py +1 -0
  101. foxes/models/wake_frames/dynamic_wakes.py +424 -0
  102. foxes/models/wake_frames/farm_order.py +23 -3
  103. foxes/models/wake_frames/rotor_wd.py +4 -2
  104. foxes/models/wake_frames/seq_dynamic_wakes.py +56 -63
  105. foxes/models/wake_frames/streamlines.py +19 -20
  106. foxes/models/wake_frames/timelines.py +328 -127
  107. foxes/models/wake_frames/yawed_wakes.py +4 -1
  108. foxes/models/wake_models/dist_sliced.py +1 -3
  109. foxes/models/wake_models/induction/rankine_half_body.py +4 -4
  110. foxes/models/wake_models/induction/rathmann.py +2 -2
  111. foxes/models/wake_models/induction/self_similar.py +2 -2
  112. foxes/models/wake_models/induction/vortex_sheet.py +2 -2
  113. foxes/models/wake_models/ti/iec_ti.py +34 -17
  114. foxes/models/wake_models/top_hat.py +1 -1
  115. foxes/models/wake_models/wind/bastankhah14.py +2 -2
  116. foxes/models/wake_models/wind/bastankhah16.py +8 -7
  117. foxes/models/wake_models/wind/jensen.py +1 -1
  118. foxes/models/wake_models/wind/turbopark.py +2 -2
  119. foxes/output/__init__.py +4 -1
  120. foxes/output/farm_layout.py +2 -2
  121. foxes/output/flow_plots_2d/__init__.py +0 -1
  122. foxes/output/flow_plots_2d/flow_plots.py +70 -30
  123. foxes/output/grids.py +91 -21
  124. foxes/output/seq_plugins/__init__.py +2 -0
  125. foxes/output/{flow_plots_2d → seq_plugins}/seq_flow_ani_plugin.py +62 -20
  126. foxes/output/seq_plugins/seq_wake_debug_plugin.py +145 -0
  127. foxes/output/slice_data.py +131 -111
  128. foxes/output/state_turbine_map.py +18 -13
  129. foxes/output/state_turbine_table.py +19 -19
  130. foxes/utils/__init__.py +1 -1
  131. foxes/utils/dev_utils.py +42 -0
  132. foxes/utils/dict.py +1 -1
  133. foxes/utils/factory.py +147 -52
  134. foxes/utils/pandas_helpers.py +4 -3
  135. foxes/utils/wind_dir.py +0 -2
  136. foxes/utils/xarray_utils.py +23 -13
  137. foxes/variables.py +37 -0
  138. {foxes-0.8.2.dist-info → foxes-1.0.dist-info}/METADATA +71 -33
  139. foxes-1.0.dist-info/RECORD +307 -0
  140. {foxes-0.8.2.dist-info → foxes-1.0.dist-info}/WHEEL +1 -1
  141. foxes-1.0.dist-info/top_level.txt +4 -0
  142. tests/0_consistency/iterative/test_iterative.py +92 -0
  143. tests/0_consistency/partial_wakes/test_partial_wakes.py +90 -0
  144. tests/1_verification/flappy_0_6/PCt_files/flappy/run.py +85 -0
  145. tests/1_verification/flappy_0_6/PCt_files/test_PCt_files.py +103 -0
  146. tests/1_verification/flappy_0_6/abl_states/flappy/run.py +85 -0
  147. tests/1_verification/flappy_0_6/abl_states/test_abl_states.py +87 -0
  148. tests/1_verification/flappy_0_6/partial_top_hat/flappy/run.py +82 -0
  149. tests/1_verification/flappy_0_6/partial_top_hat/test_partial_top_hat.py +82 -0
  150. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/flappy/run.py +92 -0
  151. tests/1_verification/flappy_0_6/row_Jensen_linear_centre/test_row_Jensen_linear_centre.py +93 -0
  152. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/flappy/run.py +92 -0
  153. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat/test_row_Jensen_linear_tophat.py +96 -0
  154. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/flappy/run.py +94 -0
  155. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2005/test_row_Jensen_linear_tophat_IECTI_2005.py +122 -0
  156. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/flappy/run.py +94 -0
  157. tests/1_verification/flappy_0_6/row_Jensen_linear_tophat_IECTI2019/test_row_Jensen_linear_tophat_IECTI_2019.py +122 -0
  158. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/flappy/run.py +92 -0
  159. tests/1_verification/flappy_0_6/row_Jensen_quadratic_centre/test_row_Jensen_quadratic_centre.py +93 -0
  160. tests/1_verification/flappy_0_6_2/grid_rotors/flappy/run.py +85 -0
  161. tests/1_verification/flappy_0_6_2/grid_rotors/test_grid_rotors.py +130 -0
  162. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/flappy/run.py +96 -0
  163. tests/1_verification/flappy_0_6_2/row_Bastankhah_Crespo/test_row_Bastankhah_Crespo.py +116 -0
  164. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/flappy/run.py +93 -0
  165. tests/1_verification/flappy_0_6_2/row_Bastankhah_linear_centre/test_row_Bastankhah_linear_centre.py +99 -0
  166. tests/3_examples/test_examples.py +34 -0
  167. foxes/VERSION +0 -1
  168. foxes/output/flow_plots_2d.py +0 -0
  169. foxes/utils/runners/__init__.py +0 -1
  170. foxes/utils/runners/runners.py +0 -280
  171. foxes-0.8.2.dist-info/RECORD +0 -247
  172. foxes-0.8.2.dist-info/top_level.txt +0 -1
  173. foxes-0.8.2.dist-info/zip-safe +0 -1
  174. {foxes-0.8.2.dist-info → foxes-1.0.dist-info}/LICENSE +0 -0
foxes/core/algorithm.py CHANGED
@@ -1,11 +1,14 @@
 import numpy as np
 import xarray as xr
+from abc import abstractmethod
 
 from .model import Model
 from foxes.data import StaticData
 from foxes.utils import Dict, all_subclasses
 import foxes.constants as FC
 
+from .engine import Engine
+
 
 class Algorithm(Model):
     """
@@ -17,23 +20,22 @@ class Algorithm(Model):
 
     Attributes
     ----------
-    mbook: foxes.models.ModelBook
-        The model book
-    farm: foxes.WindFarm
-        The wind farm
-    chunks: dict
-        The chunks choice for running in parallel with dask,
-        e.g. `{"state": 1000}` for chunks of 1000 states
     verbosity: int
         The verbosity level, 0 means silent
-    dbook: foxes.DataBook
-        The data book, or None for default
 
     :group: core
 
     """
 
-    def __init__(self, mbook, farm, chunks, verbosity, dbook=None):
+    def __init__(
+        self,
+        mbook,
+        farm,
+        verbosity=1,
+        dbook=None,
+        engine=None,
+        **engine_pars,
+    ):
         """
         Constructor.
 
@@ -43,149 +45,84 @@ class Algorithm(Model):
             The model book
         farm: foxes.WindFarm
             The wind farm
-        chunks: dict
-            The chunks choice for running in parallel with dask,
-            e.g. `{"state": 1000}` for chunks of 1000 states
         verbosity: int
             The verbosity level, 0 means silent
         dbook: foxes.DataBook, optional
             The data book, or None for default
+        engine: str
+            The engine class name
+        engine_pars: dict, optional
+            Parameters for the engine constructor
 
         """
         super().__init__()
 
         self.name = type(self).__name__
-        self.mbook = mbook
-        self.farm = farm
-        self.chunks = chunks
         self.verbosity = verbosity
         self.n_states = None
         self.n_turbines = farm.n_turbines
-        self.dbook = StaticData() if dbook is None else dbook
-
-        if chunks is not None and FC.TARGET not in chunks:
-            self.chunks[FC.TARGET] = chunks.get(FC.POINT, None)
 
-        self._idata_mem = Dict()
+        self.__farm = farm
+        self.__mbook = mbook
+        self.__dbook = StaticData() if dbook is None else dbook
+        self.__idata_mem = Dict(name="idata_mem")
+        self.__chunk_store = Dict(name="chunk_store")
+
+        if engine is not None:
+            e = Engine.new(engine_type=engine, verbosity=verbosity, **engine_pars)
+            self.print(f"Algorithm '{self.name}': Selecting engine '{e}'")
+            e.initialize()
+        elif len(engine_pars):
+            self.print(
+                f"Algorithm '{self.name}': Parameter 'engine' is None; ignoring engine parameters {engine_pars}"
+            )
 
-    def print(self, *args, vlim=1, **kwargs):
+    @property
+    def farm(self):
         """
-        Print function, based on verbosity.
+        The wind farm
 
-        Parameters
-        ----------
-        args: tuple, optional
-            Arguments for the print function
-        kwargs: dict, optional
-            Keyword arguments for the print function
-        vlim: int
-            The verbosity limit
+        Returns
+        -------
+        mb: foxes.core.WindFarm
+            The wind farm
 
         """
-        if self.verbosity >= vlim:
-            print(*args, **kwargs)
+        return self.__farm
 
-    def __get_sizes(self, idata, mtype):
-        """
-        Private helper function
+    @property
+    def mbook(self):
         """
+        The model book
 
-        sizes = {}
-        for v, t in idata["data_vars"].items():
-            if not isinstance(t, tuple) or len(t) != 2:
-                raise ValueError(
-                    f"Input {mtype} data entry '{v}': Not a tuple of size 2, got '{t}'"
-                )
-            if not isinstance(t[0], tuple):
-                raise ValueError(
-                    f"Input {mtype} data entry '{v}': First tuple entry not a dimensions tuple, got '{t[0]}'"
-                )
-            for c in t[0]:
-                if not isinstance(c, str):
-                    raise ValueError(
-                        f"Input {mtype} data entry '{v}': First tuple entry not a dimensions tuple, got '{t[0]}'"
-                    )
-            if not isinstance(t[1], np.ndarray):
-                raise ValueError(
-                    f"Input {mtype} data entry '{v}': Second entry is not a numpy array, got: {type(t[1]).__name__}"
-                )
-            if len(t[1].shape) != len(t[0]):
-                raise ValueError(
-                    f"Input {mtype} data entry '{v}': Wrong data shape, expecting {len(t[0])} dimensions, got {t[1].shape}"
-                )
-            if FC.STATE in t[0]:
-                if t[0][0] != FC.STATE:
-                    raise ValueError(
-                        f"Input {mtype} data entry '{v}': Dimension '{FC.STATE}' not at first position, got {t[0]}"
-                    )
-                if FC.TURBINE in t[0]:
-                    if t[0][1] != FC.TURBINE:
-                        raise ValueError(
-                            f"Input {mtype} data entry '{v}': Dimension '{FC.TURBINE}' not at second position, got {t[0]}"
-                        )
-                if FC.TARGET in t[0]:
-                    if t[0][1] != FC.TARGET:
-                        raise ValueError(
-                            f"Input {mtype} data entry '{v}': Dimension '{FC.TARGET}' not at second position, got {t[0]}"
-                        )
-                    if len(t[0]) < 3 or t[0][2] != FC.TPOINT:
-                        raise KeyError(
-                            f"Input {mtype} data entry '{v}': Expecting dimension '{FC.TPOINT}' as third entry. Got {t[0]}"
-                        )
-            elif FC.TURBINE in t[0]:
-                raise ValueError(
-                    f"Input {mtype} data entry '{v}': Dimension '{FC.TURBINE}' requires combination with dimension '{FC.STATE}'"
-                )
-            for d, s in zip(t[0], t[1].shape):
-                if d not in sizes:
-                    sizes[d] = s
-                elif sizes[d] != s:
-                    raise ValueError(
-                        f"Input {mtype} data entry '{v}': Dimension '{d}' has wrong size, expecting {sizes[d]}, got {s}"
-                    )
-        for v, c in idata["coords"].items():
-            if v not in sizes:
-                raise KeyError(
-                    f"Input coords entry '{v}': Not used in farm data, found {sorted(list(sizes.keys()))}"
-                )
-            elif len(c) != sizes[v]:
-                raise ValueError(
-                    f"Input coords entry '{v}': Wrong coordinate size for '{v}': Expecting {sizes[v]}, got {len(c)}"
-                )
-
-        return sizes
+        Returns
+        -------
+        mb: foxes.models.ModelBook()
+            The model book
 
-    def __get_xrdata(self, idata, sizes):
-        """
-        Private helper function
         """
-        xrdata = xr.Dataset(**idata)
-        if self.chunks is not None:
-            if FC.TURBINE in self.chunks.keys():
-                raise ValueError(
-                    f"Dimension '{FC.TURBINE}' cannot be chunked, got chunks {self.chunks}"
-                )
-            if FC.TPOINT in self.chunks.keys():
-                raise ValueError(
-                    f"Dimension '{FC.TPOINT}' cannot be chunked, got chunks {self.chunks}"
-                )
-            xrdata = xrdata.chunk(
-                chunks={c: v for c, v in self.chunks.items() if c in sizes}
+        if self.running:
+            raise ValueError(
+                f"Algorithm '{self.name}': Cannot access mbook while running"
             )
-        return xrdata
-
-    def chunked(self, ds):
-        return (
-            ds.chunk(chunks={c: v for c, v in self.chunks.items() if c in ds.coords})
-            if self.chunks is not None
-            else ds
-        )
+        return self.__mbook
 
-    def initialize(self):
+    @property
+    def dbook(self):
         """
-        Initializes the algorithm.
+        The data book
+
+        Returns
+        -------
+        mb: foxes.data.StaticData()
+            The data book
+
         """
-        super().initialize(self, self.verbosity)
+        if self.running:
+            raise ValueError(
+                f"Algorithm '{self.name}': Cannot access dbook while running"
+            )
+        return self.__dbook
 
     @property
     def idata_mem(self):
@@ -198,7 +135,51 @@ class Algorithm(Model):
             Keys: model name, value: idata dict
 
         """
-        return self._idata_mem
+        if self.running:
+            raise ValueError(
+                f"Algorithm '{self.name}': Cannot access idata_mem while running"
+            )
+        return self.__idata_mem
+
+    @property
+    def chunk_store(self):
+        """
+        The current chunk store
+
+        Returns
+        -------
+        dict :
+            Keys: model name, value: idata dict
+
+        """
+        return self.__chunk_store
+
+    def print(self, *args, vlim=1, **kwargs):
+        """
+        Print function, based on verbosity.
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Arguments for the print function
+        kwargs: dict, optional
+            Keyword arguments for the print function
+        vlim: int
+            The verbosity limit
+
+        """
+        if self.verbosity >= vlim:
+            print(*args, **kwargs)
+
+    def initialize(self):
+        """
+        Initializes the algorithm.
+        """
+        if self.running:
+            raise ValueError(
+                f"Algorithm '{self.name}': Cannot initialize while running"
+            )
+        super().initialize(self, self.verbosity)
 
     def store_model_data(self, model, idata, force=False):
         """
@@ -217,9 +198,12 @@ class Algorithm(Model):
 
         """
         mname = f"{type(model).__name__}_{model.name}"
-        if not force and mname in self._idata_mem:
+        if force:
+            self.__idata_mem[mname] = idata
+        elif mname in self.idata_mem:
             raise KeyError(f"Attempt to overwrite stored data for model '{mname}'")
-        self._idata_mem[mname] = idata
+        else:
+            self.idata_mem[mname] = idata
 
     def get_model_data(self, model):
         """
@@ -233,10 +217,10 @@ class Algorithm(Model):
         """
         mname = f"{type(model).__name__}_{model.name}"
         try:
-            return self._idata_mem[mname]
+            return self.idata_mem[mname]
         except KeyError:
             raise KeyError(
-                f"Key '{mname}' not found in idata_mem, available keys: {sorted(list(self._idata_mem.keys()))}"
+                f"Key '{mname}' not found in idata_mem, available keys: {sorted(list(self.idata_mem.keys()))}"
             )
 
     def del_model_data(self, model):
@@ -251,7 +235,7 @@ class Algorithm(Model):
         """
         mname = f"{type(model).__name__}_{model.name}"
         try:
-            del self._idata_mem[mname]
+            del self.idata_mem[mname]
         except KeyError:
             raise KeyError(f"Attempt to delete data of model '{mname}', but not stored")
 
@@ -308,7 +292,7 @@ class Algorithm(Model):
                 np.append(d[1], a, axis=i),
             )
 
-        self._idata_mem.update(newk)
+        self.idata_mem.update(newk)
 
     def get_models_idata(self):
         """
@@ -328,13 +312,13 @@ class Algorithm(Model):
                 f"Algorithm '{self.name}': get_models_idata called before initialization"
             )
         idata = {"coords": {}, "data_vars": {}}
-        for k, hidata in self._idata_mem.items():
+        for k, hidata in self.idata_mem.items():
             if len(k) < 3 or k[:2] != "__":
                 idata["coords"].update(hidata["coords"])
                 idata["data_vars"].update(hidata["data_vars"])
         return idata
 
-    def get_models_data(self, idata=None):
+    def get_models_data(self, idata=None, sel=None, isel=None):
         """
         Creates xarray from model input data.
 
@@ -345,19 +329,27 @@ class Algorithm(Model):
             a dict with entries `name_str -> (dim_tuple, data_ndarray)`;
             and `coords`, a dict with entries `dim_name_str -> dim_array`.
             Take algorithm's idata object by default.
+        sel: dict, optional
+            Selection of coordinates in dataset
+        isel: dict, optional
+            Selection of coordinates in dataset
 
         Returns
         -------
-        xarray.Dataset
+        ds: xarray.Dataset
             The model input data
 
         """
         if idata is None:
             idata = self.get_models_idata()
-        sizes = self.__get_sizes(idata, "models")
-        return self.__get_xrdata(idata, sizes)
-
-    def new_point_data(self, points, states_indices=None):
+        ds = xr.Dataset(**idata)
+        if isel is not None:
+            ds = ds.isel(isel)
+        if sel is not None:
+            ds = ds.sel(sel)
+        return ds
+
+    def new_point_data(self, points, states_indices=None, n_states=None):
         """
         Creates a point data xarray object, containing only points.
 
@@ -367,6 +359,8 @@
             The points, shape: (n_states, n_points, 3)
         states_indices: array_like, optional
             The indices of the states dimension
+        n_states: int, optional
+            The number of states
 
         Returns
         -------
@@ -374,7 +368,8 @@
             A dataset containing the points data
 
         """
-
+        if n_states is None:
+            n_states = self.n_states
         if states_indices is None:
             idata = {"coords": {}, "data_vars": {}}
         else:
@@ -382,11 +377,11 @@
 
         if (
             len(points.shape) != 3
-            or points.shape[0] != self.n_states
+            or points.shape[0] != n_states
             or points.shape[2] != 3
         ):
             raise ValueError(
-                f"points have wrong dimensions, expecting ({self.n_states}, {points.shape[1]}, 3), got {points.shape}"
+                f"points have wrong dimensions, expecting ({n_states}, {points.shape[1]}, 3), got {points.shape}"
             )
         idata["data_vars"][FC.TARGETS] = (
             (FC.STATE, FC.TARGET, FC.TPOINT, FC.XYH),
@@ -397,8 +392,495 @@ class Algorithm(Model):
             np.array([1.0], dtype=FC.DTYPE),
         )
 
-        sizes = self.__get_sizes(idata, "point")
-        return self.__get_xrdata(idata, sizes)
+        return xr.Dataset(**idata)
+
+    def find_chunk_in_store(
+        self,
+        mdata,
+        tdata=None,
+        prev_s=0,
+        prev_t=0,
+        error=True,
+    ):
+        """
+        Finds indices in chunk store
+
+        Parameters
+        ----------
+        name: str
+            The data name
+        mdata: foxes.core.MData
+            The mdata object
+        tdata: foxes.core.TData, optional
+            The tdata object
+        prev_s: int
+            How many states chunks backward
+        prev_t: int
+            How many points chunks backward
+        error: bool
+            Flag for raising KeyError if data not found
+
+        Returns
+        -------
+        inds: tuple
+            The (i0, n_states, t0, n_targets) data of the
+            returning chunk
+
+        """
+        i0 = int(mdata.states_i0(counter=True))
+        t0 = int(tdata.targets_i0() if tdata is not None else 0)
+        n_states = int(mdata.n_states)
+        n_targets = int(tdata.n_targets if tdata is not None else 0)
+
+        if prev_s > 0 or prev_t > 0:
+
+            inds = np.array(
+                [
+                    [
+                        d["i0"],
+                        d["i0"] + d["n_states"],
+                        d["n_states"],
+                        d["t0"],
+                        d["t0"] + d["n_targets"],
+                        d["n_targets"],
+                    ]
+                    for d in self.chunk_store.values()
+                ],
+                dtype=int,
+            )
+
+            if prev_t > 0:
+                while prev_t > 0:
+                    sel = np.where((inds[:, 0] == i0) & (inds[:, 4] == t0))[0]
+                    if len(sel) == 0:
+                        if error:
+                            raise KeyError(
+                                f"{self.name}: Previous key {(i0, t0)}, prev={(prev_s, prev_t)}, not found in chunk store, got inds {inds}"
+                            )
+                        else:
+                            return None
+                    else:
+                        n_targets = inds[sel[0], 5]
+                        t0 -= n_targets
+                        prev_t -= 1
+
+            if prev_s > 0:
+                while prev_s > 0:
+                    sel = np.where((inds[:, 1] == i0) & (inds[:, 3] == t0))[0]
+                    if len(sel) == 0:
+                        if error:
+                            raise KeyError(
+                                f"{self.name}: Previous key {(i0, t0)}, prev={(prev_s, prev_t)}, not found in chunk store, got inds {inds}"
+                            )
+                        else:
+                            return None
+                    else:
+                        n_states = inds[sel[0], 2]
+                        i0 -= n_states
+                        prev_s -= 1
+
+        return i0, n_states, t0, n_targets
+
+    def add_to_chunk_store(
+        self,
+        name,
+        data,
+        mdata,
+        tdata=None,
+        copy=True,
+    ):
+        """
+        Add data to the chunk store
+
+        Parameters
+        ----------
+        name: str
+            The data name
+        data: numpy.ndarray
+            The data
+        mdata: foxes.core.MData
+            The mdata object
+        tdata: foxes.core.TData, optional
+            The tdata object
+        copy: bool
+            Flag for copying incoming data
+
+        """
+        i0 = int(mdata.states_i0(counter=True))
+        t0 = int(tdata.targets_i0() if tdata is not None else 0)
+
+        key = (i0, t0)
+        if key not in self.chunk_store:
+            n_states = int(mdata.n_states)
+            n_targets = int(tdata.n_targets if tdata is not None else 0)
+            self.chunk_store[key] = Dict(
+                {
+                    "i0": i0,
+                    "t0": t0,
+                    "n_states": n_states,
+                    "n_targets": n_targets,
+                },
+                name=f"chunk_store_{i0}_{t0}",
+            )
+
+        self.chunk_store[key][name] = data.copy() if copy else data
+
+    def get_from_chunk_store(
+        self,
+        name,
+        mdata,
+        tdata=None,
+        prev_s=0,
+        prev_t=0,
+        ret_inds=False,
+        error=True,
+    ):
+        """
+        Get data to the chunk store
+
+        Parameters
+        ----------
+        name: str
+            The data name
+        mdata: foxes.core.MData
+            The mdata object
+        tdata: foxes.core.TData, optional
+            The tdata object
+        prev_s: int
+            How many states chunks backward
+        prev_t: int
+            How many points chunks backward
+        ret_inds: bool
+            Also return (i0, n_states, t0, n_targets)
+            of the returned chunk
+        error: bool
+            Flag for raising KeyError if data not found
+
+        Returns
+        -------
+        data: numpy.ndarray
+            The data
+        inds: tuple, optional
+            The (i0, n_states, t0, n_targets) data of the
+            returning chunk
+
+        """
+        inds = self.find_chunk_in_store(mdata, tdata, prev_s, prev_t, error)
+
+        if inds is None:
+            return (None, (None, None, None, None)) if ret_inds else None
+        else:
+            i0, __, t0, __ = inds
+            try:
+                data = self.chunk_store[(i0, t0)][name]
+            except KeyError as e:
+                if error:
+                    raise e
+                else:
+                    data = None
+            if ret_inds:
+                return data, inds
+            else:
+                return data
+
+    def reset_chunk_store(self, new_chunk_store=None):
+        """
+        Resets the chunk store
+
+        Parameters
+        ----------
+        new_chunk_store: foxes.utils.Dict, optional
+            The new chunk store
+
+        Returns
+        -------
+        chunk_store: foxes.utils.Dict
+            The chunk store before resetting
+
+        """
+        chunk_store = self.chunk_store
+        if new_chunk_store is None:
+            self.__chunk_store = Dict(name="chunk_store")
+        elif isinstance(new_chunk_store, Dict):
+            self.__chunk_store = new_chunk_store
+        else:
+            self.__chunk_store = Dict(name="chunk_store")
+            self.__chunk_store.update(new_chunk_store)
+        return chunk_store
+
+    def block_convergence(self, **kwargs):
+        """
+        Switch on convergence block during iterative run
+
+        Parameters
+        ----------
+        kwargs: dict, optional
+            Parameters for add_to_chunk_store()
+
+        """
+        self.add_to_chunk_store(
+            name=FC.BLOCK_CONVERGENCE, data=True, copy=False, **kwargs
+        )
+
+    def eval_conv_block(self):
+        """
+        Evaluate convergence block, removing blocks on the fly
+
+        Returns
+        -------
+        blocked: bool
+            True if convergence is currently blocked
+
+        """
+        blocked = False
+        for c in self.__chunk_store.values():
+            blocked = c.pop(FC.BLOCK_CONVERGENCE, False) or blocked
+        return blocked
+
+    def set_running(
+        self,
+        algo,
+        data_stash,
+        sel=None,
+        isel=None,
+        verbosity=0,
+    ):
+        """
+        Sets this model status to running, and moves
+        all large data to stash.
+
+        The stashed data will be returned by the
+        unset_running() function after running calculations.
+
+        Parameters
+        ----------
+        algo: foxes.core.Algorithm
+            The calculation algorithm
+        data_stash: dict
+            Large data stash, this function adds data here.
+            Key: model name. Value: dict, large model data
+        sel: dict, optional
+            The subset selection dictionary
+        isel: dict, optional
+            The index subset selection dictionary
+        verbosity: int
+            The verbosity level, 0 = silent
+
+        """
+        assert algo is self
+
+        super().set_running(algo, data_stash, sel, isel, verbosity)
+
+        data_stash[self.name].update(
+            dict(
+                mbook=self.__mbook,
+                dbook=self.__dbook,
+                idata_mem=self.__idata_mem,
+            )
+        )
+        del self.__mbook, self.__dbook
+        self.__idata_mem = {}
+
+    def unset_running(
+        self,
+        algo,
+        data_stash,
+        sel=None,
+        isel=None,
+        verbosity=0,
+    ):
+        """
+        Sets this model status to not running, recovering large data
+        from stash
+
+        Parameters
+        ----------
+        algo: foxes.core.Algorithm
+            The calculation algorithm
+        data_stash: dict
+            Large data stash, this function adds data here.
+            Key: model name. Value: dict, large model data
+        sel: dict, optional
+            The subset selection dictionary
+        isel: dict, optional
+            The index subset selection dictionary
+        verbosity: int
+            The verbosity level, 0 = silent
+
+        """
+        assert algo is self
+
+        super().unset_running(algo, data_stash, sel, isel, verbosity)
+
+        data = data_stash[self.name]
+        self.__mbook = data.pop("mbook")
+        self.__dbook = data.pop("dbook")
+        self.__idata_mem = data.pop("idata_mem")
+
+    @abstractmethod
+    def _launch_parallel_farm_calc(
+        self,
+        *args,
+        mbook,
+        dbook,
+        chunk_store,
+        **kwargs,
+    ):
+        """
+        Runs the main farm calculation, launching parallelization
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Additional parameters for running
+        mbook: foxes.models.ModelBook
+            The model book
+        dbook: foxes.DataBook
+            The data book, or None for default
+        chunk_store: foxes.utils.Dict
+            The chunk store
+        kwargs: dict, optional
+            Additional parameters for running
+
+        Returns
+        -------
+        farm_results: xarray.Dataset
+            The farm results. The calculated variables have
+            dimensions (state, turbine)
+
+        """
+        pass
+
+    def calc_farm(self, *args, **kwargs):
+        """
+        Calculate farm data.
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Parameters
+        kwargs: dict, optional
+            Keyword parameters
+
+        Returns
+        -------
+        farm_results: xarray.Dataset
+            The farm results. The calculated variables have
+            dimensions (state, turbine)
+
+        """
+        if self.running:
+            raise ValueError(
+                f"Algorithm '{self.name}': Cannot call calc_farm while running"
+            )
+
+        # set to running:
+        data_stash = {}
+        chunk_store = self.reset_chunk_store()
+        mdls = [
+            m
+            for m in [self] + list(args) + list(kwargs.values())
+            if isinstance(m, Model)
+        ]
+        for m in mdls:
+            m.set_running(
+                self, data_stash, sel=None, isel=None, verbosity=self.verbosity - 2
+            )
+
+        # run parallel calculation:
+        farm_results = self._launch_parallel_farm_calc(
+            *args,
+            chunk_store=chunk_store,
+            sel=None,
+            isel=None,
+            **kwargs,
+        )
+
+        # reset to not running:
+        for m in mdls:
+            m.unset_running(
+                self, data_stash, sel=None, isel=None, verbosity=self.verbosity - 2
+            )
+
+        return farm_results
+
+    @abstractmethod
+    def _launch_parallel_points_calc(
+        self,
+        *args,
+        chunk_store,
+        **kwargs,
+    ):
+        """
+        Runs the main points calculation, launching parallelization
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Additional parameters for running
+        chunk_store: foxes.utils.Dict
+            The chunk store
+        kwargs: dict, optional
+            Additional parameters for running
+
+        Returns
+        -------
+        point_results: xarray.Dataset
+            The point results. The calculated variables have
+            dimensions (state, point)
+
+        """
+        pass
+
+    def calc_points(self, *args, sel=None, isel=None, **kwargs):
+        """
+        Calculate points data.
+
+        Parameters
+        ----------
+        args: tuple, optional
+            Parameters
+        sel: dict, optional
+            The subset selection dictionary
+        isel: dict, optional
+            The index subset selection dictionary
+        kwargs: dict, optional
+            Keyword parameters
+
+        Returns
+        -------
+        point_results: xarray.Dataset
+            The point results. The calculated variables have
+            dimensions (state, point)
+
+        """
+        if self.running:
+            raise ValueError(
+                f"Algorithm '{self.name}': Cannot call calc_points while running"
+            )
+
+        # set to running:
+        data_stash = {}
+        self.set_running(
+            self, data_stash, sel=sel, isel=isel, verbosity=self.verbosity - 2
+        )
+
+        # run parallel calculation:
+        chunk_store = self.reset_chunk_store()
+        point_results = self._launch_parallel_points_calc(
+            *args,
+            chunk_store=chunk_store,
+            sel=sel,
+            isel=isel,
+            **kwargs,
+        )
+
+        # reset to not running:
+        self.unset_running(
+            self, data_stash, sel=sel, isel=isel, verbosity=self.verbosity - 2
+        )
+
+        return point_results
 
     def finalize(self, clear_mem=False):
         """
@@ -410,9 +892,12 @@
             Clear idata memory
 
         """
+        if self.running:
+            raise ValueError(f"Algorithm '{self.name}': Cannot finalize while running")
         super().finalize(self, self.verbosity)
         if clear_mem:
-            self._idata_mem = Dict()
+            self.__idata_mem = Dict()
+            # self.reset_chunk_store()
 
     @classmethod
     def new(cls, algo_type, *args, **kwargs):
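
Note on the chunk store introduced in the diff above: each chunk is keyed by the tuple (i0, t0), i.e. the first state index and the first target index of that chunk, and find_chunk_in_store walks backwards over these keys to locate neighbouring chunks. The standalone sketch below mimics only that bookkeeping with plain dictionaries; the chunk sizes, the variable name "wake_deltas" and the helper functions are invented for illustration and are not part of the foxes API, which uses its own Dict class together with MData/TData objects instead.

    # Standalone illustration of the (i0, t0)-keyed chunk store bookkeeping
    # used by Algorithm.add_to_chunk_store and find_chunk_in_store above.
    # Plain dicts and invented chunk sizes; not part of the foxes API.

    chunk_store = {}

    def add_chunk(i0, n_states, t0, n_targets, name, data):
        """Store 'data' under 'name' for the chunk starting at (i0, t0)."""
        entry = chunk_store.setdefault(
            (i0, t0),
            {"i0": i0, "t0": t0, "n_states": n_states, "n_targets": n_targets},
        )
        entry[name] = data

    def previous_states_chunk(i0, t0):
        """Find the chunk that ends exactly where the chunk (i0, t0) begins,
        i.e. one step backwards along the states dimension."""
        for d in chunk_store.values():
            if d["i0"] + d["n_states"] == i0 and d["t0"] == t0:
                return d
        raise KeyError(f"No chunk found ending at state {i0} for t0={t0}")

    # two consecutive chunks of 500 states each, one target block of 100 targets
    add_chunk(0, 500, 0, 100, "wake_deltas", data="chunk-0 payload")
    add_chunk(500, 500, 0, 100, "wake_deltas", data="chunk-1 payload")

    prev = previous_states_chunk(500, 0)
    print(prev["wake_deltas"])  # prints: chunk-0 payload

In the released code this lookup is vectorized with numpy over all stored chunks and can also step backwards along the target dimension via prev_t.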