NREL-reV 0.8.7__py3-none-any.whl → 0.8.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. {NREL_reV-0.8.7.dist-info → NREL_reV-0.8.9.dist-info}/METADATA +12 -10
  2. {NREL_reV-0.8.7.dist-info → NREL_reV-0.8.9.dist-info}/RECORD +38 -38
  3. {NREL_reV-0.8.7.dist-info → NREL_reV-0.8.9.dist-info}/WHEEL +1 -1
  4. reV/SAM/SAM.py +182 -133
  5. reV/SAM/econ.py +18 -14
  6. reV/SAM/generation.py +608 -419
  7. reV/SAM/windbos.py +93 -79
  8. reV/bespoke/bespoke.py +690 -445
  9. reV/bespoke/place_turbines.py +6 -6
  10. reV/config/project_points.py +220 -140
  11. reV/econ/econ.py +165 -113
  12. reV/econ/economies_of_scale.py +57 -34
  13. reV/generation/base.py +310 -183
  14. reV/generation/generation.py +298 -190
  15. reV/handlers/exclusions.py +16 -15
  16. reV/handlers/multi_year.py +12 -9
  17. reV/handlers/outputs.py +6 -5
  18. reV/hybrids/hybrid_methods.py +28 -30
  19. reV/hybrids/hybrids.py +304 -188
  20. reV/nrwal/nrwal.py +262 -168
  21. reV/qa_qc/cli_qa_qc.py +14 -10
  22. reV/qa_qc/qa_qc.py +217 -119
  23. reV/qa_qc/summary.py +228 -146
  24. reV/rep_profiles/rep_profiles.py +349 -230
  25. reV/supply_curve/aggregation.py +349 -188
  26. reV/supply_curve/competitive_wind_farms.py +90 -48
  27. reV/supply_curve/exclusions.py +138 -85
  28. reV/supply_curve/extent.py +75 -50
  29. reV/supply_curve/points.py +536 -309
  30. reV/supply_curve/sc_aggregation.py +366 -225
  31. reV/supply_curve/supply_curve.py +505 -308
  32. reV/supply_curve/tech_mapping.py +144 -82
  33. reV/utilities/__init__.py +199 -16
  34. reV/utilities/pytest_utils.py +8 -4
  35. reV/version.py +1 -1
  36. {NREL_reV-0.8.7.dist-info → NREL_reV-0.8.9.dist-info}/LICENSE +0 -0
  37. {NREL_reV-0.8.7.dist-info → NREL_reV-0.8.9.dist-info}/entry_points.txt +0 -0
  38. {NREL_reV-0.8.7.dist-info → NREL_reV-0.8.9.dist-info}/top_level.txt +0 -0
reV/bespoke/bespoke.py CHANGED
@@ -2,42 +2,47 @@
2
2
  """
3
3
  reV bespoke wind plant analysis tools
4
4
  """
5
+
5
6
  # pylint: disable=anomalous-backslash-in-string
6
- from inspect import signature
7
- import time
8
- import logging
9
7
  import copy
10
- import pandas as pd
11
- import numpy as np
12
- import os
13
8
  import json
14
- import psutil
9
+ import logging
10
+ import os
11
+ import time
12
+ from concurrent.futures import as_completed
15
13
  from importlib import import_module
14
+ from inspect import signature
16
15
  from numbers import Number
17
- from concurrent.futures import as_completed
18
16
  from warnings import warn
19
17
 
18
+ import numpy as np
19
+ import pandas as pd
20
+ import psutil
21
+ from rex.joint_pd.joint_pd import JointPD
22
+ from rex.multi_year_resource import MultiYearWindResource
23
+ from rex.renewable_resource import WindResource
24
+ from rex.utilities.bc_parse_table import parse_bc_table
25
+ from rex.utilities.execution import SpawnProcessPool
26
+ from rex.utilities.loggers import create_dirs, log_mem
27
+ from rex.utilities.utilities import parse_year
28
+
20
29
  from reV.config.output_request import SAMOutputRequest
21
- from reV.generation.generation import Gen
22
- from reV.SAM.generation import WindPower, WindPowerPD
23
30
  from reV.econ.utilities import lcoe_fcr
24
- from reV.handlers.outputs import Outputs
31
+ from reV.generation.generation import Gen
25
32
  from reV.handlers.exclusions import ExclusionLayers
33
+ from reV.handlers.outputs import Outputs
34
+ from reV.SAM.generation import WindPower, WindPowerPD
35
+ from reV.supply_curve.aggregation import AggFileHandler, BaseAggregation
26
36
  from reV.supply_curve.extent import SupplyCurveExtent
27
37
  from reV.supply_curve.points import AggregationSupplyCurvePoint as AggSCPoint
28
38
  from reV.supply_curve.points import SupplyCurvePoint
29
- from reV.supply_curve.aggregation import BaseAggregation, AggFileHandler
30
- from reV.utilities.exceptions import (EmptySupplyCurvePointError,
31
- FileInputError)
32
- from reV.utilities import log_versions, ModuleName
33
-
34
- from rex.utilities.bc_parse_table import parse_bc_table
35
- from rex.joint_pd.joint_pd import JointPD
36
- from rex.renewable_resource import WindResource
37
- from rex.multi_year_resource import MultiYearWindResource
38
- from rex.utilities.loggers import log_mem, create_dirs
39
- from rex.utilities.utilities import parse_year
40
- from rex.utilities.execution import SpawnProcessPool
39
+ from reV.utilities import (
40
+ ModuleName,
41
+ ResourceMetaField,
42
+ SupplyCurveField,
43
+ log_versions,
44
+ )
45
+ from reV.utilities.exceptions import EmptySupplyCurvePointError, FileInputError
41
46
 
42
47
  logger = logging.getLogger(__name__)
43
48
 
@@ -79,34 +84,44 @@ class BespokeMultiPlantData:
79
84
  self._pre_load_data()
80
85
 
81
86
  def _pre_load_data(self):
82
- """Pre-load the resource data. """
87
+ """Pre-load the resource data."""
83
88
 
84
89
  for sc_gid, gids in self.sc_gid_to_res_gid.items():
85
90
  hh = self.sc_gid_to_hh[sc_gid]
86
91
  self.hh_to_res_gids.setdefault(hh, set()).update(gids)
87
92
 
88
- self.hh_to_res_gids = {hh: sorted(gids)
89
- for hh, gids in self.hh_to_res_gids.items()}
93
+ self.hh_to_res_gids = {
94
+ hh: sorted(gids) for hh, gids in self.hh_to_res_gids.items()
95
+ }
90
96
 
91
97
  start_time = time.time()
92
- if '*' in self.res_fpath:
98
+ if "*" in self.res_fpath:
93
99
  handler = MultiYearWindResource
94
100
  else:
95
101
  handler = WindResource
96
102
 
97
103
  with handler(self.res_fpath) as res:
98
- self._wind_dirs = {hh: res[f"winddirection_{hh}m", :, gids]
99
- for hh, gids in self.hh_to_res_gids.items()}
100
- self._wind_speeds = {hh: res[f"windspeed_{hh}m", :, gids]
101
- for hh, gids in self.hh_to_res_gids.items()}
102
- self._temps = {hh: res[f"temperature_{hh}m", :, gids]
103
- for hh, gids in self.hh_to_res_gids.items()}
104
- self._pressures = {hh: res[f"pressure_{hh}m", :, gids]
105
- for hh, gids in self.hh_to_res_gids.items()}
104
+ self._wind_dirs = {
105
+ hh: res[f"winddirection_{hh}m", :, gids]
106
+ for hh, gids in self.hh_to_res_gids.items()
107
+ }
108
+ self._wind_speeds = {
109
+ hh: res[f"windspeed_{hh}m", :, gids]
110
+ for hh, gids in self.hh_to_res_gids.items()
111
+ }
112
+ self._temps = {
113
+ hh: res[f"temperature_{hh}m", :, gids]
114
+ for hh, gids in self.hh_to_res_gids.items()
115
+ }
116
+ self._pressures = {
117
+ hh: res[f"pressure_{hh}m", :, gids]
118
+ for hh, gids in self.hh_to_res_gids.items()
119
+ }
106
120
  self._time_index = res.time_index
107
121
 
108
- logger.debug(f"Data took {(time.time() - start_time) / 60:.2f} "
109
- f"min to load")
122
+ logger.debug(
123
+ f"Data took {(time.time() - start_time) / 60:.2f} " f"min to load"
124
+ )
110
125
 
111
126
  def get_preloaded_data_for_gid(self, sc_gid):
112
127
  """Get the pre-loaded data for a single SC GID.
@@ -125,12 +140,14 @@ class BespokeMultiPlantData:
125
140
  hh = self.sc_gid_to_hh[sc_gid]
126
141
  sc_point_res_gids = sorted(self.sc_gid_to_res_gid[sc_gid])
127
142
  data_inds = np.searchsorted(self.hh_to_res_gids[hh], sc_point_res_gids)
128
- return BespokeSinglePlantData(sc_point_res_gids,
129
- self._wind_dirs[hh][:, data_inds],
130
- self._wind_speeds[hh][:, data_inds],
131
- self._temps[hh][:, data_inds],
132
- self._pressures[hh][:, data_inds],
133
- self._time_index)
143
+ return BespokeSinglePlantData(
144
+ sc_point_res_gids,
145
+ self._wind_dirs[hh][:, data_inds],
146
+ self._wind_speeds[hh][:, data_inds],
147
+ self._temps[hh][:, data_inds],
148
+ self._pressures[hh][:, data_inds],
149
+ self._time_index,
150
+ )
134
151
 
135
152
 
136
153
  class BespokeSinglePlantData:
@@ -141,8 +158,9 @@ class BespokeSinglePlantData:
141
158
  reads to a single HDF5 file.
142
159
  """
143
160
 
144
- def __init__(self, data_inds, wind_dirs, wind_speeds, temps, pressures,
145
- time_index):
161
+ def __init__(
162
+ self, data_inds, wind_dirs, wind_speeds, temps, pressures, time_index
163
+ ):
146
164
  """Initialize BespokeSinglePlantData
147
165
 
148
166
  Parameters
@@ -186,24 +204,43 @@ class BespokeSinglePlantData:
186
204
 
187
205
 
188
206
  class BespokeSinglePlant:
189
- """Framework for analyzing and optimized a wind plant layout specific to
207
+ """Framework for analyzing and optimizing a wind plant layout specific to
190
208
  the local wind resource and exclusions for a single reV supply curve point.
191
209
  """
192
210
 
193
- DEPENDENCIES = ('shapely',)
211
+ DEPENDENCIES = ("shapely",)
194
212
  OUT_ATTRS = copy.deepcopy(Gen.OUT_ATTRS)
195
213
 
196
- def __init__(self, gid, excl, res, tm_dset, sam_sys_inputs,
197
- objective_function, capital_cost_function,
198
- fixed_operating_cost_function,
199
- variable_operating_cost_function,
200
- min_spacing='5x', wake_loss_multiplier=1, ga_kwargs=None,
201
- output_request=('system_capacity', 'cf_mean'),
202
- ws_bins=(0.0, 20.0, 5.0), wd_bins=(0.0, 360.0, 45.0),
203
- excl_dict=None, inclusion_mask=None, data_layers=None,
204
- resolution=64, excl_area=None, exclusion_shape=None,
205
- eos_mult_baseline_cap_mw=200, prior_meta=None, gid_map=None,
206
- bias_correct=None, pre_loaded_data=None, close=True):
214
+ def __init__(
215
+ self,
216
+ gid,
217
+ excl,
218
+ res,
219
+ tm_dset,
220
+ sam_sys_inputs,
221
+ objective_function,
222
+ capital_cost_function,
223
+ fixed_operating_cost_function,
224
+ variable_operating_cost_function,
225
+ min_spacing="5x",
226
+ wake_loss_multiplier=1,
227
+ ga_kwargs=None,
228
+ output_request=("system_capacity", "cf_mean"),
229
+ ws_bins=(0.0, 20.0, 5.0),
230
+ wd_bins=(0.0, 360.0, 45.0),
231
+ excl_dict=None,
232
+ inclusion_mask=None,
233
+ data_layers=None,
234
+ resolution=64,
235
+ excl_area=None,
236
+ exclusion_shape=None,
237
+ eos_mult_baseline_cap_mw=200,
238
+ prior_meta=None,
239
+ gid_map=None,
240
+ bias_correct=None,
241
+ pre_loaded_data=None,
242
+ close=True,
243
+ ):
207
244
  """
208
245
  Parameters
209
246
  ----------
@@ -352,38 +389,48 @@ class BespokeSinglePlant:
352
389
  Flag to close object file handlers on exit.
353
390
  """
354
391
 
355
- logger.debug('Initializing BespokeSinglePlant for gid {}...'
356
- .format(gid))
357
- logger.debug('Resource filepath: {}'.format(res))
358
- logger.debug('Exclusion filepath: {}'.format(excl))
359
- logger.debug('Exclusion dict: {}'.format(excl_dict))
360
- logger.debug('Bespoke objective function: {}'
361
- .format(objective_function))
362
- logger.debug('Bespoke cost function: {}'.format(objective_function))
363
- logger.debug('Bespoke wake loss multiplier: {}'
364
- .format(wake_loss_multiplier))
365
- logger.debug('Bespoke GA initialization kwargs: {}'.format(ga_kwargs))
366
- logger.debug('Bespoke EOS multiplier baseline capacity: {:,} MW'
367
- .format(eos_mult_baseline_cap_mw))
368
-
369
- if isinstance(min_spacing, str) and min_spacing.endswith('x'):
392
+ logger.debug(
393
+ "Initializing BespokeSinglePlant for gid {}...".format(gid)
394
+ )
395
+ logger.debug("Resource filepath: {}".format(res))
396
+ logger.debug("Exclusion filepath: {}".format(excl))
397
+ logger.debug("Exclusion dict: {}".format(excl_dict))
398
+ logger.debug(
399
+ "Bespoke objective function: {}".format(objective_function)
400
+ )
401
+ logger.debug("Bespoke cost function: {}".format(objective_function))
402
+ logger.debug(
403
+ "Bespoke wake loss multiplier: {}".format(wake_loss_multiplier)
404
+ )
405
+ logger.debug("Bespoke GA initialization kwargs: {}".format(ga_kwargs))
406
+ logger.debug(
407
+ "Bespoke EOS multiplier baseline capacity: {:,} MW".format(
408
+ eos_mult_baseline_cap_mw
409
+ )
410
+ )
411
+
412
+ if isinstance(min_spacing, str) and min_spacing.endswith("x"):
370
413
  rotor_diameter = sam_sys_inputs["wind_turbine_rotor_diameter"]
371
- min_spacing = float(min_spacing.strip('x')) * rotor_diameter
414
+ min_spacing = float(min_spacing.strip("x")) * rotor_diameter
372
415
 
373
416
  if not isinstance(min_spacing, (int, float)):
374
417
  try:
375
418
  min_spacing = float(min_spacing)
376
419
  except Exception as e:
377
- msg = ('min_spacing must be numeric but received: {}, {}'
378
- .format(min_spacing, type(min_spacing)))
420
+ msg = (
421
+ "min_spacing must be numeric but received: {}, {}".format(
422
+ min_spacing, type(min_spacing)
423
+ )
424
+ )
379
425
  logger.error(msg)
380
426
  raise TypeError(msg) from e
381
427
 
382
428
  self.objective_function = objective_function
383
429
  self.capital_cost_function = capital_cost_function
384
430
  self.fixed_operating_cost_function = fixed_operating_cost_function
385
- self.variable_operating_cost_function = \
431
+ self.variable_operating_cost_function = (
386
432
  variable_operating_cost_function
433
+ )
387
434
  self.min_spacing = min_spacing
388
435
  self.wake_loss_multiplier = wake_loss_multiplier
389
436
  self.ga_kwargs = ga_kwargs or {}
@@ -411,26 +458,33 @@ class BespokeSinglePlant:
411
458
  Handler = self.get_wind_handler(res)
412
459
  res = res if not isinstance(res, str) else Handler(res)
413
460
 
414
- self._sc_point = AggSCPoint(gid, excl, res, tm_dset,
415
- excl_dict=excl_dict,
416
- inclusion_mask=inclusion_mask,
417
- resolution=resolution,
418
- excl_area=excl_area,
419
- exclusion_shape=exclusion_shape,
420
- close=close)
461
+ self._sc_point = AggSCPoint(
462
+ gid,
463
+ excl,
464
+ res,
465
+ tm_dset,
466
+ excl_dict=excl_dict,
467
+ inclusion_mask=inclusion_mask,
468
+ resolution=resolution,
469
+ excl_area=excl_area,
470
+ exclusion_shape=exclusion_shape,
471
+ close=close,
472
+ )
421
473
 
422
474
  self._parse_output_req()
423
475
  self._data_layers = data_layers
424
476
  self._parse_prior_run()
425
477
 
426
478
  def __str__(self):
427
- s = ('BespokeSinglePlant for reV SC gid {} with resolution {}'
428
- .format(self.sc_point.gid, self.sc_point.resolution))
479
+ s = "BespokeSinglePlant for reV SC gid {} with resolution {}".format(
480
+ self.sc_point.gid, self.sc_point.resolution
481
+ )
429
482
  return s
430
483
 
431
484
  def __repr__(self):
432
- s = ('BespokeSinglePlant for reV SC gid {} with resolution {}'
433
- .format(self.sc_point.gid, self.sc_point.resolution))
485
+ s = "BespokeSinglePlant for reV SC gid {} with resolution {}".format(
486
+ self.sc_point.gid, self.sc_point.resolution
487
+ )
434
488
  return s
435
489
 
436
490
  def __enter__(self):
@@ -447,14 +501,14 @@ class BespokeSinglePlant:
447
501
  (ws_mean, *_mean) if requested.
448
502
  """
449
503
 
450
- required = ('cf_mean', 'annual_energy')
504
+ required = ("cf_mean", "annual_energy")
451
505
  for req in required:
452
506
  if req not in self._out_req:
453
507
  self._out_req.append(req)
454
508
 
455
- if 'ws_mean' in self._out_req:
456
- self._out_req.remove('ws_mean')
457
- self._outputs['ws_mean'] = self.res_df['windspeed'].mean()
509
+ if "ws_mean" in self._out_req:
510
+ self._out_req.remove("ws_mean")
511
+ self._outputs["ws_mean"] = self.res_df["windspeed"].mean()
458
512
 
459
513
  for req in copy.deepcopy(self._out_req):
460
514
  if req in self.res_df:
@@ -463,17 +517,20 @@ class BespokeSinglePlant:
463
517
  year = annual_ti.year[0]
464
518
  mask = self.res_df.index.isin(annual_ti)
465
519
  arr = self.res_df.loc[mask, req].values.flatten()
466
- self._outputs[req + f'-{year}'] = arr
520
+ self._outputs[req + f"-{year}"] = arr
467
521
 
468
- elif req.replace('_mean', '') in self.res_df:
522
+ elif req.replace("_mean", "") in self.res_df:
469
523
  self._out_req.remove(req)
470
- dset = req.replace('_mean', '')
524
+ dset = req.replace("_mean", "")
471
525
  self._outputs[req] = self.res_df[dset].mean()
472
526
 
473
- if ('lcoe_fcr' in self._out_req
474
- and 'fixed_charge_rate' not in self.original_sam_sys_inputs):
475
- msg = ('User requested "lcoe_fcr" but did not input '
476
- '"fixed_charge_rate" in the SAM system config.')
527
+ if "lcoe_fcr" in self._out_req and (
528
+ "fixed_charge_rate" not in self.original_sam_sys_inputs
529
+ ):
530
+ msg = (
531
+ 'User requested "lcoe_fcr" but did not input '
532
+ '"fixed_charge_rate" in the SAM system config.'
533
+ )
477
534
  logger.error(msg)
478
535
  raise KeyError(msg)
479
536
 
@@ -482,14 +539,18 @@ class BespokeSinglePlant:
482
539
  sure the SAM system inputs are set accordingly."""
483
540
 
484
541
  # {meta_column: sam_sys_input_key}
485
- required = {'capacity': 'system_capacity',
486
- 'turbine_x_coords': 'wind_farm_xCoordinates',
487
- 'turbine_y_coords': 'wind_farm_yCoordinates'}
542
+ required = {
543
+ SupplyCurveField.CAPACITY: "system_capacity",
544
+ SupplyCurveField.TURBINE_X_COORDS: "wind_farm_xCoordinates",
545
+ SupplyCurveField.TURBINE_Y_COORDS: "wind_farm_yCoordinates",
546
+ }
488
547
 
489
548
  if self._prior_meta:
490
549
  missing = [k for k in required if k not in self.meta]
491
- msg = ('Prior bespoke run meta data is missing the following '
492
- 'required columns: {}'.format(missing))
550
+ msg = (
551
+ "Prior bespoke run meta data is missing the following "
552
+ "required columns: {}".format(missing)
553
+ )
493
554
  assert not any(missing), msg
494
555
 
495
556
  for meta_col, sam_sys_key in required.items():
@@ -497,7 +558,7 @@ class BespokeSinglePlant:
497
558
  self._sam_sys_inputs[sam_sys_key] = prior_value
498
559
 
499
560
  # convert reV supply curve cap in MW to SAM capacity in kW
500
- self._sam_sys_inputs['system_capacity'] *= 1e3
561
+ self._sam_sys_inputs["system_capacity"] *= 1e3
501
562
 
502
563
  @staticmethod
503
564
  def _parse_gid_map(gid_map):
@@ -522,15 +583,22 @@ class BespokeSinglePlant:
522
583
  """
523
584
 
524
585
  if isinstance(gid_map, str):
525
- if gid_map.endswith('.csv'):
526
- gid_map = pd.read_csv(gid_map).to_dict()
527
- assert 'gid' in gid_map, 'Need "gid" in gid_map column'
528
- assert 'gid_map' in gid_map, 'Need "gid_map" in gid_map column'
529
- gid_map = {gid_map['gid'][i]: gid_map['gid_map'][i]
530
- for i in gid_map['gid'].keys()}
531
-
532
- elif gid_map.endswith('.json'):
533
- with open(gid_map, 'r') as f:
586
+ if gid_map.endswith(".csv"):
587
+ gid_map = (
588
+ pd.read_csv(gid_map)
589
+ .rename(SupplyCurveField.map_to(ResourceMetaField), axis=1)
590
+ .to_dict()
591
+ )
592
+ err_msg = f"Need {ResourceMetaField.GID} in gid_map column"
593
+ assert ResourceMetaField.GID in gid_map, err_msg
594
+ assert "gid_map" in gid_map, 'Need "gid_map" in gid_map column'
595
+ gid_map = {
596
+ gid_map[ResourceMetaField.GID][i]: gid_map["gid_map"][i]
597
+ for i in gid_map[ResourceMetaField.GID]
598
+ }
599
+
600
+ elif gid_map.endswith(".json"):
601
+ with open(gid_map) as f:
534
602
  gid_map = json.load(f)
535
603
 
536
604
  return gid_map
@@ -563,19 +631,23 @@ class BespokeSinglePlant:
563
631
  Bias corrected windspeed data in same shape as input
564
632
  """
565
633
 
566
- if self._bias_correct is not None and dset.startswith('windspeed_'):
567
-
634
+ if self._bias_correct is not None and dset.startswith("windspeed_"):
568
635
  out = parse_bc_table(self._bias_correct, h5_gids)
569
636
  bc_fun, bc_fun_kwargs, bool_bc = out
570
637
 
571
638
  if bool_bc.any():
572
- logger.debug('Bias correcting windspeed with function {} '
573
- 'for h5 gids: {}'.format(bc_fun, h5_gids))
639
+ logger.debug(
640
+ "Bias correcting windspeed with function {} "
641
+ "for h5 gids: {}".format(bc_fun, h5_gids)
642
+ )
574
643
 
575
- bc_fun_kwargs['ws'] = ws[:, bool_bc]
644
+ bc_fun_kwargs["ws"] = ws[:, bool_bc]
576
645
  sig = signature(bc_fun)
577
- bc_fun_kwargs = {k: v for k, v in bc_fun_kwargs.items()
578
- if k in sig.parameters}
646
+ bc_fun_kwargs = {
647
+ k: v
648
+ for k, v in bc_fun_kwargs.items()
649
+ if k in sig.parameters
650
+ }
579
651
 
580
652
  ws[:, bool_bc] = bc_fun(**bc_fun_kwargs)
581
653
 
@@ -631,7 +703,7 @@ class BespokeSinglePlant:
631
703
  of degrees from north.
632
704
  """
633
705
 
634
- dset = f'winddirection_{self.hub_height}m'
706
+ dset = f"winddirection_{self.hub_height}m"
635
707
  gids = self.sc_point.h5_gid_set
636
708
  h5_gids = copy.deepcopy(gids)
637
709
  if self._gid_map is not None:
@@ -736,31 +808,36 @@ class BespokeSinglePlant:
736
808
  """
737
809
  if self._meta is None:
738
810
  res_gids = json.dumps([int(g) for g in self.sc_point.h5_gid_set])
739
- gid_counts = json.dumps([float(np.round(n, 1))
740
- for n in self.sc_point.gid_counts])
811
+ gid_counts = json.dumps(
812
+ [float(np.round(n, 1)) for n in self.sc_point.gid_counts]
813
+ )
741
814
 
742
- with SupplyCurveExtent(self.sc_point._excl_fpath,
743
- resolution=self.sc_point.resolution) as sc:
815
+ with SupplyCurveExtent(
816
+ self.sc_point._excl_fpath, resolution=self.sc_point.resolution
817
+ ) as sc:
744
818
  row_ind, col_ind = sc.get_sc_row_col_ind(self.sc_point.gid)
745
819
 
746
820
  self._meta = pd.DataFrame(
747
- {'sc_point_gid': self.sc_point.gid,
748
- 'sc_row_ind': row_ind,
749
- 'sc_col_ind': col_ind,
750
- 'gid': self.sc_point.gid,
751
- 'latitude': self.sc_point.latitude,
752
- 'longitude': self.sc_point.longitude,
753
- 'timezone': self.sc_point.timezone,
754
- 'country': self.sc_point.country,
755
- 'state': self.sc_point.state,
756
- 'county': self.sc_point.county,
757
- 'elevation': self.sc_point.elevation,
758
- 'offshore': self.sc_point.offshore,
759
- 'res_gids': res_gids,
760
- 'gid_counts': gid_counts,
761
- 'n_gids': self.sc_point.n_gids,
762
- 'area_sq_km': self.sc_point.area,
763
- }, index=[self.sc_point.gid])
821
+ {
822
+ SupplyCurveField.SC_POINT_GID: self.sc_point.gid,
823
+ SupplyCurveField.SC_ROW_IND: row_ind,
824
+ SupplyCurveField.SC_COL_IND: col_ind,
825
+ SupplyCurveField.GID: self.sc_point.gid,
826
+ SupplyCurveField.LATITUDE: self.sc_point.latitude,
827
+ SupplyCurveField.LONGITUDE: self.sc_point.longitude,
828
+ SupplyCurveField.TIMEZONE: self.sc_point.timezone,
829
+ SupplyCurveField.COUNTRY: self.sc_point.country,
830
+ SupplyCurveField.STATE: self.sc_point.state,
831
+ SupplyCurveField.COUNTY: self.sc_point.county,
832
+ SupplyCurveField.ELEVATION: self.sc_point.elevation,
833
+ SupplyCurveField.OFFSHORE: self.sc_point.offshore,
834
+ SupplyCurveField.RES_GIDS: res_gids,
835
+ SupplyCurveField.GID_COUNTS: gid_counts,
836
+ SupplyCurveField.N_GIDS: self.sc_point.n_gids,
837
+ SupplyCurveField.AREA_SQ_KM: self.sc_point.area,
838
+ },
839
+ index=[self.sc_point.gid],
840
+ )
764
841
 
765
842
  return self._meta
766
843
 
@@ -772,7 +849,7 @@ class BespokeSinglePlant:
772
849
  -------
773
850
  int
774
851
  """
775
- return int(self.sam_sys_inputs['wind_turbine_hub_ht'])
852
+ return int(self.sam_sys_inputs["wind_turbine_hub_ht"])
776
853
 
777
854
  @property
778
855
  def res_df(self):
@@ -792,21 +869,26 @@ class BespokeSinglePlant:
792
869
  ti = self._pre_loaded_data.time_index
793
870
 
794
871
  wd = self.get_weighted_res_dir()
795
- ws = self.get_weighted_res_ts(f'windspeed_{self.hub_height}m')
796
- temp = self.get_weighted_res_ts(f'temperature_{self.hub_height}m')
797
- pres = self.get_weighted_res_ts(f'pressure_{self.hub_height}m')
872
+ ws = self.get_weighted_res_ts(f"windspeed_{self.hub_height}m")
873
+ temp = self.get_weighted_res_ts(f"temperature_{self.hub_height}m")
874
+ pres = self.get_weighted_res_ts(f"pressure_{self.hub_height}m")
798
875
 
799
876
  # convert mbar to atm
800
877
  if np.nanmax(pres) > 1000:
801
878
  pres *= 9.86923e-6
802
879
 
803
- self._res_df = pd.DataFrame({'temperature': temp,
804
- 'pressure': pres,
805
- 'windspeed': ws,
806
- 'winddirection': wd}, index=ti)
807
-
808
- if 'time_index_step' in self.original_sam_sys_inputs:
809
- ti_step = self.original_sam_sys_inputs['time_index_step']
880
+ self._res_df = pd.DataFrame(
881
+ {
882
+ "temperature": temp,
883
+ "pressure": pres,
884
+ "windspeed": ws,
885
+ "winddirection": wd,
886
+ },
887
+ index=ti,
888
+ )
889
+
890
+ if "time_index_step" in self.original_sam_sys_inputs:
891
+ ti_step = self.original_sam_sys_inputs["time_index_step"]
810
892
  self._res_df = self._res_df.iloc[::ti_step]
811
893
 
812
894
  return self._res_df
@@ -857,9 +939,11 @@ class BespokeSinglePlant:
857
939
  ws_bins = JointPD._make_bins(*self._ws_bins)
858
940
  wd_bins = JointPD._make_bins(*self._wd_bins)
859
941
 
860
- hist_out = np.histogram2d(self.res_df['windspeed'],
861
- self.res_df['winddirection'],
862
- bins=(ws_bins, wd_bins))
942
+ hist_out = np.histogram2d(
943
+ self.res_df["windspeed"],
944
+ self.res_df["winddirection"],
945
+ bins=(ws_bins, wd_bins),
946
+ )
863
947
  self._wind_dist, self._ws_edges, self._wd_edges = hist_out
864
948
  self._wind_dist /= self._wind_dist.sum()
865
949
 
@@ -880,19 +964,20 @@ class BespokeSinglePlant:
880
964
  res_df = self.res_df[(self.res_df.index.year == year)]
881
965
  sam_inputs = copy.deepcopy(self.sam_sys_inputs)
882
966
 
883
- if 'lcoe_fcr' in self._out_req:
967
+ if "lcoe_fcr" in self._out_req:
884
968
  lcoe_kwargs = self.get_lcoe_kwargs()
885
969
  sam_inputs.update(lcoe_kwargs)
886
970
 
887
- i_wp = WindPower(res_df, self.meta, sam_inputs,
888
- output_request=self._out_req)
971
+ i_wp = WindPower(
972
+ res_df, self.meta, sam_inputs, output_request=self._out_req
973
+ )
889
974
  wind_plant_ts[year] = i_wp
890
975
 
891
976
  return wind_plant_ts
892
977
 
893
978
  @property
894
979
  def wind_plant_pd(self):
895
- """reV WindPowerPD compute object for plant layout optimization based
980
+ """ReV WindPowerPD compute object for plant layout optimization based
896
981
  on wind joint probability distribution
897
982
 
898
983
  Returns
@@ -902,14 +987,19 @@ class BespokeSinglePlant:
902
987
 
903
988
  if self._wind_plant_pd is None:
904
989
  wind_dist, ws_edges, wd_edges = self.wind_dist
905
- self._wind_plant_pd = WindPowerPD(ws_edges, wd_edges, wind_dist,
906
- self.meta, self.sam_sys_inputs,
907
- output_request=self._out_req)
990
+ self._wind_plant_pd = WindPowerPD(
991
+ ws_edges,
992
+ wd_edges,
993
+ wind_dist,
994
+ self.meta,
995
+ self.sam_sys_inputs,
996
+ output_request=self._out_req,
997
+ )
908
998
  return self._wind_plant_pd
909
999
 
910
1000
  @property
911
1001
  def wind_plant_ts(self):
912
- """reV WindPower compute object(s) based on wind resource timeseries
1002
+ """ReV WindPower compute object(s) based on wind resource timeseries
913
1003
  data keyed by year
914
1004
 
915
1005
  Returns
@@ -929,6 +1019,7 @@ class BespokeSinglePlant:
929
1019
  if self._plant_optm is None:
930
1020
  # put import here to delay breaking due to special dependencies
931
1021
  from reV.bespoke.place_turbines import PlaceTurbines
1022
+
932
1023
  self._plant_optm = PlaceTurbines(
933
1024
  self.wind_plant_pd,
934
1025
  self.objective_function,
@@ -938,7 +1029,8 @@ class BespokeSinglePlant:
938
1029
  self.include_mask,
939
1030
  self.pixel_side_length,
940
1031
  self.min_spacing,
941
- self.wake_loss_multiplier)
1032
+ self.wake_loss_multiplier,
1033
+ )
942
1034
 
943
1035
  return self._plant_optm
944
1036
 
@@ -946,22 +1038,24 @@ class BespokeSinglePlant:
946
1038
  """Recalculate the multi-year mean LCOE based on the multi-year mean
947
1039
  annual energy production (AEP)"""
948
1040
 
949
- if 'lcoe_fcr-means' in self.outputs:
1041
+ if "lcoe_fcr-means" in self.outputs:
950
1042
  lcoe_kwargs = self.get_lcoe_kwargs()
951
1043
 
952
- logger.debug('Recalulating multi-year mean LCOE using '
953
- 'multi-year mean AEP.')
1044
+ logger.debug(
1045
+ "Recalulating multi-year mean LCOE using "
1046
+ "multi-year mean AEP."
1047
+ )
954
1048
 
955
- fcr = lcoe_kwargs['fixed_charge_rate']
956
- cap_cost = lcoe_kwargs['capital_cost']
957
- foc = lcoe_kwargs['fixed_operating_cost']
958
- voc = lcoe_kwargs['variable_operating_cost']
959
- aep = self.outputs['annual_energy-means']
1049
+ fcr = lcoe_kwargs["fixed_charge_rate"]
1050
+ cap_cost = lcoe_kwargs["capital_cost"]
1051
+ foc = lcoe_kwargs["fixed_operating_cost"]
1052
+ voc = lcoe_kwargs["variable_operating_cost"]
1053
+ aep = self.outputs["annual_energy-means"]
960
1054
 
961
1055
  my_mean_lcoe = lcoe_fcr(fcr, cap_cost, foc, aep, voc)
962
1056
 
963
- self._outputs['lcoe_fcr-means'] = my_mean_lcoe
964
- self._meta['mean_lcoe'] = my_mean_lcoe
1057
+ self._outputs["lcoe_fcr-means"] = my_mean_lcoe
1058
+ self._meta[SupplyCurveField.MEAN_LCOE] = my_mean_lcoe
965
1059
 
966
1060
  def get_lcoe_kwargs(self):
967
1061
  """Get a namespace of arguments for calculating LCOE based on the
@@ -979,8 +1073,13 @@ class BespokeSinglePlant:
979
1073
  original_sam_sys_inputs, meta
980
1074
  """
981
1075
 
982
- kwargs_list = ['fixed_charge_rate', 'system_capacity', 'capital_cost',
983
- 'fixed_operating_cost', 'variable_operating_cost']
1076
+ kwargs_list = [
1077
+ "fixed_charge_rate",
1078
+ "system_capacity",
1079
+ "capital_cost",
1080
+ "fixed_operating_cost",
1081
+ "variable_operating_cost",
1082
+ ]
984
1083
  lcoe_kwargs = {}
985
1084
 
986
1085
  for kwarg in kwargs_list:
@@ -999,9 +1098,12 @@ class BespokeSinglePlant:
999
1098
 
1000
1099
  missing = [k for k in kwargs_list if k not in lcoe_kwargs]
1001
1100
  if any(missing):
1002
- msg = ('Could not find these LCOE kwargs in outputs, '
1003
- 'plant_optimizer, original_sam_sys_inputs, or meta: {}'
1004
- .format(missing))
1101
+ msg = (
1102
+ "Could not find these LCOE kwargs in outputs, "
1103
+ "plant_optimizer, original_sam_sys_inputs, or meta: {}".format(
1104
+ missing
1105
+ )
1106
+ )
1005
1107
  logger.error(msg)
1006
1108
  raise KeyError(msg)
1007
1109
 
@@ -1024,7 +1126,7 @@ class BespokeSinglePlant:
1024
1126
  """
1025
1127
  handler = res
1026
1128
  if isinstance(res, str):
1027
- if '*' in res:
1129
+ if "*" in res:
1028
1130
  handler = MultiYearWindResource
1029
1131
  else:
1030
1132
  handler = WindResource
@@ -1042,21 +1144,28 @@ class BespokeSinglePlant:
1042
1144
  missing.append(name)
1043
1145
 
1044
1146
  if any(missing):
1045
- msg = ('The reV bespoke module depends on the following special '
1046
- 'dependencies that were not found in the active '
1047
- 'environment: {}'.format(missing))
1147
+ msg = (
1148
+ "The reV bespoke module depends on the following special "
1149
+ "dependencies that were not found in the active "
1150
+ "environment: {}".format(missing)
1151
+ )
1048
1152
  logger.error(msg)
1049
1153
  raise ModuleNotFoundError(msg)
1050
1154
 
1051
1155
  @staticmethod
1052
- def _check_sys_inputs(plant1, plant2,
1053
- ignore=('wind_resource_model_choice',
1054
- 'wind_resource_data',
1055
- 'wind_turbine_powercurve_powerout',
1056
- 'hourly',
1057
- 'capital_cost',
1058
- 'fixed_operating_cost',
1059
- 'variable_operating_cost')):
1156
+ def _check_sys_inputs(
1157
+ plant1,
1158
+ plant2,
1159
+ ignore=(
1160
+ "wind_resource_model_choice",
1161
+ "wind_resource_data",
1162
+ "wind_turbine_powercurve_powerout",
1163
+ "hourly",
1164
+ "capital_cost",
1165
+ "fixed_operating_cost",
1166
+ "variable_operating_cost",
1167
+ ),
1168
+ ):
1060
1169
  """Check two reV-SAM models for matching system inputs.
1061
1170
 
1062
1171
  Parameters
@@ -1066,13 +1175,13 @@ class BespokeSinglePlant:
1066
1175
  """
1067
1176
  bad = []
1068
1177
  for k, v in plant1.sam_sys_inputs.items():
1069
- if k not in plant2.sam_sys_inputs:
1070
- bad.append(k)
1071
- elif str(v) != str(plant2.sam_sys_inputs[k]):
1178
+ if k not in plant2.sam_sys_inputs or str(v) != str(
1179
+ plant2.sam_sys_inputs[k]
1180
+ ):
1072
1181
  bad.append(k)
1073
1182
  bad = [b for b in bad if b not in ignore]
1074
1183
  if any(bad):
1075
- msg = 'Inputs no longer match: {}'.format(bad)
1184
+ msg = "Inputs no longer match: {}".format(bad)
1076
1185
  logger.error(msg)
1077
1186
  raise RuntimeError(msg)
1078
1187
 
@@ -1088,41 +1197,51 @@ class BespokeSinglePlant:
1088
1197
  BespokeSinglePlant.outputs property.
1089
1198
  """
1090
1199
 
1091
- logger.debug('Running {} years of SAM timeseries analysis for {}'
1092
- .format(len(self.years), self))
1200
+ logger.debug(
1201
+ "Running {} years of SAM timeseries analysis for {}".format(
1202
+ len(self.years), self
1203
+ )
1204
+ )
1093
1205
  self._wind_plant_ts = self.initialize_wind_plant_ts()
1094
1206
  for year, plant in self.wind_plant_ts.items():
1095
1207
  self._check_sys_inputs(plant, self.wind_plant_pd)
1096
1208
  try:
1097
1209
  plant.run_gen_and_econ()
1098
1210
  except Exception as e:
1099
- msg = ('{} failed while trying to run SAM WindPower '
1100
- 'timeseries analysis for {}'.format(self, year))
1211
+ msg = (
1212
+ "{} failed while trying to run SAM WindPower "
1213
+ "timeseries analysis for {}".format(self, year)
1214
+ )
1101
1215
  logger.exception(msg)
1102
1216
  raise RuntimeError(msg) from e
1103
1217
 
1104
1218
  for k, v in plant.outputs.items():
1105
- self._outputs[k + '-{}'.format(year)] = v
1219
+ self._outputs[k + "-{}".format(year)] = v
1106
1220
 
1107
1221
  means = {}
1108
1222
  for k1, v1 in self._outputs.items():
1109
- if isinstance(v1, Number) and parse_year(k1, option='boolean'):
1223
+ if isinstance(v1, Number) and parse_year(k1, option="boolean"):
1110
1224
  year = parse_year(k1)
1111
- base_str = k1.replace(str(year), '')
1112
- all_values = [v2 for k2, v2 in self._outputs.items()
1113
- if base_str in k2]
1114
- means[base_str + 'means'] = np.mean(all_values)
1225
+ base_str = k1.replace(str(year), "")
1226
+ all_values = [
1227
+ v2 for k2, v2 in self._outputs.items() if base_str in k2
1228
+ ]
1229
+ means[base_str + "means"] = np.mean(all_values)
1115
1230
 
1116
1231
  self._outputs.update(means)
1117
1232
 
1118
1233
  # copy dataset outputs to meta data for supply curve table summary
1119
- if 'cf_mean-means' in self.outputs:
1120
- self._meta.loc[:, 'mean_cf'] = self.outputs['cf_mean-means']
1121
- if 'lcoe_fcr-means' in self.outputs:
1122
- self._meta.loc[:, 'mean_lcoe'] = self.outputs['lcoe_fcr-means']
1234
+ if "cf_mean-means" in self.outputs:
1235
+ self._meta.loc[:, SupplyCurveField.MEAN_CF] = self.outputs[
1236
+ "cf_mean-means"
1237
+ ]
1238
+ if "lcoe_fcr-means" in self.outputs:
1239
+ self._meta.loc[:, SupplyCurveField.MEAN_LCOE] = self.outputs[
1240
+ "lcoe_fcr-means"
1241
+ ]
1123
1242
  self.recalc_lcoe()
1124
1243
 
1125
- logger.debug('Timeseries analysis complete!')
1244
+ logger.debug("Timeseries analysis complete!")
1126
1245
 
1127
1246
  return self.outputs
1128
1247
 
@@ -1138,13 +1257,14 @@ class BespokeSinglePlant:
1138
1257
  BespokeSinglePlant.outputs property.
1139
1258
  """
1140
1259
 
1141
- logger.debug('Running plant layout optimization for {}'.format(self))
1260
+ logger.debug("Running plant layout optimization for {}".format(self))
1142
1261
  try:
1143
1262
  self.plant_optimizer.place_turbines(**self.ga_kwargs)
1144
1263
  except Exception as e:
1145
- msg = ('{} failed while trying to run the '
1146
- 'turbine placement optimizer'
1147
- .format(self))
1264
+ msg = (
1265
+ "{} failed while trying to run the "
1266
+ "turbine placement optimizer".format(self)
1267
+ )
1148
1268
  logger.exception(msg)
1149
1269
  raise RuntimeError(msg) from e
1150
1270
 
@@ -1162,62 +1282,76 @@ class BespokeSinglePlant:
1162
1282
  pxc = json.dumps(pxc)
1163
1283
  pyc = json.dumps(pyc)
1164
1284
 
1165
- self._meta["turbine_x_coords"] = txc
1166
- self._meta["turbine_y_coords"] = tyc
1285
+ self._meta[SupplyCurveField.TURBINE_X_COORDS] = txc
1286
+ self._meta[SupplyCurveField.TURBINE_Y_COORDS] = tyc
1167
1287
  self._meta["possible_x_coords"] = pxc
1168
1288
  self._meta["possible_y_coords"] = pyc
1169
1289
 
1170
1290
  self._outputs["full_polygons"] = self.plant_optimizer.full_polygons
1171
- self._outputs["packing_polygons"] = \
1291
+ self._outputs["packing_polygons"] = (
1172
1292
  self.plant_optimizer.packing_polygons
1293
+ )
1173
1294
  self._outputs["system_capacity"] = self.plant_optimizer.capacity
1174
1295
 
1175
1296
  self._meta["n_turbines"] = self.plant_optimizer.nturbs
1176
1297
  self._meta["bespoke_aep"] = self.plant_optimizer.aep
1177
1298
  self._meta["bespoke_objective"] = self.plant_optimizer.objective
1178
- self._meta["bespoke_capital_cost"] = \
1179
- self.plant_optimizer.capital_cost
1180
- self._meta["bespoke_fixed_operating_cost"] = \
1299
+ self._meta["bespoke_capital_cost"] = self.plant_optimizer.capital_cost
1300
+ self._meta["bespoke_fixed_operating_cost"] = (
1181
1301
  self.plant_optimizer.fixed_operating_cost
1182
- self._meta["bespoke_variable_operating_cost"] = \
1302
+ )
1303
+ self._meta["bespoke_variable_operating_cost"] = (
1183
1304
  self.plant_optimizer.variable_operating_cost
1305
+ )
1184
1306
  self._meta["included_area"] = self.plant_optimizer.area
1185
- self._meta["included_area_capacity_density"] = \
1307
+ self._meta["included_area_capacity_density"] = (
1186
1308
  self.plant_optimizer.capacity_density
1187
- self._meta["convex_hull_area"] = \
1188
- self.plant_optimizer.convex_hull_area
1189
- self._meta["convex_hull_capacity_density"] = \
1309
+ )
1310
+ self._meta["convex_hull_area"] = self.plant_optimizer.convex_hull_area
1311
+ self._meta["convex_hull_capacity_density"] = (
1190
1312
  self.plant_optimizer.convex_hull_capacity_density
1191
- self._meta["full_cell_capacity_density"] = \
1313
+ )
1314
+ self._meta["full_cell_capacity_density"] = (
1192
1315
  self.plant_optimizer.full_cell_capacity_density
1316
+ )
1193
1317
 
1194
- logger.debug('Plant layout optimization complete!')
1318
+ logger.debug("Plant layout optimization complete!")
1195
1319
 
1196
1320
  # copy dataset outputs to meta data for supply curve table summary
1197
1321
  # convert SAM system capacity in kW to reV supply curve cap in MW
1198
- self._meta['capacity'] = self.outputs['system_capacity'] / 1e3
1322
+ self._meta[SupplyCurveField.CAPACITY] = (
1323
+ self.outputs["system_capacity"] / 1e3
1324
+ )
1199
1325
 
1200
1326
  # add required ReEDS multipliers to meta
1201
1327
  baseline_cost = self.plant_optimizer.capital_cost_per_kw(
1202
- capacity_mw=self._baseline_cap_mw)
1203
- self._meta['eos_mult'] = (self.plant_optimizer.capital_cost
1204
- / self.plant_optimizer.capacity
1205
- / baseline_cost)
1206
- self._meta['reg_mult'] = (self.sam_sys_inputs
1207
- .get("capital_cost_multiplier", 1))
1328
+ capacity_mw=self._baseline_cap_mw
1329
+ )
1330
+ self._meta[SupplyCurveField.EOS_MULT] = (
1331
+ self.plant_optimizer.capital_cost
1332
+ / self.plant_optimizer.capacity
1333
+ / baseline_cost
1334
+ )
1335
+ self._meta[SupplyCurveField.REG_MULT] = self.sam_sys_inputs.get(
1336
+ "capital_cost_multiplier", 1
1337
+ )
1208
1338
 
1209
1339
  return self.outputs
1210
1340
 
1211
1341
  def agg_data_layers(self):
1212
1342
  """Aggregate optional data layers if requested and save to self.meta"""
1213
1343
  if self._data_layers is not None:
1214
- logger.debug('Aggregating {} extra data layers.'
1215
- .format(len(self._data_layers)))
1344
+ logger.debug(
1345
+ "Aggregating {} extra data layers.".format(
1346
+ len(self._data_layers)
1347
+ )
1348
+ )
1216
1349
  point_summary = self.meta.to_dict()
1217
- point_summary = self.sc_point.agg_data_layers(point_summary,
1218
- self._data_layers)
1350
+ point_summary = self.sc_point.agg_data_layers(
1351
+ point_summary, self._data_layers
1352
+ )
1219
1353
  self._meta = pd.DataFrame(point_summary)
1220
- logger.debug('Finished aggregating extra data layers.')
1354
+ logger.debug("Finished aggregating extra data layers.")
1221
1355
 
1222
1356
  @property
1223
1357
  def outputs(self):
@@ -1246,9 +1380,10 @@ class BespokeSinglePlant:
1246
1380
 
1247
1381
  with cls(*args, **kwargs) as bsp:
1248
1382
  if bsp._prior_meta:
1249
- logger.debug('Skipping bespoke plant optimization for gid {}. '
1250
- 'Received prior meta data for this point.'
1251
- .format(bsp.gid))
1383
+ logger.debug(
1384
+ "Skipping bespoke plant optimization for gid {}. "
1385
+ "Received prior meta data for this point.".format(bsp.gid)
1386
+ )
1252
1387
  else:
1253
1388
  _ = bsp.run_plant_optimization()
1254
1389
 
@@ -1257,9 +1392,9 @@ class BespokeSinglePlant:
1257
1392
 
1258
1393
  meta = bsp.meta
1259
1394
  out = bsp.outputs
1260
- out['meta'] = meta
1395
+ out["meta"] = meta
1261
1396
  for year, ti in zip(bsp.years, bsp.annual_time_indexes):
1262
- out['time_index-{}'.format(year)] = ti
1397
+ out["time_index-{}".format(year)] = ti
1263
1398
 
1264
1399
  return out
1265
1400
 
@@ -1267,17 +1402,36 @@ class BespokeSinglePlant:
1267
1402
  class BespokeWindPlants(BaseAggregation):
1268
1403
  """BespokeWindPlants"""
1269
1404
 
1270
- def __init__(self, excl_fpath, res_fpath, tm_dset, objective_function,
1271
- capital_cost_function, fixed_operating_cost_function,
1272
- variable_operating_cost_function, project_points,
1273
- sam_files, min_spacing='5x', wake_loss_multiplier=1,
1274
- ga_kwargs=None, output_request=('system_capacity', 'cf_mean'),
1275
- ws_bins=(0.0, 20.0, 5.0), wd_bins=(0.0, 360.0, 45.0),
1276
- excl_dict=None, area_filter_kernel='queen', min_area=None,
1277
- resolution=64, excl_area=None, data_layers=None,
1278
- pre_extract_inclusions=False, prior_run=None, gid_map=None,
1279
- bias_correct=None, pre_load_data=False):
1280
- """reV bespoke analysis class.
1405
+ def __init__(
1406
+ self,
1407
+ excl_fpath,
1408
+ res_fpath,
1409
+ tm_dset,
1410
+ objective_function,
1411
+ capital_cost_function,
1412
+ fixed_operating_cost_function,
1413
+ variable_operating_cost_function,
1414
+ project_points,
1415
+ sam_files,
1416
+ min_spacing="5x",
1417
+ wake_loss_multiplier=1,
1418
+ ga_kwargs=None,
1419
+ output_request=("system_capacity", "cf_mean"),
1420
+ ws_bins=(0.0, 20.0, 5.0),
1421
+ wd_bins=(0.0, 360.0, 45.0),
1422
+ excl_dict=None,
1423
+ area_filter_kernel="queen",
1424
+ min_area=None,
1425
+ resolution=64,
1426
+ excl_area=None,
1427
+ data_layers=None,
1428
+ pre_extract_inclusions=False,
1429
+ prior_run=None,
1430
+ gid_map=None,
1431
+ bias_correct=None,
1432
+ pre_load_data=False,
1433
+ ):
1434
+ r"""ReV bespoke analysis class.
1281
1435
 
1282
1436
  Much like generation, ``reV`` bespoke analysis runs SAM
1283
1437
  simulations by piping in renewable energy resource data (usually
@@ -1685,39 +1839,58 @@ class BespokeWindPlants(BaseAggregation):
1685
1839
  """
1686
1840
 
1687
1841
  log_versions(logger)
1688
- logger.info('Initializing BespokeWindPlants...')
1689
- logger.info('Resource filepath: {}'.format(res_fpath))
1690
- logger.info('Exclusion filepath: {}'.format(excl_fpath))
1691
- logger.debug('Exclusion dict: {}'.format(excl_dict))
1692
- logger.info('Bespoke objective function: {}'
1693
- .format(objective_function))
1694
- logger.info('Bespoke capital cost function: {}'
1695
- .format(capital_cost_function))
1696
- logger.info('Bespoke fixed operating cost function: {}'
1697
- .format(fixed_operating_cost_function))
1698
- logger.info('Bespoke variable operating cost function: {}'
1699
- .format(variable_operating_cost_function))
1700
- logger.info('Bespoke wake loss multiplier: {}'
1701
- .format(wake_loss_multiplier))
1702
- logger.info('Bespoke GA initialization kwargs: {}'.format(ga_kwargs))
1703
-
1704
- logger.info('Bespoke pre-extracting exclusions: {}'
1705
- .format(pre_extract_inclusions))
1706
- logger.info('Bespoke pre-extracting resource data: {}'
1707
- .format(pre_load_data))
1708
- logger.info('Bespoke prior run: {}'.format(prior_run))
1709
- logger.info('Bespoke GID map: {}'.format(gid_map))
1710
- logger.info('Bespoke bias correction table: {}'.format(bias_correct))
1842
+ logger.info("Initializing BespokeWindPlants...")
1843
+ logger.info("Resource filepath: {}".format(res_fpath))
1844
+ logger.info("Exclusion filepath: {}".format(excl_fpath))
1845
+ logger.debug("Exclusion dict: {}".format(excl_dict))
1846
+ logger.info(
1847
+ "Bespoke objective function: {}".format(objective_function)
1848
+ )
1849
+ logger.info(
1850
+ "Bespoke capital cost function: {}".format(capital_cost_function)
1851
+ )
1852
+ logger.info(
1853
+ "Bespoke fixed operating cost function: {}".format(
1854
+ fixed_operating_cost_function
1855
+ )
1856
+ )
1857
+ logger.info(
1858
+ "Bespoke variable operating cost function: {}".format(
1859
+ variable_operating_cost_function
1860
+ )
1861
+ )
1862
+ logger.info(
1863
+ "Bespoke wake loss multiplier: {}".format(wake_loss_multiplier)
1864
+ )
1865
+ logger.info("Bespoke GA initialization kwargs: {}".format(ga_kwargs))
1866
+
1867
+ logger.info(
1868
+ "Bespoke pre-extracting exclusions: {}".format(
1869
+ pre_extract_inclusions
1870
+ )
1871
+ )
1872
+ logger.info(
1873
+ "Bespoke pre-extracting resource data: {}".format(pre_load_data)
1874
+ )
1875
+ logger.info("Bespoke prior run: {}".format(prior_run))
1876
+ logger.info("Bespoke GID map: {}".format(gid_map))
1877
+ logger.info("Bespoke bias correction table: {}".format(bias_correct))
1711
1878
 
1712
1879
  BespokeSinglePlant.check_dependencies()
1713
1880
 
1714
1881
  self._project_points = self._parse_points(project_points, sam_files)
1715
1882
 
1716
- super().__init__(excl_fpath, tm_dset, excl_dict=excl_dict,
1717
- area_filter_kernel=area_filter_kernel,
1718
- min_area=min_area, resolution=resolution,
1719
- excl_area=excl_area, gids=self._project_points.gids,
1720
- pre_extract_inclusions=pre_extract_inclusions)
1883
+ super().__init__(
1884
+ excl_fpath,
1885
+ tm_dset,
1886
+ excl_dict=excl_dict,
1887
+ area_filter_kernel=area_filter_kernel,
1888
+ min_area=min_area,
1889
+ resolution=resolution,
1890
+ excl_area=excl_area,
1891
+ gids=self._project_points.gids,
1892
+ pre_extract_inclusions=pre_extract_inclusions,
1893
+ )
1721
1894
 
1722
1895
  self._res_fpath = res_fpath
1723
1896
  self._obj_fun = objective_function
@@ -1742,8 +1915,11 @@ class BespokeWindPlants(BaseAggregation):
1742
1915
 
1743
1916
  self._slice_lookup = None
1744
1917
 
1745
- logger.info('Initialized BespokeWindPlants with project points: {}'
1746
- .format(self._project_points))
1918
+ logger.info(
1919
+ "Initialized BespokeWindPlants with project points: {}".format(
1920
+ self._project_points
1921
+ )
1922
+ )
1747
1923
 
1748
1924
  @staticmethod
1749
1925
  def _parse_points(points, sam_configs):
@@ -1755,8 +1931,8 @@ class BespokeWindPlants(BaseAggregation):
1755
1931
  Slice or list specifying project points, string pointing to a
1756
1932
  project points csv, or a fully instantiated PointsControl object.
1757
1933
  Can also be a single site integer value. Points csv should have
1758
- 'gid' and 'config' column, the config maps to the sam_configs dict
1759
- keys.
1934
+ `SupplyCurveField.GID` and 'config' column, the config maps to the
1935
+ sam_configs dict keys.
1760
1936
  sam_configs : dict | str | SAMConfig
1761
1937
  SAM input configuration ID(s) and file path(s). Keys are the SAM
1762
1938
  config ID(s) which map to the config column in the project points
@@ -1770,8 +1946,13 @@ class BespokeWindPlants(BaseAggregation):
1770
1946
  Project points object laying out the supply curve gids to
1771
1947
  analyze.
1772
1948
  """
1773
- pc = Gen.get_pc(points, points_range=None, sam_configs=sam_configs,
1774
- tech='windpower', sites_per_worker=1)
1949
+ pc = Gen.get_pc(
1950
+ points,
1951
+ points_range=None,
1952
+ sam_configs=sam_configs,
1953
+ tech="windpower",
1954
+ sites_per_worker=1,
1955
+ )
1775
1956
 
1776
1957
  return pc.project_points
1777
1958
 
@@ -1801,15 +1982,15 @@ class BespokeWindPlants(BaseAggregation):
1801
1982
 
1802
1983
  if prior_run is not None:
1803
1984
  assert os.path.isfile(prior_run)
1804
- assert prior_run.endswith('.h5')
1985
+ assert prior_run.endswith(".h5")
1805
1986
 
1806
- with Outputs(prior_run, mode='r') as f:
1987
+ with Outputs(prior_run, mode="r") as f:
1807
1988
  meta = f.meta
1808
1989
 
1809
1990
  # pylint: disable=no-member
1810
1991
  for col in meta.columns:
1811
1992
  val = meta[col].values[0]
1812
- if isinstance(val, str) and val[0] == '[' and val[-1] == ']':
1993
+ if isinstance(val, str) and val[0] == "[" and val[-1] == "]":
1813
1994
  meta[col] = meta[col].apply(json.loads)
1814
1995
 
1815
1996
  return meta
@@ -1830,7 +2011,7 @@ class BespokeWindPlants(BaseAggregation):
1830
2011
  meta = None
1831
2012
 
1832
2013
  if self._prior_meta is not None:
1833
- mask = self._prior_meta['gid'] == gid
2014
+ mask = self._prior_meta[SupplyCurveField.GID] == gid
1834
2015
  if any(mask):
1835
2016
  meta = self._prior_meta[mask]
1836
2017
 
@@ -1846,14 +2027,19 @@ class BespokeWindPlants(BaseAggregation):
1846
2027
  for path in paths:
1847
2028
  if not os.path.exists(path):
1848
2029
  raise FileNotFoundError(
1849
- 'Could not find required exclusions file: '
1850
- '{}'.format(path))
2030
+ "Could not find required exclusions file: " "{}".format(
2031
+ path
2032
+ )
2033
+ )
1851
2034
 
1852
2035
  with ExclusionLayers(paths) as excl:
1853
2036
  if self._tm_dset not in excl:
1854
- raise FileInputError('Could not find techmap dataset "{}" '
1855
- 'in the exclusions file(s): {}'
1856
- .format(self._tm_dset, paths))
2037
+ raise FileInputError(
2038
+ 'Could not find techmap dataset "{}" '
2039
+ "in the exclusions file(s): {}".format(
2040
+ self._tm_dset, paths
2041
+ )
2042
+ )
1857
2043
 
1858
2044
  # just check that this file exists, cannot check res_fpath if *glob
1859
2045
  Handler = BespokeSinglePlant.get_wind_handler(self._res_fpath)
@@ -1861,22 +2047,28 @@ class BespokeWindPlants(BaseAggregation):
1861
2047
  assert any(f.dsets)
1862
2048
 
1863
2049
  def _pre_load_data(self, pre_load_data):
1864
- """Pre-load resource data, if requested. """
2050
+ """Pre-load resource data, if requested."""
1865
2051
  if not pre_load_data:
1866
2052
  return
1867
2053
 
1868
- sc_gid_to_hh = {gid: self._hh_for_sc_gid(gid)
1869
- for gid in self._project_points.df["gid"]}
2054
+ sc_gid_to_hh = {
2055
+ gid: self._hh_for_sc_gid(gid)
2056
+ for gid in self._project_points.df[ResourceMetaField.GID]
2057
+ }
1870
2058
 
1871
2059
  with ExclusionLayers(self._excl_fpath) as excl:
1872
2060
  tm = excl[self._tm_dset]
1873
2061
 
1874
2062
  scp_kwargs = {"shape": self.shape, "resolution": self._resolution}
1875
- slices = {gid: SupplyCurvePoint.get_agg_slices(gid=gid, **scp_kwargs)
1876
- for gid in self._project_points.df["gid"]}
2063
+ slices = {
2064
+ gid: SupplyCurvePoint.get_agg_slices(gid=gid, **scp_kwargs)
2065
+ for gid in self._project_points.df[ResourceMetaField.GID]
2066
+ }
1877
2067
 
1878
- sc_gid_to_res_gid = {gid: sorted(set(tm[slx, sly].flatten()))
1879
- for gid, (slx, sly) in slices.items()}
2068
+ sc_gid_to_res_gid = {
2069
+ gid: sorted(set(tm[slx, sly].flatten()))
2070
+ for gid, (slx, sly) in slices.items()
2071
+ }
1880
2072
 
1881
2073
  for sc_gid, res_gids in sc_gid_to_res_gid.items():
1882
2074
  if res_gids[0] < 0:
@@ -1884,13 +2076,14 @@ class BespokeWindPlants(BaseAggregation):
1884
2076
 
1885
2077
  if self._gid_map is not None:
1886
2078
  for sc_gid, res_gids in sc_gid_to_res_gid.items():
1887
- sc_gid_to_res_gid[sc_gid] = sorted(self._gid_map[g]
1888
- for g in res_gids)
2079
+ sc_gid_to_res_gid[sc_gid] = sorted(
2080
+ self._gid_map[g] for g in res_gids
2081
+ )
1889
2082
 
1890
2083
  logger.info("Pre-loading resource data for Bespoke run... ")
1891
- self._pre_loaded_data = BespokeMultiPlantData(self._res_fpath,
1892
- sc_gid_to_hh,
1893
- sc_gid_to_res_gid)
2084
+ self._pre_loaded_data = BespokeMultiPlantData(
2085
+ self._res_fpath, sc_gid_to_hh, sc_gid_to_res_gid
2086
+ )
1894
2087
 
1895
2088
  def _hh_for_sc_gid(self, sc_gid):
1896
2089
  """Fetch the hh for a given sc_gid"""
@@ -1898,7 +2091,7 @@ class BespokeWindPlants(BaseAggregation):
1898
2091
  return int(config["wind_turbine_hub_ht"])
1899
2092
 
1900
2093
  def _pre_loaded_data_for_sc_gid(self, sc_gid):
1901
- """Pre-load data for a given SC GID, if requested. """
2094
+ """Pre-load data for a given SC GID, if requested."""
1902
2095
  if self._pre_loaded_data is None:
1903
2096
  return None
1904
2097
 
@@ -1926,9 +2119,12 @@ class BespokeWindPlants(BaseAggregation):
1926
2119
  if self._bias_correct is not None:
1927
2120
  h5_gids = []
1928
2121
  try:
1929
- scp_kwargs = dict(gid=gid, excl=self._excl_fpath,
1930
- tm_dset=self._tm_dset,
1931
- resolution=self._resolution)
2122
+ scp_kwargs = dict(
2123
+ gid=gid,
2124
+ excl=self._excl_fpath,
2125
+ tm_dset=self._tm_dset,
2126
+ resolution=self._resolution,
2127
+ )
1932
2128
  with SupplyCurvePoint(**scp_kwargs) as scp:
1933
2129
  h5_gids = scp.h5_gid_set
1934
2130
  except EmptySupplyCurvePointError:
@@ -1972,7 +2168,7 @@ class BespokeWindPlants(BaseAggregation):
1972
2168
  -------
1973
2169
  pd.DataFrame
1974
2170
  """
1975
- meta = [self.outputs[g]['meta'] for g in self.completed_gids]
2171
+ meta = [self.outputs[g]["meta"] for g in self.completed_gids]
1976
2172
  if len(self.completed_gids) > 1:
1977
2173
  meta = pd.concat(meta, axis=0)
1978
2174
  else:
@@ -1981,10 +2177,11 @@ class BespokeWindPlants(BaseAggregation):
1981
2177
 
1982
2178
  @property
1983
2179
  def slice_lookup(self):
1984
- """dict | None: Lookup mapping sc_point_gid to exclusion slice. """
2180
+ """Dict | None: Lookup mapping sc_point_gid to exclusion slice."""
1985
2181
  if self._slice_lookup is None and self._inclusion_mask is not None:
1986
- with SupplyCurveExtent(self._excl_fpath,
1987
- resolution=self._resolution) as sc:
2182
+ with SupplyCurveExtent(
2183
+ self._excl_fpath, resolution=self._resolution
2184
+ ) as sc:
1988
2185
  assert self.shape == self._inclusion_mask.shape
1989
2186
  self._slice_lookup = sc.get_slice_lookup(self.gids)
1990
2187
 
@@ -2013,8 +2210,13 @@ class BespokeWindPlants(BaseAggregation):
2013
2210
  site_data = self._project_points.df.iloc[gid_idx]
2014
2211
 
2015
2212
  site_sys_inputs = self._project_points[gid][1]
2016
- site_sys_inputs.update({k: v for k, v in site_data.to_dict().items()
2017
- if not (isinstance(v, float) and np.isnan(v))})
2213
+ site_sys_inputs.update(
2214
+ {
2215
+ k: v
2216
+ for k, v in site_data.to_dict().items()
2217
+ if not (isinstance(v, float) and np.isnan(v))
2218
+ }
2219
+ )
2018
2220
  return site_sys_inputs
2019
2221
 
2020
2222
  def _init_fout(self, out_fpath, sample):
@@ -2033,13 +2235,14 @@ class BespokeWindPlants(BaseAggregation):
2033
2235
  if not os.path.exists(out_dir):
2034
2236
  create_dirs(out_dir)
2035
2237
 
2036
- with Outputs(out_fpath, mode='w') as f:
2037
- f._set_meta('meta', self.meta, attrs={})
2038
- ti_dsets = [d for d in sample.keys()
2039
- if d.startswith('time_index-')]
2238
+ with Outputs(out_fpath, mode="w") as f:
2239
+ f._set_meta("meta", self.meta, attrs={})
2240
+ ti_dsets = [
2241
+ d for d in sample.keys() if d.startswith("time_index-")
2242
+ ]
2040
2243
  for dset in ti_dsets:
2041
2244
  f._set_time_index(dset, sample[dset], attrs={})
2042
- f._set_time_index('time_index', sample[dset], attrs={})
2245
+ f._set_time_index("time_index", sample[dset], attrs={})
2043
2246
 
2044
2247
  def _collect_out_arr(self, dset, sample):
2045
2248
  """Collect single-plant data arrays into complete arrays with data from
@@ -2070,8 +2273,9 @@ class BespokeWindPlants(BaseAggregation):
2070
2273
  shape = (len(single_arr), len(self.completed_gids))
2071
2274
  sample_num = single_arr[0]
2072
2275
  else:
2073
- msg = ('Not writing dataset "{}" of type "{}" to disk.'
2074
- .format(dset, type(single_arr)))
2276
+ msg = 'Not writing dataset "{}" of type "{}" to disk.'.format(
2277
+ dset, type(single_arr)
2278
+ )
2075
2279
  logger.info(msg)
2076
2280
  return None
2077
2281
 
@@ -2082,8 +2286,9 @@ class BespokeWindPlants(BaseAggregation):
2082
2286
  full_arr = np.zeros(shape, dtype=dtype)
2083
2287
 
2084
2288
  # collect data from all wind plants
2085
- logger.info('Collecting dataset "{}" with final shape {}'
2086
- .format(dset, shape))
2289
+ logger.info(
2290
+ 'Collecting dataset "{}" with final shape {}'.format(dset, shape)
2291
+ )
2087
2292
  for i, gid in enumerate(self.completed_gids):
2088
2293
  if len(full_arr.shape) == 1:
2089
2294
  full_arr[i] = self.outputs[gid][dset]
@@ -2107,16 +2312,18 @@ class BespokeWindPlants(BaseAggregation):
2107
2312
  Full filepath to desired .h5 output file, the .h5 extension has
2108
2313
  been added if it was not already present.
2109
2314
  """
2110
- if not out_fpath.endswith('.h5'):
2111
- out_fpath += '.h5'
2315
+ if not out_fpath.endswith(".h5"):
2316
+ out_fpath += ".h5"
2112
2317
 
2113
2318
  if ModuleName.BESPOKE not in out_fpath:
2114
2319
  extension_with_module = "_{}.h5".format(ModuleName.BESPOKE)
2115
2320
  out_fpath = out_fpath.replace(".h5", extension_with_module)
2116
2321
 
2117
2322
  if not self.completed_gids:
2118
- msg = ("No output data found! It is likely that all requested "
2119
- "points are excluded.")
2323
+ msg = (
2324
+ "No output data found! It is likely that all requested "
2325
+ "points are excluded."
2326
+ )
2120
2327
  logger.warning(msg)
2121
2328
  warn(msg)
2122
2329
  return out_fpath
@@ -2124,49 +2331,69 @@ class BespokeWindPlants(BaseAggregation):
2124
2331
  sample = self.outputs[self.completed_gids[0]]
2125
2332
  self._init_fout(out_fpath, sample)
2126
2333
 
2127
- dsets = [d for d in sample.keys()
2128
- if not d.startswith('time_index-')
2129
- and d != 'meta']
2130
- with Outputs(out_fpath, mode='a') as f:
2334
+ dsets = [
2335
+ d
2336
+ for d in sample.keys()
2337
+ if not d.startswith("time_index-") and d != "meta"
2338
+ ]
2339
+ with Outputs(out_fpath, mode="a") as f:
2131
2340
  for dset in dsets:
2132
2341
  full_arr = self._collect_out_arr(dset, sample)
2133
2342
  if full_arr is not None:
2134
2343
  dset_no_year = dset
2135
- if parse_year(dset, option='boolean'):
2344
+ if parse_year(dset, option="boolean"):
2136
2345
  year = parse_year(dset)
2137
- dset_no_year = dset.replace('-{}'.format(year), '')
2346
+ dset_no_year = dset.replace("-{}".format(year), "")
2138
2347
 
2139
2348
  attrs = BespokeSinglePlant.OUT_ATTRS.get(dset_no_year, {})
2140
2349
  attrs = copy.deepcopy(attrs)
2141
- dtype = attrs.pop('dtype', np.float32)
2142
- chunks = attrs.pop('chunks', None)
2350
+ dtype = attrs.pop("dtype", np.float32)
2351
+ chunks = attrs.pop("chunks", None)
2143
2352
  try:
2144
- f.write_dataset(dset, full_arr, dtype, chunks=chunks,
2145
- attrs=attrs)
2353
+ f.write_dataset(
2354
+ dset, full_arr, dtype, chunks=chunks, attrs=attrs
2355
+ )
2146
2356
  except Exception as e:
2147
2357
  msg = 'Failed to write "{}" to disk.'.format(dset)
2148
2358
  logger.exception(msg)
2149
- raise IOError(msg) from e
2359
+ raise OSError(msg) from e
2150
2360
 
2151
- logger.info('Saved output data to: {}'.format(out_fpath))
2361
+ logger.info("Saved output data to: {}".format(out_fpath))
2152
2362
  return out_fpath
2153
2363
 
2154
2364
  # pylint: disable=arguments-renamed
2155
2365
  @classmethod
2156
- def run_serial(cls, excl_fpath, res_fpath, tm_dset,
2157
- sam_sys_inputs, objective_function,
2158
- capital_cost_function,
2159
- fixed_operating_cost_function,
2160
- variable_operating_cost_function,
2161
- min_spacing='5x', wake_loss_multiplier=1, ga_kwargs=None,
2162
- output_request=('system_capacity', 'cf_mean'),
2163
- ws_bins=(0.0, 20.0, 5.0), wd_bins=(0.0, 360.0, 45.0),
2164
- excl_dict=None, inclusion_mask=None,
2165
- area_filter_kernel='queen', min_area=None,
2166
- resolution=64, excl_area=0.0081, data_layers=None,
2167
- gids=None, exclusion_shape=None, slice_lookup=None,
2168
- prior_meta=None, gid_map=None, bias_correct=None,
2169
- pre_loaded_data=None):
2366
+ def run_serial(
2367
+ cls,
2368
+ excl_fpath,
2369
+ res_fpath,
2370
+ tm_dset,
2371
+ sam_sys_inputs,
2372
+ objective_function,
2373
+ capital_cost_function,
2374
+ fixed_operating_cost_function,
2375
+ variable_operating_cost_function,
2376
+ min_spacing="5x",
2377
+ wake_loss_multiplier=1,
2378
+ ga_kwargs=None,
2379
+ output_request=("system_capacity", "cf_mean"),
2380
+ ws_bins=(0.0, 20.0, 5.0),
2381
+ wd_bins=(0.0, 360.0, 45.0),
2382
+ excl_dict=None,
2383
+ inclusion_mask=None,
2384
+ area_filter_kernel="queen",
2385
+ min_area=None,
2386
+ resolution=64,
2387
+ excl_area=0.0081,
2388
+ data_layers=None,
2389
+ gids=None,
2390
+ exclusion_shape=None,
2391
+ slice_lookup=None,
2392
+ prior_meta=None,
2393
+ gid_map=None,
2394
+ bias_correct=None,
2395
+ pre_loaded_data=None,
2396
+ ):
2170
2397
  """
2171
2398
  Standalone serial method to run bespoke optimization.
2172
2399
  See BespokeWindPlants docstring for parameter description.
@@ -2195,18 +2422,19 @@ class BespokeWindPlants(BaseAggregation):
2195
2422
  Handler = BespokeSinglePlant.get_wind_handler(res_fpath)
2196
2423
 
2197
2424
  # pre-extract handlers so they are not repeatedly initialized
2198
- file_kwargs = {'excl_dict': excl_dict,
2199
- 'area_filter_kernel': area_filter_kernel,
2200
- 'min_area': min_area,
2201
- 'h5_handler': Handler,
2202
- }
2425
+ file_kwargs = {
2426
+ "excl_dict": excl_dict,
2427
+ "area_filter_kernel": area_filter_kernel,
2428
+ "min_area": min_area,
2429
+ "h5_handler": Handler,
2430
+ }
2203
2431
 
2204
2432
  with AggFileHandler(excl_fpath, res_fpath, **file_kwargs) as fh:
2205
2433
  n_finished = 0
2206
2434
  for gid in gids:
2207
2435
  gid_inclusions = cls._get_gid_inclusion_mask(
2208
- inclusion_mask, gid, slice_lookup,
2209
- resolution=resolution)
2436
+ inclusion_mask, gid, slice_lookup, resolution=resolution
2437
+ )
2210
2438
  try:
2211
2439
  bsp_plant_out = BespokeSinglePlant.run(
2212
2440
  gid,
@@ -2234,20 +2462,26 @@ class BespokeWindPlants(BaseAggregation):
2234
2462
  gid_map=gid_map,
2235
2463
  bias_correct=bias_correct,
2236
2464
  pre_loaded_data=pre_loaded_data,
2237
- close=False)
2465
+ close=False,
2466
+ )
2238
2467
 
2239
2468
  except EmptySupplyCurvePointError:
2240
- logger.debug('SC gid {} is fully excluded or does not '
2241
- 'have any valid source data!'.format(gid))
2469
+ logger.debug(
2470
+ "SC gid {} is fully excluded or does not "
2471
+ "have any valid source data!".format(gid)
2472
+ )
2242
2473
  except Exception as e:
2243
- msg = 'SC gid {} failed!'.format(gid)
2474
+ msg = "SC gid {} failed!".format(gid)
2244
2475
  logger.exception(msg)
2245
2476
  raise RuntimeError(msg) from e
2246
2477
  else:
2247
2478
  n_finished += 1
2248
- logger.debug('Serial bespoke: '
2249
- '{} out of {} points complete'
2250
- .format(n_finished, len(gids)))
2479
+ logger.debug(
2480
+ "Serial bespoke: "
2481
+ "{} out of {} points complete".format(
2482
+ n_finished, len(gids)
2483
+ )
2484
+ )
2251
2485
  log_mem(logger)
2252
2486
  out[gid] = bsp_plant_out
2253
2487
 
@@ -2269,17 +2503,18 @@ class BespokeWindPlants(BaseAggregation):
2269
2503
  Bespoke outputs keyed by sc point gid
2270
2504
  """
2271
2505
 
2272
- logger.info('Running bespoke optimization for points {} through {} '
2273
- 'at a resolution of {} on {} cores.'
2274
- .format(self.gids[0], self.gids[-1], self._resolution,
2275
- max_workers))
2506
+ logger.info(
2507
+ "Running bespoke optimization for points {} through {} "
2508
+ "at a resolution of {} on {} cores.".format(
2509
+ self.gids[0], self.gids[-1], self._resolution, max_workers
2510
+ )
2511
+ )
2276
2512
 
2277
2513
  futures = []
2278
2514
  out = {}
2279
2515
  n_finished = 0
2280
- loggers = [__name__, 'reV.supply_curve.point_summary', 'reV']
2516
+ loggers = [__name__, "reV.supply_curve.point_summary", "reV"]
2281
2517
  with SpawnProcessPool(max_workers=max_workers, loggers=loggers) as exe:
2282
-
2283
2518
  # iterate through split executions, submitting each to worker
2284
2519
  for gid in self.gids:
2285
2520
  # submit executions and append to futures list
@@ -2288,36 +2523,39 @@ class BespokeWindPlants(BaseAggregation):
2288
2523
  rs, cs = self.slice_lookup[gid]
2289
2524
  gid_incl_mask = self._inclusion_mask[rs, cs]
2290
2525
 
2291
- futures.append(exe.submit(
2292
- self.run_serial,
2293
- self._excl_fpath,
2294
- self._res_fpath,
2295
- self._tm_dset,
2296
- self.sam_sys_inputs_with_site_data(gid),
2297
- self._obj_fun,
2298
- self._cap_cost_fun,
2299
- self._foc_fun,
2300
- self._voc_fun,
2301
- self._min_spacing,
2302
- wake_loss_multiplier=self._wake_loss_multiplier,
2303
- ga_kwargs=self._ga_kwargs,
2304
- output_request=self._output_request,
2305
- ws_bins=self._ws_bins,
2306
- wd_bins=self._wd_bins,
2307
- excl_dict=self._excl_dict,
2308
- inclusion_mask=gid_incl_mask,
2309
- area_filter_kernel=self._area_filter_kernel,
2310
- min_area=self._min_area,
2311
- resolution=self._resolution,
2312
- excl_area=self._excl_area,
2313
- data_layers=self._data_layers,
2314
- gids=gid,
2315
- exclusion_shape=self.shape,
2316
- slice_lookup=copy.deepcopy(self.slice_lookup),
2317
- prior_meta=self._get_prior_meta(gid),
2318
- gid_map=self._gid_map,
2319
- bias_correct=self._get_bc_for_gid(gid),
2320
- pre_loaded_data=self._pre_loaded_data_for_sc_gid(gid)))
2526
+ futures.append(
2527
+ exe.submit(
2528
+ self.run_serial,
2529
+ self._excl_fpath,
2530
+ self._res_fpath,
2531
+ self._tm_dset,
2532
+ self.sam_sys_inputs_with_site_data(gid),
2533
+ self._obj_fun,
2534
+ self._cap_cost_fun,
2535
+ self._foc_fun,
2536
+ self._voc_fun,
2537
+ self._min_spacing,
2538
+ wake_loss_multiplier=self._wake_loss_multiplier,
2539
+ ga_kwargs=self._ga_kwargs,
2540
+ output_request=self._output_request,
2541
+ ws_bins=self._ws_bins,
2542
+ wd_bins=self._wd_bins,
2543
+ excl_dict=self._excl_dict,
2544
+ inclusion_mask=gid_incl_mask,
2545
+ area_filter_kernel=self._area_filter_kernel,
2546
+ min_area=self._min_area,
2547
+ resolution=self._resolution,
2548
+ excl_area=self._excl_area,
2549
+ data_layers=self._data_layers,
2550
+ gids=gid,
2551
+ exclusion_shape=self.shape,
2552
+ slice_lookup=copy.deepcopy(self.slice_lookup),
2553
+ prior_meta=self._get_prior_meta(gid),
2554
+ gid_map=self._gid_map,
2555
+ bias_correct=self._get_bc_for_gid(gid),
2556
+ pre_loaded_data=self._pre_loaded_data_for_sc_gid(gid),
2557
+ )
2558
+ )
2321
2559
 
2322
2560
  # gather results
2323
2561
  for future in as_completed(futures):
@@ -2325,12 +2563,17 @@ class BespokeWindPlants(BaseAggregation):
2325
2563
  out.update(future.result())
2326
2564
  if n_finished % 10 == 0:
2327
2565
  mem = psutil.virtual_memory()
2328
- logger.info('Parallel bespoke futures collected: '
2329
- '{} out of {}. Memory usage is {:.3f} GB out '
2330
- 'of {:.3f} GB ({:.2f}% utilized).'
2331
- .format(n_finished, len(futures),
2332
- mem.used / 1e9, mem.total / 1e9,
2333
- 100 * mem.used / mem.total))
2566
+ logger.info(
2567
+ "Parallel bespoke futures collected: "
2568
+ "{} out of {}. Memory usage is {:.3f} GB out "
2569
+ "of {:.3f} GB ({:.2f}% utilized).".format(
2570
+ n_finished,
2571
+ len(futures),
2572
+ mem.used / 1e9,
2573
+ mem.total / 1e9,
2574
+ 100 * mem.used / mem.total,
2575
+ )
2576
+ )
2334
2577
 
2335
2578
  return out
2336
2579
 
@@ -2356,7 +2599,7 @@ class BespokeWindPlants(BaseAggregation):
2356
2599
  """
2357
2600
 
2358
2601
  # parallel job distribution test.
2359
- if self._obj_fun == 'test':
2602
+ if self._obj_fun == "test":
2360
2603
  return True
2361
2604
 
2362
2605
  if max_workers == 1:
@@ -2374,33 +2617,35 @@ class BespokeWindPlants(BaseAggregation):
2374
2617
  wlm = self._wake_loss_multiplier
2375
2618
  i_bc = self._get_bc_for_gid(gid)
2376
2619
 
2377
- si = self.run_serial(self._excl_fpath,
2378
- self._res_fpath,
2379
- self._tm_dset,
2380
- sam_inputs,
2381
- self._obj_fun,
2382
- self._cap_cost_fun,
2383
- self._foc_fun,
2384
- self._voc_fun,
2385
- min_spacing=self._min_spacing,
2386
- wake_loss_multiplier=wlm,
2387
- ga_kwargs=self._ga_kwargs,
2388
- output_request=self._output_request,
2389
- ws_bins=self._ws_bins,
2390
- wd_bins=self._wd_bins,
2391
- excl_dict=self._excl_dict,
2392
- inclusion_mask=gid_incl_mask,
2393
- area_filter_kernel=afk,
2394
- min_area=self._min_area,
2395
- resolution=self._resolution,
2396
- excl_area=self._excl_area,
2397
- data_layers=self._data_layers,
2398
- slice_lookup=slice_lookup,
2399
- prior_meta=prior_meta,
2400
- gid_map=self._gid_map,
2401
- bias_correct=i_bc,
2402
- gids=gid,
2403
- pre_loaded_data=pre_loaded_data)
2620
+ si = self.run_serial(
2621
+ self._excl_fpath,
2622
+ self._res_fpath,
2623
+ self._tm_dset,
2624
+ sam_inputs,
2625
+ self._obj_fun,
2626
+ self._cap_cost_fun,
2627
+ self._foc_fun,
2628
+ self._voc_fun,
2629
+ min_spacing=self._min_spacing,
2630
+ wake_loss_multiplier=wlm,
2631
+ ga_kwargs=self._ga_kwargs,
2632
+ output_request=self._output_request,
2633
+ ws_bins=self._ws_bins,
2634
+ wd_bins=self._wd_bins,
2635
+ excl_dict=self._excl_dict,
2636
+ inclusion_mask=gid_incl_mask,
2637
+ area_filter_kernel=afk,
2638
+ min_area=self._min_area,
2639
+ resolution=self._resolution,
2640
+ excl_area=self._excl_area,
2641
+ data_layers=self._data_layers,
2642
+ slice_lookup=slice_lookup,
2643
+ prior_meta=prior_meta,
2644
+ gid_map=self._gid_map,
2645
+ bias_correct=i_bc,
2646
+ gids=gid,
2647
+ pre_loaded_data=pre_loaded_data,
2648
+ )
2404
2649
  self._outputs.update(si)
2405
2650
  else:
2406
2651
  self._outputs = self.run_parallel(max_workers=max_workers)