NREL-reV 0.8.7__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. {NREL_reV-0.8.7.dist-info → NREL_reV-0.9.0.dist-info}/METADATA +13 -10
  2. {NREL_reV-0.8.7.dist-info → NREL_reV-0.9.0.dist-info}/RECORD +43 -43
  3. {NREL_reV-0.8.7.dist-info → NREL_reV-0.9.0.dist-info}/WHEEL +1 -1
  4. reV/SAM/SAM.py +217 -133
  5. reV/SAM/econ.py +18 -14
  6. reV/SAM/generation.py +611 -422
  7. reV/SAM/windbos.py +93 -79
  8. reV/bespoke/bespoke.py +681 -377
  9. reV/bespoke/cli_bespoke.py +2 -0
  10. reV/bespoke/place_turbines.py +187 -43
  11. reV/config/output_request.py +2 -1
  12. reV/config/project_points.py +218 -140
  13. reV/econ/econ.py +166 -114
  14. reV/econ/economies_of_scale.py +91 -45
  15. reV/generation/base.py +331 -184
  16. reV/generation/generation.py +326 -200
  17. reV/generation/output_attributes/lcoe_fcr_inputs.json +38 -3
  18. reV/handlers/__init__.py +0 -1
  19. reV/handlers/exclusions.py +16 -15
  20. reV/handlers/multi_year.py +57 -26
  21. reV/handlers/outputs.py +6 -5
  22. reV/handlers/transmission.py +44 -27
  23. reV/hybrids/hybrid_methods.py +30 -30
  24. reV/hybrids/hybrids.py +305 -189
  25. reV/nrwal/nrwal.py +262 -168
  26. reV/qa_qc/cli_qa_qc.py +14 -10
  27. reV/qa_qc/qa_qc.py +217 -119
  28. reV/qa_qc/summary.py +228 -146
  29. reV/rep_profiles/rep_profiles.py +349 -230
  30. reV/supply_curve/aggregation.py +349 -188
  31. reV/supply_curve/competitive_wind_farms.py +90 -48
  32. reV/supply_curve/exclusions.py +138 -85
  33. reV/supply_curve/extent.py +75 -50
  34. reV/supply_curve/points.py +735 -390
  35. reV/supply_curve/sc_aggregation.py +357 -248
  36. reV/supply_curve/supply_curve.py +604 -347
  37. reV/supply_curve/tech_mapping.py +144 -82
  38. reV/utilities/__init__.py +274 -16
  39. reV/utilities/pytest_utils.py +8 -4
  40. reV/version.py +1 -1
  41. {NREL_reV-0.8.7.dist-info → NREL_reV-0.9.0.dist-info}/LICENSE +0 -0
  42. {NREL_reV-0.8.7.dist-info → NREL_reV-0.9.0.dist-info}/entry_points.txt +0 -0
  43. {NREL_reV-0.8.7.dist-info → NREL_reV-0.9.0.dist-info}/top_level.txt +0 -0
reV/generation/output_attributes/lcoe_fcr_inputs.json CHANGED
@@ -4,7 +4,7 @@
    "dtype": "float32",
    "scale_factor": 1,
    "type": "scalar",
-   "units": "dollars"
+   "units": "usd"
  },
  "fixed_charge_rate": {
    "chunks": null,
@@ -18,13 +18,48 @@
    "dtype": "float32",
    "scale_factor": 1,
    "type": "scalar",
-   "units": "dollars"
+   "units": "usd"
  },
  "variable_operating_cost": {
    "chunks": null,
    "dtype": "float32",
    "scale_factor": 1,
    "type": "scalar",
-   "units": "dol/kWh"
+   "units": "usd/kWh"
+ },
+ "base_capital_cost": {
+   "chunks": null,
+   "dtype": "float32",
+   "scale_factor": 1,
+   "type": "scalar",
+   "units": "usd"
+ },
+ "base_fixed_operating_cost": {
+   "chunks": null,
+   "dtype": "float32",
+   "scale_factor": 1,
+   "type": "scalar",
+   "units": "usd"
+ },
+ "base_variable_operating_cost": {
+   "chunks": null,
+   "dtype": "float32",
+   "scale_factor": 1,
+   "type": "scalar",
+   "units": "usd/kWh"
+ },
+ "capital_cost_multiplier": {
+   "chunks": null,
+   "dtype": "float32",
+   "scale_factor": 1,
+   "type": "scalar",
+   "units": "unitless"
+ },
+ "system_capacity": {
+   "chunks": null,
+   "dtype": "float32",
+   "scale_factor": 1,
+   "type": "scalar",
+   "units": "kW"
  }
}
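
The three renamed unit strings and the five new entries above give every LCOE input its own output attribute with explicit units. A minimal sketch for inspecting these attributes from an installed 0.9.0 wheel (the importlib.resources navigation below is an assumption about how the wheel lays out this data file):

    import json
    from importlib.resources import files

    # Path assumed from the file listing above; adjust if the data layout differs.
    attrs_file = files("reV.generation") / "output_attributes" / "lcoe_fcr_inputs.json"
    attrs = json.loads(attrs_file.read_text())

    # New in 0.9.0: per-site LCOE inputs carry explicit units ("usd", "kW", ...)
    for name in ("base_capital_cost", "capital_cost_multiplier", "system_capacity"):
        print(name, attrs[name]["units"])
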
reV/handlers/__init__.py CHANGED
@@ -3,5 +3,4 @@
  Sub-package of data handlers
  """
  from .exclusions import ExclusionLayers
- from .multi_year import MultiYear
  from .outputs import Outputs
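
One practical consequence of dropping this re-export (assuming no replacement alias was added elsewhere): code that previously did "from reV.handlers import MultiYear" now needs the full module path.

    # MultiYear is no longer re-exported from reV.handlers in 0.9.0
    from reV.handlers.multi_year import MultiYear
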
reV/handlers/exclusions.py CHANGED
@@ -2,17 +2,18 @@
  """
  Exclusion layers handler
  """
- import logging
  import json
+ import logging
+
  import numpy as np
+ from rex.multi_file_resource import MultiFileResource
+ from rex.resource import Resource
+ from rex.utilities.parse_keys import parse_keys
 
  from reV.utilities.exceptions import HandlerKeyError, MultiFileExclusionError
 
- from rex.utilities.parse_keys import parse_keys
- from rex.resource import Resource
- from rex.multi_file_resource import MultiFileResource
-
  logger = logging.getLogger(__name__)
+ LATITUDE, LONGITUDE = "latitude", "longitude"
 
 
  class ExclusionLayers:
@@ -81,8 +82,8 @@ class ExclusionLayers:
 
  def _preflight_multi_file(self):
      """Run simple multi-file exclusion checks."""
-     lat_shape = self.h5.shapes['latitude']
-     lon_shape = self.h5.shapes['longitude']
+     lat_shape = self.h5.shapes[LATITUDE]
+     lon_shape = self.h5.shapes[LONGITUDE]
      for layer in self.layers:
          lshape = self.h5.shapes[layer]
          lshape = lshape[1:] if len(lshape) > 2 else lshape
@@ -231,7 +232,7 @@ class ExclusionLayers:
      """
      shape = self.h5.attrs.get('shape', None)
      if shape is None:
-         shape = self.h5.shapes['latitude']
+         shape = self.h5.shapes[LATITUDE]
 
      return tuple(shape)
 
@@ -247,7 +248,7 @@ class ExclusionLayers:
      """
      chunks = self.h5.attrs.get('chunks', None)
      if chunks is None:
-         chunks = self.h5.chunks['latitude']
+         chunks = self.h5.chunks[LATITUDE]
 
      return chunks
 
@@ -260,7 +261,7 @@ class ExclusionLayers:
      -------
      ndarray
      """
-     return self['latitude']
+     return self[LATITUDE]
 
  @property
  def longitude(self):
@@ -271,7 +272,7 @@ class ExclusionLayers:
      -------
      ndarray
      """
-     return self['longitude']
+     return self[LONGITUDE]
 
  def get_layer_profile(self, layer):
      """
@@ -384,13 +385,13 @@ class ExclusionLayers:
      lat : ndarray
          Latitude coordinates
      """
-     if 'latitude' not in self.h5:
+     if LATITUDE not in self.h5:
          msg = ('"latitude" is missing from {}'
                 .format(self.h5_file))
          logger.error(msg)
          raise HandlerKeyError(msg)
 
-     ds_slice = ('latitude', ) + ds_slice
+     ds_slice = (LATITUDE, ) + ds_slice
 
      lat = self.h5[ds_slice]
 
@@ -410,13 +411,13 @@ class ExclusionLayers:
      lon : ndarray
          Longitude coordinates
      """
-     if 'longitude' not in self.h5:
+     if LONGITUDE not in self.h5:
          msg = ('"longitude" is missing from {}'
                 .format(self.h5_file))
          logger.error(msg)
          raise HandlerKeyError(msg)
 
-     ds_slice = ('longitude', ) + ds_slice
+     ds_slice = (LONGITUDE, ) + ds_slice
 
      lon = self.h5[ds_slice]
 
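
For orientation, a minimal usage sketch of the handler touched above, assuming its usual context-manager interface (the file path is a placeholder):

    from reV.handlers.exclusions import ExclusionLayers

    with ExclusionLayers("/path/to/exclusions.h5") as excl:
        print(excl.shape)      # falls back to the latitude dataset shape
        lats = excl.latitude   # resolved through the new module-level LATITUDE constant
        lons = excl.longitude
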
reV/handlers/multi_year.py CHANGED
@@ -3,22 +3,26 @@
  Classes to collect reV outputs from multiple annual files.
  """
  import glob
- import time
  import logging
- import numpy as np
  import os
- import pandas as pd
+ import time
  from warnings import warn
 
- from rex import Resource
- from rex.utilities.utilities import (get_class_properties, parse_year,
-                                      get_lat_lon_cols)
+ import numpy as np
+ import pandas as pd
  from gaps.pipeline import parse_previous_status
+ from rex import Resource
+ from rex.utilities.utilities import (
+     get_class_properties,
+     get_lat_lon_cols,
+     parse_year,
+ )
 
- from reV.handlers.outputs import Outputs
+ from reV.generation.base import LCOE_REQUIRED_OUTPUTS
  from reV.config.output_request import SAMOutputRequest
- from reV.utilities.exceptions import HandlerRuntimeError, ConfigError
- from reV.utilities import log_versions, ModuleName
+ from reV.handlers.outputs import Outputs
+ from reV.utilities import ModuleName, log_versions
+ from reV.utilities.exceptions import ConfigError, HandlerRuntimeError
 
  logger = logging.getLogger(__name__)
 
@@ -57,8 +61,12 @@ class MultiYearGroup:
      source files. This takes priority over `source_dir` and
      `source_prefix` but is not used if `source_files` are
      specified explicitly. By default, ``None``.
- dsets : list | tuple, optional
-     List of datasets to collect. By default, ``('cf_mean',)``.
+ dsets : str | list | tuple, optional
+     List of datasets to collect. This can be set to
+     ``"PIPELINE"`` if running from the command line as part of a
+     reV pipeline. In this case, all the datasets from the
+     previous pipeline step will be collected.
+     By default, ``('cf_mean',)``.
  pass_through_dsets : list | tuple, optional
      Optional list of datasets that are identical in the
      multi-year files (e.g. input datasets that don't vary from
@@ -73,10 +81,35 @@ class MultiYearGroup:
      self._source_prefix = source_prefix
      self._source_pattern = source_pattern
      self._pass_through_dsets = None
-     if pass_through_dsets is not None:
-         self._pass_through_dsets = SAMOutputRequest(pass_through_dsets)
+     self._dsets = None
+
+     self._parse_pass_through_dsets(dsets, pass_through_dsets or [])
+     self._parse_dsets(dsets)
 
-     self._dsets = self._parse_dsets(dsets)
+ def _parse_pass_through_dsets(self, dsets, pass_through_dsets):
+     """Parse a multi-year pass-through dataset collection request.
+
+     Parameters
+     ----------
+     dsets : str | list
+         One or more datasets to collect, or "PIPELINE"
+     pass_through_dsets : list
+         List of pass through datasets.
+     """
+     if isinstance(dsets, str) and dsets == 'PIPELINE':
+         files = parse_previous_status(self._dirout, ModuleName.MULTI_YEAR)
+         with Resource(files[0]) as res:
+             dsets = res.datasets
+
+     if "lcoe_fcr" in dsets:
+         for dset in LCOE_REQUIRED_OUTPUTS:
+             if dset not in pass_through_dsets:
+                 pass_through_dsets.append(dset)
+     if "dc_ac_ratio" in dsets:
+         if "dc_ac_ratio" not in pass_through_dsets:
+             pass_through_dsets.append("dc_ac_ratio")
+
+     self._pass_through_dsets = SAMOutputRequest(pass_through_dsets)
 
  def _parse_dsets(self, dsets):
      """Parse a multi-year dataset collection request. Can handle PIPELINE
@@ -87,11 +120,6 @@ class MultiYearGroup:
      ----------
      dsets : str | list
          One or more datasets to collect, or "PIPELINE"
-
-     Returns
-     -------
-     dsets : SAMOutputRequest
-         Dataset list object.
      """
      if isinstance(dsets, str) and dsets == 'PIPELINE':
          files = parse_previous_status(self._dirout, ModuleName.MULTI_YEAR)
@@ -101,9 +129,7 @@ class MultiYearGroup:
          and d != 'meta'
          and d not in self.pass_through_dsets]
 
-     dsets = SAMOutputRequest(dsets)
-
-     return dsets
+     self._dsets = SAMOutputRequest(dsets)
 
  @property
  def name(self):
@@ -812,10 +838,15 @@ def my_collect_groups(out_fpath, groups, clobber=True):
      MultiYear.collect_means(out_fpath, group['source_files'],
                              dset, group=group['group'])
 
-     if group.get('pass_through_dsets', None) is not None:
-         for dset in group['pass_through_dsets']:
-             MultiYear.pass_through(out_fpath, group['source_files'],
-                                    dset, group=group['group'])
+     pass_through_dsets = group.get('pass_through_dsets') or []
+     if "lcoe_fcr" in group['dsets']:
+         for dset in LCOE_REQUIRED_OUTPUTS:
+             if dset not in pass_through_dsets:
+                 pass_through_dsets.append(dset)
+
+     for dset in pass_through_dsets:
+         MultiYear.pass_through(out_fpath, group['source_files'],
+                                dset, group=group['group'])
 
  runtime = (time.time() - t0) / 60
  logger.info('- {} collection completed in: {:.2f} min.'
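
The docstring change above documents the "PIPELINE" shortcut for dsets. A hedged sketch of a multi-year group spec using it; only the dsets/pass_through_dsets behavior comes from this diff, the other keys are assumptions based on the source_* parameters mentioned in the docstring:

    # Hypothetical multi-year group spec; keys other than "dsets" and
    # "pass_through_dsets" are assumptions, not taken from this diff.
    my_group = {
        "source_files": "PIPELINE",             # assumed: reuse prior pipeline outputs
        "dsets": "PIPELINE",                    # collect every dataset from the prior step
        "pass_through_dsets": ["dc_ac_ratio"],  # copied as-is across years; the LCOE
                                                # inputs are appended automatically when
                                                # "lcoe_fcr" is among the collected dsets
    }
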
reV/handlers/outputs.py CHANGED
@@ -2,15 +2,16 @@
  """
  Classes to handle reV h5 output files.
  """
+ import json
  import logging
+ import sys
+
  import NRWAL
  import PySAM
  import rex
- import sys
- import json
+ from rex.outputs import Outputs as rexOutputs
 
  from reV.version import __version__
- from rex.outputs import Outputs as rexOutputs
 
  logger = logging.getLogger(__name__)
 
@@ -28,8 +29,8 @@ class Outputs(rexOutputs):
      >>> import pandas as pd
      >>> import numpy as np
      >>>
-     >>> meta = pd.DataFrame({'latitude': np.ones(100),
-     >>>                      'longitude': np.ones(100)})
+     >>> meta = pd.DataFrame({SupplyCurveField.LATITUDE: np.ones(100),
+     >>>                      SupplyCurveField.LONGITUDE: np.ones(100)})
      >>>
      >>> time_index = pd.date_range('20210101', '20220101', freq='1h',
      >>>                            closed='right')
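
The updated doctest assumes SupplyCurveField is already in scope; in user code the meta construction would look roughly like this (a sketch mirroring the doctest, not taken verbatim from the package docs):

    import numpy as np
    import pandas as pd

    from reV.utilities import SupplyCurveField

    # Column names for the output meta are now taken from the SupplyCurveField enum
    meta = pd.DataFrame({SupplyCurveField.LATITUDE: np.ones(100),
                         SupplyCurveField.LONGITUDE: np.ones(100)})
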
reV/handlers/transmission.py CHANGED
@@ -9,6 +9,7 @@ import os
  import pandas as pd
  from warnings import warn
 
+ from reV.utilities import SupplyCurveField
  from reV.utilities.exceptions import (HandlerWarning, HandlerKeyError,
                                        HandlerRuntimeError)
 
@@ -153,12 +154,17 @@ class TransmissionFeatures:
          raise
 
      trans_table = \
-         trans_table.rename(columns={'trans_line_gid': 'trans_gid',
-                                     'trans_gids': 'trans_line_gids'})
-
-     if 'dist_mi' in trans_table and 'dist_km' not in trans_table:
-         trans_table = trans_table.rename(columns={'dist_mi': 'dist_km'})
-         trans_table['dist_km'] *= 1.60934
+         trans_table.rename(
+             columns={'trans_line_gid': SupplyCurveField.TRANS_GID,
+                      'trans_gids': 'trans_line_gids'})
+
+     contains_dist_in_miles = "dist_mi" in trans_table
+     missing_km_dist = SupplyCurveField.DIST_SPUR_KM not in trans_table
+     if contains_dist_in_miles and missing_km_dist:
+         trans_table = trans_table.rename(
+             columns={"dist_mi": SupplyCurveField.DIST_SPUR_KM}
+         )
+         trans_table[SupplyCurveField.DIST_SPUR_KM] *= 1.60934
 
      return trans_table
 
@@ -184,23 +190,28 @@ class TransmissionFeatures:
      features = {}
 
      cap_frac = self._avail_cap_frac
-     trans_features = trans_table.groupby('trans_gid').first()
+     trans_features = trans_table.groupby(SupplyCurveField.TRANS_GID)
+     trans_features = trans_features.first()
 
      for gid, feature in trans_features.iterrows():
-         name = feature['category'].lower()
+         name = feature[SupplyCurveField.TRANS_TYPE].lower()
          feature_dict = {'type': name}
 
          if name == 'transline':
-             feature_dict['avail_cap'] = feature['ac_cap'] * cap_frac
+             feature_dict[SupplyCurveField.TRANS_CAPACITY] = (
+                 feature['ac_cap'] * cap_frac
+             )
 
          elif name == 'substation':
              feature_dict['lines'] = json.loads(feature['trans_line_gids'])
 
         elif name == 'loadcen':
-             feature_dict['avail_cap'] = feature['ac_cap'] * cap_frac
+             feature_dict[SupplyCurveField.TRANS_CAPACITY] = (
+                 feature['ac_cap'] * cap_frac
+             )
 
         elif name == 'pcaloadcen':
-             feature_dict['avail_cap'] = None
+             feature_dict[SupplyCurveField.TRANS_CAPACITY] = None
 
         else:
             msg = ('Cannot not recognize feature type "{}" '
@@ -297,7 +308,8 @@ class TransmissionFeatures:
          Substation available capacity
      """
      try:
-         line_caps = [self[l_gid]['avail_cap'] for l_gid in line_gids]
+         line_caps = [self[l_gid][SupplyCurveField.TRANS_CAPACITY]
+                      for l_gid in line_gids]
      except HandlerKeyError as e:
          msg = ('Could not find capacities for substation gid {} and '
                 'connected lines: {}'.format(gid, line_gids))
@@ -331,8 +343,8 @@ class TransmissionFeatures:
 
      feature = self[gid]
 
-     if 'avail_cap' in feature:
-         avail_cap = feature['avail_cap']
+     if SupplyCurveField.TRANS_CAPACITY in feature:
+         avail_cap = feature[SupplyCurveField.TRANS_CAPACITY]
 
      elif 'lines' in feature:
          avail_cap = self._substation_capacity(gid, feature['lines'])
@@ -387,7 +399,7 @@ class TransmissionFeatures:
      capacity : float
          Capacity needed in MW
      """
-     avail_cap = self[gid]['avail_cap']
+     avail_cap = self[gid][SupplyCurveField.TRANS_CAPACITY]
 
      if avail_cap < capacity:
          msg = ("Cannot connect to {}: "
@@ -397,7 +409,7 @@ class TransmissionFeatures:
          logger.error(msg)
          raise RuntimeError(msg)
 
-     self[gid]['avail_cap'] -= capacity
+     self[gid][SupplyCurveField.TRANS_CAPACITY] -= capacity
 
  def _fill_lines(self, line_gids, line_caps, capacity):
      """
@@ -471,7 +483,7 @@ class TransmissionFeatures:
          Substation connection is limited by maximum capacity of the
          attached lines
      """
-     line_caps = np.array([self[gid]['avail_cap']
+     line_caps = np.array([self[gid][SupplyCurveField.TRANS_CAPACITY]
                            for gid in line_gids])
      if self._line_limited:
          gid = line_gids[np.argmax(line_caps)]
@@ -603,8 +615,8 @@ class TransmissionFeatures:
          raise
 
      feature_cap = pd.Series(feature_cap)
-     feature_cap.name = 'avail_cap'
-     feature_cap.index.name = 'trans_gid'
+     feature_cap.name = SupplyCurveField.TRANS_CAPACITY
+     feature_cap.index.name = SupplyCurveField.TRANS_GID
      feature_cap = feature_cap.to_frame().reset_index()
 
      return feature_cap
@@ -635,16 +647,20 @@ class TransmissionCosts(TransmissionFeatures):
 
      features = {}
 
-     if 'avail_cap' not in trans_table:
+     if SupplyCurveField.TRANS_CAPACITY not in trans_table:
          kwargs = {'avail_cap_frac': self._avail_cap_frac}
          fc = TransmissionFeatures.feature_capacity(trans_table,
                                                     **kwargs)
-         trans_table = trans_table.merge(fc, on='trans_gid')
+         trans_table = trans_table.merge(fc, on=SupplyCurveField.TRANS_GID)
 
-     trans_features = trans_table.groupby('trans_gid').first()
+     trans_features = trans_table.groupby(SupplyCurveField.TRANS_GID)
+     trans_features = trans_features.first()
      for gid, feature in trans_features.iterrows():
-         name = feature['category'].lower()
-         feature_dict = {'type': name, 'avail_cap': feature['avail_cap']}
+         name = feature[SupplyCurveField.TRANS_TYPE].lower()
+         feature_dict = {'type': name,
+                         SupplyCurveField.TRANS_CAPACITY: (
+                             feature[SupplyCurveField.TRANS_CAPACITY]
+                         )}
          features[gid] = feature_dict
 
      return features
@@ -665,7 +681,7 @@ class TransmissionCosts(TransmissionFeatures):
          default = 100%
      """
 
-     return self[gid]['avail_cap']
+     return self[gid][SupplyCurveField.TRANS_CAPACITY]
 
  @classmethod
  def feature_costs(cls, trans_table, capacity=None, line_tie_in_cost=14000,
@@ -722,8 +738,9 @@ class TransmissionCosts(TransmissionFeatures):
          costs = []
          for _, row in trans_table.iterrows():
              tm = row.get('transmission_multiplier', 1)
-             costs.append(feature.cost(row['trans_gid'],
-                                       row['dist_km'], capacity=capacity,
+             costs.append(feature.cost(row[SupplyCurveField.TRANS_GID],
+                                       row[SupplyCurveField.DIST_SPUR_KM],
+                                       capacity=capacity,
                                        transmission_multiplier=tm))
      except Exception:
          logger.exception("Error computing costs for all connections in {}"
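
The recurring pattern in this file is replacing literal column names ('trans_gid', 'avail_cap', 'dist_km', 'category') with SupplyCurveField members. A self-contained sketch of the miles-to-km rename shown above (toy data; the enum members' string values are an assumption):

    import pandas as pd

    from reV.utilities import SupplyCurveField

    # Toy transmission table using the legacy column names handled above
    table = pd.DataFrame({"trans_line_gid": [1, 2], "dist_mi": [10.0, 25.0]})
    table = table.rename(columns={"trans_line_gid": SupplyCurveField.TRANS_GID,
                                  "dist_mi": SupplyCurveField.DIST_SPUR_KM})
    table[SupplyCurveField.DIST_SPUR_KM] *= 1.60934  # miles -> km, as in the diff
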
reV/hybrids/hybrid_methods.py CHANGED
@@ -3,18 +3,12 @@
 
  @author: ppinchuk
  """
+ from reV.utilities import SupplyCurveField
 
 
  def aggregate_solar_capacity(h):
      """Compute the total solar capcity allowed in hybridization.
 
-     Note
-     ----
-     No limiting is done on the ratio of wind to solar. This method
-     checks for an existing 'hybrid_solar_capacity'. If one does not exist,
-     it is assumed that there is no limit on the solar to wind capacity
-     ratio and the solar capacity is copied into this new column.
-
      Parameters
      ----------
      h : `reV.hybrids.Hybridization`
@@ -26,27 +20,25 @@ def aggregate_solar_capacity(h):
      -------
      data : Series | None
          A series of data containing the capacity allowed in the hybrid
-         capacity sum, or `None` if 'hybrid_solar_capacity' already exists.
+         capacity sum, or `None` if 'hybrid_solar_capacity' already
+         exists.
 
      Notes
      -----
-
+     No limiting is done on the ratio of wind to solar. This method
+     checks for an existing 'hybrid_solar_capacity'. If one does not
+     exist, it is assumed that there is no limit on the solar to wind
+     capacity ratio and the solar capacity is copied into this new
+     column.
      """
-     if 'hybrid_solar_capacity' in h.hybrid_meta:
+     if f'hybrid_solar_{SupplyCurveField.CAPACITY_AC_MW}' in h.hybrid_meta:
          return None
-     return h.hybrid_meta['solar_capacity']
+     return h.hybrid_meta[f'solar_{SupplyCurveField.CAPACITY_AC_MW}']
 
 
  def aggregate_wind_capacity(h):
      """Compute the total wind capcity allowed in hybridization.
 
-     Note
-     ----
-     No limiting is done on the ratio of wind to solar. This method
-     checks for an existing 'hybrid_wind_capacity'. If one does not exist,
-     it is assumed that there is no limit on the solar to wind capacity
-     ratio and the wind capacity is copied into this new column.
-
      Parameters
      ----------
      h : `reV.hybrids.Hybridization`
@@ -58,15 +50,19 @@ def aggregate_wind_capacity(h):
      -------
      data : Series | None
          A series of data containing the capacity allowed in the hybrid
-         capacity sum, or `None` if 'hybrid_solar_capacity' already exists.
+         capacity sum, or `None` if 'hybrid_solar_capacity' already
+         exists.
 
      Notes
      -----
-
+     No limiting is done on the ratio of wind to solar. This method
+     checks for an existing 'hybrid_wind_capacity'. If one does not
+     exist, it is assumed that there is no limit on the solar to wind
+     capacity ratio and the wind capacity is copied into this new column.
      """
-     if 'hybrid_wind_capacity' in h.hybrid_meta:
+     if f'hybrid_wind_{SupplyCurveField.CAPACITY_AC_MW}' in h.hybrid_meta:
          return None
-     return h.hybrid_meta['wind_capacity']
+     return h.hybrid_meta[f'wind_{SupplyCurveField.CAPACITY_AC_MW}']
 
 
  def aggregate_capacity(h):
@@ -85,8 +81,8 @@ def aggregate_capacity(h):
          A series of data containing the aggregated capacity, or `None`
          if the capacity columns are missing.
      """
-
-     sc, wc = 'hybrid_solar_capacity', 'hybrid_wind_capacity'
+     sc = f'hybrid_solar_{SupplyCurveField.CAPACITY_AC_MW}'
+     wc = f'hybrid_wind_{SupplyCurveField.CAPACITY_AC_MW}'
      missing_solar_cap = sc not in h.hybrid_meta.columns
      missing_wind_cap = wc not in h.hybrid_meta.columns
      if missing_solar_cap or missing_wind_cap:
@@ -113,8 +109,10 @@ def aggregate_capacity_factor(h):
          if the capacity and/or mean_cf columns are missing.
      """
 
-     sc, wc = 'hybrid_solar_capacity', 'hybrid_wind_capacity'
-     scf, wcf = 'solar_mean_cf', 'wind_mean_cf'
+     sc = f'hybrid_solar_{SupplyCurveField.CAPACITY_AC_MW}'
+     wc = f'hybrid_wind_{SupplyCurveField.CAPACITY_AC_MW}'
+     scf = f'solar_{SupplyCurveField.MEAN_CF_AC}'
+     wcf = f'wind_{SupplyCurveField.MEAN_CF_AC}'
      missing_solar_cap = sc not in h.hybrid_meta.columns
      missing_wind_cap = wc not in h.hybrid_meta.columns
      missing_solar_mean_cf = scf not in h.hybrid_meta.columns
@@ -132,8 +130,10 @@
 
 
  HYBRID_METHODS = {
-     'hybrid_solar_capacity': aggregate_solar_capacity,
-     'hybrid_wind_capacity': aggregate_wind_capacity,
-     'hybrid_capacity': aggregate_capacity,
-     'hybrid_mean_cf': aggregate_capacity_factor
+     f'hybrid_solar_{SupplyCurveField.CAPACITY_AC_MW}': (
+         aggregate_solar_capacity
+     ),
+     f'hybrid_wind_{SupplyCurveField.CAPACITY_AC_MW}': aggregate_wind_capacity,
+     f'hybrid_{SupplyCurveField.CAPACITY_AC_MW}': aggregate_capacity,
+     f'hybrid_{SupplyCurveField.MEAN_CF_AC}': aggregate_capacity_factor
  }
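
As elsewhere in this release, the hybrid meta column names are now built from SupplyCurveField members rather than hard-coded strings. A short illustration of the naming scheme used for the HYBRID_METHODS keys (the members' rendered string values are an assumption about reV 0.9.0 internals):

    from reV.utilities import SupplyCurveField

    # Mirrors the f-string keys used for HYBRID_METHODS above
    solar_col = f"hybrid_solar_{SupplyCurveField.CAPACITY_AC_MW}"
    wind_col = f"hybrid_wind_{SupplyCurveField.CAPACITY_AC_MW}"
    cf_col = f"hybrid_{SupplyCurveField.MEAN_CF_AC}"
    print(solar_col, wind_col, cf_col)
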