NREL-reV 0.13.0__py3-none-any.whl → 0.14.0__py3-none-any.whl

@@ -31,7 +31,7 @@ class TechMapping:
     """Framework to create map between tech layer (exclusions), res, and gen"""
 
     def __init__(
-        self, excl_fpath, res_fpath, sc_resolution=2560, dist_margin=1.05
+        self, excl_fpath, res_fpath, sc_resolution=1200, dist_margin=1.05
     ):
         """
         Parameters
@@ -42,18 +42,26 @@ class TechMapping:
         res_fpath : str
             Filepath to .h5 resource file that we're mapping to.
         sc_resolution : int | None, optional
-            Supply curve resolution, does not affect the exclusion to resource
-            (tech) mapping, but defines how many exclusion pixels are mapped
-            at a time, by default 2560
+            Defines how many exclusion pixels are mapped at a time. Units
+            indicate the length of one dimension, in pixels, of each square
+            chunk to be mapped. By default, this value is 1200, which will
+            map the exclusion pixels in 1200x1200 pixel chunks.
+
+            .. Note:: This parameter does not affect the exclusion to resource
+                (tech) mapping, which deviates from how the effect of the
+                ``sc_resolution`` parameter works in other functionality within
+                ``reV``.
+
         dist_margin : float, optional
             Extra margin to multiply times the computed distance between
             neighboring resource points, by default 1.05
         """
         self._excl_fpath = excl_fpath
+        self._res_fpath = res_fpath
         self._check_fout()
 
         self._tree, self._dist_thresh = self._build_tree(
-            res_fpath, dist_margin=dist_margin
+            self._res_fpath, dist_margin=dist_margin
         )
 
         with SupplyCurveExtent(
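The new `sc_resolution` default above only controls how the work is chunked, not the mapping itself. A rough, illustrative sketch of the chunk-count arithmetic implied by the docstring (not code from the package; the helper name is made up):

```python
# Illustrative only: rough count of square chunks mapped one at a time for
# an exclusion grid of shape (n_rows, n_cols) with a given sc_resolution.
from math import ceil


def n_mapping_chunks(n_rows, n_cols, sc_resolution=1200):
    """Number of (up to) sc_resolution x sc_resolution pixel chunks."""
    return ceil(n_rows / sc_resolution) * ceil(n_cols / sc_resolution)


print(n_mapping_chunks(2400, 3600))  # -> 2 * 3 = 6 chunks
```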
@@ -181,16 +189,17 @@ class TechMapping:
         return row_slice, col_slice
 
     @classmethod
-    def _get_excl_coords(cls, excl_fpath, gids, sc_row_indices, sc_col_indices,
+    def _get_excl_coords(cls, excl_fpath, gid, sc_row_indices, sc_col_indices,
                          excl_row_slices, excl_col_slices,
                          coord_labels=(LATITUDE, LONGITUDE)):
         """
-        Extract the exclusion coordinates for the desired gids for TechMapping.
+        Extract the exclusion coordinates for the desired supply curve point
+        gid for TechMapping.
 
         Parameters
         ----------
-        gids : np.ndarray
-            Supply curve gids with tech exclusion points to map to the
+        gid : int
+            Supply curve gid with tech exclusion points to map to the
             resource meta points.
         excl_fpath : str
             Filepath to exclusions h5 file, must contain latitude and longitude
@@ -213,72 +222,50 @@ class TechMapping:
 
         Returns
         -------
-        coords_out : list
-            List of arrays of the un-projected latitude, longitude array of
-            tech exclusion points. List entries correspond to input gids.
+        coords_out : ndarray
+            2D array (Nx2) of the un-projected latitude, longitude of
+            tech exclusion pixels within the specified gid point. Rows
+            correspond to exclusion pixels of the specified gid, columns
+            correspond to latitude and longitude, respectively.
         """
-        coords_out = []
         with h5py.File(excl_fpath, "r") as f:
-            for gid in gids:
-                row_slice, col_slice = cls._get_excl_slices(
-                    gid,
-                    sc_row_indices,
-                    sc_col_indices,
-                    excl_row_slices,
-                    excl_col_slices,
+            row_slice, col_slice = cls._get_excl_slices(
+                gid,
+                sc_row_indices,
+                sc_col_indices,
+                excl_row_slices,
+                excl_col_slices,
+            )
+            try:
+                lats = f[coord_labels[0]][row_slice, col_slice]
+                lons = f[coord_labels[1]][row_slice, col_slice]
+                coords_out = np.vstack((lats.flatten(), lons.flatten())).T
+            except Exception as e:
+                m = (
+                    "Could not unpack coordinates for gid {} with "
+                    "row/col slice {}/{}. Received the following "
+                    "error:\n{}".format(gid, row_slice, col_slice, e)
                 )
-                try:
-                    lats = f[coord_labels[0]][row_slice, col_slice]
-                    lons = f[coord_labels[1]][row_slice, col_slice]
-                    emeta = np.vstack((lats.flatten(), lons.flatten())).T
-                except Exception as e:
-                    m = (
-                        "Could not unpack coordinates for gid {} with "
-                        "row/col slice {}/{}. Received the following "
-                        "error:\n{}".format(gid, row_slice, col_slice, e)
-                    )
-                    logger.error(m)
-                    raise e
-
-                coords_out.append(emeta)
+                logger.error(m)
+                raise e
 
         return coords_out
 
     @classmethod
     def map_resource_gids(
         cls,
-        gids,
-        excl_fpath,
-        sc_row_indices,
-        sc_col_indices,
-        excl_row_slices,
-        excl_col_slices,
+        excl_coords,
         tree,
         dist_thresh,
     ):
-        """Map exclusion gids to the resource meta.
+        """Map exclusion pixels to the resource meta.
 
         Parameters
         ----------
-        gids : np.ndarray
-            Supply curve gids with tech exclusion points to map to the
-            resource meta points.
-        excl_fpath : str
-            Filepath to exclusions h5 file, must contain latitude and longitude
-            arrays to allow for mapping to resource points
-        sc_row_indices : list
-            List of row indices in exclusion array for for every sc_point gid
-        sc_col_indices : list
-            List of column indices in exclusion array for for every sc_point
-            gid
-        excl_row_slices : list
-            List representing the supply curve points rows. Each list entry
-            contains the exclusion row slice that are included in the sc
-            point.
-        excl_col_slices : list
-            List representing the supply curve points columns. Each list entry
-            contains the exclusion columns slice that are included in the sc
-            point.
+        excl_coords : ndarray
+            2D Array of the un-projected latitude, longitude array of
+            tech exclusion pixels. Rows correspond to exclusion pixels,
+            columns correspond to latitude and longitude, respectively.
         tree : cKDTree
            cKDTree built from resource lat, lon coordinates
        dist_tresh : float
@@ -288,99 +275,56 @@ class TechMapping:
 
         Returns
         -------
-        ind : list
-            List of arrays of index values from the NN. List entries correspond
-            to input gids.
+        ind : ndarray
+            1D arrays of index values from the NN. Entries correspond
+            to input exclusion pixels.
         """
-        logger.debug(
-            "Getting tech map coordinates for chunks {} through {}".format(
-                gids[0], gids[-1]
-            )
-        )
-        ind_out = []
-        coords_out = cls._get_excl_coords(
-            excl_fpath,
-            gids,
-            sc_row_indices,
-            sc_col_indices,
-            excl_row_slices,
-            excl_col_slices,
-        )
-
-        logger.debug(
-            "Running tech mapping for chunks {} through {}".format(
-                gids[0], gids[-1]
-            )
-        )
-        for i, _ in enumerate(gids):
-            dist, ind = tree.query(coords_out[i])
-            ind[(dist >= dist_thresh)] = -1
-            ind_out.append(ind)
+        dist, ind = tree.query(excl_coords)
+        ind[(dist >= dist_thresh)] = -1
 
-        return ind_out
+        return ind
 
-    @staticmethod
-    def save_tech_map(
-        excl_fpath,
-        dset,
-        indices,
-        distance_threshold=None,
-        res_fpath=None,
-        chunks=(128, 128),
-    ):
-        """Save tech mapping indices and coordinates to an h5 output file.
+    def initialize_dataset(self, dset, chunks=(128, 128)):
+        """
+        Initialize output dataset in exclusions h5 file. If dataset already
+        exists, a warning will be issued.
 
         Parameters
         ----------
-        excl_fpath : str
-            Filepath to exclusions h5 file to add techmap to as 'dset'
         dset : str
-            Dataset name in fpath_out to save mapping results to.
-        indices : np.ndarray
-            Index values of the NN resource point. -1 if no res point found.
-            2D integer array with shape equal to the exclusions extent shape.
-        distance_threshold : float
-            Distance upper bound to save as attr.
-        res_fpath : str, optional
-            Filepath to .h5 resource file that we're mapping to,
-            by default None
-        chunks : tuple
-            Chunk shape of the 2D output datasets.
+            Name of the dataset in the exclusions H5 file to create.
+        chunks : tuple, optional
+            Chunk size for the dataset, by default (128, 128).
         """
-        logger.info('Writing tech map "{}" to {}'.format(dset, excl_fpath))
 
-        shape = indices.shape
-        chunks = (np.min((shape[0], chunks[0])), np.min((shape[1], chunks[1])))
-
-        with h5py.File(excl_fpath, "a") as f:
+        with h5py.File(self._excl_fpath, "a") as f:
             if dset in list(f):
                 wmsg = (
-                    'TechMap results dataset "{}" is being replaced '
+                    'TechMap results dataset "{}" already exists '
                     'in pre-existing Exclusions TechMapping file "{}"'.format(
-                        dset, excl_fpath
+                        dset, self._excl_fpath
                     )
                 )
                 logger.warning(wmsg)
                 warn(wmsg, FileInputWarning)
-                f[dset][...] = indices
             else:
+                logger.info(
+                    f"Initializing tech map dataset {dset} in "
+                    f"{self._excl_fpath}"
+                )
                 f.create_dataset(
                     dset,
-                    shape=shape,
-                    dtype=indices.dtype,
-                    data=indices,
+                    shape=self._excl_shape,
+                    dtype=np.int32,
                     chunks=chunks,
+                    fillvalue=-1
                 )
 
-            if distance_threshold:
-                f[dset].attrs["distance_threshold"] = distance_threshold
+            if self._dist_thresh:
+                f[dset].attrs["distance_threshold"] = self._dist_thresh
 
-            if res_fpath:
-                f[dset].attrs["src_res_fpath"] = res_fpath
-
-            logger.info(
-                'Successfully saved tech map "{}" to {}'.format(dset, excl_fpath)
-            )
+            if self._res_fpath:
+                f[dset].attrs["src_res_fpath"] = self._res_fpath
 
     def _check_fout(self):
         """Check the TechMapping output file for cached data."""
@@ -393,134 +337,150 @@ class TechMapping:
                 logger.exception(emsg)
                 raise FileInputError(emsg)
 
-    def map_resource(self, max_workers=None, points_per_worker=10):
+    def map_resource(
+        self, dset, max_workers=None, batch_size=100
+    ):
         """
-        Map all resource gids to exclusion gids
+        Map all resource gids to exclusion gids. Save results to dset in
+        exclusions h5 file.
 
         Parameters
         ----------
+        dset : str, optional
+            Name of the output dataset in the exclusions H5 file to which the
+            tech map will be saved.
         max_workers : int, optional
            Number of cores to run mapping on. None uses all available cpus,
            by default None
-        points_per_worker : int, optional
-            Number of supply curve points to map to resource gids on each
-            worker, by default 10
-
-        Returns
-        -------
-        indices : np.ndarray
-            Index values of the NN resource point. -1 if no res point found.
-            2D integer array with shape equal to the exclusions extent shape.
+        batch_size : int, optional
+            Number of tasks to be submitted to parallel worker pool at one
+            time, by default 1000. As a rule of thumb, this number should be
+            set to ~10x the number of max_workers. Higher values are not
+            necessarily better, and may slow down processing and/or result in
+            out-of-memory errors. Values less than the number of workers can
+            also lead to slower processing, due to poor load balancing.
         """
-        gid_chunks = ceil(len(self._gids) / points_per_worker)
-        gid_chunks = np.array_split(self._gids, gid_chunks)
+        loggers = [__name__, "reV"]
 
-        # init full output arrays
-        indices = -1 * np.ones((self._n_excl,), dtype=np.int32)
-        iarr = self._make_excl_iarr(self._excl_shape)
+        n_jobs = len(self._gids)
+        n_batches = ceil(n_jobs / batch_size)
+        gid_batches = np.array_split(self._gids, n_batches)
 
-        futures = {}
-        loggers = [__name__, "reV"]
+        logger.info(
+            f"Kicking off {n_jobs} resource mapping jobs in {n_batches} "
+            "batches."
+        )
+        n_finished = 0
         with SpawnProcessPool(max_workers=max_workers, loggers=loggers) as exe:
-            # iterate through split executions, submitting each to worker
-            for i, gid_set in enumerate(gid_chunks):
-                # submit executions and append to futures list
-                futures[
-                    exe.submit(
-                        self.map_resource_gids,
-                        gid_set,
+            for gid_batch in gid_batches:
+                futures = {}
+                # iterate through split executions, submitting each to worker
+                for i, gid in enumerate(gid_batch):
+                    # submit executions and append to futures list
+                    excl_coords = self._get_excl_coords(
                         self._excl_fpath,
+                        gid,
                         self._sc_row_indices,
                         self._sc_col_indices,
                         self._excl_row_slices,
                         self._excl_col_slices,
-                        self._tree,
-                        self.distance_threshold,
                     )
-                ] = i
+                    futures[
+                        exe.submit(
+                            self.map_resource_gids,
+                            excl_coords,
+                            self._tree,
+                            self.distance_threshold,
+                        )
+                    ] = i
+
+                with h5py.File(self._excl_fpath, "a") as f:
+                    indices = f[dset]
+                    for future in as_completed(futures):
+                        i = futures[future]
+                        result = future.result()
+
+                        gid = gid_batch[i]
+                        row_slice, col_slice = self._get_excl_slices(
+                            gid,
+                            self._sc_row_indices,
+                            self._sc_col_indices,
+                            self._excl_row_slices,
+                            self._excl_col_slices,
+                        )
+                        n_rows = row_slice.stop - row_slice.start
+                        n_cols = col_slice.stop - col_slice.start
+                        result_shape = (n_rows, n_cols)
+                        indices[row_slice, col_slice] = result.reshape(
+                            result_shape
+                        )
 
-            n_finished = 0
-            for future in as_completed(futures):
                 n_finished += 1
                 logger.info(
-                    "Parallel TechMapping futures collected: "
-                    "{} out of {}".format(n_finished, len(futures))
+                    "Parallel TechMapping batches completed: "
+                    f"{n_finished} out of {n_batches}"
                 )
 
-                i = futures[future]
-                result = future.result()
-
-                for j, gid in enumerate(gid_chunks[i]):
-                    row_slice, col_slice = self._get_excl_slices(
-                        gid,
-                        self._sc_row_indices,
-                        self._sc_col_indices,
-                        self._excl_row_slices,
-                        self._excl_col_slices,
-                    )
-                    ind_slice = iarr[row_slice, col_slice].flatten()
-                    indices[ind_slice] = result[j]
-
-        indices = indices.reshape(self._excl_shape)
-
-        return indices
-
     @classmethod
     def run(
         cls,
         excl_fpath,
         res_fpath,
-        dset=None,
-        sc_resolution=2560,
+        dset,
+        sc_resolution=1200,
         dist_margin=1.05,
         max_workers=None,
-        points_per_worker=10,
+        batch_size=1000,
     ):
         """Run parallel mapping and save to h5 file.
 
         Parameters
         ----------
         excl_fpath : str
-            Filepath to exclusions h5 (tech layer). dset will be
-            created in excl_fpath.
+            Filepath to exclusions data HDF5 file. This file must must contain
+            latitude and longitude datasets.
         res_fpath : str
-            Filepath to .h5 resource file that we're mapping to.
-        dset : str, optional
-            Dataset name in excl_fpath to save mapping results to, if None
-            do not save tech_map to excl_fpath, by default None
+            Filepath to HDF5 resource file (e.g. WTK or NSRDB) to which
+            the exclusions will be mapped. Can refer to a single file (e.g.,
+            "/path/to/nsrdb_2024.h5" or a wild-card e.g.,
+            "/path/to/nsrdb_{}.h5")
+        dset : str
+            Dataset name in the `excl_fpath` file to which the the
+            techmap (exclusions-to-resource mapping data) will be saved.
+
+            .. Important:: If this dataset already exists in the h5 file,
+                it will be overwritten.
+
         sc_resolution : int | None, optional
-            Supply curve resolution, does not affect the exclusion to resource
-            (tech) mapping, but defines how many exclusion pixels are mapped
-            at a time, by default 2560
+            Defines how many exclusion pixels are mapped at a time. Units
+            indicate the length of one dimension, in pixels, of each square
+            chunk to be mapped. By default, this value is 1200, which will
+            map the exclusion pixels in 1200x1200 pixel chunks.
+
+            .. Note:: This parameter does not affect the exclusion to resource
+                (tech) mapping, which deviates from how the effect of the
+                ``sc_resolution`` parameter works in other functionality within
+                ``reV``.
+
         dist_margin : float, optional
             Extra margin to multiply times the computed distance between
             neighboring resource points, by default 1.05
         max_workers : int, optional
             Number of cores to run mapping on. None uses all available cpus,
             by default None
-        points_per_worker : int, optional
-            Number of supply curve points to map to resource gids on each
-            worker, by default 10
-
-        Returns
-        -------
-        indices : np.ndarray
-            Index values of the NN resource point. -1 if no res point found.
-            2D integer array with shape equal to the exclusions extent shape.
+        batch_size : int, optional
+            Number of tasks to be submitted to parallel worker pool at one
+            time, by default 1000. As a rule of thumb, this number should be
+            set to ~10x the number of max_workers. Higher values are not
+            necessarily better, and may slow down processing and/or result in
+            out-of-memory errors. Values less than the number of workers can
+            also lead to slower processing, due to poor load balancing.
         """
         kwargs = {"dist_margin": dist_margin, "sc_resolution": sc_resolution}
         mapper = cls(excl_fpath, res_fpath, **kwargs)
-        indices = mapper.map_resource(
-            max_workers=max_workers, points_per_worker=points_per_worker
+        mapper.initialize_dataset(dset)
+        mapper.map_resource(
+            max_workers=max_workers,
+            dset=dset,
+            batch_size=batch_size,
         )
-
-        if dset:
-            mapper.save_tech_map(
-                excl_fpath,
-                dset,
-                indices,
-                distance_threshold=mapper.distance_threshold,
-                res_fpath=res_fpath,
-            )
-
-        return indices
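Taken together, the changes to this module make `dset` a required argument and write the tech map directly into the exclusions h5 file instead of returning an array. A hedged usage sketch of the new entry point (the module path, file paths, and dataset name below are assumptions for illustration, not taken from this diff):

```python
# Hedged usage sketch of the reworked 0.14.0 API shown in this diff.
from reV.supply_curve.tech_mapping import TechMapping  # import path assumed

TechMapping.run(
    excl_fpath="/path/to/exclusions.h5",  # placeholder path
    res_fpath="/path/to/resource.h5",     # placeholder path
    dset="techmap_wtk",                   # overwritten if it already exists
    sc_resolution=1200,
    max_workers=4,
    batch_size=40,                        # ~10x max_workers per the docstring
)
```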
reV/utilities/__init__.py CHANGED
@@ -149,14 +149,16 @@ class SupplyCurveField(FieldEnum):
     MEAN_LCOE_FRICTION = "lcoe_friction_usd_per_mwh"
     RAW_LCOE = "lcoe_raw_usd_per_mwh"
     EOS_MULT = "multiplier_cc_eos"
+    FIXED_EOS_MULT = "multiplier_foc_eos"
+    VAR_EOS_MULT = "multiplier_voc_eos"
     REG_MULT = "multiplier_cc_regional"
     SC_POINT_ANNUAL_ENERGY_MWH = "annual_energy_site_mwh"
-    COST_BASE_OCC_USD_PER_AC_MW = "cost_base_occ_usd_per_ac_mw"
-    COST_SITE_OCC_USD_PER_AC_MW = "cost_site_occ_usd_per_ac_mw"
+    COST_BASE_CC_USD_PER_AC_MW = "cost_base_cc_usd_per_ac_mw"
+    COST_SITE_CC_USD_PER_AC_MW = "cost_site_cc_usd_per_ac_mw"
     COST_BASE_FOC_USD_PER_AC_MW = "cost_base_foc_usd_per_ac_mw"
     COST_SITE_FOC_USD_PER_AC_MW = "cost_site_foc_usd_per_ac_mw"
-    COST_BASE_VOC_USD_PER_AC_MW = "cost_base_voc_usd_per_ac_mw"
-    COST_SITE_VOC_USD_PER_AC_MW = "cost_site_voc_usd_per_ac_mw"
+    COST_BASE_VOC_USD_PER_AC_MWH = "cost_base_voc_usd_per_ac_mwh"
+    COST_SITE_VOC_USD_PER_AC_MWH = "cost_site_voc_usd_per_ac_mwh"
     FIXED_CHARGE_RATE = "fixed_charge_rate"
 
     # Bespoke outputs
@@ -266,6 +268,8 @@ class _LegacySCAliases(Enum):
     CONVEX_HULL_AREA = "convex_hull_area"
     CONVEX_HULL_CAPACITY_DENSITY = "convex_hull_capacity_density"
     FULL_CELL_CAPACITY_DENSITY = "full_cell_capacity_density"
+    COST_BASE_CC_USD_PER_AC_MW = "cost_base_occ_usd_per_ac_mw"
+    COST_SITE_CC_USD_PER_AC_MW = "cost_site_occ_usd_per_ac_mw"
 
 
 class ModuleName(str, Enum):
@@ -293,6 +297,7 @@ class ModuleName(str, Enum):
     REP_PROFILES = "rep-profiles"
     SUPPLY_CURVE = "supply-curve"
     SUPPLY_CURVE_AGGREGATION = "supply-curve-aggregation"
+    TECH_MAPPING = "tech-mapping"
 
     def __str__(self):
         return self.value
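For downstream code that reads supply curve tables, the cost-column changes above amount to the following old-to-new name mapping (an illustrative summary of this diff, not an object exported by reV). Per the diff, the occ-to-cc renames also gain entries in `_LegacySCAliases`, while the voc columns additionally switch from per-MW to per-MWh naming:

```python
# Old (0.13.0) supply curve column name -> new (0.14.0) name, per this diff.
RENAMED_SC_COLUMNS = {
    "cost_base_occ_usd_per_ac_mw": "cost_base_cc_usd_per_ac_mw",
    "cost_site_occ_usd_per_ac_mw": "cost_site_cc_usd_per_ac_mw",
    "cost_base_voc_usd_per_ac_mw": "cost_base_voc_usd_per_ac_mwh",
    "cost_site_voc_usd_per_ac_mw": "cost_site_voc_usd_per_ac_mwh",
}
```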
reV/version.py CHANGED
@@ -2,4 +2,4 @@
 reV Version number
 """
 
-__version__ = "0.13.0"
+__version__ = "0.14.0"