bifacial-radiance 0.5.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. bifacial_radiance/HPCScripts/BasicSimulations/addNewModule.py +15 -0
  2. bifacial_radiance/HPCScripts/BasicSimulations/dask_on_node.sh +11 -0
  3. bifacial_radiance/HPCScripts/BasicSimulations/run_sbatch.sbatch +51 -0
  4. bifacial_radiance/HPCScripts/BasicSimulations/simulate_fixedtilt_gencumsky.py +110 -0
  5. bifacial_radiance/HPCScripts/BasicSimulations/simulate_fixedtilt_gendaylit.py +102 -0
  6. bifacial_radiance/HPCScripts/BasicSimulations/simulate_tracking_gendaylit.py +126 -0
  7. bifacial_radiance/HPCScripts/Other Examples (unorganized)/PuertoRico.py +168 -0
  8. bifacial_radiance/HPCScripts/Other Examples (unorganized)/PuertoRico_2.py +166 -0
  9. bifacial_radiance/HPCScripts/Other Examples (unorganized)/PuertoRico_Original.py +195 -0
  10. bifacial_radiance/HPCScripts/Other Examples (unorganized)/basic_module_sampling.py +154 -0
  11. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_B.py +162 -0
  12. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_Cases.py +122 -0
  13. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_CasesMonth.py +142 -0
  14. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_PRNew.py +91 -0
  15. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_PRNewP2.py +95 -0
  16. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_TreeResults.py +108 -0
  17. bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_basic_module_sampling.py +103 -0
  18. bifacial_radiance/HPCScripts/Other Examples (unorganized)/simulate_JackHourly.py +160 -0
  19. bifacial_radiance/HPCScripts/Other Examples (unorganized)/simulate_improvedArray_Oct2127.py +623 -0
  20. bifacial_radiance/TEMP/.gitignore +4 -0
  21. bifacial_radiance/__init__.py +24 -0
  22. bifacial_radiance/data/CEC Modules.csv +16860 -0
  23. bifacial_radiance/data/default.ini +65 -0
  24. bifacial_radiance/data/falsecolor.exe +0 -0
  25. bifacial_radiance/data/gencumsky/License.txt +54 -0
  26. bifacial_radiance/data/gencumsky/Makefile +17 -0
  27. bifacial_radiance/data/gencumsky/README.txt +9 -0
  28. bifacial_radiance/data/gencumsky/Solar Irradiation Modelling.doc +0 -0
  29. bifacial_radiance/data/gencumsky/Sun.cpp +118 -0
  30. bifacial_radiance/data/gencumsky/Sun.h +45 -0
  31. bifacial_radiance/data/gencumsky/average_val.awk +3 -0
  32. bifacial_radiance/data/gencumsky/cPerezSkyModel.cpp +238 -0
  33. bifacial_radiance/data/gencumsky/cPerezSkyModel.h +57 -0
  34. bifacial_radiance/data/gencumsky/cSkyVault.cpp +536 -0
  35. bifacial_radiance/data/gencumsky/cSkyVault.h +86 -0
  36. bifacial_radiance/data/gencumsky/climateFile.cpp +312 -0
  37. bifacial_radiance/data/gencumsky/climateFile.h +37 -0
  38. bifacial_radiance/data/gencumsky/cumulative.cal +177 -0
  39. bifacial_radiance/data/gencumsky/cumulative.rad +14 -0
  40. bifacial_radiance/data/gencumsky/cumulativesky_rotated.rad +2 -0
  41. bifacial_radiance/data/gencumsky/gencumulativesky +0 -0
  42. bifacial_radiance/data/gencumsky/gencumulativesky.cpp +269 -0
  43. bifacial_radiance/data/gencumsky/make_gencumskyexe.py +107 -0
  44. bifacial_radiance/data/gencumsky/paths.h +62 -0
  45. bifacial_radiance/data/gencumulativesky +0 -0
  46. bifacial_radiance/data/gencumulativesky.exe +0 -0
  47. bifacial_radiance/data/ground.rad +83 -0
  48. bifacial_radiance/data/module.json +103 -0
  49. bifacial_radiance/gui.py +1696 -0
  50. bifacial_radiance/images/fig1_fixed_small.gif +0 -0
  51. bifacial_radiance/images/fig2_tracked_small.gif +0 -0
  52. bifacial_radiance/load.py +1156 -0
  53. bifacial_radiance/main.py +5673 -0
  54. bifacial_radiance/mismatch.py +461 -0
  55. bifacial_radiance/modelchain.py +299 -0
  56. bifacial_radiance/module.py +1427 -0
  57. bifacial_radiance/performance.py +466 -0
  58. bifacial_radiance/spectral_utils.py +555 -0
  59. bifacial_radiance-0.5.1.dist-info/METADATA +129 -0
  60. bifacial_radiance-0.5.1.dist-info/RECORD +63 -0
  61. bifacial_radiance-0.5.1.dist-info/WHEEL +6 -0
  62. bifacial_radiance-0.5.1.dist-info/licenses/LICENSE +30 -0
  63. bifacial_radiance-0.5.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,555 @@
1
+ import numpy as np
2
+ import pandas as pd
3
+ from collections.abc import Iterable
4
+ import os
5
+ from scipy import integrate
6
+ from tqdm import tqdm
7
+ from pvlib import iotools
8
+
9
+
10
+ class spectral_property(object):
11
+ """
12
+ WRITE DOCSTRING HERE
13
+ """
14
+
15
+ def load_file(filepath):
16
+ with open(filepath, 'r') as infile:
17
+ meta = next(infile)[:-1]
18
+ data = pd.read_csv(infile)
19
+
20
+ return spectral_property(data['value'], data['wavelength'],
21
+ interpolation=meta.split(':')[1])
22
+
23
+ def to_nm(wavelength, units):
24
+ unit_conversion = { 'nm': 1,
25
+ 'um': 1000 }
26
+
27
+ # Verify units are in conversion table
28
+ if units not in unit_conversion:
29
+ print("Warning: Unknown unit specified. Options are {}.".format(
30
+ unit_conversion.keys()))
31
+ units = 'nm'
32
+
33
+ return wavelength * unit_conversion[units]
34
+
35
+ def _linear_interpolation(self, wavelength_nm):
36
+ # Find upper and lower index
37
+ upper_bound = self.data[self.data.index > wavelength_nm].index.min()
38
+ lower_bound = self.data[self.data.index < wavelength_nm].index.max()
39
+
40
+ # Determine values of surrounding indices
41
+ upper_val = self.data['value'][upper_bound]
42
+ lower_val = self.data['value'][lower_bound]
43
+
44
+ # Calculate deltas
45
+ delta_lambda = upper_bound - lower_bound
46
+ delta_val = upper_val - lower_val
47
+
48
+ return lower_val + delta_val*(wavelength_nm - lower_bound)/delta_lambda
49
+
50
+ def _nearest_interpolation(self, wavelength_nm):
51
+ # Find upper and lower index
52
+ upper_bound = self.data[self.data.index > wavelength_nm].index.min()
53
+ lower_bound = self.data[self.data.index < wavelength_nm].index.max()
54
+
55
+ # Determine which index is closer
56
+ if (upper_bound - wavelength_nm) < (wavelength_nm - lower_bound):
57
+ return self.data['value'][upper_bound]
58
+ return self.data['value'][lower_bound]
59
+
60
+ def _lower_interpolation(self, wavelength_nm):
61
+ # Find lower index
62
+ lower_bound = self.data[self.data.index < wavelength_nm].index.max()
63
+
64
+ return self.data['value'][lower_bound]
65
+
66
+ def _upper_interpolation(self, wavelength_nm):
67
+ # Find upper index
68
+ upper_bound = self.data[self.data.index > wavelength_nm].index.min()
69
+
70
+ return self.data['value'][upper_bound]
71
+
72
+ interpolation_methods = {
73
+ 'linear': _linear_interpolation,
74
+ 'nearest': _nearest_interpolation,
75
+ 'lower': _lower_interpolation,
76
+ 'upper': _upper_interpolation
77
+ }
78
+
79
+ def __init__(self, values, index, index_units='nm', interpolation=None):
80
+ # Verify lengths match
81
+ if len(values) != len(index):
82
+ print("Warning: Length of values and index must match.")
83
+ return
84
+
85
+ # Convert inputs to list
86
+ values = [ val for val in values ]
87
+ index = [ spectral_property.to_nm(idx, index_units) for idx in index ]
88
+
89
+ # Create DataFrame
90
+ self.data = pd.DataFrame()
91
+ self.data['value'] = values
92
+ self.data['wavelength'] = index
93
+ self.data = self.data.set_index('wavelength')
94
+
95
+ self.interpolation = None
96
+ if interpolation in spectral_property.interpolation_methods:
97
+ self.interpolation = \
98
+ spectral_property.interpolation_methods[interpolation]
99
+ self.interpolation_type = interpolation
100
+ elif interpolation:
101
+ print("Warning: Specified interpolation type unknown.")
102
+
103
+ def _get_single(self, wavelength, units):
104
+ # Convert wavelength to nm
105
+ wavelength = spectral_property.to_nm(wavelength, units)
106
+
107
+ if wavelength in self.data.index:
108
+ # If the value for that wavelength is known, return it
109
+ return self.data['value'][wavelength]
110
+ elif self.interpolation:
111
+ # Check wavelength is within range
112
+ if wavelength < self.data.index.min() or \
113
+ wavelength > self.data.index.max():
114
+ print("Warning: Requested wavelength outside spectrum.")
115
+ return None
116
+
117
+ # Return interpolated value
118
+ return self.interpolation(self, wavelength)
119
+
120
+ return None
121
+
122
+ def __getitem__(self, wavelength, units='nm'):
123
+ if isinstance(wavelength, Iterable):
124
+ return np.array([ self._get_single(wl, units) for wl in wavelength ])
125
+ return self._get_single(wavelength, units)
126
+
127
+ def to_file(self, filepath, append=False):
128
+ mode = 'w'
129
+ if append:
130
+ mode = 'a'
131
+
132
+ with open(filepath, mode) as outfile:
133
+ outfile.write(f"interpolation:{self.interpolation_type}\n")
134
+ self.data.to_csv(outfile)
135
+
136
+ def range(self):
137
+ # Find upper and lower index
138
+ upper_bound = self.data.index.max()
139
+ lower_bound = self.data.index.min()
140
+
141
+ return (lower_bound, upper_bound)
142
+
143
+ def scale_values(self, scaling_factor):
144
+ self.data['value'] *= scaling_factor
145
+
146
+ def spectral_albedo_smarts(zen, azm, material, min_wavelength=300,
147
+ max_wavelength=4000):
148
+
149
+ import pySMARTS
150
+
151
+ smarts_res = pySMARTS.SMARTSSpectraZenAzm('30 31', str(zen), str(azm), material,
152
+ min_wvl=str(min_wavelength),
153
+ max_wvl=str(max_wavelength))
154
+
155
+ return spectral_property(smarts_res['Local_ground_reflectance'],
156
+ smarts_res['Wvlgth'], interpolation='linear')
157
+
158
+ def spectral_irradiance_smarts(zen, azm, material='LiteSoil', min_wavelength=300,
159
+ max_wavelength=4000):
160
+
161
+ import pySMARTS
162
+
163
+ try:
164
+ smarts_res = pySMARTS.SMARTSSpectraZenAzm('2 3 4', str(zen), str(azm),
165
+ material=material,
166
+ min_wvl=str(min_wavelength),
167
+ max_wvl=str(max_wavelength))
168
+ except PermissionError as e:
169
+ msg = "{}".format(e)
170
+ raise PermissionError(msg + " Error accessing SMARTS. Make sure you have "
171
+ "SMARTS installed in a directory that you have read/write privileges for. ")
172
+
173
+
174
+ dni_spectrum = spectral_property(smarts_res['Direct_normal_irradiance'],
175
+ smarts_res['Wvlgth'], interpolation='linear')
176
+ dhi_spectrum = spectral_property(smarts_res['Difuse_horizn_irradiance'],
177
+ smarts_res['Wvlgth'], interpolation='linear')
178
+ ghi_spectrum = spectral_property(smarts_res['Global_horizn_irradiance'],
179
+ smarts_res['Wvlgth'], interpolation='linear')
180
+
181
+ return (dni_spectrum, dhi_spectrum, ghi_spectrum)
182
+
183
+
184
+
185
+ def spectral_irradiance_smarts_SRRL(YEAR, MONTH, DAY, HOUR, ZONE,
186
+ LATIT, LONGIT, ALTIT,
187
+ RH, TAIR, SEASON, TDAY, SPR, W,
188
+ TILT, WAZIM, HEIGHT,
189
+ ALPHA1, ALPHA2, OMEGL, GG, BETA, TAU5,
190
+ RHOG, material,
191
+ IOUT='2 3 4', min_wvl='280', max_wvl='4000'):
192
+
193
+ import pySMARTS
194
+
195
+ smarts_res = pySMARTS.SMARTSSRRL(IOUT=IOUT, YEAR=YEAR,MONTH=MONTH,DAY=DAY,HOUR=HOUR, ZONE=ZONE,
196
+ LATIT=LATIT, LONGIT=LONGIT, ALTIT=ALTIT,
197
+ RH=RH, TAIR=TAIR, SEASON=SEASON, TDAY=TDAY, SPR=SPR, W=W,
198
+ TILT=TILT, WAZIM=WAZIM, HEIGHT=HEIGHT,
199
+ ALPHA1 = ALPHA1, ALPHA2 = ALPHA2, OMEGL = OMEGL,
200
+ GG = GG, BETA = BETA, TAU5= TAU5,
201
+ RHOG=RHOG, material=material,
202
+ min_wvl=min_wvl, max_wvl=max_wvl)
203
+
204
+
205
+ dni_spectrum = spectral_property(smarts_res[smarts_res.keys()[1]],
206
+ smarts_res['Wvlgth'], interpolation='linear')
207
+ dhi_spectrum = spectral_property(smarts_res[smarts_res.keys()[2]],
208
+ smarts_res['Wvlgth'], interpolation='linear')
209
+ ghi_spectrum = spectral_property(smarts_res[smarts_res.keys()[3]],
210
+ smarts_res['Wvlgth'], interpolation='linear')
211
+
212
+ return (dni_spectrum, dhi_spectrum, ghi_spectrum)
213
+
214
+
215
+
216
+ def spectral_albedo_smarts_SRRL(YEAR, MONTH, DAY, HOUR, ZONE,
217
+ LATIT, LONGIT, ALTIT,
218
+ RH, TAIR, SEASON, TDAY, SPR, W,
219
+ TILT, WAZIM, HEIGHT,
220
+ ALPHA1, ALPHA2, OMEGL, GG, BETA, TAU5,
221
+ RHOG, material,
222
+ IOUT='30 31', min_wvl='280', max_wvl='4000'):
223
+
224
+ import pySMARTS
225
+
226
+ smarts_res = pySMARTS.SMARTSSRRL(IOUT=IOUT, YEAR=YEAR,MONTH=MONTH,DAY=DAY,HOUR=HOUR, ZONE=ZONE,
227
+ LATIT=LATIT, LONGIT=LONGIT, ALTIT=ALTIT,
228
+ RH=RH, TAIR=TAIR, SEASON=SEASON, TDAY=TDAY, SPR=SPR, W=W,
229
+ TILT=TILT, WAZIM=WAZIM, HEIGHT=HEIGHT,
230
+ ALPHA1 = ALPHA1, ALPHA2 = ALPHA2, OMEGL = OMEGL,
231
+ GG = GG, BETA = BETA, TAU5= TAU5,
232
+ RHOG=RHOG, material=material,
233
+ min_wvl=min_wvl, max_wvl=max_wvl)
234
+
235
+ return spectral_property(smarts_res['Local_ground_reflectance'],
236
+ smarts_res['Wvlgth'], interpolation='linear')
237
+
238
+
239
+ def generate_spectra(metdata, simulation_path, ground_material='Gravel', spectra_folder=None, scale_spectra=False,
240
+ scale_albedo=False, scale_albedo_nonspectral_sim=False, scale_upper_bound=2500, min_wavelength=280, max_wavelength=4000):
241
+ """
242
+ generate spectral curve for particular material. Requires pySMARTS
243
+
244
+ Parameters
245
+ ----------
246
+ metdata : bifacial_radiance MetObj
247
+ MetObj containing weather data, with a datetime index.
248
+ simulation_path: string or path
249
+ path of simulation directory
250
+ ground_material : string, optional
251
+ type of ground material for spectral simulation. Options include:
252
+ Grass, Gravel, Snow, Seasonal etc.
253
+ The default is 'Gravel'.
254
+ spectra_folder : path, optional
255
+ location to save spectral data. The default is None.
256
+ scale_spectra : bool, optional
257
+ DESCRIPTION. The default is False.
258
+ scale_albedo : bool, optional
259
+ DESCRIPTION. The default is False.
260
+ scale_albedo_nonspectral_sim : bool, optional
261
+ DESCRIPTION. The default is False.
262
+ scale_upper_bound: integer, optional
263
+ Set an upper bound for the wavelength when taking the mean
264
+ or integral of any generated spectra.
265
+
266
+ Returns
267
+ -------
268
+ spectral_alb : spectral_property class
269
+ spectral_alb.data: dataframe with frequency and magnitude data.
270
+ spectral_dni : spectral_property class
271
+ spectral_dni.data: dataframe with frequency and magnitude data.
272
+ spectral_dhi : spectral_property class
273
+ spectral_dhi.data: dataframe with frequency and magnitude data.
274
+ weighted_alb : pd.series
275
+ datetime-indexed series of weighted albedo values
276
+
277
+ """
278
+
279
+ # make the datetime easily readable and indexed
280
+ dts = pd.Series(data=metdata.datetime)
281
+
282
+ # weighted albedo data frame
283
+ walb = pd.Series(index=np.array(metdata.datetime),dtype='float64')
284
+
285
+ # print useful reminders
286
+ if scale_albedo_nonspectral_sim:
287
+ print(' -= Non-Spectral Simulation =- \n Spectra files will NOT be saved.')
288
+ else:
289
+ print(' -= Spectral Simulation =- \n Spectra files will be saved.')
290
+
291
+ for dt in tqdm(dts,ncols=100,desc='Generating Spectra'):
292
+
293
+ # scrape all the necessary metadata
294
+ idx = dts.index[dts==dt][0]
295
+ dni = metdata.dni[idx]
296
+ dhi = metdata.dhi[idx]
297
+ ghi = metdata.ghi[idx]
298
+ if metdata.albedo is not None:
299
+ alb = metdata.albedo[idx]
300
+ else:
301
+ alb = 0.2
302
+ solpos = metdata.solpos.iloc[idx]
303
+ zen = float(solpos.zenith)
304
+ azm = float(solpos.azimuth) - 180
305
+ lat = metdata.latitude
306
+
307
+ # create file names
308
+ suffix = f'_{str(dt.year)[-2:]}_{dt.month:02}_{dt.day:02}_{dt.hour:02}_{dt.minute:02}.txt'
309
+ dni_file = os.path.join(simulation_path, spectra_folder, "dni"+suffix)
310
+ dhi_file = os.path.join(simulation_path, spectra_folder, "dhi"+suffix)
311
+ ghi_file = os.path.join(simulation_path, spectra_folder, "ghi"+suffix)
312
+ alb_file = os.path.join(simulation_path, spectra_folder, "alb"+suffix)
313
+
314
+ # generate the base spectra
315
+ try:
316
+ spectral_dni, spectral_dhi, spectral_ghi = spectral_irradiance_smarts(zen, azm, min_wavelength=min_wavelength, max_wavelength=max_wavelength)
317
+ except:
318
+ if scale_albedo_nonspectral_sim:
319
+ walb[dt] = 0.0
320
+ continue
321
+
322
+ # limit dataframes for calculations by scaling upper bound
323
+ tdni = spectral_dni.data[spectral_dni.data.index <= scale_upper_bound]
324
+ tdhi = spectral_dhi.data[spectral_dhi.data.index <= scale_upper_bound]
325
+ tghi = spectral_ghi.data[spectral_ghi.data.index <= scale_upper_bound]
326
+
327
+ # scaling spectra
328
+ if scale_spectra:
329
+ dni_scale = dni / integrate.trapezoid(tdni.value, x=tdni.index)
330
+ dhi_scale = dhi / integrate.trapezoid(tdhi.value, x=tdhi.index)
331
+ ghi_scale = ghi / integrate.trapezoid(tghi.value, x=tghi.index)
332
+ spectral_dni.scale_values(dni_scale)
333
+ spectral_dhi.scale_values(dhi_scale)
334
+ spectral_ghi.scale_values(ghi_scale)
335
+
336
+ # Determine Seasonal ground cover, if necessary
337
+ north = [1,2,3,4,10,11,12]
338
+ south = [5,6,7,8,9,10]
339
+ if lat < 0: winter = north
340
+ if lat > 0: winter = south
341
+
342
+ if ground_material == 'Seasonal':
343
+ MONTH = metdata.datetime[idx].month
344
+ if MONTH in winter :
345
+ if alb >= 0.6:
346
+ ground_material = 'Snow'
347
+ else:
348
+ ground_material = 'DryGrass'
349
+ else:
350
+ ground_material = 'Grass'
351
+
352
+ # Generate base spectral albedo
353
+ spectral_alb = spectral_albedo_smarts(zen, azm, ground_material, min_wavelength=280)
354
+
355
+ # Limit albedo by upper bound wavelength
356
+ talb = spectral_alb.data[spectral_alb.data.index <= scale_upper_bound]
357
+
358
+ # scaling albedo
359
+ if scale_albedo:
360
+ #***
361
+ # Currently using simple scaling model (scale by mean)
362
+ #***
363
+ denom = talb.values.mean()
364
+ scale_factor = alb / denom
365
+ spectral_alb.scale_values(scale_factor)
366
+
367
+ # If performing a non-spectral simulation, generate single albedo weighted by spectra
368
+ if scale_albedo_nonspectral_sim:
369
+ #SR = SR[SR.index <= scale_upper_bound] # placeholder for Spectral Responsivity
370
+ num = talb * tghi #* SR
371
+ num = integrate.trapezoid(num.value, x=num.index)
372
+ denom = tghi #* SR
373
+ denom = integrate.trapezoid(denom.value, x=denom.index)
374
+ alb_weighted = num / denom
375
+
376
+ walb[dt] = alb_weighted
377
+
378
+ # only save the files if performing a spectral simulation
379
+ if not scale_albedo_nonspectral_sim:
380
+ spectral_alb.to_file(alb_file)
381
+ spectral_dhi.to_file(dhi_file)
382
+ spectral_dni.to_file(dni_file)
383
+ spectral_ghi.to_file(ghi_file)
384
+
385
+ # save a basic csv of weighted albedo, indexed by datetime
386
+ if scale_albedo_nonspectral_sim:
387
+ walbPath = os.path.join(simulation_path,spectra_folder,'albedo_scaled_nonSpec.csv')
388
+ walb.to_csv(walbPath)
389
+ print('Weighted albedo CSV saved.')
390
+ weighted_alb = walb
391
+ else:
392
+ weighted_alb = None
393
+
394
+ return (spectral_alb, spectral_dni, spectral_dhi, weighted_alb)
395
+
396
+ def generate_spectral_tmys(wavelengths, spectra_folder, metdata, location_name, output_folder):
397
+ """
398
+ Generate a series of TMY-like files with per-wavelength irradiance. There will be one file per
399
+ wavelength. These are necessary to run a spectral simulation with gencumsky
400
+
401
+ Paramters:
402
+ ----------
403
+ wavelengths: (np.array or list)
404
+ array or list of integer wavelengths to simulate, in units [nm]. example: [300,325,350]
405
+ spectra_folder: (path or str)
406
+ File path or path-like string pointing to the folder contained the SMARTS generated spectra
407
+ metdata: pandas DataFrame
408
+ DataFrame containing the weather data, with a datetime index.
409
+ location_name:
410
+ _description_
411
+ output_folder:
412
+ File path or path-like string pointing to the destination folder for spectral TMYs
413
+ """
414
+
415
+ # -- read in the spectra files
416
+ spectra_files = next(os.walk(spectra_folder))[2]
417
+ spectra_files.sort()
418
+
419
+ # -- read in the weather file and format
420
+ tmydata = metdata.tmydata.copy()
421
+ tmydata.rename(columns={'dni':'DNI',
422
+ 'dhi':'DHI',
423
+ 'temp_air':'DryBulb',
424
+ 'wind_speed':'Wspd',
425
+ 'ghi':'GHI',
426
+ 'relative_humidity':'RH',
427
+ 'albedo':'Alb'
428
+ }, inplace=True)
429
+ dtindex = tmydata.index
430
+
431
+ # -- grab the weather file header to reproduce location meta-data
432
+ header = metdata.metadata.copy()
433
+
434
+ # -- read in a spectra file to copy wavelength-index
435
+ temp = pd.read_csv(os.path.join(spectra_folder,spectra_files[0]), header=1, index_col = 0)
436
+
437
+ # -- copy and reproduce the datetime index
438
+ dates = []
439
+ for file in spectra_files:
440
+ take = file[4:-4]
441
+ if take not in dates:
442
+ dates.append(take)
443
+ dates = pd.to_datetime(dates,format='%y_%m_%d_%H_%M').tz_localize(dtindex.tz)
444
+
445
+ # -- create a multi-index of columns [timeindex:alb,dni,dhi,ghi]
446
+ iterables = [dates,['ALB','DHI','DNI','GHI']]
447
+ multi_index = pd.MultiIndex.from_product(iterables, names=['time_index','irr_type'])
448
+
449
+ # -- create empty dataframe
450
+ spectra_df = pd.DataFrame(index=temp.index,columns=multi_index)
451
+
452
+ # -- fill with irradiance data
453
+ for file in spectra_files:
454
+ a = pd.to_datetime(file[4:-4],format='%y_%m_%d_%H_%M').tz_localize(dtindex.tz)
455
+ b = file[:3].upper()
456
+ spectra_df[a,b] = pd.read_csv(os.path.join(spectra_folder,file),header=1, index_col=0)
457
+
458
+ # -- reorder the columns to match TMYs
459
+ spectra_df.columns.set_levels(['Alb','DHI','DNI','GHI'],level=1)
460
+ # -- create arrays of zeros for data outside the array
461
+ zeros = np.zeros(len(dtindex))
462
+
463
+ # -- build the blank tmy-like data frame
464
+ blank_df = pd.DataFrame(index=dtindex, data={'Date (MM/DD/YYYY)':dtindex.strftime('%#m/%#d/%Y'),
465
+ 'Time (HH:MM)':dtindex.strftime('%H:%M'),
466
+ 'Wspd':tmydata['Wspd'],'Dry-bulb':tmydata['DryBulb'],
467
+ 'DHI':zeros,'DNI':zeros,'GHI':zeros,'ALB':zeros})
468
+
469
+ # column names for transfer
470
+ irrs = ['DNI','DHI','GHI','ALB']
471
+
472
+ # -- grab data, save file
473
+ for wave in tqdm(wavelengths, ncols=100, desc='Generating Spectral TMYs'):
474
+ fileName = f'{location_name}_TMY_w{wave:04}.csv'
475
+ fileName = os.path.join(output_folder,fileName)
476
+ wave_df = blank_df.copy()
477
+ for col in spectra_df.columns:
478
+ wave_df.loc[col[0],col[1]] = spectra_df[col].loc[wave]
479
+
480
+ with open(fileName, 'w', newline='') as ict:
481
+ # for line in header:
482
+ # ict.write(line)
483
+ wave_df.to_csv(ict, index=False)
484
+
485
+
486
+ def integrated_spectrum(spectra_folder, metdata ):
487
+ """
488
+ Generate integrated sums across the full spectra
489
+
490
+ Paramters:
491
+ ----------
492
+ spectra_folder: (path or str)
493
+ File path or path-like string pointing to the folder contained the SMARTS generated spectra
494
+ metdata: pandas DataFrame
495
+ DataFrame containing the weather data, with a datetime index.
496
+
497
+
498
+ Returns:
499
+ --------
500
+ integrated_sums: (list)
501
+ list of integrated sums for DNI, DHI, DNI*ALB, DHI*ALB
502
+ """
503
+
504
+ # -- read in the spectra files
505
+ spectra_files = next(os.walk(spectra_folder))[2]
506
+ spectra_files.sort()
507
+
508
+ # -- read in the weather file and format
509
+ tmydata = metdata.tmydata.copy()
510
+ #tmydata.index = tmydata.index+pd.Timedelta(hours=1)
511
+ tmydata.rename(columns={'dni':'DNI',
512
+ 'dhi':'DHI',
513
+ 'temp_air':'DryBulb',
514
+ 'wind_speed':'Wspd',
515
+ 'ghi':'GHI',
516
+ 'relative_humidity':'RH',
517
+ 'albedo':'Alb'
518
+ }, inplace=True)
519
+ dtindex = tmydata.index
520
+
521
+ # -- grab the weather file header to reproduce location meta-data
522
+ header = metdata.metadata.copy()
523
+
524
+ # -- read in a spectra file to copy wavelength-index
525
+ temp = pd.read_csv(os.path.join(spectra_folder,spectra_files[0]), header=1, index_col = 0)
526
+
527
+ # -- copy and reproduce the datetime index
528
+ dates = []
529
+ for file in spectra_files:
530
+ take = file[4:-4]
531
+ if take not in dates:
532
+ dates.append(take)
533
+ dates = pd.to_datetime(dates,format='%y_%m_%d_%H_%M').tz_localize(dtindex.tz)
534
+
535
+ # -- create a multi-index of columns [timeindex:alb,dni,dhi,ghi]
536
+ iterables = [dates,['ALB','DHI','DNI','GHI']]
537
+ multi_index = pd.MultiIndex.from_product(iterables, names=['time_index','irr_type'])
538
+
539
+ # -- create empty dataframe
540
+ spectra_df = pd.DataFrame(index=temp.index,columns=multi_index)
541
+ # -- fill with irradiance data
542
+ for file in spectra_files:
543
+ a = pd.to_datetime(file[4:-4],format='%y_%m_%d_%H_%M').tz_localize(dtindex.tz)
544
+ b = file[:3].upper()
545
+ spectra_df[a,b] = pd.read_csv(os.path.join(spectra_folder,file),header=1, index_col=0)
546
+ integrated_sums = pd.DataFrame(index=dates, columns=['Sum_DNI', 'Sum_DHI', 'Sum_DNI_ALB', 'Sum_DHI_ALB'])
547
+ for col in spectra_df.columns:
548
+ integrated_sums.loc[col[0], 'Sum_DNI'] = integrate.trapezoid(spectra_df[col[0], 'DNI'], spectra_df.index)
549
+ integrated_sums.loc[col[0], 'Sum_DHI'] = integrate.trapezoid(spectra_df[col[0], 'DHI'], spectra_df.index)
550
+ integrated_sums.loc[col[0], 'Sum_DNI_ALB'] = integrate.trapezoid(spectra_df[col[0], 'DNI'] * spectra_df[col[0], 'ALB'], spectra_df.index)
551
+ integrated_sums.loc[col[0], 'Sum_DHI_ALB'] = integrate.trapezoid(spectra_df[col[0], 'DHI'] * spectra_df[col[0], 'ALB'], spectra_df.index)
552
+
553
+ return integrated_sums
554
+
555
+
@@ -0,0 +1,129 @@
1
+ Metadata-Version: 2.4
2
+ Name: bifacial_radiance
3
+ Version: 0.5.1
4
+ Summary: Tools to interface with Radiance for the PV researcher
5
+ Author-email: Chris Deline <chris.deline@nrel.gov>, Silvana Ovaitt <silvana.ovaitt@nrel.gov>
6
+ Project-URL: Homepage, https://github.com/NREL/bifacial_radiance
7
+ Project-URL: Documentation, https://bifacial-radiance.readthedocs.io
8
+ Project-URL: Repository, https://github.com/NREL/bifacial_radiance
9
+ Project-URL: Issues, https://github.com/NREL/bifacial_radiance/issues
10
+ Keywords: bifacial,radiance,photovoltaics,pv,ray tracing
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Science/Research
13
+ Classifier: License :: OSI Approved :: BSD License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.8
16
+ Classifier: Programming Language :: Python :: 3.9
17
+ Classifier: Programming Language :: Python :: 3.10
18
+ Classifier: Programming Language :: Python :: 3.11
19
+ Classifier: Programming Language :: Python :: 3.12
20
+ Classifier: Programming Language :: Python :: 3.13
21
+ Requires-Python: >=3.8
22
+ Description-Content-Type: text/markdown
23
+ License-File: LICENSE
24
+ Requires-Dist: configparser
25
+ Requires-Dist: deprecated
26
+ Requires-Dist: pandas
27
+ Requires-Dist: pvlib>=0.8.0
28
+ Requires-Dist: pvmismatch
29
+ Requires-Dist: pyradiance
30
+ Requires-Dist: requests
31
+ Requires-Dist: scipy>1.6.0
32
+ Requires-Dist: tqdm
33
+ Provides-Extra: test
34
+ Requires-Dist: pytest; extra == "test"
35
+ Requires-Dist: pytest-cov; extra == "test"
36
+ Requires-Dist: pySMARTS; extra == "test"
37
+ Provides-Extra: doc
38
+ Requires-Dist: ipython; extra == "doc"
39
+ Requires-Dist: sphinx>=1.8.0; extra == "doc"
40
+ Requires-Dist: sphinx-autoapi>=1.1.0; extra == "doc"
41
+ Requires-Dist: pydata-sphinx-theme>=0.14.4; extra == "doc"
42
+ Requires-Dist: nbsphinx>=0.8.8; extra == "doc"
43
+ Requires-Dist: sphinx-gallery>=0.8.1; extra == "doc"
44
+ Provides-Extra: all
45
+ Requires-Dist: bifacial_radiance[doc,test]; extra == "all"
46
+ Requires-Dist: jupyter; extra == "all"
47
+ Dynamic: license-file
48
+
49
+ ![logo](docs/images_wiki/bifacial_radiance.png)
50
+
51
+ # bifacial_radiance
52
+ Main branch: [![Build Status](https://github.com/nrel/bifacial_radiance/actions/workflows/pytest.yaml/badge.svg?branch=main)](https://github.com/nrel/bifacial_radiance/actions)
53
+ [![Coverage Status](https://coveralls.io/repos/github/NREL/bifacial_radiance/badge.svg?branch=main)](https://coveralls.io/github/NREL/bifacial_radiance?branch=main)
54
+ [![Documentation Status](https://readthedocs.org/projects/bifacial-radiance/badge/?version=stable)](https://bifacial-radiance.readthedocs.io/en/latest/?badge=stable)
55
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3860350.svg)](https://doi.org/10.5281/zenodo.3860350)
56
+ [![status](https://joss.theoj.org/papers/b018890e2ab7ddf723d37b17e308e273/status.svg)](https://joss.theoj.org/papers/b018890e2ab7ddf723d37b17e308e273)
57
+
58
+ Development branch: [![Build Status](https://github.com/nrel/bifacial_radiance/actions/workflows/pytest.yaml/badge.svg?branch=development)](https://github.com/nrel/bifacial_radiance/actions)
59
+ [![Coverage Status](https://coveralls.io/repos/github/NREL/bifacial_radiance/badge.svg?branch=development)](https://coveralls.io/github/NREL/bifacial_radiance?branch=development)
60
+ [![Documentation Status](https://readthedocs.org/projects/bifacial-radiance/badge/?version=latest)](https://bifacial-radiance.readthedocs.io/en/latest/?badge=latest)
61
+
62
+ ## Introduction
63
+
64
+ bifacial_radiance contains a series of Python wrapper functions to make working with
65
+ RADIANCE easier, particularly for the PV researcher interested in bifacial PV
66
+ performance. For more information, check out our [documentation](https://bifacial-radiance.readthedocs.io),
67
+ [Tutorials in the form of Jupyter Notebooks](docs/tutorials/), or refer to our [Wiki](https://github.com/NREL/bifacial_radiance/wiki)
68
+ and [Issues](https://github.com/NREL/bifacial_radiance/issues) page.
69
+
70
+ ## Installation
71
+
72
+ https://youtu.be/4A9GocfHKyM
73
+ This video shows how to install the bifacial_radiance software and all associated software needed. More info on the Wiki. Instructions are also shown below.
74
+
75
+ For detailed instructions of how to install bifacial_radiance, you can also refer to the [installation guide](https://bifacial-radiance.readthedocs.io/en/stable/user_guide/installation.html)
76
+
77
+ ## GUI!
78
+
79
+ A GUI has been added in version 3.0. The GUI reads/writes all input parameters necessary to run a simulation, and runs the specified simulation by calling the correct functions with the specified parameters. So no need to use a journal or a script! But you still need to install following the procedure below.
80
+
81
+ To run the gui, import bifacial_radiance and run bifacial_radiance.gui()
82
+
83
+ ![GUI](docs/images_wiki/bifacial_radiance_GUI.png)
84
+
85
+
86
+ ## Usage
87
+
88
+ We have a tutorial video, showing how the program is structured, how to use the Jupyter tutorials and the GUI. You can watch it here [Tutorial Webinar](https://www.youtube.com/watch?v=1X9L-R-RVGA), with the [slides available here](https://www.nrel.gov/docs/fy20osti/75218.pdf).
89
+
90
+ Check out the [Jupyter Tutorial Notebooks](docs/tutorials/) to see detailed examples of the capacities of bifacial_radiance.
91
+ The [Intro examples](https://bifacial-radiance.readthedocs.io/en/stable/introexamples.html) and the [readthedocs documentation](https://bifacial-radiance.readthedocs.io) also provide a good starting point.
92
+
93
+ ## Contributing
94
+
95
+ We need your help to make bifacial_radiance a great tool! Please see the [Contributing page](https://bifacial-radiance.readthedocs.io/en/stable/contributing.html) for more on how you can contribute. The long-term success of bifacial_radiance requires substantial community support.
96
+
97
+ ## License
98
+
99
+ Bifacial_radiance open source code is copyrighted by the Alliance for Sustainable Energy and licensed with BSD-3-Clause terms, found [here](https://github.com/NREL/bifacial_radiance/blob/master/LICENSE).
100
+
101
+ ## Getting Support
102
+
103
+ If you suspect that you may have discovered a bug or if you'd like to
104
+ change something about bifacial_radiance, then please make an issue on our
105
+ [GitHub issues page](https://github.com/NREL/bifacial_radiance/issues).
106
+
107
+ bifacial_radiance questions can be asked on
108
+ [Stack Overflow](http://stackoverflow.com) and tagged with
109
+ the [bifacial_radiance](http://stackoverflow.com/questions/tagged/bifacial_radiance) tag.
110
+
111
+ The [bifacial-radiance google group](https://groups.google.com/forum/#!forum/bifacial_radiance)
112
+ has just started, and will be used for discussing various topics of interest to the bifacial-radiance
113
+ community. We also make new version announcements on the google group.
114
+
115
+ ## Citing
116
+
117
+ If you use bifacial_radiance in a published work, please cite:
118
+
119
+ Ayala Pelaez and Deline, (2020). bifacial_radiance: a python package for modeling bifacial solar photovoltaic systems. Journal of Open Source Software, 5(50), 1865, https://doi.org/10.21105/joss.01865
120
+
121
+
122
+ Please also cite the DOI corresponding to the specific version of bifacial_radiance that you used. bifacial_radiance DOIs are listed at [Zenodo.org](https://zenodo.org/search?page=1&size=20&q=conceptrecid:3860349&all_versions&sort=-version)
123
+
124
+ Additional bifacial_radiance publications with validation of the software include:
125
+ * Deline, Chris, and Ayala, Silvana. Bifacial_Radiance. Computer Software. https://github.com/NREL/bifacial_radiance. USDOE Office of Energy Efficiency and Renewable Energy (EERE), Solar Energy Technologies Office (EE-4S). 17 Dec. 2017. Web. doi:10.11578/dc.20180530.16. https://www.osti.gov/doecode/biblio/6869
126
+ * Ayala Pelaez S, Deline C, Greenberg P, Stein JS, Kostuk RK. Model and validation of single-axis tracking with bifacial PV. IEEE J Photovoltaics. 2019;9(3):715-721. https://ieeexplore.ieee.org/document/8644027 and https://www.nrel.gov/docs/fy19osti/72039.pdf (pre-print, conference version)
127
+ * Ayala Pelaez, Deline C, MacAlpine M, Marion B, Stein J, Kostuk K. Comparison of Bifacial Solar Irradiance Model Predictions with Field Validation. IEEE J Photovoltaics. 2019; 9(1):82-87. https://ieeexplore.ieee.org/document/8534404
128
+
129
+ Or check our [Github Wiki](https://github.com/NREL/bifacial_radiance/wiki) for a complete list of publications.