bifacial_radiance-0.5.1-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- bifacial_radiance/HPCScripts/BasicSimulations/addNewModule.py +15 -0
- bifacial_radiance/HPCScripts/BasicSimulations/dask_on_node.sh +11 -0
- bifacial_radiance/HPCScripts/BasicSimulations/run_sbatch.sbatch +51 -0
- bifacial_radiance/HPCScripts/BasicSimulations/simulate_fixedtilt_gencumsky.py +110 -0
- bifacial_radiance/HPCScripts/BasicSimulations/simulate_fixedtilt_gendaylit.py +102 -0
- bifacial_radiance/HPCScripts/BasicSimulations/simulate_tracking_gendaylit.py +126 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/PuertoRico.py +168 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/PuertoRico_2.py +166 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/PuertoRico_Original.py +195 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/basic_module_sampling.py +154 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_B.py +162 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_Cases.py +122 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_CasesMonth.py +142 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_PRNew.py +91 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_PRNewP2.py +95 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_TreeResults.py +108 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_basic_module_sampling.py +103 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/simulate_JackHourly.py +160 -0
- bifacial_radiance/HPCScripts/Other Examples (unorganized)/simulate_improvedArray_Oct2127.py +623 -0
- bifacial_radiance/TEMP/.gitignore +4 -0
- bifacial_radiance/__init__.py +24 -0
- bifacial_radiance/data/CEC Modules.csv +16860 -0
- bifacial_radiance/data/default.ini +65 -0
- bifacial_radiance/data/falsecolor.exe +0 -0
- bifacial_radiance/data/gencumsky/License.txt +54 -0
- bifacial_radiance/data/gencumsky/Makefile +17 -0
- bifacial_radiance/data/gencumsky/README.txt +9 -0
- bifacial_radiance/data/gencumsky/Solar Irradiation Modelling.doc +0 -0
- bifacial_radiance/data/gencumsky/Sun.cpp +118 -0
- bifacial_radiance/data/gencumsky/Sun.h +45 -0
- bifacial_radiance/data/gencumsky/average_val.awk +3 -0
- bifacial_radiance/data/gencumsky/cPerezSkyModel.cpp +238 -0
- bifacial_radiance/data/gencumsky/cPerezSkyModel.h +57 -0
- bifacial_radiance/data/gencumsky/cSkyVault.cpp +536 -0
- bifacial_radiance/data/gencumsky/cSkyVault.h +86 -0
- bifacial_radiance/data/gencumsky/climateFile.cpp +312 -0
- bifacial_radiance/data/gencumsky/climateFile.h +37 -0
- bifacial_radiance/data/gencumsky/cumulative.cal +177 -0
- bifacial_radiance/data/gencumsky/cumulative.rad +14 -0
- bifacial_radiance/data/gencumsky/cumulativesky_rotated.rad +2 -0
- bifacial_radiance/data/gencumsky/gencumulativesky +0 -0
- bifacial_radiance/data/gencumsky/gencumulativesky.cpp +269 -0
- bifacial_radiance/data/gencumsky/make_gencumskyexe.py +107 -0
- bifacial_radiance/data/gencumsky/paths.h +62 -0
- bifacial_radiance/data/gencumulativesky +0 -0
- bifacial_radiance/data/gencumulativesky.exe +0 -0
- bifacial_radiance/data/ground.rad +83 -0
- bifacial_radiance/data/module.json +103 -0
- bifacial_radiance/gui.py +1696 -0
- bifacial_radiance/images/fig1_fixed_small.gif +0 -0
- bifacial_radiance/images/fig2_tracked_small.gif +0 -0
- bifacial_radiance/load.py +1156 -0
- bifacial_radiance/main.py +5673 -0
- bifacial_radiance/mismatch.py +461 -0
- bifacial_radiance/modelchain.py +299 -0
- bifacial_radiance/module.py +1427 -0
- bifacial_radiance/performance.py +466 -0
- bifacial_radiance/spectral_utils.py +555 -0
- bifacial_radiance-0.5.1.dist-info/METADATA +129 -0
- bifacial_radiance-0.5.1.dist-info/RECORD +63 -0
- bifacial_radiance-0.5.1.dist-info/WHEEL +6 -0
- bifacial_radiance-0.5.1.dist-info/licenses/LICENSE +30 -0
- bifacial_radiance-0.5.1.dist-info/top_level.txt +1 -0

bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_Cases.py
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+"""
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis/dataframes_Gpoat
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis/dataframes_Gfront
+
+"""
+
+# In[1]:
+# This section collects the cell by cell irradiance calculated by bifacial_radiance
+# into an hourly pickle in format " irr_1axis_01_01_08.csv "
+# and stores them in a folder (i.e. hourly_dataframes_row_FrontOnly)
+# It also accumulates up the cell by cell irradiances into a yearly irradiance pickle
+# at the end and plots it.
+# Each dataframe/pickle has module 20 (Northmost) as the first row (So if you're
+# seeing the data it's like you're seeing the array from a top-down view.
+# Note that this is not the way that dataframes get plotted in python so it needs
+# to be inverted later for plotting. UGH.)
+
+import os
+import pandas as pd
+import re
+import numpy as np
+#from collections import Counter
+
+# Where the 500k results are stored:
+savefolder=r'/scratch/sayala/JORDAN/'
+
+case = 'CaseC'
+
+if case == 'Case0':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_Hourly/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront'
+
+if case == 'CaseA':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_CaseA/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear_CaseA/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground_CaseA/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront_CaseA'
+
+if case == 'CaseB':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_CaseB/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear_CaseB/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground_CaseB/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront_CaseB'
+
+if case == 'CaseC':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_CaseC/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear_CaseC/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground_CaseC/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront_CaseC'
+
+
+savefolder = r'/scratch/sayala/JORDAN/'
+
+
+
+filelist = sorted(os.listdir(testfolder))
+print('{} files in the directory'.format(filelist.__len__()))
+#print(filelist[1].partition('_Module_')[0])
+hourlist = [x[5:] for x in filelist]
+
+samp='/scratch/sayala/JORDAN/JackSolar_Hourly/Hour_12/results/irr_xloc_0.csv'
+# Reading the values for the row array and storing them in dataframes/pickles
+a = np.zeros(shape=(22, 105))
+df = pd.DataFrame(a)
+df_GroundOnly = pd.DataFrame(a)
+
+frontirrad = 0
+newdf = True
+
+for i in range(0, len(hourlist)):
+    hourfoo = hourlist[i]
+    #for i in range(12,14):
+    print("Working on Hour "+str(hourfoo))
+    resfolder = os.path.join(testfolder, 'Hour_'+str(hourfoo))
+    resfolder = os.path.join(resfolder, 'results/')
+    print(resfolder)
+    A = sorted(os.listdir(resfolder))
+    if len(A) != 0:
+
+        rowarray_Grear=[]
+        rowarray_Ground=[]
+
+        for ii in range (0, 22):
+            filename = 'irr_xloc_'+str(ii)+'.csv'
+            data = pd.read_csv(os.path.join(resfolder,filename))
+            groundirrad = list(data['Wm2Front'])
+            rowarray_Ground.append(groundirrad)
+
+            rearirrad = list(data['Wm2Back'])
+            rowarray_Grear.append(rearirrad)
+
+        rowarray_Grear=pd.DataFrame(rowarray_Grear)
+        resfmt = 'irr_1axis_Hour_{}.pkl'.format(f'{i:04}')
+        if newdf:
+            df_all_Grear = rowarray_Grear
+        else:
+            df_all_Grear = df_all_Grear+rowarray_Grear
+
+        rowarray_Ground=pd.DataFrame(rowarray_Ground)
+        if newdf:
+            df_all_Ground = rowarray_Ground
+            newdf = False
+        else:
+            df_all_Ground = df_all_Ground+rowarray_Ground
+
+        # Read the 1 front file and compile values
+        filename = 'irr_frontSide.csv'
+        data = pd.read_csv(os.path.join(resfolder,filename))
+        frontirrad = frontirrad + data['Wm2Front']
+
+compiledsavenameGround = 'All_Ground_'+case+'.csv'
+compiledsavenameGrear = 'All_Grear_'+case+'.csv'
+df_all_Ground.to_csv(os.path.join(savefolder,compiledsavenameGround))
+df_all_Grear.to_csv(os.path.join(savefolder,compiledsavenameGrear))
+
+print("Frontirrad = ", frontirrad)
+
+print("FINISHED")
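
The header comment in compile_Cases.py notes that the accumulated grids are stored with the northmost position as row 0 and therefore need to be flipped before plotting. A minimal post-processing sketch of that step follows; the path and file name mirror what the script writes for case 'CaseC', but the flip-and-plot itself is illustrative only and not part of the package.

import os
import pandas as pd
import matplotlib.pyplot as plt

savefolder = '/scratch/sayala/JORDAN/'   # same folder the script saves to
grid = pd.read_csv(os.path.join(savefolder, 'All_Ground_CaseC.csv'), index_col=0)

# Per the header note, the stored row order is the reverse of the desired
# plotting orientation, so invert the rows before displaying.
plt.imshow(grid.iloc[::-1], aspect='auto')
plt.colorbar(label='Summed hourly irradiance (W/m2)')
plt.title('Ground irradiance, CaseC')
plt.show()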

bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_CasesMonth.py
@@ -0,0 +1,142 @@
+# -*- coding: utf-8 -*-
+"""
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis/dataframes_Gpoat
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis/dataframes_Gfront
+
+"""
+
+# In[1]:
+# This section collects the cell by cell irradiance calculated by bifacial_radiance
+# into an hourly pickle in format " irr_1axis_01_01_08.csv "
+# and stores them in a folder (i.e. hourly_dataframes_row_FrontOnly)
+# It also accumulates up the cell by cell irradiances into a yearly irradiance pickle
+# at the end and plots it.
+# Each dataframe/pickle has module 20 (Northmost) as the first row (So if you're
+# seeing the data it's like you're seeing the array from a top-down view.
+# Note that this is not the way that dataframes get plotted in python so it needs
+# to be inverted later for plotting. UGH.)
+
+import os
+import pandas as pd
+import re
+import numpy as np
+import bisect
+#from collections import Counter
+
+# Where the 500k results are stored:
+savefolder=r'/scratch/sayala/JORDAN/'
+
+case = 'CaseC'
+
+if case == 'Case0':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_Hourly/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront'
+
+if case == 'CaseA':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_CaseA/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear_CaseA/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground_CaseA/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront_CaseA'
+
+if case == 'CaseB':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_CaseB/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear_CaseB/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground_CaseB/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront_CaseB'
+
+if case == 'CaseC':
+    testfolder=r'/scratch/sayala/JORDAN/JackSolar_CaseC/'
+    savefolderGrear=r'/scratch/sayala/JORDAN/dataframes_Grear_CaseC/'
+    savefolderGground=r'/scratch/sayala/JORDAN/dataframes_Gground_CaseC/'
+    savefolderfront =r'/scratch/sayala/JORDAN/dataframes_Gfront_CaseC'
+
+
+savefolder = r'/scratch/sayala/JORDAN/'
+
+
+
+filelist = sorted(os.listdir(testfolder))
+print('{} files in the directory'.format(filelist.__len__()))
+#print(filelist[1].partition('_Module_')[0])
+hourlist = [x[5:] for x in filelist]
+
+samp='/scratch/sayala/JORDAN/JackSolar_Hourly/Hour_12/results/irr_xloc_0.csv'
+# Reading the values for the row array and storing them in dataframes/pickles
+a = np.zeros(shape=(22, 105))
+df = pd.DataFrame(a)
+df_GroundOnly = pd.DataFrame(a)
+
+frontirrad = 0
+newdf = True
+
+starts = [2881, 3626, 4346, 5090, 5835]
+ends = [3621, 4341, 5085, 5829, 6550]
+
+frontirrads_all = []
+
+for mm in range(0, len(starts)):
+
+    lower_bound = starts[mm]
+    upper_bound = ends[mm]
+    l = list(map(int, hourlist))
+    lower_bound_i = bisect.bisect_left(l, lower_bound)
+    upper_bound_i = bisect.bisect_right(l, upper_bound, lo=lower_bound_i)
+    subhourlist = l[lower_bound_i:upper_bound_i]
+
+    frontirrad = 0
+    newdf = True
+
+    for i in range(0, len(subhourlist)):
+        hourfoo = subhourlist[i]
+        #for i in range(12,14):
+        print("Working on Hour "+str(hourfoo))
+        resfolder = os.path.join(testfolder, 'Hour_'+str(hourfoo))
+        resfolder = os.path.join(resfolder, 'results/')
+        print(resfolder)
+        A = sorted(os.listdir(resfolder))
+        if len(A) != 0:
+
+            rowarray_Grear=[]
+            rowarray_Ground=[]
+
+            for ii in range (0, 22):
+                filename = 'irr_xloc_'+str(ii)+'.csv'
+                data = pd.read_csv(os.path.join(resfolder,filename))
+                groundirrad = list(data['Wm2Front'])
+                rowarray_Ground.append(groundirrad)
+
+                rearirrad = list(data['Wm2Back'])
+                rowarray_Grear.append(rearirrad)
+
+            rowarray_Grear=pd.DataFrame(rowarray_Grear)
+            resfmt = 'irr_1axis_Hour_{}.pkl'.format(f'{i:04}')
+            if newdf:
+                df_all_Grear = rowarray_Grear
+            else:
+                df_all_Grear = df_all_Grear+rowarray_Grear
+
+            rowarray_Ground=pd.DataFrame(rowarray_Ground)
+            if newdf:
+                df_all_Ground = rowarray_Ground
+                newdf = False
+            else:
+                df_all_Ground = df_all_Ground+rowarray_Ground
+
+            # Read the 1 front file and compile values
+            filename = 'irr_frontSide.csv'
+            data = pd.read_csv(os.path.join(resfolder,filename))
+            frontirrad = frontirrad + data['Wm2Front']
+
+    compiledsavenameGround = 'All_Ground_'+case+'_month_'+str(mm)+'.csv'
+    compiledsavenameGrear = 'All_Grear_'+case+'_month_'+str(mm)+'.csv'
+    df_all_Ground.to_csv(os.path.join(savefolder,compiledsavenameGround))
+    df_all_Grear.to_csv(os.path.join(savefolder,compiledsavenameGrear))
+
+    frontirrads_all.append(frontirrad)
+    #print("Frontirrad Month ", mm, frontirrad)
+
+print (frontirrads_all)
+print("FINISHED")
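
The starts/ends pairs in compile_CasesMonth.py read like hour-of-year boundaries for the periods being compiled (the script does not label them, so treat that as an assumption); the bisect calls simply pick out the sorted Hour_* indices that fall inside each window. A toy illustration of that selection:

import bisect

# hourlist stands in for the sorted hour indices recovered from the Hour_* folder names.
hourlist = [2880, 2881, 2900, 3620, 3621, 3700, 4341]
lower_bound, upper_bound = 2881, 3621                     # one (start, end) pair

lo = bisect.bisect_left(hourlist, lower_bound)            # first index >= lower_bound
hi = bisect.bisect_right(hourlist, upper_bound, lo=lo)    # one past the last index <= upper_bound
print(hourlist[lo:hi])                                    # -> [2881, 2900, 3620, 3621]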

bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_PRNew.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+"""
+# P1
+# Compile the Ground Irradiance
+
+"""
+
+
+import os
+import pandas as pd
+import numpy as np
+
+# Folders where results are and will be saved
+savefolder=r'/scratch/sayala/JORDAN/RESULTS_PR_NEW'
+posSampled = 50 #!! Modify to the number of positions sampled
+
+xgaps = [0.9144000000000001, 1.2192, 3.6576000000000004, 4.572, 5.486400000000001, ]
+numpanelss = np.array([3, 4])
+sensorsxs = np.array(list(range(0, 50)))
+
+ft2m = 0.3048
+
+xgaps = np.round(np.array([3, 4, 6, 9, 12, 15, 18, 21]) * ft2m,1)
+numpanelss = [3, 4]
+sensorsxs = np.array(list(range(0, 401)))
+
+
+errors_all_numpanels = []
+errors_all_xgap = []
+errors_all_posx = []
+
+for ii in range(0, len(numpanelss)):
+    numpanels = numpanelss[ii]
+    for jj in range(0, len(xgaps)):
+        xgap = xgaps[jj]
+
+
+        x_all = []
+        y_all = []
+        z_all = []
+        mattype_all = []
+        Wm2_all = []
+
+        numpanels_all = []
+        xgap_all = []
+        posx_all = []
+
+        for posx in sensorsxs:
+            xgap = xgaps[jj]
+
+            #/scratch/sayala/JORDAN/PUERTO_RICO_NEW/numpanels_3_xgap_1.2_Posx_001/results/irr_xloc_1_Front.csv
+            zero_filled_posx = str(posx).zfill(3)
+            filename = '/scratch/sayala/JORDAN/PUERTO_RICO_NEW/numpanels_{}_xgap_{}_Posx_{}/results/irr_xloc_{}_Front.csv'.format(numpanels, xgap, zero_filled_posx, posx)
+            print("Working on entry numpanels_{}_xgap_{}_Posx_{}".format(numpanels, xgap, posx))
+
+            try:
+                data = pd.read_csv(filename)
+
+                # Save all the values
+                x_all.append(list(data['x']))
+                y_all.append(list(data['y']))
+                z_all.append(list(data['z']))
+                mattype_all.append(list(data['mattype']))
+                Wm2_all.append(list(data['Wm2']))
+
+                # Saving position and parameters for indexing
+                numpanels_all.append(numpanels)
+                xgap_all.append(xgap)
+                posx_all.append(posx)
+
+            except:
+                print('*** Missing entry numpanels_{}_xgap_{}_Posx_{}'.format(numpanels, xgap, posx))
+                errors_all_numpanels.append(numpanels)
+                errors_all_xgap.append(xgap)
+                errors_all_posx.append(posx)
+
+
+        savefilename = 'Results_numpanels_{}_xgap_{}_Posx_{}.csv'.format(numpanels, xgap, posx)
+        df = pd.DataFrame(list(zip(numpanels_all,xgap_all, posx_all,
+                                   x_all,y_all,z_all, mattype_all, Wm2_all)),
+                          columns=['numpanels', 'xgap', 'posx', 'x','y','z',
+                                   'mattype','Wm2'])
+
+        df.to_csv(os.path.join(savefolder,savefilename))
+
+errorfile = pd.DataFrame(list(zip(errors_all_numpanels,errors_all_xgap, errors_all_posx)),
+                         columns=['numpanels', 'xgap', 'posx'])
+
+errorfile.to_csv(os.path.join(savefolder, 'ERRORS_05Sep21_PRNew.csv'))
+
+print("FINISHED")
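
The format string in compile_PRNew.py zero-pads the position index for the folder name but keeps the bare index in the per-sensor file name. A quick illustration with made-up values:

numpanels, xgap, posx = 3, 1.2, 7
zero_filled_posx = str(posx).zfill(3)    # '007'
template = ('/scratch/sayala/JORDAN/PUERTO_RICO_NEW/'
            'numpanels_{}_xgap_{}_Posx_{}/results/irr_xloc_{}_Front.csv')
filename = template.format(numpanels, xgap, zero_filled_posx, posx)
print(filename)
# /scratch/sayala/JORDAN/PUERTO_RICO_NEW/numpanels_3_xgap_1.2_Posx_007/results/irr_xloc_7_Front.csv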

bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_PRNewP2.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+"""
+# P2
+# COMPILE THE MODULE's IRRADIANCE
+
+"""
+
+
+import os
+import pandas as pd
+import numpy as np
+
+# Folders where results are and will be saved
+savefolder=r'/scratch/sayala/JORDAN/RESULTS_PR_NEW'
+posSampled = 50 #!! Modify to the number of positions sampled
+
+ft2m = 0.3048
+
+xgaps = np.round(np.array([3, 4, 6, 9, 12, 15, 18, 21]) * ft2m,1)
+numpanelss = [3, 4]
+sensorsxs = np.array(list(range(0, 41)))
+
+
+errors_all_numpanels = []
+errors_all_xgap = []
+errors_all_posx = []
+
+for ii in range(0, len(numpanelss)):
+    numpanels = numpanelss[ii]
+    for jj in range(0, len(xgaps)):
+        xgap = xgaps[jj]
+
+
+        x_all = []
+        y_all = []
+        z_all = []
+        mattype_all = []
+        rearZ_all = []
+        mattype_all = []
+        rearMat_all = []
+        Wm2Front_all = []
+        Wm2Back_all = []
+
+        numpanels_all = []
+        xgap_all = []
+        posx_all = []
+
+        for posx in sensorsxs:
+            xgap = xgaps[jj]
+
+            #/scratch/sayala/JORDAN/PUERTO_RICO_NEW_P2/numpanels_3_xgap_1.2_Posx_001/results/irr_xloc_1_Front.csv
+            zero_filled_posx = str(posx).zfill(3)
+            filename = '/scratch/sayala/JORDAN/PUERTO_RICO_NEW_P2/numpanels_{}_xgap_{}_Posx_{}/results/irr_xloc_{}.csv'.format(numpanels, xgap, zero_filled_posx, posx)
+            print("Working on entry numpanels_{}_xgap_{}_Posx_{}".format(numpanels, xgap, posx))
+
+            try:
+                data = pd.read_csv(filename)
+
+                # Save all the values
+                x_all.append(list(data['x']))
+                y_all.append(list(data['y']))
+                z_all.append(list(data['z']))
+                rearZ_all.append(list(data['rearZ']))
+                mattype_all.append(list(data['mattype']))
+                rearMat_all.append(list(data['rearMat']))
+                Wm2Front_all.append(list(data['Wm2Front']))
+                Wm2Back_all.append(list(data['Wm2Back']))
+
+                # Saving position and parameters for indexing
+                numpanels_all.append(numpanels)
+                xgap_all.append(xgap)
+                posx_all.append(posx)
+
+            except:
+                print('*** Missing entry numpanels_{}_xgap_{}_Posx_{}'.format(numpanels, xgap, posx))
+                errors_all_numpanels.append(numpanels)
+                errors_all_xgap.append(xgap)
+                errors_all_posx.append(posx)
+
+
+        savefilename = 'Results_p2_numpanels_{}_xgap_{}_Posx_{}.csv'.format(numpanels, xgap, posx)
+        df = pd.DataFrame(list(zip(numpanels_all,xgap_all, posx_all,
+                                   x_all,y_all,z_all,rearZ_all,
+                                   mattype_all,rearMat_all,Wm2Front_all,Wm2Back_all)),
+                          columns=['numpanels', 'xgap', 'posx', 'x','y','z','rearZ',
+                                   'mattype','rearMat','Wm2Front','Wm2Back'])
+
+        df.to_csv(os.path.join(savefolder,savefilename))
+
+errorfile = pd.DataFrame(list(zip(errors_all_numpanels,errors_all_xgap, errors_all_posx)),
+                         columns=['numpanels', 'xgap', 'posx'])
+
+errorfile.to_csv(os.path.join(savefolder, 'ERRORS_05Sep21_PRNew2.csv'))
+
+print("FINISHED")

bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_TreeResults.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+"""
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis/dataframes_Gpoat
+mkdir /scratch/sayala/RadianceScenes/Full_Row_40Mods/outputanalysis/dataframes_Gfront
+
+"""
+
+# In[1]:
+# This section collects the cell by cell irradiance calculated by bifacial_radiance
+# into an hourly pickle in format " irr_1axis_01_01_08.csv "
+# and stores them in a folder (i.e. hourly_dataframes_row_FrontOnly)
+# It also accumulates up the cell by cell irradiances into a yearly irradiance pickle
+# at the end and plots it.
+# Each dataframe/pickle has module 20 (Northmost) as the first row (So if you're
+# seeing the data it's like you're seeing the array from a top-down view.
+# Note that this is not the way that dataframes get plotted in python so it needs
+# to be inverted later for plotting. UGH.)
+
+import os
+import pandas as pd
+import re
+import numpy as np
+#from collections import Counter
+
+# Where the 500k results are stored:
+savefolder = r'/scratch/sayala/JORDAN/'
+
+
+# irr_Coffee_ch_1.8_xgap_0.6_tilt_18_pitch_1.6_Front&Back.csv
+
+ch_all = []
+xgap_all = []
+tilt_all = []
+pitch_all = []
+NorthIrrad = []
+SouthIrrad = []
+EastIrrad = []
+WestIrrad = []
+
+
+ft2m = 0.3048
+clearance_heights = np.array([6.0, 8.0, 10.0])* ft2m
+xgaps = np.array([2, 3, 4]) * ft2m
+Ds = np.array([2, 3, 4]) * ft2m # D is a variable that represents the spacing between rows, not-considering the collector areas.
+tilts = [18, 10]
+y = 1
+
+
+
+for ch in range (0, len(clearance_heights)):
+
+    clearance_height = clearance_heights[ch]
+    for xx in range (0, len(xgaps)):
+
+        xgap = xgaps[xx]
+
+        for tt in range (0, len(tilts)):
+
+            tilt = tilts[tt]
+            for dd in range (0, len(Ds)):
+                pitch = y * np.cos(np.radians(tilt))+Ds[dd]
+
+
+                folder_name = ('CH_'+str(clearance_height)+
+                               '_xgap_'+str(xgap)+\
+                               '_tilt_'+str(tilt)+
+                               '_pitch_'+str(pitch))
+
+                testfolder = os.path.join(r'/scratch/sayala/JORDAN/PUERTO_RICO/', folder_name)
+                resultsfolder = os.path.join(testfolder, 'results')
+
+                sim_name = ('irr_Coffee'+'_ch_'+str(round(clearance_height,1))+
+                            '_xgap_'+str(round(xgap,1))+\
+                            '_tilt_'+str(round(tilt,1))+
+                            '_pitch_'+str(round(pitch,1))+'_North&South.csv')
+
+                sim_name2 = ('irr_Coffee'+'_ch_'+str(round(clearance_height,1))+
+                             '_xgap_'+str(round(xgap,1))+\
+                             '_tilt_'+str(round(tilt,1))+
+                             '_pitch_'+str(round(pitch,1))+'_East&West.csv')
+
+                ch_all.append(clearance_height)
+                xgap_all.append(xgap)
+                tilt_all.append(tilt)
+                pitch_all.append(pitch)
+                data = pd.read_csv(os.path.join(resultsfolder, sim_name))
+                NorthIrrad.append(data['Wm2Front'].item())
+                SouthIrrad.append(data['Wm2Back'].item())
+                data = pd.read_csv(os.path.join(resultsfolder, sim_name2))
+                EastIrrad.append(data['Wm2Front'].item())
+                WestIrrad.append(data['Wm2Back'].item())
+
+
+ch_all = pd.Series(ch_all, name='clearance_height')
+xgap_all = pd.Series(xgap_all, name='xgap')
+tilt_all = pd.Series(tilt_all, name='tilt')
+pitch_all = pd.Series(pitch_all, name='pitch')
+NorthIrrad = pd.Series(NorthIrrad, name='NorthIrrad')
+SouthIrrad = pd.Series(SouthIrrad, name='SouthIrrad')
+EastIrrad = pd.Series(EastIrrad, name='EastIrrad')
+WestIrrad = pd.Series(WestIrrad, name='WestIrrad')
+
+df = pd.concat([ch_all, xgap_all, tilt_all, pitch_all, NorthIrrad, SouthIrrad, EastIrrad, WestIrrad], axis=1)
+df.to_csv(os.path.join(savefolder,'TREES.csv'))
+
+
+print("FINISHED")
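
The pitch written into the folder and file names of compile_TreeResults.py is the collector's horizontal projection, y*cos(tilt), plus the row gap D. A worked instance using values taken straight from the arrays above:

import numpy as np

y = 1                   # collector width used in the script
tilt = 18               # degrees
D = 2 * 0.3048          # 2 ft of row spacing converted to meters

pitch = y * np.cos(np.radians(tilt)) + D
print(round(pitch, 1))  # -> 1.6, matching the pitch_1.6 in the example filename comment above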

bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_basic_module_sampling.py
@@ -0,0 +1,103 @@
+import os
+import pandas as pd
+import numpy as np
+
+# Folders where results are and will be saved
+savefolder=r'/scratch/sayala/RadianceScenes/BasicSimulations/'
+posSampled = 50 #!! Modify to the number of positions sampled
+
+
+
+for jj in range(0, 2):
+    # 0 - With
+    # 1 - Without
+
+    if jj == 0:
+        testfolder=r'/scratch/sayala/RadianceScenes/BasicSimulations/Gendaylit1axis/WithResults'
+        savefilename = 'COMPILED_Results_WITH_19AUG.csv'
+        withopt = 'WITH'
+    if jj == 1:
+        testfolder=r'/scratch/sayala/RadianceScenes/BasicSimulations/Gendaylit1axis/WithoutResults'
+        savefilename = 'COMPILED_Results_WITHOUT_19AUG.csv'
+        withopt = 'WITHOUT'
+
+    filelist = sorted(os.listdir(testfolder))
+    #daylist = [x[4:] for x in filelist]
+    # timestamplist = []
+    # for i in range(len(daylist)):
+    #     timestamplist[i] = sorted(os.listdir(testfolder+r'\'+f'{day for day in daylist}'))
+    print('{} files in the directory'.format(filelist.__len__()))
+    #print(filelist[1].partition('_Module_')[0])
+    #!! Make sure this matches the folder names pattern or adjust accordingly.
+    # This assumes the folders are named "Day_01_01_01_08" (y m d h)
+
+
+
+    x_all = []
+    y_all = []
+    z_all = []
+    rearZ_all = []
+    mattype_all = []
+    rearMat_all = []
+    Wm2Front_all = []
+    Wm2Back_all = []
+    pos_all = []
+    timestamp_all = []
+    errors_all = []
+
+    timestamplist = [x[4:15] for x in filelist]
+    # positionlist = [x[21:] for x in filelist]
+
+
+    timestamplist = ['21_04_29_11']
+
+    for i in range(0, len(timestamplist)):
+
+        print("Working on entry "+str(i)+" timestamp "+timestamplist[i])
+
+        posSampled = 200
+
+        for ii in range (0, posSampled):
+            resfolder = os.path.join(testfolder, 'Day_'+timestamplist[i]+'_Posx_'+str(ii))
+            resfolder = os.path.join(resfolder, 'results/')
+            print(resfolder)
+
+            #!! Make sure this matches the format being used to save results or
+            # modify accordingly.
+            # example filename: 'irr_20_01_01_08_pos_0.csv'
+            filename = 'irr_1axis_'+timestamplist[i]+'_00_'+withopt+'_pos_'+str(ii)+'.csv'
+            try:
+                data = pd.read_csv(os.path.join(resfolder,filename))
+
+                # Save all the values
+                x_all.append(list(data['x']))
+                y_all.append(list(data['y']))
+                z_all.append(list(data['z']))
+                rearZ_all.append(list(data['rearZ']))
+                mattype_all.append(list(data['mattype']))
+                rearMat_all.append(list(data['rearMat']))
+                Wm2Front_all.append(list(data['Wm2Front']))
+                Wm2Back_all.append(list(data['Wm2Back']))
+
+                # Saving position and timestamp for indexing
+                pos_all.append(ii)
+                timestamp_all.append(timestamplist[i])
+
+            except:
+                print('*** Missing positions ', ii)
+                errors_all.append(ii)
+
+
+    df = pd.DataFrame(list(zip(timestamp_all,pos_all,x_all,y_all,z_all,rearZ_all,
+                               mattype_all,rearMat_all,Wm2Front_all,Wm2Back_all)),
+                      columns=['Timestamp', 'Position', 'x','y','z','rearZ',
+                               'mattype','rearMat','Wm2Front','Wm2Back'])
+
+    df.to_csv(os.path.join(savefolder,savefilename))
+
+    errorfile = os.path.join(savefolder, 'ERRORS'+withopt+'.txt')
+    with open(errorfile, 'w') as f:
+        for s in errors_all:
+            f.write(str(s) + '\n')
+print("FINISHED")
+
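
One practical note on the COMPILED_Results_* files written by compile_basic_module_sampling.py: each cell of the x/y/z/rearZ/Wm2Front/Wm2Back columns holds an entire Python list, which pandas.to_csv stores as its string representation. A minimal sketch of reading one back is below (the path mirrors the script's savefolder; it assumes the lists were written as plain numeric literals and contain no NaNs):

import ast
import pandas as pd

savefolder = '/scratch/sayala/RadianceScenes/BasicSimulations/'
df = pd.read_csv(savefolder + 'COMPILED_Results_WITH_19AUG.csv', index_col=0)

# Each list-valued column was stringified by to_csv; literal_eval restores the lists.
for col in ['x', 'y', 'z', 'rearZ', 'Wm2Front', 'Wm2Back']:
    df[col] = df[col].apply(ast.literal_eval)

print(df.loc[0, 'Wm2Front'][:5])   # first five front-side sensor values of the first entry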