honeybee-radiance-postprocess 0.4.457__py2.py3-none-any.whl → 0.4.459__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- honeybee_radiance_postprocess/cli/postprocess.py +2 -0
- honeybee_radiance_postprocess/cli/well.py +96 -0
- honeybee_radiance_postprocess/ies/__init__.py +1 -0
- honeybee_radiance_postprocess/ies/lm.py +214 -0
- honeybee_radiance_postprocess/ies/lm_schedule.py +243 -0
- honeybee_radiance_postprocess/well/__init__.py +1 -0
- honeybee_radiance_postprocess/well/well.py +393 -0
- {honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/METADATA +1 -1
- {honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/RECORD +13 -7
- {honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/LICENSE +0 -0
- {honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/WHEEL +0 -0
- {honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/entry_points.txt +0 -0
- {honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/top_level.txt +0 -0
honeybee_radiance_postprocess/cli/postprocess.py

@@ -22,6 +22,7 @@ from ..util import filter_array
 from .two_phase import two_phase
 from .leed import leed
 from .abnt import abnt
+from .well import well
 from ..helper import model_grid_areas, grid_summary
 
 _logger = logging.getLogger(__name__)
@@ -34,6 +35,7 @@ def post_process():
 post_process.add_command(two_phase)
 post_process.add_command(leed)
 post_process.add_command(abnt)
+post_process.add_command(well)
 
 @post_process.command('annual-daylight')
 @click.argument(
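The two hunks above register the new `well` group on the existing `post-process` command group. A minimal sketch (not part of the published diff) of checking that registration with click's test runner, assuming the 0.4.459 wheel is installed and using the module path from the RECORD entries below:

```python
from click.testing import CliRunner

from honeybee_radiance_postprocess.cli.postprocess import post_process

runner = CliRunner()
# the newly added `well` group should now be reachable under post-process
result = runner.invoke(post_process, ['well', '--help'])
assert result.exit_code == 0
assert 'well-annual-daylight' in result.output
```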
honeybee_radiance_postprocess/cli/well.py

@@ -0,0 +1,96 @@
+"""honeybee-radiance-postprocess WELL commands."""
+import json
+import sys
+import logging
+import os
+import click
+
+from ..well.well import well_annual_daylight
+from ..results.annual_daylight import AnnualDaylight
+
+_logger = logging.getLogger(__name__)
+
+
+@click.group(help='Commands for WELL post-processing of Radiance results.')
+def well():
+    pass
+
+
+@well.command('well-annual-daylight')
+@click.argument(
+    'folder',
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True)
+)
+@click.option(
+    '--grids-filter', '-gf', help='A pattern to filter the grids.', default='*',
+    show_default=True
+)
+@click.option(
+    '--shade-transmittance', '-st', help='A value to use as a multiplier in place of '
+    'solar shading. Value for shade transmittance must be 1 > value > 0.',
+    default=0.02, show_default=True, type=click.FLOAT
+)
+@click.option(
+    '--shade-transmittance-file', '-stf', help='A JSON file with a dictionary '
+    'where aperture groups are keys, and the value for each key is the shade '
+    'transmittance. Values for shade transmittance must be 1 > value > 0. '
+    'If any aperture groups are missing in the JSON file, its shade transmittance '
+    'value will be set to the value of the --shade-transmittance option (0.02 by '
+    'default).', default=None, show_default=True,
+    type=click.Path(exists=False, file_okay=True, dir_okay=False, resolve_path=True)
+)
+@click.option(
+    '--use-shade-transmittance/--use-states', help='A flag to select if the '
+    'post-processing should use a shade transmittance or the simulated states '
+    'of aperture groups. Using states should only be selected if the annual '
+    'daylight simulation included ray tracing of a second (blind) state for '
+    'each aperture group.',
+    is_flag=True, default=True, show_default=True
+)
+@click.option(
+    '--sub-folder', '-sf', help='Relative path for subfolder to write output '
+    'files.', default='well_summary', show_default=True
+)
+def well_daylight(
+    folder, shade_transmittance, shade_transmittance_file, grids_filter,
+    use_shade_transmittance, sub_folder
+):
+    """Calculate credits for WELL L06.
+
+    Use the shade-transmittance option to set a shade transmittance values for
+    aperture groups. The shade-transmittance-file option takes precedence over
+    the shade-transmittance, however, if any aperture groups are missing in the
+    JSON file given to the shade-transmittance-file option, the value from
+    shade-transmittance will be used for those aperture groups.
+
+    \b
+    Args:
+        folder: Results folder. This folder is an output folder of annual daylight
+            recipe. The daylight simulation must include aperture groups.
+    """
+    use_states = not use_shade_transmittance
+    if (
+        shade_transmittance_file
+        and os.path.isfile(shade_transmittance_file)
+        and use_shade_transmittance
+    ):
+        with open(shade_transmittance_file) as json_file:
+            shd_trans = json.load(json_file)
+        results = AnnualDaylight(folder)
+        # check if aperture groups are missing in json file
+        for light_path in results.light_paths:
+            if (not light_path in shd_trans and
+                    light_path != '__static_apertures__'):
+                shd_trans[light_path] = shade_transmittance
+        shade_transmittance = shd_trans
+    try:
+        well_annual_daylight(
+            folder, grids_filter=grids_filter,
+            shade_transmittance=shade_transmittance, use_states=use_states,
+            sub_folder=sub_folder
+        )
+    except Exception:
+        _logger.exception('Failed to generate LEED summary.')
+        sys.exit(1)
+    else:
+        sys.exit(0)
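Not part of the diff: a hedged sketch of driving the new command programmatically through click's test runner. The aperture-group name in the JSON file is a made-up placeholder, and `./results` must be an existing annual-daylight results folder whose simulation included aperture groups.

```python
import json

from click.testing import CliRunner

from honeybee_radiance_postprocess.cli.well import well

# optional per-aperture-group shade transmittance (1 > value > 0)
with open('shd_trans.json', 'w') as f:
    json.dump({'Room_1_South_Window': 0.05}, f)

runner = CliRunner()
result = runner.invoke(well, [
    'well-annual-daylight', './results',
    '--shade-transmittance-file', 'shd_trans.json',
    '--sub-folder', 'well_summary',
])
print(result.exit_code)  # 0 on success; the command calls sys.exit itself
```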
honeybee_radiance_postprocess/ies/__init__.py

@@ -0,0 +1 @@
+"""honeybee-radiance-postprocess library."""
honeybee_radiance_postprocess/ies/lm.py

@@ -0,0 +1,214 @@
+"""Functions for IES LM post-processing."""
+from typing import Tuple, Union
+from pathlib import Path
+from collections import defaultdict
+import json
+import itertools
+import numpy as np
+
+from ladybug.analysisperiod import AnalysisPeriod
+from ladybug.datatype.generic import GenericType
+from ladybug.color import Colorset
+from ladybug.datacollection import HourlyContinuousCollection
+from ladybug.datatype.fraction import Fraction
+from ladybug.datatype.time import Time
+from ladybug.legend import LegendParameters
+from ladybug.header import Header
+from honeybee.model import Model
+from honeybee.units import conversion_factor_to_meters
+from honeybee_radiance.writer import _filter_by_pattern
+from honeybee_radiance.postprocess.annual import filter_schedule_by_hours
+
+from ..metrics import da_array2d, ase_array2d
+from ..annual import schedule_to_hoys, occupancy_schedule_8_to_6
+from ..results.annual_daylight import AnnualDaylight
+from ..util import filter_array, recursive_dict_merge
+from ..dynamic import DynamicSchedule, ApertureGroupSchedule
+from .lm_schedule import shd_trans_schedule_descending, states_schedule_descending
+
+
+def shade_transmittance_per_light_path(
+        light_paths: list, shade_transmittance: Union[float, dict],
+        shd_trans_dict: dict) -> dict:
+    """Filter shade_transmittance by light paths and add default multiplier.
+
+    Args:
+        light_paths: A list of light paths.
+        shade_transmittance: A value to use as a multiplier in place of solar
+            shading. This input can be either a single value that will be used
+            for all aperture groups, or a dictionary where aperture groups are
+            keys, and the value for each key is the shade transmittance. Values
+            for shade transmittance must be 1 > value > 0.
+        shd_trans_dict: A dictionary used to store shade transmittance value
+            for each aperture group.
+
+    Returns:
+        A dictionary with filtered light paths.
+    """
+    shade_transmittances = {}
+    if isinstance(shade_transmittance, dict):
+        for light_path in light_paths:
+            # default multiplier
+            shade_transmittances[light_path] = [1]
+            # add custom shade transmittance
+            if light_path in shade_transmittance:
+                shade_transmittances[light_path].append(
+                    shade_transmittance[light_path])
+                shd_trans_dict[light_path] = shade_transmittance[light_path]
+            # add default shade transmittance (0.02)
+            elif light_path != '__static_apertures__':
+                shade_transmittances[light_path].append(0.02)
+                shd_trans_dict[light_path] = 0.02
+            else:
+                shade_transmittances[light_path].append(1)
+                shd_trans_dict[light_path] = 1
+    else:
+        shd_trans = float(shade_transmittance)
+        for light_path in light_paths:
+            # default multiplier
+            shade_transmittances[light_path] = [1]
+            # add custom shade transmittance
+            if light_path != '__static_apertures__':
+                shade_transmittances[light_path].append(shd_trans)
+                shd_trans_dict[light_path] = shd_trans
+            else:
+                shade_transmittances[light_path].append(1)
+                shd_trans_dict[light_path] = 1
+
+    return shade_transmittances, shd_trans_dict
+
+
+def dynamic_schedule_direct_illuminance(
+        results: Union[str, AnnualDaylight], grids_filter: str = '*',
+        shade_transmittance: Union[float, dict] = 0.02,
+        use_states: bool = False
+        ) -> Tuple[dict, dict]:
+    """Calculate a schedule of each aperture group.
+
+    This function calculates an annual shading schedule of each aperture
+    group. Hour by hour it will select the least shaded aperture group
+    configuration, so that no more than 2% of the sensors points receive
+    direct illuminance of 1000 lux or more.
+
+    Args:
+        results: Path to results folder or a Results class object.
+        grids_filter: The name of a grid or a pattern to filter the grids.
+            Defaults to '*'.
+        shade_transmittance: A value to use as a multiplier in place of solar
+            shading. This input can be either a single value that will be used
+            for all aperture groups, or a dictionary where aperture groups are
+            keys, and the value for each key is the shade transmittance. Values
+            for shade transmittance must be 1 > value > 0.
+            Defaults to 0.02.
+        use_states: A boolean to note whether to use the simulated states. Set
+            to True to use the simulated states. The default is False which will
+            use the shade transmittance instead.
+
+    Returns:
+        Tuple: A tuple with a dictionary of the annual schedule and a
+            dictionary of hours where no shading configuration comply with the
+            2% rule.
+    """
+    if not isinstance(results, AnnualDaylight):
+        results = AnnualDaylight(results)
+
+    grids_info = results._filter_grids(grids_filter=grids_filter)
+    schedule = occupancy_schedule_8_to_6(as_list=True)
+    occ_pattern = \
+        filter_schedule_by_hours(results.sun_up_hours, schedule=schedule)[0]
+    occ_mask = np.array(occ_pattern)
+
+    states_schedule = defaultdict(list)
+    fail_to_comply = {}
+    shd_trans_dict = {}
+
+    for grid_info in grids_info:
+        grid_count = grid_info['count']
+        light_paths = [lp[0] for lp in grid_info['light_path']]
+
+        shade_transmittances, shd_trans_dict = (
+            shade_transmittance_per_light_path(
+                light_paths, shade_transmittance, shd_trans_dict
+            )
+        )
+
+        if len(light_paths) > 6:
+            if use_states:
+                states_schedule, fail_to_comply = states_schedule_descending(
+                    results, grid_info, light_paths, occ_mask,
+                    states_schedule, fail_to_comply)
+            else:
+                states_schedule, fail_to_comply = shd_trans_schedule_descending(
+                    results, grid_info, light_paths, shade_transmittances, occ_mask,
+                    states_schedule, fail_to_comply)
+        else:
+            if use_states:
+                combinations = results._get_state_combinations(grid_info)
+            else:
+                shade_transmittances, shd_trans_dict = shade_transmittance_per_light_path(
+                    light_paths, shade_transmittance, shd_trans_dict)
+                keys, values = zip(*shade_transmittances.items())
+                combinations = [dict(zip(keys, v)) for v in itertools.product(*values)]
+
+            array_list_combinations = []
+            for combination in combinations:
+                combination_arrays = []
+                for light_path, value in combination.items():
+                    if use_states:
+                        combination_arrays.append(
+                            results._get_array(grid_info, light_path, state=value,
+                                               res_type='direct')
+                        )
+                    else:
+                        array = results._get_array(
+                            grid_info, light_path, res_type='direct')
+                        if value == 1:
+                            combination_arrays.append(array)
+                        else:
+                            combination_arrays.append(array * value)
+                combination_array = sum(combination_arrays)
+
+                combination_percentage = \
+                    (combination_array >= 1000).sum(axis=0) / grid_count
+                array_list_combinations.append(combination_percentage)
+            array_combinations = np.array(array_list_combinations)
+            array_combinations[array_combinations > 0.02] = -np.inf
+
+            grid_comply = np.where(np.all(array_combinations==-np.inf, axis=0))[0]
+            if grid_comply.size != 0:
+                grid_comply = np.array(results.sun_up_hours)[grid_comply]
+                fail_to_comply[grid_info['name']] = \
+                    [int(hoy) for hoy in grid_comply]
+
+            array_combinations_filter = np.apply_along_axis(
+                filter_array, 1, array_combinations, occ_mask
+            )
+            max_indices = array_combinations_filter.argmax(axis=0)
+            # select the combination for each hour
+            combinations = [combinations[idx] for idx in max_indices]
+            # merge the combinations of dicts
+            for combination in combinations:
+                for light_path, value in combination.items():
+                    if light_path != '__static_apertures__':
+                        states_schedule[light_path].append(value)
+
+    occupancy_hoys = schedule_to_hoys(schedule, results.sun_up_hours)
+
+    # map states to 8760 values
+    if use_states:
+        aperture_group_schedules = []
+        for identifier, values in states_schedule.items():
+            mapped_states = results.values_to_annual(
+                occupancy_hoys, values, results.timestep, dtype=np.int32)
+            aperture_group_schedules.append(
+                ApertureGroupSchedule(identifier, mapped_states.tolist())
+            )
+        states_schedule = \
+            DynamicSchedule.from_group_schedules(aperture_group_schedules)
+    else:
+        for light_path, shd_trans in states_schedule.items():
+            mapped_states = results.values_to_annual(
+                occupancy_hoys, shd_trans, results.timestep)
+            states_schedule[light_path] = mapped_states
+
+    return states_schedule, fail_to_comply, shd_trans_dict
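Not part of the diff: a small usage sketch of the `shade_transmittance_per_light_path` helper above, assuming the 0.4.459 wheel is installed. The light-path names are placeholders; a single float is expanded to every aperture group, while `__static_apertures__` always keeps a multiplier of 1.

```python
from honeybee_radiance_postprocess.ies.lm import shade_transmittance_per_light_path

light_paths = ['Room_1_South_Window', '__static_apertures__']  # placeholder names
shade_transmittances, shd_trans_dict = shade_transmittance_per_light_path(
    light_paths, 0.05, {})

print(shade_transmittances)
# {'Room_1_South_Window': [1, 0.05], '__static_apertures__': [1, 1]}
print(shd_trans_dict)
# {'Room_1_South_Window': 0.05, '__static_apertures__': 1}
```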
honeybee_radiance_postprocess/ies/lm_schedule.py

@@ -0,0 +1,243 @@
+"""Module for dynamic LM schedules."""
+from typing import Tuple
+import numpy as np
+
+from ..results.annual_daylight import AnnualDaylight
+from ..util import filter_array
+
+
+def shd_trans_schedule_descending(
+        results: AnnualDaylight, grid_info, light_paths, shade_transmittances, occ_mask,
+        states_schedule, fail_to_comply
+        ) -> Tuple[dict, dict]:
+    grid_count = grid_info['count']
+    full_direct = []
+    full_thresh = []
+    full_shd_trans_array = []
+    for light_path in light_paths:
+        array = results._get_array(grid_info, light_path, res_type="direct")
+        array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+        full_direct.append(array)
+        full_thresh.append((array >= 1000).sum(axis=0))
+        full_shd_trans_array.append(shade_transmittances[light_path][1])
+
+    # Sum the array element-wise.
+    # This array is the sum of all direct illuminance without shade
+    # transmittance.
+    full_direct_sum = sum(full_direct)
+
+    # Create base list of shading combinations (all set to 1).
+    # We will replace the 1s later.
+    combinations = [
+        {light_path: 1 for light_path in light_paths}
+        for i in range(full_direct_sum.shape[1])
+    ]
+
+    # Find the percentage of floor area >= 1000 lux.
+    # This array is the percentage for each hour (axis=0).
+    direct_pct_above = (full_direct_sum >= 1000).sum(axis=0) / grid_count
+
+    # Find the indices where the percentage of floor area is > 2%.
+    # This array is the problematic hours.
+    above_2_indices = np.where(direct_pct_above > 0.02)[0]
+
+    # Use the indices to get the relevant hours.
+    direct_sum = np.take(full_direct_sum, above_2_indices, axis=1)
+
+    # Use the indices to get the relevant hours.
+    direct = np.take(full_direct, above_2_indices, axis=2)
+
+    # Use the indices to get the relevant hours.
+    thresh = np.take(full_thresh, above_2_indices, axis=1)
+
+    # Sort and get indices. Negate the array to get descending order.
+    # Descending order puts the "highest offender" light path first.
+    sort_thresh = np.argsort(-thresh, axis=0).transpose()
+
+    _combinations = []
+    _combinations.insert(
+        0, (np.arange(full_direct_sum.shape[1]), combinations)
+    )
+
+    if np.any(above_2_indices):
+        # There are hours where the percentage of floor area is > 2%.
+        for idx, lp in enumerate(light_paths):
+            # Take column. For each iteration it will take the next column
+            # in descending order, i.e., the "highest offender" is the first
+            # column.
+            sort_indices = np.take(sort_thresh, idx, axis=1)
+
+            # Map light path identifiers to indices.
+            light_path_ids = np.take(light_paths, sort_indices)
+
+            # Map shade transmittance to indices.
+            shd_trans_array = np.take(full_shd_trans_array, sort_indices)
+
+            # Create combination for the subset.
+            _subset_combination = [
+                {light_path: _shd_trans} for light_path, _shd_trans in
+                zip(light_path_ids, shd_trans_array)
+            ]
+            _combinations.insert(0, (above_2_indices, _subset_combination))
+
+            # Take the values from each array by indexing.
+            direct_array = \
+                direct[sort_indices, :, range(len(sort_indices))].transpose()
+
+            # Subtract the illuminance values.
+            direct_sum = direct_sum - (direct_array * (1 - shd_trans_array))
+
+            # Find the percentage of floor area >= 1000 lux.
+            direct_pct_above = (direct_sum >= 1000).sum(axis=0) / grid_count
+
+            # Find the indices where the percentage of floor area is > 2%.
+            above_2_indices = np.where(direct_pct_above > 0.02)[0]
+
+            # Break if there are no hours above 2%.
+            if not np.any(above_2_indices):
+                break
+
+            # Update variables for the next iteration.
+            direct_sum = np.take(direct_sum, above_2_indices, axis=1)
+            direct = np.take(direct, above_2_indices, axis=2)
+            thresh = np.take(thresh, above_2_indices, axis=1)
+            sort_thresh = np.take(sort_thresh, above_2_indices, axis=0)
+
+        if np.any(above_2_indices):
+            # There are hours not complying with the 2% rule.
+            previous_indices = []
+            previous_combination = []
+            grid_comply = []
+            # Merge the combinations from the iterations of the subsets.
+            for i, subset in enumerate(_combinations):
+                if i == 0:
+                    previous_indices = subset[0]
+                else:
+                    _indices = subset[0]
+                    grid_comply = []
+                    for _pr_idx in previous_indices:
+                        grid_comply.append(_indices[_pr_idx])
+                    previous_indices = grid_comply
+            # Convert indices to sun up hours indices.
+            filter_indices = np.where(occ_mask.astype(bool))[0]
+            grid_comply = [filter_indices[_gc] for _gc in grid_comply]
+            grid_comply = np.array(results.sun_up_hours)[grid_comply]
+            fail_to_comply[grid_info['name']] = \
+                [int(hoy) for hoy in grid_comply]
+
+        previous_indices = None
+        previous_combination = None
+        # Merge the combinations from the iterations of the subsets.
+        for i, subset in enumerate(_combinations):
+            if i == 0:
+                previous_indices, previous_combination = subset
+            else:
+                _indices, _combination = subset
+                for _pr_idx, _pr_comb in \
+                        zip(previous_indices, previous_combination):
+                    for light_path, _shd_trans in _pr_comb.items():
+                        _combination[_pr_idx][light_path] = _shd_trans
+                previous_indices = _indices
+                previous_combination = _combination
+
+        combinations = _combination
+
+    # Merge the combinations of dicts.
+    for combination in combinations:
+        for light_path, shd_trans in combination.items():
+            if light_path != "__static_apertures__":
+                states_schedule[light_path].append(shd_trans)
+
+    return states_schedule, fail_to_comply
+
+
+def states_schedule_descending(
+        results: AnnualDaylight, grid_info, light_paths, occ_mask,
+        states_schedule, fail_to_comply
+        ) -> Tuple[dict, dict]:
+    grid_count = grid_info['count']
+    full_direct = []
+    full_thresh = []
+    full_direct_blinds = []
+    for light_path in light_paths:
+        array = results._get_array(
+            grid_info, light_path, state=0, res_type="direct")
+        array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+        full_direct.append(array)
+        full_thresh.append((array >= 1000).sum(axis=0))
+
+        array = results._get_array(
+            grid_info, light_path, state=1, res_type="direct")
+        array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+        full_direct_blinds.append(array)
+
+    full_direct = np.array(full_direct)
+    full_direct_blinds = np.array(full_direct_blinds)
+    full_direct_sum = full_direct.sum(axis=0)
+
+    new_array = full_direct.copy()
+
+    percentage_sensors = (full_direct_sum >= 1000).sum(axis=0) / grid_count
+    if not np.any(percentage_sensors > 0.02):
+        combinations = [
+            {light_path: 0 for light_path in light_paths}
+            for i in range(full_direct_sum.shape[1])]
+    else:
+        tracking_array = np.zeros(
+            (new_array.shape[0], new_array.shape[2]), dtype=int)
+
+        percentage_sensors = (full_direct >= 1000).sum(axis=1) / grid_count
+
+        ranking_indices = np.argsort(-percentage_sensors, axis=0)
+
+        for rank in range(ranking_indices.shape[0]):
+            # Calculate the percentage of sensors with values >= 1000 for the current new_array
+            summed_array = np.sum(new_array, axis=0)
+            percentage_sensors_summed = np.sum(
+                summed_array >= 1000, axis=0) / grid_count
+            indices_above_2_percent = np.where(
+                percentage_sensors_summed > 0.02)[0]
+
+            # Exit if there are no more hours exceeding the threshold
+            if len(indices_above_2_percent) == 0:
+                break
+
+            # Array indices to use for replacement for these hours
+            replace_indices = indices_above_2_percent
+            array_indices = ranking_indices[rank, replace_indices]
+
+            # Use advanced indexing to replace values in new_array for these hours
+            for hour_idx, array_idx in zip(replace_indices, array_indices):
+                new_array[array_idx, :, hour_idx] = full_direct_blinds[
+                    array_idx, :, hour_idx
+                ]
+
+            # Update the tracking array
+            tracking_array[array_indices, replace_indices] = 1
+
+        combinations = []
+        for hour in range(new_array.shape[2]):
+            hour_dict = {
+                light_paths[i]: tracking_array[i, hour]
+                for i in range(tracking_array.shape[0])}
+            combinations.append(hour_dict)
+
+        final_summed_array = np.sum(new_array, axis=0)
+        final_percentage_sensors_summed = (
+            final_summed_array >= 1000).sum(
+            axis=0) / grid_count
+        final_indices_above_2_percent = np.where(
+            final_percentage_sensors_summed > 0.02)[0]
+        if np.any(final_indices_above_2_percent):
+            sun_up_hours_indices = np.where(occ_mask == 1)[0][
+                final_indices_above_2_percent]
+            grid_comply = np.array(results.sun_up_hours)[sun_up_hours_indices]
+            fail_to_comply[grid_info['name']] = [
+                int(hoy) for hoy in grid_comply]
+
+    for combination in combinations:
+        for light_path, value in combination.items():
+            if light_path != '__static_apertures__':
+                states_schedule[light_path].append(value)
+
+    return states_schedule, fail_to_comply
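Not part of the diff: a standalone NumPy sketch (with made-up numbers) of the "2% rule" test that both schedule functions above repeat, where rows are sensors and columns are occupied sun-up hours.

```python
import numpy as np

direct = np.array([
    [1200.,  300.,  50.],
    [ 900., 1500.,  20.],
    [ 100.,  200.,  10.],
    [  80., 1100.,   5.],
])  # (sensor_count, hour_count) direct illuminance in lux
grid_count = direct.shape[0]

# fraction of sensors receiving 1000 lux or more of direct illuminance, per hour
pct_above = (direct >= 1000).sum(axis=0) / grid_count
print(pct_above)                      # [0.25 0.5  0.  ]
print(np.where(pct_above > 0.02)[0])  # hours that still need more shading: [0 1]
```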
honeybee_radiance_postprocess/well/__init__.py

@@ -0,0 +1 @@
+"""honeybee-radiance-postprocess library."""
honeybee_radiance_postprocess/well/well.py

@@ -0,0 +1,393 @@
+"""Functions for WELL post-processing."""
+from typing import Tuple, Union
+from pathlib import Path
+from collections import defaultdict
+import json
+import itertools
+import numpy as np
+
+from ladybug.analysisperiod import AnalysisPeriod
+from ladybug.datatype.generic import GenericType
+from ladybug.color import Colorset
+from ladybug.datacollection import HourlyContinuousCollection
+from ladybug.datatype.fraction import Fraction
+from ladybug.datatype.time import Time
+from ladybug.legend import LegendParameters
+from ladybug.header import Header
+from honeybee.model import Model
+from honeybee.units import conversion_factor_to_meters
+from honeybee_radiance.writer import _filter_by_pattern
+from honeybee_radiance.postprocess.annual import filter_schedule_by_hours
+
+from ..metrics import da_array2d
+from ..annual import schedule_to_hoys, occupancy_schedule_8_to_6
+from ..results.annual_daylight import AnnualDaylight
+from ..util import filter_array, recursive_dict_merge
+from ..dynamic import DynamicSchedule, ApertureGroupSchedule
+from ..ies.lm import dynamic_schedule_direct_illuminance
+
+
+def _create_grid_summary(
+        grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid, pass_sda,
+        total_floor, area_weighted=True):
+    """Create a WELL summary for a single grid.
+
+    Args:
+        grid_info: Grid information.
+        sda_grid: Spatial Daylight Autonomy.
+        pass_sda: The percentage of the sensor points or floor area that
+            passes sDA.
+        total_floor: The number of sensor points or floor area.
+        area_weighted: Boolean to determine if the results are area
+            weighted. Defaults to True.
+
+    Returns:
+        Tuple:
+        - summary_grid: Summary of each grid individually.
+    """
+    grid_id = grid_info['full_id']
+    grid_name = grid_info['name']
+    grid_summary = {
+        grid_id: {}
+    }
+
+    if area_weighted:
+        _grid_summary = {
+            grid_id: {
+                'name': grid_name,
+                'full_id': grid_id,
+                'sda': round(sda_grid, 2),
+                'sda_blinds_up': round(sda_blinds_up_grid, 2),
+                'sda_blinds_down': round(sda_blinds_down_grid, 2),
+                'floor_area_passing_sda': round(pass_sda, 2),
+                'total_floor_area': round(total_floor, 2)
+            }
+        }
+    else:
+        _grid_summary = {
+            grid_id: {
+                'name': grid_name,
+                'full_id': grid_id,
+                'sda': round(sda_grid, 2),
+                'sda_blinds_up': round(sda_blinds_up_grid, 2),
+                'sda_blinds_down': round(sda_blinds_down_grid, 2),
+                'sensor_count_passing_sda': int(round(pass_sda, 2)),
+                'total_sensor_count': total_floor
+            }
+        }
+
+    recursive_dict_merge(grid_summary, _grid_summary)
+
+    return grid_summary
+
+
+def _well_summary(
+        pass_sda_grids: list, grids_info: list,
+        grid_areas: list, pass_sda_blinds_up_grids: list,
+        pass_sda_blinds_down_grids: list) -> Tuple[dict, dict]:
+    """Create combined summary and summary for each grid individually.
+
+    Args:
+        pass_sda_grids: A list where each sublist is a list of True/False that
+            tells if each sensor point passes sDA.
+        grids_info: A list of grid information.
+        grid_areas: A list where each sublist is the area of each sensor point.
+            The alternative is a list of None values for each grid information.
+
+    Returns:
+        Tuple:
+        - summary: Summary of of all grids combined.
+        - summary_grid: Summary of each grid individually.
+    """
+    summary = {}
+    summary_grid = {}
+
+    if all(grid_area is not None for grid_area in grid_areas):
+        # weighted by mesh face area
+        total_area = 0
+        total_area_pass_sda = 0
+        for (pass_sda, grid_area, grid_info, pass_sda_blinds_up,
+             pass_sda_blinds_down) in \
+                zip(pass_sda_grids, grid_areas, grids_info,
+                    pass_sda_blinds_up_grids, pass_sda_blinds_down_grids):
+            total_grid_area = grid_area.sum()
+
+            area_pass_sda = grid_area[pass_sda].sum()
+            area_pass_sda_blind_up = grid_area[pass_sda_blinds_up].sum()
+            area_pass_sda_blinds_down = grid_area[pass_sda_blinds_down].sum()
+            sda_grid = area_pass_sda / total_grid_area * 100
+            sda_blinds_up_grid = area_pass_sda_blind_up / total_grid_area * 100
+            sda_blinds_down_grid = area_pass_sda_blinds_down / total_grid_area * 100
+
+            # grid summary
+            grid_summary = \
+                _create_grid_summary(
+                    grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid,
+                    area_pass_sda, total_grid_area, area_weighted=True
+                )
+
+            recursive_dict_merge(summary_grid, grid_summary)
+
+            total_area += total_grid_area
+            total_area_pass_sda += area_pass_sda
+
+        summary['sda'] = round(total_area_pass_sda / total_area * 100, 2)
+        summary['floor_area_passing_sda'] = total_area_pass_sda
+        summary['total_floor_area'] = total_area
+    else:
+        # assume all sensor points cover the same area
+        total_sensor_count = 0
+        total_sensor_count_pass_sda = 0
+        for (pass_sda, grid_info, pass_sda_blinds_up, pass_sda_blinds_down) in \
+                zip(pass_sda_grids, grids_info, pass_sda_blinds_up_grids,
+                    pass_sda_blinds_down_grids):
+            grid_count = grid_info['count']
+
+            sensor_count_pass_sda = pass_sda.sum()
+            sensor_count_pass_sda_blinds_up = pass_sda_blinds_up.sum()
+            sensor_count_pass_sda_blinds_down = pass_sda_blinds_down.sum()
+            sda_grid = sensor_count_pass_sda / grid_count * 100
+            sda_blinds_up_grid = sensor_count_pass_sda_blinds_up / grid_count * 100
+            sda_blinds_down_grid = sensor_count_pass_sda_blinds_down / grid_count * 100
+
+            # grid summary
+            grid_summary = \
+                _create_grid_summary(
+                    grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid,
+                    sensor_count_pass_sda, grid_count, area_weighted=False
+                )
+
+            recursive_dict_merge(summary_grid, grid_summary)
+
+            total_sensor_count += grid_count
+            total_sensor_count_pass_sda += sensor_count_pass_sda
+
+        summary['sda'] = round(total_sensor_count_pass_sda / total_sensor_count * 100, 2)
+        summary['sensor_count_passing_sda'] = int(total_sensor_count_pass_sda)
+        summary['total_sensor_count'] = total_sensor_count
+
+    return summary, summary_grid
+
+
+def well_annual_daylight(
+        results: Union[str, AnnualDaylight], grids_filter: str = '*',
+        shade_transmittance: Union[float, dict] = 0.05,
+        use_states: bool = False, states_schedule: dict = None,
+        threshold: float = 300, target_time: float = 50, sub_folder: str = None):
+    """Calculate credits for WELL L06.
+
+    Args:
+        results: Path to results folder or a Results class object.
+        grids_filter: The name of a grid or a pattern to filter the grids.
+            Defaults to '*'.
+        shade_transmittance: A value to use as a multiplier in place of solar
+            shading. This input can be either a single value that will be used
+            for all aperture groups, or a dictionary where aperture groups are
+            keys, and the value for each key is the shade transmittance. Values
+            for shade transmittance must be 1 > value > 0.
+            Defaults to 0.05.
+        use_states: A boolean to note whether to use the simulated states. Set
+            to True to use the simulated states. The default is False which will
+            use the shade transmittance instead.
+        states_schedule: A custom dictionary of shading states. In case this is
+            left empty, the function will calculate a shading schedule by using
+            the shade_transmittance input. If a states schedule is provided it
+            will check that it is complying with the 2% rule. Defaults to None.
+        threshold: Threshold value for daylight autonomy. Default: 300.
+        target_time: A minimum threshold of occupied time (eg. 50% of the
+            time), above which a given sensor passes and contributes to the
+            spatial daylight autonomy. Defaults to 50.
+        sub_folder: Relative path for a subfolder to write the output. If None,
+            the files will not be written. Defaults to None.
+
+    Returns:
+        Tuple:
+        - summary: Summary of all grids combined.
+        - summary_grid: Summary of each grid individually.
+        - da_grids: List of daylight autonomy values for each grid. Each item
+            in the list is a NumPy array of DA values.
+        - states_schedule: A dictionary of annual shading schedules for each
+            aperture group.
+        - grids_info: Grid information.
+    """
+    schedule = occupancy_schedule_8_to_6(as_list=True)
+
+    if not isinstance(results, AnnualDaylight):
+        results = AnnualDaylight(results, schedule=schedule)
+    else:
+        # set schedule to default leed schedule
+        results.schedule = schedule
+
+    occ_mask = results.occ_mask
+    total_occ = results.total_occ
+
+    grids_info = results._filter_grids(grids_filter=grids_filter)
+
+    if not states_schedule:
+        states_schedule, fail_to_comply, shd_trans_dict = dynamic_schedule_direct_illuminance(
+            results, grids_filter=grids_filter, shade_transmittance=shade_transmittance, use_states=use_states)
+    else:
+        raise NotImplementedError(
+            'Custom input for argument states_schedule is not yet implemented.'
+        )
+
+    # check to see if there is a HBJSON with sensor grid meshes for areas
+    grid_areas, units_conversion = [], 1
+    for base_file in Path(results.folder).parent.iterdir():
+        if base_file.suffix in ('.hbjson', '.hbpkl'):
+            hb_model = Model.from_file(base_file)
+            units_conversion = conversion_factor_to_meters(hb_model.units)
+            filt_grids = _filter_by_pattern(
+                hb_model.properties.radiance.sensor_grids, filter=grids_filter)
+            for s_grid in filt_grids:
+                if s_grid.mesh is not None:
+                    grid_areas.append(s_grid.mesh.face_areas)
+            grid_areas = [np.array(grid) for grid in grid_areas]
+    if not grid_areas:
+        grid_areas = [None] * len(grids_info)
+
+    # spatial daylight autonomy
+    da_grids = []
+    pass_sda_grids = []
+    pass_sda_blinds_up_grids = []
+    pass_sda_blinds_down_grids = []
+    for grid_info in grids_info:
+        light_paths = [lp[0] for lp in grid_info['light_path']]
+        base_zero_array = np.apply_along_axis(filter_array, 1, np.zeros(
+            (grid_info['count'], len(results.sun_up_hours))), occ_mask)
+        arrays = [base_zero_array.copy()]
+        arrays_blinds_up = [base_zero_array.copy()]
+        arrays_blinds_down = [base_zero_array.copy()]
+        # combine total array for all light paths
+        if use_states:
+            array = results._array_from_states(grid_info, states=states_schedule, zero_array=True)
+            array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+
+            for light_path in light_paths:
+                # do an extra pass to calculate with blinds always up or down
+                if light_path != '__static_apertures__':
+                    array_blinds_up = results._get_array(
+                        grid_info, light_path, state=0, res_type='total')
+                    array_filter = np.apply_along_axis(
+                        filter_array, 1, array_blinds_up, occ_mask)
+                    arrays_blinds_up.append(array_filter)
+                    array_blinds_down = results._get_array(
+                        grid_info, light_path, state=1, res_type='total')
+                    array_filter = np.apply_along_axis(
+                        filter_array, 1, array_blinds_down, occ_mask)
+                    arrays_blinds_down.append(array_filter)
+                else:
+                    static_array = results._get_array(
+                        grid_info, light_path, state=0, res_type='total')
+                    array_filter = np.apply_along_axis(
+                        filter_array, 1, static_array, occ_mask)
+                    arrays_blinds_up.append(array_filter)
+                    arrays_blinds_down.append(array_filter)
+        else:
+            for light_path in light_paths:
+                array = results._get_array(
+                    grid_info, light_path, res_type='total')
+                array_filter = np.apply_along_axis(
+                    filter_array, 1, array, occ_mask)
+                if light_path != '__static_apertures__':
+                    sun_up_hours = np.array(results.sun_up_hours).astype(int)
+                    shd_trans_array = states_schedule[light_path][sun_up_hours]
+                    shd_trans_array = shd_trans_array[occ_mask.astype(bool)]
+                    arrays.append(array_filter * shd_trans_array)
+                    arrays_blinds_up.append(array_filter)
+                    arrays_blinds_down.append(
+                        array_filter * shd_trans_dict[light_path])
+                else:
+                    arrays.append(array_filter)
+                    arrays_blinds_up.append(array_filter)
+                    arrays_blinds_down.append(array_filter)
+            array = sum(arrays)
+
+        array_blinds_up = sum(arrays_blinds_up)
+        array_blinds_down = sum(arrays_blinds_down)
+        # calculate da per grid
+        da_grid = da_array2d(array, total_occ=total_occ, threshold=threshold)
+        da_grids.append(da_grid)
+        da_blinds_up_grid = da_array2d(
+            array_blinds_up, total_occ=total_occ, threshold=threshold)
+        da_blinds_down_grid = da_array2d(
+            array_blinds_down, total_occ=total_occ, threshold=threshold)
+        # calculate sda per grid
+        pass_sda_grids.append(da_grid >= target_time)
+        pass_sda_blinds_up_grids.append(da_blinds_up_grid >= target_time)
+        pass_sda_blinds_down_grids.append(da_blinds_down_grid >= target_time)
+
+    # create summaries for all grids and each grid individually
+    summary, summary_grid = _well_summary(
+        pass_sda_grids, grids_info, grid_areas,
+        pass_sda_blinds_up_grids, pass_sda_blinds_down_grids)
+
+    # credits
+    if not fail_to_comply:
+        if summary['sda'] >= 75:
+            summary['credits'] = 2
+        elif summary['sda'] >= 55:
+            summary['credits'] = 1
+        else:
+            summary['credits'] = 0
+    else:
+        summary['credits'] = 0
+        fail_to_comply_rooms = ', '.join(list(fail_to_comply.keys()))
+        note = (
+            '0 credits have been awarded. The following sensor grids have at '
+            'least one hour where 2% of the floor area receives direct '
+            f'illuminance of 1000 lux or more: {fail_to_comply_rooms}.'
+        )
+        summary['note'] = note
+
+    # convert to datacollection
+    def to_datacollection(aperture_group: str, values: np.ndarray):
+        # convert values to 0 and 1 (0 = no shading, 1 = shading)
+        if use_states:
+            header = Header(data_type=GenericType(aperture_group, ''), unit='',
+                            analysis_period=AnalysisPeriod())
+            hourly_data = HourlyContinuousCollection(header=header, values=values)
+        else:
+            values[values == 1] = 0
+            values[values == shd_trans_dict[aperture_group]] = 1
+            header = Header(data_type=GenericType(aperture_group, ''), unit='',
+                            analysis_period=AnalysisPeriod(),
+                            metadata={'Shade Transmittance': shd_trans_dict[aperture_group]})
+            hourly_data = HourlyContinuousCollection(header=header, values=values.tolist())
+        return hourly_data.to_dict()
+
+    if use_states:
+        states_schedule = {k:to_datacollection(k, v['schedule']) for k, v in states_schedule.to_dict().items()}
+    else:
+        states_schedule = {k:to_datacollection(k, v) for k, v in states_schedule.items()}
+
+    if sub_folder:
+        folder = Path(sub_folder)
+        folder.mkdir(parents=True, exist_ok=True)
+
+        summary_file = folder.joinpath('summary.json')
+        summary_file.write_text(json.dumps(summary, indent=2))
+        summary_grid_file = folder.joinpath('summary_grid.json')
+        summary_grid_file.write_text(json.dumps(summary_grid, indent=2))
+        states_schedule_file = folder.joinpath('states_schedule.json')
+        states_schedule_file.write_text(json.dumps(states_schedule))
+        grids_info_file = folder.joinpath('grids_info.json')
+        grids_info_file.write_text(json.dumps(grids_info, indent=2))
+
+        for (da, grid_info) in \
+                zip(da_grids, grids_info):
+            grid_id = grid_info['full_id']
+            da_file = folder.joinpath('results', 'da', f'{grid_id}.da')
+            da_file.parent.mkdir(parents=True, exist_ok=True)
+            np.savetxt(da_file, da, fmt='%.2f')
+
+        da_grids_info_file = folder.joinpath(
+            'results', 'da', 'grids_info.json')
+        da_grids_info_file.write_text(json.dumps(grids_info, indent=2))
+
+        states_schedule_err_file = \
+            folder.joinpath('states_schedule_err.json')
+        states_schedule_err_file.write_text(json.dumps(fail_to_comply))
+
+    return (summary, summary_grid, da_grids, states_schedule,
+            fail_to_comply, grids_info)
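Not part of the diff: a hedged sketch of calling the new module directly rather than through the CLI. The results path is a placeholder; it has to point to an annual-daylight recipe output folder that includes aperture groups.

```python
from honeybee_radiance_postprocess.well.well import well_annual_daylight

(summary, summary_grid, da_grids, states_schedule,
 fail_to_comply, grids_info) = well_annual_daylight(
    './results', grids_filter='*', shade_transmittance=0.05,
    use_states=False, sub_folder='well_summary')

print(summary['sda'], summary['credits'])  # overall sDA percentage and WELL L06 credits
```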
{honeybee_radiance_postprocess-0.4.457.dist-info → honeybee_radiance_postprocess-0.4.459.dist-info}/RECORD

@@ -20,12 +20,16 @@ honeybee_radiance_postprocess/cli/grid.py,sha256=gqnU3-HdggWCUg9mA1RLZJYHM7tH0v6
 honeybee_radiance_postprocess/cli/leed.py,sha256=bxGX2UBehYNcaPJWHL2yEasSP6dATD7B0aNNQOflqqM,3712
 honeybee_radiance_postprocess/cli/merge.py,sha256=oOqqud3VSo-3f3coDoUILcp78OI4DKxXLWCS1bi3PC4,5752
 honeybee_radiance_postprocess/cli/mtxop.py,sha256=UZJnjNpPjDmShy1-Mxos4H2vTUqk_yP3ZyaC1_LLFeI,5015
-honeybee_radiance_postprocess/cli/postprocess.py,sha256=
+honeybee_radiance_postprocess/cli/postprocess.py,sha256=yUR0J3EQyLRiINcjFfxb-9mUeEm3U-lZefjwq--lvtU,39256
 honeybee_radiance_postprocess/cli/schedule.py,sha256=6uIy98Co4zm-ZRcELo4Lfx_aN3lNiqPe-BSimXwt1F8,3877
 honeybee_radiance_postprocess/cli/translate.py,sha256=rwUjjDK_Ttjen4ooAMvugyDN5xfltEEFURDZ_Tb1w-g,7308
 honeybee_radiance_postprocess/cli/two_phase.py,sha256=xA6ayPv26DM5fuMkLhBMYGklf_j5ymowmncwJGXRgo8,7034
 honeybee_radiance_postprocess/cli/util.py,sha256=Be9cGmYhcV2W37ma6SgQPCWCpWLLLlroxRYN_l58kY0,4077
 honeybee_radiance_postprocess/cli/viewfactor.py,sha256=kU36YRzLya5PReYREjTfw3zOcWKHYZjVlVclyuR7Cqk,5245
+honeybee_radiance_postprocess/cli/well.py,sha256=JaedicMSvP7wfrX8opytnShsSVN5AnIr4dN-ZJBxrxM,3686
+honeybee_radiance_postprocess/ies/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
+honeybee_radiance_postprocess/ies/lm.py,sha256=kHvwd2uT8Y-c2TjpvQzjLrWzwgayWjpzpbwS2S6cEvo,9570
+honeybee_radiance_postprocess/ies/lm_schedule.py,sha256=ci58GXq2PntJ4yNUdI_x4UCRmq6KrLes-u7GeboX058,9954
 honeybee_radiance_postprocess/leed/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
 honeybee_radiance_postprocess/leed/leed.py,sha256=ZTemgCz7b8o1ZHPH0T02yY0EtF40BxnUAAiC8XvdEWM,33346
 honeybee_radiance_postprocess/leed/leed_schedule.py,sha256=s3by1sv1DtOlCawvaMvnIDvEo5D8ATEJvWQ_rEeJIHg,9956
@@ -33,9 +37,11 @@ honeybee_radiance_postprocess/results/__init__.py,sha256=1agBQbfT4Tf8KqSZzlfKYX8
 honeybee_radiance_postprocess/results/annual_daylight.py,sha256=11d4J1iIuITKuoWyWa-2_2WdrHYBULC0YP-mWBWi4JQ,34724
 honeybee_radiance_postprocess/results/annual_irradiance.py,sha256=5zwrr4MNeHUebbSRpSBbscPOZUs2AHmYCQfIIbdYImY,8298
 honeybee_radiance_postprocess/results/results.py,sha256=ABb_S8kDPruhGkDsfREXMg6K0p8FRhAZ3QIRUZCQPAI,54888
-honeybee_radiance_postprocess
-honeybee_radiance_postprocess
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
+honeybee_radiance_postprocess/well/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
+honeybee_radiance_postprocess/well/well.py,sha256=hsicVYFl-2DR4mtVANzUPu9NKA760elfYoIpOKc9bkQ,17501
+honeybee_radiance_postprocess-0.4.459.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+honeybee_radiance_postprocess-0.4.459.dist-info/METADATA,sha256=iWJMWWvA_uyFhoWy2wipcmxl_SUWLMLn0Uixanlu5go,2240
+honeybee_radiance_postprocess-0.4.459.dist-info/WHEEL,sha256=AHX6tWk3qWuce7vKLrj7lnulVHEdWoltgauo8bgCXgU,109
+honeybee_radiance_postprocess-0.4.459.dist-info/entry_points.txt,sha256=gFtVPx6UItXt27GfEZZO00eOZChJJEL6JwGSAB_O3rs,96
+honeybee_radiance_postprocess-0.4.459.dist-info/top_level.txt,sha256=4-sFbzy7ewP2EDqJV3jeFlAFx7SuxtoBBELWaKAnLdA,30
+honeybee_radiance_postprocess-0.4.459.dist-info/RECORD,,