honeybee-radiance-postprocess 0.4.555__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. honeybee_radiance_postprocess/__init__.py +1 -0
  2. honeybee_radiance_postprocess/__main__.py +4 -0
  3. honeybee_radiance_postprocess/annual.py +73 -0
  4. honeybee_radiance_postprocess/annualdaylight.py +289 -0
  5. honeybee_radiance_postprocess/annualirradiance.py +35 -0
  6. honeybee_radiance_postprocess/breeam/__init__.py +1 -0
  7. honeybee_radiance_postprocess/breeam/breeam.py +552 -0
  8. honeybee_radiance_postprocess/cli/__init__.py +33 -0
  9. honeybee_radiance_postprocess/cli/abnt.py +392 -0
  10. honeybee_radiance_postprocess/cli/breeam.py +96 -0
  11. honeybee_radiance_postprocess/cli/datacollection.py +133 -0
  12. honeybee_radiance_postprocess/cli/grid.py +295 -0
  13. honeybee_radiance_postprocess/cli/leed.py +143 -0
  14. honeybee_radiance_postprocess/cli/merge.py +161 -0
  15. honeybee_radiance_postprocess/cli/mtxop.py +161 -0
  16. honeybee_radiance_postprocess/cli/postprocess.py +1092 -0
  17. honeybee_radiance_postprocess/cli/schedule.py +103 -0
  18. honeybee_radiance_postprocess/cli/translate.py +216 -0
  19. honeybee_radiance_postprocess/cli/two_phase.py +252 -0
  20. honeybee_radiance_postprocess/cli/util.py +121 -0
  21. honeybee_radiance_postprocess/cli/viewfactor.py +157 -0
  22. honeybee_radiance_postprocess/cli/well.py +110 -0
  23. honeybee_radiance_postprocess/data_type.py +102 -0
  24. honeybee_radiance_postprocess/dynamic.py +273 -0
  25. honeybee_radiance_postprocess/electriclight.py +24 -0
  26. honeybee_radiance_postprocess/en17037.py +304 -0
  27. honeybee_radiance_postprocess/helper.py +266 -0
  28. honeybee_radiance_postprocess/ies/__init__.py +1 -0
  29. honeybee_radiance_postprocess/ies/lm.py +224 -0
  30. honeybee_radiance_postprocess/ies/lm_schedule.py +248 -0
  31. honeybee_radiance_postprocess/leed/__init__.py +1 -0
  32. honeybee_radiance_postprocess/leed/leed.py +801 -0
  33. honeybee_radiance_postprocess/leed/leed_schedule.py +256 -0
  34. honeybee_radiance_postprocess/metrics.py +439 -0
  35. honeybee_radiance_postprocess/reader.py +80 -0
  36. honeybee_radiance_postprocess/results/__init__.py +4 -0
  37. honeybee_radiance_postprocess/results/annual_daylight.py +752 -0
  38. honeybee_radiance_postprocess/results/annual_irradiance.py +196 -0
  39. honeybee_radiance_postprocess/results/results.py +1416 -0
  40. honeybee_radiance_postprocess/type_hints.py +38 -0
  41. honeybee_radiance_postprocess/util.py +211 -0
  42. honeybee_radiance_postprocess/vis_metadata.py +49 -0
  43. honeybee_radiance_postprocess/well/__init__.py +1 -0
  44. honeybee_radiance_postprocess/well/well.py +509 -0
  45. honeybee_radiance_postprocess-0.4.555.dist-info/METADATA +79 -0
  46. honeybee_radiance_postprocess-0.4.555.dist-info/RECORD +50 -0
  47. honeybee_radiance_postprocess-0.4.555.dist-info/WHEEL +5 -0
  48. honeybee_radiance_postprocess-0.4.555.dist-info/entry_points.txt +2 -0
  49. honeybee_radiance_postprocess-0.4.555.dist-info/licenses/LICENSE +661 -0
  50. honeybee_radiance_postprocess-0.4.555.dist-info/top_level.txt +1 -0
@@ -0,0 +1,801 @@
1
+ """Functions for LEED post-processing."""
2
+ from typing import Tuple, Union
3
+ from pathlib import Path
4
+ from collections import defaultdict
5
+ import json
6
+ import itertools
7
+ try:
8
+ import cupy as np
9
+ is_gpu = True
10
+ except ImportError:
11
+ is_gpu = False
12
+ import numpy as np
13
+
14
+ from ladybug.analysisperiod import AnalysisPeriod
15
+ from ladybug.datatype.generic import GenericType
16
+ from ladybug.color import Colorset
17
+ from ladybug.datacollection import HourlyContinuousCollection
18
+ from ladybug.datatype.fraction import Fraction
19
+ from ladybug.datatype.time import Time
20
+ from ladybug.legend import LegendParameters
21
+ from ladybug.header import Header
22
+ from honeybee.model import Model
23
+ from honeybee.units import conversion_factor_to_meters
24
+ from honeybee_radiance.writer import _filter_by_pattern
25
+ from honeybee_radiance.postprocess.annual import filter_schedule_by_hours
26
+
27
+ from ..metrics import da_array2d, ase_array2d
28
+ from ..annual import schedule_to_hoys, occupancy_schedule_8_to_6
29
+ from ..results.annual_daylight import AnnualDaylight
30
+ from ..util import recursive_dict_merge, filter_array2d
31
+ from ..dynamic import DynamicSchedule, ApertureGroupSchedule
32
+ from .leed_schedule import shd_trans_schedule_descending, states_schedule_descending
33
+
34
+ is_cpu = not is_gpu
35
+
36
+
37
+ def _create_grid_summary(
38
+ grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid, ase_grid,
39
+ pass_sda, pass_ase, total_floor, area_weighted=True):
40
+ """Create a LEED summary for a single grid.
41
+
42
+ Args:
43
+ grid_info: Grid information.
44
+ sda_grid: Spatial Daylight Autonomy.
45
+ ase_grid: Annual Sunlight Exposure.
46
+ pass_sda: The floor area or number of sensor points that
47
+ passes sDA.
48
+ pass_ase: The floor area or number of sensor points that
49
+ passes ASE.
50
+ total_floor: The number of sensor points or floor area.
51
+ area_weighted: Boolean to determine if the results are area
52
+ weighted. Defaults to True.
53
+
54
+ Returns:
55
+ A dictionary with a summary of the grid, keyed by its full ID.
57
+ """
58
+ grid_id = grid_info['full_id']
59
+ grid_name = grid_info['name']
60
+ grid_summary = {
61
+ grid_id: {}
62
+ }
63
+ if ase_grid > 10:
64
+ ase_note = (
65
+ 'The Annual Sunlight Exposure is greater than 10% for space: '
66
+ f'{grid_name}. Identify in writing how the space is designed to '
67
+ 'address glare.'
68
+ )
69
+ grid_summary[grid_id]['ase_note'] = ase_note
70
+
71
+ if area_weighted:
72
+ _grid_summary = {
73
+ grid_id: {
74
+ 'name': grid_name,
75
+ 'full_id': grid_id,
76
+ 'ase': round(ase_grid, 2),
77
+ 'sda': round(sda_grid, 2),
78
+ 'sda_blinds_up': round(sda_blinds_up_grid, 2),
79
+ 'sda_blinds_down': round(sda_blinds_down_grid, 2),
80
+ 'floor_area_passing_ase': round(pass_ase, 2),
81
+ 'floor_area_passing_sda': round(pass_sda, 2),
82
+ 'total_floor_area': round(total_floor, 2)
83
+ }
84
+ }
85
+ else:
86
+ _grid_summary = {
87
+ grid_id: {
88
+ 'name': grid_name,
89
+ 'full_id': grid_id,
90
+ 'ase': round(ase_grid, 2),
91
+ 'sda': round(sda_grid, 2),
92
+ 'sda_blinds_up': round(sda_blinds_up_grid, 2),
93
+ 'sda_blinds_down': round(sda_blinds_down_grid, 2),
94
+ 'sensor_count_passing_ase': int(round(pass_ase, 2)),
95
+ 'sensor_count_passing_sda': int(round(pass_sda, 2)),
96
+ 'total_sensor_count': total_floor
97
+ }
98
+ }
99
+
100
+ recursive_dict_merge(grid_summary, _grid_summary)
101
+
102
+ return grid_summary
103
+
104
+
105
+ def _leed_summary(
106
+ pass_ase_grids: list, pass_sda_grids: list, grids_info: list,
107
+ grid_areas: list, pass_sda_blinds_up_grids: list,
108
+ pass_sda_blinds_down_grids: list) -> Tuple[dict, dict]:
109
+ """Create combined summary and summary for each grid individually.
110
+
111
+ Args:
112
+ pass_ase_grids: A list where each sublist is a list of True/False that
113
+ tells if each sensor point passes ASE.
114
+ pass_sda_grids: A list where each sublist is a list of True/False that
115
+ tells if each sensor point passes sDA.
116
+ grids_info: A list of grid information.
117
+ grid_areas: A list where each item is an array with the area of each
118
+ sensor point in the grid, or a None value if no areas are available.
119
+
120
+ Returns:
121
+ Tuple:
122
+ - summary: Summary of all grids combined.
123
+ - summary_grid: Summary of each grid individually.
124
+ """
125
+ summary = {}
126
+ summary_grid = {}
127
+ if all(grid_area is not None for grid_area in grid_areas):
128
+ # weighted by mesh face area
129
+ total_area = 0
130
+ total_area_pass_ase = 0
131
+ total_area_pass_sda = 0
132
+ for (pass_ase, pass_sda, grid_area, grid_info, pass_sda_blinds_up,
133
+ pass_sda_blinds_down) in \
134
+ zip(pass_ase_grids, pass_sda_grids, grid_areas, grids_info,
135
+ pass_sda_blinds_up_grids, pass_sda_blinds_down_grids):
136
+ total_grid_area = float(grid_area.sum())
137
+
138
+ area_pass_ase = float(grid_area[pass_ase].sum())
139
+ ase_grid = float((total_grid_area - area_pass_ase) / total_grid_area * 100)
140
+
141
+ area_pass_sda = float(grid_area[pass_sda].sum())
142
+ area_pass_sda_blind_up = grid_area[pass_sda_blinds_up].sum()
143
+ area_pass_sda_blinds_down = grid_area[pass_sda_blinds_down].sum()
144
+ sda_grid = float(area_pass_sda / total_grid_area * 100)
145
+ sda_blinds_up_grid = float(area_pass_sda_blind_up / total_grid_area * 100)
146
+ sda_blinds_down_grid = float(area_pass_sda_blinds_down / total_grid_area * 100)
147
+
148
+ # grid summary
149
+ grid_summary = \
150
+ _create_grid_summary(
151
+ grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid,
152
+ ase_grid, area_pass_sda, area_pass_ase, total_grid_area,
153
+ area_weighted=True
154
+ )
155
+
156
+ recursive_dict_merge(summary_grid, grid_summary)
157
+
158
+ total_area += total_grid_area
159
+ total_area_pass_ase += area_pass_ase
160
+ total_area_pass_sda += area_pass_sda
161
+
162
+ summary['ase'] = round((total_area - total_area_pass_ase) / total_area * 100, 2)
163
+ summary['sda'] = round(total_area_pass_sda / total_area * 100, 2)
164
+ summary['floor_area_passing_ase'] = total_area_pass_ase
165
+ summary['floor_area_passing_sda'] = total_area_pass_sda
166
+ summary['total_floor_area'] = total_area
167
+ else:
168
+ # assume all sensor points cover the same area
169
+ total_sensor_count = 0
170
+ total_sensor_count_pass_ase = 0
171
+ total_sensor_count_pass_sda = 0
172
+ for (pass_ase, pass_sda, grid_info, pass_sda_blinds_up,
173
+ pass_sda_blinds_down) in \
174
+ zip(pass_ase_grids, pass_sda_grids, grids_info,
175
+ pass_sda_blinds_up_grids, pass_sda_blinds_down_grids):
176
+ grid_count = grid_info['count']
177
+ sensor_count_pass_ase = pass_ase.sum()
178
+ ase_grid = (grid_count - sensor_count_pass_ase) / grid_count * 100
179
+
180
+ sensor_count_pass_sda = pass_sda.sum()
181
+ sensor_count_pass_sda_blinds_up = pass_sda_blinds_up.sum()
182
+ sensor_count_pass_sda_blinds_down = pass_sda_blinds_down.sum()
183
+ sda_grid = sensor_count_pass_sda / grid_count * 100
184
+ sda_blinds_up_grid = sensor_count_pass_sda_blinds_up / grid_count * 100
185
+ sda_blinds_down_grid = sensor_count_pass_sda_blinds_down / grid_count * 100
186
+
187
+ # grid summary
188
+ grid_summary = \
189
+ _create_grid_summary(
190
+ grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid,
191
+ ase_grid, sensor_count_pass_sda, sensor_count_pass_ase,
192
+ grid_count, area_weighted=False
193
+ )
194
+
195
+ recursive_dict_merge(summary_grid, grid_summary)
196
+
197
+ total_sensor_count += grid_count
198
+ total_sensor_count_pass_ase += sensor_count_pass_ase
199
+ total_sensor_count_pass_sda += sensor_count_pass_sda
200
+
201
+ summary['ase'] = round((total_sensor_count - total_sensor_count_pass_ase) /
202
+ total_sensor_count * 100, 2
203
+ )
204
+ summary['sda'] = round(total_sensor_count_pass_sda / total_sensor_count * 100, 2)
205
+ summary['sensor_count_passing_ase'] = int(total_sensor_count_pass_ase)
206
+ summary['sensor_count_passing_sda'] = int(total_sensor_count_pass_sda)
207
+ summary['total_sensor_count'] = total_sensor_count
208
+
209
+ return summary, summary_grid
210
+
211
+
212
+ def _ase_hourly_percentage(
213
+ results: AnnualDaylight, array: np.ndarray, grid_info: dict,
214
+ direct_threshold: float = 1000, grid_area: Union[None, np.ndarray] = None
215
+ ) -> HourlyContinuousCollection:
216
+ """Calculate the percentage of floor area that receives greater than 1000
217
+ direct lux for each hour.
218
+
219
+ Args:
220
+ results: A Results object.
221
+ array: A NumPy array of the grid to process.
222
+ grid_info: Grid information of the grid to process.
223
+ direct_threshold: The direct illuminance threshold in lux. Defaults to 1000.
224
+ grid_area: Grid area as a NumPy array with an area value for each sensor
225
+ point, or a None value if there is no area associated with the
226
+ sensor point.
227
+
228
+ Returns:
229
+ An hourly data collection of the percentage of floor area that receives
230
+ greater than 1000 direct lux.
231
+ """
232
+ if grid_area is not None:
233
+ grid_area_2d = np.array([grid_area] * array.shape[1]).transpose()
234
+ area_above = \
235
+ np.where((array > direct_threshold), grid_area_2d, 0).sum(axis=0)
236
+ percentage_above = area_above / grid_area.sum() * 100
237
+ else:
238
+ percentage_above = \
239
+ (array > direct_threshold).sum(axis=0) / grid_info['count'] * 100
240
+
241
+ occupancy_hoys = schedule_to_hoys(results.schedule, results.sun_up_hours)
242
+ # map states to 8760 values
243
+ percentage_above = results.values_to_annual(
244
+ occupancy_hoys, percentage_above, results.timestep)
245
+ header = Header(Fraction('Percentage above 1000 direct lux'), '%',
246
+ AnalysisPeriod(timestep=results.timestep),
247
+ metadata={'SensorGrid': grid_info['name']})
248
+ data_collection = HourlyContinuousCollection(header, percentage_above.tolist())
249
+
250
+ return data_collection
251
+
252
+
253
+ def shade_transmittance_per_light_path(
254
+ light_paths: list, shade_transmittance: Union[float, dict],
255
+ shd_trans_dict: dict) -> Tuple[dict, dict]:
256
+ """Filter shade_transmittance by light paths and add default multiplier.
257
+
258
+ Args:
259
+ light_paths: A list of light paths.
260
+ shade_transmittance: A value to use as a multiplier in place of solar
261
+ shading. This input can be either a single value that will be used
262
+ for all aperture groups, or a dictionary where aperture groups are
263
+ keys, and the value for each key is the shade transmittance. Values
264
+ for shade transmittance must be 1 > value > 0.
265
+ shd_trans_dict: A dictionary used to store shade transmittance value
266
+ for each aperture group.
267
+
268
+ Returns:
269
+ A tuple of the shade transmittance dictionary per light path and the updated shd_trans_dict.
270
+ """
271
+ shade_transmittances = {}
272
+ if isinstance(shade_transmittance, dict):
273
+ for light_path in light_paths:
274
+ # default multiplier
275
+ shade_transmittances[light_path] = [1]
276
+ # add custom shade transmittance
277
+ if light_path in shade_transmittance:
278
+ shade_transmittances[light_path].append(
279
+ shade_transmittance[light_path])
280
+ shd_trans_dict[light_path] = shade_transmittance[light_path]
281
+ # add default shade transmittance (0.05)
282
+ elif light_path != '__static_apertures__':
283
+ shade_transmittances[light_path].append(0.05)
284
+ shd_trans_dict[light_path] = 0.05
285
+ else:
286
+ shade_transmittances[light_path].append(1)
287
+ shd_trans_dict[light_path] = 1
288
+ else:
289
+ shd_trans = float(shade_transmittance)
290
+ for light_path in light_paths:
291
+ # default multiplier
292
+ shade_transmittances[light_path] = [1]
293
+ # add custom shade transmittance
294
+ if light_path != '__static_apertures__':
295
+ shade_transmittances[light_path].append(shd_trans)
296
+ shd_trans_dict[light_path] = shd_trans
297
+ else:
298
+ shade_transmittances[light_path].append(1)
299
+ shd_trans_dict[light_path] = 1
300
+
301
+ return shade_transmittances, shd_trans_dict
302
+
303
+
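For reference, a minimal sketch of how the multiplier mapping above resolves for a mixed input. The aperture group names are hypothetical; only the import path and function signature come from this module:

    from honeybee_radiance_postprocess.leed.leed import (
        shade_transmittance_per_light_path)

    light_paths = ['SouthWindow', 'NorthWindow', '__static_apertures__']
    shade_transmittances, shd_trans = shade_transmittance_per_light_path(
        light_paths, shade_transmittance={'SouthWindow': 0.02}, shd_trans_dict={})
    # shade_transmittances -> {'SouthWindow': [1, 0.02],          custom value
    #                          'NorthWindow': [1, 0.05],          default 0.05
    #                          '__static_apertures__': [1, 1]}    always unshaded
    # shd_trans -> {'SouthWindow': 0.02, 'NorthWindow': 0.05,
    #               '__static_apertures__': 1}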
304
+ def leed_states_schedule(
305
+ results: Union[str, AnnualDaylight], grids_filter: str = '*',
306
+ shade_transmittance: Union[float, dict] = 0.05,
307
+ use_states: bool = False) -> Tuple[dict, dict, dict]:
308
+ """Calculate a schedule of each aperture group for LEED compliant sDA.
309
+
310
+ This function calculates an annual shading schedule of each aperture
311
+ group. Hour by hour it will select the least shaded aperture group
312
+ configuration, so that no more than 2% of the sensor points receive
313
+ direct illuminance of 1000 lux or more.
314
+
315
+ Args:
316
+ results: Path to results folder or a Results class object.
317
+ grids_filter: The name of a grid or a pattern to filter the grids.
318
+ Defaults to '*'.
319
+ shade_transmittance: A value to use as a multiplier in place of solar
320
+ shading. This input can be either a single value that will be used
321
+ for all aperture groups, or a dictionary where aperture groups are
322
+ keys, and the value for each key is the shade transmittance. Values
323
+ for shade transmittance must be 1 > value > 0.
324
+ Defaults to 0.05.
325
+ use_states: A boolean to note whether to use the simulated states. Set
326
+ to True to use the simulated states. The default is False which will
327
+ use the shade transmittance instead.
328
+
329
+ Returns:
330
+ Tuple: A tuple with a dictionary of the annual schedule, a dictionary of
331
+ hours where no shading configuration complies with the 2% rule, and a
332
+ dictionary of the shade transmittance value for each aperture group.
333
+ """
334
+ if not isinstance(results, AnnualDaylight):
335
+ results = AnnualDaylight(results)
336
+
337
+ grids_info = results._filter_grids(grids_filter=grids_filter)
338
+ schedule = occupancy_schedule_8_to_6(as_list=True)
339
+ occ_pattern = \
340
+ filter_schedule_by_hours(results.sun_up_hours, schedule=schedule)[0]
341
+ occ_mask = np.array(occ_pattern)
342
+
343
+ states_schedule = defaultdict(list)
344
+ fail_to_comply = {}
345
+ shd_trans_dict = {}
346
+
347
+ for grid_info in grids_info:
348
+ grid_states_schedule = defaultdict(list)
349
+ grid_count = grid_info['count']
350
+ light_paths = []
351
+ for lp in grid_info['light_path']:
352
+ for _lp in lp:
353
+ if _lp == '__static_apertures__' and len(lp) > 1:
354
+ pass
355
+ else:
356
+ light_paths.append(_lp)
357
+
358
+ shade_transmittances, shd_trans_dict = (
359
+ shade_transmittance_per_light_path(
360
+ light_paths, shade_transmittance, shd_trans_dict
361
+ )
362
+ )
363
+
364
+ if len(light_paths) > 6:
365
+ if use_states:
366
+ grid_states_schedule, fail_to_comply = states_schedule_descending(
367
+ results, grid_info, light_paths, occ_mask,
368
+ grid_states_schedule, fail_to_comply)
369
+ else:
370
+ grid_states_schedule, fail_to_comply = shd_trans_schedule_descending(
371
+ results, grid_info, light_paths, shade_transmittances, occ_mask,
372
+ grid_states_schedule, fail_to_comply)
373
+ else:
374
+ if use_states:
375
+ combinations = results._get_state_combinations(grid_info)
376
+ else:
377
+ shade_transmittances, shd_trans_dict = shade_transmittance_per_light_path(
378
+ light_paths, shade_transmittance, shd_trans_dict)
379
+ keys, values = zip(*shade_transmittances.items())
380
+ combinations = [dict(zip(keys, v)) for v in itertools.product(*values)]
381
+
382
+ array_list_combinations = []
383
+ for combination in combinations:
384
+ combination_arrays = []
385
+ for light_path, value in combination.items():
386
+ if use_states:
387
+ combination_arrays.append(
388
+ results._get_array(grid_info, light_path, state=value,
389
+ res_type='direct')
390
+ )
391
+ else:
392
+ array = results._get_array(
393
+ grid_info, light_path, res_type='direct')
394
+ if value == 1:
395
+ combination_arrays.append(array)
396
+ else:
397
+ combination_arrays.append(array * value)
398
+ combination_array = sum(combination_arrays)
399
+
400
+ combination_percentage = \
401
+ (combination_array >= 1000).sum(axis=0) / grid_count
402
+ array_list_combinations.append(combination_percentage)
403
+ array_combinations = np.array(array_list_combinations)
404
+ array_combinations[array_combinations > 0.02] = -np.inf
405
+
406
+ grid_comply = np.where(np.all(array_combinations==-np.inf, axis=0))[0]
407
+ if grid_comply.size != 0:
408
+ grid_comply = np.array(results.sun_up_hours)[grid_comply]
409
+ fail_to_comply[grid_info['name']] = \
410
+ [int(hoy) for hoy in grid_comply]
411
+
412
+ array_combinations_filter = filter_array2d(array_combinations, occ_mask)
413
+
414
+ max_indices = [int(i) for i in array_combinations_filter.argmax(axis=0)]
415
+ combinations = [combinations[idx] for idx in max_indices]
416
+ # merge the combinations of dicts
417
+ for combination in combinations:
418
+ for light_path, value in combination.items():
419
+ if light_path != '__static_apertures__':
420
+ grid_states_schedule[light_path].append(value)
421
+
422
+ del array_list_combinations, array_combinations, array_combinations_filter, combination_arrays
423
+
424
+ for key, value in grid_states_schedule.items():
425
+ if key not in states_schedule:
426
+ states_schedule[key] = value
427
+ else:
428
+ if use_states:
429
+ merged_array = np.logical_or(np.array(states_schedule[key]), np.array(value)).astype(int)
430
+ else:
431
+ merged_array = np.minimum(np.array(states_schedule[key]), np.array(value))
432
+ states_schedule[key] = merged_array
433
+
434
+ occupancy_hoys = schedule_to_hoys(schedule, results.sun_up_hours)
435
+
436
+ # map states to 8760 values
437
+ if use_states:
438
+ aperture_group_schedules = []
439
+ for identifier, values in states_schedule.items():
440
+ mapped_states = results.values_to_annual(
441
+ occupancy_hoys, values, results.timestep, dtype=np.int32)
442
+ aperture_group_schedules.append(
443
+ ApertureGroupSchedule(identifier, mapped_states.tolist())
444
+ )
445
+ states_schedule = \
446
+ DynamicSchedule.from_group_schedules(aperture_group_schedules)
447
+ else:
448
+ for light_path, shd_trans in states_schedule.items():
449
+ mapped_states = results.values_to_annual(
450
+ occupancy_hoys, shd_trans, results.timestep)
451
+ states_schedule[light_path] = mapped_states
452
+
453
+ return states_schedule, fail_to_comply, shd_trans_dict
454
+
455
+
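As a usage sketch for the schedule function above (the results folder path is a placeholder), the schedule, the hours failing the 2% rule, and the shade transmittance per aperture group can be obtained directly from an annual daylight results folder:

    from honeybee_radiance_postprocess.results.annual_daylight import AnnualDaylight
    from honeybee_radiance_postprocess.leed.leed import leed_states_schedule

    results = AnnualDaylight('path/to/annual_daylight/results')  # placeholder folder
    states_schedule, fail_to_comply, shd_trans_dict = leed_states_schedule(
        results, grids_filter='*', shade_transmittance=0.05)
    if fail_to_comply:
        # hours of the year where no shading configuration satisfies the 2% rule
        print(fail_to_comply)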
456
+ def leed_option_one(
457
+ results: Union[str, AnnualDaylight], grids_filter: str = '*',
458
+ shade_transmittance: Union[float, dict] = 0.05,
459
+ use_states: bool = False, states_schedule: dict = None,
460
+ threshold: float = 300, direct_threshold: float = 1000,
461
+ occ_hours: int = 250, target_time: float = 50, sub_folder: str = None):
462
+ """Calculate credits for LEED v4.1 Daylight Option 1.
463
+
464
+ Args:
465
+ results: Path to results folder or a Results class object.
466
+ grids_filter: The name of a grid or a pattern to filter the grids.
467
+ Defaults to '*'.
468
+ shade_transmittance: A value to use as a multiplier in place of solar
469
+ shading. This input can be either a single value that will be used
470
+ for all aperture groups, or a dictionary where aperture groups are
471
+ keys, and the value for each key is the shade transmittance. Values
472
+ for shade transmittance must be 1 > value > 0.
473
+ Defaults to 0.05.
474
+ use_states: A boolean to note whether to use the simulated states. Set
475
+ to True to use the simulated states. The default is False which will
476
+ use the shade transmittance instead.
477
+ states_schedule: A custom dictionary of shading states. In case this is
478
+ left empty, the function will calculate a shading schedule by using
479
+ the shade_transmittance input. If a states schedule is provided it
480
+ will check that it complies with the 2% rule. Defaults to None.
481
+ threshold: Threshold value for daylight autonomy. Default: 300.
482
+ direct_threshold: The threshold that determines if a sensor is overlit.
483
+ Defaults to 1000.
484
+ occ_hours: The number of occupied hours that cannot receive more than
485
+ the direct_threshold. Defaults to 250.
486
+ target_time: A minimum threshold of occupied time (e.g. 50% of the
487
+ time), above which a given sensor passes and contributes to the
488
+ spatial daylight autonomy. Defaults to 50.
489
+ sub_folder: Relative path for a subfolder to write the output. If None,
490
+ the files will not be written. Defaults to None.
491
+
492
+ Returns:
493
+ Tuple:
494
+ - summary: Summary of all grids combined.
495
+ - summary_grid: Summary of each grid individually.
496
+ - da_grids: List of daylight autonomy values for each grid. Each item
497
+ in the list is a NumPy array of DA values.
498
+ - hours_above: List of hours above 1000 direct illuminance (with
499
+ default states) for each grid. Each item in the list is a NumPy
500
+ array of hours above 1000 lux.
501
+ - states_schedule: A dictionary of annual shading schedules for each
502
+ aperture group.
503
+ - fail_to_comply: A dictionary with the hoys where the 2% rule failed.
504
+ - grids_info: Grid information.
505
+ """
506
+ # use default leed occupancy schedule
507
+ schedule = occupancy_schedule_8_to_6(as_list=True)
508
+
509
+ if not isinstance(results, AnnualDaylight):
510
+ results = AnnualDaylight(results, schedule=schedule, cache_arrays=True)
511
+ else:
512
+ # set schedule to default leed schedule
513
+ results.schedule = schedule
514
+
515
+ occ_mask = results.occ_mask
516
+ total_occ = results.total_occ
517
+
518
+ grids_info = results._filter_grids(grids_filter=grids_filter)
519
+
520
+ if not states_schedule:
521
+ states_schedule, fail_to_comply, shd_trans_dict = \
522
+ leed_states_schedule(results, grids_filter=grids_filter,
523
+ shade_transmittance=shade_transmittance, use_states=use_states)
524
+ else:
525
+ raise NotImplementedError(
526
+ 'Custom input for argument states_schedule is not yet implemented.'
527
+ )
528
+
529
+ # check to see if there is an HBJSON with sensor grid meshes for areas
530
+ grid_areas, units_conversion = [], 1
531
+ for base_file in Path(results.folder).parent.iterdir():
532
+ if base_file.suffix in ('.hbjson', '.hbpkl'):
533
+ hb_model = Model.from_file(base_file)
534
+ units_conversion = conversion_factor_to_meters(hb_model.units)
535
+ filt_grids = _filter_by_pattern(
536
+ hb_model.properties.radiance.sensor_grids, filter=grids_filter)
537
+ for s_grid in filt_grids:
538
+ if s_grid.mesh is not None:
539
+ grid_areas.append(s_grid.mesh.face_areas)
540
+ grid_areas = [np.array(grid) for grid in grid_areas]
541
+ break
542
+ if not grid_areas:
543
+ grid_areas = [None] * len(grids_info)
544
+
545
+ # annual sunlight exposure
546
+ ase_grids = []
547
+ hours_above = []
548
+ pass_ase_grids = []
549
+ ase_hr_pct = []
550
+ for (grid_info, grid_area) in zip(grids_info, grid_areas):
551
+ light_paths = []
552
+ for lp in grid_info['light_path']:
553
+ for _lp in lp:
554
+ if _lp == '__static_apertures__' and len(lp) > 1:
555
+ pass
556
+ else:
557
+ light_paths.append(_lp)
558
+ arrays = []
559
+ # combine direct array for all light paths
560
+ for light_path in light_paths:
561
+ array = results._get_array(
562
+ grid_info, light_path, res_type='direct')
563
+ array_filter = filter_array2d(array, occ_mask)
564
+ arrays.append(array_filter)
565
+ array = sum(arrays)
566
+ # calculate ase per grid
567
+ ase_grid, h_above = ase_array2d(
568
+ array, occ_hours=occ_hours, direct_threshold=direct_threshold)
569
+
570
+ # calculate the number of sensor points above 1000 lux for each hour
571
+ ase_hr_pct.append(
572
+ _ase_hourly_percentage(
573
+ results, array, grid_info, direct_threshold=direct_threshold,
574
+ grid_area=grid_area
575
+ )
576
+ )
577
+
578
+ ase_grids.append(ase_grid)
579
+ hours_above.append(h_above)
580
+ pass_ase = h_above < occ_hours
581
+ pass_ase_grids.append(pass_ase)
582
+ results.clear_cached_arrays(res_type='direct') # don't need direct arrays
583
+
584
+ # spatial daylight autonomy
585
+ da_grids = []
586
+ pass_sda_grids = []
587
+ pass_sda_blinds_up_grids = []
588
+ pass_sda_blinds_down_grids = []
589
+ for grid_info in grids_info:
590
+ light_paths = []
591
+ for lp in grid_info['light_path']:
592
+ for _lp in lp:
593
+ if _lp == '__static_apertures__' and len(lp) > 1:
594
+ pass
595
+ else:
596
+ light_paths.append(_lp)
597
+ base_zero_array = filter_array2d(
598
+ np.zeros((grid_info['count'], len(results.sun_up_hours))), occ_mask)
599
+ arrays = [base_zero_array.copy()]
600
+ arrays_blinds_up = [base_zero_array.copy()]
601
+ arrays_blinds_down = [base_zero_array.copy()]
602
+ # combine total array for all light paths
603
+ if use_states:
604
+ array = results._array_from_states(grid_info, states=states_schedule, zero_array=True)
605
+ array = filter_array2d(array, occ_mask)
606
+
607
+ for light_path in light_paths:
608
+ # do an extra pass to calculate with blinds always up or down
609
+ if light_path != '__static_apertures__':
610
+ array_blinds_up = results._get_array(
611
+ grid_info, light_path, state=0, res_type='total')
612
+ array_filter = filter_array2d(array_blinds_up, occ_mask)
613
+ arrays_blinds_up.append(array_filter)
614
+ array_blinds_down = results._get_array(
615
+ grid_info, light_path, state=1, res_type='total')
616
+ array_filter = filter_array2d(array_blinds_down, occ_mask)
617
+ arrays_blinds_down.append(array_filter)
619
+ else:
620
+ static_array = results._get_array(
621
+ grid_info, light_path, state=0, res_type='total')
622
+ array_filter = filter_array2d(static_array, occ_mask)
623
+ arrays.append(array_filter)
624
+ arrays_blinds_up.append(array_filter)
625
+ arrays_blinds_down.append(array_filter)
626
+ else:
627
+ for light_path in light_paths:
628
+ array = results._get_array(
629
+ grid_info, light_path, res_type='total')
630
+ array_filter = filter_array2d(array, occ_mask)
631
+ if light_path != '__static_apertures__':
632
+ sun_up_hours = np.array(results.sun_up_hours).astype(int)
633
+ shd_trans_array = states_schedule[light_path][sun_up_hours]
634
+ shd_trans_array = shd_trans_array[occ_mask.astype(bool)]
635
+ arrays.append(array_filter * shd_trans_array)
636
+ arrays_blinds_up.append(array_filter)
637
+ arrays_blinds_down.append(
638
+ array_filter * shd_trans_dict[light_path])
639
+ else:
640
+ arrays.append(array_filter)
641
+ arrays_blinds_up.append(array_filter)
642
+ arrays_blinds_down.append(array_filter)
643
+ array = sum(arrays)
644
+
645
+ array_blinds_up = sum(arrays_blinds_up)
646
+ array_blinds_down = sum(arrays_blinds_down)
647
+ # calculate da per grid
648
+ da_grid = da_array2d(array, total_occ=total_occ, threshold=threshold)
649
+ da_grids.append(da_grid)
650
+ da_blinds_up_grid = da_array2d(
651
+ array_blinds_up, total_occ=total_occ, threshold=threshold)
652
+ da_blinds_down_grid = da_array2d(
653
+ array_blinds_down, total_occ=total_occ, threshold=threshold)
654
+ # calculate sda per grid
655
+ pass_sda_grids.append(da_grid >= target_time)
656
+ pass_sda_blinds_up_grids.append(da_blinds_up_grid >= target_time)
657
+ pass_sda_blinds_down_grids.append(da_blinds_down_grid >= target_time)
658
+ results.clear_cached_arrays(res_type='total')
659
+
660
+ # create summaries for all grids and each grid individually
661
+ summary, summary_grid = _leed_summary(
662
+ pass_ase_grids, pass_sda_grids, grids_info, grid_areas,
663
+ pass_sda_blinds_up_grids, pass_sda_blinds_down_grids)
664
+
665
+ # credits
666
+ if not fail_to_comply:
667
+ if summary['sda'] >= 75:
668
+ summary['credits'] = 3
669
+ elif summary['sda'] >= 55:
670
+ summary['credits'] = 2
671
+ elif summary['sda'] >= 40:
672
+ summary['credits'] = 1
673
+ else:
674
+ summary['credits'] = 0
675
+
676
+ if all(grid_summary['sda'] >= 55 for grid_summary in summary_grid.values()):
677
+ if summary['credits'] <= 2:
678
+ summary['credits'] += 1
679
+ else:
680
+ summary['credits'] = 'Exemplary performance'
681
+ else:
682
+ summary['credits'] = 0
683
+ fail_to_comply_rooms = ', '.join(list(fail_to_comply.keys()))
684
+ note = (
685
+ '0 credits have been awarded. The following sensor grids have at '
686
+ 'least one hour where more than 2% of the floor area receives direct '
687
+ f'illuminance of 1000 lux or more: {fail_to_comply_rooms}.'
688
+ )
689
+ summary['note'] = note
690
+
691
+ # convert to datacollection
692
+ def to_datacollection(aperture_group: str, values: np.ndarray):
693
+ # convert values to 0 and 1 (0 = no shading, 1 = shading)
694
+ if use_states:
695
+ header = Header(data_type=GenericType(aperture_group, ''), unit='',
696
+ analysis_period=AnalysisPeriod())
697
+ hourly_data = HourlyContinuousCollection(header=header, values=values)
698
+ else:
699
+ values[values == 1] = 0
700
+ values[values == shd_trans_dict[aperture_group]] = 1
701
+ header = Header(data_type=GenericType(aperture_group, ''), unit='',
702
+ analysis_period=AnalysisPeriod(),
703
+ metadata={'Shade Transmittance': shd_trans_dict[aperture_group]})
704
+ hourly_data = HourlyContinuousCollection(header=header, values=values.tolist())
705
+ return hourly_data.to_dict()
706
+
707
+ if use_states:
708
+ states_schedule = {
709
+ k: to_datacollection(k, v['schedule']) for k,
710
+ v in states_schedule.to_dict().items()}
711
+ else:
712
+ states_schedule = {k:to_datacollection(k, v) for k, v in states_schedule.items()}
713
+
714
+ if sub_folder:
715
+ folder = Path(sub_folder)
716
+ folder.mkdir(parents=True, exist_ok=True)
717
+
718
+ summary_file = folder.joinpath('summary.json')
719
+ summary_file.write_text(json.dumps(summary, indent=2))
720
+ summary_grid_file = folder.joinpath('summary_grid.json')
721
+ summary_grid_file.write_text(json.dumps(summary_grid, indent=2))
722
+ states_schedule_file = folder.joinpath('states_schedule.json')
723
+ states_schedule_file.write_text(json.dumps(states_schedule))
724
+ grids_info_file = folder.joinpath('grids_info.json')
725
+ grids_info_file.write_text(json.dumps(grids_info, indent=2))
726
+
727
+ for (da, h_above, ase_hr_p, grid_info) in \
728
+ zip(da_grids, hours_above, ase_hr_pct, grids_info):
729
+ grid_id = grid_info['full_id']
730
+ da_file = folder.joinpath('results', 'da', f'{grid_id}.da')
731
+ da_file.parent.mkdir(parents=True, exist_ok=True)
732
+ hours_above_file = folder.joinpath(
733
+ 'results', 'ase_hours_above', f'{grid_id}.res')
734
+ hours_above_file.parent.mkdir(parents=True, exist_ok=True)
735
+ ase_hr_p_file = folder.joinpath(
736
+ 'datacollections', 'ase_percentage_above', f'{grid_id}.json')
737
+ ase_hr_p_file.parent.mkdir(parents=True, exist_ok=True)
738
+ np.savetxt(da_file, da, fmt='%.2f')
739
+ np.savetxt(hours_above_file, h_above, fmt='%.0f')
740
+ ase_hr_p_file.write_text(json.dumps(ase_hr_p.to_dict()))
741
+
742
+ da_grids_info_file = folder.joinpath(
743
+ 'results', 'da', 'grids_info.json')
744
+ da_grids_info_file.write_text(json.dumps(grids_info, indent=2))
745
+ ase_grids_info_file = folder.joinpath(
746
+ 'results', 'ase_hours_above', 'grids_info.json')
747
+ ase_grids_info_file.write_text(json.dumps(grids_info, indent=2))
748
+ ase_hr_pct_info_file = folder.joinpath(
749
+ 'datacollections', 'ase_percentage_above', 'grids_info.json')
750
+ ase_hr_pct_info_file.write_text(json.dumps(grids_info, indent=2))
751
+
752
+ states_schedule_err_file = \
753
+ folder.joinpath('states_schedule_err.json')
754
+ states_schedule_err_file.write_text(json.dumps(fail_to_comply))
755
+
756
+ pf_folder = folder.joinpath('pass_fail')
757
+ pf_folder.mkdir(parents=True, exist_ok=True)
758
+ for pass_sda_grid, pass_ase_grid, grid_info in zip(
759
+ pass_sda_grids, pass_ase_grids, grids_info):
760
+ grid_id = grid_info['full_id']
761
+ da_pf_folder = pf_folder.joinpath('DA')
762
+ da_pf_folder.mkdir(parents=True, exist_ok=True)
763
+ da_pf_file = da_pf_folder.joinpath(f'{grid_id}.pf')
764
+ pass_sda_grid = pass_sda_grid.astype(int)
765
+ np.savetxt(da_pf_file, pass_sda_grid, fmt='%d')
766
+ grids_info_file = da_pf_folder.joinpath('grids_info.json')
767
+ grids_info_file.write_text(json.dumps(grids_info, indent=2))
768
+
769
+ ase_pf_folder = pf_folder.joinpath('ASE')
770
+ ase_pf_folder.mkdir(parents=True, exist_ok=True)
771
+ ase_pf_file = ase_pf_folder.joinpath(f'{grid_id}.pf')
772
+ pass_ase_grid = pass_ase_grid.astype(int)
773
+ np.savetxt(ase_pf_file, pass_ase_grid, fmt='%d')
774
+ grids_info_file = ase_pf_folder.joinpath('grids_info.json')
775
+ grids_info_file.write_text(json.dumps(grids_info, indent=2))
776
+
777
+ return (summary, summary_grid, da_grids, hours_above, states_schedule,
778
+ fail_to_comply, grids_info)
779
+
780
+
781
+ def _leed_daylight_option_one_vis_metadata():
782
+ """Return visualization metadata for leed daylight option one."""
783
+ da_lpar = LegendParameters(min=0, max=100, colors=Colorset.annual_comfort())
784
+ ase_hrs_lpar = LegendParameters(min=0, max=250, colors=Colorset.original())
785
+
786
+ metric_info_dict = {
787
+ 'da': {
788
+ 'type': 'VisualizationMetaData',
789
+ 'data_type': Fraction('Daylight Autonomy').to_dict(),
790
+ 'unit': '%',
791
+ 'legend_parameters': da_lpar.to_dict()
792
+ },
793
+ 'ase_hours_above': {
794
+ 'type': 'VisualizationMetaData',
795
+ 'data_type': Time('Hours above direct threshold').to_dict(),
796
+ 'unit': 'hr',
797
+ 'legend_parameters': ase_hrs_lpar.to_dict()
798
+ }
799
+ }
800
+
801
+ return metric_info_dict
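
A minimal end-to-end sketch of the module's main entry point follows; the results folder and output sub-folder are placeholders, while the arguments and return order match the leed_option_one signature above:

    from honeybee_radiance_postprocess.leed.leed import leed_option_one

    (summary, summary_grid, da_grids, hours_above, states_schedule,
     fail_to_comply, grids_info) = leed_option_one(
        'path/to/annual_daylight/results',  # placeholder results folder
        grids_filter='*', shade_transmittance=0.05, use_states=False,
        threshold=300, direct_threshold=1000, occ_hours=250, target_time=50,
        sub_folder='leed_summary')  # when set, writes summary.json and related files
    print(summary['sda'], summary['ase'], summary['credits'])

When sub_folder is set, the function also writes the per-grid DA and ASE results, the hourly ASE data collections and the pass/fail masks to that folder, as shown in the source above.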