honeybee-radiance-postprocess 0.4.420__py2.py3-none-any.whl → 0.4.422__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- honeybee_radiance_postprocess/cli/leed.py +21 -7
- honeybee_radiance_postprocess/cli/postprocess.py +1 -1
- honeybee_radiance_postprocess/leed/__init__.py +1 -0
- honeybee_radiance_postprocess/{leed.py → leed/leed.py} +125 -199
- honeybee_radiance_postprocess/leed/leed_schedule.py +243 -0
- honeybee_radiance_postprocess/results/annual_daylight.py +2 -4
- honeybee_radiance_postprocess/results/results.py +39 -10
- {honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/METADATA +1 -1
- {honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/RECORD +13 -11
- {honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/LICENSE +0 -0
- {honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/WHEEL +0 -0
- {honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/entry_points.txt +0 -0
- {honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/top_level.txt +0 -0
honeybee_radiance_postprocess/cli/leed.py

@@ -5,7 +5,7 @@ import logging
 import os
 import click
 
-from ..leed import leed_option_one
+from ..leed.leed import leed_option_one
 from ..results.annual_daylight import AnnualDaylight
 
 _logger = logging.getLogger(__name__)
@@ -28,24 +28,32 @@ def leed():
 @click.option(
     '--shade-transmittance', '-st', help='A value to use as a multiplier in place of '
     'solar shading. Value for shade transmittance must be 1 > value > 0.',
-    default=0.
+    default=0.02, show_default=True, type=click.FLOAT
 )
 @click.option(
     '--shade-transmittance-file', '-stf', help='A JSON file with a dictionary '
     'where aperture groups are keys, and the value for each key is the shade '
     'transmittance. Values for shade transmittance must be 1 > value > 0. '
     'If any aperture groups are missing in the JSON file, its shade transmittance '
-    'value will be set to the value of the shade
+    'value will be set to the value of the --shade-transmittance option (0.02 by '
     'default).', default=None, show_default=True,
     type=click.Path(exists=False, file_okay=True, dir_okay=False, resolve_path=True)
 )
+@click.option(
+    '--use-shade-transmittance/--use-states', help='A flag to select if the '
+    'post-processing should use a shade transmittance or the simulated states '
+    'of aperture groups. Using states should only be selected if the annual '
+    'daylight simulation included ray tracing of a second (blind) state for '
+    'each aperture group.',
+    is_flag=True, default=True, show_default=True
+)
 @click.option(
     '--sub-folder', '-sf', help='Relative path for subfolder to write output '
-    'files.', default='leed_summary'
+    'files.', default='leed_summary', show_default=True
 )
 def daylight_option_one(
     folder, shade_transmittance, shade_transmittance_file, grids_filter,
-    sub_folder
+    use_shade_transmittance, sub_folder
 ):
     """Calculate credits for LEED v4.1 Daylight Option 1.
 
@@ -60,7 +68,12 @@ def daylight_option_one(
         folder: Results folder. This folder is an output folder of annual daylight
             recipe. The daylight simulation must include aperture groups.
     """
-
+    use_states = not use_shade_transmittance
+    if (
+        shade_transmittance_file
+        and os.path.isfile(shade_transmittance_file)
+        and use_shade_transmittance
+    ):
         with open(shade_transmittance_file) as json_file:
             shd_trans = json.load(json_file)
     results = AnnualDaylight(folder)
@@ -73,7 +86,8 @@ def daylight_option_one(
     try:
         leed_option_one(
             folder, grids_filter=grids_filter,
-            shade_transmittance=shade_transmittance,
+            shade_transmittance=shade_transmittance, use_states=use_states,
+            sub_folder=sub_folder
         )
     except Exception:
         _logger.exception('Failed to generate LEED summary.')
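The new `--use-shade-transmittance/--use-states` flag is translated into the `use_states` argument of `leed_option_one`. Below is a minimal, hypothetical sketch of the equivalent direct Python call (it assumes the package is installed; the results folder path is only an example and not part of the diff):

```python
# Hypothetical usage sketch: call the LEED post-processing directly with the
# new use_states option instead of going through the CLI command above.
from honeybee_radiance_postprocess.leed.leed import leed_option_one

results_folder = 'annual_daylight/results'  # example path to an annual daylight results folder
leed_option_one(
    results_folder, grids_filter='*',
    shade_transmittance=0.02,  # multiplier used by the shade-transmittance workflow
    use_states=True,           # use the simulated blind states of the aperture groups instead
    sub_folder='leed_summary'
)
```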
honeybee_radiance_postprocess/cli/postprocess.py

@@ -567,7 +567,7 @@ def point_in_time(
     default=1000, type=float, show_default=True
 )
 @click.option(
-    '--
+    '--occ-hours', '-oh', help='The number of occupied hours that cannot '
     'receive more than the direct_threshold.', default=250, type=int,
     show_default=True
 )
honeybee_radiance_postprocess/leed/__init__.py

@@ -0,0 +1 @@
+"""honeybee-radiance-postprocess library."""
honeybee_radiance_postprocess/{leed.py → leed/leed.py}

@@ -18,16 +18,18 @@ from honeybee.model import Model
 from honeybee.units import conversion_factor_to_meters
 from honeybee_radiance.writer import _filter_by_pattern
 from honeybee_radiance.postprocess.annual import filter_schedule_by_hours
-
-from
-from
-from .
+
+from ..metrics import da_array2d, ase_array2d
+from ..annual import schedule_to_hoys, occupancy_schedule_8_to_6
+from ..results.annual_daylight import AnnualDaylight
+from ..util import filter_array, recursive_dict_merge
+from ..dynamic import DynamicSchedule, ApertureGroupSchedule
+from .leed_schedule import shd_trans_schedule_descending, states_schedule_descending
 
 
 def _create_grid_summary(
         grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid, ase_grid,
-        pass_sda,
-        area_weighted=True):
+        pass_sda, pass_ase, total_floor, area_weighted=True):
     """Create a LEED summary for a single grid.
 
     Args:
@@ -141,8 +143,7 @@ def _leed_summary(
             grid_summary = \
                 _create_grid_summary(
                     grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid,
-                    ase_grid, area_pass_sda,
-                    area_pass_sda_blinds_down, area_pass_ase, total_grid_area,
+                    ase_grid, area_pass_sda, area_pass_ase, total_grid_area,
                     area_weighted=True
                 )
 
@@ -181,8 +182,7 @@ def _leed_summary(
             grid_summary = \
                 _create_grid_summary(
                     grid_info, sda_grid, sda_blinds_up_grid, sda_blinds_down_grid,
-                    ase_grid, sensor_count_pass_sda,
-                    sensor_count_pass_sda_blinds_down, sensor_count_pass_ase,
+                    ase_grid, sensor_count_pass_sda, sensor_count_pass_ase,
                     grid_count, area_weighted=False
                 )
 
@@ -291,12 +291,13 @@ def shade_transmittance_per_light_path(
             shade_transmittances[light_path].append(1)
             shd_trans_dict[light_path] = 1
 
-    return shade_transmittances
+    return shade_transmittances, shd_trans_dict
 
 
 def leed_states_schedule(
         results: Union[str, AnnualDaylight], grids_filter: str = '*',
-        shade_transmittance: Union[float, dict] = 0.05
+        shade_transmittance: Union[float, dict] = 0.05,
+        use_states: bool = False
 ) -> Tuple[dict, dict]:
     """Calculate a schedule of each aperture group for LEED compliant sDA.
 
@@ -315,6 +316,9 @@ def leed_states_schedule(
             keys, and the value for each key is the shade transmittance. Values
             for shade transmittance must be 1 > value > 0.
             Defaults to 0.05.
+        use_states: A boolean to note whether to use the simulated states. Set
+            to True to use the simulated states. The default is False which will
+            use the shade transmittance instead.
 
     Returns:
         Tuple: A tuple with a dictionary of the annual schedule and a
@@ -335,172 +339,51 @@ def leed_states_schedule(
     shd_trans_dict = {}
 
     for grid_info in grids_info:
-        grid_id = grid_info['full_id']
         grid_count = grid_info['count']
         light_paths = [lp[0] for lp in grid_info['light_path']]
 
-        shade_transmittances =
-
+        shade_transmittances, shd_trans_dict = (
+            shade_transmittance_per_light_path(
+                light_paths, shade_transmittance, shd_trans_dict
+            )
+        )
 
         if len(light_paths) > 6:
-
-
-
-
-
-
-
-
-                shd_trans_array.append(shade_transmittances[light_path][1])
-
-            # sum the array element-wise
-            full_direct_sum = sum(full_direct)
-
-            # create base list of shading combinations (all set to 1)
-            combinations = [
-                {light_path: 1 for light_path in light_paths} \
-                    for i in range(full_direct_sum.shape[1])
-            ]
-
-            # find the percentage of floor area >= 1000 lux
-            direct_pct_above = (full_direct_sum >= 1000).sum(axis=0) / grid_count
-
-            # find the indices where the percentage of floor area is > 2%
-            above_2_indices = np.where(direct_pct_above > 0.02)[0]
-
-            # get an array of only the relevant hours
-            direct_sum = np.take(full_direct_sum, above_2_indices, axis=1)
-
-            # get an array of only the relevant hours
-            direct = np.take(full_direct, above_2_indices, axis=2)
-
-            # get an array of only the relevant hours
-            thresh = np.take(full_thresh, above_2_indices, axis=1)
-
-            # sort and get indices. Negate the array to get descending order
-            sort_thresh = np.argsort(-thresh, axis=0).transpose()
-
-            _combinations = []
-            _combinations.insert(0, (np.arange(full_direct_sum.shape[1]), combinations))
-
-            if np.any(above_2_indices):
-                for idx in range(len(full_direct)):
-                    # take column
-                    sort_indices = np.take(sort_thresh, idx, axis=1)
-
-                    # map light path identifiers
-                    light_path_ids = np.take(light_paths, sort_indices)
-
-                    # get a list of shade transmittances
-                    shd_trans_array = np.take(shd_trans_array, sort_indices)
-
-                    # create combination for the subset
-                    _subset_combination = [
-                        {light_path: _shd_trans} for light_path, _shd_trans in \
-                            zip(light_path_ids, shd_trans_array)
-                    ]
-                    _combinations.insert(0, (above_2_indices, _subset_combination))
-
-                    # take the values from each array by indexing
-                    direct_array = \
-                        direct[sort_indices, :, range(len(sort_indices))].transpose()
-
-                    # subtract the illuminance values
-                    direct_sum = direct_sum - (direct_array * (1 - shd_trans_array))
-
-                    # find the percentage of floor area >= 1000 lux
-                    direct_pct_above = (direct_sum >= 1000).sum(axis=0) / grid_count
-
-                    # find the indices where the percentage of floor area is > 2%
-                    above_2_indices = np.where(direct_pct_above > 0.02)[0]
-
-                    # break if there are no hours above 2%
-                    if not np.any(above_2_indices):
-                        break
-
-                    # update variables for the next iteration
-                    direct_sum = np.take(direct_sum, above_2_indices, axis=1)
-                    direct = np.take(direct, above_2_indices, axis=2)
-                    thresh = np.take(thresh, above_2_indices, axis=1)
-                    sort_thresh = np.take(sort_thresh, above_2_indices, axis=0)
-                    shd_trans_array = np.take(shd_trans_array, above_2_indices)
-
-            if np.any(above_2_indices):
-                # take column
-                sort_indices = np.take(sort_thresh, idx, axis=1)
-
-                # map light path identifiers
-                light_path_ids = np.take(light_paths, sort_indices)
-
-                # get a list of shade transmittances
-                shd_trans_array = np.take(shd_trans_array, sort_indices)
-
-                # create combination for the subset
-                _subset_combination = [
-                    {light_path: _shd_trans} for light_path, _shd_trans in \
-                        zip(light_path_ids, shd_trans_array)
-                ]
-                _combinations.insert(0, (above_2_indices, _subset_combination))
-
-                # there are hours not complying with the 2% rule
-                previous_indices = []
-                previous_combination = []
-                grid_comply = []
-                # merge the combinations from the iterations of the subsets
-                for i, subset in enumerate(_combinations):
-                    if i == 0:
-                        previous_indices = subset[0]
-                    else:
-                        _indices = subset[0]
-                        grid_comply = []
-                        for _pr_idx in previous_indices:
-                            grid_comply.append(_indices[_pr_idx])
-                        previous_indices = grid_comply
-                # convert indices to sun up hours indices
-                filter_indices = np.where(occ_mask.astype(bool))[0]
-                grid_comply = [filter_indices[_gc] for _gc in grid_comply]
-                grid_comply = np.array(results.sun_up_hours)[grid_comply]
-                fail_to_comply[grid_info['name']] = \
-                    [int(hoy) for hoy in grid_comply]
-
-            previous_indices = None
-            previous_combination = None
-            # merge the combinations from the iterations of the subsets
-            for i, subset in enumerate(_combinations):
-                if i == 0:
-                    previous_indices, previous_combination = subset
-                else:
-                    _indices, _combination = subset
-                    for _pr_idx, _pr_comb in \
-                            zip(previous_indices, previous_combination):
-                        for light_path, _shd_trans in _pr_comb.items():
-                            _combination[_pr_idx][light_path] = _shd_trans
-                    previous_indices = _indices
-                    previous_combination = _combination
-
-            combinations = _combination
-
-            # merge the combinations of dicts
-            for combination in combinations:
-                for light_path, shd_trans in combination.items():
-                    if light_path != '__static_apertures__':
-                        states_schedule[light_path].append(shd_trans)
-
+            if use_states:
+                states_schedule, fail_to_comply = states_schedule_descending(
+                    results, grid_info, light_paths, occ_mask,
+                    states_schedule, fail_to_comply)
+            else:
+                states_schedule, fail_to_comply = shd_trans_schedule_descending(
+                    results, grid_info, light_paths, shade_transmittances, occ_mask,
+                    states_schedule, fail_to_comply)
         else:
-
-
+            if use_states:
+                combinations = results._get_state_combinations(grid_info)
+            else:
+                shade_transmittances, shd_trans_dict = shade_transmittance_per_light_path(
+                    light_paths, shade_transmittance, shd_trans_dict)
+                keys, values = zip(*shade_transmittances.items())
+                combinations = [dict(zip(keys, v)) for v in itertools.product(*values)]
 
             array_list_combinations = []
             for combination in combinations:
                 combination_arrays = []
-                for light_path,
-
-
-
-
+                for light_path, value in combination.items():
+                    if use_states:
+                        combination_arrays.append(
+                            results._get_array(grid_info, light_path, state=value,
+                                res_type='direct')
+                        )
                     else:
-
+                        array = results._get_array(
+                            grid_info, light_path, res_type='direct')
+                        if value == 1:
+                            combination_arrays.append(array)
+                        else:
+                            combination_arrays.append(array * value)
                 combination_array = sum(combination_arrays)
+
                 combination_percentage = \
                     (combination_array >= 1000).sum(axis=0) / grid_count
                 array_list_combinations.append(combination_percentage)
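For grids with six or fewer light paths, the new code enumerates every combination of per-light-path values with `itertools.product` (either the simulated states or the shade-transmittance values). A self-contained sketch of that enumeration pattern with made-up aperture group names:

```python
import itertools

# Build one dict per combination of per-light-path values, mirroring the
# pattern in the added code above. Names and values are illustrative only.
shade_transmittances = {
    'ApertureGroup_1': [1, 0.02],  # e.g. 1 = unshaded, 0.02 = shaded multiplier
    'ApertureGroup_2': [1, 0.02],
}
keys, values = zip(*shade_transmittances.items())
combinations = [dict(zip(keys, v)) for v in itertools.product(*values)]
print(len(combinations))  # 2 light paths x 2 values each -> 4 combinations
print(combinations[0])    # {'ApertureGroup_1': 1, 'ApertureGroup_2': 1}
```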
@@ -513,24 +396,36 @@ def leed_states_schedule(
                 fail_to_comply[grid_info['name']] = \
                     [int(hoy) for hoy in grid_comply]
 
-            array_combinations_filter =
-
+            array_combinations_filter = np.apply_along_axis(
+                filter_array, 1, array_combinations, occ_mask
+            )
             max_indices = array_combinations_filter.argmax(axis=0)
             # select the combination for each hour
             combinations = [combinations[idx] for idx in max_indices]
             # merge the combinations of dicts
             for combination in combinations:
-                for light_path,
+                for light_path, value in combination.items():
                     if light_path != '__static_apertures__':
-                        states_schedule[light_path].append(
+                        states_schedule[light_path].append(value)
 
     occupancy_hoys = schedule_to_hoys(schedule, results.sun_up_hours)
 
     # map states to 8760 values
-
-
-
-
+    if use_states:
+        aperture_group_schedules = []
+        for identifier, values in states_schedule.items():
+            mapped_states = results.values_to_annual(
+                occupancy_hoys, values, results.timestep, dtype=np.int32)
+            aperture_group_schedules.append(
+                ApertureGroupSchedule(identifier, mapped_states.tolist())
+            )
+        states_schedule = \
+            DynamicSchedule.from_group_schedules(aperture_group_schedules)
+    else:
+        for light_path, shd_trans in states_schedule.items():
+            mapped_states = results.values_to_annual(
+                occupancy_hoys, shd_trans, results.timestep)
+            states_schedule[light_path] = mapped_states
 
     return states_schedule, fail_to_comply, shd_trans_dict
 
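The unchanged `argmax` step above picks, for every occupied hour, the index of the combination with the largest filtered percentage before the chosen values are mapped to annual schedules. A small stand-alone illustration with made-up numbers:

```python
import numpy as np

# Rows are candidate state combinations, columns are occupied hours.
combination_percentages = np.array([
    [0.10, 0.40, 0.05],
    [0.30, 0.20, 0.25],
])
combinations = [{'ApertureGroup_1': 1}, {'ApertureGroup_1': 0.02}]
max_indices = combination_percentages.argmax(axis=0)          # array([1, 0, 1])
per_hour_choice = [combinations[idx] for idx in max_indices]  # one dict per hour
print(max_indices, per_hour_choice)
```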
@@ -538,9 +433,9 @@ def leed_states_schedule(
 def leed_option_one(
         results: Union[str, AnnualDaylight], grids_filter: str = '*',
         shade_transmittance: Union[float, dict] = 0.05,
-
-
-        target_time: float = 50, sub_folder: str = None):
+        use_states: bool = False, states_schedule: dict = None,
+        threshold: float = 300, direct_threshold: float = 1000,
+        occ_hours: int = 250, target_time: float = 50, sub_folder: str = None):
     """Calculate credits for LEED v4.1 Daylight Option 1.
 
     Args:
@@ -553,6 +448,9 @@ def leed_option_one(
            keys, and the value for each key is the shade transmittance. Values
            for shade transmittance must be 1 > value > 0.
            Defaults to 0.05.
+        use_states: A boolean to note whether to use the simulated states. Set
+            to True to use the simulated states. The default is False which will
+            use the shade transmittance instead.
        states_schedule: A custom dictionary of shading states. In case this is
            left empty, the function will calculate a shading schedule by using
            the shade_transmittance input. If a states schedule is provided it
@@ -598,7 +496,7 @@ def leed_option_one(
     if not states_schedule:
         states_schedule, fail_to_comply, shd_trans_dict = \
             leed_states_schedule(results, grids_filter=grids_filter,
-                shade_transmittance=shade_transmittance)
+                shade_transmittance=shade_transmittance, use_states=use_states)
     else:
         raise NotImplementedError(
             'Custom input for argument states_schedule is not yet implemented.'
@@ -663,22 +561,42 @@ def leed_option_one(
         arrays_blinds_up = []
         arrays_blinds_down = []
         # combine total array for all light paths
-
-        array = results.
-
-
-
-
-
-
-
-
-                arrays_blinds_down.append(array_filter * shd_trans_dict[light_path])
-            else:
-                arrays.append(array_filter)
+        if use_states:
+            array = results._array_from_states(grid_info, states=states_schedule)
+            array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+
+            for light_path in light_paths:
+                # do an extra pass to calculate with blinds always up or down
+                array_blinds_up = results._get_array(
+                    grid_info, light_path, state=0, res_type='total')
+                array_filter = np.apply_along_axis(
+                    filter_array, 1, array_blinds_up, occ_mask)
                 arrays_blinds_up.append(array_filter)
+                array_blinds_down = results._get_array(
+                    grid_info, light_path, state=1, res_type='total')
+                array_filter = np.apply_along_axis(
+                    filter_array, 1, array_blinds_down, occ_mask)
                 arrays_blinds_down.append(array_filter)
-
+        else:
+            for light_path in light_paths:
+                array = results._get_array(
+                    grid_info, light_path, res_type='total')
+                array_filter = np.apply_along_axis(
+                    filter_array, 1, array, occ_mask)
+                if light_path != '__static_apertures__':
+                    sun_up_hours = np.array(results.sun_up_hours).astype(int)
+                    shd_trans_array = states_schedule[light_path][sun_up_hours]
+                    shd_trans_array = shd_trans_array[occ_mask.astype(bool)]
+                    arrays.append(array_filter * shd_trans_array)
+                    arrays_blinds_up.append(array_filter)
+                    arrays_blinds_down.append(
+                        array_filter * shd_trans_dict[light_path])
+                else:
+                    arrays.append(array_filter)
+                    arrays_blinds_up.append(array_filter)
+                    arrays_blinds_down.append(array_filter)
+            array = sum(arrays)
+
         array_blinds_up = sum(arrays_blinds_up)
         array_blinds_down = sum(arrays_blinds_down)
         # calculate da per grid
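In the shade-transmittance branch above, the annual multiplier schedule of a light path is sampled at the sun-up hours, filtered to the occupied hours and applied column-wise to its total illuminance. A stand-alone sketch of that indexing with dummy data:

```python
import numpy as np

n_sensors = 3
sun_up_hours = np.array([8, 9, 10, 11])      # integer hours of year with sun
occ_mask = np.array([0, 1, 1, 1])            # occupancy over those sun-up hours
annual_schedule = np.ones(8760)              # 1 = blinds up for the whole year...
annual_schedule[9:11] = 0.02                 # ...except a shaded multiplier at hours 9-10

total = np.full((n_sensors, len(sun_up_hours)), 500.0)   # unshaded total illuminance
total_occ = total[:, occ_mask.astype(bool)]              # keep occupied hours only
shd_trans = annual_schedule[sun_up_hours][occ_mask.astype(bool)]
print(total_occ * shd_trans)                             # per-hour multiplier applied
```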
@@ -727,15 +645,23 @@ def leed_option_one(
     # convert to datacollection
     def to_datacollection(aperture_group: str, values: np.ndarray):
         # convert values to 0 and 1 (0 = no shading, 1 = shading)
-
-
-
-
-
-
+        if use_states:
+            header = Header(data_type=GenericType(aperture_group, ''), unit='',
+                            analysis_period=AnalysisPeriod())
+            hourly_data = HourlyContinuousCollection(header=header, values=values)
+        else:
+            values[values == 1] = 0
+            values[values == shd_trans_dict[aperture_group]] = 1
+            header = Header(data_type=GenericType(aperture_group, ''), unit='',
+                            analysis_period=AnalysisPeriod(),
+                            metadata={'Shade Transmittance': shd_trans_dict[aperture_group]})
+            hourly_data = HourlyContinuousCollection(header=header, values=values.tolist())
         return hourly_data.to_dict()
 
-
+    if use_states:
+        states_schedule = {k:to_datacollection(k, v['schedule']) for k, v in states_schedule.to_dict().items()}
+    else:
+        states_schedule = {k:to_datacollection(k, v) for k, v in states_schedule.items()}
 
     if sub_folder:
         folder = Path(sub_folder)
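`to_datacollection` wraps each aperture group schedule in a ladybug `HourlyContinuousCollection`. A stand-alone sketch of that wrapping with dummy values (ladybug-core is assumed to be installed; the group name is illustrative):

```python
from ladybug.analysisperiod import AnalysisPeriod
from ladybug.header import Header
from ladybug.datacollection import HourlyContinuousCollection
from ladybug.datatype.generic import GenericType

values = [0] * 8760  # one schedule value per hour of the year (dummy data)
header = Header(
    data_type=GenericType('ApertureGroup_1', ''), unit='',
    analysis_period=AnalysisPeriod()
)
hourly_data = HourlyContinuousCollection(header=header, values=values)
collection_dict = hourly_data.to_dict()  # serializable, as returned by to_datacollection
```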
honeybee_radiance_postprocess/leed/leed_schedule.py

@@ -0,0 +1,243 @@
+"""Module for dynamic LEED schedules."""
+from typing import Tuple
+import numpy as np
+
+from ..results.annual_daylight import AnnualDaylight
+from ..util import filter_array
+
+
+def shd_trans_schedule_descending(
+        results: AnnualDaylight, grid_info, light_paths, shade_transmittances, occ_mask,
+        states_schedule, fail_to_comply
+) -> Tuple[dict, dict]:
+    grid_count = grid_info['count']
+    full_direct = []
+    full_thresh = []
+    full_shd_trans_array = []
+    for light_path in light_paths:
+        array = results._get_array(grid_info, light_path, res_type="direct")
+        array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+        full_direct.append(array)
+        full_thresh.append((array >= 1000).sum(axis=0))
+        full_shd_trans_array.append(shade_transmittances[light_path][1])
+
+    # Sum the array element-wise.
+    # This array is the sum of all direct illuminance without shade
+    # transmittance.
+    full_direct_sum = sum(full_direct)
+
+    # Create base list of shading combinations (all set to 1).
+    # We will replace the 1s later.
+    combinations = [
+        {light_path: 1 for light_path in light_paths}
+        for i in range(full_direct_sum.shape[1])
+    ]
+
+    # Find the percentage of floor area >= 1000 lux.
+    # This array is the percentage for each hour (axis=0).
+    direct_pct_above = (full_direct_sum >= 1000).sum(axis=0) / grid_count
+
+    # Find the indices where the percentage of floor area is > 2%.
+    # This array is the problematic hours.
+    above_2_indices = np.where(direct_pct_above > 0.02)[0]
+
+    # Use the indices to get the relevant hours.
+    direct_sum = np.take(full_direct_sum, above_2_indices, axis=1)
+
+    # Use the indices to get the relevant hours.
+    direct = np.take(full_direct, above_2_indices, axis=2)
+
+    # Use the indices to get the relevant hours.
+    thresh = np.take(full_thresh, above_2_indices, axis=1)
+
+    # Sort and get indices. Negate the array to get descending order.
+    # Descending order puts the "highest offender" light path first.
+    sort_thresh = np.argsort(-thresh, axis=0).transpose()
+
+    _combinations = []
+    _combinations.insert(
+        0, (np.arange(full_direct_sum.shape[1]), combinations)
+    )
+
+    if np.any(above_2_indices):
+        # There are hours where the percentage of floor area is > 2%.
+        for idx, lp in enumerate(light_paths):
+            # Take column. For each iteration it will take the next column
+            # in descending order, i.e., the "highest offender" is the first
+            # column.
+            sort_indices = np.take(sort_thresh, idx, axis=1)
+
+            # Map light path identifiers to indices.
+            light_path_ids = np.take(light_paths, sort_indices)
+
+            # Map shade transmittance to indices.
+            shd_trans_array = np.take(full_shd_trans_array, sort_indices)
+
+            # Create combination for the subset.
+            _subset_combination = [
+                {light_path: _shd_trans} for light_path, _shd_trans in
+                zip(light_path_ids, shd_trans_array)
+            ]
+            _combinations.insert(0, (above_2_indices, _subset_combination))
+
+            # Take the values from each array by indexing.
+            direct_array = \
+                direct[sort_indices, :, range(len(sort_indices))].transpose()
+
+            # Subtract the illuminance values.
+            direct_sum = direct_sum - (direct_array * (1 - shd_trans_array))
+
+            # Find the percentage of floor area >= 1000 lux.
+            direct_pct_above = (direct_sum >= 1000).sum(axis=0) / grid_count
+
+            # Find the indices where the percentage of floor area is > 2%.
+            above_2_indices = np.where(direct_pct_above > 0.02)[0]
+
+            # Break if there are no hours above 2%.
+            if not np.any(above_2_indices):
+                break
+
+            # Update variables for the next iteration.
+            direct_sum = np.take(direct_sum, above_2_indices, axis=1)
+            direct = np.take(direct, above_2_indices, axis=2)
+            thresh = np.take(thresh, above_2_indices, axis=1)
+            sort_thresh = np.take(sort_thresh, above_2_indices, axis=0)
+
+    if np.any(above_2_indices):
+        # There are hours not complying with the 2% rule.
+        previous_indices = []
+        previous_combination = []
+        grid_comply = []
+        # Merge the combinations from the iterations of the subsets.
+        for i, subset in enumerate(_combinations):
+            if i == 0:
+                previous_indices = subset[0]
+            else:
+                _indices = subset[0]
+                grid_comply = []
+                for _pr_idx in previous_indices:
+                    grid_comply.append(_indices[_pr_idx])
+                previous_indices = grid_comply
+        # Convert indices to sun up hours indices.
+        filter_indices = np.where(occ_mask.astype(bool))[0]
+        grid_comply = [filter_indices[_gc] for _gc in grid_comply]
+        grid_comply = np.array(results.sun_up_hours)[grid_comply]
+        fail_to_comply[grid_info['name']] = \
+            [int(hoy) for hoy in grid_comply]
+
+    previous_indices = None
+    previous_combination = None
+    # Merge the combinations from the iterations of the subsets.
+    for i, subset in enumerate(_combinations):
+        if i == 0:
+            previous_indices, previous_combination = subset
+        else:
+            _indices, _combination = subset
+            for _pr_idx, _pr_comb in \
+                    zip(previous_indices, previous_combination):
+                for light_path, _shd_trans in _pr_comb.items():
+                    _combination[_pr_idx][light_path] = _shd_trans
+            previous_indices = _indices
+            previous_combination = _combination
+
+    combinations = _combination
+
+    # Merge the combinations of dicts.
+    for combination in combinations:
+        for light_path, shd_trans in combination.items():
+            if light_path != "__static_apertures__":
+                states_schedule[light_path].append(shd_trans)
+
+    return states_schedule, fail_to_comply
+
+
+def states_schedule_descending(
+        results: AnnualDaylight, grid_info, light_paths, occ_mask,
+        states_schedule, fail_to_comply
+) -> Tuple[dict, dict]:
+    grid_count = grid_info['count']
+    full_direct = []
+    full_thresh = []
+    full_direct_blinds = []
+    for light_path in light_paths:
+        array = results._get_array(
+            grid_info, light_path, state=0, res_type="direct")
+        array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+        full_direct.append(array)
+        full_thresh.append((array >= 1000).sum(axis=0))
+
+        array = results._get_array(
+            grid_info, light_path, state=1, res_type="direct")
+        array = np.apply_along_axis(filter_array, 1, array, occ_mask)
+        full_direct_blinds.append(array)
+
+    full_direct = np.array(full_direct)
+    full_direct_blinds = np.array(full_direct_blinds)
+    full_direct_sum = full_direct.sum(axis=0)
+
+    new_array = full_direct.copy()
+
+    percentage_sensors = (full_direct_sum >= 1000).sum(axis=0) / grid_count
+    if not np.any(percentage_sensors > 0.02):
+        combinations = [
+            {light_path: 0 for light_path in light_paths}
+            for i in range(full_direct_sum.shape[1])]
+    else:
+        tracking_array = np.zeros(
+            (new_array.shape[0], new_array.shape[2]), dtype=int)
+
+        percentage_sensors = (full_direct >= 1000).sum(axis=1) / grid_count
+
+        ranking_indices = np.argsort(-percentage_sensors, axis=0)
+
+        for rank in range(ranking_indices.shape[0]):
+            # Calculate the percentage of sensors with values >= 1000 for the current new_array
+            summed_array = np.sum(new_array, axis=0)
+            percentage_sensors_summed = np.sum(
+                summed_array >= 1000, axis=0) / grid_count
+            indices_above_2_percent = np.where(
+                percentage_sensors_summed > 0.02)[0]
+
+            # Exit if there are no more hours exceeding the threshold
+            if len(indices_above_2_percent) == 0:
+                break
+
+            # Array indices to use for replacement for these hours
+            replace_indices = indices_above_2_percent
+            array_indices = ranking_indices[rank, replace_indices]
+
+            # Use advanced indexing to replace values in new_array for these hours
+            for hour_idx, array_idx in zip(replace_indices, array_indices):
+                new_array[array_idx, :, hour_idx] = full_direct_blinds[
+                    array_idx, :, hour_idx
+                ]
+
+            # Update the tracking array
+            tracking_array[array_indices, replace_indices] = 1
+
+        combinations = []
+        for hour in range(new_array.shape[2]):
+            hour_dict = {
+                light_paths[i]: tracking_array[i, hour]
+                for i in range(tracking_array.shape[0])}
+            combinations.append(hour_dict)
+
+        final_summed_array = np.sum(new_array, axis=0)
+        final_percentage_sensors_summed = (
+            final_summed_array >= 1000).sum(
+            axis=0) / grid_count
+        final_indices_above_2_percent = np.where(
+            final_percentage_sensors_summed > 0.02)[0]
+        if np.any(final_indices_above_2_percent):
+            sun_up_hours_indices = np.where(occ_mask == 1)[0][
+                final_indices_above_2_percent]
+            grid_comply = np.array(results.sun_up_hours)[sun_up_hours_indices]
+            fail_to_comply[grid_info['name']] = [
+                int(hoy) for hoy in grid_comply]
+
+    for combination in combinations:
+        for light_path, value in combination.items():
+            if light_path != '__static_apertures__':
+                states_schedule[light_path].append(value)
+
+    return states_schedule, fail_to_comply
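The new `states_schedule_descending` ranks the light paths that push the most sensors above 1000 lux of direct sun and switches them to their blind state hour by hour until no hour has more than 2% of sensors above the threshold. A stand-alone toy version of that loop with random data (all shapes and numbers are made up):

```python
import numpy as np

rng = np.random.default_rng(0)
n_paths, n_sensors, n_hours = 3, 50, 6
blinds_up = rng.uniform(0, 1500, (n_paths, n_sensors, n_hours))  # direct illuminance, state 0
blinds_down = blinds_up * 0.05                                   # direct illuminance, state 1
new_array = blinds_up.copy()
tracking = np.zeros((n_paths, n_hours), dtype=int)               # 1 = blinds down

# rank light paths per hour by the fraction of sensors they push above 1000 lux
ranking = np.argsort(-(blinds_up >= 1000).sum(axis=1) / n_sensors, axis=0)
for rank in range(n_paths):
    pct = (new_array.sum(axis=0) >= 1000).sum(axis=0) / n_sensors
    hours_above = np.where(pct > 0.02)[0]
    if hours_above.size == 0:
        break  # every hour now complies with the 2% rule
    worst = ranking[rank, hours_above]
    for hour, path in zip(hours_above, worst):
        new_array[path, :, hour] = blinds_down[path, :, hour]
        tracking[path, hour] = 1
print(tracking)  # which light path has its blinds down at which hour
```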
honeybee_radiance_postprocess/results/annual_daylight.py

@@ -714,11 +714,9 @@ class AnnualDaylight(Results):
                 f'grid {grid_info["name"]}. Received {len(control_sensor)} '
                 'control sensors.')
         control_sensor_index = control_sensor[0]
-        light_paths = [lp[0] for lp in grid_info['light_path']]
-        lp_states = {key: self.valid_states[key] for key in light_paths if key in self.valid_states}
 
-
-
+        combinations = self._get_state_combinations(grid_info)
+
         array_list_combinations = []
         for combination in combinations:
             combination_arrays = []
honeybee_radiance_postprocess/results/results.py

@@ -4,6 +4,7 @@ from pathlib import Path
 from itertools import islice, cycle
 from typing import Tuple, Union, List
 import numpy as np
+import itertools
 
 from ladybug.analysisperiod import AnalysisPeriod
 from ladybug.datacollection import HourlyContinuousCollection
@@ -931,7 +932,8 @@ class Results(_ResultsFolder):
     def values_to_annual(
             hours: Union[List[float], np.ndarray],
             values: Union[List[float], np.ndarray],
-            timestep: int, base_value: int = 0
+            timestep: int, base_value: int = 0,
+            dtype: np.dtype = np.float32) -> np.ndarray:
         """Map a 1D NumPy array based on a set of hours to an annual array.
 
         This method creates an array with a base value of length 8760 and
@@ -945,6 +947,7 @@ class Results(_ResultsFolder):
                 regular list or a 1D NumPy array.
             timestep: Time step of the simulation.
             base_value: A value that will be applied for all the base array.
+            dtype: A NumPy dtype for the annual array.
 
         Returns:
             A 1D NumPy array.
@@ -955,7 +958,7 @@ class Results(_ResultsFolder):
         assert hours.shape == values.shape
         full_ap = AnalysisPeriod(timestep=timestep)
         indices = np.where(np.isin(full_ap.hoys, hours))[0]
-        annual_array = np.repeat(base_value, 8760 * timestep).astype(
+        annual_array = np.repeat(base_value, 8760 * timestep).astype(dtype)
         annual_array[indices] = values
 
         return annual_array
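The new `dtype` argument only changes the dtype of the base array that the sun-up-hour values are written into. A stand-alone sketch of the mapping itself, using a simple stand-in for `AnalysisPeriod.hoys`:

```python
import numpy as np

hours = np.array([8.5, 9.5, 10.5])   # hours of the year that have values
values = np.array([1, 0, 1])
timestep = 1
# stand-in for AnalysisPeriod(timestep=timestep).hoys used by the real method
full_hoys = np.arange(8760 * timestep) + 0.5
indices = np.where(np.isin(full_hoys, hours))[0]
annual_array = np.repeat(0, 8760 * timestep).astype(np.int32)  # e.g. dtype=np.int32
annual_array[indices] = values
print(annual_array[7:12])  # [0 1 0 1 0]
```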
@@ -1218,7 +1221,6 @@ class Results(_ResultsFolder):
         Returns:
             A NumPy array based on the states settings.
         """
-        grid_count = grid_info['count']
         # get states that are relevant for the grid
         states = self._filter_grid_states(grid_info, states=states)
 
@@ -1226,29 +1228,35 @@ class Results(_ResultsFolder):
         for light_path, gr_schedule in states.dynamic_schedule.items():
             if gr_schedule.is_static:
                 state = gr_schedule.schedule[0]
+                # if state is -1 we continue since it is "turned off"
                 if state == -1:
                     continue
+                # load static array (state is static)
                 array = self._get_array(
                     grid_info, light_path, state=state, res_type=res_type)
                 arrays.append(array)
             else:
-                # create default 0 array
-                array = np.zeros((
+                # create default 0 array, we will add to this later
+                array = np.zeros((grid_info['count'], len(self.sun_up_hours)))
                 # slice states to match sun up hours
-                states_array = np.array(gr_schedule.schedule)[
-
+                states_array = np.array(gr_schedule.schedule)[
+                    list(map(int, self.sun_up_hours))]
+                for state in np.unique(states_array):
                     if state == -1:
+                        # if state is -1 we continue since it is "turned off"
                         continue
+                    # load static array (state is static)
                     _array = self._get_array(
                         grid_info, light_path, state=state, res_type=res_type)
-
-
+                    # get indices and add values to base array
+                    states_indicies = states_array == state
+                    array[:, states_indicies] += _array[:, states_indicies]
                 arrays.append(array)
         array = sum(arrays)
 
         if not np.any(array):
             if zero_array:
-                array = np.zeros((
+                array = np.zeros((grid_info['count'], len(self.sun_up_hours)))
             else:
                 array = np.array([])
 
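The rewritten dynamic branch above builds one array per light path by masking the sun-up hours at which each state is active and adding the matching columns of that state's result array into a base array of zeros. A stand-alone sketch with dummy arrays:

```python
import numpy as np

n_sensors, n_sun_up_hours = 4, 6
states_array = np.array([0, 0, 1, -1, 1, 0])         # state per sun-up hour (-1 = off)
state_arrays = {
    0: np.full((n_sensors, n_sun_up_hours), 100.0),  # results when state 0 is active
    1: np.full((n_sensors, n_sun_up_hours), 5.0),    # results when state 1 is active
}
array = np.zeros((n_sensors, n_sun_up_hours))
for state in np.unique(states_array):
    if state == -1:
        continue  # "turned off" hours contribute nothing
    mask = states_array == state
    array[:, mask] += state_arrays[state][:, mask]
print(array[0])  # [100. 100.   5.   0.   5. 100.]
```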
@@ -1334,3 +1342,24 @@ class Results(_ResultsFolder):
             valid_states[light_path] = list(range(len(states)))
 
         return valid_states
+
+    def _light_paths_from_grid_info(self, grid_info: Union[dict, str]) -> list:
+        if isinstance(grid_info, str):
+            for _grid_info in self.grids_info:
+                if _grid_info['full_id'] == grid_info:
+                    grid_info = _grid_info
+                    break
+            else:
+                raise Exception(f'Grid info with full_id "{grid_info}" not found.')
+        light_paths = [elem for lp in grid_info['light_path'] for elem in lp]
+
+        return light_paths
+
+    def _get_state_combinations(self, grid_info: Union[dict, str]) -> List[dict]:
+        light_paths = self._light_paths_from_grid_info(grid_info)
+        valid_states = self._get_valid_states()
+        filtered_states = {lp: valid_states[lp] for lp in light_paths}
+        keys, values = zip(*filtered_states.items())
+        combinations = [dict(zip(keys, v)) for v in itertools.product(*values)]
+
+        return combinations
{honeybee_radiance_postprocess-0.4.420.dist-info → honeybee_radiance_postprocess-0.4.422.dist-info}/RECORD

@@ -8,7 +8,6 @@ honeybee_radiance_postprocess/dynamic.py,sha256=RPJh2SsjASYJCsG5QRkazVCvzWjzMxm9
 honeybee_radiance_postprocess/electriclight.py,sha256=E7uhq7-YtZ02F9a1FbEdrXnxmYJNOFnfLF0Yw3JLQ-g,732
 honeybee_radiance_postprocess/en17037.py,sha256=5c5ahfzad12FqMwBL7c0sLOKHzLKSTXtlYFfaNhzA3w,10848
 honeybee_radiance_postprocess/helper.py,sha256=qz5kaJxzy1tGBfVYYXc2cEToOCoj0YLOtwjr3LVI3YU,9000
-honeybee_radiance_postprocess/leed.py,sha256=Mx1fqNumEvLbQ1lb0HiFHVperbqtqBUjve-y4ZDdCJ4,35944
 honeybee_radiance_postprocess/metrics.py,sha256=6EHCuXf5jnhh6GglI9mTd0MFpfhfPFoKMf4b5gKRTMI,14038
 honeybee_radiance_postprocess/reader.py,sha256=6myKzfGC1pO8zPixg1kKrKjPihHabTKUh2t5BlJvij0,2367
 honeybee_radiance_postprocess/type_hints.py,sha256=4R0kZgacQrqzoh8Tq7f8MVzUDzynV-C_jlh80UV6GPE,1122
@@ -17,21 +16,24 @@ honeybee_radiance_postprocess/vis_metadata.py,sha256=7ywIgdiuNKcctxifhpy7-Q2oaSX
 honeybee_radiance_postprocess/cli/__init__.py,sha256=PVfwkuPFl4TnvQt8ovVm01JK0Alon81BaY-0tshAXyg,795
 honeybee_radiance_postprocess/cli/abnt.py,sha256=GNLmVVrEQ-1oKr5ZmBllY-KODhgJPjLVidQ_dQMcpFk,15537
 honeybee_radiance_postprocess/cli/grid.py,sha256=6peLEAPVe-iw05_wdRpFruZLqO8myvC-_QT5W1q5sk8,10677
-honeybee_radiance_postprocess/cli/leed.py,sha256=
+honeybee_radiance_postprocess/cli/leed.py,sha256=bxGX2UBehYNcaPJWHL2yEasSP6dATD7B0aNNQOflqqM,3712
 honeybee_radiance_postprocess/cli/mtxop.py,sha256=UZJnjNpPjDmShy1-Mxos4H2vTUqk_yP3ZyaC1_LLFeI,5015
-honeybee_radiance_postprocess/cli/postprocess.py,sha256=
+honeybee_radiance_postprocess/cli/postprocess.py,sha256=pzZ419eBt_LNNilW1y47c4lGTFxmV7cJyWnjV78GzY8,39202
 honeybee_radiance_postprocess/cli/schedule.py,sha256=6uIy98Co4zm-ZRcELo4Lfx_aN3lNiqPe-BSimXwt1F8,3877
 honeybee_radiance_postprocess/cli/translate.py,sha256=18zkcGeRZALJ5Z82NEB3XZ-iEX2cHyneobGWV-IXWE0,6789
 honeybee_radiance_postprocess/cli/two_phase.py,sha256=xA6ayPv26DM5fuMkLhBMYGklf_j5ymowmncwJGXRgo8,7034
 honeybee_radiance_postprocess/cli/util.py,sha256=Be9cGmYhcV2W37ma6SgQPCWCpWLLLlroxRYN_l58kY0,4077
 honeybee_radiance_postprocess/cli/viewfactor.py,sha256=kU36YRzLya5PReYREjTfw3zOcWKHYZjVlVclyuR7Cqk,5245
+honeybee_radiance_postprocess/leed/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
+honeybee_radiance_postprocess/leed/leed.py,sha256=rbvhq51Dfhx4aZFcKWqOXp3FfgFy5YDRkHYn9wKHQpc,32631
+honeybee_radiance_postprocess/leed/leed_schedule.py,sha256=s3by1sv1DtOlCawvaMvnIDvEo5D8ATEJvWQ_rEeJIHg,9956
 honeybee_radiance_postprocess/results/__init__.py,sha256=1agBQbfT4Tf8KqSZzlfKYX8MeZryY4jJ1KB4HWqaDDk,182
-honeybee_radiance_postprocess/results/annual_daylight.py,sha256=
+honeybee_radiance_postprocess/results/annual_daylight.py,sha256=11d4J1iIuITKuoWyWa-2_2WdrHYBULC0YP-mWBWi4JQ,34724
 honeybee_radiance_postprocess/results/annual_irradiance.py,sha256=5zwrr4MNeHUebbSRpSBbscPOZUs2AHmYCQfIIbdYImY,8298
-honeybee_radiance_postprocess/results/results.py,sha256=
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
-honeybee_radiance_postprocess-0.4.
+honeybee_radiance_postprocess/results/results.py,sha256=ABb_S8kDPruhGkDsfREXMg6K0p8FRhAZ3QIRUZCQPAI,54888
+honeybee_radiance_postprocess-0.4.422.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+honeybee_radiance_postprocess-0.4.422.dist-info/METADATA,sha256=q3U1E3sHzUes8aIUvguDRv_c6wCmRusvHkIKdlqi_NA,2245
+honeybee_radiance_postprocess-0.4.422.dist-info/WHEEL,sha256=unfA4MOaH0icIyIA5oH6E2sn2Hq5zKtLlHsWapZGwes,110
+honeybee_radiance_postprocess-0.4.422.dist-info/entry_points.txt,sha256=gFtVPx6UItXt27GfEZZO00eOZChJJEL6JwGSAB_O3rs,96
+honeybee_radiance_postprocess-0.4.422.dist-info/top_level.txt,sha256=4-sFbzy7ewP2EDqJV3jeFlAFx7SuxtoBBELWaKAnLdA,30
+honeybee_radiance_postprocess-0.4.422.dist-info/RECORD,,