honeybee-radiance-postprocess 0.4.555__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- honeybee_radiance_postprocess/__init__.py +1 -0
- honeybee_radiance_postprocess/__main__.py +4 -0
- honeybee_radiance_postprocess/annual.py +73 -0
- honeybee_radiance_postprocess/annualdaylight.py +289 -0
- honeybee_radiance_postprocess/annualirradiance.py +35 -0
- honeybee_radiance_postprocess/breeam/__init__.py +1 -0
- honeybee_radiance_postprocess/breeam/breeam.py +552 -0
- honeybee_radiance_postprocess/cli/__init__.py +33 -0
- honeybee_radiance_postprocess/cli/abnt.py +392 -0
- honeybee_radiance_postprocess/cli/breeam.py +96 -0
- honeybee_radiance_postprocess/cli/datacollection.py +133 -0
- honeybee_radiance_postprocess/cli/grid.py +295 -0
- honeybee_radiance_postprocess/cli/leed.py +143 -0
- honeybee_radiance_postprocess/cli/merge.py +161 -0
- honeybee_radiance_postprocess/cli/mtxop.py +161 -0
- honeybee_radiance_postprocess/cli/postprocess.py +1092 -0
- honeybee_radiance_postprocess/cli/schedule.py +103 -0
- honeybee_radiance_postprocess/cli/translate.py +216 -0
- honeybee_radiance_postprocess/cli/two_phase.py +252 -0
- honeybee_radiance_postprocess/cli/util.py +121 -0
- honeybee_radiance_postprocess/cli/viewfactor.py +157 -0
- honeybee_radiance_postprocess/cli/well.py +110 -0
- honeybee_radiance_postprocess/data_type.py +102 -0
- honeybee_radiance_postprocess/dynamic.py +273 -0
- honeybee_radiance_postprocess/electriclight.py +24 -0
- honeybee_radiance_postprocess/en17037.py +304 -0
- honeybee_radiance_postprocess/helper.py +266 -0
- honeybee_radiance_postprocess/ies/__init__.py +1 -0
- honeybee_radiance_postprocess/ies/lm.py +224 -0
- honeybee_radiance_postprocess/ies/lm_schedule.py +248 -0
- honeybee_radiance_postprocess/leed/__init__.py +1 -0
- honeybee_radiance_postprocess/leed/leed.py +801 -0
- honeybee_radiance_postprocess/leed/leed_schedule.py +256 -0
- honeybee_radiance_postprocess/metrics.py +439 -0
- honeybee_radiance_postprocess/reader.py +80 -0
- honeybee_radiance_postprocess/results/__init__.py +4 -0
- honeybee_radiance_postprocess/results/annual_daylight.py +752 -0
- honeybee_radiance_postprocess/results/annual_irradiance.py +196 -0
- honeybee_radiance_postprocess/results/results.py +1416 -0
- honeybee_radiance_postprocess/type_hints.py +38 -0
- honeybee_radiance_postprocess/util.py +211 -0
- honeybee_radiance_postprocess/vis_metadata.py +49 -0
- honeybee_radiance_postprocess/well/__init__.py +1 -0
- honeybee_radiance_postprocess/well/well.py +509 -0
- honeybee_radiance_postprocess-0.4.555.dist-info/METADATA +79 -0
- honeybee_radiance_postprocess-0.4.555.dist-info/RECORD +50 -0
- honeybee_radiance_postprocess-0.4.555.dist-info/WHEEL +5 -0
- honeybee_radiance_postprocess-0.4.555.dist-info/entry_points.txt +2 -0
- honeybee_radiance_postprocess-0.4.555.dist-info/licenses/LICENSE +661 -0
- honeybee_radiance_postprocess-0.4.555.dist-info/top_level.txt +1 -0
@@ -0,0 +1,157 @@
|
|
1
|
+
"""Commands to compute view factors to geometry."""
|
2
|
+
import click
|
3
|
+
import os
|
4
|
+
import sys
|
5
|
+
import logging
|
6
|
+
import math
|
7
|
+
try:
|
8
|
+
import cupy as np
|
9
|
+
is_gpu = True
|
10
|
+
except ImportError:
|
11
|
+
is_gpu = False
|
12
|
+
import numpy as np
|
13
|
+
|
14
|
+
from honeybee_radiance.config import folders
|
15
|
+
|
16
|
+
from honeybee_radiance_command.rcontrib import Rcontrib, RcontribOptions
|
17
|
+
from honeybee_radiance_command._command_util import run_command
|
18
|
+
|
19
|
+
from ladybug.futil import preparedir
|
20
|
+
|
21
|
+
from honeybee_radiance_postprocess.reader import binary_to_array
|
22
|
+
|
23
|
+
_logger = logging.getLogger(__name__)
|
24
|
+
|
25
|
+
|
26
|
+
# Root Click group for the view-factor CLI. It carries no logic of its own;
# the `contrib` command below registers onto it via `@view_factor.command`.
@click.group(help='Commands to compute view factors to geometry.')
def view_factor():
    pass
|
29
|
+
|
30
|
+
|
31
|
+
@view_factor.command('contrib')
@click.argument(
    'octree', type=click.Path(exists=True, file_okay=True, resolve_path=True)
)
@click.argument(
    'sensor-grid', type=click.Path(exists=True, file_okay=True, resolve_path=True)
)
@click.argument(
    'modifiers', type=click.Path(exists=True, file_okay=True, resolve_path=True)
)
@click.option(
    '--ray-count', type=click.INT, default=6, show_default=True,
    help='The number of rays to be equally distributed over a sphere to compute '
    'the view factor for each of the input sensors.'
)
@click.option(
    '--rad-params', show_default=True, help='Radiance parameters.'
)
@click.option(
    '--rad-params-locked', show_default=True, help='Protected Radiance parameters. '
    'These values will overwrite user input rad parameters.'
)
@click.option(
    '--folder', default='.', help='Output folder into which the modifier and '
    'octree files will be written.'
)
@click.option(
    '--name', default='view_factor', help='File name of the view factor file.'
)
def rcontrib_command_with_view_postprocess(
    octree, sensor_grid, modifiers, ray_count, rad_params, rad_params_locked,
    folder, name
):
    """Run rcontrib to get spherical view factors from a sensor grid.

    This command is similar to the one in honeybee-radiance, but the
    post-processing is using NumPy.

    \b
    Args:
        octree: Path to octree file.
        sensor-grid: Path to sensor grid file.
        modifiers: Path to modifiers file.
    """
    try:
        # create the directory if it's not there
        if not os.path.isdir(folder):
            preparedir(folder)

        # generate the ray vectors to be used in the view factor calculation
        if ray_count == 6:
            rays = ((1, 0, 0), (0, 1, 0), (0, 0, 1), (-1, 0, 0), (0, -1, 0), (0, 0, -1))
        else:
            rays = _fibonacci_spiral(ray_count)
        ray_str = [' {} {} {}\n'.format(*ray) for ray in rays]

        # create a new .pts file with one line per sensor position per view vector
        ray_file = os.path.abspath(os.path.join(folder, '{}.pts'.format(name)))
        total_rays = 0
        with open(sensor_grid) as sg_file, open(ray_file, 'w') as r_file:
            for line in sg_file:
                position = line.split()[:3]
                if len(position) < 3:
                    # blank or malformed line (typically at the end of the
                    # file); skip it instead of swallowing arbitrary errors
                    continue
                pos_str = ' '.join(position)
                for ray in ray_str:
                    r_file.write(pos_str + ray)
                    total_rays += 1

        # set up the Rcontrib options
        options = RcontribOptions()
        if rad_params:  # parse input radiance parameters
            options.update_from_string(rad_params.strip())
        if rad_params_locked:  # overwrite input values with protected ones
            options.update_from_string(rad_params_locked.strip())
        # overwrite specific options that would otherwise break the command
        options.M = modifiers
        options.update_from_string('-I -V- -y {}'.format(total_rays))

        # create the rcontrib command and run it
        mtx_file = os.path.abspath(os.path.join(folder, '{}.mtx'.format(name)))
        rcontrib = Rcontrib(options=options, octree=octree, sensors=ray_file)
        cmd = rcontrib.to_radiance().replace('\\', '/')
        cmd = '{} | rmtxop -ff - -c .333 .333 .334 > "{}"'.format(cmd, mtx_file.replace('\\', '/'))
        run_command(cmd, env=folders.env)

        # load the resulting matrix and process the results into view factors;
        # each consecutive chunk of ray_count rows belongs to one sensor and
        # is averaged over the sphere (hence the division by pi * ray_count)
        array = binary_to_array(mtx_file)
        view_fac_mtx = []
        for i in range(0, len(array), ray_count):
            sens_chunk = array[i:i + ray_count]
            s_facs = np.sum(sens_chunk, axis=0) / (math.pi * ray_count)
            view_fac_mtx.append(s_facs)

        view_fac_mtx = np.stack(view_fac_mtx)

        np.save(os.path.join(folder, '{}'.format(name)), view_fac_mtx)

    except Exception:
        _logger.exception('Failed to compute view factor contributions.')
        sys.exit(1)
    else:
        sys.exit(0)
|
134
|
+
|
135
|
+
|
136
|
+
def _fibonacci_spiral(point_count=24):
|
137
|
+
"""Get points distributed uniformly across a unit spherical surface.
|
138
|
+
|
139
|
+
Args:
|
140
|
+
point_count: Integer for the number of points to be distributed.
|
141
|
+
|
142
|
+
Returns:
|
143
|
+
List of tuple, each with 3 values representing the XYZ coordinates of
|
144
|
+
the points that were generated.
|
145
|
+
"""
|
146
|
+
points = []
|
147
|
+
phi = math.pi * (3. - math.sqrt(5.))
|
148
|
+
|
149
|
+
for i in range(point_count):
|
150
|
+
y = 1 - (i / float(point_count - 1)) * 2
|
151
|
+
radius = math.sqrt(1 - y * y)
|
152
|
+
theta = phi * i
|
153
|
+
x = math.cos(theta) * radius
|
154
|
+
z = math.sin(theta) * radius
|
155
|
+
points.append((x, y, z))
|
156
|
+
|
157
|
+
return points
|
@@ -0,0 +1,110 @@
|
|
1
|
+
"""honeybee-radiance-postprocess WELL commands."""
|
2
|
+
import sys
|
3
|
+
import logging
|
4
|
+
import json
|
5
|
+
import os
|
6
|
+
import click
|
7
|
+
|
8
|
+
from ladybug.color import Color
|
9
|
+
from ladybug.datatype.generic import GenericType
|
10
|
+
from ladybug.legend import LegendParameters
|
11
|
+
|
12
|
+
from ..well.well import well_annual_daylight
|
13
|
+
|
14
|
+
_logger = logging.getLogger(__name__)
|
15
|
+
|
16
|
+
|
17
|
+
# Root Click group for WELL post-processing commands. It carries no logic of
# its own; the subcommands below register onto it via `@well.command`.
@click.group(help='Commands for WELL post-processing of Radiance results.')
def well():
    pass
|
20
|
+
|
21
|
+
|
22
|
+
@well.command('well-annual-daylight')
@click.argument(
    'folder',
    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True)
)
@click.argument(
    'daylight-hours',
    type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True)
)
@click.option(
    '--grids-filter', '-gf', help='A pattern to filter the grids.', default='*',
    show_default=True
)
@click.option(
    '--sub-folder', '-sf', help='Relative path for subfolder to write output '
    'files.', default='well_summary', show_default=True
)
def well_daylight(
    folder, daylight_hours, grids_filter, sub_folder
):
    """Calculate credits for WELL L06.

    \b
    Args:
        folder: Results folder. This folder is an output folder of annual daylight
            recipe. The daylight simulation must include aperture groups.
        daylight_hours: Daylight hours schedule for EN 17037.
    """
    try:
        # parse the hourly daylight-hours schedule (one value per line),
        # skipping blank lines so a trailing newline does not crash the parse
        with open(daylight_hours) as hourly_schedule:
            daylight_hours = [
                int(float(v)) for v in hourly_schedule if v.strip()
            ]
        well_annual_daylight(
            folder, daylight_hours, grids_filter=grids_filter, sub_folder=sub_folder
        )
    except Exception:
        _logger.exception('Failed to generate WELL summary.')
        sys.exit(1)
    else:
        sys.exit(0)
|
68
|
+
|
69
|
+
|
70
|
+
@well.command('well-daylight-vis-metadata')
@click.option(
    '--output-folder', '-o', help='Output folder for vis metadata files.',
    type=click.Path(exists=False, file_okay=False, dir_okay=True, resolve_path=True),
    default='visualization', show_default=True
)
def well_daylight_vis(output_folder):
    """Write visualization metadata files for WELL Daylight."""
    # two-color pass/fail legend shared by both metrics
    colors = [Color(220, 0, 0), Color(0, 220, 0)]
    pass_fail_lpar = \
        LegendParameters(min=0, max=1, colors=colors, segment_count=2, title='Pass/Fail')
    pass_fail_lpar.ordinal_dictionary = {0: "Fail", 1: "Pass"}

    metric_info_dict = {
        'L01': {
            'type': 'VisualizationMetaData',
            'data_type': GenericType('sDA200,40%', '').to_dict(),
            'unit': '',
            'legend_parameters': pass_fail_lpar.to_dict()
        },
        'L06': {
            'type': 'VisualizationMetaData',
            'data_type': GenericType('sDA300,50%', '').to_dict(),
            'unit': '',
            'legend_parameters': pass_fail_lpar.to_dict()
        }
    }
    try:
        for metric, data in metric_info_dict.items():
            # makedirs creates any missing parents and avoids the
            # check-then-create race of os.path.exists + os.mkdir
            metric_folder = os.path.join(output_folder, metric)
            os.makedirs(metric_folder, exist_ok=True)
            file_path = os.path.join(metric_folder, 'vis_metadata.json')
            with open(file_path, 'w') as fp:
                json.dump(data, fp, indent=4)
    except Exception:
        _logger.exception('Failed to write the visualization metadata files.')
        sys.exit(1)
    else:
        sys.exit(0)
|
@@ -0,0 +1,102 @@
|
|
1
|
+
"""Functions for NumPy data type (dtype)."""
|
2
|
+
from typing import Tuple
|
3
|
+
try:
|
4
|
+
import cupy as np
|
5
|
+
is_gpu = True
|
6
|
+
except ImportError:
|
7
|
+
is_gpu = False
|
8
|
+
import numpy as np
|
9
|
+
|
10
|
+
|
11
|
+
def smallest_integer_dtype(array: np.ndarray) -> np.signedinteger:
    """Return the smallest possible integer dtype.

    Args:
        array: NumPy array.

    Returns:
        A NumPy integer dtype.
    """
    # walk the signed integer dtypes from narrowest to widest and return
    # the first one whose range contains every value in the array
    for candidate in (np.int8, np.int16, np.int32, np.int64):
        bounds = np.iinfo(candidate)
        if np.all(array >= bounds.min) and np.all(array <= bounds.max):
            return candidate
|
32
|
+
|
33
|
+
|
34
|
+
def smallest_float_dtype(array: np.ndarray, rtol: float = 1e-5,
                         atol: float = 1e-5) -> np.floating:
    """Return the smallest possible float dtype.

    The allclose function is used to check if a certain floating-point precision
    can be used without losing accuracy.

    Args:
        array: NumPy array.
        rtol: The relative tolerance parameter for `np.allclose`. The default
            is 1e-5.
        atol: The absolute tolerance parameter for `np.allclose`. The default
            is 1e-5.

    Returns:
        A NumPy floating dtype.
    """
    # try each float width from narrowest to widest; a candidate is accepted
    # when all values fit in its range AND the round-trip cast stays within
    # the given tolerances
    for candidate in (np.float16, np.float32, np.float64):
        bounds = np.finfo(candidate)
        in_range = np.all((array >= bounds.min) & (array <= bounds.max))
        if in_range and np.allclose(
                array, array.astype(candidate), rtol=rtol, atol=atol):
            return candidate
|
63
|
+
|
64
|
+
|
65
|
+
def smallest_dtype(array: np.ndarray, rtol: float = 1e-5, atol: float = 1e-5
                   ) -> Tuple[np.signedinteger, np.floating]:
    """Return the smallest possible dtype.

    Args:
        array: NumPy array.
        rtol: The relative tolerance parameter for `np.allclose`. The default
            is 1e-5. This is also used if the dtype of the array is np.floating.
        atol: The absolute tolerance parameter for `np.allclose`. The default
            is 1e-5. This is also used if the dtype of the array is np.floating.

    Returns:
        A NumPy dtype.
    """
    # dispatch on the array's dtype family; anything that is neither a
    # signed/unsigned integer nor a float is rejected
    if np.issubdtype(array.dtype, np.floating):
        return smallest_float_dtype(array, rtol=rtol, atol=atol)
    if np.issubdtype(array.dtype, np.integer):
        return smallest_integer_dtype(array)
    raise TypeError(f'Expected integer or floating dtype. Got {array.dtype}')
|
85
|
+
|
86
|
+
|
87
|
+
def set_smallest_dtype(array: np.ndarray, rtol: float = 1e-5,
                       atol: float = 1e-5) -> np.ndarray:
    """Return a NumPy array with the smallest possible dtype.

    Args:
        array: NumPy array.
        rtol: The relative tolerance parameter for `np.allclose`. The default
            is 1e-5. This is also used if the dtype of the array is np.floating.
        atol: The absolute tolerance parameter for `np.allclose`. The default
            is 1e-5. This is also used if the dtype of the array is np.floating.

    Returns:
        A new NumPy array with a smaller dtype.
    """
    # find the narrowest dtype that preserves the values, then cast to it
    best_fit = smallest_dtype(array, rtol=rtol, atol=atol)
    return array.astype(best_fit)
|
@@ -0,0 +1,273 @@
|
|
1
|
+
"""Post-processing classes for dynamic schedules."""
|
2
|
+
import json
|
3
|
+
import os
|
4
|
+
import sys
|
5
|
+
from itertools import islice, cycle
|
6
|
+
|
7
|
+
from honeybee.config import folders
|
8
|
+
|
9
|
+
|
10
|
+
class ApertureGroupSchedule(object):
    """ApertureGroupSchedule.

    This class contains the dynamic states schedule of a single ApertureGroup.

    Args:
        identifier: Identifier of the ApertureGroup.
        schedule: A list or tuple of integer values. Schedules shorter than
            8760 values are repeated until a full year is covered; schedules
            longer than 8760 values are rejected.
        is_static: Optional boolean to note whether the schedule is static.
            If None, this is derived from the schedule values. (Default: None).

    Properties:
        * identifier
        * schedule
        * is_static
    """
    __slots__ = ('_identifier', '_schedule', '_is_static')

    def __init__(self, identifier, schedule, is_static=None):
        """Initialize ApertureGroupSchedule."""
        self._identifier = identifier
        self.schedule = schedule
        self.is_static = is_static

    @classmethod
    def from_dict(cls, data):
        """Initialize a ApertureGroupSchedule from a dictionary.

        Args:
            data: A dictionary representation of an ApertureGroupSchedule
                object.
        """
        return cls(data['identifier'], data['schedule'], data.get('is_static'))

    @property
    def identifier(self):
        """Return identifier."""
        return self._identifier

    @property
    def schedule(self):
        """Return ApertureGroup schedule."""
        return self._schedule

    @schedule.setter
    def schedule(self, schedule):
        assert isinstance(schedule, (list, tuple)), \
            ('Failed to add schedule. Received input of type: '
             '%s. Expected input of type: list or tuple.' % type(schedule))
        schedule = list(schedule) if isinstance(schedule, tuple) else schedule
        n_values = len(schedule)
        if n_values > 8760:
            raise ValueError(
                'The light path %s has %s values in '
                'its states schedule. Maximum allowed number of values '
                'is 8760.' % (self.identifier, n_values)
            )
        if n_values < 8760:
            # repeat the pattern until it covers a full year of hours
            schedule = list(islice(cycle(schedule), 8760))
        self._schedule = schedule

    @property
    def is_static(self):
        """Return True if schedule is static."""
        return self._is_static

    @is_static.setter
    def is_static(self, value):
        if value is None:
            # a schedule with a single unique value never changes state
            self._is_static = len(set(self.schedule)) == 1
        else:
            assert isinstance(value, bool)
            self._is_static = value

    def to_dict(self):
        """Return ApertureGroupSchedule as a dictionary."""
        return {
            'identifier': self.identifier,
            'schedule': self.schedule,
            'is_static': self.is_static
        }

    def ToString(self):
        """Overwrite .NET ToString."""
        return self.__repr__()

    def __repr__(self):
        return '%s: %s' % (self.__class__.__name__, self.identifier)
|
103
|
+
|
104
|
+
|
105
|
+
class DynamicSchedule(object):
    """DynamicSchedule.

    This class contains a single property (dynamic_schedule). This property is
    a dictionary where keys are ApertureGroup identifiers and the value for
    each key is the dynamic schedule for the ApertureGroup.

    Args:
        dynamic_schedule: A dictionary of ApertureGroup identifier and
            schedules.

    Properties:
        * dynamic_schedule
    """
    __slots__ = ('_dynamic_schedule',)

    def __init__(self, dynamic_schedule=None):
        """Initialize DynamicSchedule."""
        self.dynamic_schedule = dynamic_schedule

    @classmethod
    def from_group_schedules(cls, group_schedules):
        """Initialize a DynamicSchedule from a list of ApertureGroupSchedules.

        The method will automatically sense if there are duplicated groups in
        the list and ensure the group schedule only appears once.

        Args:
            group_schedules: A list of ApertureGroupSchedule objects.
        """
        dyn_sch = cls()
        dyn_sch_ids = set()
        for _ap_group in group_schedules:
            assert isinstance(_ap_group, ApertureGroupSchedule), \
                'Expected Aperture Group Schedule. Got {}'.format(type(_ap_group))
            if _ap_group.identifier not in dyn_sch_ids:
                dyn_sch_ids.add(_ap_group.identifier)
                dyn_sch.add_aperture_group_schedule(_ap_group)
        return dyn_sch

    @classmethod
    def from_dict(cls, data):
        """Initialize a DynamicSchedule from a dictionary.

        Args:
            data: A dictionary representation of a DynamicSchedule objects.
        """
        dynamic_schedule = {}
        for identifier, group in data.items():
            dynamic_schedule[identifier] = ApertureGroupSchedule.from_dict(group)
        return cls(dynamic_schedule)

    @classmethod
    def from_json(cls, json_file):
        """Initialize a DynamicSchedule from a JSON file.

        Args:
            json_file: Path to JSON file.
        """
        assert os.path.isfile(json_file), 'Failed to find %s' % json_file
        # Python 2's open() does not accept an encoding argument
        if sys.version_info < (3, 0):
            with open(json_file) as inf:
                data = json.load(inf)
        else:
            with open(json_file, encoding='utf-8') as inf:
                data = json.load(inf)
        return cls.from_dict(data)

    @property
    def dynamic_schedule(self):
        """Return dynamic schedule as a dictionary."""
        return self._dynamic_schedule

    @dynamic_schedule.setter
    def dynamic_schedule(self, dynamic_schedule):
        if not dynamic_schedule:
            dynamic_schedule = {}
        assert isinstance(dynamic_schedule, dict), \
            'Expected dictionary. Got %s.' % type(dynamic_schedule)
        self._dynamic_schedule = dynamic_schedule

    def add_aperture_group_schedule(self, aperture_group_schedule):
        """Add an ApertureGroupSchedule to the DynamicSchedule instance.

        Args:
            aperture_group_schedule: An ApertureGroupSchedule object.
        """
        assert isinstance(aperture_group_schedule, ApertureGroupSchedule), \
            ('Failed to add ApertureGroupSchedule. Received input of type: '
             '%s. Expected input of type: ApertureGroupSchedule.' \
             % type(aperture_group_schedule))
        identifier = aperture_group_schedule.identifier
        self.dynamic_schedule[identifier] = aperture_group_schedule

    def filter_by_identifiers(self, identifiers):
        """Filter the DynamicSchedule by identifiers.

        This method returns a filtered DynamicSchedule object. Identifiers
        with no matching group receive a static all-zero schedule.

        Args:
            identifiers: A list of identifiers.

        Returns:
            A filtered DynamicSchedule object.
        """
        filter_dyn_sch = DynamicSchedule()
        for identifier in identifiers:
            if identifier in self.dynamic_schedule:
                filter_dyn_sch.add_aperture_group_schedule(
                    self.dynamic_schedule[identifier]
                )
            else:
                # missing groups default to a static schedule of zeros
                filter_dyn_sch.add_aperture_group_schedule(
                    ApertureGroupSchedule(identifier, [0])
                )
        return filter_dyn_sch

    def to_dict(self, simplified=False):
        """Return DynamicSchedule as a dictionary.

        Args:
            simplified: Boolean to note whether each group should map to its
                full dictionary representation (False) or only its schedule
                list (True). (Default: False).
        """
        base = {}
        for identifier, group in self.dynamic_schedule.items():
            if not simplified:
                base[identifier] = group.to_dict()
            else:
                base[identifier] = group.to_dict()['schedule']
        return base

    def to_json(self, folder=None, file_name=None, indent=None, simplified=False):
        """Write a DynamicSchedule to JSON.

        Args:
            folder: A text string for the directory where the JSON file will be
                written. If unspecified, the default simulation folder will be
                used. This is usually at "C:\\Users\\USERNAME\\simulation."
            file_name: Name for the JSON file. If unspecified,
                'dynamic_schedule' will be used. (Default: None).
            indent: A positive integer to set the indentation used in the
                resulting JSON file. (Default: None).
            simplified: Boolean to note whether the schedules should be
                written in their simplified form. (Default: False).

        Returns:
            json_file: Path to JSON file.
        """
        # create dictionary of the DynamicSchedule
        dyn_sch_dict = self.to_dict(simplified=simplified)

        # set up name and folder for the JSON
        file_name = file_name if file_name else 'dynamic_schedule'
        if not file_name.endswith('.json'):
            file_name += '.json'
        folder = folder if folder is not None else folders.default_simulation_folder
        json_file = os.path.join(folder, file_name)

        # write JSON
        with open(json_file, 'w') as fp:
            json.dump(dyn_sch_dict, fp, indent=indent)
        return json_file

    def duplicate(self):
        """Get a copy of this object."""
        return self.__copy__()

    def ToString(self):
        """Overwrite .NET ToString."""
        return self.__repr__()

    def __copy__(self):
        # copy the mapping itself so that adding/removing groups on the
        # duplicate does not mutate this instance; the ApertureGroupSchedule
        # values are intentionally shared
        return DynamicSchedule(dict(self.dynamic_schedule))

    def __repr__(self):
        return '{}'.format(self.__class__.__name__)
|
@@ -0,0 +1,24 @@
|
|
1
|
+
"""Functions for post-processing daylight outputs into electric lighting schedules."""
|
2
|
+
from typing import List
|
3
|
+
try:
|
4
|
+
import cupy as np
|
5
|
+
is_gpu = True
|
6
|
+
except ImportError:
|
7
|
+
is_gpu = False
|
8
|
+
import numpy as np
|
9
|
+
|
10
|
+
|
11
|
+
def array_to_dimming_fraction(
        array: np.ndarray, su_pattern: List[int], setpt: float, m_pow: float,
        m_lgt: float, off_m: float) -> list:
    """Compute hourly dimming fractions for a given result file."""
    # linear dimming fraction between the setpoint and the minimum light level
    linear_fraction = (setpt - array) / (setpt - m_lgt)
    # partially-dimmed power fraction (dimmed light plus ballast minimum)
    partial = linear_fraction + (1 - linear_fraction) * m_pow
    # above the setpoint the fixture is either fully off or at minimum power
    above_setpt = 0 if off_m else m_pow
    hourly = np.where(
        array > setpt, above_setpt, np.where(array <= m_lgt, 1, partial))
    mean_fraction = hourly.sum(axis=0) / array.shape[0]

    # start from a full year of 1s and overwrite the occupied (sun-up) hours
    fractions = np.ones(8760)
    fractions[su_pattern] = mean_fraction

    return fractions
|