honeybee-radiance-postprocess 0.4.555__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. honeybee_radiance_postprocess/__init__.py +1 -0
  2. honeybee_radiance_postprocess/__main__.py +4 -0
  3. honeybee_radiance_postprocess/annual.py +73 -0
  4. honeybee_radiance_postprocess/annualdaylight.py +289 -0
  5. honeybee_radiance_postprocess/annualirradiance.py +35 -0
  6. honeybee_radiance_postprocess/breeam/__init__.py +1 -0
  7. honeybee_radiance_postprocess/breeam/breeam.py +552 -0
  8. honeybee_radiance_postprocess/cli/__init__.py +33 -0
  9. honeybee_radiance_postprocess/cli/abnt.py +392 -0
  10. honeybee_radiance_postprocess/cli/breeam.py +96 -0
  11. honeybee_radiance_postprocess/cli/datacollection.py +133 -0
  12. honeybee_radiance_postprocess/cli/grid.py +295 -0
  13. honeybee_radiance_postprocess/cli/leed.py +143 -0
  14. honeybee_radiance_postprocess/cli/merge.py +161 -0
  15. honeybee_radiance_postprocess/cli/mtxop.py +161 -0
  16. honeybee_radiance_postprocess/cli/postprocess.py +1092 -0
  17. honeybee_radiance_postprocess/cli/schedule.py +103 -0
  18. honeybee_radiance_postprocess/cli/translate.py +216 -0
  19. honeybee_radiance_postprocess/cli/two_phase.py +252 -0
  20. honeybee_radiance_postprocess/cli/util.py +121 -0
  21. honeybee_radiance_postprocess/cli/viewfactor.py +157 -0
  22. honeybee_radiance_postprocess/cli/well.py +110 -0
  23. honeybee_radiance_postprocess/data_type.py +102 -0
  24. honeybee_radiance_postprocess/dynamic.py +273 -0
  25. honeybee_radiance_postprocess/electriclight.py +24 -0
  26. honeybee_radiance_postprocess/en17037.py +304 -0
  27. honeybee_radiance_postprocess/helper.py +266 -0
  28. honeybee_radiance_postprocess/ies/__init__.py +1 -0
  29. honeybee_radiance_postprocess/ies/lm.py +224 -0
  30. honeybee_radiance_postprocess/ies/lm_schedule.py +248 -0
  31. honeybee_radiance_postprocess/leed/__init__.py +1 -0
  32. honeybee_radiance_postprocess/leed/leed.py +801 -0
  33. honeybee_radiance_postprocess/leed/leed_schedule.py +256 -0
  34. honeybee_radiance_postprocess/metrics.py +439 -0
  35. honeybee_radiance_postprocess/reader.py +80 -0
  36. honeybee_radiance_postprocess/results/__init__.py +4 -0
  37. honeybee_radiance_postprocess/results/annual_daylight.py +752 -0
  38. honeybee_radiance_postprocess/results/annual_irradiance.py +196 -0
  39. honeybee_radiance_postprocess/results/results.py +1416 -0
  40. honeybee_radiance_postprocess/type_hints.py +38 -0
  41. honeybee_radiance_postprocess/util.py +211 -0
  42. honeybee_radiance_postprocess/vis_metadata.py +49 -0
  43. honeybee_radiance_postprocess/well/__init__.py +1 -0
  44. honeybee_radiance_postprocess/well/well.py +509 -0
  45. honeybee_radiance_postprocess-0.4.555.dist-info/METADATA +79 -0
  46. honeybee_radiance_postprocess-0.4.555.dist-info/RECORD +50 -0
  47. honeybee_radiance_postprocess-0.4.555.dist-info/WHEEL +5 -0
  48. honeybee_radiance_postprocess-0.4.555.dist-info/entry_points.txt +2 -0
  49. honeybee_radiance_postprocess-0.4.555.dist-info/licenses/LICENSE +661 -0
  50. honeybee_radiance_postprocess-0.4.555.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1416 @@
1
+ """Post-processing Results class."""
2
+ import json
3
+ from pathlib import Path
4
+ from itertools import islice, cycle
5
+ from typing import Tuple, Union, List
6
+ import itertools
7
+ try:
8
+ import cupy as np
9
+ is_gpu = True
10
+ except ImportError:
11
+ is_gpu = False
12
+ import numpy as np
13
+
14
+ from ladybug.analysisperiod import AnalysisPeriod
15
+ from ladybug.datacollection import HourlyContinuousCollection
16
+ from ladybug.datatype.generic import GenericType
17
+ from ladybug.datatype.base import DataTypeBase
18
+ from ladybug.dt import DateTime
19
+ from ladybug.header import Header
20
+
21
+ from ..annual import occupancy_schedule_8_to_6
22
+ from ..metrics import (average_values_array2d, cumulative_values_array2d,
23
+ peak_values_array2d)
24
+ from ..util import filter_array2d, hoys_mask, check_array_dim, \
25
+ _filter_grids_by_pattern
26
+ from .. import type_hints
27
+ from ..dynamic import DynamicSchedule, ApertureGroupSchedule
28
+
29
+ is_cpu = not is_gpu
30
+
31
+
32
class _ResultsFolder(object):
    """Base class for ResultsFolder.

    This class includes properties that are independent of the results.

    Args:
        folder: Path to results folder.

    Properties:
        * folder
        * grids_info
        * sun_up_hours
        * sun_down_hours
        * light_paths
        * default_states
        * grid_states
        * timestep
        * study_hours

    """
    __slots__ = ('_folder', '_grids_info', '_sun_up_hours', '_sun_down_hours',
                 '_sun_up_hours_mask', '_sun_down_hours_mask', '_datetimes',
                 '_light_paths', '_default_states', '_grid_states', '_timestep',
                 '_study_hours')

    def __init__(self, folder: Union[str, Path]):
        """Initialize ResultsFolder."""
        self._folder = Path(folder).absolute().as_posix()
        # study info must be read first: sun up hours and the masks below
        # depend on the timestep and study hours
        self._timestep, self._study_hours = self._get_study_info()
        self.grids_info = self._get_grids_info()
        # the sun_up_hours setter also derives sun_down_hours
        self.sun_up_hours = self._get_sun_up_hours()
        self._sun_up_hours_mask = self._get_sun_up_hours_mask()
        self._sun_down_hours_mask = self._get_sun_down_hours_mask()
        self._datetimes = self._get_datetimes()
        self._light_paths = self._get_light_paths()
        self._default_states = self._get_default_states()
        self._grid_states = self._get_grid_states()

    @property
    def folder(self):
        """Return full path to results folder as a string."""
        return self._folder

    @property
    def grids_info(self):
        """Return grids information as list of dictionaries for each grid."""
        return self._grids_info

    @grids_info.setter
    def grids_info(self, grids_info):
        assert isinstance(grids_info, list), \
            f'Grids information must be a list. Got object of type: {type(grids_info)}.'
        for grid_info in grids_info:
            assert isinstance(grid_info, dict), \
                'Object in grids information must be a dictionary. ' \
                f'Got object of type {type(grid_info)}.'
            if not grid_info.get('light_path', None):
                # if light path key is nonexistent
                grid_info['light_path'] = [['__static_apertures__']]
        self._grids_info = grids_info

    @property
    def sun_up_hours(self):
        """Return sun up hours."""
        return self._sun_up_hours

    @sun_up_hours.setter
    def sun_up_hours(self, sun_up_hours):
        assert isinstance(sun_up_hours, list), \
            f'Sun up hours must be a list. Got object of type: {type(sun_up_hours)}'
        self._sun_up_hours = sun_up_hours
        # sun down hours are the complement of the sun up hours within the
        # study hours
        self.sun_down_hours = np.setdiff1d(self.study_hours, sun_up_hours).tolist()

    @property
    def sun_up_hours_mask(self):
        """Return sun up hours masking array."""
        return self._sun_up_hours_mask

    @property
    def sun_down_hours(self):
        """Return sun down hours."""
        return self._sun_down_hours

    @sun_down_hours.setter
    def sun_down_hours(self, sun_down_hours):
        assert isinstance(sun_down_hours, list), \
            f'Sun down hours must be a list. Got object of type: {type(sun_down_hours)}'
        self._sun_down_hours = sun_down_hours

    @property
    def sun_down_hours_mask(self):
        """Return sun down hours masking array."""
        return self._sun_down_hours_mask

    @property
    def datetimes(self):
        """Return DateTimes for sun up hours."""
        return self._datetimes

    @property
    def light_paths(self):
        """Return the identifiers of the light paths."""
        return self._light_paths

    @property
    def default_states(self):
        """Return default states as a dictionary."""
        return self._default_states

    @property
    def grid_states(self):
        """Return grid states as a dictionary."""
        return self._grid_states

    @property
    def timestep(self):
        """Return timestep as an integer."""
        return self._timestep

    @property
    def study_hours(self):
        """Return study hours as a list."""
        return self._study_hours

    def _get_light_paths(self) -> list:
        """Find all light paths in grids_info."""
        lp = []
        for grid_info in self.grids_info:
            try:
                light_paths = grid_info['light_path']
            except KeyError:
                # patch the grid info in place so later look-ups succeed
                grid_info['light_path'] = [['__static_apertures__']]
                light_paths = grid_info['light_path']
            for light_path in light_paths:
                for elem in light_path:
                    if elem in lp:
                        continue
                    if elem == '__static_apertures__':
                        # static apertures are always kept first in the list
                        lp.insert(0, elem)
                    else:
                        lp.append(elem)
            if not light_paths and '__static_apertures__' not in lp:
                lp.insert(0, '__static_apertures__')

        return lp

    def _get_default_states(self) -> dict:
        """Set default state to 0 for all light paths."""
        default_states = {}
        for light_path in self.light_paths:
            default_states[light_path] = [0]
        return default_states

    def _get_grid_states(self) -> dict:
        """Read grid_states.json if available."""
        info = Path(self.folder, 'grid_states.json')
        if info.is_file():
            with open(info) as data_f:
                data = json.load(data_f)
            return data
        else:
            # only static results
            return {}

    def _get_study_info(self) -> Tuple[int, list]:
        """Read study info file.

        Returns:
            Tuple: The timestep and the study hours. Falls back to timestep 1
            and a full-year AnalysisPeriod when no study_info.json exists.
        """
        study_info_file = Path(self.folder).joinpath('study_info.json')
        if study_info_file.exists():
            with open(study_info_file) as file:
                study_info = json.load(file)
            if study_info['timestep'] == 1:
                # hourly studies use integer hours of the year
                study_info['study_hours'] = \
                    list(map(int, study_info['study_hours']))
        else:
            study_info = {}
            study_info['timestep'] = 1
            study_info['study_hours'] = AnalysisPeriod().hoys

        return study_info['timestep'], study_info['study_hours']

    def _get_datetimes(self) -> List[DateTime]:
        """Get a list of DateTimes of the sun up hours."""
        datetimes = [
            DateTime.from_hoy(hoy) for hoy in list(map(float, self.sun_up_hours))
        ]

        return datetimes

    def _get_grids_info(self) -> List[dict]:
        """Get grids info from folder."""
        info = Path(self.folder, 'grids_info.json')
        with open(info) as data_f:
            grids = json.load(data_f)

        return grids

    def _get_sun_up_hours(self) -> List[float]:
        """Get sun up hours from folder."""
        suh_fp = Path(self.folder, 'sun-up-hours.txt')
        sun_up_hours = np.loadtxt(suh_fp, dtype=float).tolist()
        if self.timestep == 1:
            # match the integer study hours of hourly studies
            sun_up_hours = list(map(int, sun_up_hours))

        return sun_up_hours

    def _get_sun_up_hours_mask(self) -> List[int]:
        """Get a sun up hours masking array of the study hours."""
        sun_up_hours_mask = \
            np.where(np.isin(np.array(self.study_hours), np.array(self.sun_up_hours)))[0]

        return sun_up_hours_mask

    def _get_sun_down_hours_mask(self) -> List[int]:
        """Get a sun down hours masking array of the study hours."""
        sun_down_hours_mask = \
            np.where(~np.isin(np.array(self.study_hours), np.array(self.sun_up_hours)))[0]

        return sun_down_hours_mask

    def __repr__(self):
        return f'{self.__class__.__name__}: {self.folder}'
253
+
254
+
255
+ class Results(_ResultsFolder):
256
+ """Results class.
257
+
258
+ Args:
259
+ folder: Path to results folder.
260
+ schedule: 8760 values as a list. Values must be either 0 or 1. Values of 1
261
+ indicates occupied hours. If no schedule is provided a default schedule
262
+ will be used. (Default: None).
263
+ load_arrays: Set to True to load all NumPy arrays. If False the arrays will be
264
+ loaded only once they are needed. In both cases the loaded array(s) will be
265
+ stored in a dictionary under the arrays property. (Default: False).
266
+
267
+ Properties:
268
+ * schedule
269
+ * occ_pattern
270
+ * total_occ
271
+ * sun_down_occ_hours
272
+ * occ_mask
273
+ * arrays
274
+ * valid_states
275
+ * datatype
276
+ * unit
277
+ * cache_arrays
278
+ * use_gpu
279
+ """
280
+ __slots__ = ('_schedule', '_occ_pattern', '_total_occ', '_sun_down_occ_hours',
281
+ '_occ_mask', '_arrays', '_valid_states', '_datatype', '_unit',
282
+ '_cache_arrays', '_use_gpu')
283
+
284
    def __init__(self, folder, datatype: DataTypeBase = None,
                 schedule: list = None, unit: str = None,
                 load_arrays: bool = False, cache_arrays: bool = True):
        """Initialize Results.

        Args:
            folder: Path to results folder.
            datatype: A Ladybug DataType for the hourly values; defaults to a
                generic type when None (see datatype setter).
            schedule: 8760 values (0 or 1) marking occupied hours; a default
                schedule is derived when None (see schedule setter).
            unit: Unit of the hourly values.
            load_arrays: If True, load all NumPy arrays up front; otherwise
                they are loaded lazily when first needed.
            cache_arrays: If True, keep loaded arrays in the arrays property.
        """
        _ResultsFolder.__init__(self, folder)
        # the schedule setter also derives the occupancy pattern and mask
        self.schedule = schedule
        self._arrays = self._load_arrays() if load_arrays else {}
        self._valid_states = self._get_valid_states()
        self.datatype = datatype
        self.unit = unit
        self.cache_arrays = cache_arrays
295
+
296
    @property
    def schedule(self):
        """Return schedule as a list of 8760 * timestep values (0 or 1)."""
        return self._schedule
300
+
301
    @schedule.setter
    def schedule(self, schedule):
        # fall back to the default 8-to-6 occupancy schedule at this folder's
        # timestep when no schedule is given
        self._schedule = schedule if schedule else \
            occupancy_schedule_8_to_6(timestep=self.timestep, as_list=True)
        # re-derive the occupancy pattern, totals and mask for the new schedule
        self._update_occ()
306
+
307
    @property
    def occ_pattern(self):
        """Return a filtered version of the annual schedule that only includes the
        sun-up-hours.

        Set by the schedule setter (via _update_occ).
        """
        return self._occ_pattern
312
+
313
    @property
    def total_occ(self):
        """Return an integer for the total occupied hours of the schedule."""
        return self._total_occ
317
+
318
    @property
    def sun_down_occ_hours(self):
        """Return an integer for the number of occupied hours where the sun is down and
        there's no possibility of being daylit or experiencing glare."""
        return self._sun_down_occ_hours
323
+
324
    @property
    def occ_mask(self):
        """Return an occupancy mask as a NumPy array that can be used to mask the
        results."""
        return self._occ_mask
329
+
330
    @property
    def arrays(self):
        """Return a dictionary of all the NumPy arrays that have been loaded."""
        return self._arrays
334
+
335
    @arrays.setter
    def arrays(self, value):
        # no validation: callers may replace or clear the array cache directly
        self._arrays = value
338
+
339
    @property
    def valid_states(self):
        """Return a dictionary with valid states. Each light path is represented by a
        key-value pair where the light path identifier is the key and the value is a list
        of valid states, e.g., [0, 1, 2, ...]."""
        return self._valid_states
345
+
346
    @property
    def datatype(self):
        """Return a Ladybug DataType object."""
        return self._datatype
350
+
351
    @datatype.setter
    def datatype(self, value):
        if value is not None:
            assert isinstance(value, DataTypeBase), \
                f'data_type must be a Ladybug DataType. Got {type(value)}'
        else:
            # default to a unitless generic data type
            value = GenericType('Generic', '')
        self._datatype = value
359
+
360
    @property
    def unit(self):
        """Return unit of hourly values."""
        return self._unit
364
+
365
    @unit.setter
    def unit(self, value):
        # no validation: the unit is informational only
        self._unit = value
368
+
369
    @property
    def cache_arrays(self):
        """Return a boolean to indicate whether arrays are cached."""
        return self._cache_arrays
373
+
374
    @cache_arrays.setter
    def cache_arrays(self, value):
        self._cache_arrays = value
377
+
378
+ def total(
379
+ self, hoys: list = [], states: DynamicSchedule = None,
380
+ grids_filter: str = '*', res_type: str = 'total'
381
+ ) -> type_hints.total:
382
+ """Get summed values for each sensor.
383
+
384
+ Args:
385
+ hoys: An optional numbers or list of numbers to select the hours of
386
+ the year (HOYs) for which results will be computed. Defaults to [].
387
+ states: A dictionary of states. Defaults to None.
388
+ grids_filter: The name of a grid or a pattern to filter the grids.
389
+ Defaults to '*'.
390
+ res_type: Type of results to load. Defaults to 'total'.
391
+
392
+ Returns:
393
+ Tuple: A tuple with total values and grid information.
394
+ """
395
+ grids_info = self._filter_grids(grids_filter=grids_filter)
396
+ mask = hoys_mask(self.sun_up_hours, hoys)
397
+
398
+ total = []
399
+ for grid_info in grids_info:
400
+ array = self._array_from_states(grid_info, states=states, res_type=res_type)
401
+ if np.any(array):
402
+ array_filter = filter_array2d(array, mask=mask)
403
+ array_total = array_filter.sum(axis=1)
404
+ else:
405
+ array_total = np.zeros(grid_info['count'])
406
+ total.append(array_total)
407
+
408
+ return total, grids_info
409
+
410
+ def point_in_time(
411
+ self, datetime: Union[float, DateTime], states: DynamicSchedule = None,
412
+ grids_filter: str = '*', res_type: str = 'total'
413
+ ) -> type_hints.point_in_time:
414
+ """Get point in time values.
415
+
416
+ Args:
417
+ datetime: Hour of the year as a float or DateTime object.
418
+ states: A dictionary of states. Defaults to None.
419
+ grids_filter: The name of a grid or a pattern to filter the grids.
420
+ Defaults to '*'.
421
+ res_type: Type of results to load. Defaults to 'total'.
422
+
423
+ Returns:
424
+ Tuple: A tuple with point in time values and grid information.
425
+ """
426
+ grids_info = self._filter_grids(grids_filter=grids_filter)
427
+
428
+ if isinstance(datetime, float):
429
+ dt = DateTime.from_hoy(datetime)
430
+ elif isinstance(datetime, DateTime):
431
+ dt = datetime
432
+ else:
433
+ error_message = (
434
+ f'Input datetime must be of type {int} or {DateTime}. '
435
+ f'Received {type(DateTime)}.'
436
+ )
437
+ raise ValueError(error_message)
438
+
439
+ idx = self._index_from_datetime(dt)
440
+
441
+ pit_values = []
442
+ for grid_info in grids_info:
443
+ if idx:
444
+ array = self._array_from_states(
445
+ grid_info, states=states, res_type=res_type)
446
+ pit_values.append(array[:, idx])
447
+ else:
448
+ # datetime not in sun up hours, add zeros
449
+ pit_values.append(np.zeros(grid_info['count']))
450
+
451
+ return pit_values, grids_info
452
+
453
    def point_in_time_to_folder(
        self, target_folder: str, datetime: Union[float, DateTime],
        states: DynamicSchedule = None, grids_filter: str = '*',
        res_type: str = 'total'
    ) -> type_hints.point_in_time:
        """Get point in time values and write the values to a folder.

        One '<full_id>.pit' file is written per grid under a 'point_in_time'
        subfolder, along with a grids_info.json.

        Args:
            target_folder: Folder path to write annual metrics in. Usually this
                folder is called 'metrics'.
            datetime: Hour of the year as a float or DateTime object.
            states: A dictionary of states. Defaults to None.
            grids_filter: The name of a grid or a pattern to filter the grids.
                Defaults to '*'.
            res_type: Type of results to load. Defaults to 'total'.

        Returns:
            Tuple: A tuple with point in time values and grid information.
        """
        folder = Path(target_folder)
        folder.mkdir(parents=True, exist_ok=True)

        pit_values, grids_info = self.point_in_time(
            datetime=datetime, states=states,
            grids_filter=grids_filter, res_type=res_type)

        metric_folder = folder.joinpath('point_in_time')

        for count, grid_info in enumerate(grids_info):
            d = pit_values[count]
            full_id = grid_info['full_id']
            output_file = metric_folder.joinpath(f'{full_id}.pit')
            output_file.parent.mkdir(parents=True, exist_ok=True)
            np.savetxt(output_file, d, fmt='%.2f')

        info_file = metric_folder.joinpath('grids_info.json')
        info_file.write_text(json.dumps(grids_info))

        return pit_values, grids_info
492
+
493
+ def average_values(
494
+ self, hoys: list = [], states: DynamicSchedule = None, grids_filter: str = '*',
495
+ res_type: str = 'total') -> type_hints.average_values:
496
+ """Get average values for each sensor over a given period.
497
+
498
+ The hoys input can be used to filter the data for a particular time
499
+ period.
500
+
501
+ Args:
502
+ hoys: An optional numbers or list of numbers to select the hours of
503
+ the year (HOYs) for which results will be computed. Defaults to [].
504
+ states: A dictionary of states. Defaults to None.
505
+ grids_filter: The name of a grid or a pattern to filter the grids.
506
+ Defaults to '*'.
507
+ res_type: Type of results to load. Defaults to 'total'.
508
+
509
+ Returns:
510
+ Tuple: A tuple with the average value for each sensor and grid information.
511
+ """
512
+ grids_info = self._filter_grids(grids_filter=grids_filter)
513
+ full_length = len(self.study_hours) if len(hoys) == 0 else len(hoys)
514
+ mask = hoys_mask(self.sun_up_hours, hoys)
515
+
516
+ average_values = []
517
+ for grid_info in grids_info:
518
+ array = self._array_from_states(grid_info, states=states, res_type=res_type)
519
+ if np.any(array):
520
+ array_filter = filter_array2d(array, mask=mask)
521
+ results = average_values_array2d(
522
+ array_filter, full_length)
523
+ else:
524
+ results = np.zeros(grid_info['count'])
525
+ average_values.append(results)
526
+
527
+ return average_values, grids_info
528
+
529
    def average_values_to_folder(
        self, target_folder: str, hoys: list = [], states: DynamicSchedule = None,
        grids_filter: str = '*', res_type: str = 'total'):
        """Get average values for each sensor over a given period and write the
        values to a folder.

        One '<full_id>.average' file is written per grid under an
        'average_values' subfolder, along with a grids_info.json.

        Args:
            target_folder: Folder path to write annual metrics in. Usually this
                folder is called 'metrics'.
            hoys: An optional numbers or list of numbers to select the hours of
                the year (HOYs) for which results will be computed. Defaults to [].
            states: A dictionary of states. Defaults to None.
            grids_filter: The name of a grid or a pattern to filter the grids.
                Defaults to '*'.
            res_type: Type of results to load. Defaults to 'total'.
        """
        folder = Path(target_folder)
        folder.mkdir(parents=True, exist_ok=True)

        average_values, grids_info = self.average_values(
            hoys=hoys, states=states,
            grids_filter=grids_filter, res_type=res_type)

        metric_folder = folder.joinpath('average_values')

        for count, grid_info in enumerate(grids_info):
            d = average_values[count]
            full_id = grid_info['full_id']
            output_file = metric_folder.joinpath(f'{full_id}.average')
            output_file.parent.mkdir(parents=True, exist_ok=True)
            np.savetxt(output_file, d, fmt='%.2f')

        info_file = metric_folder.joinpath('grids_info.json')
        info_file.write_text(json.dumps(grids_info))
563
+
564
    def median_values(
        self, hoys: list = [], states: dict = None, grids_filter: str = '*',
        res_type: str = 'total') -> type_hints.median_values:
        """Get median values for each sensor over a given period.

        The hoys input can be used to filter the data for a particular time
        period. If hoys is left empty the median values will likely be 0 since
        there are likely more sun down hours than sun up hours.

        Args:
            hoys: An optional numbers or list of numbers to select the hours of
                the year (HOYs) for which results will be computed. Defaults to [].
            states: A dictionary of states. Defaults to None.
            grids_filter: The name of a grid or a pattern to filter the grids.
                Defaults to '*'.
            res_type: Type of results to load. Defaults to 'total'.

        Returns:
            Tuple: A tuple with the median value for each sensor and grid information.
        """
        grids_info = self._filter_grids(grids_filter=grids_filter)
        mask = hoys_mask(self.sun_up_hours, hoys)

        median_values = []
        for grid_info in grids_info:
            array = self._array_from_states(grid_info, states=states, res_type=res_type)
            if np.any(array):
                array_filter = filter_array2d(array, mask=mask)
                # the results only store sun-up hours; sun-down hours must be
                # represented as explicit zero columns so the median is taken
                # over the whole requested period
                if not hoys:
                    # concatenate zero array for all sun down hours
                    zero_array = \
                        np.zeros((grid_info['count'], len(self.sun_down_hours)))
                    array_filter = np.concatenate((array_filter, zero_array), axis=1)
                else:
                    # find number of hoys that are sun down hours
                    sdh_hoys = \
                        len(set(self.sun_down_hours).intersection(hoys))
                    if sdh_hoys != 0:
                        # concatenate zero array for those hours only
                        zero_array = np.zeros((grid_info['count'], sdh_hoys))
                        array_filter = \
                            np.concatenate((array_filter, zero_array), axis=1)
                results = np.median(array_filter, axis=1)
            else:
                results = np.zeros(grid_info['count'])
            median_values.append(results)

        return median_values, grids_info
612
+
613
    def median_values_to_folder(
        self, target_folder: str, hoys: list = [], states: dict = None,
        grids_filter: str = '*', res_type: str = 'total'):
        """Get median values for each sensor over a given period and write the
        values to a folder.

        One '<full_id>.median' file is written per grid under a
        'median_values' subfolder, along with a grids_info.json.

        Args:
            target_folder: Folder path to write annual metrics in. Usually this
                folder is called 'metrics'.
            hoys: An optional numbers or list of numbers to select the hours of
                the year (HOYs) for which results will be computed. Defaults to [].
            states: A dictionary of states. Defaults to None.
            grids_filter: The name of a grid or a pattern to filter the grids.
                Defaults to '*'.
            res_type: Type of results to load. Defaults to 'total'.
        """
        folder = Path(target_folder)
        folder.mkdir(parents=True, exist_ok=True)

        median_values, grids_info = self.median_values(
            hoys=hoys, states=states,
            grids_filter=grids_filter, res_type=res_type)

        metric_folder = folder.joinpath('median_values')

        for count, grid_info in enumerate(grids_info):
            d = median_values[count]
            full_id = grid_info['full_id']
            output_file = metric_folder.joinpath(f'{full_id}.median')
            output_file.parent.mkdir(parents=True, exist_ok=True)
            np.savetxt(output_file, d, fmt='%.2f')

        info_file = metric_folder.joinpath('grids_info.json')
        info_file.write_text(json.dumps(grids_info))
647
+
648
+ def cumulative_values(
649
+ self, hoys: list = [], states: DynamicSchedule = None,
650
+ t_step_multiplier: float = 1, grids_filter: str = '*',
651
+ res_type: str = 'total') -> type_hints.cumulative_values:
652
+ """Get cumulative values for each sensor over a given period.
653
+
654
+ The hoys input can be used to filter the data for a particular time
655
+ period.
656
+
657
+ Args:
658
+ hoys: An optional numbers or list of numbers to select the hours of
659
+ the year (HOYs) for which results will be computed. Defaults to [].
660
+ states: A dictionary of states. Defaults to None.
661
+ t_step_multiplier: A value that will be multiplied with the timestep.
662
+ grids_filter: The name of a grid or a pattern to filter the grids.
663
+ Defaults to '*'.
664
+ res_type: Type of results to load. Defaults to 'total'.
665
+
666
+ Returns:
667
+ Tuple: A tuple with the cumulative value for each sensor and grid
668
+ information.
669
+ """
670
+ grids_info = self._filter_grids(grids_filter=grids_filter)
671
+ mask = hoys_mask(self.sun_up_hours, hoys)
672
+
673
+ cumulative_values = []
674
+ for grid_info in grids_info:
675
+ array = self._array_from_states(grid_info, states=states, res_type=res_type)
676
+ if np.any(array):
677
+ array_filter = filter_array2d(array, mask=mask)
678
+ results = cumulative_values_array2d(
679
+ array_filter, self.timestep, t_step_multiplier)
680
+ else:
681
+ results = np.zeros(grid_info['count'])
682
+ cumulative_values.append(results)
683
+
684
+ return cumulative_values, grids_info
685
+
686
    def cumulative_values_to_folder(
        self, target_folder: str, hoys: list = [],
        states: DynamicSchedule = None, t_step_multiplier: float = 1,
        grids_filter: str = '*', res_type: str = 'total'):
        """Get cumulative values for each sensor over a given period and write
        the values to a folder.

        One '<full_id>.cumulative' file is written per grid under a
        'cumulative_values' subfolder, along with a grids_info.json.

        Args:
            target_folder: Folder path to write annual metrics in. Usually this
                folder is called 'metrics'.
            hoys: An optional numbers or list of numbers to select the hours of
                the year (HOYs) for which results will be computed. Defaults to [].
            states: A dictionary of states. Defaults to None.
            t_step_multiplier: A value that will be multiplied with the timestep.
            grids_filter: The name of a grid or a pattern to filter the grids.
                Defaults to '*'.
            res_type: Type of results to load. Defaults to 'total'.
        """
        folder = Path(target_folder)
        folder.mkdir(parents=True, exist_ok=True)

        cumulative_values, grids_info = self.cumulative_values(
            hoys=hoys, states=states, t_step_multiplier=t_step_multiplier,
            grids_filter=grids_filter, res_type=res_type
        )

        metric_folder = folder.joinpath('cumulative_values')

        for count, grid_info in enumerate(grids_info):
            d = cumulative_values[count]
            full_id = grid_info['full_id']
            output_file = metric_folder.joinpath(f'{full_id}.cumulative')
            output_file.parent.mkdir(parents=True, exist_ok=True)
            np.savetxt(output_file, d, fmt='%.2f')

        info_file = metric_folder.joinpath('grids_info.json')
        info_file.write_text(json.dumps(grids_info))
723
+
724
+ def peak_values(
725
+ self, hoys: list = [], states: DynamicSchedule = None, grids_filter: str = '*',
726
+ coincident: bool = False, res_type: str = 'total'
727
+ ) -> type_hints.peak_values:
728
+ """Get peak values for each sensor over a given period.
729
+
730
+ The hoys input can be used to filter the data for a particular time
731
+ period.
732
+
733
+ Args:
734
+ hoys: An optional numbers or list of numbers to select the hours of
735
+ the year (HOYs) for which results will be computed. Defaults to [].
736
+ states: A dictionary of states. Defaults to None.
737
+ grids_filter: The name of a grid or a pattern to filter the grids.
738
+ Defaults to '*'.
739
+ coincident: Boolean to indicate whether output values represent the peak
740
+ value for each sensor throughout the entire analysis (False) or they
741
+ represent the highest overall value across each sensor grid at a
742
+ particular timestep (True). Defaults to False.
743
+ res_type: Type of results to load. Defaults to 'total'.
744
+
745
+ Returns:
746
+ Tuple: A tuple with the peak value for each sensor and grid information.
747
+ """
748
+ grids_info = self._filter_grids(grids_filter=grids_filter)
749
+ mask = hoys_mask(self.sun_up_hours, hoys)
750
+ filt_suh = hoys if len(hoys) != 0 else self.sun_up_hours
751
+
752
+ peak_values = []
753
+ max_hoys = []
754
+ for grid_info in grids_info:
755
+ max_i = None
756
+ array = self._array_from_states(grid_info, states=states, res_type=res_type)
757
+ if np.any(array):
758
+ array_filter = filter_array2d(array, mask=mask)
759
+ results, max_i = peak_values_array2d(
760
+ array_filter, coincident=coincident)
761
+ else:
762
+ results = np.zeros(grid_info['count'])
763
+ peak_values.append(results)
764
+ if max_i:
765
+ max_hoys.append(filt_suh[max_i])
766
+ else:
767
+ max_hoys.append(None)
768
+
769
+ return peak_values, max_hoys, grids_info
770
+
771
    def peak_values_to_folder(
        self, target_folder: str, hoys: list = [], states: DynamicSchedule = None,
        grids_filter: str = '*', coincident: bool = False, res_type='total'):
        """Get peak values for each sensor over a given period and write the
        values to a folder.

        One '<full_id>.peak' file is written per grid under a 'peak_values'
        subfolder, along with a max_hoys.txt and a grids_info.json.

        Args:
            target_folder: Folder path to write peak values in. Usually this
                folder is called 'metrics'.
            hoys: An optional numbers or list of numbers to select the hours of
                the year (HOYs) for which results will be computed. Defaults to [].
            states: A dictionary of states. Defaults to None.
            grids_filter: The name of a grid or a pattern to filter the grids.
            coincident: Boolean to indicate whether output values represent the peak
                value for each sensor throughout the entire analysis (False) or they
                represent the highest overall value across each sensor grid at a
                particular timestep (True). Defaults to False.
            res_type: Type of results to load. Defaults to 'total'.
        """
        folder = Path(target_folder)
        folder.mkdir(parents=True, exist_ok=True)

        peak_values, max_hoys, grids_info = self.peak_values(
            hoys=hoys, states=states, grids_filter=grids_filter,
            coincident=coincident, res_type=res_type)

        metric_folder = folder.joinpath('peak_values')

        for count, grid_info in enumerate(grids_info):
            d = peak_values[count]
            full_id = grid_info['full_id']
            output_file = metric_folder.joinpath(f'{full_id}.peak')
            output_file.parent.mkdir(parents=True, exist_ok=True)
            np.savetxt(output_file, d, fmt='%.2f')

        # one hour (or 'None') per grid, in the same order as grids_info
        max_hoys_file = metric_folder.joinpath('max_hoys.txt')
        max_hoys_file.write_text('\n'.join(str(h) for h in max_hoys))

        info_file = metric_folder.joinpath('grids_info.json')
        info_file.write_text(json.dumps(grids_info))
811
+
812
+ def _array_to_annual_data(
813
+ self, grid_info, states: DynamicSchedule = None,
814
+ sensor_index: list = None, res_type: str = 'total'
815
+ ) -> Tuple[List[HourlyContinuousCollection], dict, list]:
816
+ """Get annual data for one or multiple sensors.
817
+
818
+ Args:
819
+ grid_info: Grid information of the grid.
820
+ states: A dictionary of states. Defaults to None.
821
+ sensor_index: A list of sensor indices as integers. Defaults to None.
822
+ res_type: Type of results to load. Defaults to 'total'.
823
+
824
+ Returns:
825
+ Tuple: A tuple with Data Collections for each sensor, grid information,
826
+ and a list of the sensors.
827
+ """
828
+ analysis_period = AnalysisPeriod(timestep=self.timestep)
829
+
830
+ # if no sensor_index, create list with all sensors
831
+ if not sensor_index:
832
+ sensor_index = [range(grid_info['count'])]
833
+
834
+ data_collections = []
835
+ array = self._array_from_states(grid_info, states=states, res_type=res_type)
836
+ for idx in sensor_index:
837
+ if np.any(array):
838
+ values = array[idx, :]
839
+ else:
840
+ values = np.zeros(len(self.sun_up_hours))
841
+ annual_array = Results.values_to_annual(
842
+ self.sun_up_hours, values, self.timestep, self.study_hours)
843
+ header = Header(self.datatype, self.unit, analysis_period)
844
+ header.metadata['sensor grid'] = grid_info['full_id']
845
+ header.metadata['sensor index'] = idx
846
+ data_collections.append(
847
+ HourlyContinuousCollection(header, annual_array.tolist()))
848
+
849
+ return data_collections, grid_info, sensor_index
850
+
851
+ def annual_data(
852
+ self, states: DynamicSchedule = None, grids_filter: str = '*',
853
+ sensor_index: dict = None, res_type: str = 'total'
854
+ ) -> type_hints.annual_data:
855
+ """Get annual data for one or multiple sensors.
856
+
857
+ Args:
858
+ states: A dictionary of states. Defaults to None.
859
+ grids_filter: The name of a grid or a pattern to filter the grids.
860
+ Defaults to '*'.
861
+ sensor_index: A dictionary with grids as keys and a list of sensor
862
+ indices as values. Defaults to None.
863
+ res_type: Type of results to load. Defaults to 'total'.
864
+
865
+ Returns:
866
+ Tuple: A tuple with Data Collections for each sensor, grid information,
867
+ and a dictionary of the sensors.
868
+ """
869
+ grids_info = self._filter_grids(grids_filter=grids_filter)
870
+ analysis_period = AnalysisPeriod(timestep=self.timestep)
871
+
872
+ # if no sensor_index, create dict with all sensors
873
+ if not sensor_index:
874
+ sensor_index = {}
875
+ for grid_info in grids_info:
876
+ sensor_index[grid_info['full_id']] = \
877
+ [i for i in range(grid_info['count'])]
878
+
879
+ data_collections = []
880
+ for grid_info in grids_info:
881
+ data_collections_grid = []
882
+ grid_id = grid_info['full_id']
883
+ array = self._array_from_states(grid_info, states=states, res_type=res_type)
884
+ indices = sensor_index[grid_id]
885
+ for idx in indices:
886
+ if np.any(array):
887
+ values = array[idx, :]
888
+ else:
889
+ values = np.zeros(len(self.sun_up_hours))
890
+ annual_array = Results.values_to_annual(
891
+ self.sun_up_hours, values, self.timestep)
892
+ header = Header(self.datatype, self.unit, analysis_period)
893
+ header.metadata['sensor grid'] = grid_id
894
+ header.metadata['sensor index'] = idx
895
+ data_collections_grid.append(
896
+ HourlyContinuousCollection(header, annual_array.tolist()))
897
+ data_collections.append(data_collections_grid)
898
+
899
+ return data_collections, grids_info, sensor_index
900
+
901
+ def annual_data_to_folder(
902
+ self, target_folder: str, states: DynamicSchedule = None, grids_filter: str = '*',
903
+ sensor_index: dict = None, res_type: str = 'total'):
904
+ """Get annual data for one or multiple sensors and write the data to a
905
+ folder as Data Collections.
906
+
907
+ Args:
908
+ target_folder: Folder path to write annual metrics in. Usually this
909
+ folder is called 'metrics'.
910
+ states: A dictionary of states. Defaults to None.
911
+ grids_filter: The name of a grid or a pattern to filter the grids.
912
+ Defaults to '*'.
913
+ sensor_index: A dictionary with grids as keys and a list of sensor
914
+ indices as values. Defaults to None.
915
+ res_type: Type of results to load. Defaults to 'total'.
916
+ """
917
+ folder = Path(target_folder)
918
+ folder.mkdir(parents=True, exist_ok=True)
919
+
920
+ data_collections, grids_info, sensor_index = self.annual_data(
921
+ states=states, grids_filter=grids_filter, sensor_index=sensor_index,
922
+ res_type=res_type)
923
+
924
+ metric_folder = folder.joinpath('datacollections')
925
+
926
+ for count, grid_info in enumerate(grids_info):
927
+ grid_collections = data_collections[count]
928
+ for data_collection in grid_collections:
929
+ grid_id = grid_info['full_id']
930
+ sensor_id = data_collection.header.metadata['sensor index']
931
+ data_dict = data_collection.to_dict()
932
+ data_file = metric_folder.joinpath(f'{grid_id}_{sensor_id}.json')
933
+ data_file.parent.mkdir(parents=True, exist_ok=True)
934
+ data_file.write_text(json.dumps(data_dict))
935
+
936
+ @staticmethod
937
+ def values_to_annual(
938
+ hours: Union[List[float], np.ndarray],
939
+ values: Union[List[float], np.ndarray],
940
+ timestep: int, base_value: int = 0,
941
+ dtype: np.dtype = np.float32) -> np.ndarray:
942
+ """Map a 1D NumPy array based on a set of hours to an annual array.
943
+
944
+ This method creates an array with a base value of length 8760 and
945
+ replaces the base value with the input 'values' at the indices of the
946
+ input 'hours'.
947
+
948
+ Args:
949
+ hours: A list of hours. This can be a regular list or a 1D NumPy
950
+ array.
951
+ values: A list of values to map to an annual array. This can be a
952
+ regular list or a 1D NumPy array.
953
+ timestep: Time step of the simulation.
954
+ base_value: A value that will be applied for the base array.
955
+ dtype: A NumPy dtype for the annual array.
956
+
957
+ Returns:
958
+ A 1D NumPy array.
959
+ """
960
+ if not isinstance(values, np.ndarray):
961
+ values = np.array(values)
962
+ if not isinstance(hours, np.ndarray):
963
+ hours = np.array(hours)
964
+ check_array_dim(values, 1)
965
+ assert hours.shape == values.shape
966
+
967
+ full_ap = np.array(AnalysisPeriod(timestep=timestep).hoys)
968
+ indices = np.where(np.isin(full_ap, hours))[0]
969
+ annual_array = np.repeat(np.array(base_value), 8760 * timestep).astype(dtype)
970
+
971
+ annual_array[indices] = values
972
+
973
+ return annual_array
974
+
975
+ def _index_from_datetime(self, datetime: DateTime) -> Union[int, None]:
976
+ """Returns the index of the input datetime in the list of datetimes
977
+ from the datetimes property.
978
+
979
+ If the DateTime is not in the list, the function will return None.
980
+
981
+ Args:
982
+ datetime: A DateTime object.
983
+
984
+ Returns:
985
+ Index as an integer or None.
986
+ """
987
+ assert isinstance(datetime, DateTime), \
988
+ f'Expected DateTime object but received {type(datetime)}'
989
+ try:
990
+ index = self.datetimes.index(datetime)
991
+ except Exception:
992
+ # DateTime not in sun up hours
993
+ index = None
994
+
995
+ return index
996
+
997
+ def _get_array(
998
+ self, grid_info: dict, light_path: str, state: int = 0,
999
+ res_type: str = 'total', extension: str = '.npy') -> np.ndarray:
1000
+ """Get an array for a given grid, light path, and state.
1001
+
1002
+ The array will be fetched from the 'arrays' property if it has been
1003
+ loaded already.
1004
+
1005
+ Args:
1006
+ grid_info: Grid information.
1007
+ light_path: Identifier of the light path.
1008
+ state: State as an integer. E.g., 0 for the default state.
1009
+ Defaults to 0.
1010
+ res_type: Type of results to load. Defaults to 'total'.
1011
+ extension: File extension of the array to load. Defaults to '.npy'.
1012
+
1013
+ Returns:
1014
+ np.ndarray: A NumPy array of a given grid, light path, and state.
1015
+ """
1016
+ grid_id = grid_info['full_id']
1017
+
1018
+ state_identifier = self._state_identifier(grid_id, light_path, state=state)
1019
+ try:
1020
+ array = self.arrays[grid_id][light_path][state_identifier][res_type]
1021
+ except Exception:
1022
+ array = self._load_array(
1023
+ grid_info, light_path, state=state, res_type=res_type,
1024
+ extension=extension
1025
+ )
1026
+
1027
+ return array
1028
+
1029
+ def _load_array(
1030
+ self, grid_info: dict, light_path: str, state: int = 0,
1031
+ res_type: str = 'total', extension: str = '.npy') -> np.ndarray:
1032
+ """Load a NumPy file to an array.
1033
+
1034
+ This method will also update the arrays property value.
1035
+
1036
+ Args:
1037
+ grid_info: Grid information.
1038
+ light_path: Identifier of the light path.
1039
+ state: State as an integer. E.g., 0 for the default state.
1040
+ Defaults to 0.
1041
+ res_type: Which type of result to return a file for. E.g., 'total' for total
1042
+ illuminance or 'direct' for direct illuminance.
1043
+ extension: File extension of the array to load. Defaults to '.npy'.
1044
+
1045
+ Returns:
1046
+ np.ndarray: A NumPy array of a given grid, light path, and state
1047
+ from a NumPy file.
1048
+ """
1049
+ grid_id = grid_info['full_id']
1050
+
1051
+ def merge_dicts(array_dict, arrays):
1052
+ for key, value in array_dict.items():
1053
+ if isinstance(value, dict):
1054
+ node = arrays.setdefault(key, {})
1055
+ merge_dicts(value, node)
1056
+ else:
1057
+ arrays[key] = value
1058
+ return arrays
1059
+
1060
+ state_identifier = self._state_identifier(grid_id, light_path, state=state)
1061
+ file = self._get_file(grid_id, light_path, state_identifier, res_type,
1062
+ extension=extension)
1063
+
1064
+ array = np.load(file)
1065
+
1066
+ if self.cache_arrays:
1067
+ array_dict = {grid_id: {light_path: {state_identifier: {res_type: array}}}}
1068
+ arrays = merge_dicts(array_dict, self.arrays)
1069
+ self.arrays = arrays
1070
+
1071
+ return array
1072
+
1073
+ def clear_cached_arrays(self, res_type: str = None):
1074
+ """Clear the cached arrays.
1075
+
1076
+ This method will simply clear the arrays property to remove arrays from
1077
+ memory, unless the res_type is selected in which case only 'total' or
1078
+ 'direct' arrays will be deleted.
1079
+
1080
+ Args:
1081
+ res_type: Which type of results to clear. This can be either
1082
+ 'total' or 'direct'.
1083
+ """
1084
+ if res_type is not None:
1085
+ assert res_type in ('total', 'direct')
1086
+ arrays = self.arrays
1087
+ for grid_id in arrays:
1088
+ for light_path in arrays[grid_id]:
1089
+ for state in arrays[grid_id][light_path]:
1090
+ del arrays[grid_id][light_path][state][res_type]
1091
+ else:
1092
+ self.arrays.clear()
1093
+
1094
+ def _state_identifier(
1095
+ self, grid_id: str, light_path: str, state: int = 0) -> Union[str, None]:
1096
+ """Get the state identifier from a light path and state integer.
1097
+
1098
+ Args:
1099
+ grid_id: Grid identifier.
1100
+ light_path: Identifier of the light path.
1101
+ state: State as an integer. E.g., 0 for the default state.
1102
+ Defaults to 0.
1103
+
1104
+ Returns:
1105
+ State identifier. For static apertures the identifier is 'default',
1106
+ and for other light paths it is the light path identifier preceded
1107
+ by the state integer, e.g., '0_light_path'. If the state is -1 the
1108
+ state identifier will be None.
1109
+ """
1110
+ # TODO: Figure out if there is a better way to handle the states.
1111
+ # I.e., state integer <--> state identifier.
1112
+ valid_states = self.valid_states[light_path]
1113
+ if state in valid_states:
1114
+ if light_path == '__static_apertures__':
1115
+ state_identifier = 'default'
1116
+ else:
1117
+ state_identifier = self.grid_states[grid_id][light_path][state]
1118
+ return state_identifier
1119
+ elif state == -1:
1120
+ return None
1121
+ else:
1122
+ error_message = (
1123
+ f'State of {light_path} must be any of {valid_states} for on '
1124
+ f'or -1 for off. Received state {state}.'
1125
+ )
1126
+ raise ValueError(error_message)
1127
+
1128
+ def _get_file(
1129
+ self, grid_id: str, light_path: str, state_identifier: str,
1130
+ res_type: str = 'total', extension: str = '.npy') -> Path:
1131
+ """Return the path of a results file.
1132
+
1133
+ Args:
1134
+ grid_id: Grid identifier.
1135
+ light_path: Identifier of the light path.
1136
+ state_identifier: State identifier.
1137
+ res_type: Which type of result to return a file for. E.g., 'total' for total
1138
+ illuminance or 'direct' for direct illuminance.
1139
+ extension: File extension of the array to load. Defaults to '.npy'.
1140
+
1141
+ Returns:
1142
+ Path to a NumPy file.
1143
+ """
1144
+ file = Path(self.folder, light_path, state_identifier,
1145
+ res_type, grid_id + extension)
1146
+ if not file.is_file():
1147
+ raise FileNotFoundError(f'File {file} not found in the results folder.')
1148
+
1149
+ return file
1150
+
1151
+ def _validate_dynamic_states(self, states: dict) -> dict:
1152
+ """Validate dynamic states and return states dictionary.
1153
+
1154
+ If all light paths in the dictionary have 8760 values, the states
1155
+ dictionary is returned as is. If some light paths have less than 8760
1156
+ values, pattern of values will be repeated until it reaches a length of
1157
+ 8760.
1158
+
1159
+ Args:
1160
+ states: A dictionary of states.
1161
+
1162
+ Returns:
1163
+ dict: A dictionary of states.
1164
+ """
1165
+ if all(len(values) == 8760 for values in states.values()):
1166
+ return states
1167
+ for light_path, values in states.items():
1168
+ if len(values) < 8760:
1169
+ states[light_path] = list(islice(cycle(values), 8760))
1170
+ elif len(values) > 8760:
1171
+ error_message = (
1172
+ f'The light path {light_path} has {len(values)} values in '
1173
+ f'its states schedule. Maximum allowed number of values '
1174
+ f'is 8760.'
1175
+ )
1176
+ raise ValueError(error_message)
1177
+
1178
+ return states
1179
+
1180
+ def _validate_states(self, states: dict) -> dict:
1181
+ """Validate states and return states dictionary.
1182
+
1183
+ If all light paths in the dictionary have integers only as values, the
1184
+ states dictionary is returned as is. If some light paths have values
1185
+ that are not integers, these values will be mapped as integers if
1186
+ possible, e.g., if the values are strings ('0', '1', etc.) instead of
1187
+ integers.
1188
+
1189
+ Args:
1190
+ states: A dictionary of states.
1191
+
1192
+ Returns:
1193
+ dict: A dictionary of states.
1194
+ """
1195
+ if all(isinstance(v, int) for values in states.values() for v in values):
1196
+ return states
1197
+ for light_path, values in states.items():
1198
+ try:
1199
+ states[light_path] = list(map(int, values))
1200
+ except ValueError as err:
1201
+ error_message = (
1202
+ f'Failed to convert states schedule for light path '
1203
+ f'{light_path} to integers.'
1204
+ )
1205
+ raise ValueError(error_message) from err
1206
+
1207
+ return states
1208
+
1209
+ def _filter_grid_states(self, grid_info, states: DynamicSchedule = None) -> DynamicSchedule:
1210
+ """Filter a dictionary of states by grid. Only light paths relevant to
1211
+ the given grid will be returned.
1212
+
1213
+ Args:
1214
+ grid_info: Grid information.
1215
+ states: A dictionary of states. Light paths as keys and lists of
1216
+ 8760 values for each key. The values should be integers
1217
+ matching the states or -1 for off. Default to None.
1218
+
1219
+ Returns:
1220
+ dict: A filtered states dictionary.
1221
+ """
1222
+ light_paths = []
1223
+ for lp in grid_info['light_path']:
1224
+ for _lp in lp:
1225
+ if _lp == '__static_apertures__' and len(lp) > 1:
1226
+ pass
1227
+ else:
1228
+ light_paths.append(_lp)
1229
+ if states:
1230
+ states = states.filter_by_identifiers(light_paths)
1231
+ else:
1232
+ default_states = self.default_states
1233
+ states = DynamicSchedule()
1234
+ for light_path in light_paths:
1235
+ ap_group_schedule = ApertureGroupSchedule(
1236
+ light_path, default_states[light_path], is_static=True)
1237
+ states.add_aperture_group_schedule(ap_group_schedule)
1238
+
1239
+ return states
1240
+
1241
    def _array_from_states(
            self, grid_info, states: DynamicSchedule = None,
            res_type: str = 'total', zero_array: bool = False
            ) -> np.ndarray:
        """Create an array for a given grid by the states settings.

        The contributions of all relevant light paths (static or dynamic) are
        summed into a single (sensor count, sun up hours) array.

        Args:
            grid_info: Grid information of the grid.
            states: A dictionary of states. Light paths as keys and lists of 8760 values
                for each key. The values should be integers matching the states or -1 for
                off.
            res_type: Which type of result to create an array for. E.g., 'total'
                for total illuminance or 'direct' for direct illuminance.
            zero_array: Boolean to note if a 2D zero array should be created if
                the array of the grid is zero. This is the case if the
                illuminance of the grid is zero. (Default: False).

        Returns:
            A NumPy array based on the states settings. NOTE: if all values are
            zero and zero_array is False, an empty 1D array is returned as a
            sentinel instead of a 2D zero array.
        """
        # get states that are relevant for the grid
        states = self._filter_grid_states(grid_info, states=states)

        arrays = []
        for light_path, gr_schedule in states.dynamic_schedule.items():
            if gr_schedule.is_static:
                state = gr_schedule.schedule[0]
                # if state is -1 we continue since it is "turned off"
                if state == -1:
                    continue
                # load static array (state is static)
                array = self._get_array(
                    grid_info, light_path, state=state, res_type=res_type)
                arrays.append(array)
            else:
                # slice states to match sun up hours
                # NOTE(review): sun up hours are cast to int before indexing
                # the annual schedule; assumes hour values map directly to
                # schedule indices — confirm for timesteps > 1
                states_array = np.array(gr_schedule.schedule)[
                    np.array(self.sun_up_hours, int)]

                unique_states = np.unique(states_array)
                unique_states = unique_states[unique_states != -1]  # skip -1
                temp_arrays = []
                for state in unique_states:
                    state = int(state)
                    # load static array (state is static)
                    _array = self._get_array(
                        grid_info, light_path, state=state, res_type=res_type)
                    # get indices and add values to base array
                    # only the timesteps where this state is active contribute
                    state_indices = (states_array == state)
                    masked_array = np.zeros_like(_array)
                    masked_array[:, state_indices] = _array[:, state_indices]
                    temp_arrays.append(masked_array)
                if temp_arrays:
                    # per-state contributions are disjoint in time; summing
                    # stitches them into one array for the light path
                    array = np.sum(np.stack(temp_arrays), axis=0)
                else:
                    # every timestep had state -1, i.e., the light path is off
                    array = np.zeros((grid_info['count'], len(self.sun_up_hours)))
                arrays.append(array)

        if len(arrays) == 0:
            # no contributing light paths at all
            array = np.zeros((grid_info['count'], len(self.sun_up_hours)))
        else:
            # sum the contributions of all light paths
            array = np.sum(np.stack(arrays, axis=0), axis=0)

        if not np.any(array):
            if not zero_array:
                # all-zero result: return an empty array as a cheap sentinel
                array = np.asarray([])

        return array
1309
+
1310
+ def _update_occ(self):
1311
+ """Set properties related to occupancy."""
1312
+ occ_mask = np.array(self.schedule, dtype=int)[self.sun_up_hours_mask]
1313
+ sun_down_sch = \
1314
+ np.array(self.schedule, dtype=int)[self.sun_down_hours_mask].sum()
1315
+
1316
+ self._occ_mask = occ_mask
1317
+ self._total_occ = sum(self.schedule)
1318
+ self._sun_down_occ_hours = sun_down_sch
1319
+
1320
+ def _filter_grids(self, grids_filter: str = '*') -> list:
1321
+ """Return grids information.
1322
+
1323
+ Args:
1324
+ grids_filter: The name of a grid or a pattern to filter the grids.
1325
+ Defaults to '*'.
1326
+
1327
+ Returns:
1328
+ list: List of grid information for filtered grids.
1329
+ """
1330
+ if grids_filter != '*':
1331
+ grids_info = \
1332
+ _filter_grids_by_pattern(self.grids_info, grids_filter)
1333
+ else:
1334
+ grids_info = self.grids_info
1335
+
1336
+ return grids_info
1337
+
1338
+ def _load_arrays(self) -> dict:
1339
+ """Load all the NumPy arrays in the results folder."""
1340
+ arrays = {}
1341
+ grids_info = self.grids_info
1342
+
1343
+ for grid_info in grids_info:
1344
+ grid_id = grid_info['full_id']
1345
+ light_paths = grid_info['light_path']
1346
+ arrays[grid_id] = {}
1347
+ for light_path in light_paths:
1348
+ light_path = light_path[0]
1349
+ arrays[grid_id][light_path] = {}
1350
+ light_path_folder = Path(self.folder, light_path)
1351
+ for state_folder in Path(light_path_folder).iterdir():
1352
+ state = state_folder.name
1353
+ arrays[grid_id][light_path][state] = {}
1354
+ for res_type_folder in Path(state_folder).iterdir():
1355
+ res_type = res_type_folder.name
1356
+ file = Path(res_type_folder, grid_id + '.npy')
1357
+ array = np.load(file)
1358
+ arrays[grid_id][light_path][state][res_type] = array
1359
+
1360
+ return arrays
1361
+
1362
+ def _get_valid_states(self) -> dict:
1363
+ """Returns a dictionary with valid states for each light path.
1364
+
1365
+ For each light path there will be a key (identifier of the light path)
1366
+ and its value will be a list of valid states as integers.
1367
+
1368
+ Example of output format:
1369
+ {
1370
+ '__static_apertures__': [0],
1371
+ 'Room1_North': [0, 1],
1372
+ 'Room1_South': [0, 1],
1373
+ 'Room2_North1': [0, 1],
1374
+ 'Room2_North2': [0, 1]
1375
+ }
1376
+
1377
+ Returns:
1378
+ dict: Valid states integers for each light path.
1379
+ """
1380
+ valid_states = {}
1381
+ grid_states = self.grid_states
1382
+ if '__static_apertures__' in self.light_paths:
1383
+ valid_states['__static_apertures__'] = [0]
1384
+ for light_paths in grid_states.values():
1385
+ for light_path, states in light_paths.items():
1386
+ if light_path not in valid_states:
1387
+ valid_states[light_path] = list(range(len(states)))
1388
+
1389
+ return valid_states
1390
+
1391
+ def _light_paths_from_grid_info(self, grid_info: Union[dict, str]) -> list:
1392
+ if isinstance(grid_info, str):
1393
+ for _grid_info in self.grids_info:
1394
+ if _grid_info['full_id'] == grid_info:
1395
+ grid_info = _grid_info
1396
+ break
1397
+ else:
1398
+ raise Exception(f'Grid info with full_id "{grid_info}" not found.')
1399
+ light_paths = []
1400
+ for lp in grid_info['light_path']:
1401
+ for _lp in lp:
1402
+ if _lp == '__static_apertures__' and len(lp) > 1:
1403
+ pass
1404
+ else:
1405
+ light_paths.append(_lp)
1406
+
1407
+ return light_paths
1408
+
1409
+ def _get_state_combinations(self, grid_info: Union[dict, str]) -> List[dict]:
1410
+ light_paths = self._light_paths_from_grid_info(grid_info)
1411
+ valid_states = self._get_valid_states()
1412
+ filtered_states = {lp: valid_states[lp] for lp in light_paths}
1413
+ keys, values = zip(*filtered_states.items())
1414
+ combinations = [dict(zip(keys, v)) for v in itertools.product(*values)]
1415
+
1416
+ return combinations