honeybee-radiance-postprocess 0.4.572__py3-none-any.whl → 0.4.581__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
honeybee_radiance_postprocess/cli/postprocess.py
@@ -505,6 +505,76 @@ def annual_to_data(
     sys.exit(0)
 
 
+@post_process.command('annual-summary')
+@click.argument(
+    'folder',
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True)
+)
+@click.option(
+    '--hoys-file', '-h', help='Path to an HOYs file. Values must be separated by '
+    'new line. If not provided the data will not be filtered by HOYs.',
+    type=click.Path(exists=False, file_okay=True, dir_okay=False, resolve_path=True)
+)
+@click.option(
+    '--states', '-st', help='A JSON file with a dictionary of states. If states '
+    'are not provided the default states will be used for any aperture groups.',
+    default=None, show_default=True,
+    type=click.Path(exists=False, file_okay=True, dir_okay=False, resolve_path=True)
+)
+@click.option(
+    '--grids-filter', '-gf', help='A pattern to filter the grids.', default='*',
+    show_default=True
+)
+@click.option(
+    '--total/--direct', is_flag=True, default=True, help='Switch between total '
+    'and direct results. Default is total.'
+)
+@click.option(
+    '--sensor/--timestep', is_flag=True, default=True, help='Switch between results '
+    'for each sensor or timestep. Default is sensor.'
+)
+
+@click.option(
+    '--sub-folder', '-sf', help='Optional relative path for subfolder to write output '
+    'metric files.', default='metrics'
+)
+def annual_summary(
+    folder, hoys_file, states, grids_filter, total, sensor, sub_folder
+):
+    """Get annual summary statistics (average, median, minimum, maximum, cumulative).
+
+    \b
+    Args:
+        folder: Results folder. This folder is an output folder of annual daylight
+            recipe. Folder should include grids_info.json and sun-up-hours.txt. The
+            command uses the list in grids_info.json to find the result files for each
+            sensor grid.
+    """
+    try:
+        if hoys_file:
+            with open(hoys_file) as hoys:
+                hoys = [float(h) for h in hoys.readlines()]
+        else:
+            hoys = []
+
+        if states:
+            states = DynamicSchedule.from_json(states)
+
+        res_type = 'total' if total is True else 'direct'
+
+        axis = 1 if sensor is True else 0
+
+        results = AnnualDaylight(folder)
+        results.annual_summary_to_folder(
+            sub_folder, hoys=hoys, states=states, grids_filter=grids_filter,
+            res_type=res_type, axis=axis)
+    except Exception:
+        _logger.exception('Failed to calculate annual summary.')
+        sys.exit(1)
+    else:
+        sys.exit(0)
+
+
 @post_process.command('point-in-time')
 @click.argument(
     'folder',
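The new `annual-summary` command above simply wires these options into `Results.annual_summary_to_folder` (shown later in this diff). A minimal sketch of exercising it in-process with click's test runner, assuming the package is installed and a hypothetical `./results` folder produced by the annual daylight recipe:

```python
from click.testing import CliRunner

from honeybee_radiance_postprocess.cli.postprocess import annual_summary

# './results' is a hypothetical annual-daylight results folder; per the
# command docstring it must contain grids_info.json and sun-up-hours.txt.
runner = CliRunner()
result = runner.invoke(
    annual_summary,
    ['./results', '--total', '--sensor', '--sub-folder', 'metrics']
)
print(result.exit_code)  # 0 on success, 1 if the command logged an exception
```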
@@ -1030,7 +1100,7 @@ def convert_matrix_to_binary(
     if not comply:
         # this will invert the boolean array
         boolean_array = ~boolean_array
-
+
     binary_array = boolean_array.astype(int)
     output_file = Path(output_folder, name)
     output_file.parent.mkdir(parents=True, exist_ok=True)
honeybee_radiance_postprocess/cli/well.py
@@ -83,7 +83,7 @@ def well_daylight_vis(output_folder):
     metric_info_dict = {
         'L01': {
             'type': 'VisualizationMetaData',
-            'data_type': GenericType('sDA200,40%', '').to_dict(),
+            'data_type': GenericType('sDA150,50%', '').to_dict(),
             'unit': '',
             'legend_parameters': pass_fail_lpar.to_dict()
         },
honeybee_radiance_postprocess/metrics.py
@@ -25,7 +25,7 @@ def da_array2d(
         threshold: Threshold value for daylight autonomy. Default: 300.
 
     Returns:
-        A 1-dimensional NumPy array with the daylight autonomy for each row in
+        A 1D NumPy array with the daylight autonomy for each row in
         the input array.
     """
     check_array_dim(array, 2)
@@ -72,7 +72,7 @@ def cda_array2d(
         threshold: Threshold value for continuous daylight autonomy. Default: 300.
 
     Returns:
-        A 1-dimensional NumPy array with the continuos daylight autonomy for
+        A 1D NumPy array with the continuous daylight autonomy for
         each row in the input array.
     """
     check_array_dim(array, 2)
@@ -125,7 +125,7 @@ def udi_array2d(
         max_t: Maximum threshold for useful daylight illuminance. Default: 3000.
 
     Returns:
-        A 1-dimensional NumPy array with the useful daylight illuminance for
+        A 1D NumPy array with the useful daylight illuminance for
         each row in the input array.
     """
     check_array_dim(array, 2)
@@ -178,7 +178,7 @@ def udi_lower_array2d(
         sun_down_occ_hours: Number of occupied hours where the sun is down.
 
     Returns:
-        A 1-dimensional NumPy array with the lower than useful daylight
+        A 1D NumPy array with the lower than useful daylight
         illuminance for each row in the input array.
     """
     check_array_dim(array, 2)
@@ -237,7 +237,7 @@ def udi_upper_array2d(
         max_t: Maximum threshold for useful daylight illuminance. Default: 3000.
 
     Returns:
-        A 1-dimensional NumPy array with the higher than useful daylight
+        A 1D NumPy array with the higher than useful daylight
         illuminance for each row in the input array.
     """
     check_array_dim(array, 2)
@@ -330,20 +330,28 @@ def ase_array2d(
 
 
 def average_values_array2d(
-        array: np.ndarray, full_length: int = 8760) -> np.ndarray:
+        array: np.ndarray, full_length: int = 8760, axis: int = 1) -> np.ndarray:
     """Calculate average values for a 2D NumPy array.
 
+    If axis is 1, the average value for each row is calculated (each sensor). In
+    this case the full length should correspond to the number of timesteps in the
+    analysis.
+
+    If axis is 0, the average value for each column is calculated (each timestep).
+    In this case the full length should correspond to the number of sensors.
+
     Args:
         array: A 2D NumPy array.
         full_length: Integer to use as divisor.
+        axis: Axis along which to compute the average. Default is 1.
 
     Returns:
-        A 1-dimensional NumPy array with the average value for each row in the
-        input array.
+        A 1D NumPy array with the average value for each row or column
+        in the input array.
     """
     check_array_dim(array, 2)
 
-    avg_values = array.sum(axis=1) / full_length
+    avg_values = array.sum(axis=axis) / full_length
 
     return avg_values
 
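The docstring above pins down the `axis` convention: rows are sensors and columns are timesteps. A small NumPy sketch of the two modes, with made-up values and divisors:

```python
import numpy as np

# Hypothetical result matrix: 3 sensors (rows) x 4 sun-up timesteps (columns).
array = np.array([
    [100., 200., 300., 400.],
    [0., 50., 100., 150.],
    [10., 20., 30., 40.],
])

# axis=1: one average per sensor; full_length is the number of study
# timesteps (here 8: the 4 sun-up hours plus 4 hypothetical sun-down hours).
per_sensor = array.sum(axis=1) / 8    # -> [125. , 37.5, 12.5]

# axis=0: one average per timestep; full_length is the sensor count.
per_timestep = array.sum(axis=0) / 3  # -> [ 36.67, 90. , 143.33, 196.67]
```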
@@ -365,22 +373,26 @@ def average_values_array1d(
 
 
 def cumulative_values_array2d(
-        array: np.ndarray, timestep: int = 1, t_step_multiplier: float = 1
-        ) -> np.ndarray:
+        array: np.ndarray, timestep: int = 1, t_step_multiplier: float = 1,
+        axis: int = 1) -> np.ndarray:
     """Calculate cumulative values for a 2D NumPy array.
 
     Args:
         array: A 2D NumPy array.
         timestep: Integer for the timestep of the analysis.
         t_step_multiplier: A value that will be multiplied with the timestep.
+        axis: Axis along which to compute the cumulative value. Default is 1.
 
     Returns:
-        A 1-dimensional NumPy array with the cumulative value for each row in
+        A 1D NumPy array with the cumulative value for each row or column in
         the input array.
     """
     check_array_dim(array, 2)
 
-    cumulative_values = array.sum(axis=1) / (timestep * t_step_multiplier)
+    if axis == 1:
+        cumulative_values = array.sum(axis=axis) / (timestep * t_step_multiplier)
+    else:
+        cumulative_values = array.sum(axis=axis)
 
     return cumulative_values
 
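Note the asymmetry introduced here: only the per-sensor sum (axis=1) is divided by `timestep * t_step_multiplier`, since summing a sensor's sub-hourly samples over the year would otherwise over-count relative to hourly data, while a per-timestep sum across sensors (axis=0) needs no such normalization. A sketch with made-up numbers:

```python
import numpy as np

# Hypothetical matrix: 2 sensors x 4 columns from a 2-steps-per-hour
# analysis (timestep=2), so each column covers half an hour.
array = np.array([
    [100., 100., 200., 200.],
    [50., 50., 50., 50.],
])
timestep, t_step_multiplier = 2, 1

# axis=1: cumulative value per sensor, normalized by the timestep so the
# half-hour samples are not double counted.
per_sensor = array.sum(axis=1) / (timestep * t_step_multiplier)  # -> [300., 100.]

# axis=0: cumulative value per timestep across sensors; no normalization.
per_timestep = array.sum(axis=0)  # -> [150., 150., 250., 250.]
```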
@@ -416,7 +428,7 @@ def peak_values_array2d(
         at a particular timestep (True).
 
     Returns:
-        A 1-dimensional NumPy array with the peak value for each row in the
+        A 1D NumPy array with the peak value for each row in the
         input array, and the index of the maximum value representing the
         timestep in the array with the largest value.
     """
honeybee_radiance_postprocess/results/results.py
@@ -589,20 +589,8 @@ class Results(_ResultsFolder):
             array = self._array_from_states(grid_info, states=states, res_type=res_type)
             if np.any(array):
                 array_filter = filter_array2d(array, mask=mask)
-                if not hoys:
-                    # concatenate zero array
-                    zero_array = \
-                        np.zeros((grid_info['count'], len(self.sun_down_hours)))
-                    array_filter = np.concatenate((array_filter, zero_array), axis=1)
-                else:
-                    # find number of hoys that are sun down hours
-                    sdh_hoys = \
-                        len(set(self.sun_down_hours).intersection(hoys))
-                    if sdh_hoys != 0:
-                        # concatenate zero array
-                        zero_array = np.zeros((grid_info['count'], sdh_hoys))
-                        array_filter = \
-                            np.concatenate((array_filter, zero_array), axis=1)
+                array_filter = self.pad_array_for_median(
+                    array_filter, hoys, self.sun_down_hours, grid_info['count'])
                 results = np.median(array_filter, axis=1)
             else:
                 results = np.zeros(grid_info['count'])
@@ -809,6 +797,208 @@ class Results(_ResultsFolder):
         info_file = metric_folder.joinpath('grids_info.json')
         info_file.write_text(json.dumps(grids_info))
 
+    def annual_summary(
+            self, hoys: list = None, states: DynamicSchedule = None, grids_filter: str = '*',
+            res_type: str = 'total', axis: int = 1):
+        """Compute annual summary statistics (average, median, minimum, maximum,
+        cumulative) for each sensor or timestep.
+
+        If hoys is left as None, the average will be computed for all study hours,
+        i.e., including sun down hours. This will likely lead to low average
+        and median values for a usual annual study.
+
+        Args:
+            hoys: An optional list of numbers to select the hours of the year
+                (HOYs) for which results will be computed.
+            states: A dictionary of states. Defaults to None.
+            grids_filter: The name of a grid or a pattern to filter the grids.
+                Defaults to '*'.
+            res_type: Type of results to load. Defaults to 'total'.
+            axis: Axis along which statistics are computed:
+                - 1: compute per-sensor statistics over time (default)
+                - 0: compute per-timestep statistics across sensors; results are
+                  converted into HourlyContinuousCollection objects.
+
+        Returns:
+            A tuple of the form:
+            (
+                average_values,
+                median_values,
+                minimum_values,
+                maximum_values,
+                cumulative_values,
+                grids_info
+            )
+
+            Where each element except grids_info is a list with one entry per
+            sensor grid:
+            - If axis == 1: each entry is a 1D NumPy array of per-sensor values.
+            - If axis == 0: each entry is an HourlyContinuousCollection.
+
+            grids_info contains metadata for each processed grid.
+        """
+        hoys = [] if hoys is None else hoys
+        grids_info = self._filter_grids(grids_filter=grids_filter)
+        mask = hoys_mask(self.sun_up_hours, hoys)
+
+        analysis_period = AnalysisPeriod(timestep=self.timestep)
+
+        average_values = []
+        median_values = []
+        minimum_values = []
+        maximum_values = []
+        cumulative_values = []
+        for grid_info in grids_info:
+            array = self._array_from_states(grid_info, states=states, res_type=res_type)
+            if np.any(array):
+                array_filter = filter_array2d(array, mask=mask)
+                if axis == 1:
+                    full_length = len(self.study_hours) if not hoys else len(hoys)
+                else:
+                    full_length = grid_info['count']
+                _average_values = average_values_array2d(array_filter, full_length, axis=axis)
+                if axis == 1:
+                    median_array_filter = self.pad_array_for_median(
+                        array_filter, hoys, self.sun_down_hours, grid_info['count'])
+                else:
+                    median_array_filter = array_filter
+                _median_values = np.median(median_array_filter, axis=axis)
+                _minimum_values = np.amin(array_filter, axis=axis)
+                _maximum_values = np.amax(array_filter, axis=axis)
+                _cumulative_values = cumulative_values_array2d(
+                    array_filter, self.timestep, axis=axis)
+            else:
+                if axis == 1:
+                    _average_values = np.zeros(grid_info['count'])
+                    _median_values = np.zeros(grid_info['count'])
+                    _minimum_values = np.zeros(grid_info['count'])
+                    _maximum_values = np.zeros(grid_info['count'])
+                    _cumulative_values = np.zeros(grid_info['count'])
+                else:
+                    _average_values = np.zeros(len(self.sun_up_hours))[mask]
+                    _median_values = np.zeros(len(self.sun_up_hours))[mask]
+                    _minimum_values = np.zeros(len(self.sun_up_hours))[mask]
+                    _maximum_values = np.zeros(len(self.sun_up_hours))[mask]
+                    _cumulative_values = np.zeros(len(self.sun_up_hours))[mask]
+
+            if axis == 0:  # convert values to data collections
+                header = Header(self.datatype, self.unit, analysis_period)
+                header.metadata['Sensor Grid'] = grid_info['full_id']
+                sun_up_hours = np.array(self.sun_up_hours)[mask].ravel()
+
+                annual_array = self.values_to_annual(
+                    sun_up_hours, _average_values, self.timestep)
+                average_header = header.duplicate()
+                average_header.metadata['Metric'] = 'Average'
+                _average_values = HourlyContinuousCollection(average_header, annual_array.tolist())
+
+                annual_array = self.values_to_annual(
+                    sun_up_hours, _median_values, self.timestep)
+                median_header = header.duplicate()
+                median_header.metadata['Metric'] = 'Median'
+                _median_values = HourlyContinuousCollection(median_header, annual_array.tolist())
+
+                annual_array = self.values_to_annual(
+                    sun_up_hours, _minimum_values, self.timestep)
+                minimum_header = header.duplicate()
+                minimum_header.metadata['Metric'] = 'Minimum'
+                _minimum_values = HourlyContinuousCollection(minimum_header, annual_array.tolist())
+
+                annual_array = self.values_to_annual(
+                    sun_up_hours, _maximum_values, self.timestep)
+                maximum_header = header.duplicate()
+                maximum_header.metadata['Metric'] = 'Maximum'
+                _maximum_values = HourlyContinuousCollection(maximum_header, annual_array.tolist())
+
+                annual_array = self.values_to_annual(
+                    sun_up_hours, _cumulative_values, self.timestep)
+                cumulative_header = header.duplicate()
+                cumulative_header.metadata['Metric'] = 'Cumulative'
+                _cumulative_values = HourlyContinuousCollection(
+                    cumulative_header, annual_array.tolist())
+
+            average_values.append(_average_values)
+            median_values.append(_median_values)
+            minimum_values.append(_minimum_values)
+            maximum_values.append(_maximum_values)
+            cumulative_values.append(_cumulative_values)
+
+        return (average_values, median_values, minimum_values, maximum_values,
+                cumulative_values, grids_info)
+
+    def annual_summary_to_folder(
+            self, target_folder: str, hoys: list = None, states: DynamicSchedule = None,
+            grids_filter: str = '*', res_type: str = 'total', axis: int = 1):
+        """Compute annual summary statistics (average, median, minimum, maximum,
+        cumulative) for each sensor or timestep and write the values to a folder.
+
+        If hoys is left as None, the average will be computed for all study hours,
+        i.e., including sun down hours. This will likely lead to low average
+        and median values for a usual annual study.
+
+        Args:
+            target_folder: Folder path to write annual metrics in. Usually this
+                folder is called 'metrics'.
+            hoys: An optional list of numbers to select the hours of the year
+                (HOYs) for which results will be computed.
+            states: A dictionary of states. Defaults to None.
+            grids_filter: The name of a grid or a pattern to filter the grids.
+                Defaults to '*'.
+            res_type: Type of results to load. Defaults to 'total'.
+            axis: Axis along which statistics are computed:
+                - 1: compute per-sensor statistics over time (default)
+                - 0: compute per-timestep statistics across sensors; results are
+                  converted into HourlyContinuousCollection objects.
+        """
+        folder = Path(target_folder)
+        folder.mkdir(parents=True, exist_ok=True)
+
+        hoys = [] if hoys is None else hoys
+
+        (
+            average_values,
+            median_values,
+            minimum_values,
+            maximum_values,
+            cumulative_values,
+            grids_info
+        ) = self.annual_summary(
+            hoys=hoys, states=states, grids_filter=grids_filter,
+            res_type=res_type, axis=axis
+        )
+
+        metrics = {
+            'average': average_values,
+            'median': median_values,
+            'minimum': minimum_values,
+            'maximum': maximum_values,
+            'cumulative': cumulative_values,
+        }
+
+        for metric_name in metrics:
+            metric_folder = folder.joinpath(f'{metric_name}_values')
+            metric_folder.mkdir(parents=True, exist_ok=True)
+            metric_folder.joinpath('grids_info.json').write_text(json.dumps(grids_info))
+
+        for idx, grid_info in enumerate(grids_info):
+            full_id = grid_info['full_id']
+            if axis == 1:
+                for metric_name, metric_values in metrics.items():
+                    data = metric_values[idx]
+                    metric_folder = folder.joinpath(f'{metric_name}_values')
+                    out_file = metric_folder.joinpath(f'{full_id}.{metric_name}')
+                    out_file.parent.mkdir(parents=True, exist_ok=True)
+                    np.savetxt(out_file, data, fmt="%.2f")
+            else:
+                for metric_name, metric_values in metrics.items():
+                    collection = metric_values[idx]
+                    data_dict = collection.to_dict()
+
+                    metric_folder = folder.joinpath(f'{metric_name}_values')
+                    out_file = metric_folder.joinpath(f'{full_id}_{metric_name}.json')
+                    out_file.parent.mkdir(parents=True, exist_ok=True)
+                    out_file.write_text(json.dumps(data_dict))
+
     def _array_to_annual_data(
             self, grid_info, states: DynamicSchedule = None,
             sensor_index: list = None, res_type: str = 'total'
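Together, these two methods give the summary feature both a programmatic and a file-based entry point. A minimal sketch of calling them directly, assuming `AnnualDaylight` (the class the new CLI command instantiates) is importable from `honeybee_radiance_postprocess.results.annual_daylight` as listed in the RECORD below, and a hypothetical `./results` folder:

```python
from honeybee_radiance_postprocess.results.annual_daylight import AnnualDaylight

results = AnnualDaylight('./results')

# axis=1: per-sensor statistics, written as one text file per grid, e.g.
# metrics/average_values/<full_id>.average (one value per sensor).
results.annual_summary_to_folder('metrics', axis=1)

# axis=0: per-timestep statistics, written as HourlyContinuousCollection
# JSON files, e.g. metrics/median_values/<full_id>_median.json
results.annual_summary_to_folder('metrics', axis=0)
```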
@@ -937,7 +1127,8 @@ class Results(_ResultsFolder):
     def values_to_annual(
             hours: Union[List[float], np.ndarray],
             values: Union[List[float], np.ndarray],
-            timestep: int, base_value: int = 0,
+            timestep: int,
+            base_value: int = 0,
             dtype: np.dtype = np.float32) -> np.ndarray:
         """Map a 1D NumPy array based on a set of hours to an annual array.
 
@@ -962,7 +1153,8 @@ class Results(_ResultsFolder):
         if not isinstance(hours, np.ndarray):
             hours = np.array(hours)
         check_array_dim(values, 1)
-        assert hours.shape == values.shape
+        assert hours.shape == values.shape, \
+            (f'Shape of hours {hours.shape} must be the same as shape of values {values.shape}.')
 
         full_ap = np.array(AnalysisPeriod(timestep=timestep).hoys)
         indices = np.where(np.isin(full_ap, hours))[0]
@@ -972,6 +1164,37 @@ class Results(_ResultsFolder):
 
         return annual_array
 
+    @staticmethod
+    def pad_array_for_median(
+            array: np.ndarray, hoys: list, sun_down_hours: list, sensor_count: int
+            ) -> np.ndarray:
+        """Pad a filtered 2D result array with zeros so that median values correctly
+        account for sun-down hours.
+
+        - If no HOYs are given, pad with all sun-down hours.
+        - If HOYs are given, pad only with those HOYs that fall in sun-down hours.
+
+        Args:
+            array: 2D NumPy array of shape (sensor_count, N_filtered_hours).
+            hoys: List of selected hours-of-year (empty means all sun-up hours).
+            sun_down_hours: List of HOYs when the sun is down.
+            sensor_count: Number of sensors in the grid (array rows).
+
+        Returns:
+            A new 2D NumPy array padded with zero columns for sun-down hours.
+        """
+        if not hoys:
+            sdh_count = len(sun_down_hours)
+        else:
+            sdh_count = len(set(sun_down_hours).intersection(hoys))
+
+        if sdh_count == 0:
+            return array
+
+        zero_pad = np.zeros((sensor_count, sdh_count))
+
+        return np.concatenate((array, zero_pad), axis=1)
+
     def _index_from_datetime(self, datetime: DateTime) -> Union[int, None]:
         """Returns the index of the input datetime in the list of datetimes
         from the datetimes property.
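The padding behavior is easy to verify in isolation. A sketch with made-up hours, assuming `Results` is importable from `honeybee_radiance_postprocess.results.results` as listed in the RECORD below:

```python
import numpy as np

from honeybee_radiance_postprocess.results.results import Results

# Hypothetical filtered array: 2 sensors x 3 sun-up hours, all ones.
array_filter = np.ones((2, 3))

# With no HOY filter and 4 sun-down hours, 4 zero columns are appended so
# the median is taken over the full 7-hour study period.
padded = Results.pad_array_for_median(array_filter, [], [0.5, 1.5, 2.5, 23.5], 2)
print(padded.shape)               # (2, 7)
print(np.median(padded, axis=1))  # [0. 0.] -- sun-down zeros pull the median down
```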
honeybee_radiance_postprocess-0.4.581.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: honeybee-radiance-postprocess
-Version: 0.4.572
+Version: 0.4.581
 Summary: Postprocessing of Radiance results and matrices
 Home-page: https://github.com/ladybug-tools/honeybee-radiance-postprocess
 Author: Ladybug Tools
@@ -16,7 +16,7 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Operating System :: OS Independent
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: honeybee-radiance==1.66.207
+Requires-Dist: honeybee-radiance==1.66.214
 Requires-Dist: numpy<2.0.0
 Dynamic: author
 Dynamic: author-email
honeybee_radiance_postprocess-0.4.581.dist-info/RECORD
@@ -8,7 +8,7 @@ honeybee_radiance_postprocess/dynamic.py,sha256=RPJh2SsjASYJCsG5QRkazVCvzWjzMxm9
 honeybee_radiance_postprocess/electriclight.py,sha256=f1HTIoYI8s8pOGdS50QHy9OA6_RVw7IwLYrIEvZTdvU,821
 honeybee_radiance_postprocess/en17037.py,sha256=ejb84XyquVgmYK35d5TuM8msfA6LDF4OH-p__36GcCw,10888
 honeybee_radiance_postprocess/helper.py,sha256=NzGWfEmYlSzg4CpTuB9PMRSpKCIbN7gsDLVcmj3pr0c,9428
-honeybee_radiance_postprocess/metrics.py,sha256=Q7sMfSoVH366vMm3JWjF8rMnU-nc50QyG5n2MAq4kSk,14772
+honeybee_radiance_postprocess/metrics.py,sha256=XvRDcAu9-5UHzJvafvegbtwab8-oPchu_lw03UgjylE,15303
 honeybee_radiance_postprocess/reader.py,sha256=fEuqU-87vloLofwMxdCLKV4ZXoW1eMOXydsTgUwMCFk,2762
 honeybee_radiance_postprocess/type_hints.py,sha256=39kLNk9FxcQVGdL6bkP49jIFsf_1brP6r6w3khl9hqc,1212
 honeybee_radiance_postprocess/util.py,sha256=h7fipG9it_cfIk-kFP3X_IassVmfmphPiLxpydW43R4,6295
@@ -23,13 +23,13 @@ honeybee_radiance_postprocess/cli/grid.py,sha256=IMjfyw-XPaoSu550T_BuulRSawla8kY
 honeybee_radiance_postprocess/cli/leed.py,sha256=vup_tVcSnSO5R7U_WPVDAhtSSRrPzTUgV4j9lyxvxEk,5546
 honeybee_radiance_postprocess/cli/merge.py,sha256=NgQ5RM1-GrJQs_25IPzBvQ5PAn6oTKiB_N9SeHBxgCA,5840
 honeybee_radiance_postprocess/cli/mtxop.py,sha256=pbQsc4EnzsRStBX1-KcgIKmuTlVYiG1IyNjiqbwS2V0,5103
-honeybee_radiance_postprocess/cli/postprocess.py,sha256=Qvr5gEB9oQH4-4jww2ttxP1bWseBydxzRHymyrpcPWQ,39445
+honeybee_radiance_postprocess/cli/postprocess.py,sha256=nR0Mk7f_WCmYjpqkcxKs9tN7EWj6moFsYKnj6vUlGS0,41890
 honeybee_radiance_postprocess/cli/schedule.py,sha256=6uIy98Co4zm-ZRcELo4Lfx_aN3lNiqPe-BSimXwt1F8,3877
 honeybee_radiance_postprocess/cli/translate.py,sha256=W3G5aBN7pGx5sX3VoV1xZM6HL06Fw95iJb7vV-r6u7w,7396
 honeybee_radiance_postprocess/cli/two_phase.py,sha256=njkWBRxOqdRSp0JxLloWTGWmKruyONetgnIbjXhrrXw,8393
 honeybee_radiance_postprocess/cli/util.py,sha256=Be9cGmYhcV2W37ma6SgQPCWCpWLLLlroxRYN_l58kY0,4077
 honeybee_radiance_postprocess/cli/viewfactor.py,sha256=RJ0ai9ykhdryYOgYkY9tAim9MhXPD1gVAR3Js9UaEgE,5380
-honeybee_radiance_postprocess/cli/well.py,sha256=wWz-aWon4E_tLar0mepolW8WJdk6hF869wZPZSkbNrE,3772
+honeybee_radiance_postprocess/cli/well.py,sha256=ABEp7qlNZdNgoGnJuLGt53kpfUr2ptT7XZsLLKcbfS8,3772
 honeybee_radiance_postprocess/ies/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
 honeybee_radiance_postprocess/ies/lm.py,sha256=6oT4XzWwr6njKRBjY0xL9TzBqQUAeQLwzen57BHOp4E,9746
 honeybee_radiance_postprocess/ies/lm_schedule.py,sha256=nZlORbRQvPiA8VeRfh7ML8cMAYUxmHOgsNCxAHuOr18,10042
@@ -39,12 +39,12 @@ honeybee_radiance_postprocess/leed/leed_schedule.py,sha256=8NUISE6RbENK1IgMA5cC2
 honeybee_radiance_postprocess/results/__init__.py,sha256=1agBQbfT4Tf8KqSZzlfKYX8MeZryY4jJ1KB4HWqaDDk,182
 honeybee_radiance_postprocess/results/annual_daylight.py,sha256=ulv8kZvS6UkhCjaXSJKXQeVhUReRdx4M0tGvy7rA8CI,34586
 honeybee_radiance_postprocess/results/annual_irradiance.py,sha256=nA1VubesNDtRxgXpHSZxFMV67XxUntIHIu5H9qtDBSk,8483
-honeybee_radiance_postprocess/results/results.py,sha256=twfOF_ZNflxujWqpzigz4_pZ1vcSpF8cQxWIy7ajt0Y,56378
+honeybee_radiance_postprocess/results/results.py,sha256=9A_4i6Vl5R4ZBeCFsJZz9D_y_t2_XW3cWeMdShVIXVo,66852
 honeybee_radiance_postprocess/well/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
 honeybee_radiance_postprocess/well/well.py,sha256=FzxaLo3IdlvFZZ1UtblBk9zsKl_EfsCKIcAgfucq-nE,24762
-honeybee_radiance_postprocess-0.4.572.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
-honeybee_radiance_postprocess-0.4.572.dist-info/METADATA,sha256=5cmkcrdH95WiU6-zyKzNDGpsrgscwGeqGcbZ_dQxt8M,2575
-honeybee_radiance_postprocess-0.4.572.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-honeybee_radiance_postprocess-0.4.572.dist-info/entry_points.txt,sha256=gFtVPx6UItXt27GfEZZO00eOZChJJEL6JwGSAB_O3rs,96
-honeybee_radiance_postprocess-0.4.572.dist-info/top_level.txt,sha256=4-sFbzy7ewP2EDqJV3jeFlAFx7SuxtoBBELWaKAnLdA,30
-honeybee_radiance_postprocess-0.4.572.dist-info/RECORD,,
+honeybee_radiance_postprocess-0.4.581.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+honeybee_radiance_postprocess-0.4.581.dist-info/METADATA,sha256=VuGW8rd_g2nVrQ2nNaA9ZWsQmx_zfVdHaOhD5zbU2Jc,2575
+honeybee_radiance_postprocess-0.4.581.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+honeybee_radiance_postprocess-0.4.581.dist-info/entry_points.txt,sha256=gFtVPx6UItXt27GfEZZO00eOZChJJEL6JwGSAB_O3rs,96
+honeybee_radiance_postprocess-0.4.581.dist-info/top_level.txt,sha256=4-sFbzy7ewP2EDqJV3jeFlAFx7SuxtoBBELWaKAnLdA,30
+honeybee_radiance_postprocess-0.4.581.dist-info/RECORD,,