honeybee-radiance-postprocess 0.4.441__py2.py3-none-any.whl → 0.4.443__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,6 +9,7 @@ from .schedule import schedule
9
9
  from .translate import translate
10
10
  from .viewfactor import view_factor
11
11
  from .merge import merge
12
+ from .datacollection import datacollection
12
13
 
13
14
 
14
15
  # command group for all postprocess extension commands.
@@ -26,6 +27,7 @@ postprocess.add_command(schedule)
26
27
  postprocess.add_command(translate)
27
28
  postprocess.add_command(view_factor)
28
29
  postprocess.add_command(merge)
30
+ postprocess.add_command(datacollection, name='data-collection')
29
31
 
30
32
  # add postprocess sub-commands to honeybee CLI
31
33
  main.add_command(postprocess)
@@ -0,0 +1,128 @@
1
+ """Commands to work with data collections."""
2
+ import sys
3
+ import logging
4
+ import numpy as np
5
+ from pathlib import Path
6
+ import click
7
+ import json
8
+
9
+ from ladybug.datacollection import HourlyContinuousCollection, \
10
+ HourlyDiscontinuousCollection
11
+ from ladybug.header import Header
12
+ from ladybug.datautil import collections_to_csv
13
+
14
+
15
+ _logger = logging.getLogger(__name__)
16
+
17
+
18
@click.group(help='Commands to work with data collections.')
def datacollection():
    """Container group for the data-collection sub-commands."""
21
+
22
+
23
@datacollection.command('npy-to-datacollections')
@click.argument(
    'npy-file', type=click.Path(exists=True, dir_okay=False, resolve_path=True)
)
@click.argument(
    'data-type', type=click.Path(exists=True, dir_okay=False, resolve_path=True)
)
@click.argument(
    'grid-name', type=click.STRING
)
@click.option(
    '--output-file', '-f', help='Optional file to output the JSON strings of '
    'the data collections. By default, it will be printed to stdout',
    type=click.File('w'), default='-', show_default=True
)
def npy_to_datacollections(npy_file, data_type, grid_name, output_file):
    """Read an npy file and convert every row to a data collection.

    The data collections will be written as a JSON array. If no output file
    is specified it will be sent to stdout instead.

    \b
    Args:
        npy-file: Path to npy file.
        data-type: A JSON file with the data type (a ladybug Header dict).
        grid-name: The name of the grid. This is used in the metadata of the
            header.
    """
    with open(data_type) as json_file:
        data_header = Header.from_dict(json.load(json_file))
    a_per = data_header.analysis_period
    # a full-day analysis period maps each row to a continuous collection;
    # anything else needs the explicit datetimes of a discontinuous one
    continuous = a_per.st_hour == 0 and a_per.end_hour == 23
    if not continuous:
        dates = a_per.datetimes
    metadata = {'grid': grid_name}
    try:
        data_matrix = np.load(npy_file).tolist()
        grid_data = []
        for i, row in enumerate(data_matrix):
            # every sensor (row) gets its own header copy tagged with its index
            header = data_header.duplicate()
            header.metadata = metadata.copy()
            header.metadata['sensor_index'] = i
            data = HourlyContinuousCollection(header, row) if continuous else \
                HourlyDiscontinuousCollection(header, row, dates)
            grid_data.append(data.to_dict())
        output_file.write(json.dumps(grid_data))
    except Exception:
        _logger.exception('Failed to convert npy to data collections.')
        sys.exit(1)
    else:
        sys.exit(0)
74
+
75
+
76
@datacollection.command('folder-to-datacollections')
@click.argument(
    'folder', type=click.Path(exists=True, dir_okay=True, resolve_path=True)
)
@click.argument(
    'data-type', type=click.Path(exists=True, dir_okay=False, resolve_path=True)
)
@click.option(
    '--sub-folder', '-sf', type=click.STRING, default='datacollections',
    show_default=True
)
def folder_to_datacollections(folder, data_type, sub_folder):
    """Read the npy files of a results folder and convert rows to data collections.

    The npy file of each grid listed in the folder's grids_info.json is read,
    and the data collections are saved as one CSV file per grid in a subfolder.

    \b
    Args:
        folder: Path to a folder with npy files and a grids_info.json.
        data-type: A JSON file with the data type (a ladybug Header dict).
    """
    with open(Path(folder, 'grids_info.json')) as json_file:
        grid_list = json.load(json_file)
    with open(data_type) as json_file:
        data_header = Header.from_dict(json.load(json_file))
    a_per = data_header.analysis_period
    # a full-day analysis period maps each row to a continuous collection;
    # anything else needs the explicit datetimes of a discontinuous one
    continuous = a_per.st_hour == 0 and a_per.end_hour == 23
    if not continuous:
        dates = a_per.datetimes
    try:
        for grid in grid_list:
            # bug fix: fall back to the grid's 'id' value, not the literal
            # string 'id', when 'full_id' is absent from the grid info
            grid_name = grid['full_id'] if 'full_id' in grid else grid['id']
            metadata = {'grid': grid_name}
            grid_file = Path(folder, '{}.npy'.format(grid_name))
            data_matrix = np.load(grid_file).tolist()
            grid_data = []
            for i, row in enumerate(data_matrix):
                # every sensor (row) gets its own header copy tagged with its index
                header = data_header.duplicate()
                header.metadata = metadata.copy()
                header.metadata['sensor_index'] = i
                data = HourlyContinuousCollection(header, row) if continuous else \
                    HourlyDiscontinuousCollection(header, row, dates)
                grid_data.append(data)

            file_name = grid_name + '.csv'
            collections_to_csv(grid_data, Path(folder, sub_folder), file_name)
    except Exception:
        _logger.exception('Failed to convert folder of files to data collections.')
        sys.exit(1)
    else:
        sys.exit(0)
@@ -146,7 +146,8 @@ def merge_metrics_folder(input_folder, output_folder, dist_info, grids_info):
146
146
 
147
147
  def restore_original_distribution(
148
148
  input_folder, output_folder, extension='npy', dist_info=None,
149
- output_extension='ill', as_text=False, fmt='%.2f', delimiter='tab'):
149
+ output_extension='ill', as_text=False, fmt='%.2f', input_delimiter=',',
150
+ delimiter='tab'):
150
151
  """Restructure files to the original distribution based on the distribution info.
151
152
 
152
153
  It will assume that the files in the input folder are NumPy files. However,
@@ -166,6 +167,8 @@ def restore_original_distribution(
166
167
  as_text: Set to True if the output files should be saved as text instead
167
168
  of NumPy files.
168
169
  fmt: Format for the output files when saved as text.
170
+ input_delimiter: Delimiter for the input files. This is used only if the
171
+ input files are text files.
169
172
  delimiter: Delimiter for the output files when saved as text.
170
173
  """
171
174
  if not dist_info:
@@ -200,8 +203,16 @@ def restore_original_distribution(
200
203
  src_file = new_file
201
204
  try:
202
205
  array = np.load(src_file)
203
- except Exception:
204
- array = binary_to_array(src_file)
206
+ except:
207
+ try:
208
+ array = binary_to_array(src_file)
209
+ except:
210
+ try:
211
+ array = np.loadtxt(
212
+ src_file, delimiter=input_delimiter)
213
+ except Exception:
214
+ raise RuntimeError(
215
+ f'Failed to load input file "{src_file}"')
205
216
  slice_array = array[st:end+1,:]
206
217
 
207
218
  out_arrays.append(slice_array)
@@ -217,6 +228,8 @@ def restore_original_distribution(
217
228
  delimiter = '\t'
218
229
  elif delimiter == 'space':
219
230
  delimiter = ' '
231
+ elif delimiter == 'comma':
232
+ delimiter = ','
220
233
  np.savetxt(output_file.with_suffix(f'.{output_extension}'),
221
234
  out_array, fmt=fmt, delimiter=delimiter)
222
235
 
@@ -557,9 +557,11 @@ def leed_option_one(
557
557
  pass_sda_blinds_down_grids = []
558
558
  for grid_info in grids_info:
559
559
  light_paths = [lp[0] for lp in grid_info['light_path']]
560
- arrays = []
561
- arrays_blinds_up = []
562
- arrays_blinds_down = []
560
+ base_zero_array = np.apply_along_axis(filter_array, 1, np.zeros(
561
+ (grid_info['count'], len(results.sun_up_hours))), occ_mask)
562
+ arrays = [base_zero_array.copy()]
563
+ arrays_blinds_up = [base_zero_array.copy()]
564
+ arrays_blinds_down = [base_zero_array.copy()]
563
565
  # combine total array for all light paths
564
566
  if use_states:
565
567
  array = results._array_from_states(grid_info, states=states_schedule)
@@ -567,16 +569,24 @@ def leed_option_one(
567
569
 
568
570
  for light_path in light_paths:
569
571
  # do an extra pass to calculate with blinds always up or down
570
- array_blinds_up = results._get_array(
571
- grid_info, light_path, state=0, res_type='total')
572
- array_filter = np.apply_along_axis(
573
- filter_array, 1, array_blinds_up, occ_mask)
574
- arrays_blinds_up.append(array_filter)
575
- array_blinds_down = results._get_array(
576
- grid_info, light_path, state=1, res_type='total')
577
- array_filter = np.apply_along_axis(
578
- filter_array, 1, array_blinds_down, occ_mask)
579
- arrays_blinds_down.append(array_filter)
572
+ if light_path != '__static_apertures__':
573
+ array_blinds_up = results._get_array(
574
+ grid_info, light_path, state=0, res_type='total')
575
+ array_filter = np.apply_along_axis(
576
+ filter_array, 1, array_blinds_up, occ_mask)
577
+ arrays_blinds_up.append(array_filter)
578
+ array_blinds_down = results._get_array(
579
+ grid_info, light_path, state=1, res_type='total')
580
+ array_filter = np.apply_along_axis(
581
+ filter_array, 1, array_blinds_down, occ_mask)
582
+ arrays_blinds_down.append(array_filter)
583
+ else:
584
+ static_array = results._get_array(
585
+ grid_info, light_path, state=0, res_type='total')
586
+ array_filter = np.apply_along_axis(
587
+ filter_array, 1, static_array, occ_mask)
588
+ arrays_blinds_up.append(array_filter)
589
+ arrays_blinds_down.append(array_filter)
580
590
  else:
581
591
  for light_path in light_paths:
582
592
  array = results._get_array(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: honeybee-radiance-postprocess
3
- Version: 0.4.441
3
+ Version: 0.4.443
4
4
  Summary: Postprocessing of Radiance results and matrices
5
5
  Home-page: https://github.com/ladybug-tools/honeybee-radiance-postprocess
6
6
  Author: Ladybug Tools
@@ -13,9 +13,10 @@ honeybee_radiance_postprocess/reader.py,sha256=p4A91amyCI16lRRn0bhZdInsg-qJV0Jas
13
13
  honeybee_radiance_postprocess/type_hints.py,sha256=4R0kZgacQrqzoh8Tq7f8MVzUDzynV-C_jlh80UV6GPE,1122
14
14
  honeybee_radiance_postprocess/util.py,sha256=uxqop4TsUMp8l8iLQf784NJINprHCgj00GZHvTth1C0,5603
15
15
  honeybee_radiance_postprocess/vis_metadata.py,sha256=7ywIgdiuNKcctxifhpy7-Q2oaSX2ngQBeA0Kh7q1Gg0,1780
16
- honeybee_radiance_postprocess/cli/__init__.py,sha256=uR3Q-VEhA6ZJPszRvOg_He8qm9HVFm_UxZTCfpLzLGw,851
16
+ honeybee_radiance_postprocess/cli/__init__.py,sha256=_mYHnIOpH0qJ4QK56SB3qUT2Duuts2GR2U_0t_uE-2s,958
17
17
  honeybee_radiance_postprocess/cli/abnt.py,sha256=RmEjhxdEK6Uks3S10rQs6n8cup9qv036qRwh_wj1taA,15705
18
- honeybee_radiance_postprocess/cli/grid.py,sha256=6peLEAPVe-iw05_wdRpFruZLqO8myvC-_QT5W1q5sk8,10677
18
+ honeybee_radiance_postprocess/cli/datacollection.py,sha256=Wb3UX03uW4OUZP7jWHftKfdf3aO_FSXjrnrziR3taf0,4541
19
+ honeybee_radiance_postprocess/cli/grid.py,sha256=gqnU3-HdggWCUg9mA1RLZJYHM7tH0v6r2E_X2SSkAig,11256
19
20
  honeybee_radiance_postprocess/cli/leed.py,sha256=bxGX2UBehYNcaPJWHL2yEasSP6dATD7B0aNNQOflqqM,3712
20
21
  honeybee_radiance_postprocess/cli/merge.py,sha256=oOqqud3VSo-3f3coDoUILcp78OI4DKxXLWCS1bi3PC4,5752
21
22
  honeybee_radiance_postprocess/cli/mtxop.py,sha256=UZJnjNpPjDmShy1-Mxos4H2vTUqk_yP3ZyaC1_LLFeI,5015
@@ -26,15 +27,15 @@ honeybee_radiance_postprocess/cli/two_phase.py,sha256=xA6ayPv26DM5fuMkLhBMYGklf_
26
27
  honeybee_radiance_postprocess/cli/util.py,sha256=Be9cGmYhcV2W37ma6SgQPCWCpWLLLlroxRYN_l58kY0,4077
27
28
  honeybee_radiance_postprocess/cli/viewfactor.py,sha256=kU36YRzLya5PReYREjTfw3zOcWKHYZjVlVclyuR7Cqk,5245
28
29
  honeybee_radiance_postprocess/leed/__init__.py,sha256=kQXElEqFnLGNnrMSpA51XDHoqBup849FHeAqWASIy6w,45
29
- honeybee_radiance_postprocess/leed/leed.py,sha256=rbvhq51Dfhx4aZFcKWqOXp3FfgFy5YDRkHYn9wKHQpc,32631
30
+ honeybee_radiance_postprocess/leed/leed.py,sha256=I3bcPbIvO2ppujmpetLPY82zZwdPAuYp67heDypIlDs,33329
30
31
  honeybee_radiance_postprocess/leed/leed_schedule.py,sha256=s3by1sv1DtOlCawvaMvnIDvEo5D8ATEJvWQ_rEeJIHg,9956
31
32
  honeybee_radiance_postprocess/results/__init__.py,sha256=1agBQbfT4Tf8KqSZzlfKYX8MeZryY4jJ1KB4HWqaDDk,182
32
33
  honeybee_radiance_postprocess/results/annual_daylight.py,sha256=11d4J1iIuITKuoWyWa-2_2WdrHYBULC0YP-mWBWi4JQ,34724
33
34
  honeybee_radiance_postprocess/results/annual_irradiance.py,sha256=5zwrr4MNeHUebbSRpSBbscPOZUs2AHmYCQfIIbdYImY,8298
34
35
  honeybee_radiance_postprocess/results/results.py,sha256=ABb_S8kDPruhGkDsfREXMg6K0p8FRhAZ3QIRUZCQPAI,54888
35
- honeybee_radiance_postprocess-0.4.441.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
36
- honeybee_radiance_postprocess-0.4.441.dist-info/METADATA,sha256=f7fl5XxoeXdNh5dt9Htl1Kcs9nt2VUeUm08tsTdTMag,2246
37
- honeybee_radiance_postprocess-0.4.441.dist-info/WHEEL,sha256=unfA4MOaH0icIyIA5oH6E2sn2Hq5zKtLlHsWapZGwes,110
38
- honeybee_radiance_postprocess-0.4.441.dist-info/entry_points.txt,sha256=gFtVPx6UItXt27GfEZZO00eOZChJJEL6JwGSAB_O3rs,96
39
- honeybee_radiance_postprocess-0.4.441.dist-info/top_level.txt,sha256=4-sFbzy7ewP2EDqJV3jeFlAFx7SuxtoBBELWaKAnLdA,30
40
- honeybee_radiance_postprocess-0.4.441.dist-info/RECORD,,
36
+ honeybee_radiance_postprocess-0.4.443.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
37
+ honeybee_radiance_postprocess-0.4.443.dist-info/METADATA,sha256=mBLPX8Ix6oVlmGWu4_sAfzrfJ52N1heitBXT55gx54E,2246
38
+ honeybee_radiance_postprocess-0.4.443.dist-info/WHEEL,sha256=unfA4MOaH0icIyIA5oH6E2sn2Hq5zKtLlHsWapZGwes,110
39
+ honeybee_radiance_postprocess-0.4.443.dist-info/entry_points.txt,sha256=gFtVPx6UItXt27GfEZZO00eOZChJJEL6JwGSAB_O3rs,96
40
+ honeybee_radiance_postprocess-0.4.443.dist-info/top_level.txt,sha256=4-sFbzy7ewP2EDqJV3jeFlAFx7SuxtoBBELWaKAnLdA,30
41
+ honeybee_radiance_postprocess-0.4.443.dist-info/RECORD,,