honeybee-radiance-postprocess 0.4.555__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. honeybee_radiance_postprocess/__init__.py +1 -0
  2. honeybee_radiance_postprocess/__main__.py +4 -0
  3. honeybee_radiance_postprocess/annual.py +73 -0
  4. honeybee_radiance_postprocess/annualdaylight.py +289 -0
  5. honeybee_radiance_postprocess/annualirradiance.py +35 -0
  6. honeybee_radiance_postprocess/breeam/__init__.py +1 -0
  7. honeybee_radiance_postprocess/breeam/breeam.py +552 -0
  8. honeybee_radiance_postprocess/cli/__init__.py +33 -0
  9. honeybee_radiance_postprocess/cli/abnt.py +392 -0
  10. honeybee_radiance_postprocess/cli/breeam.py +96 -0
  11. honeybee_radiance_postprocess/cli/datacollection.py +133 -0
  12. honeybee_radiance_postprocess/cli/grid.py +295 -0
  13. honeybee_radiance_postprocess/cli/leed.py +143 -0
  14. honeybee_radiance_postprocess/cli/merge.py +161 -0
  15. honeybee_radiance_postprocess/cli/mtxop.py +161 -0
  16. honeybee_radiance_postprocess/cli/postprocess.py +1092 -0
  17. honeybee_radiance_postprocess/cli/schedule.py +103 -0
  18. honeybee_radiance_postprocess/cli/translate.py +216 -0
  19. honeybee_radiance_postprocess/cli/two_phase.py +252 -0
  20. honeybee_radiance_postprocess/cli/util.py +121 -0
  21. honeybee_radiance_postprocess/cli/viewfactor.py +157 -0
  22. honeybee_radiance_postprocess/cli/well.py +110 -0
  23. honeybee_radiance_postprocess/data_type.py +102 -0
  24. honeybee_radiance_postprocess/dynamic.py +273 -0
  25. honeybee_radiance_postprocess/electriclight.py +24 -0
  26. honeybee_radiance_postprocess/en17037.py +304 -0
  27. honeybee_radiance_postprocess/helper.py +266 -0
  28. honeybee_radiance_postprocess/ies/__init__.py +1 -0
  29. honeybee_radiance_postprocess/ies/lm.py +224 -0
  30. honeybee_radiance_postprocess/ies/lm_schedule.py +248 -0
  31. honeybee_radiance_postprocess/leed/__init__.py +1 -0
  32. honeybee_radiance_postprocess/leed/leed.py +801 -0
  33. honeybee_radiance_postprocess/leed/leed_schedule.py +256 -0
  34. honeybee_radiance_postprocess/metrics.py +439 -0
  35. honeybee_radiance_postprocess/reader.py +80 -0
  36. honeybee_radiance_postprocess/results/__init__.py +4 -0
  37. honeybee_radiance_postprocess/results/annual_daylight.py +752 -0
  38. honeybee_radiance_postprocess/results/annual_irradiance.py +196 -0
  39. honeybee_radiance_postprocess/results/results.py +1416 -0
  40. honeybee_radiance_postprocess/type_hints.py +38 -0
  41. honeybee_radiance_postprocess/util.py +211 -0
  42. honeybee_radiance_postprocess/vis_metadata.py +49 -0
  43. honeybee_radiance_postprocess/well/__init__.py +1 -0
  44. honeybee_radiance_postprocess/well/well.py +509 -0
  45. honeybee_radiance_postprocess-0.4.555.dist-info/METADATA +79 -0
  46. honeybee_radiance_postprocess-0.4.555.dist-info/RECORD +50 -0
  47. honeybee_radiance_postprocess-0.4.555.dist-info/WHEEL +5 -0
  48. honeybee_radiance_postprocess-0.4.555.dist-info/entry_points.txt +2 -0
  49. honeybee_radiance_postprocess-0.4.555.dist-info/licenses/LICENSE +661 -0
  50. honeybee_radiance_postprocess-0.4.555.dist-info/top_level.txt +1 -0
honeybee_radiance_postprocess/cli/grid.py
@@ -0,0 +1,295 @@
+ """honeybee radiance postprocess grid commands."""
+ import click
+ import sys
+ import logging
+ import json
+ from pathlib import Path
+ try:
+     import cupy as np
+     is_gpu = True
+ except ImportError:
+     is_gpu = False
+     import numpy as np
+
+ from honeybee_radiance_postprocess.reader import binary_to_array
+ from ..annualdaylight import _annual_daylight_vis_metadata
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands for generating and modifying sensor grids.')
+ def grid():
+     pass
+
+
+ @grid.command('merge-folder')
+ @click.argument(
+     'input-folder',
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True))
+ @click.argument(
+     'output-folder',
+     type=click.Path(file_okay=False, dir_okay=True, resolve_path=True))
+ @click.argument('extension', type=str)
+ @click.option(
+     '--dist-info', '-di',
+     help='An optional input for distribution information to put the grids back '
+     'together. Alternatively, the command will look for a _redist_info.json file '
+     'inside the folder.', type=click.Path(file_okay=True, dir_okay=False, resolve_path=True)
+ )
+ @click.option(
+     '--output-extension', '-oe',
+     help='Output file extension. This is only used if as_text is set to True. '
+     'Otherwise the output extension will be npy.', default='ill', type=click.STRING
+ )
+ @click.option(
+     '--as-text', '-at',
+     help='Set to True if the output files should be saved as text instead of '
+     'NumPy files.', default=False, type=click.BOOL
+ )
+ @click.option(
+     '--fmt',
+     help='Format for the output files when saved as text.', default='%.2f',
+     type=click.STRING
+ )
+ @click.option(
+     '--delimiter',
+     help='Delimiter for the output files when saved as text.',
+     type=click.Choice(['space', 'tab']), default='tab'
+ )
+ def merge_grid_folder(input_folder, output_folder, extension, dist_info,
+                       output_extension, as_text, fmt, delimiter):
+     """Restructure files in a distributed folder.
+
+     \b
+     Args:
+         input_folder: Path to input folder.
+         output_folder: Path to the new restructured folder.
+         extension: Extension of the files to collect data from. It will be ``pts`` for
+             sensor files. Another common extension is ``ill`` for the results of daylight
+             studies.
+     """
+     try:
+         # handle optional case for Functions input
+         if dist_info and not Path(dist_info).is_file():
+             dist_info = None
+         restore_original_distribution(
+             input_folder, output_folder, extension, dist_info, output_extension,
+             as_text, fmt, delimiter=delimiter)  # keyword so it is not consumed by input_delimiter
+     except Exception:
+         _logger.exception('Failed to restructure data from folder.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @grid.command('merge-folder-metrics')
+ @click.argument(
+     'input-folder',
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True))
+ @click.argument(
+     'output-folder',
+     type=click.Path(file_okay=False, dir_okay=True, resolve_path=True))
+ @click.option(
+     '--dist-info', '-di',
+     help='An optional input for distribution information to put the grids back '
+     'together. Alternatively, the command will look for a _redist_info.json file '
+     'inside the folder.', type=click.Path(file_okay=True, dir_okay=False, resolve_path=True)
+ )
+ @click.option(
+     '--grids-info', '-gi',
+     help='An optional input for grid information that will be copied to each '
+     'metric folder. This file is usually called grids_info.json.',
+     type=click.Path(file_okay=True, dir_okay=False, resolve_path=True)
+ )
+ def merge_metrics_folder(input_folder, output_folder, dist_info, grids_info):
+     """Restructure annual daylight metrics in a distributed folder.
+
+     Since this command redistributes metrics, the input folder is expected to
+     have a subfolder for each metric.
+
+     \b
+     Args:
+         input_folder: Path to input folder.
+         output_folder: Path to the new restructured folder.
+     """
+     try:
+         # handle optional case for Functions input
+         if dist_info and not Path(dist_info).is_file():
+             dist_info = None
+         if grids_info:
+             with open(grids_info) as gi:
+                 grids_info = json.load(gi)
+         extension_mapper = {
+             'da': 'da',
+             'cda': 'cda',
+             'udi': 'udi',
+             'udi_lower': 'udi',
+             'udi_upper': 'udi'
+         }
+         metric_info_dict = _annual_daylight_vis_metadata()
+         input_folder = Path(input_folder)
+         output_folder = Path(output_folder)
+         for metric, extension in extension_mapper.items():
+             metric_folder = input_folder.joinpath(metric)
+             metric_out = output_folder.joinpath(metric)
+             restore_original_distribution_metrics(
+                 metric_folder, output_folder, metric, extension, dist_info)
+
+             if grids_info:
+                 info_file = metric_out.joinpath('grids_info.json')
+                 info_file.write_text(json.dumps(grids_info))
+
+             vis_data = metric_info_dict[metric]
+             vis_metadata_file = metric_out.joinpath('vis_metadata.json')
+             vis_metadata_file.write_text(json.dumps(vis_data, indent=4))
+     except Exception:
+         _logger.exception('Failed to restructure data from folder.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ def restore_original_distribution(
+         input_folder, output_folder, extension='npy', dist_info=None,
+         output_extension='ill', as_text=False, fmt='%.2f', input_delimiter=',',
+         delimiter='tab'):
+     """Restructure files to the original distribution based on the distribution info.
+
+     It will assume that the files in the input folder are NumPy files. However,
+     if it fails to load the files as arrays it will try to load from binary
+     Radiance files to array.
+
+     Args:
+         input_folder: Path to input folder.
+         output_folder: Path to the new restructured folder.
+         extension: Extension of the files to collect data from. Default is ``npy`` for
+             NumPy files. Another common extension is ``ill`` for the results of daylight
+             studies.
+         dist_info: Path to dist_info.json file. If None, the function will try to load
+             ``_redist_info.json`` file from inside the input_folder. (Default: None).
+         output_extension: Output file extension. This is only used if as_text
+             is set to True. Otherwise the output extension will be ``npy``.
+         as_text: Set to True if the output files should be saved as text instead
+             of NumPy files.
+         fmt: Format for the output files when saved as text.
+         input_delimiter: Delimiter for the input files. This is used only if the
+             input files are text files.
+         delimiter: Delimiter for the output files when saved as text.
+     """
+     if not dist_info:
+         _redist_info_file = Path(input_folder, '_redist_info.json')
+     else:
+         _redist_info_file = Path(dist_info)
+
+     assert _redist_info_file.is_file(), 'Failed to find %s' % _redist_info_file
+
+     with open(_redist_info_file) as inf:
+         data = json.load(inf)
+
+     # create output folder
+     output_folder = Path(output_folder)
+     if not output_folder.is_dir():
+         output_folder.mkdir(parents=True, exist_ok=True)
+
+     src_file = Path()
+     for f in data:
+         output_file = Path(output_folder, f['identifier'])
+         # ensure the new folder is created, in case the identifier has a subfolder
+         parent_folder = output_file.parent
+         if not parent_folder.is_dir():
+             parent_folder.mkdir()
+
+         out_arrays = []
+         for src_info in f['dist_info']:
+             st = src_info['st_ln']
+             end = src_info['end_ln']
+             new_file = Path(input_folder, '%s.%s' % (src_info['identifier'], extension))
+             if not new_file.samefile(src_file):
+                 src_file = new_file
+                 try:
+                     array = np.load(src_file)
+                 except Exception:
+                     try:
+                         array = binary_to_array(src_file)
+                     except Exception:
+                         try:
+                             array = np.loadtxt(
+                                 src_file, delimiter=input_delimiter)
+                         except Exception:
+                             raise RuntimeError(
+                                 f'Failed to load input file "{src_file}"')
+             slice_array = array[st:end + 1, :]
+
+             out_arrays.append(slice_array)
+
+         out_array = np.concatenate(out_arrays)
+         # save numpy array, .npy extension is added automatically
+         if not as_text:
+             np.save(output_file, out_array)
+         else:
+             if output_extension.startswith('.'):
+                 output_extension = output_extension[1:]
+             if delimiter == 'tab':
+                 delimiter = '\t'
+             elif delimiter == 'space':
+                 delimiter = ' '
+             elif delimiter == 'comma':
+                 delimiter = ','
+             np.savetxt(output_file.with_suffix(f'.{output_extension}'),
+                        out_array, fmt=fmt, delimiter=delimiter)
+
+
+ def restore_original_distribution_metrics(
+         input_folder, output_folder, metric, extension, dist_info=None):
+     """Restructure metric files to the original distribution based on the distribution info.
+
+     It will assume that the files in the input folder are text files of annual
+     daylight metrics (such as da, cda or udi files) that can be loaded with
+     np.loadtxt.
+
+     Args:
+         input_folder: Path to input folder.
+         output_folder: Path to the new restructured folder.
+         metric: Name of the metric to redistribute.
+         extension: Extension of the files to collect data from. For annual
+             daylight metrics the extension can be 'da', 'cda', or 'udi'.
+         dist_info: Path to dist_info.json file. If None, the function will try to load
+             ``_redist_info.json`` file from inside the input_folder. (Default: None).
+     """
+     if not dist_info:
+         _redist_info_file = Path(input_folder, '_redist_info.json')
+     else:
+         _redist_info_file = Path(dist_info)
+
+     assert _redist_info_file.is_file(), 'Failed to find %s' % _redist_info_file
+
+     with open(_redist_info_file) as inf:
+         data = json.load(inf)
+
+     # create output folder
+     output_folder = Path(output_folder)
+     if not output_folder.is_dir():
+         output_folder.mkdir()
+
+     src_file = Path()
+     for f in data:
+         output_file = Path(output_folder, metric, '%s.%s' % (f['identifier'], extension))
+         # ensure the new folder is created, in case the identifier has a subfolder
+         parent_folder = output_file.parent
+         if not parent_folder.is_dir():
+             parent_folder.mkdir()
+
+         out_arrays = []
+         for src_info in f['dist_info']:
+             st = src_info['st_ln']
+             end = src_info['end_ln']
+             new_file = Path(input_folder, '%s.%s' % (src_info['identifier'], extension))
+             if not new_file.samefile(src_file):
+                 src_file = new_file
+                 array = np.loadtxt(src_file)
+             slice_array = array[st:end + 1]
+             out_arrays.append(slice_array)
+
+         out_array = np.concatenate(out_arrays)
+         # save array as txt file
+         np.savetxt(output_file, out_array, fmt='%.2f')
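The merge-folder command and restore_original_distribution above are driven entirely by a _redist_info.json file. Below is a minimal sketch of that structure, inferred from how the function reads it; the folder name, grid identifiers and array shape are illustrative only, not values shipped with the package:

import json
from pathlib import Path

import numpy as np  # the module above falls back to numpy when cupy is unavailable

from honeybee_radiance_postprocess.cli.grid import restore_original_distribution

# hypothetical distributed folder: one source file that holds the rows of two
# original grids back to back
folder = Path('dist_results')
folder.mkdir(parents=True, exist_ok=True)
np.save(folder.joinpath('grid_000.npy'), np.random.rand(200, 24))

# each entry names an output file ('identifier') and the slices that belong to
# it: the source file 'identifier' plus start/end rows ('st_ln'/'end_ln')
redist_info = [
    {'identifier': 'room_1',
     'dist_info': [{'identifier': 'grid_000', 'st_ln': 0, 'end_ln': 99}]},
    {'identifier': 'room_2',
     'dist_info': [{'identifier': 'grid_000', 'st_ln': 100, 'end_ln': 199}]},
]
folder.joinpath('_redist_info.json').write_text(json.dumps(redist_info))

# writes room_1.npy (rows 0-99) and room_2.npy (rows 100-199) to 'results'
restore_original_distribution('dist_results', 'results', extension='npy')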
honeybee_radiance_postprocess/cli/leed.py
@@ -0,0 +1,143 @@
+ """honeybee radiance daylight leed postprocessing commands."""
+ import json
+ import sys
+ import logging
+ import os
+ import click
+
+ from ladybug.color import Color
+ from ladybug.datatype.generic import GenericType
+ from ladybug.legend import LegendParameters
+
+ from ..leed.leed import leed_option_one
+ from ..results.annual_daylight import AnnualDaylight
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands for LEED post-processing of Radiance results.')
+ def leed():
+     pass
+
+
+ @leed.command('daylight-option-one')
+ @click.argument(
+     'folder',
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True)
+ )
+ @click.option(
+     '--grids-filter', '-gf', help='A pattern to filter the grids.', default='*',
+     show_default=True
+ )
+ @click.option(
+     '--shade-transmittance', '-st', help='A value to use as a multiplier in place of '
+     'solar shading. The value for shade transmittance must be 1 > value > 0.',
+     default=0.02, show_default=True, type=click.FLOAT
+ )
+ @click.option(
+     '--shade-transmittance-file', '-stf', help='A JSON file with a dictionary '
+     'where aperture groups are keys, and the value for each key is the shade '
+     'transmittance. Values for shade transmittance must be 1 > value > 0. '
+     'If any aperture groups are missing in the JSON file, their shade transmittance '
+     'value will be set to the value of the --shade-transmittance option (0.02 by '
+     'default).', default=None, show_default=True,
+     type=click.Path(exists=False, file_okay=True, dir_okay=False, resolve_path=True)
+ )
+ @click.option(
+     '--use-shade-transmittance/--use-states', help='A flag to select if the '
+     'post-processing should use a shade transmittance or the simulated states '
+     'of aperture groups. Using states should only be selected if the annual '
+     'daylight simulation included ray tracing of a second (blind) state for '
+     'each aperture group.',
+     is_flag=True, default=True, show_default=True
+ )
+ @click.option(
+     '--sub-folder', '-sf', help='Relative path for subfolder to write output '
+     'files.', default='leed_summary', show_default=True
+ )
+ def daylight_option_one(
+     folder, shade_transmittance, shade_transmittance_file, grids_filter,
+     use_shade_transmittance, sub_folder
+ ):
+     """Calculate credits for LEED v4.1 Daylight Option 1.
+
+     Use the shade-transmittance option to set a shade transmittance value for
+     aperture groups. The shade-transmittance-file option takes precedence over
+     shade-transmittance; however, if any aperture groups are missing in the
+     JSON file given to the shade-transmittance-file option, the value from
+     shade-transmittance will be used for those aperture groups.
+
+     \b
+     Args:
+         folder: Results folder. This folder is an output folder of the annual
+             daylight recipe. The daylight simulation must include aperture groups.
+     """
+     use_states = not use_shade_transmittance
+     if (
+         shade_transmittance_file
+         and os.path.isfile(shade_transmittance_file)
+         and use_shade_transmittance
+     ):
+         with open(shade_transmittance_file) as json_file:
+             shd_trans = json.load(json_file)
+         results = AnnualDaylight(folder)
+         # check if aperture groups are missing in json file
+         for light_path in results.light_paths:
+             if (light_path not in shd_trans and
+                     light_path != '__static_apertures__'):
+                 shd_trans[light_path] = shade_transmittance
+         shade_transmittance = shd_trans
+     try:
+         leed_option_one(
+             folder, grids_filter=grids_filter,
+             shade_transmittance=shade_transmittance, use_states=use_states,
+             sub_folder=sub_folder
+         )
+     except Exception:
+         _logger.exception('Failed to generate LEED summary.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @leed.command('leed-daylight-option-one-vis-metadata')
+ @click.option(
+     '--output-folder', '-o', help='Output folder for vis metadata files.',
+     type=click.Path(exists=False, file_okay=False, dir_okay=True, resolve_path=True),
+     default='visualization', show_default=True
+ )
+ def leed_daylight_option_one_vis(output_folder):
+     """Write visualization metadata files for LEED Daylight Option 1."""
+     colors = [Color(220, 0, 0), Color(0, 220, 0)]
+     pass_fail_lpar = \
+         LegendParameters(min=0, max=1, colors=colors, segment_count=2, title='Pass/Fail')
+     pass_fail_lpar.ordinal_dictionary = {0: "Fail", 1: "Pass"}
+
+     metric_info_dict = {
+         'DA': {
+             'type': 'VisualizationMetaData',
+             'data_type': GenericType('DA300,50%', '').to_dict(),
+             'unit': '',
+             'legend_parameters': pass_fail_lpar.to_dict()
+         },
+         'ASE': {
+             'type': 'VisualizationMetaData',
+             'data_type': GenericType('ASE1000,250hrs', '').to_dict(),
+             'unit': '',
+             'legend_parameters': pass_fail_lpar.to_dict()
+         }
+     }
+     try:
+         if not os.path.exists(output_folder):
+             os.mkdir(output_folder)
+         for metric, data in metric_info_dict.items():
+             if not os.path.exists(os.path.join(output_folder, metric)):
+                 os.mkdir(os.path.join(output_folder, metric))
+             file_path = os.path.join(output_folder, metric, 'vis_metadata.json')
+             with open(file_path, 'w') as fp:
+                 json.dump(data, fp, indent=4)
+     except Exception:
+         _logger.exception('Failed to write the visualization metadata files.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
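A short sketch of driving the daylight-option-one command from Python with click's test runner. The results folder path and the aperture group names in the shade transmittance JSON are placeholders, not values from this package:

import json

from click.testing import CliRunner

from honeybee_radiance_postprocess.cli.leed import leed

# hypothetical shade transmittance file: aperture group identifiers mapped to a
# transmittance between 0 and 1; groups missing from the file fall back to the
# --shade-transmittance value (0.02 by default)
with open('shd_trans.json', 'w') as f:
    json.dump({'south_windows': 0.05, 'north_windows': 0.1}, f)

runner = CliRunner()
result = runner.invoke(leed, [
    'daylight-option-one', 'annual_daylight_results',  # output folder of the recipe
    '--shade-transmittance-file', 'shd_trans.json',
    '--sub-folder', 'leed_summary',
])
print(result.exit_code, result.output)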
honeybee_radiance_postprocess/cli/merge.py
@@ -0,0 +1,161 @@
+ """honeybee radiance postprocess merge commands."""
+ import click
+ import sys
+ import logging
+ import json
+ from pathlib import Path
+ try:
+     import cupy as np
+     is_gpu = True
+ except ImportError:
+     is_gpu = False
+     import numpy as np
+
+ from honeybee_radiance_postprocess.reader import binary_to_array
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands for merging distributed result files.')
+ def merge():
+     pass
+
+
+ @merge.command('merge-files')
+ @click.argument(
+     'input-folder',
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True))
+ @click.argument('extension', type=str)
+ @click.option(
+     '--output-file', '-of',
+     help='Name of the merged file.', default='results',
+     type=click.STRING
+ )
+ @click.option(
+     '--dist-info', '-di',
+     help='An optional input for distribution information to put the grids back '
+     'together. Alternatively, the command will look for a _redist_info.json file '
+     'inside the folder.', type=click.Path(file_okay=True, dir_okay=False, resolve_path=True)
+ )
+ @click.option(
+     '--merge-axis', '-ma',
+     help='Merge files along axis.', default='0', show_default=True,
+     type=click.Choice(['0', '1', '2']), show_choices=True
+ )
+ @click.option(
+     '--output-extension', '-oe',
+     help='Output file extension. This is only used if as_text is set to True. '
+     'Otherwise the output extension will be npy.', default='ill', type=click.STRING
+ )
+ @click.option(
+     '--as-text', '-at',
+     help='Set to True if the output files should be saved as text instead of '
+     'NumPy files.', default=False, type=click.BOOL
+ )
+ @click.option(
+     '--fmt',
+     help='Format for the output files when saved as text.', default='%.2f',
+     type=click.STRING
+ )
+ @click.option(
+     '--delimiter',
+     help='Delimiter for the output files when saved as text.',
+     type=click.Choice(['space', 'tab']), default='tab'
+ )
+ def merge_files(
+         input_folder, output_file, extension, dist_info, merge_axis,
+         output_extension, as_text, fmt, delimiter):
+     """Merge files in a distributed folder.
+
+     \b
+     Args:
+         input_folder: Path to input folder.
+         extension: Extension of the files to collect data from. It will be ``pts`` for
+             sensor files. Another common extension is ``ill`` for the results of daylight
+             studies.
+     """
+     try:
+         # handle optional case for Functions input
+         if dist_info and not Path(dist_info).is_file():
+             dist_info = None
+         _merge_files(input_folder, output_file, int(merge_axis), extension, dist_info,
+                      output_extension, as_text, fmt, delimiter)
+     except Exception:
+         _logger.exception('Failed to merge files from folder.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ def _merge_files(
+         input_folder, output_file, merge_axis=0, extension='npy', dist_info=None,
+         output_extension='ill', as_text=False, fmt='%.2f', delimiter='tab'):
+     """Merge files from a distributed folder based on the distribution info.
+
+     It will assume that the files in the input folder are NumPy files. However,
+     if it fails to load the files as arrays it will try to load from binary
+     Radiance files to array.
+
+     Args:
+         input_folder: Path to input folder.
+         output_file: Path of the merged output file.
+         merge_axis: Axis to merge the files along.
+         extension: Extension of the files to collect data from. Default is ``npy`` for
+             NumPy files. Another common extension is ``ill`` for the results of daylight
+             studies.
+         dist_info: Path to dist_info.json file. If None, the function will try to load
+             ``_redist_info.json`` file from inside the input_folder. (Default: None).
+         output_extension: Output file extension. This is only used if as_text
+             is set to True. Otherwise the output extension will be ``npy``.
+         as_text: Set to True if the output files should be saved as text instead
+             of NumPy files.
+         fmt: Format for the output files when saved as text.
+         delimiter: Delimiter for the output files when saved as text.
+     """
+     if not dist_info:
+         _redist_info_file = Path(input_folder, '_redist_info.json')
+     else:
+         _redist_info_file = Path(dist_info)
+
+     assert _redist_info_file.is_file(), 'Failed to find %s' % _redist_info_file
+
+     with open(_redist_info_file) as inf:
+         data = json.load(inf)
+
+     out_arrays = []
+     src_file = Path()
+     for f in data:
+         output_file = Path(output_file)
+         # ensure the new folder is created, in case the output file has a subfolder
+         parent_folder = output_file.parent
+         if not parent_folder.is_dir():
+             parent_folder.mkdir()
+
+         for src_info in f['dist_info']:
+             new_file = Path(input_folder, '%s.%s' %
+                             (src_info['identifier'], extension))
+
+             if not new_file.samefile(src_file):
+                 src_file = new_file
+                 try:
+                     array = np.load(src_file)
+                 except Exception:
+                     array = binary_to_array(src_file)
+
+             out_arrays.append(array)
+
+     out_array = np.concatenate(out_arrays, axis=merge_axis)
+
+     # save numpy array, .npy extension is added automatically
+     if not as_text:
+         np.save(output_file, out_array)
+     else:
+         if output_extension.startswith('.'):
+             output_extension = output_extension[1:]
+         if delimiter == 'tab':
+             delimiter = '\t'
+         elif delimiter == 'space':
+             delimiter = ' '
+         np.savetxt(output_file.with_suffix(f'.{output_extension}'),
+                    out_array, fmt=fmt, delimiter=delimiter)
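A minimal sketch of _merge_files stacking two NumPy result files along the sensor axis; the folder, file names and the hand-written _redist_info.json are illustrative only:

import json
from pathlib import Path

import numpy as np  # the module above falls back to numpy when cupy is unavailable

from honeybee_radiance_postprocess.cli.merge import _merge_files

folder = Path('two_phase_results')
folder.mkdir(parents=True, exist_ok=True)

# two distributed result files with 50 sensors x 24 values each
np.save(folder.joinpath('part_000.npy'), np.random.rand(50, 24))
np.save(folder.joinpath('part_001.npy'), np.random.rand(50, 24))

# _merge_files appends whole files in the order of the 'dist_info' identifiers,
# so st_ln/end_ln are not used for slicing here
redist_info = [{
    'identifier': 'scene',
    'dist_info': [
        {'identifier': 'part_000', 'st_ln': 0, 'end_ln': 49},
        {'identifier': 'part_001', 'st_ln': 50, 'end_ln': 99},
    ],
}]
folder.joinpath('_redist_info.json').write_text(json.dumps(redist_info))

# merged/results.npy ends up as a (100, 24) array
_merge_files(folder, 'merged/results', merge_axis=0, extension='npy')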