honeybee_radiance_postprocess-0.4.555-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. honeybee_radiance_postprocess/__init__.py +1 -0
  2. honeybee_radiance_postprocess/__main__.py +4 -0
  3. honeybee_radiance_postprocess/annual.py +73 -0
  4. honeybee_radiance_postprocess/annualdaylight.py +289 -0
  5. honeybee_radiance_postprocess/annualirradiance.py +35 -0
  6. honeybee_radiance_postprocess/breeam/__init__.py +1 -0
  7. honeybee_radiance_postprocess/breeam/breeam.py +552 -0
  8. honeybee_radiance_postprocess/cli/__init__.py +33 -0
  9. honeybee_radiance_postprocess/cli/abnt.py +392 -0
  10. honeybee_radiance_postprocess/cli/breeam.py +96 -0
  11. honeybee_radiance_postprocess/cli/datacollection.py +133 -0
  12. honeybee_radiance_postprocess/cli/grid.py +295 -0
  13. honeybee_radiance_postprocess/cli/leed.py +143 -0
  14. honeybee_radiance_postprocess/cli/merge.py +161 -0
  15. honeybee_radiance_postprocess/cli/mtxop.py +161 -0
  16. honeybee_radiance_postprocess/cli/postprocess.py +1092 -0
  17. honeybee_radiance_postprocess/cli/schedule.py +103 -0
  18. honeybee_radiance_postprocess/cli/translate.py +216 -0
  19. honeybee_radiance_postprocess/cli/two_phase.py +252 -0
  20. honeybee_radiance_postprocess/cli/util.py +121 -0
  21. honeybee_radiance_postprocess/cli/viewfactor.py +157 -0
  22. honeybee_radiance_postprocess/cli/well.py +110 -0
  23. honeybee_radiance_postprocess/data_type.py +102 -0
  24. honeybee_radiance_postprocess/dynamic.py +273 -0
  25. honeybee_radiance_postprocess/electriclight.py +24 -0
  26. honeybee_radiance_postprocess/en17037.py +304 -0
  27. honeybee_radiance_postprocess/helper.py +266 -0
  28. honeybee_radiance_postprocess/ies/__init__.py +1 -0
  29. honeybee_radiance_postprocess/ies/lm.py +224 -0
  30. honeybee_radiance_postprocess/ies/lm_schedule.py +248 -0
  31. honeybee_radiance_postprocess/leed/__init__.py +1 -0
  32. honeybee_radiance_postprocess/leed/leed.py +801 -0
  33. honeybee_radiance_postprocess/leed/leed_schedule.py +256 -0
  34. honeybee_radiance_postprocess/metrics.py +439 -0
  35. honeybee_radiance_postprocess/reader.py +80 -0
  36. honeybee_radiance_postprocess/results/__init__.py +4 -0
  37. honeybee_radiance_postprocess/results/annual_daylight.py +752 -0
  38. honeybee_radiance_postprocess/results/annual_irradiance.py +196 -0
  39. honeybee_radiance_postprocess/results/results.py +1416 -0
  40. honeybee_radiance_postprocess/type_hints.py +38 -0
  41. honeybee_radiance_postprocess/util.py +211 -0
  42. honeybee_radiance_postprocess/vis_metadata.py +49 -0
  43. honeybee_radiance_postprocess/well/__init__.py +1 -0
  44. honeybee_radiance_postprocess/well/well.py +509 -0
  45. honeybee_radiance_postprocess-0.4.555.dist-info/METADATA +79 -0
  46. honeybee_radiance_postprocess-0.4.555.dist-info/RECORD +50 -0
  47. honeybee_radiance_postprocess-0.4.555.dist-info/WHEEL +5 -0
  48. honeybee_radiance_postprocess-0.4.555.dist-info/entry_points.txt +2 -0
  49. honeybee_radiance_postprocess-0.4.555.dist-info/licenses/LICENSE +661 -0
  50. honeybee_radiance_postprocess-0.4.555.dist-info/top_level.txt +1 -0
honeybee_radiance_postprocess/cli/abnt.py
@@ -0,0 +1,392 @@
+ """Commands for ABNT NBR post-processing."""
+ import json
+ import sys
+ import logging
+ from pathlib import Path
+ import click
+ try:
+     import cupy as np
+     is_gpu = True
+ except ImportError:
+     is_gpu = False
+     import numpy as np
+
+ from honeybee.model import Model
+ from honeybee.room import Room
+ from ladybug_geometry.geometry3d.face import Face3D
+ from ladybug_geometry.geometry3d.pointvector import Vector3D
+
+ from ..vis_metadata import _abnt_nbr_15575_daylight_levels_vis_metadata
+
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands for ABNT NBR (Brazil) post-processing of Radiance results.')
+ def abnt():
+     pass
+
+
+ @abnt.command('abnt-nbr-15575')
+ @click.argument(
+     'folder',
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True)
+ )
+ @click.argument('model-file', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option(
+     '--ground-level', '-gl', help='A value to define the height of the ground '
+     'level. This will make sure that rooms below this height will not be '
+     'counted as ground level rooms.',
+     default=0, show_default=True, type=click.FLOAT
+ )
+ @click.option(
+     '--room-center/--grid-center', '-rc/-gc', help='Flag to note whether the '
+     'evaluation of the center is at the room center or the grid center.',
+     default=True, show_default=True)
+ @click.option(
+     '--sub-folder', '-sf', help='Relative path for subfolder to write output '
+     'files.', default='abnt_nbr_15575', type=click.Path(
+         exists=False, file_okay=False, dir_okay=True, resolve_path=True, path_type=Path)
+ )
+ def abnt_nbr_15575(
+     folder, model_file, ground_level, room_center, sub_folder
+ ):
+     """Calculate metrics for ABNT NBR 15575.
+
+     \b
+     Args:
+         folder: Simulation folder for an ABNT NBR 15575 simulation. It should
+             contain four sub-folders of complete point-in-time illuminance
+             simulations labeled "4_930AM", "4_330PM", "10_930AM", and "10_330PM".
+             Each of these sub-folders should have a results folder that includes
+             a grids_info.json and .res files with illuminance values for each
+             sensor.
+         model-file: A Honeybee Model file that was used in the simulations. This
+             file is used to extract the center points of the sensor grids. It is
+             a requirement that the sensor grids have meshes.
+     """
+     def find_surrounding_points(points, values, new_point):
+         """Find the four surrounding points for bilinear interpolation.
+
+         Args:
+             points: 2D array of point coordinates.
+             values: 1D array of values at the points.
+             new_point: 1D array with the point to interpolate.
+
+         Returns:
+             tuple: Arrays of the surrounding points and the values at those points.
+         """
+         x, y = new_point
+         lower_left = None
+         upper_left = None
+         lower_right = None
+         upper_right = None
+
+         for i, (px, py) in enumerate(points):
+             if px <= x and py <= y:
+                 if lower_left is None or (px >= lower_left[0] and py >= lower_left[1]):
+                     lower_left = (px, py, values[i])
+             if px <= x and py >= y:
+                 if upper_left is None or (px >= upper_left[0] and py <= upper_left[1]):
+                     upper_left = (px, py, values[i])
+             if px >= x and py >= y:
+                 if upper_right is None or (px <= upper_right[0] and py <= upper_right[1]):
+                     upper_right = (px, py, values[i])
+             if px >= x and py <= y:
+                 if lower_right is None or (px <= lower_right[0] and py >= lower_right[1]):
+                     lower_right = (px, py, values[i])
+
+         surrounding_points = []
+         surrounding_values = []
+         if lower_left:
+             surrounding_points.append(lower_left[:2])
+             surrounding_values.append(lower_left[2])
+         if upper_left:
+             surrounding_points.append(upper_left[:2])
+             surrounding_values.append(upper_left[2])
+         if upper_right:
+             surrounding_points.append(upper_right[:2])
+             surrounding_values.append(upper_right[2])
+         if lower_right:
+             surrounding_points.append(lower_right[:2])
+             surrounding_values.append(lower_right[2])
+
+         return np.array(surrounding_points), np.array(surrounding_values)
+
+     def bilinear_interpolate(surrounding_points, surrounding_values, new_point):
+         """Perform bilinear interpolation given four surrounding points.
+
+         Args:
+             surrounding_points: 2D array of points.
+             surrounding_values: 1D array of values at the points.
+             new_point: 1D array of the point to interpolate.
+
+         Returns:
+             Interpolated value at the new_point.
+         """
+         x1, y1 = surrounding_points[0]
+         x2, y2 = surrounding_points[2]
+         x, y = new_point
+
+         fQ11 = surrounding_values[0]
+         fQ21 = surrounding_values[3]
+         fQ12 = surrounding_values[1]
+         fQ22 = surrounding_values[2]
+
+         interpolated_value = (
+             fQ11 * (x2 - x) * (y2 - y) +
+             fQ21 * (x - x1) * (y2 - y) +
+             fQ12 * (x2 - x) * (y - y1) +
+             fQ22 * (x - x1) * (y - y1)
+         ) / ((x2 - x1) * (y2 - y1))
+
+         return interpolated_value
+
+     def inverse_distance_weighting(points, values, new_point, n_nearest=4):
+         """Perform inverse distance weighting interpolation.
+
+         Args:
+             points: 2D array of points.
+             values: 1D array of values at the points.
+             new_point: 1D array of the point to interpolate.
+             n_nearest: Number of nearest points to consider for interpolation.
+
+         Returns:
+             Interpolated value at the new_point.
+         """
+         distances = np.linalg.norm(points - new_point, axis=1)
+         nearest_indices = np.argsort(distances)[:min(n_nearest, len(points))]
+
+         nearest_values = values[nearest_indices]
+         nearest_distances = distances[nearest_indices]
+
+         if np.any(nearest_distances == 0):
+             # if the new point coincides with an existing point, return its value
+             return nearest_values[nearest_distances == 0][0]
+
+         weights = 1 / nearest_distances
+         weights /= weights.sum()  # normalize weights
+         return np.dot(weights, nearest_values)
+
+     def perform_interpolation(x, y, x_coords, y_coords, pit_values):
+         points = np.column_stack((x_coords, y_coords))
+         values = np.array(pit_values)
+         new_point = np.array([x, y])
+
+         surrounding_points, surrounding_values = \
+             find_surrounding_points(points, values, new_point)
+
+         if len(surrounding_points) == 4:
+             interpolated_value = \
+                 bilinear_interpolate(surrounding_points,
+                                      surrounding_values, new_point)
+         else:
+             interpolated_value = \
+                 inverse_distance_weighting(
+                     points, values, new_point, n_nearest=4)
+
+         return interpolated_value
+
+     try:
+         folder = Path(folder)
+         hb_model: Model = Model.from_file(model_file)
+         grouped_rooms, floor_heights = Room.group_by_floor_height(
+             hb_model.rooms)
+
+         # pick the first group of rooms at or above the ground level
+         for gr, fh in zip(grouped_rooms, floor_heights):
+             if fh >= ground_level:
+                 ground_level_rooms = gr
+                 break
+
+         sensor_grids = hb_model.properties.radiance.sensor_grids
+         sg_full_identifier = {sg.full_identifier: sg for sg in sensor_grids}
+
+         if not sub_folder.exists():
+             sub_folder.mkdir(parents=True, exist_ok=True)
+         illuminance_levels_folder = sub_folder.joinpath('illuminance_levels')
+         if not illuminance_levels_folder.exists():
+             illuminance_levels_folder.mkdir(parents=True, exist_ok=True)
+
+         summary_rooms_csv = sub_folder.joinpath('abnt_nbr_15575_rooms.csv')
+         folder_names = ['4_930AM', '4_330PM', '10_930AM', '10_330PM']
+         pit_mapper = {
+             '4_930AM': '23 de abril 09:30',
+             '4_330PM': '23 de abril 15:30',
+             '10_930AM': '23 de outubro 09:30',
+             '10_330PM': '23 de outubro 15:30'
+         }
+
+         metric_info_dict = _abnt_nbr_15575_daylight_levels_vis_metadata()
+         summary_output = {}
+         summary_rooms_output = {}
+         pof_sensor_grids = {}
+         for _subfolder in folder_names:
+             res_folder = folder.joinpath(_subfolder, 'results')
+             with open(res_folder.joinpath('grids_info.json')) as data_f:
+                 grids_info = json.load(data_f)
+             sub_output = []
+             for grid_info in grids_info:
+                 pit_values = \
+                     np.loadtxt(res_folder.joinpath(
+                         f'{grid_info["full_id"]}.res'))
+                 sensor_grid = sg_full_identifier[grid_info['full_id']]
+                 sensor_points = np.array(
+                     [[sensor.pos[0], sensor.pos[1], sensor.pos[2]]
+                      for sensor in sensor_grid.sensors]
+                 )
+
+                 x_coords = sensor_points[:, 0]
+                 y_coords = sensor_points[:, 1]
+                 z_coords = sensor_points[:, 2]
+
+                 room = hb_model.rooms_by_identifier(
+                     [sensor_grid.room_identifier])[0]
+
+                 pof_sensor_grid = \
+                     pof_sensor_grids.get(grid_info['full_id'], None)
+                 # if pof is not calculated for this grid
+                 if pof_sensor_grid is None:
+                     if room_center:
+                         floor_face = Face3D.join_coplanar_faces(
+                             room.horizontal_floor_boundaries(
+                                 tolerance=0.001),
+                             0.05)[0]
+                         if floor_face.is_convex:
+                             centroid = floor_face.centroid
+                         else:
+                             centroid = floor_face.pole_of_inaccessibility(0.01)
+                         dz = np.mean(z_coords) - centroid.z
+                         pof_sensor_grids[grid_info['full_id']] = \
+                             centroid + Vector3D(0, 0, dz)
+                     else:
+                         faces_3d = [
+                             Face3D(face_vertices)
+                             for face_vertices in
+                             sensor_grid.mesh.face_vertices]
+                         face_3d_union = Face3D.join_coplanar_faces(
+                             faces_3d, 0.05)
+                         assert len(face_3d_union) == 1
+                         if face_3d_union[0].is_convex:
+                             centroid = face_3d_union[0].centroid
+                             pof_sensor_grids[grid_info['full_id']] = centroid
+                         else:
+                             pof = face_3d_union[0].pole_of_inaccessibility(
+                                 0.01)
+                             pof_sensor_grids[grid_info['full_id']] = pof
+
+                 x = pof_sensor_grids[grid_info['full_id']].x
+                 y = pof_sensor_grids[grid_info['full_id']].y
+                 f_xy = perform_interpolation(
+                     x, y, x_coords, y_coords, pit_values)
+
+                 if room in ground_level_rooms:
+                     minimo = 48
+                 else:
+                     minimo = 60
+
+                 if f_xy >= 120:
+                     level = 'Superior'
+                 elif f_xy >= 90:
+                     level = 'Intermediario'
+                 elif f_xy >= minimo:  # add check for ground floor (48 lux)
+                     level = 'Minimo'
+                 else:
+                     level = 'Nao atende'
+
+                 room_summary = \
+                     summary_rooms_output.get(grid_info['full_id'], None)
+                 if room_summary is None:
+                     summary_rooms_output[grid_info['full_id']] = {
+                         'nivel': level,
+                         'iluminancia': f_xy,
+                         'grids_info': grid_info,
+                         pit_mapper[_subfolder]: f_xy,
+                     }
+                 else:
+                     if f_xy < room_summary['iluminancia']:
+                         room_summary['nivel'] = level
+                         room_summary['iluminancia'] = f_xy
+                     room_summary[pit_mapper[_subfolder]] = f_xy
+
+                 sub_output.append(
+                     {
+                         'nivel': level,
+                         'iluminancia': f_xy,
+                         'grids_info': grid_info
+                     }
+                 )
+
+                 conditions = [pit_values >= 120, pit_values >= 90,
+                               pit_values >= 60, pit_values < 60]
+                 conditions_values = [3, 2, 1, 0]
+                 illuminance_level = np.select(conditions, conditions_values)
+
+                 ill_level_file = illuminance_levels_folder.joinpath(
+                     _subfolder, f'{grid_info["full_id"]}.res')
+                 ill_level_file.parent.mkdir(parents=True, exist_ok=True)
+                 np.savetxt(ill_level_file, illuminance_level, fmt='%d')
+
+             grids_info_file = illuminance_levels_folder.joinpath(
+                 _subfolder, 'grids_info.json')
+             grids_info_file.write_text(json.dumps(grids_info, indent=2))
+
+             vis_data = metric_info_dict[_subfolder]
+             vis_metadata_file = illuminance_levels_folder.joinpath(
+                 _subfolder, 'vis_metadata.json')
+             vis_metadata_file.write_text(json.dumps(vis_data, indent=4))
+
+             summary_output[_subfolder] = sub_output
+
+             grids_info_file = folder.joinpath(_subfolder, 'grids_info.json')
+             grids_info_file.write_text(json.dumps(grids_info, indent=2))
+
+         # set up the default data types
+         dtype = [
+             ('Sensor Grid', 'O'),
+             ('Sensor Grid ID', 'O'),
+             ('23 de abril 09:30', np.float32),
+             ('23 de abril 15:30', np.float32),
+             ('23 de outubro 09:30', np.float32),
+             ('23 de outubro 15:30', np.float32),
+             ('Atendimento', 'O')
+         ]
+
+         # set up format
+         fmt = ['%s', '%s', '%.2f', '%.2f', '%.2f', '%.2f', '%s']
+
+         arrays = []
+         for room_summary in summary_rooms_output.values():
+             data = []
+             data.append(room_summary['grids_info']['name'])
+             data.append(room_summary['grids_info']['full_id'])
+             data.append(room_summary['23 de abril 09:30'])
+             data.append(room_summary['23 de abril 15:30'])
+             data.append(room_summary['23 de outubro 09:30'])
+             data.append(room_summary['23 de outubro 15:30'])
+             data.append(room_summary['nivel'])
+             arrays.append(tuple(data))
+
+         # create structured array
+         struct_array = np.array(arrays, dtype=dtype)
+
+         header = [dt[0] for dt in dtype]
+         # write header to summary_rooms_csv
+         with summary_rooms_csv.open(mode='w', encoding='utf-8') as output_file:
+             output_file.write(','.join(header))
+             output_file.write('\n')  # add newline after header
+
+         # write structured array to summary_rooms_csv
+         with summary_rooms_csv.open(mode='a', encoding='utf-8') as output_file:
+             np.savetxt(output_file, struct_array, delimiter=',', fmt=fmt)
+
+         center_points_file = sub_folder.joinpath('center_points.json')
+         data = [pof.to_dict() for pof in pof_sensor_grids.values()]
+         center_points_file.write_text(json.dumps(data, indent=4))
+
+     except Exception:
+         _logger.exception('Failed to calculate ABNT NBR 15575 metrics.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
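
Note: the abnt-nbr-15575 command interpolates a point-in-time illuminance value at each room's center point, falling back from bilinear interpolation to inverse distance weighting when fewer than four surrounding sensors are found. A minimal sketch of driving the command through click's test runner; the folder and model paths are hypothetical placeholders, and the folder must hold the four point-in-time sub-folders listed in the docstring:

    from click.testing import CliRunner
    from honeybee_radiance_postprocess.cli.abnt import abnt

    runner = CliRunner()
    # hypothetical paths; both must exist for click's validation to pass
    result = runner.invoke(abnt, [
        'abnt-nbr-15575', 'path/to/simulation_folder', 'path/to/model.hbjson',
        '--ground-level', '0', '--room-center',
        '--sub-folder', 'abnt_nbr_15575'
    ])
    assert result.exit_code == 0, result.output
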
honeybee_radiance_postprocess/cli/breeam.py
@@ -0,0 +1,96 @@
+ """Commands for BREEAM post-processing."""
+ import sys
+ import logging
+ from pathlib import Path
+ import os
+ import json
+ import click
+
+ from ladybug.color import Color
+ from ladybug.datatype.generic import GenericType
+ from ladybug.legend import LegendParameters
+
+ from honeybee_radiance_postprocess.breeam.breeam import breeam_daylight_assessment_4b
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands for BREEAM post-processing of Radiance results.')
+ def breeam():
+     pass
+
+
+ @breeam.command('breeam-4b')
+ @click.argument(
+     'folder',
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True)
+ )
+ @click.option('--model-file', '-m', help='A Honeybee Model file that was used '
+               'in the simulation.', type=click.Path(
+                   exists=False, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option(
+     '--grids-filter', '-gf', help='A pattern to filter the grids.', default='*',
+     show_default=True
+ )
+ @click.option(
+     '--sub-folder', '-sf', help='Relative path for subfolder to write output '
+     'files.', default='breeam_summary', type=click.Path(
+         exists=False, file_okay=False, dir_okay=True, resolve_path=True, path_type=Path)
+ )
+ def breeam_4b(
+     folder, model_file, grids_filter, sub_folder
+ ):
+     """Calculate metrics for BREEAM.
+
+     \b
+     Args:
+         folder: Results folder. This folder is an output folder of the annual
+             daylight recipe.
+     """
+     try:
+         breeam_daylight_assessment_4b(
+             folder, model=model_file, grids_filter=grids_filter,
+             sub_folder=sub_folder)
+     except Exception:
+         _logger.exception('Failed to calculate BREEAM metrics.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @breeam.command('breeam-4b-vis-metadata')
+ @click.option(
+     '--output-folder', '-o', help='Output folder for vis metadata files.',
+     type=click.Path(exists=False, file_okay=False, dir_okay=True, resolve_path=True),
+     default='visualization', show_default=True
+ )
+ def breeam_4b_vis(output_folder):
+     """Write visualization metadata files for BREEAM 4b."""
+     colors = [Color(220, 0, 0), Color(220, 110, 25), Color(255, 190, 0), Color(0, 220, 0)]
+     pass_fail_lpar = \
+         LegendParameters(min=0, max=3, colors=colors, segment_count=4, title='Pass/Fail')
+     pass_fail_lpar.ordinal_dictionary = {
+         0: 'Fail', 1: 'Min. illuminance only', 2: 'Avg. illuminance only', 3: 'Pass'}
+
+     metric_info_dict = {
+         'pass_fail': {
+             'type': 'VisualizationMetaData',
+             'data_type': GenericType('Pass/Fail', '').to_dict(),
+             'unit': '',
+             'legend_parameters': pass_fail_lpar.to_dict()
+         }
+     }
+     try:
+         if not os.path.exists(output_folder):
+             os.mkdir(output_folder)
+         for metric, data in metric_info_dict.items():
+             if not os.path.exists(os.path.join(output_folder, metric)):
+                 os.mkdir(os.path.join(output_folder, metric))
+             file_path = os.path.join(output_folder, metric, 'vis_metadata.json')
+             with open(file_path, 'w') as fp:
+                 json.dump(data, fp, indent=4)
+     except Exception:
+         _logger.exception('Failed to write the visualization metadata files.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
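
The two BREEAM commands can be exercised the same way. A sketch, assuming path/to/results is an existing annual-daylight recipe output folder (both paths are hypothetical placeholders):

    from click.testing import CliRunner
    from honeybee_radiance_postprocess.cli.breeam import breeam

    runner = CliRunner()
    # hypothetical results folder from an annual daylight recipe
    result = runner.invoke(breeam, [
        'breeam-4b', 'path/to/results', '--model-file', 'path/to/model.hbjson',
        '--grids-filter', '*', '--sub-folder', 'breeam_summary'
    ])
    assert result.exit_code == 0, result.output

    # the vis-metadata command only writes JSON files and needs no results
    result = runner.invoke(breeam, ['breeam-4b-vis-metadata', '-o', 'visualization'])
    assert result.exit_code == 0, result.output
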
honeybee_radiance_postprocess/cli/datacollection.py
@@ -0,0 +1,133 @@
+ """Commands to work with data collections."""
+ import sys
+ import logging
+ from pathlib import Path
+ import click
+ import json
+ try:
+     import cupy as np
+     is_gpu = True
+ except ImportError:
+     is_gpu = False
+     import numpy as np
+
+ from ladybug.datacollection import HourlyContinuousCollection, \
+     HourlyDiscontinuousCollection
+ from ladybug.header import Header
+ from ladybug.datautil import collections_to_csv
+
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands to work with data collections.')
+ def datacollection():
+     pass
+
+
+ @datacollection.command('npy-to-datacollections')
+ @click.argument(
+     'npy-file', type=click.Path(exists=True, dir_okay=False, resolve_path=True)
+ )
+ @click.argument(
+     'data-type', type=click.Path(exists=True, dir_okay=False, resolve_path=True)
+ )
+ @click.argument(
+     'grid-name', type=click.STRING
+ )
+ @click.option(
+     '--output-file', '-f', help='Optional file to output the JSON strings of '
+     'the data collections. By default, it will be printed to stdout',
+     type=click.File('w'), default='-', show_default=True
+ )
+ def npy_to_datacollections(npy_file, data_type, grid_name, output_file):
+     """Read an npy file and convert every row to a data collection.
+
+     The data collections will be saved in a JSON file. If no output file is
+     specified, they will be sent to stdout instead.
+
+     \b
+     Args:
+         npy-file: Path to npy file.
+         data-type: A JSON file with the data type.
+         grid-name: The name of the grid. This is used in the metadata of the
+             header.
+     """
+     with open(data_type) as json_file:
+         data_header = Header.from_dict(json.load(json_file))
+     a_per = data_header.analysis_period
+     continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False
+     if not continuous:
+         dates = a_per.datetimes
+     metadata = {'grid': grid_name}
+     try:
+         data_matrix = np.load(npy_file).tolist()
+         grid_data = []
+         for i, row in enumerate(data_matrix):
+             header = data_header.duplicate()
+             header.metadata = metadata.copy()
+             header.metadata['sensor_index'] = i
+             data = HourlyContinuousCollection(header, row) if continuous else \
+                 HourlyDiscontinuousCollection(header, row, dates)
+             grid_data.append(data.to_dict())
+         output_file.write(json.dumps(grid_data))
+     except Exception:
+         _logger.exception('Failed to convert npy to data collections.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @datacollection.command('folder-to-datacollections')
+ @click.argument(
+     'folder', type=click.Path(exists=True, dir_okay=True, resolve_path=True)
+ )
+ @click.argument(
+     'data-type', type=click.Path(exists=True, dir_okay=False, resolve_path=True)
+ )
+ @click.option(
+     '--sub-folder', '-sf', type=click.STRING, default='datacollections',
+     show_default=True
+ )
+ def folder_to_datacollections(folder, data_type, sub_folder):
+     """Read the npy files in a folder and convert every row to a data collection.
+
+     The data collections will be saved as CSV files in a sub-folder.
+
+     \b
+     Args:
+         folder: Path to a folder with npy files and a grids_info.json. Each
+             npy file will be converted to data collections.
+         data-type: A JSON file with the data type. This is used to create
+             the header of each data collection.
+     """
+     with open(Path(folder, 'grids_info.json')) as json_file:
+         grid_list = json.load(json_file)
+     with open(data_type) as json_file:
+         data_header = Header.from_dict(json.load(json_file))
+     a_per = data_header.analysis_period
+     continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False
+     if not continuous:
+         dates = a_per.datetimes
+     try:
+         for grid in grid_list:
+             grid_name = grid['full_id'] if 'full_id' in grid else grid['id']
+             metadata = {'grid': grid_name}
+             grid_file = Path(folder, '{}.npy'.format(grid_name))
+             data_matrix = np.load(grid_file).tolist()
+             grid_data = []
+             for i, row in enumerate(data_matrix):
+                 header = data_header.duplicate()
+                 header.metadata = metadata.copy()
+                 header.metadata['sensor_index'] = i
+                 data = HourlyContinuousCollection(header, row) if continuous else \
+                     HourlyDiscontinuousCollection(header, row, dates)
+                 grid_data.append(data)
+
+             file_name = grid_name + '.csv'
+             collections_to_csv(grid_data, Path(folder, sub_folder), file_name)
+     except Exception:
+         _logger.exception('Failed to convert folder of files to data collections.')
+         sys.exit(1)
+     else:
+         sys.exit(0)
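
Both datacollection commands read their data type from a serialized ladybug Header, whose analysis period decides between hourly continuous and discontinuous collections. A sketch of preparing that JSON and invoking npy-to-datacollections; the npy path and grid name are placeholders, and each npy row must match the period's length (8760 values for a full year):

    import json
    from click.testing import CliRunner
    from ladybug.analysisperiod import AnalysisPeriod
    from ladybug.datatype.illuminance import Illuminance
    from ladybug.header import Header

    from honeybee_radiance_postprocess.cli.datacollection import datacollection

    # a full-year hourly period (st_hour 0, end_hour 23) yields
    # HourlyContinuousCollection objects; anything else, discontinuous ones
    header = Header(Illuminance(), 'lux', AnalysisPeriod())
    with open('data_type.json', 'w') as f:
        json.dump(header.to_dict(), f)

    runner = CliRunner()
    result = runner.invoke(datacollection, [
        'npy-to-datacollections', 'path/to/grid.npy', 'data_type.json',
        'grid_1', '--output-file', 'grid_1.json'
    ])
    assert result.exit_code == 0, result.output
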