honeybee_energy-1.116.106-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (162)
  1. honeybee_energy/__init__.py +24 -0
  2. honeybee_energy/__main__.py +4 -0
  3. honeybee_energy/_extend_honeybee.py +145 -0
  4. honeybee_energy/altnumber.py +21 -0
  5. honeybee_energy/baseline/__init__.py +2 -0
  6. honeybee_energy/baseline/create.py +608 -0
  7. honeybee_energy/baseline/data/__init__.py +1 -0
  8. honeybee_energy/baseline/data/constructions.csv +64 -0
  9. honeybee_energy/baseline/data/fen_ratios.csv +15 -0
  10. honeybee_energy/baseline/data/lpd_building.csv +21 -0
  11. honeybee_energy/baseline/data/pci_2016.csv +22 -0
  12. honeybee_energy/baseline/data/pci_2019.csv +22 -0
  13. honeybee_energy/baseline/data/pci_2022.csv +22 -0
  14. honeybee_energy/baseline/data/shw.csv +21 -0
  15. honeybee_energy/baseline/pci.py +512 -0
  16. honeybee_energy/baseline/result.py +371 -0
  17. honeybee_energy/boundarycondition.py +128 -0
  18. honeybee_energy/cli/__init__.py +69 -0
  19. honeybee_energy/cli/baseline.py +475 -0
  20. honeybee_energy/cli/edit.py +327 -0
  21. honeybee_energy/cli/lib.py +1154 -0
  22. honeybee_energy/cli/result.py +810 -0
  23. honeybee_energy/cli/setconfig.py +124 -0
  24. honeybee_energy/cli/settings.py +569 -0
  25. honeybee_energy/cli/simulate.py +380 -0
  26. honeybee_energy/cli/translate.py +1714 -0
  27. honeybee_energy/cli/validate.py +224 -0
  28. honeybee_energy/config.json +11 -0
  29. honeybee_energy/config.py +842 -0
  30. honeybee_energy/construction/__init__.py +1 -0
  31. honeybee_energy/construction/_base.py +374 -0
  32. honeybee_energy/construction/air.py +325 -0
  33. honeybee_energy/construction/dictutil.py +89 -0
  34. honeybee_energy/construction/dynamic.py +607 -0
  35. honeybee_energy/construction/opaque.py +460 -0
  36. honeybee_energy/construction/shade.py +319 -0
  37. honeybee_energy/construction/window.py +1096 -0
  38. honeybee_energy/construction/windowshade.py +847 -0
  39. honeybee_energy/constructionset.py +1655 -0
  40. honeybee_energy/dictutil.py +56 -0
  41. honeybee_energy/generator/__init__.py +5 -0
  42. honeybee_energy/generator/loadcenter.py +204 -0
  43. honeybee_energy/generator/pv.py +535 -0
  44. honeybee_energy/hvac/__init__.py +21 -0
  45. honeybee_energy/hvac/_base.py +124 -0
  46. honeybee_energy/hvac/_template.py +270 -0
  47. honeybee_energy/hvac/allair/__init__.py +22 -0
  48. honeybee_energy/hvac/allair/_base.py +349 -0
  49. honeybee_energy/hvac/allair/furnace.py +168 -0
  50. honeybee_energy/hvac/allair/psz.py +131 -0
  51. honeybee_energy/hvac/allair/ptac.py +163 -0
  52. honeybee_energy/hvac/allair/pvav.py +109 -0
  53. honeybee_energy/hvac/allair/vav.py +128 -0
  54. honeybee_energy/hvac/detailed.py +337 -0
  55. honeybee_energy/hvac/doas/__init__.py +28 -0
  56. honeybee_energy/hvac/doas/_base.py +345 -0
  57. honeybee_energy/hvac/doas/fcu.py +127 -0
  58. honeybee_energy/hvac/doas/radiant.py +329 -0
  59. honeybee_energy/hvac/doas/vrf.py +81 -0
  60. honeybee_energy/hvac/doas/wshp.py +91 -0
  61. honeybee_energy/hvac/heatcool/__init__.py +23 -0
  62. honeybee_energy/hvac/heatcool/_base.py +177 -0
  63. honeybee_energy/hvac/heatcool/baseboard.py +61 -0
  64. honeybee_energy/hvac/heatcool/evapcool.py +72 -0
  65. honeybee_energy/hvac/heatcool/fcu.py +92 -0
  66. honeybee_energy/hvac/heatcool/gasunit.py +53 -0
  67. honeybee_energy/hvac/heatcool/radiant.py +269 -0
  68. honeybee_energy/hvac/heatcool/residential.py +77 -0
  69. honeybee_energy/hvac/heatcool/vrf.py +54 -0
  70. honeybee_energy/hvac/heatcool/windowac.py +70 -0
  71. honeybee_energy/hvac/heatcool/wshp.py +62 -0
  72. honeybee_energy/hvac/idealair.py +699 -0
  73. honeybee_energy/internalmass.py +310 -0
  74. honeybee_energy/lib/__init__.py +1 -0
  75. honeybee_energy/lib/_loadconstructions.py +194 -0
  76. honeybee_energy/lib/_loadconstructionsets.py +117 -0
  77. honeybee_energy/lib/_loadmaterials.py +83 -0
  78. honeybee_energy/lib/_loadprogramtypes.py +125 -0
  79. honeybee_energy/lib/_loadschedules.py +87 -0
  80. honeybee_energy/lib/_loadtypelimits.py +64 -0
  81. honeybee_energy/lib/constructions.py +207 -0
  82. honeybee_energy/lib/constructionsets.py +95 -0
  83. honeybee_energy/lib/materials.py +67 -0
  84. honeybee_energy/lib/programtypes.py +125 -0
  85. honeybee_energy/lib/schedules.py +61 -0
  86. honeybee_energy/lib/scheduletypelimits.py +31 -0
  87. honeybee_energy/load/__init__.py +1 -0
  88. honeybee_energy/load/_base.py +190 -0
  89. honeybee_energy/load/daylight.py +397 -0
  90. honeybee_energy/load/dictutil.py +47 -0
  91. honeybee_energy/load/equipment.py +771 -0
  92. honeybee_energy/load/hotwater.py +543 -0
  93. honeybee_energy/load/infiltration.py +460 -0
  94. honeybee_energy/load/lighting.py +480 -0
  95. honeybee_energy/load/people.py +497 -0
  96. honeybee_energy/load/process.py +472 -0
  97. honeybee_energy/load/setpoint.py +816 -0
  98. honeybee_energy/load/ventilation.py +550 -0
  99. honeybee_energy/material/__init__.py +1 -0
  100. honeybee_energy/material/_base.py +166 -0
  101. honeybee_energy/material/dictutil.py +59 -0
  102. honeybee_energy/material/frame.py +367 -0
  103. honeybee_energy/material/gas.py +1087 -0
  104. honeybee_energy/material/glazing.py +854 -0
  105. honeybee_energy/material/opaque.py +1351 -0
  106. honeybee_energy/material/shade.py +1360 -0
  107. honeybee_energy/measure.py +472 -0
  108. honeybee_energy/programtype.py +723 -0
  109. honeybee_energy/properties/__init__.py +1 -0
  110. honeybee_energy/properties/aperture.py +333 -0
  111. honeybee_energy/properties/door.py +342 -0
  112. honeybee_energy/properties/extension.py +244 -0
  113. honeybee_energy/properties/face.py +274 -0
  114. honeybee_energy/properties/model.py +2640 -0
  115. honeybee_energy/properties/room.py +1747 -0
  116. honeybee_energy/properties/shade.py +314 -0
  117. honeybee_energy/properties/shademesh.py +262 -0
  118. honeybee_energy/reader.py +48 -0
  119. honeybee_energy/result/__init__.py +1 -0
  120. honeybee_energy/result/colorobj.py +648 -0
  121. honeybee_energy/result/emissions.py +290 -0
  122. honeybee_energy/result/err.py +101 -0
  123. honeybee_energy/result/eui.py +100 -0
  124. honeybee_energy/result/generation.py +160 -0
  125. honeybee_energy/result/loadbalance.py +890 -0
  126. honeybee_energy/result/match.py +202 -0
  127. honeybee_energy/result/osw.py +90 -0
  128. honeybee_energy/result/rdd.py +59 -0
  129. honeybee_energy/result/zsz.py +190 -0
  130. honeybee_energy/run.py +1577 -0
  131. honeybee_energy/schedule/__init__.py +1 -0
  132. honeybee_energy/schedule/day.py +626 -0
  133. honeybee_energy/schedule/dictutil.py +59 -0
  134. honeybee_energy/schedule/fixedinterval.py +1012 -0
  135. honeybee_energy/schedule/rule.py +619 -0
  136. honeybee_energy/schedule/ruleset.py +1867 -0
  137. honeybee_energy/schedule/typelimit.py +310 -0
  138. honeybee_energy/shw.py +315 -0
  139. honeybee_energy/simulation/__init__.py +1 -0
  140. honeybee_energy/simulation/control.py +214 -0
  141. honeybee_energy/simulation/daylightsaving.py +185 -0
  142. honeybee_energy/simulation/dictutil.py +51 -0
  143. honeybee_energy/simulation/output.py +646 -0
  144. honeybee_energy/simulation/parameter.py +606 -0
  145. honeybee_energy/simulation/runperiod.py +443 -0
  146. honeybee_energy/simulation/shadowcalculation.py +295 -0
  147. honeybee_energy/simulation/sizing.py +546 -0
  148. honeybee_energy/ventcool/__init__.py +5 -0
  149. honeybee_energy/ventcool/_crack_data.py +91 -0
  150. honeybee_energy/ventcool/afn.py +289 -0
  151. honeybee_energy/ventcool/control.py +269 -0
  152. honeybee_energy/ventcool/crack.py +126 -0
  153. honeybee_energy/ventcool/fan.py +493 -0
  154. honeybee_energy/ventcool/opening.py +365 -0
  155. honeybee_energy/ventcool/simulation.py +314 -0
  156. honeybee_energy/writer.py +1078 -0
  157. honeybee_energy-1.116.106.dist-info/METADATA +113 -0
  158. honeybee_energy-1.116.106.dist-info/RECORD +162 -0
  159. honeybee_energy-1.116.106.dist-info/WHEEL +5 -0
  160. honeybee_energy-1.116.106.dist-info/entry_points.txt +2 -0
  161. honeybee_energy-1.116.106.dist-info/licenses/LICENSE +661 -0
  162. honeybee_energy-1.116.106.dist-info/top_level.txt +1 -0
@@ -0,0 +1,810 @@
+ """honeybee energy result parsing commands."""
+ import click
+ import sys
+ import logging
+ import os
+ import json
+
+ from honeybee.model import Model
+ from honeybee.face import Face
+ from ladybug.datacollection import HourlyContinuousCollection, DailyCollection, \
+     MonthlyCollection
+ from ladybug.sql import SQLiteResult
+ from ladybug.dt import Date
+ from ladybug.datatype.area import Area
+ from ladybug.datatype.energyintensity import EnergyIntensity
+ from ladybug.datatype.energy import Energy
+
+ from honeybee_energy.result.match import match_rooms_to_data, match_faces_to_data
+ from honeybee_energy.result.eui import eui_from_sql
+ from honeybee_energy.result.generation import generation_summary_from_sql, \
+     generation_data_from_sql
+ from honeybee_energy.result.emissions import emissions_from_sql
+ from honeybee_energy.result.loadbalance import LoadBalance
+
+ _logger = logging.getLogger(__name__)
+
+
+ @click.group(help='Commands for parsing EnergyPlus results.')
+ def result():
+     pass
+
+
+ @result.command('available-results')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the list of available'
+               ' outputs. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def available_results(result_sql, output_file):
+     """Get an array of all timeseries outputs within an sql file.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         output_file.write(json.dumps(sql_obj.available_outputs))
+     except Exception as e:
+         _logger.exception('Failed to parse sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('available-results-info')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the list of available'
+               ' outputs. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def available_results_info(result_sql, output_file):
+     """Get all timeseries outputs within an sql file and metadata about them.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         all_info = []
+         for outp_dict in sql_obj.available_outputs_info:
+             clean_dict = {
+                 'output_name': outp_dict['output_name'],
+                 'object_type': outp_dict['object_type'],
+                 'units': outp_dict['units']
+             }
+             d_type = outp_dict['data_type']
+             clean_dict['units_ip'] = d_type.ip_units[0]
+             clean_dict['cumulative'] = d_type.cumulative
+             if d_type.normalized_type is not None:
+                 norm_type = d_type.normalized_type()
+                 clean_dict['normalized_units'] = norm_type.units[0]
+                 clean_dict['normalized_units_ip'] = norm_type.ip_units[0]
+             else:
+                 clean_dict['normalized_units'] = None
+                 clean_dict['normalized_units_ip'] = None
+             all_info.append(clean_dict)
+         output_file.write(json.dumps(all_info))
+     except Exception as e:
+         _logger.exception('Failed to parse sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('available-run-period-info')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the list of available'
+               ' outputs. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def available_run_period_info(result_sql, output_file):
+     """Get an array of run period info within an sql file.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         time_int = sql_obj.reporting_frequency
+         all_info = []
+         for runper, per_name in zip(sql_obj.run_periods, sql_obj.run_period_names):
+             clean_dict = {
+                 'name': per_name,
+                 'time_interval': time_int,
+                 'start_date': [runper.st_month, runper.st_day],
+                 'end_date': [runper.end_month, runper.end_day]
+             }
+             all_info.append(clean_dict)
+         output_file.write(json.dumps(all_info))
+     except Exception as e:
+         _logger.exception('Failed to parse sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('all-available-info')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the list of available'
+               ' outputs. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def all_available_info(result_sql, output_file):
+     """Get a dictionary with metadata of all outputs and run periods within an sql file.
+
+     The dictionary will have two keys: 'run_periods' and 'outputs'.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         # create the SQLiteResult object
+         sql_obj = SQLiteResult(result_sql)
+         all_info = {}
+
+         # get all of the info on the outputs within the file
+         all_outp = []
+         for outp_dict in sql_obj.available_outputs_info:
+             clean_dict = {
+                 'output_name': outp_dict['output_name'],
+                 'object_type': outp_dict['object_type'],
+                 'units': outp_dict['units']
+             }
+             d_type = outp_dict['data_type']
+             clean_dict['units_ip'] = d_type.ip_units[0]
+             clean_dict['cumulative'] = d_type.cumulative
+             if d_type.normalized_type is not None:
+                 norm_type = d_type.normalized_type()
+                 clean_dict['normalized_units'] = norm_type.units[0]
+                 clean_dict['normalized_units_ip'] = norm_type.ip_units[0]
+             else:
+                 clean_dict['normalized_units'] = None
+                 clean_dict['normalized_units_ip'] = None
+             all_outp.append(clean_dict)
+         all_info['outputs'] = all_outp
+
+         # get all of the run periods within the file
+         time_int = sql_obj.reporting_frequency
+         all_run_per = []
+         for runper, per_name in zip(sql_obj.run_periods, sql_obj.run_period_names):
+             clean_dict = {
+                 'name': per_name,
+                 'time_interval': time_int,
+                 'start_date': [runper.st_month, runper.st_day],
+                 'end_date': [runper.end_month, runper.end_day]
+             }
+             all_run_per.append(clean_dict)
+         all_info['run_periods'] = all_run_per
+         output_file.write(json.dumps(all_info))
+     except Exception as e:
+         _logger.exception('Failed to parse sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('energy-use-intensity')
+ @click.argument('result-paths', nargs=-1, required=True, type=click.Path(
+     exists=True, file_okay=True, dir_okay=True, resolve_path=True))
+ @click.option('--si/--ip', help='Flag to note whether the EUI should be in '
+               'SI (kWh/m2) or IP (kBtu/ft2) units.', default=True, show_default=True)
+ @click.option('--output-file', '-f', help='Optional file to output the result of the '
+               'EUI calculation. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def energy_use_intensity(result_paths, si, output_file):
+     """Get information about energy use intensity and an EUI breakdown by end use.
+
+     \b
+     Args:
+         result_paths: Path to one or more SQLite files that were generated by
+             EnergyPlus or folders containing such files. Folders can be from a
+             single EnergyPlus simulation or may contain multiple SQLite files.
+             EUI will be computed across all files provided.
+     """
+     try:
+         # assemble all of the eui results into a dictionary
+         result_dict = eui_from_sql(result_paths)
+
+         # convert data to IP if requested
+         if not si:
+             eui_typ, a_typ, e_typ = EnergyIntensity(), Area(), Energy()
+             result_dict['eui'] = \
+                 round(eui_typ.to_ip([result_dict['eui']], 'kWh/m2')[0][0], 3)
+             result_dict['total_floor_area'] = \
+                 round(a_typ.to_ip([result_dict['total_floor_area']], 'm2')[0][0], 3)
+             result_dict['conditioned_floor_area'] = \
+                 round(a_typ.to_ip(
+                     [result_dict['conditioned_floor_area']], 'm2')[0][0], 3)
+             result_dict['total_energy'] = \
+                 round(e_typ.to_ip([result_dict['total_energy']], 'kWh')[0][0], 3)
+             result_dict['end_uses'] = \
+                 {key: round(eui_typ.to_ip([val], 'kWh/m2')[0][0], 3)
+                  for key, val in result_dict['end_uses'].items()}
+
+         # write everything into the output file
+         output_file.write(json.dumps(result_dict, indent=4))
+     except Exception as e:
+         _logger.exception('Failed to compute EUI from sql files.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('carbon-emission-intensity')
+ @click.argument('result-paths', nargs=-1, required=True, type=click.Path(
+     exists=True, file_okay=True, dir_okay=True, resolve_path=True))
+ @click.option('--electricity-emissions', '-e', help='A number for the electric '
+               'grid carbon emissions in kg CO2 per MWh.',
+               type=float, default=400, show_default=True)
+ @click.option('--output-file', '-f', help='Optional file to output the result of the '
+               'CEI calculation. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def carbon_emission_intensity(result_paths, electricity_emissions, output_file):
+     """Get information about carbon emission intensity (CEI) and a breakdown by end use.
+
+     \b
+     Args:
+         result_paths: Path to one or more SQLite files that were generated by
+             EnergyPlus or folders containing such files. Folders can be from a
+             single EnergyPlus simulation or may contain multiple SQLite files.
+             CEI will be computed across all files provided.
+     """
+     try:
+         # assemble all of the emissions results into a dictionary
+         result_dict = emissions_from_sql(result_paths, electricity_emissions)
+         # write everything into the output file
+         output_file.write(json.dumps(result_dict, indent=4))
+     except Exception as e:
+         _logger.exception('Failed to compute emissions from sql files.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('generation-summary')
+ @click.argument('result-paths', nargs=-1, required=True, type=click.Path(
+     exists=True, file_okay=True, dir_okay=True, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the result of the '
+               'generation summary. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def generation_summary(result_paths, output_file):
+     """Get a summary about electricity generation and usage.
+
+     \b
+     Args:
+         result_paths: Path to one or more SQLite files that were generated by
+             EnergyPlus or folders containing such files. Folders can be from a
+             single EnergyPlus simulation or may contain multiple SQLite files.
+             Generation metrics will be computed across all files provided.
+     """
+     try:
+         # assemble all of the generation results into a dictionary
+         result_dict = generation_summary_from_sql(result_paths)
+         # write everything into the output file
+         output_file.write(json.dumps(result_dict, indent=4))
+     except Exception as e:
+         _logger.exception(
+             'Failed to compute generation summary from sql files.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('generation-data')
+ @click.argument('result-paths', nargs=-1, required=True, type=click.Path(
+     exists=True, file_okay=True, dir_okay=True, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the result of the '
+               'generation data. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def generation_data(result_paths, output_file):
+     """Get data collections for electricity generation and usage.
+
+     \b
+     Args:
+         result_paths: Path to one or more SQLite files that were generated by
+             EnergyPlus or folders containing such files. Folders can be from a
+             single EnergyPlus simulation or may contain multiple SQLite files.
+             Generation metrics will be computed across all files provided.
+     """
+     try:
+         # assemble all of the generation results into data collections
+         production, consumption = generation_data_from_sql(result_paths)
+         # write everything into the output file
+         if consumption is None:
+             output_file.write(json.dumps([], indent=4))
+         elif isinstance(consumption, (float, int)):
+             output_file.write(json.dumps([production, consumption], indent=4))
+         else:
+             output_file.write(json.dumps(
+                 [production.to_dict(), consumption.to_dict()],
+                 indent=4
+             ))
+     except Exception as e:
+         _logger.exception(
+             'Failed to compute generation data from sql files.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('tabular-data')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('table-name', type=str)
+ @click.option('--output-file', '-f', help='Optional file to output the JSON matrix of '
+               'tabular data. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def tabular_data(result_sql, table_name, output_file):
+     """Get all the data within a table of a Summary Report using the table name.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+         table_name: Text string for the name of a table within a summary
+             report (e.g. 'General').
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         table_dict = sql_obj.tabular_data_by_name(str(table_name))
+         output_file.write(json.dumps(list(table_dict.values())))
+     except Exception as e:
+         _logger.exception('Failed to retrieve table data from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('tabular-metadata')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('table-name', type=str)
+ @click.option('--output-file', '-f', help='Optional file to output the JSON matrix of '
+               'tabular data. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def tabular_metadata(result_sql, table_name, output_file):
+     """Get a dictionary with the names of a table's rows and columns.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+         table_name: Text string for the name of a table within a summary
+             report (e.g. 'General').
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         table_dict = sql_obj.tabular_data_by_name(str(table_name))
+         row_names = list(table_dict.keys())
+         col_names = sql_obj.tabular_column_names(str(table_name))
+         output_file.write(json.dumps(
+             {'row_names': row_names, 'column_names': col_names}))
+     except Exception as e:
+         _logger.exception('Failed to retrieve table data from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('data-by-output')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('output-name', type=str)
+ @click.option('--output-file', '-f', help='Optional file to output the JSON strings of '
+               'the data collections. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def data_by_output(result_sql, output_name, output_file):
+     """Get an array of DataCollection JSONs for a specific EnergyPlus output.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+         output_name: The name of an EnergyPlus output to be retrieved from
+             the SQLite result file. This can also be an array of names if the
+             string is formatted as a JSON array with [] brackets. Note that only
+             a single array of data collection JSONs will be returned from this
+             method and, if data collections must be grouped, the data_by_outputs
+             method should be used.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         output_name = str(output_name)
+         if output_name.startswith('['):
+             output_name = tuple(outp.replace('"', '').strip()
+                                 for outp in output_name.strip('[]').split(','))
+         data_colls = sql_obj.data_collections_by_output_name(output_name)
+         output_file.write(json.dumps([data.to_dict() for data in data_colls]))
+     except Exception as e:
+         _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('data-by-outputs')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('output-names', type=str, nargs=-1)
+ @click.option('--output-file', '-f', help='Optional file to output the JSON strings of '
+               'the data collections. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def data_by_outputs(result_sql, output_names, output_file):
+     """Get an array of DataCollection JSONs for several EnergyPlus outputs.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+         output_names: An array of EnergyPlus output names to be retrieved from
+             the SQLite result file. This can also be a nested array (an array of
+             output name arrays) if each string is formatted as a JSON array
+             with [] brackets.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         data_colls = []
+         for output_name in output_names:
+             output_name = str(output_name)
+             if output_name.startswith('['):
+                 output_name = tuple(outp.replace('"', '').strip()
+                                     for outp in output_name.strip('[]').split(','))
+             data_cs = sql_obj.data_collections_by_output_name(output_name)
+             data_colls.append([data.to_dict() for data in data_cs])
+         output_file.write(json.dumps(data_colls))
+     except Exception as e:
+         _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('output-csv')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('output-names', type=str, nargs=-1)
+ @click.option('--output-file', '-f', help='Optional file path to output the CSV data of '
+               'the results. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def output_csv(result_sql, output_names, output_file):
+     """Get CSV for specific EnergyPlus outputs.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+         output_names: The name of an EnergyPlus output to be retrieved from
+             the SQLite result file. This can also be several output names
+             for which all data collections should be retrieved.
+     """
+     try:
+         # get the data collections
+         sql_obj = SQLiteResult(result_sql)
+         data_colls = []
+         for output_name in output_names:
+             output_name = str(output_name)
+             if output_name.startswith('['):
+                 output_name = tuple(outp.replace('"', '').strip()
+                                     for outp in output_name.strip('[]').split(','))
+             data_colls.extend(sql_obj.data_collections_by_output_name(output_name))
+
+         # create the header rows
+         type_row = ['DateTime'] + [data.header.metadata['type'] for data in data_colls]
+         units_row = [''] + [data.header.unit for data in data_colls]
+         obj_row = ['']
+         for data in data_colls:
+             try:
+                 obj_row.append(data.header.metadata['Zone'])
+             except KeyError:
+                 try:
+                     obj_row.append(data.header.metadata['Surface'])
+                 except KeyError:
+                     try:
+                         obj_row.append(data.header.metadata['System'])
+                     except KeyError:
+                         obj_row.append('')
+
+         # create the data rows
+         try:
+             datetimes = [data_colls[0].datetimes]
+         except IndexError:  # no data for the requested type
+             datetimes = []
+         val_columns = datetimes + [data.values for data in data_colls]
+
+         # write everything into the output file
+         def write_row(row):
+             output_file.write(','.join([str(item) for item in row]) + '\n')
+         write_row(type_row)
+         write_row(units_row)
+         write_row(obj_row)
+         for row in zip(*val_columns):
+             write_row(row)
+     except Exception as e:
+         _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('output-csv-queryable')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('model-json', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('run-period-name', type=str)
+ @click.argument('output-names', type=str, nargs=-1)
+ @click.option('--si/--ip', help='Flag to note whether the data in the resulting CSV '
+               'should be in SI or IP units.', default=True, show_default=True)
+ @click.option('--normalize/--no-normalize', ' /-nn', help='Flag to note whether the '
+               'data in the resulting CSV should be normalized by floor area (in the '
+               'case of Zone/System data) or surface area (in the case of Surface data). '
+               'This flag has no effect if the requested data is not normalizable',
+               default=True, show_default=True)
+ @click.option('--folder', '-f', help='Folder on this computer, into which the CSV '
+               'files will be written. If None, the files will be output in the '
+               'same location as the result_sql.', default=None, show_default=True,
+               type=click.Path(file_okay=False, dir_okay=True, resolve_path=True))
+ @click.option('--log-file', '-log', help='Optional file to output the names of the '
+               'columns within the CSV. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def output_csv_queryable(result_sql, model_json, run_period_name, output_names,
+                          si, normalize, folder, log_file):
+     """Get CSV of outputs resembling a SQLite table that is easily queryable.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+         model_json: Full path to a Model JSON that will be matched with the results.
+         run_period_name: The name of the run period from which the CSV data will
+             be selected (e.g. "BOSTON LOGAN INTL ARPT ANN CLG .4% CONDNS DB=>MWB").
+         output_names: The name of an EnergyPlus output to be retrieved from
+             the SQLite result file. This can also be several output names
+             for which all data collections should be retrieved.
+     """
+     try:
+         # figure out the index of the run period
+         sql_obj = SQLiteResult(result_sql)
+         per_names, per_indices = sql_obj.run_period_names, sql_obj.run_period_indices
+         per_i = per_indices[per_names.index(run_period_name)]
+
+         # get the data collections for each output
+         data_colls = []
+         for output_name in output_names:
+             output_name = str(output_name)
+             if output_name.startswith('['):
+                 output_names = tuple(outp.replace('"', '').strip()
+                                      for outp in output_name.strip('[]').split(','))
+                 for outp in output_names:
+                     col = sql_obj.data_collections_by_output_name_run_period(outp, per_i)
+                     data_colls.append(col)
+             else:
+                 col = sql_obj.data_collections_by_output_name_run_period(
+                     output_name, per_i)
+                 data_colls.append(col)
+
+         # convert the data to IP if it was requested
+         if not si:
+             for colls in data_colls:
+                 for data in colls:
+                     data.convert_to_ip()
+
+         # re-serialize the Model to Python and ensure it's in correct SI/IP units
+         with open(model_json) as json_file:
+             data = json.load(json_file)
+         model = Model.from_dict(data)
+         if si:
+             model.convert_to_units('Meters')
+         else:
+             model.convert_to_units('Feet')
+
+         # match the objects in the Model to the data collections
+         room_csv_data = []
+         face_csv_data = []
+         faces = None
+         for colls in data_colls:
+             if len(colls) == 0:
+                 continue
+             if 'Surface' in colls[0].header.metadata:
+                 if faces is None:
+                     faces = []
+                     for room in model.rooms:
+                         faces.extend(room.faces)
+                 match_data = match_faces_to_data(colls, faces)
+                 if len(match_data) != 0:
+                     face_csv_data.append(match_data)
+             elif 'Zone' in colls[0].header.metadata \
+                     or 'System' in colls[0].header.metadata:
+                 match_data = match_rooms_to_data(colls, model.rooms)
+                 if len(match_data) != 0:
+                     room_csv_data.append(match_data)
+         assert len(room_csv_data) != 0 or len(face_csv_data) != 0, \
+             'None of the requested outputs could be matched to the model_json.'
+
+         # normalize the data if this was requested
+         if normalize:
+             for matched_data in face_csv_data:  # normalize face data
+                 if matched_data[0][1].header.data_type.normalized_type is not None:
+                     for matched_tup in matched_data:
+                         area = matched_tup[0].area if not isinstance(matched_tup[0], Face) \
+                             else matched_tup[0].punched_geometry.area
+                         matched_tup[1].values = \
+                             [val / area for val in matched_tup[1].values]
+         for matched_data in room_csv_data:  # normalize room data
+             if normalize and matched_data[0][1].header.data_type.normalized_type \
+                     is not None:
+                 for matched_tup in matched_data:
+                     area = matched_tup[0].floor_area
+                     try:
+                         matched_tup[1].values = [val / (area * matched_tup[2])
+                                                  for val in matched_tup[1].values]
+                     except ZeroDivisionError:  # no floor area for room
+                         matched_tup[1].values = [0] * len(matched_tup[1])
+             else:  # we should still account for room multipliers
+                 matched_tup[1].values = \
+                     [val / matched_tup[2] for val in matched_tup[1].values]
+
+         # create the datetime columns
+         base_coll = room_csv_data[0][0][1] if len(room_csv_data) != 0 else \
+             face_csv_data[0][0][1]
+         year = '2016' if base_coll.header.analysis_period.is_leap_year else '2017'
+         date_times = []
+         if isinstance(base_coll, HourlyContinuousCollection):
+             for dat_t in base_coll.datetimes:
+                 date_times.append(
+                     [year, str(dat_t.month), str(dat_t.day), str(dat_t.hour),
+                      str(dat_t.minute)])
+         elif isinstance(base_coll, DailyCollection):
+             for dat_t in base_coll.datetimes:
+                 date_obj = Date.from_doy(dat_t)
+                 date_times.append(
+                     [year, str(date_obj.month), str(date_obj.day), '0', '0'])
+         elif isinstance(base_coll, MonthlyCollection):
+             for dat_t in base_coll.datetimes:
+                 date_times.append([year, str(dat_t), '1', '0', '0'])
+
+         # determine the output folder location
+         if folder is None:
+             folder = os.path.dirname(result_sql)
+
+         # write everything into the output CSVs
+         def write_rows(csv_file, datas, identifier):
+             data_rows = [row[:] for row in date_times]  # copy datetimes
+             for row in data_rows:
+                 row.append(identifier)
+             for data in datas:
+                 for i, val in enumerate(data.values):
+                     data_rows[i].append(str(val))
+             for row in data_rows:
+                 csv_file.write(','.join(row) + '\n')
+
+         col_names_dict = {}
+         if len(room_csv_data) != 0:
+             room_file = os.path.join(folder, 'eplusout_room.csv')
+             col_names_dict['eplusout_room'] = \
+                 ['year', 'month', 'day', 'hour', 'minute', 'identifier'] + \
+                 [data[0][1].header.metadata['type'].replace(' ', '_').lower()
+                  for data in room_csv_data]
+             with open(room_file, 'w') as rm_file:
+                 rm_file.write(','.join(col_names_dict['eplusout_room']) + '\n')
+                 for outp_tups in zip(*room_csv_data):
+                     datas = [tup[1] for tup in outp_tups]
+                     identifier = outp_tups[0][0].identifier
+                     write_rows(rm_file, datas, identifier)
+         if len(face_csv_data) != 0:
+             room_file = os.path.join(folder, 'eplusout_face.csv')
+             col_names_dict['eplusout_face'] = \
+                 ['year', 'month', 'day', 'hour', 'minute', 'identifier'] + \
+                 [data[0][1].header.metadata['type'].replace(' ', '_').lower()
+                  for data in face_csv_data]
+             with open(room_file, 'w') as f_file:
+                 f_file.write(','.join(col_names_dict['eplusout_face']) + '\n')
+                 for outp_tups in zip(*face_csv_data):
+                     datas = [tup[1] for tup in outp_tups]
+                     identifier = outp_tups[0][0].identifier
+                     write_rows(f_file, datas, identifier)
+
+         # write the column names into the output file
+         log_file.write(json.dumps(col_names_dict))
+     except Exception as e:
+         _logger.exception('Failed to write queryable csv from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('zone-sizes')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--output-file', '-f', help='Optional file to output the JSON strings of '
+               'the ZoneSize objects. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def zone_sizes(result_sql, output_file):
+     """Get a dictionary with two arrays of ZoneSize JSONs under 'cooling' and 'heating'.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         base = {}
+         base['cooling'] = [zs.to_dict() for zs in sql_obj.zone_cooling_sizes]
+         base['heating'] = [zs.to_dict() for zs in sql_obj.zone_heating_sizes]
+         output_file.write(json.dumps(base))
+     except Exception as e:
+         _logger.exception('Failed to retrieve zone sizes from sql file.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('component-sizes')
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--component-type', '-ct', help='A name of a HVAC component type, which '
+               'will be used to filter the output HVAC components. If "None", all HVAC '
+               'component sizes will be output.',
+               type=str, default=None, show_default=True)
+ @click.option('--output-file', '-f', help='Optional file to output the JSON strings of '
+               'the ComponentSize objects. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def component_sizes(result_sql, component_type, output_file):
+     """Get a list of ComponentSize JSONs.
+
+     \b
+     Args:
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         sql_obj = SQLiteResult(result_sql)
+         comp_sizes = []
+         if component_type is None or component_type == '' or component_type == 'None':
+             for comp_size in sql_obj.component_sizes:
+                 comp_sizes.append(comp_size.to_dict())
+         else:
+             for comp_size in sql_obj.component_sizes_by_type(component_type):
+                 comp_sizes.append(comp_size.to_dict())
+         output_file.write(json.dumps(comp_sizes))
+     except Exception as e:
+         _logger.exception('Failed to retrieve component sizes from sql.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
+
+
+ @result.command('load-balance')
+ @click.argument('model-json', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.argument('result-sql', type=click.Path(
+     exists=True, file_okay=True, dir_okay=False, resolve_path=True))
+ @click.option('--normalize/--no-normalize', ' /-nn', help='Flag to note whether the '
+               'data should be normalized by floor area. This flag has no effect if the '
+               'requested data is not normalizable', default=True, show_default=True)
+ @click.option('--storage/--no-storage', ' /-ns', help='Flag to note whether the storage '
+               'term should be included in the list.', default=True, show_default=True)
+ @click.option('--output-file', '-f', help='Optional file to output the JSON strings of '
+               'the data collections. By default, it will be printed to stdout',
+               type=click.File('w'), default='-', show_default=True)
+ def load_balance(model_json, result_sql, normalize, storage, output_file):
+     """Get an array of DataCollection JSONs for a complete model's load balance.
+
+     \b
+     Args:
+         model_json: Full path to a Model JSON file used for simulation.
+         result_sql: Full path to an SQLite file that was generated by EnergyPlus.
+     """
+     try:
+         # serialize the objects to Python
+         with open(model_json) as json_file:
+             data = json.load(json_file)
+         model = Model.from_dict(data)
+
+         # create the load balance object and output data to a JSON
+         bal_obj = LoadBalance.from_sql_file(model, result_sql)
+         balance = bal_obj.load_balance_terms(normalize, storage)
+         output_file.write(json.dumps([data.to_dict() for data in balance]))
+     except Exception as e:
+         _logger.exception('Failed to construct load balance.\n{}'.format(e))
+         sys.exit(1)
+     else:
+         sys.exit(0)
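
For anyone wanting to poke at these commands in-process rather than from a shell, a minimal sketch using click's built-in test runner is shown below; the ./eplusout.sql path is a placeholder and must point to a real EnergyPlus SQLite result file.

    import json
    from click.testing import CliRunner
    from honeybee_energy.cli.result import result

    runner = CliRunner()
    # invoke the 'available-results' subcommand of the group defined above
    cmd = runner.invoke(result, ['available-results', './eplusout.sql'])
    if cmd.exit_code == 0:
        print(json.loads(cmd.output))  # list of timeseries output names in the file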