openstudio-workflow 1.0.0.pat1 → 1.0.0

Files changed (46)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +12 -0
  3. data/README.md +16 -68
  4. data/Rakefile +9 -9
  5. data/bin/openstudio_cli +786 -0
  6. data/lib/openstudio/workflow/adapters/input/local.rb +97 -0
  7. data/lib/openstudio/workflow/adapters/output/local.rb +90 -0
  8. data/lib/openstudio/workflow/adapters/output/socket.rb +70 -0
  9. data/lib/openstudio/workflow/{jobs/run_preflight/run_preflight.rb → adapters/output/web.rb} +37 -19
  10. data/lib/openstudio/workflow/{adapter.rb → adapters/output_adapter.rb} +53 -51
  11. data/lib/openstudio/workflow/job.rb +22 -0
  12. data/lib/openstudio/workflow/jobs/{run_energyplus → resources}/monthly_report.idf +0 -0
  13. data/lib/openstudio/workflow/jobs/run_energyplus.rb +49 -0
  14. data/lib/openstudio/workflow/jobs/run_ep_measures.rb +55 -0
  15. data/lib/openstudio/workflow/jobs/run_initialization.rb +136 -0
  16. data/lib/openstudio/workflow/jobs/run_os_measures.rb +59 -0
  17. data/lib/openstudio/workflow/jobs/run_postprocess.rb +53 -0
  18. data/lib/openstudio/workflow/jobs/run_preprocess.rb +81 -0
  19. data/lib/openstudio/workflow/jobs/run_reporting_measures.rb +86 -0
  20. data/lib/openstudio/workflow/jobs/run_translation.rb +49 -0
  21. data/lib/openstudio/workflow/multi_delegator.rb +1 -3
  22. data/lib/openstudio/workflow/registry.rb +137 -0
  23. data/lib/openstudio/workflow/run.rb +182 -221
  24. data/lib/openstudio/workflow/time_logger.rb +1 -1
  25. data/lib/openstudio/workflow/util/energyplus.rb +564 -0
  26. data/lib/openstudio/workflow/util/io.rb +33 -0
  27. data/lib/openstudio/workflow/util/measure.rb +520 -0
  28. data/lib/openstudio/workflow/util/model.rb +100 -0
  29. data/lib/openstudio/workflow/util/post_process.rb +177 -0
  30. data/lib/openstudio/workflow/util/weather_file.rb +108 -0
  31. data/lib/openstudio/workflow/util.rb +14 -0
  32. data/lib/openstudio/workflow/version.rb +1 -1
  33. data/lib/openstudio/workflow_json.rb +399 -0
  34. data/lib/openstudio/workflow_runner.rb +213 -0
  35. data/lib/openstudio-workflow.rb +13 -118
  36. metadata +45 -85
  37. data/lib/openstudio/extended_runner.rb +0 -105
  38. data/lib/openstudio/workflow/adapters/local.rb +0 -101
  39. data/lib/openstudio/workflow/adapters/mongo.rb +0 -227
  40. data/lib/openstudio/workflow/jobs/lib/apply_measures.rb +0 -253
  41. data/lib/openstudio/workflow/jobs/run_energyplus/run_energyplus.rb +0 -314
  42. data/lib/openstudio/workflow/jobs/run_openstudio/run_openstudio.rb +0 -230
  43. data/lib/openstudio/workflow/jobs/run_postprocess/run_postprocess.rb +0 -110
  44. data/lib/openstudio/workflow/jobs/run_reporting_measures/run_reporting_measures.rb +0 -471
  45. data/lib/openstudio/workflow/jobs/run_runmanager/run_runmanager.rb +0 -247
  46. data/lib/openstudio/workflow/jobs/run_xml/run_xml.rb +0 -279
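The only hunk reproduced in full below is item 44, the deleted legacy reporting-measures job (run_reporting_measures.rb). One behaviour worth noting before reading it is the result-key sanitisation in rename_hash_keys: keys containing periods, pipes, and similar characters are rewritten because the MongoDB-backed openstudio-server cannot store them. The following is a minimal standalone sketch of that pattern, reusing the same character-class regex as the deleted code; the method name sanitize_keys and the sample hash are illustrative only and are not part of the gem.

# Sketch of the key-sanitisation pattern from the deleted rename_hash_keys method.
# The regex is copied from the hunk below; everything else is illustrative.
INVALID_KEY_CHARS = /[|!@#\$%^&\*\(\)\{\}\\\[\]|;:'",<.>\/?\+=]+/

# Recursively rewrite hash keys: replace invalid characters with underscores,
# collapse repeated underscores, and trim trailing underscores/whitespace.
def sanitize_keys(value)
  return value unless value.is_a?(Hash)
  value.each_with_object({}) do |(k, v), out|
    clean = k.to_s.gsub(INVALID_KEY_CHARS, '_').squeeze('_').gsub(/[_\s]+$/, '').to_sym
    out[clean] = sanitize_keys(v)
  end
end

# Illustrative input only; real data comes from the measure attribute JSON files.
puts sanitize_keys('total_site_energy.value' => { 'cost|total' => 100.0 }).inspect
# keys become :total_site_energy_value and :cost_total

The deleted implementation does the same thing with a recursive lambda and Hash[h.map { ... }], as the hunk below shows.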
@@ -1,471 +0,0 @@
- ######################################################################
- # Copyright (c) 2008-2014, Alliance for Sustainable Energy.
- # All rights reserved.
- #
- # This library is free software; you can redistribute it and/or
- # modify it under the terms of the GNU Lesser General Public
- # License as published by the Free Software Foundation; either
- # version 2.1 of the License, or (at your option) any later version.
- #
- # This library is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- # Lesser General Public License for more details.
- #
- # You should have received a copy of the GNU Lesser General Public
- # License along with this library; if not, write to the Free Software
- # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ######################################################################
-
- # Run precanned post processing to extract object functions
- require 'csv'
- require 'ostruct'
-
- class RunReportingMeasures
- # Mixin the MeasureApplication module to apply measures
- include OpenStudio::Workflow::ApplyMeasures
-
- def initialize(directory, logger, time_logger, adapter, workflow_arguments, past_results, options = {})
- defaults = {}
- @options = defaults.merge(options)
- @directory = directory
- @run_directory = "#{@directory}/run"
- @adapter = adapter
- @logger = logger
- @time_logger = time_logger
- @workflow_arguments = workflow_arguments
- @past_results = past_results
- @results = {}
- @output_attributes = {}
-
- # TODO: we shouldn't have to keep loading this file if we need it. It should be availabe for any job.
- # TODO: passing in the options everytime is ridiculuous
- @analysis_json = @adapter.get_problem(@directory, @options)
-
- @logger.info "#{self.class} passed the following options #{@options}"
-
- @model = load_model @options[:run_openstudio][:osm]
- @model_idf = load_idf @options[:run_openstudio][:idf]
-
- # TODO: should read the name of the sql output file via the :run_openstudio options hash
- # I want to reiterate that this is cheezy!
- @sql_filename = "#{@run_directory}/eplusout.sql"
- fail "EnergyPlus SQL file did not exist #{@sql_filename}" unless File.exist? @sql_filename
-
- @objective_functions = {}
- end
-
- def perform
- @logger.info "Calling #{__method__} in the #{self.class} class"
- @logger.info 'RunPostProcess Retrieving datapoint and problem'
-
- begin
- @datapoint_json = @adapter.get_datapoint(@directory, @options)
- @analysis_json = @adapter.get_problem(@directory, @options)
-
- @time_logger.start('Running standard post process')
- run_monthly_postprocess
- @time_logger.stop('Running standard post process')
-
- translate_csv_to_json
-
- run_packaged_measures
-
- if @analysis_json && @analysis_json[:analysis]
- apply_measures(:reporting_measure)
- end
-
- @logger.info 'Saving reporting measures output attributes JSON'
- File.open("#{@run_directory}/reporting_measure_attributes.json", 'w') do |f|
- f << JSON.pretty_generate(@output_attributes)
- end
-
- run_extract_inputs_and_outputs
-
- @logger.info "Objective Function JSON is #{@objective_functions}"
- obj_fun_file = "#{@directory}/objectives.json"
- FileUtils.rm_f(obj_fun_file) if File.exist?(obj_fun_file)
- File.open(obj_fun_file, 'w') { |f| f << JSON.pretty_generate(@objective_functions) }
-
- rescue => e
- log_message = "Runner error #{__FILE__} failed with #{e.message}, #{e.backtrace.join("\n")}"
- raise log_message
- end
-
- @results
- end
-
- def run_extract_inputs_and_outputs
- # For xml, the measure attributes are in the measure_attributes_xml.json file
- # TODO: somehow pass the metadata around on which JSONs to suck into the database
- if File.exist? "#{@run_directory}/measure_attributes_xml.json"
- h = JSON.parse(File.read("#{@run_directory}/measure_attributes_xml.json"), symbolize_names: true)
- h = rename_hash_keys(h)
- @results.merge! h
- end
-
- # Inputs are in the measure_attributes.json file
- if File.exist? "#{@run_directory}/measure_attributes.json"
- h = JSON.parse(File.read("#{@run_directory}/measure_attributes.json"), symbolize_names: true)
- h = rename_hash_keys(h)
- @results.merge! h
- end
-
- # Inputs are in the reporting_measure_attributes.jsonfile
- if File.exist? "#{@run_directory}/reporting_measure_attributes.json"
- h = JSON.parse(File.read("#{@run_directory}/reporting_measure_attributes.json"), symbolize_names: true)
- h = rename_hash_keys(h)
- @results.merge! h
- end
-
- # Initialize the objective function variable.
- @objective_functions = {}
- if File.exist? "#{@run_directory}/standard_report_legacy.json"
- h = JSON.parse(File.read("#{@run_directory}/standard_report_legacy.json"), symbolize_names: true)
- h = rename_hash_keys(h)
- @results[:standard_report_legacy] = h
- end
-
- @logger.info 'Saving the result hash to file'
- File.open("#{@run_directory}/results.json", 'w') { |f| f << JSON.pretty_generate(@results) }
-
- @logger.info 'Iterating over Analysis JSON Output Variables'
- # Save the objective functions to the object for sending back to the simulation executive
-
- if @analysis_json[:analysis] && @analysis_json[:analysis][:output_variables]
- @analysis_json[:analysis][:output_variables].each do |variable|
- # determine which ones are the objective functions (code smell: todo: use enumerator)
- if variable[:objective_function]
- @logger.info "Looking for objective function #{variable[:name]}"
- # TODO: move this to cleaner logic. Use ostruct?
- if variable[:name].include? '.'
- k, v = variable[:name].split('.')
-
- # look for the objective function key and make sure that it is not nil. False is an okay obj function.
- if @results.key?(k.to_sym) && !@results[k.to_sym][v.to_sym].nil?
- @objective_functions["objective_function_#{variable[:objective_function_index] + 1}"] = @results[k.to_sym][v.to_sym]
- if variable[:objective_function_target]
- @logger.info "Found objective function target for #{variable[:name]}"
- @objective_functions["objective_function_target_#{variable[:objective_function_index] + 1}"] = variable[:objective_function_target].to_f
- end
- if variable[:scaling_factor]
- @logger.info "Found scaling factor for #{variable[:name]}"
- @objective_functions["scaling_factor_#{variable[:objective_function_index] + 1}"] = variable[:scaling_factor].to_f
- end
- if variable[:objective_function_group]
- @logger.info "Found objective function group for #{variable[:name]}"
- @objective_functions["objective_function_group_#{variable[:objective_function_index] + 1}"] = variable[:objective_function_group].to_f
- end
- else
- @logger.warn "No results for objective function #{variable[:name]}"
- @objective_functions["objective_function_#{variable[:objective_function_index] + 1}"] = Float::MAX
- @objective_functions["objective_function_target_#{variable[:objective_function_index] + 1}"] = nil
- @objective_functions["scaling_factor_#{variable[:objective_function_index] + 1}"] = nil
- @objective_functions["objective_function_group_#{variable[:objective_function_index] + 1}"] = nil
- end
- else
- # variable name is not nested with the '.' -- this is for legacy purposes and should be deleted on 9/30/2014
- if @results[variable[:name]]
- @objective_functions["objective_function_#{variable[:objective_function_index] + 1}"] = @results[k.to_sym][v.to_sym]
- if variable[:objective_function_target]
- @logger.info "Found objective function target for #{variable[:name]}"
- @objective_functions["objective_function_target_#{variable[:objective_function_index] + 1}"] = variable[:objective_function_target].to_f
- end
- if variable[:scaling_factor]
- @logger.info "Found scaling factor for #{variable[:name]}"
- @objective_functions["scaling_factor_#{variable[:objective_function_index] + 1}"] = variable[:scaling_factor].to_f
- end
- if variable[:objective_function_group]
- @logger.info "Found objective function group for #{variable[:name]}"
- @objective_functions["objective_function_group_#{variable[:objective_function_index] + 1}"] = variable[:objective_function_group].to_f
- end
- else
- @logger.warn "No results for objective function #{variable[:name]}"
- @objective_functions["objective_function_#{variable[:objective_function_index] + 1}"] = Float::MAX
- @objective_functions["objective_function_target_#{variable[:objective_function_index] + 1}"] = nil
- @objective_functions["scaling_factor_#{variable[:objective_function_index] + 1}"] = nil
- @objective_functions["objective_function_group_#{variable[:objective_function_index] + 1}"] = nil
- end
- end
- end
- end
- end
- end
-
- private
-
- # Load in the OpenStudio model. It is required for post processing
- def load_model(filename)
- model = nil
- @logger.info 'Loading model'
-
- # TODO: wrap this in an exception block and fail as appropriate
- # assume that the seed model has been placed in the directory
- if File.exist? filename
- @logger.info "Reading in model #{filename}"
- translator = OpenStudio::OSVersion::VersionTranslator.new
- model = translator.loadModel(filename)
- fail 'OpenStudio model is empty or could not be loaded' if model.empty?
- model = model.get
- else
- fail "Model '#{filename}' did not exist"
- end
-
- model
- end
-
- # Load in the IDF model. It is required for post processing
- def load_idf(filename)
- fail "IDF file does not exist: #{filename}" unless File.exist? filename
- OpenStudio::Workspace.load(filename, 'EnergyPlus'.to_IddFileType).get
- end
-
- # Run the prepackaged measures in OpenStudio. Currently this runs the standard reports and the calibration measure
- # TODO: add a flag on which packaged reporting measures to run
- def run_packaged_measures
- # configure the workflow item json to pass
- workflow_item = {
- display_name: 'Standard Reports',
- measure_definition_directory: File.expand_path(File.join(OpenStudio::BCLMeasure.standardReportMeasure.directory.to_s, 'measure.rb')),
- measure_definition_class_name: 'OpenStudioResults',
- measure_type: 'ReportingMeasure',
- name: 'standard_reports'
- }
- @logger.info 'Running packaged Standard Reports measures'
- begin
- apply_measure(workflow_item)
- rescue => e
- @logger.warn "Error applying Standard Reports measure. Failed with #{e.message}, #{e.backtrace.join("\n")} \n Continuing."
- end
-
- @logger.info "Found #{@model.getUtilityBills.length} utility bills"
- if @model.getUtilityBills.length > 0
- workflow_item = {
- display_name: 'Calibration Reports',
- measure_definition_directory: File.expand_path(File.join(OpenStudio::BCLMeasure.calibrationReportMeasure.directory.to_s, 'measure.rb')),
- measure_definition_class_name: 'CalibrationReports',
- measure_type: 'CalibrationReports',
- name: 'calibration_reports'
- }
- @logger.info 'Running packaged Calibration Reports measures'
- apply_measure(workflow_item)
- end
-
- @logger.info 'Finished Running Packaged Measures'
- end
-
- def translate_csv_to_json
- if File.exist?("#{@run_directory}/eplustbl.csv")
- @logger.info 'Translating EnergyPlus table CSV to JSON file'
- results = {}
- csv = CSV.read("#{@run_directory}/eplustbl.csv")
- csv.transpose.each do |k, v|
- longname = k.gsub(/\(.*\)/, '').strip
- short_name = longname.downcase.tr(' ', '_')
- units = k.match(/\(.*\)/)[0].delete('(').delete(')')
- results[short_name.to_sym] = v.nil? ? nil : v.to_f
- results["#{short_name}_units".to_sym] = units
- results["#{short_name}_display_name".to_sym] = longname
- end
-
- @logger.info 'Saving results to json'
-
- # save out results
- File.open("#{@run_directory}/standard_report_legacy.json", 'w') { |f| f << JSON.pretty_generate(results) }
- end
- end
-
- # Remove any invalid characters in the measure attribute keys.
- # Periods and Pipes are the most problematic because mongo does not allow hash keys with periods, and the pipes
- # are used in the map/reduce method that was written to speed up the data write in openstudio-server.
- # Also remove any trailing underscores and spaces
- def rename_hash_keys(hash)
- # TODO: log the name changes?
- regex = /[|!@#\$%^&\*\(\)\{\}\\\[\]|;:'",<.>\/?\+=]+/
-
- rename_keys = lambda do |h|
- if Hash === h
- h.each_key do |key|
- if key.to_s =~ regex
- @logger.warn "Renaming result key '#{key}' to remove invalid characters"
- end
- end
- Hash[h.map { |k, v| [k.to_s.gsub(regex, '_').squeeze('_').gsub(/[_\s]+$/, '').chomp.to_sym, rename_keys[v]] }]
- else
- h
- end
- end
-
- rename_keys[hash]
- end
-
- # TODO: This needs to be cleaned up and tested. This is just ugly. Sorry.
- def run_monthly_postprocess
- def sql_query(sql, report_name, query)
- val = nil
- result = sql.execAndReturnFirstDouble("SELECT Value FROM TabularDataWithStrings WHERE ReportName='#{report_name}' AND #{query}")
- if result.empty?
- @logger.warn "Query for run_monthly_postprocess failed for #{query}"
- else
- begin
- val = result.get
- rescue => e
- @logger.info "#{__FILE__} failed with #{e.message}, #{e.backtrace.join("\n")}"
- val = nil
- end
- end
-
- val
- end
-
- # add results from sql method
- def add_data(sql, query, hdr, area, val)
- row = []
- val = sql_query(sql, 'AnnualBuildingUtilityPerformanceSummary', query) if val.nil?
- row << hdr
- if area.nil?
- row << val
- else
- row << (val * 1000) / area
- end
- row
- end
-
- # add results from sql method
- def add_data2(sql, query, hdr, area, val)
- row = []
- val = sql_query(sql, 'BUILDING ENERGY PERFORMANCE - ELECTRICITY', query) if val.nil?
- row << hdr
- if area.nil?
- row << val
- else
- row << (val * 1000) / area
- end
- row
- end
-
- # add results from sql method
- def add_data3(sql, query, hdr, area, val)
- row = []
- val = sql_query(sql, 'BUILDING ENERGY PERFORMANCE - NATURAL GAS', query) if val.nil?
- row << hdr
- if area.nil?
- row << val
- else
- row << (val * 1000) / area
- end
- row
- end
-
- # add results from sql method
- def add_data4(sql, query, hdr, area, val)
- row = []
-
- if val.nil?
- val = 0
-
- ['INTERIORLIGHTS:ELECTRICITY', 'EXTERIORLIGHTS:ELECTRICITY', 'INTERIOREQUIPMENT:ELECTRICITY', 'EXTERIOREQUIPMENT:ELECTRICITY',
- 'FANS:ELECTRICITY', 'PUMPS:ELECTRICITY', 'HEATING:ELECTRICITY', 'COOLING:ELECTRICITY', 'HEATREJECTION:ELECTRICITY',
- 'HUMIDIFIER:ELECTRICITY', 'HEATRECOVERY:ELECTRICITY', 'WATERSYSTEMS:ELECTRICITY', 'COGENERATION:ELECTRICITY', 'REFRIGERATION:ELECTRICITY'].each do |end_use|
- tmp_query = query + " AND ColumnName='#{end_use}'"
- tmp_val = sql_query(sql, 'BUILDING ENERGY PERFORMANCE - ELECTRICITY', tmp_query)
- val += tmp_val unless tmp_val.nil?
- end
- end
-
- row << hdr
- if area.nil?
- row << val
- else
- row << (val * 1000) / area
- end
- row
- end
-
- # open sql file
- sql_file = OpenStudio::SqlFile.new(@sql_filename)
-
- # get building area
- bldg_area = sql_query(sql_file, 'AnnualBuildingUtilityPerformanceSummary', "TableName='Building Area' AND RowName='Net Conditioned Building Area' AND ColumnName='Area'")
- # populate data array
-
- tbl_data = []
- tbl_data << add_data(sql_file, "TableName='Site and Source Energy' AND RowName='Total Site Energy' AND ColumnName='Energy Per Conditioned Building Area'", 'Total Energy (MJ/m2)', nil, nil)
- tbl_data << add_data(sql_file, "TableName='Site and Source Energy' AND RowName='Total Source Energy' AND ColumnName='Energy Per Conditioned Building Area'", 'Total Source Energy (MJ/m2)', nil, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Total End Uses' AND ColumnName='Electricity'", 'Total Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Total End Uses' AND ColumnName='Natural Gas'", 'Total Natural Gas (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Heating' AND ColumnName='Electricity'", 'Heating Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Heating' AND ColumnName='Natural Gas'", 'Heating Natural Gas (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Cooling' AND ColumnName='Electricity'", 'Cooling Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Interior Lighting' AND ColumnName='Electricity'", 'Interior Lighting Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Exterior Lighting' AND ColumnName='Electricity'", 'Exterior Lighting Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Interior Equipment' AND ColumnName='Electricity'", 'Interior Equipment Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Interior Equipment' AND ColumnName='Natural Gas'", 'Interior Equipment Natural Gas (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Exterior Equipment' AND ColumnName='Electricity'", 'Exterior Equipment Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Fans' AND ColumnName='Electricity'", 'Fans Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Pumps' AND ColumnName='Electricity'", 'Pumps Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Heat Rejection' AND ColumnName='Electricity'", 'Heat Rejection Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Humidification' AND ColumnName='Electricity'", 'Humidification Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Water Systems' AND ColumnName='Electricity'", 'Water Systems Electricity (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Water Systems' AND ColumnName='Natural Gas'", 'Water Systems Natural Gas (MJ/m2)', bldg_area, nil)
- tbl_data << add_data(sql_file, "TableName='End Uses' AND RowName='Refrigeration' AND ColumnName='Electricity'", 'Refrigeration Electricity (MJ/m2)', bldg_area, nil)
- htg_hrs = sql_query(sql_file, 'AnnualBuildingUtilityPerformanceSummary', "TableName='Comfort and Setpoint Not Met Summary' AND RowName='Time Setpoint Not Met During Occupied Heating' AND ColumnName='Facility'")
- clg_hrs = sql_query(sql_file, 'AnnualBuildingUtilityPerformanceSummary', "TableName='Comfort and Setpoint Not Met Summary' AND RowName='Time Setpoint Not Met During Occupied Cooling' AND ColumnName='Facility'")
- tot_hrs = clg_hrs && htg_hrs ? htg_hrs + clg_hrs : nil
- tbl_data << add_data(sql_file, nil, 'Heating Hours Unmet (hr)', nil, htg_hrs)
- tbl_data << add_data(sql_file, nil, 'Cooling Hours Unmet (hr)', nil, clg_hrs)
- tbl_data << add_data(sql_file, nil, 'Total Hours Unmet (hr)', nil, tot_hrs)
- total_cost = sql_query(sql_file, 'Life-Cycle Cost Report', "TableName='Present Value by Category' AND RowName='Grand Total' AND ColumnName='Present Value'")
- tbl_data << add_data(sql_file, nil, 'Total Life Cycle Cost ($)', nil, total_cost)
- # cooling:electricity
- tbl_data << add_data2(sql_file, "RowName='January' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Jan (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='February' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Feb (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='March' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Mar (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='April' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Apr (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='May' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity May (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='June' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Jun (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='July' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Jul (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='August' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Aug (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='September' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Sep (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='October' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Oct (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='November' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Nov (J)', nil, nil)
- tbl_data << add_data2(sql_file, "RowName='December' AND ColumnName='COOLING:ELECTRICITY'", 'Cooling Electricity Dec (J)', nil, nil)
- # heating:gas
- tbl_data << add_data3(sql_file, "RowName='January' AND ColumnName='HEATING:GAS'", 'Heating Gas Jan (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='February' AND ColumnName='HEATING:GAS'", 'Heating Gas Feb (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='March' AND ColumnName='HEATING:GAS'", 'Heating Gas Mar (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='April' AND ColumnName='HEATING:GAS'", 'Heating Gas Apr (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='May' AND ColumnName='HEATING:GAS'", 'Heating Gas May (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='June' AND ColumnName='HEATING:GAS'", 'Heating Gas Jun (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='July' AND ColumnName='HEATING:GAS'", 'Heating Gas Jul (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='August' AND ColumnName='HEATING:GAS'", 'Heating Gas Aug (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='September' AND ColumnName='HEATING:GAS'", 'Heating Gas Sep (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='October' AND ColumnName='HEATING:GAS'", 'Heating Gas Oct (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='November' AND ColumnName='HEATING:GAS'", 'Heating Gas Nov (J)', nil, nil)
- tbl_data << add_data3(sql_file, "RowName='December' AND ColumnName='HEATING:GAS'", 'Heating Gas Dec (J)', nil, nil)
- # total Electricity
- tbl_data << add_data4(sql_file, "RowName='January'", 'Total Electricity Jan (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='February'", 'Total Electricity Feb (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='March'", 'Total Electricity Mar (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='April'", 'Total Electricity Apr (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='May'", 'Total Electricity May (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='June'", 'Total Electricity Jun (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='July'", 'Total Electricity Jul (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='August'", 'Total Electricity Aug (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='September'", 'Total Electricity Sep (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='October'", 'Total Electricity Oct (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='November'", 'Total Electricity Nov (J)', nil, nil)
- tbl_data << add_data4(sql_file, "RowName='December'", 'Total Electricity Dec (J)', nil, nil) # close SQL file
- sql_file.close
- # transpose data
- tbl_rows = tbl_data.transpose
-
- @logger.info tbl_rows
- # write electricity data to CSV
- CSV.open("#{@run_directory}/eplustbl.csv", 'wb') do |csv|
- tbl_rows.each do |row|
- csv << row
- end
- end
- end
- end
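For readers porting this logic to the new util/post_process.rb, the core extraction pattern above is a parameterised query against the TabularDataWithStrings table in eplusout.sql, issued through the OpenStudio SqlFile bindings. Below is a minimal, self-contained sketch of that pattern; the report, table, row, and column names are taken from the queries above, while the helper name fetch_tabular_value and the local path are illustrative assumptions, not part of the gem's API.

# Sketch of the TabularDataWithStrings query pattern used by run_monthly_postprocess.
# Assumes the OpenStudio Ruby bindings are installed and an eplusout.sql exists locally.
require 'openstudio'

def fetch_tabular_value(sql, report_name, table, row, column)
  query = "SELECT Value FROM TabularDataWithStrings " \
          "WHERE ReportName='#{report_name}' AND TableName='#{table}' " \
          "AND RowName='#{row}' AND ColumnName='#{column}'"
  result = sql.execAndReturnFirstDouble(query) # returns an optional double
  result.empty? ? nil : result.get
end

sql_file = OpenStudio::SqlFile.new('run/eplusout.sql') # illustrative path
site_eui = fetch_tabular_value(sql_file,
                               'AnnualBuildingUtilityPerformanceSummary',
                               'Site and Source Energy',
                               'Total Site Energy',
                               'Energy Per Conditioned Building Area')
puts "Total Site Energy: #{site_eui} MJ/m2" if site_eui
sql_file.close

The deleted add_data/add_data2/add_data3/add_data4 helpers are thin wrappers around this same query, differing only in the report name they target and in whether the value is normalised by conditioned floor area.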