openstudio-workflow 1.0.0.pat1 → 1.0.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (46) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +12 -0
  3. data/README.md +16 -68
  4. data/Rakefile +9 -9
  5. data/bin/openstudio_cli +786 -0
  6. data/lib/openstudio/workflow/adapters/input/local.rb +97 -0
  7. data/lib/openstudio/workflow/adapters/output/local.rb +90 -0
  8. data/lib/openstudio/workflow/adapters/output/socket.rb +70 -0
  9. data/lib/openstudio/workflow/{jobs/run_preflight/run_preflight.rb → adapters/output/web.rb} +37 -19
  10. data/lib/openstudio/workflow/{adapter.rb → adapters/output_adapter.rb} +53 -51
  11. data/lib/openstudio/workflow/job.rb +22 -0
  12. data/lib/openstudio/workflow/jobs/{run_energyplus → resources}/monthly_report.idf +0 -0
  13. data/lib/openstudio/workflow/jobs/run_energyplus.rb +49 -0
  14. data/lib/openstudio/workflow/jobs/run_ep_measures.rb +55 -0
  15. data/lib/openstudio/workflow/jobs/run_initialization.rb +136 -0
  16. data/lib/openstudio/workflow/jobs/run_os_measures.rb +59 -0
  17. data/lib/openstudio/workflow/jobs/run_postprocess.rb +53 -0
  18. data/lib/openstudio/workflow/jobs/run_preprocess.rb +81 -0
  19. data/lib/openstudio/workflow/jobs/run_reporting_measures.rb +86 -0
  20. data/lib/openstudio/workflow/jobs/run_translation.rb +49 -0
  21. data/lib/openstudio/workflow/multi_delegator.rb +1 -3
  22. data/lib/openstudio/workflow/registry.rb +137 -0
  23. data/lib/openstudio/workflow/run.rb +182 -221
  24. data/lib/openstudio/workflow/time_logger.rb +1 -1
  25. data/lib/openstudio/workflow/util/energyplus.rb +564 -0
  26. data/lib/openstudio/workflow/util/io.rb +33 -0
  27. data/lib/openstudio/workflow/util/measure.rb +520 -0
  28. data/lib/openstudio/workflow/util/model.rb +100 -0
  29. data/lib/openstudio/workflow/util/post_process.rb +177 -0
  30. data/lib/openstudio/workflow/util/weather_file.rb +108 -0
  31. data/lib/openstudio/workflow/util.rb +14 -0
  32. data/lib/openstudio/workflow/version.rb +1 -1
  33. data/lib/openstudio/workflow_json.rb +399 -0
  34. data/lib/openstudio/workflow_runner.rb +213 -0
  35. data/lib/openstudio-workflow.rb +13 -118
  36. metadata +45 -85
  37. data/lib/openstudio/extended_runner.rb +0 -105
  38. data/lib/openstudio/workflow/adapters/local.rb +0 -101
  39. data/lib/openstudio/workflow/adapters/mongo.rb +0 -227
  40. data/lib/openstudio/workflow/jobs/lib/apply_measures.rb +0 -253
  41. data/lib/openstudio/workflow/jobs/run_energyplus/run_energyplus.rb +0 -314
  42. data/lib/openstudio/workflow/jobs/run_openstudio/run_openstudio.rb +0 -230
  43. data/lib/openstudio/workflow/jobs/run_postprocess/run_postprocess.rb +0 -110
  44. data/lib/openstudio/workflow/jobs/run_reporting_measures/run_reporting_measures.rb +0 -471
  45. data/lib/openstudio/workflow/jobs/run_runmanager/run_runmanager.rb +0 -247
  46. data/lib/openstudio/workflow/jobs/run_xml/run_xml.rb +0 -279
@@ -1,227 +0,0 @@
1
- ######################################################################
2
- # Copyright (c) 2008-2014, Alliance for Sustainable Energy.
3
- # All rights reserved.
4
- #
5
- # This library is free software; you can redistribute it and/or
6
- # modify it under the terms of the GNU Lesser General Public
7
- # License as published by the Free Software Foundation; either
8
- # version 2.1 of the License, or (at your option) any later version.
9
- #
10
- # This library is distributed in the hope that it will be useful,
11
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
12
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13
- # Lesser General Public License for more details.
14
- #
15
- # You should have received a copy of the GNU Lesser General Public
16
- # License along with this library; if not, write to the Free Software
17
- # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
- ######################################################################
19
-
20
- require_relative '../adapter'
21
-
22
- module OpenStudio
23
- module Workflow
24
- module Adapters
25
- class MongoLog
26
- def initialize(datapoint_model)
27
- @dp = datapoint_model
28
- @dp.sdp_log_file ||= []
29
- end
30
-
31
- def write(msg)
32
- @dp.sdp_log_file << msg.delete("\n")
33
- @dp.save!
34
- end
35
- end
36
-
37
- class Mongo < Adapter
38
- attr_reader :datapoint
39
-
40
- def initialize(options = {})
41
- super
42
-
43
- require 'mongoid'
44
- require 'mongoid_paperclip'
45
- require 'delayed_job_mongoid'
46
- base_path = @options[:mongoid_path] ? @options[:mongoid_path] : "#{File.dirname(__FILE__)}/mongo"
47
-
48
- Dir["#{base_path}/models/*.rb"].each { |f| require f }
49
- Mongoid.load!("#{base_path}/mongoid.yml", @options[:rails_env])
50
- end
51
-
52
- # Tell the system that the process has started
53
- def communicate_started(directory, options = {})
54
- # Watch out for namespace conflicts (::Time is okay but Time is OpenStudio::Time)
55
- File.open("#{directory}/started.job", 'w') { |f| f << "Started Workflow #{::Time.now}" }
56
-
57
- @datapoint ||= get_datapoint_model(options[:datapoint_id])
58
- @datapoint.status = 'started'
59
- @datapoint.status_message = ''
60
- @datapoint.run_start_time = ::Time.now
61
-
62
- # TODO: use the ComputeNode model to pull out the information so that we can reuse the methods
63
- # Determine what the IP address is of the worker node and save in the data point
64
-
65
- # ami-id: ami-7c7e4e14
66
- # instance-id: i-c52e0412
67
- # instance-type: m3.medium
68
- # local-hostname: ip-10-99-169-57.ec2.internal
69
- # local-ipv4: 10.99.169.57
70
- # placement: us-east-1a
71
- # public-hostname: ec2-54-161-221-129.compute-1.amazonaws.com
72
- # public-ipv4: 54.161.221.129
73
- # number_of_cores: 1
74
- if File.exist? '/etc/openstudio-server/instance.yml'
75
- y = YAML.load_file('/etc/openstudio-server/instance.yml')
76
- @datapoint.ip_address = y['public-ipv4'] if y['public-ipv4']
77
- @datapoint.internal_ip_address = y['local-ipv4'] if y['local-ipv4']
78
- else
79
- # try to infer it from the socket/facter information
80
- # note, facter will be deprecated in the future, so don't extend it!
81
- retries = 0
82
- begin
83
- require 'socket'
84
- if Socket.gethostname =~ /os-.*/
85
- # Maybe use this in the future: /sbin/ifconfig eth1|grep inet|head -1|sed 's/\:/ /'|awk '{print $3}'
86
- # Must be on vagrant and just use the hostname to do a lookup
87
- map = {
88
- 'os-server' => '192.168.33.10',
89
- 'os-worker-1' => '192.168.33.11',
90
- 'os-worker-2' => '192.168.33.12'
91
- }
92
- @datapoint.ip_address = map[Socket.gethostname]
93
- @datapoint.internal_ip_address = @datapoint.ip_address
94
- else
95
- if Gem.loaded_specs['facter']
96
- # Use EC2 public to check if we are on AWS.
97
- @datapoint.ip_address = Facter.fact(:ec2_public_ipv4) ? Facter.fact(:ec2_public_ipv4).value : Facter.fact(:ipaddress).value
98
- @datapoint.internal_ip_address = Facter.fact(:ipaddress).value
99
- end
100
- end
101
- rescue => e
102
- # catch any exceptions. It appears that if a new instance of amazon starts, then it is likely that
103
- # the Facter for AWS may not be initialized yet. Retry after waiting for 15 seconds if this happens.
104
- # If this fails out, then the only issue with this is that the data point won't be downloaded because
105
- # the worker node is not known
106
-
107
- # retry just in case
108
- if retries < 30 # try for up to 5 minutes
109
- retries += 1
110
- sleep 10
111
- retry
112
- else
113
- raise "could not find Facter based data for worker node after #{retries} retries with message #{e.message}"
114
- # just do nothing for now
115
- end
116
- end
117
- end
118
-
119
- @datapoint.save!
120
- end
121
-
122
- # Get the data point from the path
123
- def get_datapoint(directory, options = {})
124
- # TODO : make this a conditional on when to create one vs when to error out.
125
- # keep @datapoint as the model instance
126
- @datapoint = DataPoint.find_or_create_by(uuid: options[:datapoint_id])
127
-
128
- # convert to JSON for the workflow - and rearrange the version (fix THIS)
129
- datapoint_hash = {}
130
- if @datapoint.nil?
131
- fail 'Could not find datapoint'
132
- else
133
- datapoint_hash[:data_point] = @datapoint.as_document.to_hash
134
- # TODO: Can i remove this openstudio_version stuff?
135
- # datapoint_hash[:openstudio_version] = datapoint_hash[:openstudio_version]
136
-
137
- # TODO: need to figure out how to get symbols from mongo.
138
- datapoint_hash = MultiJson.load(MultiJson.dump(datapoint_hash), symbolize_keys: true)
139
-
140
- # save to disk for inspection
141
- save_dp = File.join(directory, 'data_point.json')
142
- FileUtils.rm_f save_dp if File.exist? save_dp
143
- File.open(save_dp, 'w') { |f| f << MultiJson.dump(datapoint_hash, pretty: true) }
144
- end
145
-
146
- datapoint_hash
147
- end
148
-
149
- # TODO: cleanup these options. Make them part of the class. They are just unwieldy here.
150
- def get_problem(directory, options = {})
151
- defaults = { format: 'json' }
152
- options = defaults.merge(options)
153
-
154
- get_datapoint(directory, options) unless @datapoint
155
-
156
- if @datapoint
157
- analysis = @datapoint.analysis.as_document.to_hash
158
- else
159
- fail 'Cannot retrieve problem because datapoint was nil'
160
- end
161
-
162
- analysis_hash = {}
163
- if analysis
164
- analysis_hash[:analysis] = analysis
165
- analysis_hash[:openstudio_version] = analysis[:openstudio_version]
166
-
167
- # TODO: need to figure out how to get symbols from mongo.
168
- analysis_hash = MultiJson.load(MultiJson.dump(analysis_hash, pretty: true), symbolize_keys: true)
169
- end
170
- analysis_hash
171
- end
172
-
173
- def communicate_intermediate_result(_directory)
174
- # noop
175
- end
176
-
177
- def communicate_complete(_directory)
178
- @datapoint.run_end_time = ::Time.now
179
- @datapoint.status = 'completed'
180
- @datapoint.status_message = 'completed normal'
181
- @datapoint.save!
182
- end
183
-
184
- # Final state of the simulation. The os_directory is the run directory and may be needed to
185
- # zip up the results of the simulation.
186
- def communicate_failure(directory)
187
- # zip up the folder even on datapoint failures
188
- if directory && File.exist?(directory)
189
- zip_results(directory)
190
- end
191
-
192
- @datapoint.run_end_time = ::Time.now
193
- @datapoint.status = 'completed'
194
- @datapoint.status_message = 'datapoint failure'
195
- @datapoint.save!
196
- end
197
-
198
- def communicate_results(directory, results)
199
- zip_results(directory)
200
-
201
- # @logger.info 'Saving EnergyPlus JSON file'
202
- if results
203
- @datapoint.results ? @datapoint.results.merge!(results) : @datapoint.results = results
204
- end
205
- result = @datapoint.save! # redundant because next method calls save too.
206
- end
207
-
208
- # TODO: Implement the writing to the mongo_db for logging
209
- def get_logger(directory, options = {})
210
- # get the datapoint object
211
- get_datapoint(directory, options) unless @datapoint
212
- @log = OpenStudio::Workflow::Adapters::MongoLog.new(@datapoint)
213
-
214
- @log
215
- end
216
-
217
- private
218
-
219
- def get_datapoint_model(uuid)
220
- # TODO : make this a conditional on when to create one vs when to error out.
221
- # keep @datapoint as the model instance
222
- DataPoint.find_or_create_by(uuid: uuid)
223
- end
224
- end
225
- end
226
- end
227
- end
@@ -1,253 +0,0 @@
1
- ######################################################################
2
- # Copyright (c) 2008-2014, Alliance for Sustainable Energy.
3
- # All rights reserved.
4
- #
5
- # This library is free software; you can redistribute it and/or
6
- # modify it under the terms of the GNU Lesser General Public
7
- # License as published by the Free Software Foundation; either
8
- # version 2.1 of the License, or (at your option) any later version.
9
- #
10
- # This library is distributed in the hope that it will be useful,
11
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
12
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13
- # Lesser General Public License for more details.
14
- #
15
- # You should have received a copy of the GNU Lesser General Public
16
- # License along with this library; if not, write to the Free Software
17
- # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
- ######################################################################
19
-
20
- # module containing the methods to apply measures to a model
21
- # must define the following
22
- # @logger : where to log information
23
- # @model : the OpenStudio model on which to apply measures
24
- # @datapoint_json : the datapoint JSON
25
- # @analysis_json : the analysis JSON
26
- # @output_attributes : hash to store any output attributes
27
- # @sql_filename : needed for reporting measures
28
-
29
- module OpenStudio
30
- module Workflow
31
- module ApplyMeasures
32
- MEASURE_TYPES = {
33
- openstudio_measure: 'RubyMeasure',
34
- energyplus_measure: 'EnergyPlusMeasure',
35
- reporting_measure: 'ReportingMeasure'
36
- }
37
-
38
- def apply_arguments(argument_map, argument)
39
- success = true
40
-
41
- unless argument[:value].nil?
42
- @logger.info "Setting argument value '#{argument[:name]}' to '#{argument[:value]}'"
43
-
44
- v = argument_map[argument[:name]]
45
- fail "Could not find argument map in measure for '#{argument[:name]}' with value '#{argument[:value]}'" unless v
46
- value_set = v.setValue(argument[:value])
47
- fail "Could not set argument '#{argument[:name]}' of value '#{argument[:value]}' on model" unless value_set
48
- argument_map[argument[:name]] = v.clone
49
- else
50
- @logger.warn "Value for argument '#{argument[:name]}' not set in argument list therefore will use default"
51
- # success = false
52
-
53
- # TODO: what is the fail case (success = false?)
54
- end
55
-
56
- success
57
- end
58
-
59
- # Apply the variable values to the measure argument map object
60
- def apply_variables(argument_map, variable)
61
- success = true
62
-
63
- # save the uuid of the variable
64
- variable_uuid = variable[:uuid].to_sym
65
- if variable[:argument]
66
- variable_name = variable[:argument][:name]
67
-
68
- # Get the value from the data point json that was set via R / Problem Formulation
69
- if @datapoint_json[:data_point]
70
- if @datapoint_json[:data_point][:set_variable_values]
71
- unless @datapoint_json[:data_point][:set_variable_values][variable_uuid].nil?
72
- @logger.info "Setting variable '#{variable_name}' to #{@datapoint_json[:data_point][:set_variable_values][variable_uuid]}"
73
- v = argument_map[variable_name]
74
- fail 'Could not find argument map in measure' unless v
75
- variable_value = @datapoint_json[:data_point][:set_variable_values][variable_uuid]
76
- value_set = v.setValue(variable_value)
77
- fail "Could not set variable '#{variable_name}' of value #{variable_value} on model" unless value_set
78
- argument_map[variable_name] = v.clone
79
- else
80
- fail "[ERROR] Value for variable '#{variable_name}:#{variable_uuid}' not set in datapoint object"
81
- # @logger.error "Value for variable '#{variable_name}:#{variable_uuid}' not set in datapoint object"
82
- # success = false
83
- end
84
- else
85
- fail 'No block for set_variable_values in data point record'
86
- end
87
- else
88
- fail 'No block for data_point in data_point record'
89
- end
90
- else
91
- fail "Variable '#{variable_name}' is defined but no argument is present"
92
- end
93
-
94
- success
95
- end
96
-
97
- def apply_measure(workflow_item)
98
- @logger.info "Starting #{__method__} for #{workflow_item[:name]}"
99
- @time_logger.start("Measure:#{workflow_item[:name]}")
100
- # start_time = ::Time.now
101
- current_dir = Dir.pwd
102
- begin
103
- measure_working_directory = "#{@run_directory}/#{workflow_item[:measure_definition_class_name]}"
104
-
105
- @logger.info "Creating run directory to #{measure_working_directory}"
106
- FileUtils.mkdir_p measure_working_directory
107
- Dir.chdir measure_working_directory
108
-
109
- measure_path = workflow_item[:measure_definition_directory]
110
- measure_name = workflow_item[:measure_definition_class_name]
111
- @logger.info "Apply measure running in #{Dir.pwd}"
112
-
113
- measure_file_path = nil
114
- if (Pathname.new measure_path).absolute?
115
- measure_file_path = measure_path
116
- else
117
- measure_file_path = File.expand_path(File.join(@options[:analysis_root_path], measure_path, 'measure.rb'))
118
- end
119
-
120
- @logger.info "Loading Measure from #{measure_file_path}"
121
- fail "Measure file does not exist #{measure_name} in #{measure_file_path}" unless File.exist? measure_file_path
122
-
123
- measure = nil
124
- runner = nil
125
- result = nil
126
- begin
127
- require measure_file_path
128
- measure = Object.const_get(measure_name).new
129
- runner = ExtendedRunner.new(@logger, @analysis_json, @datapoint_json)
130
- runner.former_workflow_arguments = @workflow_arguments
131
- runner.past_results = @past_results
132
- rescue => e
133
- log_message = "Error requiring measure #{__FILE__}. Failed with #{e.message}, #{e.backtrace.join("\n")}"
134
- raise log_message
135
- end
136
-
137
- arguments = nil
138
-
139
- begin
140
- if workflow_item[:measure_type] == 'RubyMeasure'
141
- arguments = measure.arguments(@model)
142
- elsif workflow_item[:measure_type] == 'EnergyPlusMeasure'
143
- arguments = measure.arguments(@model)
144
- elsif workflow_item[:measure_type] == 'ReportingMeasure'
145
- arguments = measure.arguments
146
- end
147
-
148
- # Create argument map and initialize all the arguments
149
- argument_map = OpenStudio::Ruleset::OSArgumentMap.new
150
- if arguments
151
- arguments.each do |v|
152
- argument_map[v.name] = v.clone
153
- end
154
- end
155
- # @logger.info "Argument map for measure is #{argument_map}"
156
-
157
- @logger.info "Iterating over arguments for workflow item '#{workflow_item[:name]}'"
158
- if workflow_item[:arguments]
159
- workflow_item[:arguments].each do |argument|
160
- success = apply_arguments(argument_map, argument)
161
- fail 'Could not set arguments' unless success
162
- end
163
- end
164
-
165
- @logger.info "Iterating over variables for workflow item '#{workflow_item[:name]}'"
166
- if workflow_item[:variables]
167
- workflow_item[:variables].each do |variable|
168
- success = apply_variables(argument_map, variable)
169
- fail 'Could not set variables' unless success
170
- end
171
- end
172
- rescue => e
173
- log_message = "Error assigning argument in measure #{__FILE__}. Failed with #{e.message}, #{e.backtrace.join("\n")}"
174
- raise log_message
175
- end
176
-
177
- begin
178
- @logger.info "Calling measure.run for '#{workflow_item[:name]}'"
179
- if workflow_item[:measure_type] == 'RubyMeasure'
180
- measure.run(@model, runner, argument_map)
181
- elsif workflow_item[:measure_type] == 'EnergyPlusMeasure'
182
- runner.setLastOpenStudioModel(@model)
183
- measure.run(@model_idf, runner, argument_map)
184
- elsif workflow_item[:measure_type] == 'ReportingMeasure'
185
- # This is silly, set the last model, last IDF, and last sqlfile instead of passing it into the measure.run method
186
- runner.setLastOpenStudioModel(@model)
187
- runner.setLastEnergyPlusWorkspace(@model_idf)
188
- runner.setLastEnergyPlusSqlFilePath(@sql_filename)
189
-
190
- measure.run(runner, argument_map)
191
- end
192
- @workflow_arguments[workflow_item[:name].to_sym] = runner.workflow_arguments
193
- @logger.info "Finished measure.run for '#{workflow_item[:name]}'"
194
-
195
- # run garbage collector after every measure to help address race conditions
196
- GC.start
197
- rescue => e
198
- log_message = "Runner error #{__FILE__} failed with #{e.message}, #{e.backtrace.join("\n")}"
199
- raise log_message
200
- end
201
-
202
- begin
203
- result = runner.result
204
- @logger.info "Running of measure '#{workflow_item[:name]}' completed. Post-processing measure output"
205
-
206
- fail "Measure #{measure_name} reported an error, check log" if result.errors.size != 0
207
- rescue => e
208
- log_message = "Runner error #{__FILE__} failed with #{e.message}, #{e.backtrace.join("\n")}"
209
- raise log_message
210
- end
211
-
212
- begin
213
- measure_attributes = JSON.parse(OpenStudio.toJSON(result.attributes), symbolize_names: true)
214
- @output_attributes[workflow_item[:name].to_sym] = measure_attributes[:attributes]
215
- @past_results[workflow_item[:name].to_sym] = measure_attributes[:attributes]
216
-
217
- # add an applicability flag to all the measure results
218
- @output_attributes[workflow_item[:name].to_sym][:applicable] = result.value.value != -1
219
- @past_results[workflow_item[:name].to_sym][:applicable] = result.value.value != -1
220
- rescue => e
221
- log_message = "#{__FILE__} failed with #{e.message}, #{e.backtrace.join("\n")}"
222
- @logger.error log_message
223
- end
224
- rescue => e
225
- log_message = "#{__FILE__} failed with message #{e.message} in #{e.backtrace.join("\n")}"
226
- @logger.error log_message
227
- raise log_message
228
- ensure
229
- Dir.chdir current_dir
230
- @time_logger.stop("Measure:#{workflow_item[:name]}")
231
-
232
- @logger.info "Finished #{__method__} for #{workflow_item[:name]} in #{@time_logger.delta("Measure:#{workflow_item[:name]}")} s"
233
- end
234
- end
235
-
236
- def apply_measures(measure_type)
237
- if @analysis_json[:analysis][:problem] && @analysis_json[:analysis][:problem][:workflow]
238
- current_dir = Dir.pwd
239
- begin
240
- @logger.info "Applying measures for #{MEASURE_TYPES[measure_type]}"
241
- @analysis_json[:analysis][:problem][:workflow].each do |wf|
242
- next unless wf[:measure_type] == MEASURE_TYPES[measure_type]
243
-
244
- apply_measure(wf)
245
- end
246
- ensure
247
- Dir.chdir current_dir
248
- end
249
- end
250
- end
251
- end
252
- end
253
- end