openstudio-analysis 0.1

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 900f442cddeeeaec5d3c46f05979c8e61c94df06
+   data.tar.gz: e4aa3cfea70f6b1a31730d5bd02b44e72ea7c254
+ SHA512:
+   metadata.gz: 957702d15cac0c9c264c4b375e1387cc15d1f228420e7eccdf18a28fcb525f5edb11dcf70995679d6a13d0e2f55683528c82501fd655b638b6da2eeba8030149
+   data.tar.gz: 1b02cd2c8560babad1d18a8d4c9653f32c40b755c9e4c3639da010eb8006829cfdc744078edcebaaa6f8f611847d4926351784191bb43f3c1a7db3c1eac26187
data/README.md ADDED
@@ -0,0 +1,20 @@
+ OpenStudio Analysis Gem
+ =======================
+
+ The OpenStudio Analysis Gem is used to communicate files to OpenStudio Distributed Analysis.
+
+ The purpose of this gem is to generate the analysis.json file and analysis.zip, and to communicate with the server to
+ upload the simulations.
+
+ The gem does not create the cluster (nor launch it). Currently the only supported cloud platform is
+ Amazon AWS, using either [OpenStudio's PAT](https://openstudio.nrel.gov), the [openstudio-aws gem](https://rubygems.org/gems/openstudio-aws), or [Vagrant](http://www.vagrantup.com/).
+
+ Instructions
+ ------------
+
+ Testing
+ -------
+
+ This gem uses RSpec for testing. To test, simply run `rspec` at the command line.
+
+
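The README above describes the workflow but stops short of an example. The following is a minimal sketch of driving the `ServerApi` class (defined in `lib/openstudio/analysis/server_api.rb` below), assuming a formulation JSON and analysis ZIP have already been exported; the host, project name, and file paths are hypothetical:

```ruby
require 'openstudio-analysis'

# hypothetical host, project name, and export paths
api = OpenStudio::Analysis::ServerApi.new(hostname: "http://localhost:8080")

project_id = api.new_project(project_name: "example project")
analysis_id = api.new_analysis(project_id,
                               formulation_file: "./export/example_model.json",
                               upload_file: "./export/example_model.zip")

api.run_analysis(analysis_id, analysis_action: "start")
```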
data/Rakefile ADDED
@@ -0,0 +1,51 @@
+ require "bundler"
+ Bundler.setup
+
+ require "rake"
+ require "rspec/core/rake_task"
+
+ $LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
+ require "openstudio/analysis/version"
+
+ task :gem => :build
+ task :build do
+   system "gem build openstudio-analysis.gemspec"
+ end
+
+ desc "build and install gem locally"
+ task :install => :build do
+   system "gem install openstudio-analysis-#{OpenStudio::Analysis::VERSION}.gem --no-ri --no-rdoc"
+ end
+
+ task :release => :build do
+   system "git tag -a v#{OpenStudio::Analysis::VERSION} -m 'Tagging #{OpenStudio::Analysis::VERSION}'"
+   system "git push --tags"
+   system "gem push openstudio-analysis-#{OpenStudio::Analysis::VERSION}.gem"
+   system "rm openstudio-analysis-#{OpenStudio::Analysis::VERSION}.gem"
+ end
+
+ RSpec::Core::RakeTask.new("spec") do |spec|
+   spec.pattern = "spec/**/*_spec.rb"
+ end
+
+ RSpec::Core::RakeTask.new('spec:progress') do |spec|
+   spec.rspec_opts = %w(--format progress)
+   spec.pattern = "spec/**/*_spec.rb"
+ end
+
+ task :default => :spec
+
+ desc "import files from other repos"
+ task :import_files do
+   # tbd
+ end
+
+ desc "uninstall all openstudio-analysis gems"
+ task :uninstall do
+
+   system "gem uninstall openstudio-analysis -a"
+ end
+
+ desc "reinstall the gem (uninstall, build, and install)"
+ task :reinstall => [:uninstall, :install]
+
data/lib/openstudio-analysis.rb ADDED
@@ -0,0 +1,16 @@
+ require 'json'
+ require 'faraday'
+ require 'uuid'
+ require 'roo'
+ require 'erb'
+ require 'zip'
+
+ # core
+ require 'openstudio/analysis/server_api'
+ require 'openstudio/analysis/version'
+
+ # translators
+ require 'openstudio/analysis/translator/excel'
+
+ # helpers
+ require 'openstudio/helpers/string'
data/lib/openstudio/analysis/server_api.rb ADDED
@@ -0,0 +1,302 @@
+ # Class manages the communication with the server.
+ # Presently, this class is simple and stores all information in hashes
+ module OpenStudio
+   module Analysis
+     class ServerApi
+       attr_reader :hostname
+
+       def initialize(options = {})
+         defaults = {:hostname => "http://localhost:8080"}
+         options = defaults.merge(options)
+
+         @hostname = options[:hostname]
+
+         raise "no host defined for server api class" if @hostname.nil?
+
+         # create connection with basic capabilities
+         @conn = Faraday.new(:url => @hostname) do |faraday|
+           faraday.request :url_encoded # form-encode POST params
+           faraday.response :logger # log requests to STDOUT
+           faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
+         end
+
+         # create connection to server api with multipart capabilities
+         @conn_multipart = Faraday.new(:url => @hostname) do |faraday|
+           faraday.request :multipart
+           faraday.request :url_encoded # form-encode POST params
+           faraday.response :logger # log requests to STDOUT
+           faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
+         end
+       end
+
+       def get_projects()
+         response = @conn.get '/projects.json'
+
+         projects_json = nil
+         if response.status == 200
+           projects_json = JSON.parse(response.body, :symbolize_names => true, :max_nesting => false)
+         else
+           raise "did not receive a 200 in get_projects"
+         end
+
+         projects_json
+       end
+
+       def get_project_ids()
+         ids = get_projects()
+         ids.map { |project| project[:uuid] }
+       end
+
+       def delete_all()
+         ids = get_project_ids()
+         puts "Deleting Projects #{ids}"
+         ids.each do |id|
+           response = @conn.delete "/projects/#{id}.json"
+           if response.status == 200
+             puts "Successfully deleted project #{id}"
+           else
+             puts "ERROR deleting project #{id}"
+           end
+         end
+       end
+
+       def new_project(options = {})
+         defaults = {project_name: "project #{(rand()*1000).round}"}
+         options = defaults.merge(options)
+         project_id = nil
+
+         project_hash = {project: {name: "#{options[:project_name]}"}}
+
+         response = @conn.post do |req|
+           req.url "/projects.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = project_hash.to_json
+         end
+
+         if response.status == 201
+           project_id = JSON.parse(response.body)["_id"]
+
+           puts "new project created with ID: #{project_id}"
+           # grab the project id
+         elsif response.status == 500
+           puts "500 Error"
+           puts response.inspect
+         end
+
+         project_id
+       end
+
+       def get_analyses(project_id)
+         analysis_ids = []
+         response = @conn.get "/projects/#{project_id}.json"
+         if response.status == 200
+           puts "received the list of analyses for the project"
+
+           analyses = JSON.parse(response.body, :symbolize_names => true, :max_nesting => false)
+           if analyses[:analyses]
+             analyses[:analyses].each do |analysis|
+               analysis_ids << analysis[:_id]
+             end
+           end
+         end
+
+         analysis_ids
+       end
+
+       def new_analysis(project_id, options)
+         defaults = {analysis_name: nil, reset_uuids: false}
+         options = defaults.merge(options)
+
+         raise "No project id passed" if project_id.nil?
+         raise "no formulation passed to new_analysis" if !options[:formulation_file]
+         raise "No formulation exists #{options[:formulation_file]}" if !File.exists?(options[:formulation_file])
+
+         formulation_json = JSON.parse(File.read(options[:formulation_file]), :symbolize_names => true)
+
+         # read in the analysis id from the analysis.json file
+         analysis_id = nil
+         if options[:reset_uuids]
+           analysis_id = UUID.new.generate
+           formulation_json[:analysis][:uuid] = analysis_id
+
+           formulation_json[:analysis][:problem][:workflow].each do |wf|
+             wf[:uuid] = UUID.new.generate
+             if wf[:arguments]
+               wf[:arguments].each do |arg|
+                 arg[:uuid] = UUID.new.generate
+               end
+             end
+             if wf[:variables]
+               wf[:variables].each do |var|
+                 var[:uuid] = UUID.new.generate
+                 if var[:argument]
+                   var[:argument][:uuid] = UUID.new.generate
+                 end
+               end
+             end
+           end
+         else
+           analysis_id = formulation_json[:analysis][:uuid]
+         end
+         raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?
+
+         # set the analysis name
+         formulation_json[:analysis][:name] = "#{options[:analysis_name]}" unless options[:analysis_name].nil?
+
+         # save out this file to compare
+         # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
+
+         response = @conn.post do |req|
+           req.url "projects/#{project_id}/analyses.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = formulation_json.to_json
+         end
+
+         if response.status == 201
+           puts "asked to create analysis with #{analysis_id}"
+           # puts resp.inspect
+           analysis_id = JSON.parse(response.body)["_id"]
+
+           puts "new analysis created with ID: #{analysis_id}"
+         else
+           raise "Could not create new analysis"
+         end
+
+         # check if we need to upload the analysis zip file
+         if options[:upload_file]
+           raise "upload file does not exist #{options[:upload_file]}" if !File.exists?(options[:upload_file])
+
+           payload = {:file => Faraday::UploadIO.new(options[:upload_file], 'application/zip')}
+           response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload
+
+           if response.status == 201
+             puts "Successfully uploaded ZIP file"
+           else
+             raise response.inspect
+           end
+         end
+
+         analysis_id
+       end
+
+       def upload_datapoint(analysis_id, options)
+         defaults = {reset_uuids: false}
+         options = defaults.merge(options)
+
+         raise "No analysis id passed" if analysis_id.nil?
+         raise "No datapoint file passed to upload_datapoint" if !options[:datapoint_file]
+         raise "No datapoint_file exists #{options[:datapoint_file]}" if !File.exists?(options[:datapoint_file])
+
+         dp_hash = JSON.parse(File.open(options[:datapoint_file]).read, :symbolize_names => true)
+
+         if options[:reset_uuids]
+           dp_hash[:analysis_uuid] = analysis_id
+           dp_hash[:uuid] = UUID.new.generate
+         end
+
+         # merge in the analysis_id as it has to be what is in the database
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/data_points.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = dp_hash.to_json
+         end
+
+         if response.status == 201
+           puts "new datapoints created for analysis #{analysis_id}"
+         else
+           raise "could not create new datapoints #{response.body}"
+         end
+       end
+
+       def upload_datapoints(analysis_id, options)
+         defaults = {}
+         options = defaults.merge(options)
+
+         raise "No analysis id passed" if analysis_id.nil?
+         raise "No datapoints file passed to upload_datapoints" if !options[:datapoints_file]
+         raise "No datapoints_file exists #{options[:datapoints_file]}" if !File.exists?(options[:datapoints_file])
+
+         dp_hash = JSON.parse(File.open(options[:datapoints_file]).read, :symbolize_names => true)
+
+         # merge in the analysis_id as it has to be what is in the database
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = dp_hash.to_json
+         end
+
+         if response.status == 201
+           puts "new datapoints created for analysis #{analysis_id}"
+         else
+           raise "could not create new datapoints #{response.body}"
+         end
+       end
+
+       def run_analysis(analysis_id, options)
+         defaults = {analysis_action: "start", without_delay: false}
+         options = defaults.merge(options)
+
+         puts "Run analysis is configured with #{options.to_json}"
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/action.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = options.to_json
+           req.options[:timeout] = 1800 # seconds
+         end
+
+         if response.status == 200
+           puts "Received request to run analysis #{analysis_id}"
+         else
+           raise "Could not start the analysis"
+         end
+       end
+
+       def kill_analysis(analysis_id)
+         analysis_action = {analysis_action: "stop"}
+
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/action.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = analysis_action.to_json
+         end
+
+         if response.status == 200
+           puts "Killed analysis #{analysis_id}"
+         else
+           # raise "Could not kill the analysis with response of #{response.inspect}"
+         end
+       end
+
+       def kill_all_analyses
+         project_ids = get_project_ids
+         puts "List of project ids: #{project_ids}"
+
+         project_ids.each do |project_id|
+           analysis_ids = get_analyses(project_id)
+           puts analysis_ids
+           analysis_ids.each do |analysis_id|
+             puts "Trying to kill #{analysis_id}"
+             kill_analysis(analysis_id)
+           end
+         end
+       end
+
+       def get_datapoint_status(analysis_id, filter = nil)
+         # get the status of the entire analysis
+         if !analysis_id.nil?
+           if filter.nil? || filter == ""
+             resp = @conn.get "analyses/#{analysis_id}/status.json"
+             puts "Data points (all): #{resp}"
+           else
+             resp = @conn.get "#{@hostname}/analyses/#{analysis_id}/status.json", {jobs: filter}
+             puts "Data points (#{filter}): #{resp}"
+           end
+         end
+       end
+     end
+   end
+ end
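Beyond creating and starting analyses, `ServerApi` also exposes helpers for batch data point uploads, status polling, and cleanup. A rough sketch, assuming a server is already running with at least one project and analysis; the batch file path and the "queued" job filter are assumptions, not values confirmed by this gem:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new

# grab the first analysis of the first project (assumes one already exists)
project_id = api.get_project_ids.first
analysis_id = api.get_analyses(project_id).first

# batch-upload data points from a hypothetical JSON file, then poll status
api.upload_datapoints(analysis_id, datapoints_file: "./export/data_points_batch.json")
api.get_datapoint_status(analysis_id)            # all data points
api.get_datapoint_status(analysis_id, "queued")  # filtered by job state (assumed filter value)

# stop every analysis on the server when finished
api.kill_all_analyses
```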
data/lib/openstudio/analysis/translator/excel.rb ADDED
@@ -0,0 +1,366 @@
+ module OpenStudio
+   module Analysis
+     module Translator
+       class Excel
+         attr_reader :variables
+         attr_reader :models
+         attr_reader :weather_files
+
+         # remove these once we have classes to construct the JSON file
+         attr_reader :name
+         attr_reader :number_of_samples
+
+         # pass in the filename to read
+         def initialize(xls_filename)
+           @root_path = File.expand_path(File.dirname(xls_filename))
+
+           @xls = nil
+           # try to read the spreadsheet as a roo object
+           if File.exists?(xls_filename)
+             @xls = Roo::Spreadsheet.open(xls_filename)
+           else
+             raise "File #{xls_filename} does not exist"
+           end
+
+           # Initialize some other instance variables
+           @weather_files = []
+           @models = []
+           @other_files = []
+           @export_path = "./export"
+           @measure_path = "./measures"
+           @number_of_samples = 0
+
+           @setup = parse_setup()
+           @variables = parse_variables()
+
+           # call validate to make sure everything that is needed exists (i.e. directories)
+           validate_analysis()
+         end
+
+         # Save off the legacy format of the JSON file
+         def save_variable_json(filename)
+           FileUtils.rm_f(filename) if File.exists?(filename)
+           File.open(filename, 'w') { |f| f << JSON.pretty_generate(@variables) }
+         end
+
+         def validate_analysis
+           # Setup the paths and do some error checking
+           raise "Measures directory '#{@measure_path}' does not exist" unless Dir.exists?(@measure_path)
+
+           @models.uniq!
+           raise "No seed models defined in spreadsheet" if @models.empty?
+
+           @models.each do |model|
+             raise "Seed model does not exist: #{model[:path]}" unless File.exists?(model[:path])
+           end
+
+           @weather_files.uniq!
+           raise "No weather files found based on what is in the spreadsheet" if @weather_files.empty?
+
+           @weather_files.each do |wf|
+             raise "Weather file does not exist: #{wf}" unless File.exists?(wf)
+           end
+
+           # This can be a directory as well
+           @other_files.each do |of|
+             raise "Other files do not exist for: #{of[:path]}" unless File.exists?(of[:path])
+           end
+
+           FileUtils.mkdir_p(@export_path)
+         end
+
+         def save_analysis
+           # save the format in the OpenStudio analysis json format
+           new_json = translate_to_analysis_json()
+
+           # iterate over each model and save the zip and json
+           @models.each do |model|
+             save_analysis_zip(model)
+             save_analysis_json(new_json, model)
+           end
+         end
+
+         protected
+
+         # helper method for ERB
+         def get_binding
+           binding
+         end
+
+         # TODO: move this into a new class that helps construct this file
+         def translate_to_analysis_json
+           # Load in the templates for constructing the JSON file
+           template_root = File.join(File.dirname(__FILE__), "../../templates")
+           analysis_template = ERB.new(File.open("#{template_root}/analysis.json.erb", 'r').read)
+           workflow_template = ERB.new(File.open("#{template_root}/workflow_item.json.erb", 'r').read)
+           uncertain_variable_template = ERB.new(File.open("#{template_root}/uncertain_variable.json.erb", 'r').read)
+           discrete_uncertain_variable_template = ERB.new(File.open("#{template_root}/discrete_uncertain_variable.json.erb", 'r').read)
+           static_variable_template = ERB.new(File.open("#{template_root}/static_variable.json.erb", 'r').read)
+           pivot_variable_template = ERB.new(File.open("#{template_root}/pivot_variable.json.erb", 'r').read)
+           argument_template = ERB.new(File.open("#{template_root}/argument.json.erb", 'r').read)
+
+           # Templated analysis json file (this is what is returned)
+           puts "Analysis name is #{@name}"
+           openstudio_analysis_json = JSON.parse(analysis_template.result(get_binding))
+
+           @measure_index = -1
+           @variables['data'].each do |measure|
+             # With OpenStudio server we need to create the workflow with all the measure instances
+             if measure['enabled'] && measure['name'] != 'baseline'
+               @measure_index += 1
+
+               puts " Adding measure item '#{measure['name']}'"
+               @measure = measure
+               @measure['measure_file_name_dir'] = @measure['measure_file_name'].underscore
+
+               # Grab the measure json file out of the right directory
+               wf = JSON.parse(workflow_template.result(get_binding))
+
+               # add in the variables
+               measure['variables'].each do |variable|
+                 @variable = variable
+
+                 # Determine if the row is supposed to be an argument or a variable to be perturbed.
+                 if @variable['variable_type'] == 'argument'
+                   ag = nil
+                   if @variable['method'] == 'static'
+                     # add this as an argument
+                     ag = JSON.parse(argument_template.result(get_binding))
+                   else
+                     raise "cannot have an argument without a static value defined with which to set the argument"
+                   end
+                   wf['arguments'] << ag
+                 else
+                   vr = nil
+                   if @variable['method'] == 'static'
+                     # add this as a static variable
+                     vr = JSON.parse(static_variable_template.result(get_binding))
+                   elsif @variable['method'] == 'lhs'
+                     if @variable['type'] == 'enum'
+                       @values_and_weights = @variable['distribution']['enumerations'].map { |v| {value: v} }.to_json
+                       vr = JSON.parse(discrete_uncertain_variable_template.result(get_binding))
+                     else
+                       vr = JSON.parse(uncertain_variable_template.result(get_binding))
+                     end
+                   elsif @variable['method'] == 'pivot'
+                     @values_and_weights = @variable['distribution']['enumerations'].map { |v| {value: v} }.to_json
+                     vr = JSON.parse(pivot_variable_template.result(get_binding))
+                   end
+                   wf['variables'] << vr
+                 end
+               end
+
+               openstudio_analysis_json['analysis']['problem']['workflow'] << wf
+             end
+           end
+
+           openstudio_analysis_json
+         end
+
+         # Package up the seed, weather files, and measures
+         def save_analysis_zip(model)
+           zipfile_name = "#{@export_path}/#{model[:name]}.zip"
+           FileUtils.rm_f(zipfile_name) if File.exists?(zipfile_name)
+
+           Zip::File.open(zipfile_name, Zip::File::CREATE) do |zipfile|
+             @weather_files.each do |filename|
+               puts " Adding #{filename}"
+               zipfile.add("./weather/#{File.basename(filename)}", filename)
+             end
+
+             Dir.glob("#{@measure_path}/**/*.rb").each do |measure|
+               next if measure.include?("spec") # don't include the spec folders nor files
+               measure_name = measure.split(File::SEPARATOR).last(2).first
+               puts " Adding ./measures/#{measure_name}/#{File.basename(measure)}"
+               zipfile.add("./measures/#{measure_name}/#{File.basename(measure)}", measure)
+             end
+
+             puts "Adding #{model[:path]}"
+             zipfile.add("./seed/#{File.basename(model[:path])}", model[:path])
+
+             puts "Adding in other files #{@other_files.inspect}"
+             @other_files.each do |others|
+               Dir[File.join(others[:path], '**', '**')].each do |file|
+                 zipfile.add(file.sub(others[:path], "./lib/#{others[:lib_zip_name]}/"), file)
+               end
+             end
+           end
+         end
+
+         def save_analysis_json(analysis_json, model)
+           # Set the seed model in the analysis_json
+           analysis_json['analysis']['seed']['file_type'] = model[:type]
+           # This is the path that will be seen on the server when this runs
+           analysis_json['analysis']['seed']['path'] = "./seed/#{File.basename(model[:path])}"
+
+           # Set the weather file as the first in the list -- this is optional
+           # TODO: check if epw or if zip file
+           analysis_json['analysis']['weather_file']['file_type'] = 'EPW'
+           analysis_json['analysis']['weather_file']['path'] = "./weather/#{File.basename(@weather_files.first, '.zip')}.epw"
+
+           json_file_name = "#{@export_path}/#{model[:name]}.json"
+           FileUtils.rm_f(json_file_name) if File.exists?(json_file_name)
+
+           File.open("#{@export_path}/#{model[:name]}.json", "w") { |f| f << JSON.pretty_generate(analysis_json) }
+         end
+
+         # parse_setup will pull out the data on the "Setup" tab and store it in memory for later use
+         def parse_setup()
+           rows = @xls.sheet('Setup').parse()
+           b_run_setup = false
+           b_problem_setup = false
+           b_weather_files = false
+           b_models = false
+           b_other_libs = false
+
+           rows.each do |row|
+             if row[0] == "Running Setup"
+               b_run_setup = true
+               b_problem_setup = false
+               b_weather_files = false
+               b_models = false
+               b_other_libs = false
+               next
+             elsif row[0] == "Problem Definition"
+               b_run_setup = false
+               b_problem_setup = true
+               b_weather_files = false
+               b_models = false
+               b_other_libs = false
+               next
+             elsif row[0] == "Weather Files"
+               b_run_setup = false
+               b_problem_setup = false
+               b_weather_files = true
+               b_models = false
+               b_other_libs = false
+               next
+             elsif row[0] == "Models"
+               b_run_setup = false
+               b_problem_setup = false
+               b_weather_files = false
+               b_models = true
+               b_other_libs = false
+               next
+             elsif row[0] == "Other Library Files"
+               b_run_setup = false
+               b_problem_setup = false
+               b_weather_files = false
+               b_models = false
+               b_other_libs = true
+               next
+             end
+
+             next if row[0].nil?
+
+             if b_run_setup
+               @name = row[1].chomp if row[0] == "Analysis Name"
+               @export_path = File.expand_path(File.join(@root_path, row[1])) if row[0] == "Export Directory"
+               @measure_path = File.expand_path(File.join(@root_path, row[1])) if row[0] == "Measure Directory"
+             elsif b_problem_setup
+               @number_of_samples = row[1].to_i if row[0] == "Number of Samples"
+             elsif b_weather_files
+               if row[0] == "Weather File"
+                 @weather_files += Dir.glob(File.expand_path(File.join(@root_path, row[1])))
+               end
+             elsif b_models
+               @models << {name: row[1], type: row[2], path: File.expand_path(File.join(@root_path, row[3]))}
+             elsif b_other_libs
+               @other_files << {lib_zip_name: row[1], path: row[2]}
+             end
+           end
+         end
+
+         # parse_variables will parse the XLS spreadsheet and save the data into
+         # a higher level JSON file. The JSON file is historic and it should really
+         # be omitted as an intermediate step
+         def parse_variables()
+           rows = @xls.sheet('Sensitivity').parse()
+           data = {}
+           data['data'] = []
+
+           icnt = 0
+           measure_index = -1
+           variable_index = -1
+           measure_name = nil
+           rows.each do |row|
+             icnt += 1
+             # puts "Parsing line: #{icnt}"
+             next if icnt <= 3 # skip the first 3 lines of the file
+
+             # check if we are a measure
+             if row[0].nil?
+               unless measure_name.nil?
+                 variable_index += 1
+
+                 var = {}
+                 var['variable_type'] = row[1]
+                 var['display_name'] = row[2].strip
+                 var['machine_name'] = var['display_name'].downcase.strip.gsub("-", "_").gsub(" ", "_").strip
+                 var['name'] = row[3].strip
+                 var['index'] = variable_index # order of the variable (eventually use to force order of applying measures)
+
+                 var['method'] = row[4]
+                 var['type'] = row[5]
+                 var['units'] = row[6]
+
+                 var['distribution'] = {}
+
+                 # parse the enums
+                 if var['type'] == 'enum'
+                   var['distribution']['enumerations'] = row[8].gsub("|", "").split(",").map { |v| v.strip }
+                 elsif var['type'] == 'bool' # TODO: I think this has been deprecated
+                   var['distribution']['enumerations'] = []
+                   var['distribution']['enumerations'] << 'true'
+                   var['distribution']['enumerations'] << 'false'
+                 end
+
+                 if var['method'] == 'lhs'
+                   var['distribution']['min'] = row[9]
+                   var['distribution']['max'] = row[10]
+                   var['distribution']['mean'] = row[11]
+                   var['distribution']['stddev'] = row[12]
+                   var['distribution']['type'] = row[13]
+                   var['distribution']['source'] = row[14]
+                 elsif var['method'] == 'static'
+                   var['distribution']['static_value'] = row[7]
+                   # var['distribution']['apply_method'] = row[14]
+                   var['distribution']['source'] = row[14]
+                 end
+
+                 var['notes'] = row[15]
+                 var['relation_to_eui'] = row[16]
+
+                 data['data'][measure_index]['apply_method'] ||= row[14]
+                 data['data'][measure_index]['variables'] << var
+               end
+             else
+               measure_index += 1
+               variable_index = 0
+               data['data'][measure_index] = {}
+
+               # generate name id
+               puts "Parsing measure #{row[1]}"
+               display_name = row[1].chomp.strip
+               measure_name = display_name.downcase.strip.gsub("-", "_").gsub(" ", "_")
+               data['data'][measure_index]['display_name'] = display_name
+               data['data'][measure_index]['name'] = measure_name
+               data['data'][measure_index]['enabled'] = row[0] == "TRUE" ? true : false
+               data['data'][measure_index]['measure_file_name'] = row[2]
+               data['data'][measure_index]['measure_file_name_directory'] = row[2].underscore
+               data['data'][measure_index]['version'] = @version_id
+
+               data['data'][measure_index]['variables'] = []
+             end
+           end
+
+           data
+         end
+       end
+     end
+   end
+ end
data/lib/openstudio/analysis/version.rb ADDED
@@ -0,0 +1,6 @@
+ module OpenStudio
+   module Analysis
+     VERSION = "0.1"
+     OPENSTUDIO_VERSION = "1.1.2"
+   end
+ end
data/lib/openstudio/helpers/string.rb ADDED
@@ -0,0 +1,12 @@
+ # add the underscore method from Rails for snake_casing strings
+
+ class String
+   def underscore
+     self.gsub(/::/, '/').
+       gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2').
+       gsub(/([a-z\d])([A-Z])/, '\1_\2').
+       tr("-", "_").
+       downcase
+   end
+ end
+
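For reference, the monkey-patched `String#underscore` behaves like the Rails helper of the same name; a couple of illustrative (hypothetical) inputs:

```ruby
require 'openstudio/helpers/string'

"ReduceLightingLoadsByPercentage".underscore # => "reduce_lighting_loads_by_percentage"
"BCL-Measure".underscore                     # => "bcl_measure"
```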
data/lib/openstudio/templates/analysis.json.erb ADDED
@@ -0,0 +1,26 @@
+ {
+   "analysis": {
+     "display_name": "<%= @name %>",
+     "name": "<%= @name.downcase.gsub(" ", "_") %>",
+     "algorithm": {
+       "sample_method": "lhs",
+       "sensitivity_method": "single_measure",
+       "number_of_samples": <%= @number_of_samples %>
+     },
+     "parameter_space": [],
+     "problem": {
+       "number_of_samples_KEEP_HERE_UNTIL_ALGO_IS_IMPLEMENTED": <%= @number_of_samples %>,
+       "number_of_samples": <%= @number_of_samples %>,
+       "name": "Problem",
+       "workflow": []
+     },
+     "seed": {
+       "file_type": "xml",
+       "path": "unknown"
+     },
+     "weather_file": {
+       "file_type": "EPW",
+       "path": "unknown"
+     }
+   }
+ }
data/lib/openstudio/templates/argument.json.erb ADDED
@@ -0,0 +1,10 @@
+ {
+   "display_name": "<%= @variable['display_name'] %>",
+   "machine_name": "<%= @variable['machine_name'] %>",
+   "name": "<%= @variable['name'] %>",
+   "value": "<%= @variable['distribution']['static_value'] %>",
+   "uuid_DELETEME": "<%= UUID.new.generate %>",
+   "uuid": "<%= UUID.new.generate %>",
+   "version_uuid_DELETEME": "<%= UUID.new.generate %>",
+   "version_uuid": "<%= UUID.new.generate %>"
+ }
data/lib/openstudio/templates/discrete_uncertain_variable.json.erb ADDED
@@ -0,0 +1,32 @@
+ {
+   "argument": {
+     "display_name": "<%= @variable['display_name'] %>",
+     "machine_name": "<%= @variable['machine_name'] %>",
+     "name": "<%= @variable['name'] %>",
+     "required": false,
+     "uuid": "a0618d15-bb0b-4494-a72f-8ad628693a7e",
+     "version_uuid": "b33cf6b0-f1aa-4706-afab-9470e6bd1912"
+   },
+   "display_name": "<%= @variable['display_name'] %>",
+   "machine_name": "<%= @variable['machine_name'] %>",
+   "name": "<%= @variable['machine_name'] %>",
+   "units": "<%= @variable['units'] %>",
+   "variable": true,
+   "variable_ADDME": true,
+   "relation_to_output": "<%= @variable['relation_to_eui'] %>",
+   "uncertainty_description": {
+     "attributes": [
+       {
+         "name": "discrete",
+         "values_and_weights":
+         <%= @values_and_weights %>
+       }
+     ],
+     "type": "discrete_uncertain"
+   },
+   "uuid": "d157d709-b83e-42bd-bd30-1bc3a7979672",
+   "variable_type": "RubyContinuousVariable",
+   "version_uuid": "26e7d8de-83e3-4a53-938b-45d3e0f29953"
+ }
+
+
data/lib/openstudio/templates/pivot_variable.json.erb ADDED
@@ -0,0 +1,31 @@
+ {
+   "argument": {
+     "display_name": "<%= @variable['display_name'] %>",
+     "name": "<%= @variable['name'] %>",
+     "required": false,
+     "uuid": "a0618d15-bb0b-4494-a72f-8ad628693a7e",
+     "version_uuid": "b33cf6b0-f1aa-4706-afab-9470e6bd1912"
+   },
+   "display_name": "<%= @variable['display_name'] %>",
+   "machine_name": "<%= @variable['machine_name'] %>",
+   "name": "<%= @variable['machine_name'] %>",
+   "units": "<%= @variable['units'] %>",
+   "pivot": true,
+   "pivot_ADDME": true,
+   "relation_to_output": null,
+   "uncertainty_description": {
+     "attributes": [
+       {
+         "name": "discrete",
+         "values_and_weights":
+         <%= @values_and_weights %>
+       }
+     ],
+     "type": "discrete_uncertain"
+   },
+   "uuid": "d157d709-b83e-42bd-bd30-1bc3a7979672",
+   "variable_type": "RubyContinuousVariable",
+   "version_uuid": "26e7d8de-83e3-4a53-938b-45d3e0f29953"
+ }
+
+
data/lib/openstudio/templates/static_variable.json.erb ADDED
@@ -0,0 +1,22 @@
+ {
+   "argument": {
+     "display_name": "<%= @variable['display_name'] %>",
+     "machine_name": "<%= @variable['machine_name'] %>",
+     "name": "<%= @variable['name'] %>",
+     "uuid": "<%= UUID.new.generate %>",
+     "version_uuid": "b33cf6b0-f1aa-4706-afab-9470e6bd1912"
+   },
+   "display_name": "<%= @variable['display_name'] %>",
+   "machine_name": "<%= @variable['machine_name'] %>",
+   "name": "<%= @variable['machine_name'] %>",
+   "units": "<%= @variable['units'] %>",
+   "static": true,
+   "static_ADDME": true,
+   "relation_to_output": "<%= @variable['relation_to_eui'] %>",
+   "relation_to_output_ADDME": "<%= @variable['relation_to_eui'] %>",
+   "static_value": <%= @variable['distribution']['static_value'].inspect %>,
+   "static_value_ADDME": <%= @variable['distribution']['static_value'].inspect %>,
+   "uuid": "d157d709-b83e-42bd-bd30-1bc3a7979672",
+   "variable_type": "RubyContinuousVariable",
+   "version_uuid": "26e7d8de-83e3-4a53-938b-45d3e0f29953"
+ }
data/lib/openstudio/templates/uncertain_variable.json.erb ADDED
@@ -0,0 +1,42 @@
+ {
+   "argument": {
+     "display_name": "<%= @variable['display_name'] %>",
+     "machine_name": "<%= @variable['machine_name'] %>",
+     "name": "<%= @variable['name'] %>",
+     "uuid": "a0618d15-bb0b-4494-a72f-8ad628693a7e",
+     "version_uuid": "b33cf6b0-f1aa-4706-afab-9470e6bd1912"
+   },
+   "display_name": "<%= @variable['display_name'] %>",
+   "machine_name": "<%= @variable['machine_name'] %>",
+   "name": "<%= @variable['machine_name'] %>",
+   "minimum": <%= @variable['distribution']['min'].inspect %>,
+   "maximum": <%= @variable['distribution']['max'].inspect %>,
+   "units": "<%= @variable['units'] %>",
+   "variable": true,
+   "variable_ADDME": true,
+   "relation_to_output": "<%= @variable['relation_to_eui'] %>",
+   "uncertainty_description": {
+     "attributes": [
+       {
+         "name": "modes",
+         "value": <%= @variable['distribution']['mean'] %>
+       },
+       {
+         "name": "lower_bounds",
+         "value": <%= @variable['distribution']['min'] %>
+       },
+       {
+         "name": "upper_bounds",
+         "value": <%= @variable['distribution']['max'] %>
+       },
+       {
+         "name": "stddev",
+         "value": <%= @variable['distribution']['stddev'] %>
+       }
+     ],
+     "type": <%= @variable['distribution']['type'].inspect %>
+   },
+   "uuid": "d157d709-b83e-42bd-bd30-1bc3a7979672",
+   "variable_type": "RubyContinuousVariable",
+   "version_uuid": "26e7d8de-83e3-4a53-938b-45d3e0f29953"
+ }
data/lib/openstudio/templates/workflow_item.json.erb ADDED
@@ -0,0 +1,19 @@
+ {
+   "arguments": [],
+   "bcl_measure_directory": "./measures/<%= @measure['measure_file_name_directory'] %>",
+   "measure_definition_directory_CHANGE_TO_ME": "./measures/<%= @measure['measure_file_name_directory'] %>",
+   "bcl_measure_class_name_ADDME": "<%= @measure['measure_file_name'] %>",
+   "measure_definition_class_name_CHANGE_TO_ME": "<%= @measure['measure_file_name'] %>",
+   "bcl_measure_uuid": "<%= UUID.new.generate %>",
+   "measure_definition_uuid_CHANGE_TO_ME": "<%= UUID.new.generate %>",
+   "bcl_measure_version_uuid": "<%= UUID.new.generate %>",
+   "measure_definition_version_uuid_CHANGE_TO_ME": "<%= UUID.new.generate %>",
+   "measure_type": "XmlMeasure",
+   "name": "<%= @measure['name'] %>",
+   "display_name": "<%= @measure['display_name'] %>",
+   "uuid": "<%= UUID.new.generate %>",
+   "variables": [],
+   "version_uuid": "<%= UUID.new.generate %>",
+   "workflow_index": <%= @measure_index %>,
+   "workflow_step_type": "Measure"
+ }
data/spec/openstudio-analysis/server_api_spec.rb ADDED
@@ -0,0 +1,24 @@
+ require 'spec_helper'
+
+ describe OpenStudio::Analysis::ServerApi do
+   context "create a new localhost instance" do
+     before(:all) do
+       @api = OpenStudio::Analysis::ServerApi.new
+     end
+
+     it "should set the default host to localhost" do
+       @api.hostname.should eq("http://localhost:8080")
+     end
+   end
+
+   context "test not localhost" do
+     it "should have a non-localhost URL" do
+       options = {hostname: "http://abc.def.ghi"}
+       api = OpenStudio::Analysis::ServerApi.new(options)
+       api.hostname.should eq(options[:hostname])
+
+     end
+   end
+
+
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,8 @@
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
+
+ require 'rspec'
+ require 'openstudio-analysis'
+
+
+
metadata ADDED
@@ -0,0 +1,133 @@
+ --- !ruby/object:Gem::Specification
+ name: openstudio-analysis
+ version: !ruby/object:Gem::Version
+   version: '0.1'
+ platform: ruby
+ authors:
+ - Nicholas Long
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-11-18 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: json
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: faraday
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: uuid
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: roo
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.12.2
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.12.2
+ - !ruby/object:Gem::Dependency
+   name: rubyzip
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.1.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.1.0
+ description: Basic classes for generating the files needed for OpenStudio-Server
+ email:
+ - Nicholas.Long@nrel.gov
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/openstudio/analysis/server_api.rb
+ - lib/openstudio/analysis/translator/excel.rb
+ - lib/openstudio/analysis/version.rb
+ - lib/openstudio/helpers/string.rb
+ - lib/openstudio/templates/analysis.json.erb
+ - lib/openstudio/templates/argument.json.erb
+ - lib/openstudio/templates/discrete_uncertain_variable.json.erb
+ - lib/openstudio/templates/pivot_variable.json.erb
+ - lib/openstudio/templates/static_variable.json.erb
+ - lib/openstudio/templates/uncertain_variable.json.erb
+ - lib/openstudio/templates/workflow_item.json.erb
+ - lib/openstudio-analysis.rb
+ - README.md
+ - Rakefile
+ - spec/openstudio-analysis/server_api_spec.rb
+ - spec/spec_helper.rb
+ homepage: http://openstudio.nrel.gov
+ licenses:
+ - LGPL
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '2.0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: 1.3.6
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.0.2
+ signing_key:
+ specification_version: 4
+ summary: Create JSON, ZIP to communicate with OpenStudio Distributed Analysis in the
+   Cloud
+ test_files:
+ - spec/openstudio-analysis/server_api_spec.rb
+ - spec/spec_helper.rb