openstudio-analysis 1.3.5 → 1.3.6

@@ -1,454 +1,454 @@
# *******************************************************************************
# OpenStudio(R), Copyright (c) Alliance for Sustainable Energy, LLC.
# See also https://openstudio.net/license
# *******************************************************************************

module OpenStudio
  module Analysis
    module Translator
      class Datapoints
        attr_reader :version
        attr_reader :settings
        attr_reader :variables
        attr_reader :outputs
        attr_reader :models
        attr_reader :measure_paths
        attr_reader :weather_paths
        attr_reader :worker_inits
        attr_reader :worker_finals
        attr_reader :export_path
        attr_reader :cluster_name
        attr_reader :variables
        attr_reader :algorithm
        attr_reader :problem
        attr_reader :run_setup
        attr_reader :aws_tags

        # remove these once we have classes to construct the JSON file
        attr_accessor :name
        attr_reader :analysis_name

        require 'rexml/document'

        # Pass in the filename to read
        def initialize(csv_filename)
          @csv_filename = csv_filename
          @root_path = File.expand_path(File.dirname(@csv_filename))

          @csv = nil
          # Try to read the spreadsheet as a roo object
          if File.exist?(@csv_filename)
            @csv = CSV.read(@csv_filename)
          else
            raise "File #{@csv_filename} does not exist"
          end

          # Remove nil rows and check row length
          @csv.delete_if { |row| row.uniq.length == 1 && row.uniq[0].nil? }

          # Initialize some other instance variables
          @version = '0.0.1'
          @analyses = [] # Array of OpenStudio::Analysis. Use method to access
          @name = nil
          @analysis_name = nil
          @cluster_name = nil
          @settings = {}
          @weather_paths = []
          @models = []
          @other_files = []
          @worker_inits = []
          @worker_finals = []
          @export_path = './export'
          @measure_paths = []
          @problem = {}
          @algorithm = {}
          @outputs = {}
          @run_setup = {}
          @aws_tags = []
        end

        def process
          # Separate CSV into meta and measure groups
          measure_tag_index = nil
          @csv.each_with_index { |row, index| measure_tag_index = index if row[0] == 'BEGIN-MEASURES' }
          raise "ERROR: No 'BEGIN-MEASURES' tag found in input csv file." unless measure_tag_index
          meta_rows = []
          measure_rows = []
          @csv.each_with_index do |_, index|
            meta_rows << @csv[index] if index < measure_tag_index
            measure_rows << @csv[index] if index > measure_tag_index
          end

          @setup = parse_csv_meta(meta_rows)

          @version = Semantic::Version.new @version
          raise "CSV interface version #{@version} is no longer supported. Please upgrade your csv interface to at least 0.0.1" if @version < '0.0.0'

          @variables = parse_csv_measures(measure_rows)

          # call validate to make sure everything that is needed exists (i.e. directories)
          validate_analysis
        end

        # Helper methods to remove models and add new ones programmatically. Note that these should
        # be moved into a general analysis class
        def delete_models
          @models = []
        end

        def add_model(name, display_name, type, path)
          @models << {
            name: name,
            display_name: display_name,
            type: type,
            path: path
          }
        end

        def validate_analysis
          # Setup the paths and do some error checking
          @measure_paths.each do |mp|
            raise "Measures directory '#{mp}' does not exist" unless Dir.exist?(mp)
          end

          @models.uniq!
          raise 'No seed models defined in spreadsheet' if @models.empty?

          @models.each do |model|
            raise "Seed model does not exist: #{model[:path]}" unless File.exist?(model[:path])
          end

          @weather_paths.uniq!
          raise 'No weather files found based on what is in the spreadsheet' if @weather_paths.empty?

          @weather_paths.each do |wf|
            raise "Weather file does not exist: #{wf}" unless File.exist?(wf)
          end

          # This can be a directory as well
          @other_files.each do |f|
            raise "Other files do not exist for: #{f[:path]}" unless File.exist?(f[:path])
          end

          @worker_inits.each do |f|
            raise "Worker initialization file does not exist for: #{f[:path]}" unless File.exist?(f[:path])
          end

          @worker_finals.each do |f|
            raise "Worker finalization file does not exist for: #{f[:path]}" unless File.exist?(f[:path])
          end

          FileUtils.mkdir_p(@export_path)

          # verify that the measure display names are unique
          # puts @variables.inspect
          measure_display_names = @variables.map { |m| m[:measure_data][:display_name] }.compact
          measure_display_names_mult = measure_display_names.select { |m| measure_display_names.count(m) > 1 }.uniq
          if measure_display_names_mult && !measure_display_names_mult.empty?
            raise "Measure Display Names are not unique for '#{measure_display_names_mult.join('\', \'')}'"
          end

          variable_names = @variables.map { |v| v[:vars].map { |hash| hash[:display_name] } }.flatten
          dupes = variable_names.select { |e| variable_names.count(e) > 1 }.uniq
          if dupes.count > 0
            raise "duplicate variable names found in list #{dupes.inspect}"
          end
        end

        # convert the parsed excel data into an OpenStudio Analysis Object
        # @seed_model [Hash] Seed model to set the new analysis to
        # @append_model_name [Boolean] Append the name of the seed model to the display name
        # @return [Object] An OpenStudio::Analysis
        def analysis(seed_model = nil, append_model_name = false)
          raise 'There are no seed models defined in the excel file. Please add one.' if @models.empty?
          raise 'There are more than one seed models defined in the excel file. This is not supported by the CSV Translator.' if @models.size > 1 && seed_model.nil?

          seed_model = @models.first if seed_model.nil?

          # Use the programmatic interface to make the analysis
          # append the model name to the analysis name if requested (normally if there are more than 1 models in the spreadsheet)
          display_name = append_model_name ? @name + ' ' + seed_model[:display_name] : @name

          a = OpenStudio::Analysis.create(display_name)

          @variables.each do |measure|
            @measure_paths.each do |measure_path|
              measure_dir_to_add = "#{measure_path}/#{measure[:measure_data][:classname]}"
              if Dir.exist? measure_dir_to_add
                if File.exist? "#{measure_dir_to_add}/measure.rb"
                  measure[:measure_data][:local_path_to_measure] = "#{measure_dir_to_add}/measure.rb"
                  break
                else
                  raise "Measure in directory '#{measure_dir_to_add}' did not contain a measure.rb file"
                end
              end
            end

            raise "Could not find measure '#{measure['name']}' in directory named '#{measure['measure_file_name_directory']}' in the measure paths '#{@measure_paths.join(', ')}'" unless measure[:measure_data][:local_path_to_measure]

            a.workflow.add_measure_from_csv(measure)
          end

          @other_files.each do |library|
            a.libraries.add(library[:path], library_name: library[:lib_zip_name])
          end

          @worker_inits.each do |w|
            a.worker_inits.add(w[:path], args: w[:args])
          end

          @worker_finals.each do |w|
            a.worker_finalizes.add(w[:path], args: w[:args])
          end

          # Add in the outputs
          @outputs.each do |o|
            o = Hash[o.map { |k, v| [k.to_sym, v] }]
            a.add_output(o)
          end

          a.analysis_type = @problem['analysis_type']

          # clear out the seed files before adding new ones
          a.seed_model = seed_model[:path]

          # clear out the weather files before adding new ones
          a.weather_files.clear
          @weather_paths.each do |wp|
            a.weather_files.add_files(wp)
          end

          a
        end

        protected

        def parse_csv_meta(meta_rows)
          # Convert to hash
          config_hash = {}
          meta_rows.each do |row|
            config_hash[row[0].to_sym] = row[1]
          end

          # Assign required attributes
          raise 'Required setting not found: version' unless config_hash[:version]
          @version = config_hash[:version]

          if config_hash[:analysis_name]
            @name = config_hash[:analysis_name]
          else
            @name = SecureRandom.uuid
          end
          @analysis_name = @name.to_underscore

          raise 'Required setting not found: measure_paths' unless config_hash[:measure_paths]
          config_hash[:measure_paths] = [config_hash[:measure_paths]] unless config_hash[:measure_paths].respond_to?(:each)
          config_hash[:measure_paths].each do |path|
            if (Pathname.new path).absolute?
              @measure_paths << path
            else
              @measure_paths << File.expand_path(File.join(@root_path, path))
            end
          end

          raise 'Required setting not found: weather_paths' unless config_hash[:weather_paths]
          config_hash[:weather_paths] = config_hash[:weather_paths].split(',')
          config_hash[:weather_paths].each do |path|
            if (Pathname.new path).absolute?
              @weather_paths << path
            else
              @weather_paths << File.expand_path(File.join(@root_path, path))
            end
          end

          raise 'Required setting not found: models' unless config_hash[:models]
          config_hash[:models] = [config_hash[:models]] unless config_hash[:models].respond_to?(:each)
          config_hash[:models].each do |path|
            model_name = File.basename(path).split('.')[0]
            model_name = SecureRandom.uuid if model_name == ''
            type = File.basename(path).split('.')[1].upcase
            unless (Pathname.new path).absolute?
              path = File.expand_path(File.join(@root_path, path))
            end
            @models << { name: model_name.to_underscore, display_name: model_name, type: type, path: path }
          end

          # Assign optional attributes
          if config_hash[:output_json]
            path = File.expand_path(File.join(@root_path, config_hash[:output_json].to_s))
            if File.exist? path
              @outputs = MultiJson.load(File.read(path))
            else
              raise "Could not find output json: #{config_hash[:output_json]}"
            end
          end

          if config_hash[:export_path]
            if (Pathname.new config_hash[:export_path]).absolute?
              @export_path = config_hash[:export_path]
            else
              @export_path = File.expand_path(File.join(@root_path, config_hash[:export_path]))
            end
          end

          if config_hash[:library_path]
            library_name = File.basename(config_hash[:library_path]).split('.')[0]
            unless (Pathname.new config_hash[:library_path]).absolute?
              config_hash[:library_path] = File.expand_path(File.join(@root_path, config_hash[:library_path]))
            end
            @other_files << { lib_zip_name: library_name, path: config_hash[:library_path] }
          end

          if config_hash[:allow_multiple_jobs]
            raise 'allow_multiple_jobs is no longer a valid option in the CSV, please delete and rerun'
          end
          if config_hash[:use_server_as_worker]
            raise 'use_server_as_worker is no longer a valid option in the CSV, please delete and rerun'
          end

          # Assign AWS settings
          @settings[:proxy_port] = config_hash[:proxy_port] if config_hash[:proxy_port]
          @settings[:cluster_name] = config_hash[:cluster_name] if config_hash[:cluster_name]
          @settings[:user_id] = config_hash[:user_id] if config_hash[:user_id]
          @settings[:os_server_version] = config_hash[:os_server_version] if config_hash[:os_server_version]
          @settings[:server_instance_type] = config_hash[:server_instance_type] if config_hash[:server_instance_type]
          @settings[:worker_instance_type] = config_hash[:worker_instance_type] if config_hash[:worker_instance_type]
          @settings[:worker_node_number] = config_hash[:worker_node_number].to_i if config_hash[:worker_node_number]
          @settings[:aws_tags] = config_hash[:aws_tags] if config_hash[:aws_tags]
          @settings[:analysis_type] = 'batch_datapoints'
        end

        def parse_csv_measures(measure_rows)
          # Build metadata required for parsing
          measures = measure_rows[0].uniq.reject(&:nil?).map(&:to_sym)
          measure_map = {}
          measure_var_list = []
          measures.each do |measure|
            measure_map[measure] = {}
            col_ind = (0..(measure_rows[0].length - 1)).to_a.select { |i| measure_rows[0][i] == measure.to_s }
            col_ind.each do |var_ind|
              tuple = measure.to_s + measure_rows[1][var_ind]
              raise "Multiple measure_variable tuples found for '#{measure}_#{measure_rows[1][var_ind]}'. These tuples must be unique." if measure_var_list.include? tuple
              measure_var_list << tuple
              measure_map[measure][measure_rows[1][var_ind].to_sym] = var_ind
            end
          end

          # For each measure load measure json and parse out critical variable requirements
          data = []
          measures.each_with_index do |measure, measure_index|
            data[measure_index] = {}
            measure_parsed = find_measure(measure.to_s)

            raise "Could not find measure #{measure} xml in measure_paths: '#{@measure_paths.join("\n")}'" unless measure_parsed
            measure_data = {}
            measure_data[:classname] = measure_parsed[:classname]
            measure_data[:name] = measure_parsed[:name]
            measure_data[:display_name] = measure_parsed[:display_name]
            measure_data[:measure_type] = measure_parsed[:measure_type]
            measure_data[:uid] = measure_parsed[:uid]
            measure_data[:version_id] = measure_parsed[:version_id]
            data[measure_index][:measure_data] = measure_data
            data[measure_index][:vars] = []
            vars = measure_map[measure]

            # construct the list of variables
            vars.each do |var|
              # var looks like [:cooling_adjustment, 0]
              var = var[0]
              next if var.to_s == 'None'
              var_hash = {}
              found_arg = nil
              measure_parsed[:arguments].each do |arg|
                if var.to_s == '__SKIP__' || arg[:name] == var.to_s
                  found_arg = arg
                  break
                end
              end

              # var_json = measure_json['arguments'].select { |hash| hash['local_variable'] == var.to_s }[0]
              raise "measure.xml for measure #{measure} does not have an argument with argument == #{var}" unless found_arg
              var_type = nil
              var_units = ''
              if var.to_s == '__SKIP__'
                var_type = 'boolean'
                var_units = ''
              else
                var_type = found_arg[:variable_type].downcase
                var_units = found_arg[:units]
              end

              var_hash[:name] = var.to_s
              var_hash[:variable_type] = 'variable'
              var_hash[:display_name] = measure_rows[2][measure_map[measure][var]]
              var_hash[:display_name_short] = var_hash[:display_name]
              # var_hash[:name] = var_json['local_variable']
              var_hash[:type] = var_type
              var_hash[:units] = var_units
              var_hash[:distribution] = {}
              case var_hash[:type].downcase
              when 'bool', 'boolean'
                var_hash[:distribution][:values] = (3..(measure_rows.length - 1)).map { |value| measure_rows[value.to_i][measure_map[measure][var]].to_s.casecmp('true').zero? }
                var_hash[:distribution][:maximum] = true
                var_hash[:distribution][:minimum] = false
                var_hash[:distribution][:mode] = var_hash[:distribution][:values].group_by { |i| i }.max { |x, y| x[1].length <=> y[1].length }[0]
              when 'choice', 'string'
                var_hash[:distribution][:values] = (3..measure_rows.length - 1).map { |value| measure_rows[value.to_i][measure_map[measure][var]].to_s }
                var_hash[:distribution][:minimum] = var_hash[:distribution][:values].min
                var_hash[:distribution][:maximum] = var_hash[:distribution][:values].max
                var_hash[:distribution][:mode] = var_hash[:distribution][:values].group_by { |i| i }.max { |x, y| x[1].length <=> y[1].length }[0]
              else
                var_hash[:distribution][:values] = (3..(measure_rows.length - 1)).map { |value| eval(measure_rows[value.to_i][measure_map[measure][var]]) }
                var_hash[:distribution][:minimum] = var_hash[:distribution][:values].map(&:to_i).min
                var_hash[:distribution][:maximum] = var_hash[:distribution][:values].map(&:to_i).max
                var_hash[:distribution][:mode] = var_hash[:distribution][:values].group_by { |i| i }.max { |x, y| x[1].length <=> y[1].length }[0]
              end
              var_hash[:distribution][:weights] = eval('[' + "#{1.0 / (measure_rows.length - 3)}," * (measure_rows.length - 3) + ']')
              var_hash[:distribution][:type] = 'discrete'
              var_hash[:distribution][:units] = var_hash[:units]
              if var_hash[:type] == 'choice'
                # var_hash[:distribution][:enumerations] = found_arg.xpath('choices/choice').map { |s| s.xpath('value').text }
                # This would need to be updated if we want to do this again... sorry.
              elsif var_hash[:type] == 'bool'
                var_hash[:distribution][:enumerations] = []
                var_hash[:distribution][:enumerations] << true
                var_hash[:distribution][:enumerations] << false
              end
              data[measure_index][:vars] << var_hash
            end
            data[measure_index][:args] = []

            measure_parsed[:arguments].each do |arg_xml|
              arg = {}
              arg[:value_type] = arg_xml[:variable_type]
              arg[:name] = arg_xml[:name]
              arg[:display_name] = arg_xml[:display_name].downcase
              arg[:display_name_short] = arg[:display_name]
              arg[:default_value] = arg_xml[:default_value].downcase
              arg[:value] = arg[:default_value]
              data[measure_index][:args] << arg
            end
          end

          data
        end

        private

        # Find the measure in the measure path
        def find_measure(measure_name)
          @measure_paths.each do |mp|
            measure_xml = File.join(mp, measure_name, 'measure.xml')
            measure_rb = File.join(mp, measure_name, 'measure.rb')
            if File.exist?(measure_xml) && File.exist?(measure_rb)
              measure_parsed = parse_measure_xml(measure_xml)
              return measure_parsed
            end
          end

          return nil
        end
      end
    end
  end
end
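For context, a minimal usage sketch of the translator listed above, assuming the gem is installed and required as 'openstudio-analysis'; the CSV path is a hypothetical placeholder, and the layout notes in the comments are derived from parse_csv_meta and parse_csv_measures.

# Hypothetical driver script (not part of the gem) showing the typical call
# sequence for OpenStudio::Analysis::Translator::Datapoints.
#
# Expected CSV layout (derived from the parsing code above):
#   key,value meta rows (version, analysis_name, measure_paths, weather_paths, models, ...)
#   a row whose first cell is BEGIN-MEASURES
#   then, in the measure block: row 0 = measure directory names, row 1 = variable
#   names ('__SKIP__' and 'None' are special), row 2 = display names,
#   rows 3+ = one row of values per datapoint.
require 'openstudio-analysis'

csv_path = 'analyses/my_datapoints.csv' # hypothetical input file

translator = OpenStudio::Analysis::Translator::Datapoints.new(csv_path)
translator.process               # parse meta and measure rows, then validate paths
analysis = translator.analysis   # build the OpenStudio::Analysis formulation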