urbanopt-scenario 0.1.1 → 0.3.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (40) hide show
  1. checksums.yaml +5 -5
  2. data/.github/ISSUE_TEMPLATE/bug_report.md +0 -8
  3. data/.github/ISSUE_TEMPLATE/feature_request.md +2 -10
  4. data/.github/pull_request_template.md +5 -15
  5. data/.rubocop.yml +1 -1
  6. data/CHANGELOG.md +45 -1
  7. data/{.github/CONTRIBUTING.md → CONTRIBUTING.md} +0 -0
  8. data/Gemfile +8 -12
  9. data/Jenkinsfile +2 -2
  10. data/Rakefile +2 -2
  11. data/docs/package-lock.json +4607 -6451
  12. data/docs/package.json +1 -1
  13. data/lib/measures/.rubocop.yml +1 -1
  14. data/lib/measures/default_feature_reports/LICENSE.md +1 -1
  15. data/lib/measures/default_feature_reports/README.md +5 -35
  16. data/lib/measures/default_feature_reports/measure.rb +315 -44
  17. data/lib/measures/default_feature_reports/measure.xml +38 -17
  18. data/lib/urbanopt/scenario.rb +1 -0
  19. data/lib/urbanopt/scenario/default_reports/distributed_generation.rb +209 -17
  20. data/lib/urbanopt/scenario/default_reports/feature_report.rb +57 -3
  21. data/lib/urbanopt/scenario/default_reports/power_distribution.rb +102 -0
  22. data/lib/urbanopt/scenario/default_reports/program.rb +6 -2
  23. data/lib/urbanopt/scenario/default_reports/reporting_period.rb +15 -9
  24. data/lib/urbanopt/scenario/default_reports/scenario_report.rb +24 -7
  25. data/lib/urbanopt/scenario/default_reports/schema/README.md +11 -12
  26. data/lib/urbanopt/scenario/default_reports/schema/scenario_csv_columns.txt +33 -12
  27. data/lib/urbanopt/scenario/default_reports/schema/scenario_schema.json +52 -25
  28. data/lib/urbanopt/scenario/default_reports/solar_pv.rb +1 -0
  29. data/lib/urbanopt/scenario/default_reports/timeseries_csv.rb +62 -21
  30. data/lib/urbanopt/scenario/scenario_post_processor_opendss.rb +276 -0
  31. data/lib/urbanopt/scenario/scenario_runner_osw.rb +21 -5
  32. data/lib/urbanopt/scenario/simulation_dir_osw.rb +0 -4
  33. data/lib/urbanopt/scenario/version.rb +1 -1
  34. data/urbanopt-scenario-gem.gemspec +10 -12
  35. metadata +31 -48
  36. data/.travis.yml +0 -23
  37. data/lib/change_log.rb +0 -147
  38. data/lib/measures/default_feature_reports/tests/USA_CO_Golden-NREL.724666_TMY3.epw +0 -8768
  39. data/lib/measures/default_feature_reports/tests/default_feature_reports_test.rb +0 -238
  40. data/lib/measures/default_feature_reports/tests/example_model.osm +0 -4378
@@ -54,6 +54,7 @@ module URBANopt
54
54
  hash.delete_if { |k, v| v.nil? }
55
55
 
56
56
  @size_kw = hash[:size_kw]
57
+ @id = hash[:id]
57
58
 
58
59
  # initialize class variables @@validator and @@schema
59
60
  @@validator ||= Validator.new
@@ -56,7 +56,12 @@ module URBANopt
56
56
 
57
57
  @path = hash[:path]
58
58
  @first_report_datetime = hash[:first_report_datetime]
59
+
60
+ # from scenario csv schema get required results to be aggregated
61
+ @required_column_names = load_scenario_csv_schema_headers
62
+
59
63
  @column_names = hash[:column_names]
64
+ @column_names.delete_if { |x| !@required_column_names.include? x.split('(')[0] }
60
65
 
61
66
  # hash of column_name to array of values, does not get serialized to hash
62
67
  @mutex = Mutex.new
@@ -70,6 +75,24 @@ module URBANopt
70
75
  @@logger ||= URBANopt::Scenario::DefaultReports.logger
71
76
  end
72
77
 
78
+ ##
79
+ # load required scenario report csv headers from reports schema
80
+ ##
81
+ def load_scenario_csv_schema_headers
82
+ # rubocop: disable Security/Open
83
+ scenario_csv_schema = open(File.expand_path('../default_reports/schema/scenario_csv_columns.txt', File.dirname(__FILE__)))
84
+ # rubocop: enable Security/Open
85
+
86
+ scenario_csv_schema_headers = []
87
+ File.readlines(scenario_csv_schema).each do |line|
88
+ l = line.delete("\n")
89
+ a = l.delete("\t")
90
+ r = a.delete("\r")
91
+ scenario_csv_schema_headers << r
92
+ end
93
+ return scenario_csv_schema_headers
94
+ end
95
+
73
96
  ##
74
97
  # Assigns default values if values does not exist.
75
98
  ##
@@ -130,7 +153,11 @@ module URBANopt
130
153
  end
131
154
  else
132
155
  row.each_with_index do |value, i|
133
- @data[@column_names[i]] << value.to_f
156
+ if i == 0
157
+ @data[@column_names[i]] << value
158
+ else
159
+ @data[@column_names[i]] << value.to_f
160
+ end
134
161
  end
135
162
  end
136
163
  end
@@ -153,7 +180,7 @@ module URBANopt
153
180
  end
154
181
  else
155
182
  row.each_with_index do |value, i|
156
- @data[@column_names[i]] << value.to_f
183
+ @data[@column_names[i]] << value
157
184
  end
158
185
  end
159
186
  end
@@ -182,6 +209,7 @@ module URBANopt
182
209
  if path.nil?
183
210
  path = @path
184
211
  end
212
+
185
213
  File.open(path, 'w') do |f|
186
214
  f.puts @column_names.join(',')
187
215
  n = @data[@column_names[0]].size - 1
@@ -214,7 +242,7 @@ module URBANopt
214
242
  ##
215
243
  def add_timeseries_csv(other)
216
244
  # initialize first_report_datetime with the incoming first_report_datetime if it's nil.
217
- if @first_report_datetime.nil?
245
+ if @first_report_datetime.nil? || @first_report_datetime == ''
218
246
  @first_report_datetime = other.first_report_datetime
219
247
  end
220
248
 
@@ -224,31 +252,44 @@ module URBANopt
224
252
  end
225
253
 
226
254
  # merge the column names
227
- @column_names = @column_names.concat(other.column_names).uniq
255
+ other_column_names = []
256
+ other.column_names.each do |n|
257
+ if !n[0, 4].casecmp('ZONE').zero?
258
+ other_column_names << n
259
+ end
260
+ end
261
+
262
+ @column_names = @column_names.concat(other_column_names).uniq
228
263
 
229
264
  # merge the column data
230
265
  other.column_names.each do |column_name|
231
- if !@column_names.include? column_name
232
- @column_names.push column_name
233
- end
234
-
235
- new_values = other.get_data(column_name)
266
+ if !column_name[0, 4].casecmp('ZONE').zero?
267
+ if !@column_names.include? column_name
268
+ @column_names.push column_name
269
+ end
236
270
 
237
- if @data.nil?
238
- @data = {}
239
- end
271
+ new_values = other.get_data(column_name)
240
272
 
241
- current_values = @data[column_name]
242
- if current_values
243
- if current_values.size != new_values.size
244
- raise 'Values of different sizes in add_timeseries_csv'
273
+ if @data.nil?
274
+ @data = {}
245
275
  end
246
- new_values.each_with_index do |value, i|
247
- new_values[i] = value + current_values[i]
276
+
277
+ current_values = @data[column_name]
278
+
279
+ if current_values
280
+ if current_values.size != new_values.size
281
+ raise 'Values of different sizes in add_timeseries_csv'
282
+ end
283
+ new_values.each_with_index do |value, i|
284
+ # aggregate all columns except the Datetime column
285
+ if column_name != 'Datetime'
286
+ new_values[i] = value.to_f + current_values[i].to_f
287
+ end
288
+ end
289
+ @data[column_name] = new_values
290
+ else
291
+ @data[column_name] = new_values
248
292
  end
249
- @data[column_name] = new_values
250
- else
251
- @data[column_name] = new_values
252
293
  end
253
294
  end
254
295
  end
@@ -0,0 +1,276 @@
1
+ # *********************************************************************************
2
+ # URBANopt, Copyright (c) 2019-2020, Alliance for Sustainable Energy, LLC, and other
3
+ # contributors. All rights reserved.
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without modification,
6
+ # are permitted provided that the following conditions are met:
7
+ #
8
+ # Redistributions of source code must retain the above copyright notice, this list
9
+ # of conditions and the following disclaimer.
10
+ #
11
+ # Redistributions in binary form must reproduce the above copyright notice, this
12
+ # list of conditions and the following disclaimer in the documentation and/or other
13
+ # materials provided with the distribution.
14
+ #
15
+ # Neither the name of the copyright holder nor the names of its contributors may be
16
+ # used to endorse or promote products derived from this software without specific
17
+ # prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
20
+ # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21
+ # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22
+ # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
23
+ # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
24
+ # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
26
+ # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
27
+ # OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
28
+ # OF THE POSSIBILITY OF SUCH DAMAGE.
29
+ # *********************************************************************************
30
+
31
+ # require 'urbanopt/scenario/scenario_post_processor_base'
32
+ require 'urbanopt/scenario/default_reports'
33
+ require 'urbanopt/scenario/default_reports/logger'
34
+
35
+ require 'csv'
36
+ require 'json'
37
+ require 'fileutils'
38
+ require 'pathname'
39
+
40
+ module URBANopt
41
+ module Scenario
42
+ class OpenDSSPostProcessor
43
+ ##
44
+ # OpenDSSPostProcessor post-processes OpenDSS results and integrates them into scenario and feature reports.
45
+ ##
46
+ # [parameters:]
47
+ # +scenario_report+ - _ScenarioBase_ - An object of Scenario_report class.
48
+ # +opendss_results_dir_name+ - _String_ - Directory name of the OpenDSS results.
49
+ def initialize(scenario_report, opendss_results_dir_name = 'opendss')
50
+ if !scenario_report.nil?
51
+ @scenario_report = scenario_report
52
+ @opendss_results_dir = File.join(@scenario_report.directory_name, opendss_results_dir_name)
53
+ else
54
+ raise 'scenario_report is not valid'
55
+ end
56
+
57
+ # hash of column_name to array of values, does not get serialized to hash
58
+ @mutex = Mutex.new
59
+
60
+ # initialize opendss data
61
+ @opendss_data = {}
62
+
63
+ # initialize feature_reports data
64
+ @feature_reports_data = {}
65
+
66
+ # initialize logger
67
+ @@logger ||= URBANopt::Scenario::DefaultReports.logger
68
+ end
69
+
70
+ # load opendss data
71
+ def load_opendss_data
72
+ # load building features data
73
+ @scenario_report.feature_reports.each do |feature_report|
74
+ # read results from opendss
75
+ opendss_csv = CSV.read(File.join(@opendss_results_dir, 'results', 'Features', feature_report.id + '.csv'))
76
+ # add results to data
77
+ @opendss_data[feature_report.id] = opendss_csv
78
+ end
79
+
80
+ ## load transformers data
81
+
82
+ # transformers results directory path
83
+ tf_results_path = File.join(@opendss_results_dir, 'results', 'Transformers')
84
+
85
+ # get transformer ids
86
+ transformer_ids = []
87
+ Dir.entries(tf_results_path.to_s).select do |f|
88
+ if !File.directory? f
89
+ fn = File.basename(f, '.csv')
90
+ transformer_ids << fn
91
+ end
92
+ end
93
+
94
+ # add transformer results to @opendss_data
95
+ transformer_ids.each do |id|
96
+ # read results from transformers
97
+ transformer_csv = CSV.read(File.join(tf_results_path, id + '.csv'))
98
+ # add results to data
99
+ @opendss_data[id] = transformer_csv
100
+ end
101
+ end
102
+
103
+ # load feature report data
104
+ def load_feature_report_data
105
+ @scenario_report.feature_reports.each do |feature_report|
106
+ # read feature results
107
+ feature_csv = CSV.read(File.join(feature_report.timeseries_csv.path))
108
+ # add results to data
109
+ @feature_reports_data[feature_report.id] = feature_csv
110
+ end
111
+ end
112
+
113
+ # load feature report data and opendss data
114
+ def load_data
115
+ # load selected opendss data
116
+ load_opendss_data
117
+ # load selected feature reports data
118
+ load_feature_report_data
119
+ end
120
+
121
+ # merge data
122
+ def merge_data(feature_report_data, opendss_data)
123
+ output = CSV.generate do |csv|
124
+ opendss_data.each_with_index do |row, i|
125
+ if row.include? 'Datetime'
126
+ row.map { |header| header.prepend('opendss_') }
127
+ end
128
+ csv << (feature_report_data[i] + row[1..-1])
129
+ end
130
+ end
131
+
132
+ return output
133
+ end
134
+
135
+ # add feature reports for transformers
136
+ def save_transformers_reports
137
+ @opendss_data.keys.each do |k|
138
+ if k.include? 'Transformer'
139
+
140
+ # create transformer directory
141
+ transformer_dir = File.join(@scenario_report.directory_name, k)
142
+ FileUtils.mkdir_p(File.join(transformer_dir, 'feature_reports'))
143
+
144
+ # write data to csv
145
+ # store under voltages and over voltages
146
+ under_voltage_hrs = 0
147
+ over_voltage_hrs = 0
148
+
149
+ transformer_csv = CSV.generate do |csv|
150
+ @opendss_data[k].each_with_index do |row, i|
151
+ csv << row
152
+
153
+ if !row[1].include? 'loading'
154
+ if row[1].to_f > 1.05
155
+ over_voltage_hrs += 1
156
+ end
157
+
158
+ if row[1].to_f < 0.95
159
+ under_voltage_hrs += 1
160
+ end
161
+ end
162
+ end
163
+ end
164
+
165
+ # save transformer CSV report
166
+ File.write(File.join(transformer_dir, 'feature_reports', 'default_feature_report_opendss' + '.csv'), transformer_csv)
167
+
168
+ # create transformer report
169
+ transformer_report = URBANopt::Scenario::DefaultReports::FeatureReport.new(id: k, name: k, directory_name: transformer_dir, feature_type: 'Transformer',
170
+ timesteps_per_hour: @scenario_report.timesteps_per_hour,
171
+ simulation_status: 'complete')
172
+
173
+ # assign results to transformer report
174
+ transformer_report.power_distribution.over_voltage_hours = over_voltage_hrs
175
+ transformer_report.power_distribution.under_voltage_hours = under_voltage_hrs
176
+
177
+ ## save transformer JSON file
178
+ # transformer_hash
179
+ transformer_hash = transformer_report.to_hash
180
+ # transformer_hash.delete_if { |k, v| v.nil? }
181
+
182
+ json_name_path = File.join(transformer_dir, 'feature_reports', 'default_feature_report_opendss' + '.json')
183
+
184
+ # save the json file
185
+ File.open(json_name_path, 'w') do |f|
186
+ f.puts JSON.pretty_generate(transformer_hash)
187
+ # make sure data is written to the disk one way or the other
188
+ begin
189
+ f.fsync
190
+ rescue StandardError
191
+ f.flush
192
+ end
193
+ end
194
+
195
+ # add transformers reports to scenario_report
196
+ @scenario_report.feature_reports << transformer_report
197
+
198
+ end
199
+ end
200
+ end
201
+
202
+ ##
203
+ # Save csv report method
204
+ ##
205
+ # [parameters:]
206
+ # +feature_report+ - _feature report object_ - An object of the feature report
207
+ # +updated_feature_report_csv+ - _CSV_ - An updated feature report csv
208
+ # +file_name+ - _String_ - Assigned name to save the file with no extension
209
+ def save_csv(feature_report, updated_feature_report_csv, file_name = 'default_feature_report')
210
+ File.write(File.join(feature_report.directory_name, 'feature_reports', "#{file_name}.csv"), updated_feature_report_csv)
211
+ end
212
+
213
+ ##
214
+ # create opendss json report results
215
+ ##
216
+ # [parameters:]
217
+ # +feature_report+ - _feature report object_ - An object of the feature report
218
+ def add_summary_results(feature_report)
219
+ under_voltage_hrs = 0
220
+ over_voltage_hrs = 0
221
+
222
+ id = feature_report.id
223
+ @opendss_data[id].each_with_index do |row, i|
224
+ if !row[1].include? 'voltage'
225
+
226
+ if row[1].to_f > 1.05
227
+ over_voltage_hrs += 1
228
+ end
229
+
230
+ if row[1].to_f < 0.95
231
+ under_voltage_hrs += 1
232
+ end
233
+
234
+ end
235
+ end
236
+
237
+ # assign results to feature report
238
+ feature_report.power_distribution.over_voltage_hours = over_voltage_hrs
239
+ feature_report.power_distribution.under_voltage_hours = under_voltage_hrs
240
+
241
+ return feature_report
242
+ end
243
+
244
+ ##
245
+ # run opendss post_processor
246
+ ##
247
+ def run
248
+ @scenario_report.feature_reports.each do |feature_report|
249
+ # load data
250
+ load_data
251
+
252
+ # puts " @opendss data = #{@opendss_data}"
253
+
254
+ # get summary results
255
+ add_summary_results(feature_report)
256
+
257
+ # merge csv data
258
+ id = feature_report.id
259
+ updated_feature_csv = merge_data(@feature_reports_data[id], @opendss_data[id])
260
+
261
+ # save feature reports
262
+ feature_report.save_feature_report('default_feature_report_opendss')
263
+
264
+ # resave updated csv report
265
+ save_csv(feature_report, updated_feature_csv, 'default_feature_report_opendss')
266
+ end
267
+
268
+ # add transformer reports
269
+ save_transformers_reports
270
+
271
+ # save the updated scenario reports
272
+ @scenario_report.save(file_name = 'scenario_report_opendss')
273
+ end
274
+ end
275
+ end
276
+ end
@@ -132,7 +132,7 @@ module URBANopt
132
132
  end
133
133
  end
134
134
 
135
- # Run osw groups in order and store simulation failure in a array.
135
+ # Run osw groups in order and store simulation failure in an array.
136
136
  # Return simulation_dirs after running all simulations.
137
137
 
138
138
  # failures
@@ -140,17 +140,33 @@ module URBANopt
140
140
  # run building_osws
141
141
  # building_failures = runner.run_osws(building_osws, num_parallel = Extension::NUM_PARALLEL, max_to_run = Extension::MAX_DATAPOINTS)
142
142
  building_failures = runner.run_osws(building_osws)
143
- failures << building_failures
143
+ failures + building_failures
144
144
  # run district_system_osws
145
145
  # district_system_failures = runner.run_osws(district_system_osws, num_parallel = Extension::NUM_PARALLEL, max_to_run = Extension::MAX_DATAPOINTS)
146
146
  district_system_failures = runner.run_osws(district_system_osws)
147
- failures << district_system_failures
147
+ failures + district_system_failures
148
148
  # run transformer_osws
149
149
  # transformer_failures = runner.run_osws(transformer_osws, num_parallel = Extension::NUM_PARALLEL, max_to_run = Extension::MAX_DATAPOINTS)
150
150
  transformer_failures = runner.run_osws(transformer_osws)
151
- failures << transformer_failures
151
+ failures + transformer_failures
152
+
153
+ puts 'Done Running Scenario'
154
+
155
+ # if failures.size > 0
156
+ # puts "DATAPOINT FAILURES: #{failures}"
157
+ # end
158
+
159
+ # look for other failed datapoints
160
+ failed_sims = []
161
+ simulation_dirs.each do |simulation_dir|
162
+ if File.exist?(File.join(simulation_dir.run_dir, 'failed.job'))
163
+ failed_sims << simulation_dir.run_dir.split('/')[-1]
164
+ end
165
+ end
166
+ if !failed_sims.empty?
167
+ puts "FAILED SIMULATION IDs: #{failed_sims.join(',')}"
168
+ end
152
169
 
153
- # puts "failures = #{failures}"
154
170
  return simulation_dirs
155
171
  end
156
172
  end