urbanopt-reopt 0.6.0 → 0.6.1

@@ -42,7 +42,7 @@ require 'logger'
 
  module URBANopt
  module REopt
- @@reopt_logger = Logger.new(STDOUT)
+ @@reopt_logger = Logger.new($stdout)
  ##
  # Definining class variable "@@logger" to log errors, info and warning messages.
  def self.reopt_logger
@@ -79,18 +79,18 @@ module URBANopt # :nodoc:
  if !scenario_report.nil?
  @scenario_report = scenario_report
 
- if !Dir.exist?(File.join(@scenario_report.directory_name, "reopt"))
- Dir.mkdir(File.join(@scenario_report.directory_name, "reopt"))
- @@logger.info("Created directory: " + File.join(@scenario_report.directory_name, "reopt"))
+ if !Dir.exist?(File.join(@scenario_report.directory_name, 'reopt'))
+ Dir.mkdir(File.join(@scenario_report.directory_name, 'reopt'))
+ @@logger.info("Created directory: #{File.join(@scenario_report.directory_name, 'reopt')}")
  end
 
  @scenario_reopt_default_output_file = File.join(@scenario_report.directory_name, "reopt/scenario_report_#{@scenario_report.id}_reopt_run.json")
  @scenario_timeseries_default_output_file = File.join(@scenario_report.directory_name, "scenario_report_#{@scenario_report.id}_timeseries.csv")
 
  @scenario_report.feature_reports.each do |fr|
- if !Dir.exist?(File.join(fr.directory_name, "reopt"))
- Dir.mkdir(File.join(fr.directory_name, "reopt"))
- @@logger.info("Created directory: " + File.join(fr.directory_name, "reopt"))
+ if !Dir.exist?(File.join(fr.directory_name, 'reopt'))
+ Dir.mkdir(File.join(fr.directory_name, 'reopt'))
+ @@logger.info("Created directory: #{File.join(fr.directory_name, 'reopt')}")
  end
  @feature_reports_reopt_default_output_files << File.join(fr.directory_name, "reopt/feature_report_#{fr.id}_reopt_run.json")
  end
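For orientation, the default output locations created above follow one pattern: a reopt/ subdirectory inside the scenario directory and inside each feature report directory. A minimal sketch, with hypothetical directory name and id (only the path layout comes from this diff; FileUtils.mkdir_p stands in for the Dir.exist?/Dir.mkdir pair):

    require 'fileutils'

    scenario_dir = 'run/baseline_scenario'   # hypothetical scenario_report.directory_name
    scenario_id  = 'baseline_scenario'       # hypothetical scenario_report.id

    # Ensure the reopt/ subdirectory exists, then build the default REopt output path.
    FileUtils.mkdir_p(File.join(scenario_dir, 'reopt'))
    scenario_output = File.join(scenario_dir, "reopt/scenario_report_#{scenario_id}_reopt_run.json")
    # => "run/baseline_scenario/reopt/scenario_report_baseline_scenario_reopt_run.json"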
@@ -113,8 +113,7 @@ module URBANopt # :nodoc:
  end
  end
 
- attr_accessor :scenario_reopt_default_assumptions_hash, :scenario_reopt_default_output_file, :scenario_timeseries_default_output_file
- attr_accessor :feature_reports_reopt_default_assumption_hashes, :feature_reports_reopt_default_output_files, :feature_reports_timeseries_default_output_files
+ attr_accessor :scenario_reopt_default_assumptions_hash, :scenario_reopt_default_output_file, :scenario_timeseries_default_output_file, :feature_reports_reopt_default_assumption_hashes, :feature_reports_reopt_default_output_files, :feature_reports_timeseries_default_output_files
 
  ##
  # Updates a FeatureReport based on an optional set of \REopt Lite optimization assumptions.
@@ -129,7 +128,7 @@ module URBANopt # :nodoc:
  #
  # [*return:*] _URBANopt::Reporting::DefaultReports::FeatureReport_ - Returns an updated FeatureReport
  ##
- def run_feature_report(feature_report:, reopt_assumptions_hash:nil, reopt_output_file:nil, timeseries_csv_path:nil, save_name:nil, run_resilience:true)
+ def run_feature_report(feature_report:, reopt_assumptions_hash: nil, reopt_output_file: nil, timeseries_csv_path: nil, save_name: nil, run_resilience: true)
  api = URBANopt::REopt::REoptLiteAPI.new(@nrel_developer_key, @localhost)
  adapter = URBANopt::REopt::FeatureReportAdapter.new
 
@@ -143,7 +142,7 @@ module URBANopt # :nodoc:
  if File.directory? reopt_output_file
  resilience_stats = api.resilience_request(run_uuid, reopt_output_file)
  else
- resilience_stats = api.resilience_request(run_uuid, reopt_output_file.sub!('.json','_resilience.json'))
+ resilience_stats = api.resilience_request(run_uuid, reopt_output_file.sub!('.json', '_resilience.json'))
  end
  else
  resilience_stats = nil
@@ -167,7 +166,7 @@ module URBANopt # :nodoc:
  # * +timeseries_csv_path+ - _String_ - Optional. Path to a file at which the new timeseries CSV for the ScenarioReport will be saved.
  #
  # [*return:*] _URBANopt::Scenario::DefaultReports::ScenarioReport_ Returns an updated ScenarioReport
- def run_scenario_report(scenario_report:, reopt_assumptions_hash:nil, reopt_output_file:nil, timeseries_csv_path:nil, save_name:nil, run_resilience:true)
+ def run_scenario_report(scenario_report:, reopt_assumptions_hash: nil, reopt_output_file: nil, timeseries_csv_path: nil, save_name: nil, run_resilience: true)
  if !reopt_assumptions_hash.nil?
  @scenario_reopt_default_assumptions_hash = reopt_assumptions_hash
  end
@@ -189,7 +188,7 @@ module URBANopt # :nodoc:
  if File.directory? @scenario_reopt_default_output_file
  resilience_stats = api.resilience_request(run_uuid, @scenario_reopt_default_output_file)
  else
- resilience_stats = api.resilience_request(run_uuid, @scenario_reopt_default_output_file.sub!('.json','_resilience.json'))
+ resilience_stats = api.resilience_request(run_uuid, @scenario_reopt_default_output_file.sub!('.json', '_resilience.json'))
  end
  else
  resilience_stats = nil
@@ -213,8 +212,7 @@ module URBANopt # :nodoc:
  # * +timeseries_csv_path+ - _Array_ - Optional. A array of paths to files at which the new timeseries CSV for the FeatureReports will be saved. The number and order of the paths should match the feature_reports array.
  #
  # [*return:*] _Array_ Returns an array of updated _URBANopt::Scenario::DefaultReports::FeatureReport_ objects
- def run_feature_reports(feature_reports:, reopt_assumptions_hashes:[], reopt_output_files:[], timeseries_csv_paths:[], save_names:nil, run_resilience:true)
-
+ def run_feature_reports(feature_reports:, reopt_assumptions_hashes: [], reopt_output_files: [], timeseries_csv_paths: [], save_names: nil, run_resilience: true, keep_existing_output: false)
  if !reopt_assumptions_hashes.empty?
  @feature_reports_reopt_default_assumption_hashes = reopt_assumptions_hashes
  end
@@ -243,36 +241,59 @@ module URBANopt # :nodoc:
  feature_adapter = URBANopt::REopt::FeatureReportAdapter.new
  new_feature_reports = []
  feature_reports.each_with_index do |feature_report, idx|
- begin
- reopt_input = feature_adapter.reopt_json_from_feature_report(feature_report, @feature_reports_reopt_default_assumption_hashes[idx])
- reopt_output = api.reopt_request(reopt_input, @feature_reports_reopt_default_output_files[idx])
- if run_resilience
- run_uuid = reopt_output['outputs']['Scenario']['run_uuid']
- if File.directory? @feature_reports_reopt_default_output_files[idx]
- resilience_stats = api.resilience_request(run_uuid, @feature_reports_reopt_default_output_files[idx])
+ # check if we should rerun
+ if !(keep_existing_output && output_exists(@feature_reports_reopt_default_output_files[idx]))
+ begin
+ reopt_input = feature_adapter.reopt_json_from_feature_report(feature_report, @feature_reports_reopt_default_assumption_hashes[idx])
+ reopt_output = api.reopt_request(reopt_input, @feature_reports_reopt_default_output_files[idx])
+ if run_resilience
+ run_uuid = reopt_output['outputs']['Scenario']['run_uuid']
+ if File.directory? @feature_reports_reopt_default_output_files[idx]
+ resilience_stats = api.resilience_request(run_uuid, @feature_reports_reopt_default_output_files[idx])
+ else
+ resilience_stats = api.resilience_request(run_uuid, @feature_reports_reopt_default_output_files[idx].sub!('.json', '_resilience.json'))
+ end
  else
- resilience_stats = api.resilience_request(run_uuid, @feature_reports_reopt_default_output_files[idx].sub!('.json','_resilience.json'))
+ resilience_stats = nil
  end
- else
- resilience_stats = nil
- end
- new_feature_report = feature_adapter.update_feature_report(feature_report, reopt_output, @feature_reports_timeseries_default_output_files[idx], resilience_stats)
- new_feature_reports.push(new_feature_report)
- if !save_names.nil?
- if save_names.length == feature_reports.length
- new_feature_report.save save_names[idx]
- else
- warn "Could not save feature reports - the number of save names provided did not match the number of feature reports"
+ new_feature_report = feature_adapter.update_feature_report(feature_report, reopt_output, @feature_reports_timeseries_default_output_files[idx], resilience_stats)
+ new_feature_reports.push(new_feature_report)
+ if !save_names.nil?
+ if save_names.length == feature_reports.length
+ new_feature_report.save save_names[idx]
+ else
+ warn 'Could not save feature reports - the number of save names provided did not match the number of feature reports'
+ end
  end
+ rescue StandardError
+ @@logger.info("Could not optimize Feature Report #{feature_report.name} #{feature_report.id}")
  end
- rescue StandardError
- @@logger.info("Could not optimize Feature Report #{feature_report.name} #{feature_report.id}")
+ else
+ puts('Output file already exists...skipping')
  end
  end
 
  return new_feature_reports
  end
 
+ # Checks whether a feature has already been run by determining if output files already exists (for rate limit issues and larger projects)
+ ##
+ #
+ # [*parameters:*]
+ #
+ # * +output_file+ - _Array_ - Optional. An array of paths to files at which REpopt Lite responses will be saved. The number and order of the paths should match the array in ScenarioReport.feature_reports.
+ # [*return:*] _Boolean_ - Returns true if file or nonempty directory exist
+ def output_exists(output_file)
+ res = false
+ if File.directory?(output_file) && !File.empty?(output_file)
+ res = true
+ elsif File.exist? output_file
+ res = true
+ end
+
+ return res
+ end
+
  # Updates a ScenarioReport based on an optional set of \REopt Lite optimization assumptions.
  ##
  #
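The new keep_existing_output flag uses output_exists above to skip any feature whose REopt response is already on disk, which helps with API rate limits on larger projects. A minimal standalone sketch of the same check (already_run? and the path below are hypothetical; the test itself mirrors output_exists):

    # Treat a non-empty directory or an existing file as "already run".
    def already_run?(output_file)
      (File.directory?(output_file) && !File.empty?(output_file)) || File.exist?(output_file)
    end

    output_file = 'run/feature_1/reopt/feature_report_1_reopt_run.json' # hypothetical path
    puts 'Output file already exists...skipping' if already_run?(output_file)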
@@ -284,22 +305,26 @@ module URBANopt # :nodoc:
  # * +feature_report_timeseries_csv_paths+ - _Array_ - Optional. An array of paths to files at which the new timeseries CSV for the FeatureReports will be saved. The number and order of the paths should match the array in ScenarioReport.feature_reports.
  #
  # [*return:*] _URBANopt::Scenario::DefaultReports::ScenarioReport_ - Returns an updated ScenarioReport
- def run_scenario_report_features(scenario_report:, reopt_assumptions_hashes:[], reopt_output_files:[], feature_report_timeseries_csv_paths:[], save_names_feature_reports:nil, save_name_scenario_report:nil, run_resilience:true)
- new_feature_reports = run_feature_reports(feature_reports:scenario_report.feature_reports, reopt_assumptions_hashes:reopt_assumptions_hashes, reopt_output_files:reopt_output_files, timeseries_csv_paths:feature_report_timeseries_csv_paths,save_names:save_names_feature_reports, run_resilience:run_resilience)
-
+ def run_scenario_report_features(scenario_report:, reopt_assumptions_hashes: [], reopt_output_files: [], feature_report_timeseries_csv_paths: [], save_names_feature_reports: nil, save_name_scenario_report: nil, run_resilience: true, keep_existing_output: false)
+ new_feature_reports = run_feature_reports(feature_reports: scenario_report.feature_reports, reopt_assumptions_hashes: reopt_assumptions_hashes, reopt_output_files: reopt_output_files, timeseries_csv_paths: feature_report_timeseries_csv_paths, save_names: save_names_feature_reports, run_resilience: run_resilience, keep_existing_output: keep_existing_output)
+ puts("KEEP EXISTING? #{keep_existing_output}")
+ # only do this if you have run feature reports
  new_scenario_report = URBANopt::Reporting::DefaultReports::ScenarioReport.new
- new_scenario_report.id = scenario_report.id
- new_scenario_report.name = scenario_report.name
- new_scenario_report.directory_name = scenario_report.directory_name
+ if !new_feature_reports.empty?
 
- timeseries_hash = { column_names: scenario_report.timeseries_csv.column_names }
- new_scenario_report.timeseries_csv = URBANopt::Reporting::DefaultReports::TimeseriesCSV.new(timeseries_hash)
+ new_scenario_report.id = scenario_report.id
+ new_scenario_report.name = scenario_report.name
+ new_scenario_report.directory_name = scenario_report.directory_name
 
- new_feature_reports.each do |feature_report|
- new_scenario_report.add_feature_report(feature_report)
- end
- if !save_name_scenario_report.nil?
- new_scenario_report.save save_name_scenario_report
+ timeseries_hash = { column_names: scenario_report.timeseries_csv.column_names }
+ new_scenario_report.timeseries_csv = URBANopt::Reporting::DefaultReports::TimeseriesCSV.new(timeseries_hash)
+
+ new_feature_reports.each do |feature_report|
+ new_scenario_report.add_feature_report(feature_report)
+ end
+ if !save_name_scenario_report.nil?
+ new_scenario_report.save save_name_scenario_report
+ end
  end
  return new_scenario_report
  end
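A hedged usage sketch of the new option: only the method name and keyword arguments come from this diff; the post_processor and scenario_report objects below are placeholders for however the surrounding class is constructed and the report is loaded in a real project.

    # post_processor: an instance of the post-processor class shown above
    # scenario_report: an URBANopt::Reporting::DefaultReports::ScenarioReport loaded elsewhere
    updated_report = post_processor.run_scenario_report_features(
      scenario_report: scenario_report,
      run_resilience: false,
      keep_existing_output: true # reuse any reopt/*_reopt_run.json already on disk
    )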
@@ -44,8 +44,10 @@ require 'urbanopt/scenario/simulation_dir_osw'
  require 'csv'
  require 'fileutils'
 
- module URBANopt # nodoc:
- module Scenario # nodoc:
+ # nodoc:
+ module URBANopt
+ # nodoc:
+ module Scenario
  class REoptScenarioCSV < ScenarioCSV
  ##
  # REoptScenarioCSV is an extension of ScenarioCSV which assigns a Simulation Mapper to each Feature in a FeatureFile using a simple CSV format.
@@ -101,10 +103,8 @@ module URBANopt # nodoc:
  mapper_class = row[2].chomp
  # Assume fourth columns, if exists, contains the name of the JSON file in the reopt_files_dir to use when running \REopt Lite for the feature report
 
- if row.length > 3
- if !@reopt_files_dir.nil?
- @reopt_feature_assumptions[idx - 1] = File.join(@reopt_files_dir, row[3].chomp)
- end
+ if row.length > 3 && !@reopt_files_dir.nil?
+ @reopt_feature_assumptions[idx - 1] = File.join(@reopt_files_dir, row[3].chomp)
  end
 
  # gets +features+ from the feature_file.
@@ -120,6 +120,6 @@ module URBANopt # nodoc:
  end
  return result
  end
- end
+ end
  end
  end
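For reference, the scenario CSV parsed above may carry an optional fourth column naming a \REopt Lite assumptions JSON inside reopt_files_dir. A minimal sketch with made-up values (only the row.length > 3 guard and the File.join come from this diff; the third column is the mapper class, as row[2] above shows, and the first two columns are assumed to follow the usual ScenarioCSV layout):

    require 'csv'

    reopt_files_dir = 'reopt'                                                   # hypothetical assumptions directory
    row = CSV.parse_line('1,Office 1,BaselineMapper,multiPV_assumptions.json')  # hypothetical CSV row

    reopt_assumption_file = nil
    # Same guard as above: only read the fourth column when it exists and a directory was configured.
    if row.length > 3 && !reopt_files_dir.nil?
      reopt_assumption_file = File.join(reopt_files_dir, row[3].chomp)
    end
    # => "reopt/multiPV_assumptions.json"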
@@ -81,22 +81,20 @@ module URBANopt # :nodoc:
  end
 
  # Update required info
- if scenario_report.location.latitude_deg.nil? || scenario_report.location.longitude_deg.nil? || (scenario_report.location.latitude_deg == 0) || (scenario_report.location.longitude_deg == 0)
- if !scenario_report.feature_reports.nil? && (scenario_report.feature_reports != [])
- lats = []
- longs = []
- scenario_report.feature_reports.each do |x|
- puts " ERROR: #{x.location.latitude_deg}"
- if ![nil].include?(x.location.latitude_deg) && ![nil].include?(x.location.longitude_deg)
- lats.push(x.location.latitude_deg)
- longs.push(x.location.longitude_deg)
- end
+ if (scenario_report.location.latitude_deg.nil? || scenario_report.location.longitude_deg.nil? || (scenario_report.location.latitude_deg == 0) || (scenario_report.location.longitude_deg == 0)) && (!scenario_report.feature_reports.nil? && (scenario_report.feature_reports != []))
+ lats = []
+ longs = []
+ scenario_report.feature_reports.each do |x|
+ puts " ERROR: #{x.location.latitude_deg}"
+ if ![nil].include?(x.location.latitude_deg) && ![nil].include?(x.location.longitude_deg)
+ lats.push(x.location.latitude_deg)
+ longs.push(x.location.longitude_deg)
  end
+ end
 
- if !lats.empty? && !longs.empty?
- scenario_report.location.latitude_deg = lats.reduce(:+) / lats.size.to_f
- scenario_report.location.longitude_deg = longs.reduce(:+) / longs.size.to_f
- end
+ if !lats.empty? && !longs.empty?
+ scenario_report.location.latitude_deg = lats.reduce(:+) / lats.size.to_f
+ scenario_report.location.longitude_deg = longs.reduce(:+) / longs.size.to_f
  end
  end
 
@@ -120,16 +118,12 @@ module URBANopt # :nodoc:
 
  # Update optional info
  # REK: attribute names should be updated
- if reopt_inputs[:Scenario][:Site][:roof_squarefeet].nil?
- if !scenario_report.program.roof_area_sqft.nil?
- reopt_inputs[:Scenario][:Site][:roof_squarefeet] = scenario_report.program.roof_area_sqft[:available_roof_area_sqft]
- end
+ if reopt_inputs[:Scenario][:Site][:roof_squarefeet].nil? && !scenario_report.program.roof_area_sqft.nil?
+ reopt_inputs[:Scenario][:Site][:roof_squarefeet] = scenario_report.program.roof_area_sqft[:available_roof_area_sqft]
  end
 
- if reopt_inputs[:Scenario][:Site][:land_acres].nil?
- if !scenario_report.program.site_area_sqft.nil?
- reopt_inputs[:Scenario][:Site][:land_acres] = scenario_report.program.site_area_sqft * 1.0 / 43560 # acres/sqft
- end
+ if reopt_inputs[:Scenario][:Site][:land_acres].nil? && !scenario_report.program.site_area_sqft.nil?
+ reopt_inputs[:Scenario][:Site][:land_acres] = scenario_report.program.site_area_sqft * 1.0 / 43560 # acres/sqft
  end
 
  if reopt_inputs[:Scenario][:time_steps_per_hour].nil?
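The land area conversion above divides square feet by 43,560, the number of square feet in an acre. A quick sanity check with a hypothetical site area:

    site_area_sqft = 87_120.0                    # hypothetical site area
    land_acres = site_area_sqft * 1.0 / 43560    # same factor as above
    # => 2.0 acres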
@@ -140,17 +134,17 @@ module URBANopt # :nodoc:
  begin
  col_num = scenario_report.timeseries_csv.column_names.index('Electricity:Facility(kWh)')
  t = CSV.read(scenario_report.timeseries_csv.path, headers: true, converters: :numeric)
- energy_timeseries_kw = t.by_col[col_num].map { |e| ((e * scenario_report.timesteps_per_hour || 0) ) }
+ energy_timeseries_kw = t.by_col[col_num].map { |e| ((e * scenario_report.timesteps_per_hour || 0)) }
  if energy_timeseries_kw.length < (scenario_report.timesteps_per_hour * 8760)
- start_date = Time.parse(t.by_col["Datetime"][0])
+ start_date = Time.parse(t.by_col['Datetime'][0])
  start_ts = (((start_date.yday * 60.0 * 60.0 * 24) + (start_date.hour * 60.0 * 60.0) + (start_date.min * 60.0) + start_date.sec) / \
- (( 60 / scenario_report.timesteps_per_hour ) * 60)).to_int
- end_date = Time.parse(t.by_col["Datetime"][-1])
+ ((60 / scenario_report.timesteps_per_hour) * 60)).to_int
+ end_date = Time.parse(t.by_col['Datetime'][-1])
  end_ts = (((end_date.yday * 60.0 * 60.0 * 24) + (end_date.hour * 60.0 * 60.0) + (end_date.min * 60.0) + end_date.sec) / \
- (( 60 / scenario_report.timesteps_per_hour ) * 60)).to_int
- energy_timeseries_kw = [0.0]*(start_ts-1) + energy_timeseries_kw + [0.0]*((scenario_report.timesteps_per_hour * 8760) - end_ts)
+ ((60 / scenario_report.timesteps_per_hour) * 60)).to_int
+ energy_timeseries_kw = [0.0] * (start_ts - 1) + energy_timeseries_kw + [0.0] * ((scenario_report.timesteps_per_hour * 8760) - end_ts)
  end
- energy_timeseries_kw = energy_timeseries_kw.map { |e| e ? e : 0 }[0,(scenario_report.timesteps_per_hour * 8760)]
+ energy_timeseries_kw = energy_timeseries_kw.map { |e| e || 0 }[0, (scenario_report.timesteps_per_hour * 8760)]
  rescue StandardError
  @@logger.error("Could not parse the annual electric load from the timeseries csv - #{scenario_report.timeseries_csv.path}")
  raise "Could not parse the annual electric load from the timeseries csv - #{scenario_report.timeseries_csv.path}"
@@ -159,7 +153,7 @@ module URBANopt # :nodoc:
  # Convert load to REopt Resolution
  begin
  reopt_inputs[:Scenario][:Site][:LoadProfile][:loads_kw] = convert_powerflow_resolution(energy_timeseries_kw, scenario_report.timesteps_per_hour, reopt_inputs[:Scenario][:time_steps_per_hour])
- rescue
+ rescue StandardError
  @@logger.error("Could not convert the annual electric load from a resolution of #{scenario_report.timesteps_per_hour} to #{reopt_inputs[:Scenario][:time_steps_per_hour]}")
  raise "Could not convert the annual electric load from a resolution of #{scenario_report.timesteps_per_hour} to #{reopt_inputs[:Scenario][:time_steps_per_hour]}"
  end
@@ -167,9 +161,9 @@ module URBANopt # :nodoc:
  if reopt_inputs[:Scenario][:Site][:ElectricTariff][:coincident_peak_load_active_timesteps].nil?
  n_top_values = 100
  tmp1 = reopt_inputs[:Scenario][:Site][:LoadProfile][:loads_kw]
- tmp2 = tmp1.each_index.max_by(n_top_values*reopt_inputs[:Scenario][:time_steps_per_hour]){|i| tmp1[i]}
+ tmp2 = tmp1.each_index.max_by(n_top_values * reopt_inputs[:Scenario][:time_steps_per_hour]) { |i| tmp1[i] }
  for i in (0...tmp2.count)
- tmp2[i] += 1
+ tmp2[i] += 1
  end
  reopt_inputs[:Scenario][:Site][:ElectricTariff][:coincident_peak_load_active_timesteps] = tmp2
  end
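The default coincident-peak logic above takes the indices of the highest load values and shifts them to 1-based timesteps. A tiny illustration of the same pattern on a short, made-up load array (using a map instead of the for loop):

    loads_kw = [2.0, 9.5, 3.1, 8.7, 1.2]   # hypothetical load profile
    n_top = 2
    # Indices of the n_top largest values, then +1 to convert to 1-based timesteps.
    peaks = loads_kw.each_index.max_by(n_top) { |i| loads_kw[i] }.map { |i| i + 1 }
    # => [2, 4] (the 9.5 kW and 8.7 kW timesteps)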
@@ -181,7 +175,6 @@ module URBANopt # :nodoc:
  return reopt_inputs
  end
 
-
  ##
  # Converts a FeatureReport list from a ScenarioReport into an array of \REopt Lite posts
  #
@@ -215,7 +208,7 @@ module URBANopt # :nodoc:
  #
  # [*return:*] _URBANopt::Reporting::DefaultReports::ScenarioReport_ - Returns an updated ScenarioReport
  ##
- def update_scenario_report(scenario_report, reopt_output, timeseries_csv_path=nil, resilience_stats=nil)
+ def update_scenario_report(scenario_report, reopt_output, timeseries_csv_path = nil, resilience_stats = nil)
  if reopt_output['outputs']['Scenario']['status'] != 'optimal'
  @@logger.info("Warning cannot Feature Report #{scenario_report.name} #{scenario_report.id} - REopt optimization was non-optimal")
  return scenario_report
@@ -248,56 +241,46 @@ module URBANopt # :nodoc:
  scenario_report.distributed_generation.probs_of_surviving_by_hour_of_the_day = resilience_stats['probs_of_surviving_by_hour_of_the_day']
  end
 
- if reopt_output['outputs']['Scenario']['Site']['PV'].class == Hash
+ if reopt_output['outputs']['Scenario']['Site']['PV'].instance_of?(Hash)
  reopt_output['outputs']['Scenario']['Site']['PV'] = [reopt_output['outputs']['Scenario']['Site']['PV']]
  elsif reopt_output['outputs']['Scenario']['Site']['PV'].nil?
  reopt_output['outputs']['Scenario']['Site']['PV'] = []
  end
 
  reopt_output['outputs']['Scenario']['Site']['PV'].each_with_index do |pv, i|
- scenario_report.distributed_generation.add_tech 'solar_pv', URBANopt::Reporting::DefaultReports::SolarPV.new( {size_kw: (pv['size_kw'] || 0), id: i })
+ scenario_report.distributed_generation.add_tech 'solar_pv', URBANopt::Reporting::DefaultReports::SolarPV.new({ size_kw: (pv['size_kw'] || 0), id: i })
  end
 
  wind = reopt_output['outputs']['Scenario']['Site']['Wind']
- if !wind['size_kw'].nil? and wind['size_kw'] != 0
- scenario_report.distributed_generation.add_tech 'wind', URBANopt::Reporting::DefaultReports::Wind.new( {size_kw: (wind['size_kw'] || 0) })
+ if !wind['size_kw'].nil? && (wind['size_kw'] != 0)
+ scenario_report.distributed_generation.add_tech 'wind', URBANopt::Reporting::DefaultReports::Wind.new({ size_kw: (wind['size_kw'] || 0) })
  end
 
  generator = reopt_output['outputs']['Scenario']['Site']['Generator']
- if !generator['size_kw'].nil? and generator['size_kw'] != 0
- scenario_report.distributed_generation.add_tech 'generator', URBANopt::Reporting::DefaultReports::Generator.new( {size_kw: (generator['size_kw'] || 0) })
+ if !generator['size_kw'].nil? && (generator['size_kw'] != 0)
+ scenario_report.distributed_generation.add_tech 'generator', URBANopt::Reporting::DefaultReports::Generator.new({ size_kw: (generator['size_kw'] || 0) })
  end
 
  storage = reopt_output['outputs']['Scenario']['Site']['Storage']
- if !storage['size_kw'].nil? and storage['size_kw'] != 0
- scenario_report.distributed_generation.add_tech 'storage', URBANopt::Reporting::DefaultReports::Storage.new( {size_kwh: (storage['size_kwh'] || 0), size_kw: (storage['size_kw'] || 0) })
+ if !storage['size_kw'].nil? && (storage['size_kw'] != 0)
+ scenario_report.distributed_generation.add_tech 'storage', URBANopt::Reporting::DefaultReports::Storage.new({ size_kwh: (storage['size_kwh'] || 0), size_kw: (storage['size_kw'] || 0) })
  end
 
  reopt_resolution = reopt_output['inputs']['Scenario']['time_steps_per_hour']
  generation_timeseries_kwh = Matrix[[0] * (8760 * scenario_report.timesteps_per_hour)]
 
  reopt_output['outputs']['Scenario']['Site']['PV'].each do |pv|
- if (pv['size_kw'] || 0) > 0
- if !pv['year_one_power_production_series_kw'].nil?
- generation_timeseries_kwh += Matrix[convert_powerflow_resolution(pv['year_one_power_production_series_kw'], reopt_resolution, scenario_report.timesteps_per_hour)]
- end
+ if (pv['size_kw'] || 0) > 0 && !pv['year_one_power_production_series_kw'].nil?
+ generation_timeseries_kwh += Matrix[convert_powerflow_resolution(pv['year_one_power_production_series_kw'], reopt_resolution, scenario_report.timesteps_per_hour)]
  end
- end
+ end
 
- unless reopt_output['outputs']['Scenario']['Site']['Wind'].nil?
- if (reopt_output['outputs']['Scenario']['Site']['Wind']['size_kw'] || 0) > 0
- if !reopt_output['outputs']['Scenario']['Site']['Wind']['year_one_power_production_series_kw'].nil?
- generation_timeseries_kwh += Matrix[convert_powerflow_resolution(reopt_output['outputs']['Scenario']['Site']['Wind']['year_one_power_production_series_kw'], reopt_resolution, scenario_report.timesteps_per_hour)]
- end
- end
+ if !reopt_output['outputs']['Scenario']['Site']['Wind'].nil? && ((reopt_output['outputs']['Scenario']['Site']['Wind']['size_kw'] || 0) > 0) && !reopt_output['outputs']['Scenario']['Site']['Wind']['year_one_power_production_series_kw'].nil?
+ generation_timeseries_kwh += Matrix[convert_powerflow_resolution(reopt_output['outputs']['Scenario']['Site']['Wind']['year_one_power_production_series_kw'], reopt_resolution, scenario_report.timesteps_per_hour)]
  end
 
- unless reopt_output['outputs']['Scenario']['Site']['Generator'].nil?
- if (reopt_output['outputs']['Scenario']['Site']['Generator']['size_kw'] || 0) > 0
- if !reopt_output['outputs']['Scenario']['Site']['Generator']['year_one_power_production_series_kw'].nil?
- generation_timeseries_kwh += Matrix[convert_powerflow_resolution(reopt_output['outputs']['Scenario']['Site']['Generator']['year_one_power_production_series_kw'], reopt_resolution, scenario_report.timesteps_per_hour)]
- end
- end
+ if !reopt_output['outputs']['Scenario']['Site']['Generator'].nil? && ((reopt_output['outputs']['Scenario']['Site']['Generator']['size_kw'] || 0) > 0) && !reopt_output['outputs']['Scenario']['Site']['Generator']['year_one_power_production_series_kw'].nil?
+ generation_timeseries_kwh += Matrix[convert_powerflow_resolution(reopt_output['outputs']['Scenario']['Site']['Generator']['year_one_power_production_series_kw'], reopt_resolution, scenario_report.timesteps_per_hour)]
  end
 
  $generation_timeseries_kwh = generation_timeseries_kwh.to_a[0] || [0] * (8760 * scenario_report.timesteps_per_hour)
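The aggregation above keeps the total generation profile in a stdlib Matrix so PV, wind, and generator series can be summed element-wise with +=. A tiny illustration of that pattern with made-up four-step series:

    require 'matrix'

    total_kwh = Matrix[[0] * 4]                 # analogous to generation_timeseries_kwh
    total_kwh += Matrix[[1.0, 2.0, 0.5, 0.0]]   # e.g. a PV production series
    total_kwh += Matrix[[0.0, 0.5, 0.5, 1.0]]   # e.g. a generator production series
    total_kwh.to_a[0]
    # => [1.0, 2.5, 1.0, 1.0]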
@@ -476,13 +459,13 @@ module URBANopt # :nodoc:
  start_ts = (
  (
  ((start_date.yday - 1) * 60.0 * 60.0 * 24) +
- (((start_date.hour) - 1) * 60.0 * 60.0) +
- (start_date.min * 60.0) + start_date.sec ) /
- (( 60 / scenario_report.timesteps_per_hour ) * 60)
+ ((start_date.hour - 1) * 60.0 * 60.0) +
+ (start_date.min * 60.0) + start_date.sec) /
+ ((60 / scenario_report.timesteps_per_hour) * 60)
  ).to_int
  mod_data = old_data.map.with_index do |x, i|
  if i > 0
- modrow(x, start_ts + i -1)
+ modrow(x, start_ts + i - 1)
  else
  x
  end
@@ -1,111 +1,111 @@
- def convert_powerflow_resolution(timeseries_kw, original_res, destination_res)
- if timeseries_kw.nil?
- return nil
- end
-
- if timeseries_kw.length == 0
- return nil
- end
-
- if original_res > destination_res
- # Timesteps will be reduced, i.e. 35040 -> 8760
-
- # This algorithm works by stepping along the origin timeseries at an interval equal to
- # one timestep in the destintion timeseries and then averaging all origin values that
- # coincide with the interval. Averages are weighted if a destination timestep
- # only partaially overlaps an origin timestep.
-
- # EX 1
- # stepping interval 2
- # origin stepping | 1 | 2 | 2 | 4 |
- # destination stepping | 1.5 | 3 |
+ def convert_powerflow_resolution(timeseries_kw, original_res, destination_res)
+ if timeseries_kw.nil?
+ return nil
+ end
 
- # EX 2
- # stepping interval 2.5
- # origin stepping | 1 | 1 | 4 | 2 | 2 |
- # destination stepping | 1.6 | 2.4 |
-
- result = []
- stepping_interval = Float(original_res) / Float(destination_res)
- current_origin_ts = 0 # fraction stepped along the origin time series
- current_origin_idx = 0 # current integer index of the origin timeseries
- (0..(8760*destination_res-1)).each do |ts|
- next_stopping_ts = current_origin_ts + stepping_interval #stop at the next destination interval
- total_power = [] #create to store wieghted origin timestep values to average
- while current_origin_ts != next_stopping_ts do
- next_origin_ts_int = Integer(current_origin_ts) + 1
- # Calc next stopping point that will being you to the next origin or destination time step
- next_origin_ts = [next_origin_ts_int, next_stopping_ts].min
- # Calc next step length
- delta_to_next_origin_ts = next_origin_ts - current_origin_ts
- # Add the proportional origin timestep value to the total power variable
- total_power.push(Float(timeseries_kw[current_origin_idx]) * delta_to_next_origin_ts)
- # Only move on to the next origin timestep if you are not ending mid way though an origin timestep
- # i.e in EX 2 above, the value 4 is needed in destination timestep 1 and 2
- if next_origin_ts_int <= next_stopping_ts
- current_origin_idx += 1
- end
- # Step to the next stopping point
- current_origin_ts += delta_to_next_origin_ts
- end
- # Add averaged total power variable for the destination time step
- result.push(Float(total_power.sum) / stepping_interval)
- end
- end
- if destination_res > original_res
- #Timesteps will be expanded, i.e. 8760 -> 35040
+ if timeseries_kw.empty?
+ return nil
+ end
+
+ if original_res > destination_res
+ # Timesteps will be reduced, i.e. 35040 -> 8760
+
+ # This algorithm works by stepping along the origin timeseries at an interval equal to
+ # one timestep in the destintion timeseries and then averaging all origin values that
+ # coincide with the interval. Averages are weighted if a destination timestep
+ # only partaially overlaps an origin timestep.
+
+ # EX 1
+ # stepping interval 2
+ # origin stepping | 1 | 2 | 2 | 4 |
+ # destination stepping | 1.5 | 3 |
 
- # This algorithm works by stepping along the destination timeseries. Steps are made to the next
- # destination or origin breakpoint, and at each step the propotional amount of the origin stepped
- # is added to the destination. For example, in in EX 1 below 4 steps are made each with adding the full amount of
- # the origin (1, 1, 2 and 2) since each in the destination overlaps perfectly with 2 origin
- # timesteps. In EX 2, the origin overlaps with the first 2 destination timesteps but the third
- # destination value much be compose of half the 1st origin timestep value and half the second
- # (i.e 4, 4, (4 * 1/2) + (3 * 1/2), 3, and 3 are added to the destination).
-
- # EX 1
- # stepping interval 2
- # origin stepping | 1 | 2 |
- # destination stepping | 1 | 1 | 2 | 2 |
+ # EX 2
+ # stepping interval 2.5
+ # origin stepping | 1 | 1 | 4 | 2 | 2 |
+ # destination stepping | 1.6 | 2.4 |
 
- # EX 2
- # stepping interval 2.5
- # origin stepping | 4 | 3 |
- # destination stepping | 4 | 4 | 3.5 | 3 | 3 |
-
- result = []
- stepping_interval = (Float(destination_res) / Float(original_res))
- current_destination_ts = 0 # fraction stepped along the origin time series
- (0..(8760*original_res-1)).each do |original_ts|
- # keep track of step length along the destination time series
- original_indices_stepped = 0
- # See if you are start in the middle of a destination time step and add the proportional
- # value to the most recent (and incomplete) destination value
- remainder = (current_destination_ts - Integer(current_destination_ts))
- if remainder > 0
- current_destination_ts += (1 - remainder)
- original_indices_stepped += (1 - remainder)
- result[-1] = result[-1] + (Float(timeseries_kw[original_ts])*(1-remainder))
- end
- # Make whole steps along the destination timeseries that overlap perfectly with the
- # origin timeseries
- while (original_indices_stepped < stepping_interval) and ((original_indices_stepped + 1) <= stepping_interval)
- result.push(Float(timeseries_kw[original_ts]))
- original_indices_stepped += 1
- current_destination_ts += 1
- end
- # See if you need to end your step in the middle of a destination time step and
- # just add the proportional value from the current origin timestep
- remainder = (stepping_interval - original_indices_stepped)
- if remainder > 0
- result.push((Float(timeseries_kw[original_ts]) * remainder))
- current_destination_ts += remainder
- end
- end
+ result = []
+ stepping_interval = Float(original_res) / Float(destination_res)
+ current_origin_ts = 0 # fraction stepped along the origin time series
+ current_origin_idx = 0 # current integer index of the origin timeseries
+ (0..(8760 * destination_res - 1)).each do |ts|
+ next_stopping_ts = current_origin_ts + stepping_interval # stop at the next destination interval
+ total_power = [] # create to store wieghted origin timestep values to average
+ while current_origin_ts != next_stopping_ts
+ next_origin_ts_int = Integer(current_origin_ts) + 1
+ # Calc next stopping point that will being you to the next origin or destination time step
+ next_origin_ts = [next_origin_ts_int, next_stopping_ts].min
+ # Calc next step length
+ delta_to_next_origin_ts = next_origin_ts - current_origin_ts
+ # Add the proportional origin timestep value to the total power variable
+ total_power.push(Float(timeseries_kw[current_origin_idx]) * delta_to_next_origin_ts)
+ # Only move on to the next origin timestep if you are not ending mid way though an origin timestep
+ # i.e in EX 2 above, the value 4 is needed in destination timestep 1 and 2
+ if next_origin_ts_int <= next_stopping_ts
+ current_origin_idx += 1
+ end
+ # Step to the next stopping point
+ current_origin_ts += delta_to_next_origin_ts
+ end
+ # Add averaged total power variable for the destination time step
+ result.push(Float(total_power.sum) / stepping_interval)
  end
- if destination_res == original_res
- #No resolution conversion necessary
- result = timeseries_kw
+ end
+ if destination_res > original_res
+ # Timesteps will be expanded, i.e. 8760 -> 35040
+
+ # This algorithm works by stepping along the destination timeseries. Steps are made to the next
+ # destination or origin breakpoint, and at each step the propotional amount of the origin stepped
+ # is added to the destination. For example, in in EX 1 below 4 steps are made each with adding the full amount of
+ # the origin (1, 1, 2 and 2) since each in the destination overlaps perfectly with 2 origin
+ # timesteps. In EX 2, the origin overlaps with the first 2 destination timesteps but the third
+ # destination value much be compose of half the 1st origin timestep value and half the second
+ # (i.e 4, 4, (4 * 1/2) + (3 * 1/2), 3, and 3 are added to the destination).
+
+ # EX 1
+ # stepping interval 2
+ # origin stepping | 1 | 2 |
+ # destination stepping | 1 | 1 | 2 | 2 |
+
+ # EX 2
+ # stepping interval 2.5
+ # origin stepping | 4 | 3 |
+ # destination stepping | 4 | 4 | 3.5 | 3 | 3 |
+
+ result = []
+ stepping_interval = (Float(destination_res) / Float(original_res))
+ current_destination_ts = 0 # fraction stepped along the origin time series
+ (0..(8760 * original_res - 1)).each do |original_ts|
+ # keep track of step length along the destination time series
+ original_indices_stepped = 0
+ # See if you are start in the middle of a destination time step and add the proportional
+ # value to the most recent (and incomplete) destination value
+ remainder = (current_destination_ts - Integer(current_destination_ts))
+ if remainder > 0
+ current_destination_ts += (1 - remainder)
+ original_indices_stepped += (1 - remainder)
+ result[-1] = result[-1] + (Float(timeseries_kw[original_ts]) * (1 - remainder))
+ end
+ # Make whole steps along the destination timeseries that overlap perfectly with the
+ # origin timeseries
+ while (original_indices_stepped < stepping_interval) && ((original_indices_stepped + 1) <= stepping_interval)
+ result.push(Float(timeseries_kw[original_ts]))
+ original_indices_stepped += 1
+ current_destination_ts += 1
+ end
+ # See if you need to end your step in the middle of a destination time step and
+ # just add the proportional value from the current origin timestep
+ remainder = (stepping_interval - original_indices_stepped)
+ if remainder > 0
+ result.push((Float(timeseries_kw[original_ts]) * remainder))
+ current_destination_ts += remainder
+ end
  end
- return result
- end
+ end
+ if destination_res == original_res
+ # No resolution conversion necessary
+ result = timeseries_kw
+ end
+ return result
+ end
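To make the EX 1 / EX 2 comments concrete, here is a hedged, simplified re-implementation of the downsampling case for short arrays (the gem's convert_powerflow_resolution always walks a full 8760 * resolution year, so this toy version, downsample, only illustrates the weighted-average idea):

    # Toy downsampler: averages `factor` origin values per destination step,
    # weighting origin steps that straddle a destination boundary (factor may be fractional).
    def downsample(series, factor)
      out_len = (series.length / factor).round
      (0...out_len).map do |d|
        lo = d * factor
        hi = lo + factor
        total = 0.0
        (lo.floor...hi.ceil).each do |i|
          overlap = [hi, i + 1].min - [lo, i].max
          total += series[i] * overlap
        end
        total / factor
      end
    end

    downsample([1, 2, 2, 4], 2)        # => [1.5, 3.0]  (EX 1)
    downsample([1, 1, 4, 2, 2], 2.5)   # => [1.6, 2.4]  (EX 2)

The expansion case in the gem works the other way around, repeating each origin value across whole destination steps and splitting boundary steps proportionally, as the second set of EX comments describes.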