openstudio-workflow 0.0.4 → 0.1.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +19 -1
- data/README.md +21 -12
- data/Rakefile +11 -6
- data/lib/openstudio-workflow.rb +30 -4
- data/lib/openstudio/workflow/adapter.rb +64 -0
- data/lib/openstudio/workflow/adapters/local.rb +3 -25
- data/lib/openstudio/workflow/adapters/mongo.rb +49 -71
- data/lib/openstudio/workflow/jobs/lib/apply_measures.rb +51 -32
- data/lib/openstudio/workflow/jobs/run_energyplus/run_energyplus.rb +102 -39
- data/lib/openstudio/workflow/jobs/run_openstudio/run_openstudio.rb +8 -8
- data/lib/openstudio/workflow/jobs/run_postprocess/run_postprocess.rb +4 -1
- data/lib/openstudio/workflow/jobs/run_reporting_measures/run_reporting_measures.rb +3 -5
- data/lib/openstudio/workflow/jobs/run_runmanager/run_runmanager.rb +2 -2
- data/lib/openstudio/workflow/jobs/run_xml/run_xml.rb +6 -7
- data/lib/openstudio/workflow/run.rb +4 -6
- data/lib/openstudio/workflow/time_logger.rb +8 -8
- data/lib/openstudio/workflow/version.rb +1 -1
- metadata +13 -13
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e81fcf5450aa2cf5f13bc03f0a253f14b95d74e9
+  data.tar.gz: 90a64f53393b213e23b9961a3096895de4fe4f4b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6d95bdac9f31a85775003125291db53cabf63be1bf2a054fe79def4a1281f05f422b68cd23d4cc2f37132d286288ed0cdcf610b790ea7c92103f42ae20499dd2
+  data.tar.gz: 22491f3a0392364381028ef6287cf0dc2a4a837388da2a62013caa981799cb797f98a2150176b05b6055f87fb350230609e99c95c7ca7ccfc301435111758636
data/CHANGELOG.md CHANGED

@@ -1,7 +1,25 @@
 OpenStudio::Workflow Change Log
 ==================================
 
-Version 0.
+Version 0.1.1
+------------------
+* Catch exception when loading a measure file.
+* Enable running simulations on windows and mac
+* Use rubyzip gem instead of system call
+* Fix the double directory which caused the zip files to behave strangely on windows
+* New find_energyplus method which tries to use OpenStudio's version of EnergyPlus
+* Copy all EnergyPlus files into run directory
+* Better cleanup after EnergyPlus simulation
+* Read machine information from /etc/openstudio-server directory, if available
+
+
+Version 0.1.0
+-------------
+* Tests for programmatically creating the analysis.json files from the OpenStudio-analysis-gem
+* Upgrade to EnergyPlus 8.2. Right now the run energyplus and run runmanager job hard code these paths.
+* Upgrade and fix Facter facts to grab the correct ip address when running on EC2
+
+Version 0.0.4
 -------------
 * Include rubyXL gem for reading/writing MS Excel files
 * Remove invalid characters from OpenStudio Measure Attributes. /[|!@#\$%^&\*\(\)\{\}\\\[\]|;:'",<.>\/?\+=]+/
data/README.md CHANGED

@@ -1,19 +1,19 @@
 # OpenStudio::Workflow
+[![Dependency Status](https://www.versioneye.com/user/projects/5531fb7b10e714121100102e/badge.svg?style=flat)](https://www.versioneye.com/user/projects/5531fb7b10e714121100102e)
 
 Run an EnergyPlus simulation using a file-based workflow that is read from a Local or MongoDB adapter.
 
 ## Installation
 
-
+The OpenStudio Workflow Gem has the following dependencies:
 
 * Ruby 2.0
 * OpenStudio with Ruby 2.0 bindings
-* EnergyPlus 8.
+* EnergyPlus 8.2 (assuming OpenStudio >= 1.5.4)
 * MongoDB if using MongoDB Adapter (or when running rspec)
 
 [OpenStudio](http://developer.nrel.gov/downloads/buildings/openstudio/builds/) needs to be installed
-and in your path. On Mac/Linux it is easiest to add the following to your .bash_profile or /etc/profile.d
-to make sure that OpenStudio can be loaded.
+and in your path. On Mac/Linux it is easiest to add the following to your .bash_profile or /etc/profile.d/<file>.sh to ensure OpenStudio can be loaded.
 
     export OPENSTUDIO_ROOT=/usr/local
     export RUBYLIB=$OPENSTUDIO_ROOT/lib/ruby/site_ruby/2.0.0

@@ -24,18 +24,27 @@ Add this line to your application's Gemfile:
 
 Use this line if you want the bleeding edge:
 
-    gem 'OpenStudio-workflow', :
+    gem 'OpenStudio-workflow', github: 'NREL/OpenStudio-workflow-gem', branch: 'EnergyPlus-8.2.0'
 
 And then execute:
+
+Mac/Linux:
 
-
+    $ bundle
+
+Windows (avoids native extensions):
+
+    $ bundle install --without xml profile
 
 Or install it yourself as:
-
+
     $ gem install OpenStudio-workflow
-
+
 ## Usage
 
+Note that the branches of the Workflow Gem depict which version of EnergyPlus is in use. The develop branch at the
+moment should not be used.
+
 There are currently two adapters to run OpenStudio workflow. The first is a simple Local adapter
 allowing the user to pass in the directory to simulation. The directory must have an
 [analysis/problem JSON file](spec/files/local_ex1/analysis_1.json) and a [datapoint JSON file](spec/files/local_ex1/datapoint_1.json).

@@ -57,11 +66,11 @@ The workflow manager can also use MongoDB to receive instructions on the workflo
 
 ### Todos
 
-* Read the analysis.json file to determine the states that are going to run instead of (or
+* Read the analysis.json file to determine the states that are going to run instead of (or in addition to) passing them into the constructor
 * Implement better error handling with custom exception classes
-* Implement a different measure directory, seed model directory, and weather file directory option
+* ~Implement a different measure directory, seed model directory, and weather file directory option~
 * ~Dynamically add other "states" to the workflow~
-* Create and change into a unique directory when running measures
+* ~~Create and change into a unique directory when running measures~~
 * ~~Implement Error State~~
 * ~~Implement MongoDB Adapter~~
 * ~~Implement remaining Adapter states (i.e. communicate success, communicate failure etc~~

@@ -108,7 +117,7 @@ If you are testing changes to OpenStudio source code and want to test these on t
 ** echo 'export CC=/usr/bin/clang-3.5' >> ~/.bashrc
 ** echo 'export CXX=/usr/bin/clang++-3.5' >> ~/.bashrc
 ** source ~/.bashrc
-
+* cd /home/vagrant
 * git clone https://github.com/NREL/OpenStudio.git openstudio
 * cd openstudio
 * git checkout your_branch_name
data/Rakefile CHANGED

@@ -1,5 +1,5 @@
 require 'bundler'
-
+
 begin
   Bundler.setup
 rescue Bundler::BundlerError => e

@@ -8,16 +8,21 @@ rescue Bundler::BundlerError => e
   exit e.status_code
 end
 
-require 'rake'
-require 'rspec/core'
 require 'rspec/core/rake_task'
 
-#
-
+# Always create spec reports
+require 'ci/reporter/rake/rspec'
+
+# Gem tasks
+require 'bundler/gem_tasks'
+
+RSpec::Core::RakeTask.new('spec:unit') do |spec|
   spec.rspec_opts = %w(--format progress --format CI::Reporter::RSpec)
   spec.pattern = FileList['spec/**/*_spec.rb']
 end
 
+task 'spec:unit' => 'ci:setup:rspec'
+
 require 'rubocop/rake_task'
 desc 'Run RuboCop on the lib directory'
 RuboCop::RakeTask.new(:rubocop) do |task|

@@ -28,4 +33,4 @@ RuboCop::RakeTask.new(:rubocop) do |task|
   task.fail_on_error = false
 end
 
-task default:
+task default: 'spec:unit'
data/lib/openstudio-workflow.rb CHANGED

@@ -22,14 +22,15 @@ require 'rubyXL'
 require 'multi_json'
 require 'colored'
 require 'fileutils'
-require 'securerandom'
+require 'securerandom' # uuids
 require 'json' # needed for a single pretty generate call
 require 'pathname'
+require 'mkmf' # for finding files
 
 begin
   require 'facter'
 rescue LoadError => e
-
+  warn 'Could not load Facter. Will not be able to save the IP address to the log'.red
 end
 
 require 'openstudio/workflow/version'

@@ -86,8 +87,12 @@ module OpenStudio
   end
 
   # predefined method that simply runs EnergyPlus in the specified directory. It does not apply any workflow steps
-  # such as preprocessing / postprocessing.
-  #
+  # such as preprocessing / postprocessing. The directory must have the IDF and EPW file in the folder. The
+  # simulations will run in the directory/run path
+  #
+  # @param adapter_name [String] Type of adapter, local or mongo.
+  # @param run_directory [String] Path to where the simulation is to run
+  # @param options [Hash] List of options for the adapter
   def run_energyplus(adapter_name, run_directory, options = {})
     unless (Pathname.new run_directory).absolute?
       # relative to wherever you are running the script

@@ -114,9 +119,30 @@ module OpenStudio
 
     adapter = load_adapter adapter_name, options[:adapter_options]
     run_klass = OpenStudio::Workflow::Run.new(adapter, run_directory, options)
+
     run_klass
   end
 
+  # Extract an archive to a specific location
+  # @param archive_filename [String] Path and name of the file to extract
+  # @param destination [String] Path to extract to
+  # @param overwrite [Boolean] If true, will overwrite any extracted file that may already exist
+  def extract_archive(archive_filename, destination, overwrite = true)
+    Zip::File.open(archive_filename) do |zf|
+      zf.each do |f|
+        f_path = File.join(destination, f.name)
+        FileUtils.mkdir_p(File.dirname(f_path))
+
+        if File.exist?(f_path) && overwrite
+          FileUtils.rm_rf(f_path)
+          zf.extract(f, f_path)
+        elsif !File.exist? f_path
+          zf.extract(f, f_path)
+        end
+      end
+    end
+  end
+
   private
 
   def load_adapter(name, adapter_options = {})
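A short usage sketch of the two helpers added above. This is hedged: it assumes run_energyplus and extract_archive are exposed as module-level methods, as this file suggests, and the archive name, destination directory, and adapter name are placeholders rather than values taken from the gem.

    # Hedged usage sketch of the helpers shown in the hunks above; paths are illustrative.
    require 'openstudio-workflow'

    # Unpack a previously zipped data point (rubyzip-based); the third argument
    # overwrites files that already exist at the destination.
    OpenStudio::Workflow.extract_archive('data_point.zip', './extracted_point', true)

    # Build a Run object that will execute EnergyPlus in ./extracted_point/run;
    # the directory is expected to already contain the IDF and EPW files.
    run = OpenStudio::Workflow.run_energyplus('local', './extracted_point')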
data/lib/openstudio/workflow/adapter.rb CHANGED

@@ -26,6 +26,7 @@ module OpenStudio
     def initialize(options = {})
       @options = options
       @log = nil
+      @datapoint = nil
     end
 
     # class << self

@@ -62,6 +63,69 @@ module OpenStudio
     def get_logger(file, options = {})
       instance.get_logger file, options
     end
+
+    protected
+
+    # Zip up a folder and it's contents
+    def zip_directory(directory, zip_filename, pattern = '*')
+      # Submethod for adding the directory to the zip folder.
+      def add_directory_to_zip(zip_file, local_directory, root_directory)
+        Dir[File.join("#{local_directory}", '**', '**')].each do |file|
+          # remove the base directory from the zip file
+          rel_dir = local_directory.sub("#{root_directory}/", '')
+          zip_file_to_add = file.gsub("#{local_directory}", "#{rel_dir}")
+          zip_file.add(zip_file_to_add, file)
+        end
+
+        zip_file
+      end
+
+      FileUtils.rm_f(zip_filename) if File.exist?(zip_filename)
+
+      Zip.default_compression = Zlib::BEST_COMPRESSION
+      Zip::File.open(zip_filename, Zip::File::CREATE) do |zf|
+        Dir[File.join(directory, pattern)].each do |file|
+          if File.directory?(file)
+            # skip a few directory that should not be zipped as they are inputs
+            if File.basename(file) =~ /seed|measures|weather/
+              next
+            end
+            add_directory_to_zip(zf, file, directory)
+          else
+            next if File.extname(file) =~ /\.rb.*/
+            next if File.extname(file) =~ /\.zip.*/
+
+            zip_file_to_add = file.gsub("#{directory}/", '')
+            zf.add(zip_file_to_add, file)
+          end
+        end
+      end
+
+      File.chmod(0664, zip_filename)
+    end
+
+    # Main method to zip up the results of the simulation results. This will append the UUID of the data point
+    # if it exists. This method will create two zip files. One for the reports and one for the entire data point. The
+    # Data Point ZIP will also contain the reports.
+    #
+    # @param directory [String] The data point directory to zip up.
+    # @return nil
+    def zip_results(directory)
+      # create zip file using a system call
+      if Dir.exist?(directory) && File.directory?(directory)
+        zip_filename = @datapoint ? "data_point_#{@datapoint.uuid}.zip" : 'data_point.zip'
+        zip_filename = File.join(directory, zip_filename)
+        zip_directory directory, zip_filename
+      end
+
+      # zip up only the reports folder
+      report_dir = File.join(directory, 'reports')
+      if Dir.exist?(report_dir) && File.directory?(report_dir)
+        zip_filename = @datapoint ? "data_point_#{@datapoint.uuid}_reports.zip" : 'data_point_reports.zip'
+        zip_filename = File.join(directory, zip_filename)
+        zip_directory directory, zip_filename, 'reports'
+      end
+    end
   end
 end
 end
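Per the changelog, the methods above replace the gem's earlier `zip -9 -r` system calls with the rubyzip library so packaging behaves the same on Windows. A minimal standalone sketch of that rubyzip pattern follows; it assumes the rubyzip gem is installed, and the directory and archive names are examples only.

    # Standalone sketch of the rubyzip pattern used by zip_directory above;
    # 'run' and 'data_point.zip' are placeholder names.
    require 'zlib'
    require 'zip'

    Zip.default_compression = Zlib::BEST_COMPRESSION
    Zip::File.open('data_point.zip', Zip::File::CREATE) do |zf|
      Dir[File.join('run', '*')].each do |file|
        next if File.directory?(file)               # the adapter walks directories recursively
        next if File.extname(file) =~ /\.rb|\.zip/  # inputs and earlier archives are skipped
        zf.add(File.basename(file), file)           # archive-relative name, then source path
      end
    end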
data/lib/openstudio/workflow/adapters/local.rb CHANGED

@@ -76,10 +76,10 @@ module OpenStudio
     end
 
     def communicate_results(directory, results)
-      zip_results(directory
+      zip_results(directory)
 
       if results.is_a? Hash
-        File.open("#{directory}/
+        File.open("#{directory}/data_point_out.json", 'w') { |f| f << JSON.pretty_generate(results) }
       else
         pp "Unknown datapoint result type. Please handle #{results.class}"
         # data_point_json_path = OpenStudio::Path.new(run_dir) / OpenStudio::Path.new('data_point_out.json')

@@ -90,33 +90,11 @@ module OpenStudio
 
     # For the local adapter send back a handle to a file to append the data. For this adapter
     # the log messages are likely to be the same as the run.log messages.
-    #
+    # ?: do we really want two local logs from the Local adapter? One is in the run dir and the other is in the root
     def get_logger(directory, _options = {})
       @log ||= File.open("#{directory}/local_adapter.log", 'w')
       @log
     end
-
-    # TODO: this uses a system call to zip results at the moment
-    def zip_results(directory, _analysis_type = 'workflow')
-      current_dir = Dir.pwd
-      begin
-        # create zip file using a system call
-        # @logger.info "Zipping up data point #{analysis_dir}"
-        if File.directory? directory
-          Dir.chdir(directory)
-          `zip -9 -r --exclude=*.rb* data_point.zip .`
-        end
-
-        # zip up only the reports folder
-        report_dir = 'reports'
-        # @logger.info "Zipping up Analysis Reports Directory #{report_dir}/reports"
-        if File.directory? report_dir
-          `zip -9 -r data_point_reports.zip reports`
-        end
-      ensure
-        Dir.chdir(current_dir)
-      end
-    end
   end
 end
 end
data/lib/openstudio/workflow/adapters/mongo.rb CHANGED

@@ -47,8 +47,6 @@ module OpenStudio
 
       Dir["#{base_path}/models/*.rb"].each { |f| require f }
       Mongoid.load!("#{base_path}/mongoid.yml", :development)
-
-      @datapoint = nil
     end
 
     # Tell the system that the process has started

@@ -61,52 +59,60 @@ module OpenStudio
       @datapoint.status_message = ''
       @datapoint.run_start_time = ::Time.now
 
-      # TODO: Get Facter to play well on windows and replace 'socket'
       # TODO: use the ComputeNode model to pull out the information so that we can reuse the methods
       # Determine what the IP address is of the worker node and save in the data point
 
-      … (24 removed lines not shown)
+      # ami-id: ami-7c7e4e14
+      # instance-id: i-c52e0412
+      # instance-type: m3.medium
+      # local-hostname: ip-10-99-169-57.ec2.internal
+      # local-ipv4: 10.99.169.57
+      # placement: us-east-1a
+      # public-hostname: ec2-54-161-221-129.compute-1.amazonaws.com
+      # public-ipv4: 54.161.221.129
+      # number_of_cores: 1
+      if File.exist? '/etc/openstudio-server/instance.yml'
+        y = YAML.load_file('/etc/openstudio-server/instance.yml')
+        @datapoint.ip_address = y['public-ipv4'] if y['public-ipv4']
+        @datapoint.internal_ip_address = y['local-ipv4'] if y['local-ipv4']
+      else
+        # try to infer it from the socket/facter information
+        # note, facter will be deprecated in the future, so don't extend it!
+        retries = 0
+        begin
+          require 'socket'
+          if Socket.gethostname =~ /os-.*/
+            # Maybe use this in the future: /sbin/ifconfig eth1|grep inet|head -1|sed 's/\:/ /'|awk '{print $3}'
+            # Must be on vagrant and just use the hostname to do a lookup
+            map = {
+              'os-server' => '192.168.33.10',
+              'os-worker-1' => '192.168.33.11',
+              'os-worker-2' => '192.168.33.12'
+            }
+            @datapoint.ip_address = map[Socket.gethostname]
+            @datapoint.internal_ip_address = @datapoint.ip_address
+          else
+            if Gem.loaded_specs['facter']
+              # Use EC2 public to check if we are on AWS.
+              @datapoint.ip_address = Facter.fact(:ec2_public_ipv4) ? Facter.fact(:ec2_public_ipv4).value : Facter.fact(:ipaddress).value
               @datapoint.internal_ip_address = Facter.fact(:ipaddress).value
             end
           end
-          … (15 removed lines not shown)
+        rescue => e
+          # catch any exceptions. It appears that if a new instance of amazon starts, then it is likely that
+          # the Facter for AWS may not be initialized yet. Retry after waiting for 15 seconds if this happens.
+          # If this fails out, then the only issue with this is that the data point won't be downloaded because
+          # the worker node is not known
+
+          # retry just in case
+          if retries < 30 # try for up to 5 minutes
+            retries += 1
+            sleep 10
+            retry
+          else
+            raise "could not find Facter based data for worker node after #{retries} retries with message #{e.message}"
+            # just do nothing for now
+          end
         end
       end
 

@@ -190,19 +196,13 @@ module OpenStudio
       end
 
       def communicate_results(directory, results)
-        zip_results(directory
+        zip_results(directory)
 
         # @logger.info 'Saving EnergyPlus JSON file'
         if results
          @datapoint.results ? @datapoint.results.merge!(results) : @datapoint.results = results
         end
         result = @datapoint.save! # redundant because next method calls save too.
-
-        if result
-          # @logger.info 'Successfully saved result to database'
-        else
-          # @logger.error 'ERROR saving result to database'
-        end
       end
 
       # TODO: Implement the writing to the mongo_db for logging

@@ -221,28 +221,6 @@ module OpenStudio
        # keep @datapoint as the model instance
        DataPoint.find_or_create_by(uuid: uuid)
      end
-
-      # TODO: this uses a system call to zip results at the moment, replace with rubylib
-      def zip_results(directory, _analysis_type = 'workflow')
-        current_dir = Dir.pwd
-        begin
-          # create zip file using a system call
-          # @logger.info "Zipping up data point #{analysis_dir}"
-          if File.directory? directory
-            Dir.chdir(directory)
-            `zip -9 -r --exclude=*.rb* data_point_#{@datapoint.uuid}.zip .`
-          end
-
-          # zip up only the reports folder
-          report_dir = 'reports'
-          # @logger.info "Zipping up Analysis Reports Directory #{report_dir}/reports"
-          if File.directory? report_dir
-            `zip -9 -r data_point_#{@datapoint.uuid}_reports.zip reports`
-          end
-        ensure
-          Dir.chdir(current_dir)
-        end
-      end
    end
  end
 end
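The first branch above reads worker metadata from /etc/openstudio-server/instance.yml when the OpenStudio server has written one; otherwise the adapter falls back to socket/Facter lookups. A small sketch of the file-based path, using only the two keys the adapter actually consumes (the fallback strings are illustrative):

    # Sketch of the machine-information lookup used by the Mongo adapter above.
    require 'yaml'

    instance_file = '/etc/openstudio-server/instance.yml'
    if File.exist? instance_file
      y = YAML.load_file(instance_file)
      public_ip   = y['public-ipv4'] || 'unknown'
      internal_ip = y['local-ipv4'] || public_ip
      puts "data point will report worker #{public_ip} (internal #{internal_ip})"
    end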
data/lib/openstudio/workflow/jobs/lib/apply_measures.rb CHANGED

@@ -39,12 +39,12 @@ module OpenStudio
       success = true
 
       unless argument[:value].nil?
-        @logger.info "Setting argument value #{argument[:name]} to #{argument[:value]}"
+        @logger.info "Setting argument value '#{argument[:name]}' to '#{argument[:value]}'"
 
         v = argument_map[argument[:name]]
         fail "Could not find argument map in measure for '#{argument[:name]}' with value '#{argument[:value]}'" unless v
         value_set = v.setValue(argument[:value])
-        fail "Could not set argument #{argument[:name]} of value #{argument[:value]} on model" unless value_set
+        fail "Could not set argument '#{argument[:name]}' of value '#{argument[:value]}' on model" unless value_set
         argument_map[argument[:name]] = v.clone
       else
         @logger.warn "Value for argument '#{argument[:name]}' not set in argument list therefore will use default"

@@ -97,7 +97,7 @@ module OpenStudio
     def apply_measure(workflow_item)
       @logger.info "Starting #{__method__} for #{workflow_item[:name]}"
       @time_logger.start("Measure:#{workflow_item[:name]}")
-      #start_time = ::Time.now
+      # start_time = ::Time.now
       current_dir = Dir.pwd
       begin
         measure_working_directory = "#{@run_directory}/#{workflow_item[:measure_definition_class_name]}"

@@ -120,41 +120,56 @@ module OpenStudio
         @logger.info "Loading Measure from #{measure_file_path}"
         fail "Measure file does not exist #{measure_name} in #{measure_file_path}" unless File.exist? measure_file_path
 
-
-
-        runner = OpenStudio::Ruleset::OSRunner.new
+        measure = nil
+        runner = nil
         result = nil
+        begin
+          require measure_file_path
+          measure = Object.const_get(measure_name).new
+          runner = OpenStudio::Ruleset::OSRunner.new
+        rescue => e
+          log_message = "Error requiring measure #{__FILE__}. Failed with #{e.message}, #{e.backtrace.join("\n")}"
+          raise log_message
+        end
 
         arguments = nil
-        if workflow_item[:measure_type] == 'RubyMeasure'
-          arguments = measure.arguments(@model)
-        elsif workflow_item[:measure_type] == 'EnergyPlusMeasure'
-          arguments = measure.arguments(@model)
-        elsif workflow_item[:measure_type] == 'ReportingMeasure'
-          arguments = measure.arguments
-        end
 
-
-
-
-
-
-
+        begin
+          if workflow_item[:measure_type] == 'RubyMeasure'
+            arguments = measure.arguments(@model)
+          elsif workflow_item[:measure_type] == 'EnergyPlusMeasure'
+            arguments = measure.arguments(@model)
+          elsif workflow_item[:measure_type] == 'ReportingMeasure'
+            arguments = measure.arguments
+          end
+
+          @logger.info "Extracted the following arguments: #{arguments}"
 
-
-
-
-
-        fail 'Could not set arguments' unless success
+          # Create argument map and initialize all the arguments
+          argument_map = OpenStudio::Ruleset::OSArgumentMap.new
+          arguments.each do |v|
+            argument_map[v.name] = v.clone
           end
-
+          # @logger.info "Argument map for measure is #{argument_map}"
 
-
-
-
-
+          @logger.info "Iterating over arguments for workflow item '#{workflow_item[:name]}'"
+          if workflow_item[:arguments]
+            workflow_item[:arguments].each do |argument|
+              success = apply_arguments(argument_map, argument)
+              fail 'Could not set arguments' unless success
+            end
          end
+
+          @logger.info "Iterating over variables for workflow item '#{workflow_item[:name]}'"
+          if workflow_item[:variables]
+            workflow_item[:variables].each do |variable|
+              success = apply_variables(argument_map, variable)
+              fail 'Could not set variables' unless success
+            end
+          end
+        rescue => e
+          log_message = "Error assigning argument in measure #{__FILE__}. Failed with #{e.message}, #{e.backtrace.join("\n")}"
+          raise log_message
        end
 
        begin

@@ -200,9 +215,13 @@ module OpenStudio
           measure_attributes = JSON.parse(OpenStudio.toJSON(result.attributes), symbolize_names: true)
           @output_attributes[workflow_item[:name].to_sym] = measure_attributes[:attributes]
         rescue => e
-          log_message = "
-          @logger.
+          log_message = "#{__FILE__} failed with #{e.message}, #{e.backtrace.join("\n")}"
+          @logger.error log_message
         end
+      rescue => e
+        log_message = "#{__FILE__} failed with message #{e.message} in #{e.backtrace.join("\n")}"
+        @logger.error log_message
+        raise log_message
       ensure
         Dir.chdir current_dir
         @time_logger.stop("Measure:#{workflow_item[:name]}")
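Outside of the workflow, the load-then-argument-map pattern introduced above can be exercised directly against the OpenStudio Ruby bindings. The sketch below is hedged: the measure path and class name ('MyMeasure') are hypothetical, and it assumes the 1.x-era bindings used here (OSRunner.new with no arguments, OSArgumentMap):

    # Hedged sketch mirroring apply_measure above; 'my_measure'/'MyMeasure' are made-up names.
    require 'openstudio'

    measure_file_path = File.expand_path('measures/my_measure/measure.rb')
    require measure_file_path

    model   = OpenStudio::Model::Model.new          # stand-in for the workflow's seed model
    measure = Object.const_get('MyMeasure').new
    runner  = OpenStudio::Ruleset::OSRunner.new

    argument_map = OpenStudio::Ruleset::OSArgumentMap.new
    measure.arguments(model).each { |arg| argument_map[arg.name] = arg.clone }

    measure.run(model, runner, argument_map)
    puts runner.result.value.valueName              # 'Success', 'Fail', or 'NA'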
data/lib/openstudio/workflow/jobs/run_energyplus/run_energyplus.rb CHANGED

@@ -17,18 +17,19 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 ######################################################################
 
+# Force the MakeMakefile logger write file output to null.
+module MakeMakefile::Logging
+  @logfile = File::NULL
+end
+
 class RunEnergyplus
   # Initialize
   # param directory: base directory where the simulation files are prepared
   # param logger: logger object in which to write log messages
   def initialize(directory, logger, time_logger, adapter, options = {})
-
-    if /cygwin|mswin|mingw|bccwin|wince|emx/ =~ RUBY_PLATFORM
-      energyplus_path = 'C:/EnergyPlus-8-1-0'
-    else
-      energyplus_path = '/usr/local/EnergyPlus-8-1-0'
-    end
+    @logger = logger
 
+    energyplus_path = find_energyplus
     defaults = {
       energyplus_path: energyplus_path
     }

@@ -38,10 +39,15 @@ class RunEnergyplus
     @directory = directory
     @run_directory = "#{@directory}/run"
     @adapter = adapter
-    @logger = logger
     @time_logger = time_logger
     @results = {}
 
+    # container for storing the energyplus files there were copied into the local directory. These will be
+    # removed at the end of the simulation.
+    @energyplus_files = []
+    @energyplus_exe = nil
+    @expand_objects_exe = nil
+
     @logger.info "#{self.class} passed the following options #{@options}"
   end

@@ -98,35 +104,90 @@ class RunEnergyplus
     end
 
     # can't create symlinks because the /vagrant mount is actually a windows mount
-    @
-
-
-
-
-    FileUtils.copy("#{@options[:energyplus_path]}/libDElight.so", "#{@run_directory}/libDElight.so")
-    FileUtils.copy("#{@options[:energyplus_path]}/libDElight.so", "#{@run_directory}/libDElight.so")
-    FileUtils.copy("#{@options[:energyplus_path]}/ExpandObjects", "#{@run_directory}/ExpandObjects")
-    FileUtils.copy("#{@options[:energyplus_path]}/EnergyPlus", "#{@run_directory}/EnergyPlus")
-    FileUtils.copy("#{@options[:energyplus_path]}/Energy+.idd", "#{@run_directory}/Energy+.idd")
-    @time_logger.stop("Copying EnergyPlus files")
-
-    @time_logger.start("Running EnergyPlus")
+    @time_logger.start('Copying EnergyPlus files')
+    prepare_energyplus_dir
+    @time_logger.stop('Copying EnergyPlus files')
+
+    @time_logger.start('Running EnergyPlus')
     @results = call_energyplus
-    @time_logger.stop(
+    @time_logger.stop('Running EnergyPlus')
 
     @results
   end
 
   private
 
+  # Look for the location of EnergyPlus
+  def find_energyplus
+    if ENV['ENERGYPLUSDIR']
+      return ENV['ENERGYPLUSDIR']
+    elsif ENV['RUBYLIB'] =~ /OpenStudio/
+      path = ENV['RUBYLIB'].split(':')
+      path = File.dirname(path.find { |p| p =~ /OpenStudio/ })
+      # Grab the version out of the openstudio path
+      path += '/sharedresources/EnergyPlus-8-2-0'
+      @logger.info "found EnergyPlus path of #{path}"
+      return path
+    else
+      if /cygwin|mswin|mingw|bccwin|wince|emx/ =~ RUBY_PLATFORM
+        energyplus_path = 'C:/EnergyPlus-8-2-0'
+      else
+        energyplus_path = '/usr/local/EnergyPlus-8-2-0'
+      end
+
+    end
+  end
+
+  def clean_directory
+    @logger.info 'Removing any copied EnergyPlus files'
+    @energyplus_files.each do |file|
+      if File.exist? file
+        FileUtils.rm_f file
+      end
+    end
+
+    paths_to_rm = []
+    paths_to_rm << "#{@run_directory}/packaged_measures"
+    paths_to_rm << "#{@run_directory}/Energy+.ini"
+    paths_to_rm.each { |p| FileUtils.rm_rf(p) if File.exist?(p) }
+  end
+
+  # Prepare the directory to run EnergyPlus. In EnergyPlus < 8.2, we have to copy all the files into the directory.
+  #
+  # @return [Boolean] Returns true is there is more than one file copied
+  def prepare_energyplus_dir
+    @logger.info "Copying EnergyPlus files to run directory: #{@run_directory}"
+    Dir["#{@options[:energyplus_path]}/*"].each do |file|
+      next if File.directory? file
+      next if File.extname(file).downcase =~ /.pdf|.app|.html|.gif|.txt|.xlsx/
+
+      dest_file = "#{@run_directory}/#{File.basename(file)}"
+      @energyplus_files << dest_file
+
+      @energyplus_exe = File.basename(dest_file) if File.basename(dest_file) =~ /^energyplus.{0,4}$/i
+      @expand_objects_exe = File.basename(dest_file) if File.basename(dest_file) =~ /^ExpandObjects.{0,4}$/i
+      FileUtils.copy file, dest_file
+    end
+
+    fail "Could not find EnergyPlus Executable in #{@options[:energyplus_path]}" unless @energyplus_exe
+    fail "Could not find ExpandObjects Executable in #{@options[:energyplus_path]}" unless @expand_objects_exe
+
+    @energyplus_files.size > 0
+  end
+
   def call_energyplus
     begin
       current_dir = Dir.pwd
       Dir.chdir(@run_directory)
      @logger.info "Starting simulation in run directory: #{Dir.pwd}"
 
+      # @logger.info "Contents of: #{Dir.pwd}"
+      # Dir.glob("*").each do |f|
+      #   @logger.info "  #{f}"
+      # end
+
       File.open('stdout-expandobject', 'w') do |file|
-        IO.popen(
+        IO.popen("./#{@expand_objects_exe}") do |io|
           while (line = io.gets)
             file << line
           end

@@ -141,7 +202,7 @@ class RunEnergyplus
 
       # create stdout
       File.open('stdout-energyplus', 'w') do |file|
-        IO.popen(
+        IO.popen("./#{@energyplus_exe} 2>&1") do |io|
           while (line = io.gets)
             file << line
           end

@@ -149,25 +210,25 @@ class RunEnergyplus
       end
       r = $?
 
-      @logger.info "
-
-      paths_to_rm = []
-      paths_to_rm << Pathname.glob("#{@run_directory}/*.ini")
-      paths_to_rm << Pathname.glob("#{@run_directory}/*.so")
-      paths_to_rm << Pathname.glob("#{@run_directory}/*.idd")
-      paths_to_rm << Pathname.glob("#{@run_directory}/ExpandObjects")
-      paths_to_rm << Pathname.glob("#{@run_directory}/EnergyPlus")
-      paths_to_rm << Pathname.glob("#{@run_directory}/packaged_measures")
-      paths_to_rm.each { |p| FileUtils.rm_rf(p) }
-
+      @logger.info "EnergyPlus returned '#{r}'"
       unless r == 0
-
+        @logger.warn 'EnergyPlus returned a non-zero exit code. Check the stdout-energyplus log.'
+      end
+
+      if File.exist? 'eplusout.end'
+        f = File.read('eplusout.end').force_encoding('ISO-8859-1').encode('utf-8', replace: nil)
+        warnings_count = f[/(\d*).Warning/, 1]
+        error_count = f[/(\d*).Severe.Errors/, 1]
+        @logger.info "EnergyPlus finished with #{warnings_count} warnings and #{error_count} severe errors"
+        if f =~ /EnergyPlus Terminated--Fatal Error Detected/
+          fail 'EnergyPlus Terminated with a Fatal Error. Check eplusout.err log.'
+        end
+      else
+        fail 'EnergyPlus failed and did not create an eplusout.end file. Check the stdout-energyplus log.'
       end
 
-      # TODO: check the end or err file
       if File.exist? 'eplusout.err'
-        eplus_err = File.read('eplusout.err')
-        eplus_err = eplus_err.force_encoding('ISO-8859-1').encode('utf-8', replace: nil)
+        eplus_err = File.read('eplusout.err').force_encoding('ISO-8859-1').encode('utf-8', replace: nil)
         if eplus_err =~ /EnergyPlus Terminated--Fatal Error Detected/
           fail 'EnergyPlus Terminated with a Fatal Error. Check eplusout.err log.'
         end

@@ -177,11 +238,13 @@ class RunEnergyplus
       @logger.error log_message
       raise log_message
     ensure
+      @logger.info "Ensuring 'clean' directory"
+      clean_directory
+
       Dir.chdir(current_dir)
       @logger.info 'EnergyPlus Completed'
     end
 
-    # TODO: get list of all the files that are generated and return
     {}
   end
 end
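The lookup order implemented by find_energyplus above can be restated as a small standalone function: the ENERGYPLUSDIR environment variable wins, then OpenStudio's bundled EnergyPlus located via RUBYLIB, then a platform default. This is a sketch of that order, not the gem's actual method, and the default paths may not exist on a given machine:

    # Re-statement of the EnergyPlus lookup order used by the job above.
    def locate_energyplus
      return ENV['ENERGYPLUSDIR'] if ENV['ENERGYPLUSDIR']

      if ENV['RUBYLIB'].to_s =~ /OpenStudio/
        openstudio_entry = ENV['RUBYLIB'].split(':').find { |p| p =~ /OpenStudio/ }
        return "#{File.dirname(openstudio_entry)}/sharedresources/EnergyPlus-8-2-0"
      end

      if RUBY_PLATFORM =~ /cygwin|mswin|mingw|bccwin|wince|emx/
        'C:/EnergyPlus-8-2-0'
      else
        '/usr/local/EnergyPlus-8-2-0'
      end
    end

    puts locate_energyplus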
data/lib/openstudio/workflow/jobs/run_openstudio/run_openstudio.rb CHANGED

@@ -64,9 +64,9 @@ class RunOpenstudio
 
     apply_measures(:openstudio_measure)
 
-    @time_logger.start(
+    @time_logger.start('Translating to EnergyPlus')
     translate_to_energyplus
-    @time_logger.stop(
+    @time_logger.stop('Translating to EnergyPlus')
 
     apply_measures(:energyplus_measure)
 

@@ -84,14 +84,14 @@ class RunOpenstudio
     end
 
     @logger.info 'Saving measure output attributes JSON'
-    File.open("#{@run_directory}/measure_attributes.json", 'w') do
-
+    File.open("#{@run_directory}/measure_attributes.json", 'w') do |f|
+      f << JSON.pretty_generate(@output_attributes)
     end
   end
 
-  @time_logger.start(
+  @time_logger.start('Saving OSM and IDF')
   save_osm_and_idf
-  @time_logger.stop(
+  @time_logger.stop('Saving OSM and IDF')
 
   @results
 end

@@ -135,7 +135,7 @@ class RunOpenstudio
 
     # assume that the seed model has been placed in the directory
     baseline_model_path = File.expand_path(
-
+      File.join(@options[:analysis_root_path], @analysis_json[:analysis][:seed][:path]))
   else
     fail 'No seed model path in JSON defined'
   end

@@ -173,7 +173,7 @@ class RunOpenstudio
   elsif @analysis_json[:analysis][:weather_file]
     if @analysis_json[:analysis][:weather_file][:path]
       weather_filename = File.expand_path(
-
+        File.join(@options[:analysis_root_path], @analysis_json[:analysis][:weather_file][:path])
       )
       @weather_file_path = File.dirname(weather_filename)
     else
data/lib/openstudio/workflow/jobs/run_postprocess/run_postprocess.rb CHANGED

@@ -88,7 +88,10 @@ class RunPostprocess
     end
 
     # Remove empty directories in run folder
-    Dir["#{@run_directory}/*"].select { |d| File.directory? d }.select { |d| (Dir.entries(d) - %w(. ..)).empty? }.each
+    Dir["#{@run_directory}/*"].select { |d| File.directory? d }.select { |d| (Dir.entries(d) - %w(. ..)).empty? }.each do |d|
+      @logger.info "Removing empty directory #{d}"
+      Dir.rmdir d
+    end
 
     paths_to_rm = []
     # paths_to_rm << Pathname.glob("#{@run_directory}/*.osm")
data/lib/openstudio/workflow/jobs/run_reporting_measures/run_reporting_measures.rb CHANGED

@@ -60,13 +60,13 @@ class RunReportingMeasures
     @datapoint_json = @adapter.get_datapoint(@directory, @options)
     @analysis_json = @adapter.get_problem(@directory, @options)
 
-    @time_logger.start(
+    @time_logger.start('Running standard post process')
     if @options[:use_monthly_reports]
       run_monthly_postprocess
     else
       run_standard_postprocess
     end
-    @time_logger.stop(
+    @time_logger.stop('Running standard post process')
 
     translate_csv_to_json
 

@@ -77,8 +77,7 @@ class RunReportingMeasures
     end
 
     @logger.info 'Saving reporting measures output attributes JSON'
-    File.open("#{@run_directory}/reporting_measure_attributes.json", 'w') do
-      |f|
+    File.open("#{@run_directory}/reporting_measure_attributes.json", 'w') do |f|
       f << JSON.pretty_generate(@output_attributes)
     end
 

@@ -371,7 +370,6 @@ class RunReportingMeasures
     ['INTERIORLIGHTS:ELECTRICITY', 'EXTERIORLIGHTS:ELECTRICITY', 'INTERIOREQUIPMENT:ELECTRICITY', 'EXTERIOREQUIPMENT:ELECTRICITY',
      'FANS:ELECTRICITY', 'PUMPS:ELECTRICITY', 'HEATING:ELECTRICITY', 'COOLING:ELECTRICITY', 'HEATREJECTION:ELECTRICITY',
      'HUMIDIFIER:ELECTRICITY', 'HEATRECOVERY:ELECTRICITY', 'WATERSYSTEMS:ELECTRICITY', 'COGENERATION:ELECTRICITY', 'REFRIGERATION:ELECTRICITY'].each do |end_use|
-
       tmp_query = query + " AND ColumnName='#{end_use}'"
       tmp_val = sql_query(sql, 'BUILDING ENERGY PERFORMANCE - ELECTRICITY', tmp_query)
       val += tmp_val unless tmp_val.nil?
data/lib/openstudio/workflow/jobs/run_runmanager/run_runmanager.rb CHANGED

@@ -30,9 +30,9 @@ class RunRunmanager
   def initialize(directory, logger, time_logger, adapter, options = {})
     energyplus_path = nil
     if /cygwin|mswin|mingw|bccwin|wince|emx/ =~ RUBY_PLATFORM
-      energyplus_path = 'C:/EnergyPlus-8-
+      energyplus_path = 'C:/EnergyPlus-8-2-0'
     else
-      energyplus_path = '/usr/local/EnergyPlus-8-
+      energyplus_path = '/usr/local/EnergyPlus-8-2-0'
     end
 
     defaults = {
data/lib/openstudio/workflow/jobs/run_xml/run_xml.rb CHANGED

@@ -73,8 +73,7 @@ class RunXml
     end
 
     # @logger.debug "XML measure output attributes JSON is #{@output_attributes}"
-    File.open("#{@run_directory}/measure_attributes_xml.json", 'w') do
-      |f|
+    File.open("#{@run_directory}/measure_attributes_xml.json", 'w') do |f|
       f << JSON.pretty_generate(@output_attributes)
     end
   end

@@ -96,7 +95,7 @@ class RunXml
 
     # assume that the seed model has been placed in the directory
     baseline_model_path = File.expand_path(
-
+      File.join(@options[:analysis_root_path], @analysis_json[:analysis][:seed][:path]))
 
     if File.exist? baseline_model_path
       @logger.info "Reading in baseline model #{baseline_model_path}"

@@ -125,7 +124,7 @@ class RunXml
     # This last(4) needs to be cleaned up. Why don't we know the path of the file?
     # assume that the seed model has been placed in the directory
     weather_filename = File.expand_path(
-
+      File.join(@options[:analysis_root_path], @analysis_json[:analysis][:weather_file][:path]))
     unless File.exist?(weather_filename)
       @logger.warn "Could not find weather file for simulation #{weather_filename}. Will continue because may change"
     end

@@ -190,7 +189,7 @@ class RunXml
 
     @logger.info "Loading measure in relative path #{measure_path}"
     measure_file_path = File.expand_path(
-
+      File.join(@options[:analysis_root_path], measure_path, 'measure.rb'))
     fail "Measure file does not exist #{measure_name} in #{measure_file_path}" unless File.exist? measure_file_path
 
     require measure_file_path

@@ -211,14 +210,14 @@ class RunXml
     if wf[:arguments]
       wf[:arguments].each do |wf_arg|
         if wf_arg[:value]
-          @logger.info "Setting argument value #{wf_arg[:name]} to #{wf_arg[:value]}"
+          @logger.info "Setting argument value '#{wf_arg[:name]}' to '#{wf_arg[:value]}'"
           # Note that these measures have symbolized hash keys and not strings. I really want indifferential access here!
           args[wf_arg[:name].to_sym] = wf_arg[:value]
         end
       end
     end
 
-    @logger.info "iterate over variables for workflow item #{wf[:name]}"
+    @logger.info "iterate over variables for workflow item '#{wf[:name]}'"
     if wf[:variables]
       wf[:variables].each do |wf_var|
         # Argument hash in workflow looks like the following
data/lib/openstudio/workflow/run.rb CHANGED

@@ -157,7 +157,7 @@ module OpenStudio
         @adapter.communicate_results @directory, @job_results[:run_runmanager]
       elsif @job_results[:run_reporting_measures]
         @logger.info 'Sending the reporting measuers results back to the adapter'
-        @time_logger.save(File.join(@directory,'profile.json'))
+        @time_logger.save(File.join(@directory, 'profile.json'))
         @adapter.communicate_results @directory, @job_results[:run_reporting_measures]
       end
     ensure

@@ -169,9 +169,7 @@ module OpenStudio
 
       @logger.info 'Workflow complete'
       # Write out the TimeLogger once again in case the run_reporting_measures didn't exist
-      @time_logger.save(File.join(@directory,'profile.json'))
-
-
+      @time_logger.save(File.join(@directory, 'profile.json'))
 
       # TODO: define the outputs and figure out how to show it correctly
       obj_function_array ||= ['NA']

@@ -285,7 +283,7 @@ module OpenStudio
 
     def next_state
       @logger.info "Current state: '#{@current_state}'"
-      ns = @transitions.
+      ns = @transitions.find { |h| h[:from] == @current_state }[:to]
      @logger.info "Next state will be: '#{ns}'"
 
      # Set the next state before calling the method

@@ -304,7 +302,7 @@ module OpenStudio
 
       # result
 
-      # TODO fix this so that it gets the base config options plus its job options. Need to
+      # TODO: fix this so that it gets the base config options plus its job options. Need to
       # also merge in all the former job results.
       @options.merge(@job_results)
     end
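The next_state fix above searches a transition table for the entry whose :from matches the current state. The table's shape can be illustrated with made-up state names; the workflow's real state list is not shown in this hunk:

    # Illustrative transition table for the lookup used by next_state above.
    transitions = [
      { from: :queued,     to: :preflight },
      { from: :preflight,  to: :openstudio },
      { from: :openstudio, to: :energyplus }
    ]

    current_state = :preflight
    next_state = transitions.find { |h| h[:from] == current_state }[:to]
    puts next_state # => openstudio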
data/lib/openstudio/workflow/time_logger.rb CHANGED

@@ -13,17 +13,17 @@ class TimeLogger
   def start(channel)
     # warning -- "will reset timer for #{moniker}" if @monikers.key? moniker
     s = ::Time.now
-    @channels[channel] = {start_time_str: "#{s}", start_time: s.to_f}
+    @channels[channel] = { start_time_str: "#{s}", start_time: s.to_f }
   end
 
   def stop(channel)
     end_time = ::Time.now.to_f
     @logger << {
-
-
-
-
-
+      channel: channel,
+      start_time: @channels[channel][:start_time],
+      start_time_str: @channels[channel][:start_time_str],
+      end_time: end_time,
+      delta: end_time - @channels[channel][:start_time]
     }
 
     # remove the channel

@@ -43,11 +43,11 @@ class TimeLogger
 
   # this will report all the values for all the channels with this name.
   def delta(channel)
-    @logger.map { |k| {channel.to_s => k[:delta]} if k[:channel] == channel }
+    @logger.map { |k| { channel.to_s => k[:delta] } if k[:channel] == channel }.compact
   end
 
   # save the data to a file. This will overwrite the file if it already exists
   def save(filename)
     File.open(filename, 'w') { |f| f << JSON.pretty_generate(@logger) }
   end
-end
+end
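A quick usage sketch of TimeLogger based on the start/stop/delta/save methods shown above; it assumes a no-argument constructor, and the channel name and output filename are arbitrary:

    # Hedged TimeLogger usage example; 'Running EnergyPlus' and 'profile.json' are placeholders.
    t = TimeLogger.new
    t.start('Running EnergyPlus')
    sleep 0.1                          # stand-in for real work
    t.stop('Running EnergyPlus')

    puts t.delta('Running EnergyPlus') # => [{"Running EnergyPlus"=>0.1...}]
    t.save('profile.json')             # pretty-printed JSON log, as run.rb does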
metadata CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openstudio-workflow
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.1.1
 platform: ruby
 authors:
 - Nicholas Long
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2015-06-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler

@@ -70,16 +70,16 @@ dependencies:
   name: facter
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
-        version: 2.0
+        version: '2.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
      - !ruby/object:Gem::Version
-        version: 2.0
+        version: '2.0'
 - !ruby/object:Gem::Dependency
   name: rubyXL
   requirement: !ruby/object:Gem::Requirement

@@ -100,14 +100,14 @@ dependencies:
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 1.1.
+        version: 1.1.7
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
      - !ruby/object:Gem::Version
-        version: 1.1.
+        version: 1.1.7
 description: Run OpenStudio based simulations using EnergyPlus
 email:
 - nicholas.long@nrel.gov

@@ -115,6 +115,10 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- CHANGELOG.md
+- README.md
+- Rakefile
+- lib/openstudio-workflow.rb
 - lib/openstudio/workflow/adapter.rb
 - lib/openstudio/workflow/adapters/local.rb
 - lib/openstudio/workflow/adapters/mongo.rb

@@ -135,10 +139,6 @@ files:
 - lib/openstudio/workflow/run.rb
 - lib/openstudio/workflow/time_logger.rb
 - lib/openstudio/workflow/version.rb
-- lib/openstudio-workflow.rb
-- README.md
-- CHANGELOG.md
-- Rakefile
 homepage: https://github.com/NREL/OpenStudio-workflow-gem
 licenses:
 - LGPL

@@ -159,7 +159,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
 summary: Workflow Manager