rhoconnect 3.0.6 → 3.1.0.beta1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG.md +9 -0
- data/Gemfile +3 -3
- data/Gemfile.lock +38 -17
- data/Rakefile +0 -10
- data/bench/benchapp/Gemfile.lock +1 -0
- data/bench/distr_bench/distr_bench_main +94 -27
- data/bench/distr_bench/run_test_query_script.sh +22 -18
- data/bench/lib/bench/aws_utils.rb +326 -0
- data/bench/lib/bench/bench_result_processor.rb +268 -75
- data/bench/lib/bench/cli.rb +1 -0
- data/bench/lib/bench/distr_runner.rb +102 -0
- data/bench/lib/bench/utils.rb +127 -0
- data/bench/lib/bench.rb +16 -15
- data/bench/prepare_bench +3 -11
- data/bench/scripts/test_query_script.rb +6 -7
- data/bin/rhoconnect-benchmark +257 -5
- data/doc/benchmarks-running.txt +140 -0
- data/doc/client-java.txt +236 -0
- data/doc/client-objc.txt +41 -1
- data/doc/client.txt +12 -0
- data/doc/command-line.txt +12 -3
- data/doc/cud-conflicts.txt +68 -0
- data/doc/deploying.txt +1 -70
- data/doc/hosting-rhohub.txt +3 -0
- data/doc/install.txt +50 -13
- data/doc/java-plugin.txt +217 -177
- data/doc/net-plugin.txt +97 -64
- data/doc/plugin-intro.txt +4 -2
- data/doc/preparing-production.txt +63 -0
- data/doc/rhoconnect-redis-stack.txt +252 -0
- data/doc/source-adapters.txt +3 -1
- data/doc/tutorial.txt +111 -49
- data/examples/simple/dump.rdb +0 -0
- data/installer/unix-like/rho_connect_install_constants.rb +6 -5
- data/installer/unix-like/rho_connect_install_installers.rb +6 -2
- data/installer/utils/nix_install_test.rb +2 -0
- data/installer/utils/package_upload/auto-repo.rb +136 -0
- data/installer/utils/package_upload/repos.rake +6 -3
- data/installer/utils/package_upload/s3_upload.rb +11 -6
- data/installer/windows/rhosync.nsi +5 -5
- data/lib/rhoconnect/client_sync.rb +2 -2
- data/lib/rhoconnect/document.rb +12 -0
- data/lib/rhoconnect/jobs/source_job.rb +2 -2
- data/lib/rhoconnect/predefined_adapters/bench_adapter.rb +61 -0
- data/lib/rhoconnect/source.rb +5 -0
- data/lib/rhoconnect/source_adapter.rb +10 -1
- data/lib/rhoconnect/source_sync.rb +161 -88
- data/lib/rhoconnect/store.rb +48 -0
- data/lib/rhoconnect/test_methods.rb +6 -6
- data/lib/rhoconnect/version.rb +1 -1
- data/lib/rhoconnect.rb +25 -2
- data/spec/apps/rhotestapp/sources/sample_adapter.rb +29 -0
- data/spec/jobs/source_job_spec.rb +5 -5
- data/spec/source_adapter_spec.rb +10 -0
- data/spec/source_sync_spec.rb +114 -33
- data/spec/spec_helper.rb +21 -2
- data/spec/store_spec.rb +29 -0
- data/spec/support/shared_examples.rb +1 -1
- data/spec/test_methods_spec.rb +4 -4
- data/tasks/redis.rake +2 -2
- metadata +59 -59
- data/bench/benchapp/log/passenger.3000.log +0 -1
- data/bench/benchapp/log/passenger.9292.log +0 -59
- data/bench/benchapp/tmp/pids/passenger.3000.pid.lock +0 -0
- data/bench/benchapp/tmp/pids/passenger.9292.pid.lock +0 -0
- data/bench/lib/testdata/0-data.txt +0 -0
- data/bench/lib/testdata/1-data.txt +0 -0
- data/bench/lib/testdata/10-data.txt +0 -15
- data/bench/lib/testdata/2-data.txt +0 -3
- data/bench/lib/testdata/25-data.txt +0 -39
- data/bench/lib/testdata/250-data.txt +0 -353
- data/bench/lib/testdata/3-data.txt +0 -4
- data/bench/lib/testdata/50-data.txt +0 -70
- data/bench/lib/testdata/500-data.txt +0 -711
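Much of this release is new distributed-benchmark tooling under `data/bench` (plus the expanded `bin/rhoconnect-benchmark` CLI). As the `aws_utils.rb` diff below shows, `Bench::AWSUtils.init_connection` reads AWS credentials from a Fog-style YAML settings file, defaulting to `~/.fog`. A minimal sketch of that file, inferred from the keys the code reads; every value is a placeholder:

```yaml
# Hypothetical ~/.fog settings file for the distributed benchmark.
# Keys are the ones Bench::AWSUtils.init_connection reads from the
# :default section; all values below are placeholders.
:default:
  :aws_access_key_id: AKIAI...
  :aws_secret_access_key: 9l2ruLeCINbilik...
  :region: us-west-1                           # optional; falls back to Constants::REGION
  :aws_key_pair_name: bench-keypair            # passed as the SecurityKeyPair stack parameter
  :aws_ssh_pem_file: ~/.ssh/bench-keypair.pem  # must exist on disk; checked at startup
```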
data/bench/lib/bench/aws_utils.rb
ADDED

```diff
@@ -0,0 +1,326 @@
+$:.unshift File.join(File.dirname(__FILE__), '..', '..','..','lib')
+
+require 'rubygems'
+require 'yaml'
+
+require 'readline'
+require 'xmlsimple'
+
+module Bench
+  module AWSUtils
+    extend self
+
+    module Constants
+      RC_VERSION = Rhoconnect::VERSION
+      REGION = 'us-west-1'
+      TEMPLATE_URL = 'http://s3.amazonaws.com/rhoconnect-bench/packages/cloud-formation/ec2-autostack.txt'
+      CLIENTS_GROUP_LOGICAL_ID = 'BenchClientsGroup'
+      WAIT_FOR_SSH = 120
+      HOME_DIR = `echo ~/`.strip.chomp("/")
+    end
+
+    class ClientsGroup
+      attr_accessor :stack_name, :client_instances, :auto_scaling_group
+
+      def initialize(stack_name)
+        @stack_name = stack_name
+        auto_scaling = Fog::AWS::AutoScaling.new(
+          :region => Bench::AWSUtils.aws_region,
+          :aws_access_key_id => Bench::AWSUtils.aws_access_key_id,
+          :aws_secret_access_key => Bench::AWSUtils.aws_secret_access_key
+        )
+        group_resources = Bench::AWSUtils.cloud_formation.describe_stack_resources({'StackName' => stack_name,
+          'LogicalResourceId' => Bench::AWSUtils::Constants::CLIENTS_GROUP_LOGICAL_ID}).body
+
+        @auto_scaling_group = auto_scaling.groups.get(group_resources['StackResources'].first['PhysicalResourceId'])
+        @client_instances = []
+        @auto_scaling_group.instances.each do |instance|
+          next if instance.auto_scaling_group_name != @auto_scaling_group.id
+          ec2_instance = Bench::AWSUtils.fog_connection.servers.get(instance.id)
+          @client_instances << ec2_instance.dns_name
+        end
+      end
+    end
+
+    class << self
+      attr_accessor :fog_connection, :cloud_formation, :aws_access_key_id, :aws_secret_access_key, :aws_region
+      attr_accessor :aws_key_pair_name, :aws_ssh_pem_file
+    end
+
+    def validate_presense_of_file(fname)
+      return File.file?(File.expand_path(fname.to_s))
+    end
+
+    def init_connection(settings_file)
+      unless Bench::gem_installed?('net-ssh-multi')
+        puts "In order to run distributed benchmark you need to have 'net-ssh-multi' gem installed"
+        puts "Install it by using : '[sudo] gem install net-ssh-multi'"
+        raise "Gem 'net-ssh-multi' is missing"
+      end
+      unless Bench::gem_installed?('fog')
+        puts "In order to run distributed benchmark you need to have 'fog' gem installed"
+        puts "Install it by using : '[sudo] gem install fog'"
+        raise "Gem 'fog' is missing"
+      end
+
+      require 'net/ssh/multi'
+      require 'fog'
+
+      fog_conf_file = ENV['HOME'] + '/.fog'
+      settings_file ||= fog_conf_file
+
+      if validate_presense_of_file(settings_file)
+        # Read Fog ~/.fog configuration file
+        # :default:
+        #   :aws_access_key_id: AKIAI...
+        #   :aws_secret_access_key: 9l2ruLeCINbilik...
+        #   :region: us-west-1
+        puts "Using AWS settings from #{settings_file} file"
+
+        settings = YAML::load(File.open(settings_file))
+        if not settings or settings[:default].nil?
+          raise "ERROR : AWS Settings file '#{settings_file}' doesn't have the mandatoty 'default' section"
+        end
+        config = settings[:default]
+
+        @aws_ssh_pem_file = config[:aws_ssh_pem_file]
+        unless validate_presense_of_file(aws_ssh_pem_file)
+          raise "ERROR : Can not locate SSH Access Pem File '#{aws_ssh_pem_file}'\nMake sure you set :aws_ssh_pem_file properly in the AWS Settings file"
+        end
+        @aws_access_key_id = config[:aws_access_key_id]
+        @aws_secret_access_key = config[:aws_secret_access_key]
+        @aws_region = config[:region] || Constants::REGION
+        @aws_key_pair_name = config[:aws_key_pair_name]
+
+      else
+        raise "ERROR : Can not locate AWS Settings file '#{settings_file}'\nYou must have this file in order to run the Distributed Benchmark Test"
+      end
+
+      make_fog
+      make_cloud_formation
+    end
+
+    # get_access_keys
+    # Retrieves the access key and secret access key from the above specified file.
+    def get_access_keys(fname)
+      return true if aws_access_key_id and aws_secret_access_key
+
+      lines = IO.readlines(fname)
+      @aws_access_key_id = lines.first.strip.split("=")[1]
+      @aws_secret_access_key = lines.last.strip.split("=")[1]
+    end
+
+    # make_fog
+    # Generates the Fog object used to create the new ec2 instance.
+    def make_fog
+      @fog_connection ||= Fog::Compute.new(
+        :provider => 'AWS',
+        :region => aws_region,
+        :aws_access_key_id => aws_access_key_id,
+        :aws_secret_access_key => aws_secret_access_key
+      )
+    end #make_fog
+
+    def make_cloud_formation
+      @cloud_formation ||= Fog::AWS::CloudFormation.new(
+        :region => aws_region,
+        :aws_access_key_id => aws_access_key_id,
+        :aws_secret_access_key => aws_secret_access_key
+      )
+    end
+
+    def get_template_data(template_url)
+      template_data = ''
+      begin
+        uri = URI.parse(template_url)
+        unless uri.scheme
+          File.open(uri.path) { |f| template_data << f.read }
+        else
+          response = Net::HTTP.get_response(uri)
+          template_data = response.body if response.code == '200'
+        end
+      rescue Exception => e
+        puts "ERROR: Can not obtain CloudFormation template from '#{template_url}'"
+        puts e.message
+      end
+      template_data
+    end
+
+    # Creates new CloudFormation stack based upon template
+    def create_cf_stack
+      puts ""
+      puts " Creating new AWS CloudFormation stack at '#{aws_region}' region"
+      puts " using '#{Constants::TEMPLATE_URL}' template ..."
+      puts " This may take several minutes, please be patient ..."
+      puts ""
+
+      stack_name = nil
+      stack_created = false
+
+      begin
+        template_data = get_template_data(Constants::TEMPLATE_URL)
+        cloud_formation.validate_template('TemplateBody' => template_data)
+
+        template_params = {}
+        template_params['SecurityKeyPair'] = aws_key_pair_name.to_s
+        options = {'TemplateBody' => template_data,
+                   'Parameters' => template_params}
+        stack_name = "BenchStack" + Time.now.strftime("%Y%m%d%H%M%S")
+        result = cloud_formation.create_stack(stack_name, options)
+
+        event_counter = 0
+        in_progress = true
+        stack_created = false
+        while in_progress
+          events = cloud_formation.describe_stack_events(stack_name).body['StackEvents']
+          events.reverse[event_counter..-1].each do |event|
+            puts "Timestamp: #{event['Timestamp']}"
+            puts "LogicalResourceId: #{event['LogicalResourceId']}"
+            puts "ResourceType: #{event['ResourceType']}"
+            puts "ResourceStatus: #{event['ResourceStatus']}"
+            puts "ResourceStatusReason: #{event['ResourceStatusReason']}" if event['ResourceStatusReason']
+            puts "--"
+
+            # track creation of the stack
+            if event['LogicalResourceId'] == stack_name
+              case event['ResourceStatus']
+              when 'CREATE_COMPLETE'
+                stack_created = true
+                in_progress = false
+              when /ROLLBACK/
+                stack_created = false
+                in_progress = false
+              when /DELETE/
+                stack_created = false
+                in_progress = false
+              when /FAILED/
+                stack_created = false
+                in_progress = false
+                break
+              end
+            end
+          end
+          event_counter += events.size - event_counter
+          sleep(2)
+        end
+      rescue Excon::Errors::BadRequest => excon_error
+        error_str = XmlSimple.xml_in(excon_error.response.body)['Error'][0]['Message'][0]
+        puts "ERROR: Cannot create AWS CloudFormation stack : #{error_str}"
+        stack_created = false
+      rescue Excon::Errors::Forbidden => excon_error
+        error_str = XmlSimple.xml_in(excon_error.response.body)['Error'][0]['Message'][0]
+        puts "ERROR: Cannot create AWS CloudFormation stack : #{error_str}"
+        stack_created = false
+      rescue Exception => e
+        puts "ERROR: Cannot create AWS CloudFormation stack : #{e.class.name}: #{e.message}"
+        stack_created = false
+      end
+
+      clients_group = nil
+      if stack_created
+        clients_group = get_clients_group(stack_name)
+        # wait until the SSH service is up and running
+        stack_created = establish_ssh_connection(clients_group)
+      end
+
+      unless stack_created
+        delete_cf_stack(stack_name)
+        clients_group = nil
+        stack_name = nil
+      end
+
+      clients_group
+    end
+
+    # Creates new CloudFormation stack based upon template
+    def delete_cf_stack(stack_name)
+      return unless stack_name
+      puts ""
+      puts "Destroying AWS CloudFormation stack '#{stack_name}' at '#{ aws_region}' region"
+      puts " NOTE: this command doesn't ensure deletion of the stack. "
+      puts " It is advised to check later that the stack has been really destroyed"
+      puts ""
+
+      begin
+        cloud_formation.delete_stack(stack_name)
+      rescue Excon::Errors::BadRequest => excon_error
+        error_str = XmlSimple.xml_in(excon_error.response.body)['Error'][0]['Message'][0]
+        puts "ERROR: Cannot delete the stack '#{stack_name}' : #{error_str}"
+      rescue Excon::Errors::Forbidden => excon_error
+        error_str = XmlSimple.xml_in(excon_error.response.body)['Error'][0]['Message'][0]
+        puts "ERROR: Cannot delete the stack '#{stack_name}' : #{error_str}"
+      rescue Exception => e
+        puts "ERROR: Cannot delete the stack '#{stack_name}' : #{e.class.name}: #{e.message}"
+      end
+    end
+
+    def get_clients_group(stack_name)
+      ClientsGroup.new(stack_name)
+    end
+
+    def establish_ssh_connection(clients_group)
+      STDOUT.sync = true
+      ssh_established = false
+      begin
+        start_timestamp = Time.now
+        sess_options = {:keys => [aws_ssh_pem_file]}
+        # just some simple command
+        command = 'pwd 1>/dev/null'
+
+        # clean-up outdated info (sometimes DNS names are re-used
+        # so we need to clean-up SSH known hosts file)
+        clients_group.client_instances.each do |hostname|
+          system("ssh-keygen -R #{hostname} 1>/dev/null 2>&1")
+        end
+
+        puts ""
+        puts " Stack '#{clients_group.stack_name}' is created. Waiting for SSH services to start-up..."
+        while not ssh_established
+          begin
+            run_stack_ssh_command(clients_group.client_instances, command)
+            # if we are here - SSH command has executed succesfully
+            puts " Done."
+            ssh_established = true
+            break
+          rescue Interrupt => i
+            raise "User Interruption"
+          rescue Net::SSH::AuthenticationFailed => e
+            raise e
+          rescue OpenSSL::PKey::PKeyError => e
+            raise e
+          rescue Errno::ECONNREFUSED => e
+            # service is not yet started - wait more
+          end
+
+          # try for 60 seconds maximum
+          if (Time.now.to_i - start_timestamp.to_i) > Constants::WAIT_FOR_SSH
+            puts " Failed!"
+            puts "ERROR: Cannot establish SSH session with the stack's EC2 instances..."
+            puts ""
+            break
+          end
+
+          sleep(10)
+          print '. '
+        end
+      rescue Exception => e
+        puts " Failed!"
+        puts "ERROR: Cannot establish SSH session with the stack's EC2 instances : #{e.class.name} : #{e.message}"
+        puts ""
+      end
+
+      ssh_established
+    end
+
+    def run_stack_ssh_command(ec2_clients, command)
+      sess_options = {:keys => [aws_ssh_pem_file]}
+      Net::SSH::Multi.start({:default_user => 'ec2-user'}) do |session|
+        # define the servers we want to use
+        session.use(sess_options) { ec2_clients }
+
+        # execute commands on all servers
+        session.exec command
+      end
+    end
+  end
+end
```
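Taken together, the new module appears meant to be driven in the sequence below. This is a minimal sketch, assuming the bench library's entry point loads `Bench::AWSUtils`; the method names and return values come from the diff above, while the surrounding script is illustrative:

```ruby
require 'bench'  # assumed entry point that pulls in Bench::AWSUtils

# Load credentials from ~/.fog (pass an explicit path to override) and
# build the Fog::Compute and Fog::AWS::CloudFormation connections.
Bench::AWSUtils.init_connection(nil)

# Bring up the benchmark client stack from the CloudFormation template;
# returns a ClientsGroup on success, nil on failure or rollback.
clients = Bench::AWSUtils.create_cf_stack
if clients
  # Run a command on every EC2 client in the auto-scaling group over SSH.
  Bench::AWSUtils.run_stack_ssh_command(clients.client_instances, 'uname -a')

  # Tear the stack back down when the run is finished.
  Bench::AWSUtils.delete_cf_stack(clients.stack_name)
end
```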
data/bench/lib/bench/bench_result_processor.rb
CHANGED

```diff
@@ -1,90 +1,283 @@
 #!/usr/bin/ruby
 require 'rubygems'
-require 'yaml'
-require 'gruff'
-$LOAD_PATH.unshift File.expand_path(File.join(File.dirname(__FILE__), '..', '..','lib'))
-require 'bench'
 
-
-
-
-
-
-
-end
-Dir.chdir results_dir
-results_dir = Dir.pwd
-
-Dir.chdir current_dir
-output_dir = ARGV[1]
-output_dir ||= 'images'
-begin
-  Dir.mkdir output_dir
-rescue
-end
-Dir.chdir output_dir
-output_dir = Dir.pwd
+module Bench
+  def self.gem_installed?(gem_name)
+    (Gem::Specification.respond_to?(:find_by_name) ?
+      Gem::Specification.find_by_name(gem_name) : Gem.source_index.find_name(gem_name).last) != nil
+  rescue Exception => e
+    false
+  end
 
-
-
-
-
-
-
+  module PostProcessing
+    @plugins = ['RhoSpreadSheet','RhoGruff']
+
+
+    def self.execute(res_dir)
+      return unless res_dir
+
+      puts ""
+      puts "Starting Benchmark Post-Processing ..."
+      puts ""
+
+      @plugins.each do |plugin|
+        plugin_class = eval(plugin)
+        available = plugin_class.has?
+        if available
+          available = plugin_class.load_myself
+        end
+
+        # print the message
+        if not available
+          puts ""
+          plugin_class.what_is_needed?
+          puts ""
+          next
+        end
+
+        plugin_instance = plugin_class.new
+        plugin_instance.process res_dir
+      end
+    end
+
+    # this post-processor creates EXCEL spreadsheets
+    class RhoSpreadSheet
+      def self.has?
+        Bench::gem_installed?('spreadsheet')
+      end
+
+      def self.what_is_needed?
+        puts "In order to run SpreadSheet post-processor - you need to have SpreadSheet gem installed"
+        puts "Install it by using : '[sudo] gem install spreadsheet'"
+      end
+
+      def self.load_myself
+        require 'yaml'
+        require 'spreadsheet'
+        true
+      end
+
+      def process(res_dir)
+        current_dir = Dir.pwd
+        begin
+          puts "Starting SpreadSheet post-processor..."
+          # 1) Create images dir
+          Dir.chdir res_dir
+          @results_dir = Dir.pwd
+          output_dir = Bench.create_subdir 'spreadsheet'
+          Dir.chdir output_dir
+          @output_dir = Dir.pwd
+
+          _load_meta_hash
+          _init
+          _process_res_files
+          _write
+          Dir.chdir current_dir
+        rescue Exception => e
+          Dir.chdir current_dir
+          raise e
+        end
+      end
+
+      def _load_meta_hash
+        # load meta.yaml
+        @meta_hash = YAML.load_file(File.join(@results_dir,'raw_data','meta.yml')) if File.exists?(File.join(@results_dir,'raw_data','meta.yml'))
+        if @meta_hash.nil?
+          raise "SpreadSheet Result Processor: No valid meta.yml file is found in the result directory - Skipping ..."
+        end
 
-@metrics = meta_hash[:metrics]
-if @metrics.nil?
-
-
-end
+        @metrics = @meta_hash[:metrics]
+        if @metrics.nil?
+          raise "SpreadSheet Result Processor: No valid metrics are found in the result directory - Skipping ..."
+        end
 
-if meta_hash[:x_keys].nil?
-
-
-
-@x_keys =
-
+        if @meta_hash[:x_keys].nil?
+          raise "SpreadSheet Result Processor: No valid x_keys are found in the result directory - Skipping ..."
+        end
+        @x_keys = @meta_hash[:x_keys].keys
+        @x_keys = @x_keys.sort_by(&Bench.sort_natural_order)
+      end
+
+      def _init
+        # initialize graphs for each metric
+        # row 0 - payload labels
+        # col 0 - X keys
+        @title = @meta_hash[:label]
+        @book = Spreadsheet::Workbook.new(@title)
+        @sheets = {}
+        axis_format = Spreadsheet::Format.new :color => :blue,
+                                              :weight => :bold,
+                                              :size => 18
+        @metrics.each do |name,index|
+          sheet = @book.create_worksheet({:name => "#{name} (#{@title})"})
+          @sheets[index] = sheet
+          sheet.column(0).default_format = axis_format
+          sheet.row(0).default_format = axis_format
+          @meta_hash[:x_keys].each do |key,key_index|
+            sheet[key_index + 1, 0] = "#{key}"
+          end
+        end
+      end
+
+      def _process_res_files
+        # load all result files
+        res_files = Dir.entries(File.join(@results_dir,'raw_data')).collect { |entry| entry if entry =~ /bench.*result/ }
+        res_files.compact!
+        res_files = res_files.sort_by(&Bench.sort_natural_order)
+
+        res_files.each_with_index do |entry, entry_index|
+          begin
+            res_hash = YAML.load_file(File.join(@results_dir,'raw_data',entry))
+            next if res_hash.nil? or res_hash.empty?
 
-
-
-
-
-
-g.title = "#{title} (#{name})"
-g.labels = meta_hash[:x_keys].invert
-graphs[index] = g
-end
+            marker = entry.split('.').last.to_s
+
+            @sheets.each do |index,sheet|
+              sheet[0, entry_index + 1] = "#{marker}"
+            end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            g_data = Array.new(@metrics.size) { Array.new }
+            @x_keys.each do |x_key|
+              row_idx = @meta_hash[:x_keys][x_key] + 1
+              results = res_hash[x_key]
+              results ||= Array.new(@metrics.size, 0.0)
+              results.each_with_index do |res, index|
+                col_idx = entry_index + 1
+                @sheets[index][row_idx,col_idx] = ("%0.4f" % res).to_f
+              end
+            end
+          rescue Exception => e
+            raise "SpreadSheet processing resulted in Error : #{e.message} " + e.backtrace.join("\n")
+          end
+        end
+      end
+
+      def _write
+        image_fname = File.join(@output_dir,"bench_results.xls")
+        puts "Spreadsheet processor: writing #{image_fname}"
+        @book.write image_fname
       end
     end
 
-
-
+    # this post-processor creates PNG graph files
+    class RhoGruff
+      def self.has?
+        Bench::gem_installed?('gruff')
+      end
+
+      def self.what_is_needed?
+        puts "In order to run Gruff post-processor - you need to have Gruff gem installed"
+        puts "Install it by using : '[sudo] gem install gruff'"
+        puts "You may also need to install additional components - please check Gruff documentation for details"
+      end
+
+      def self.load_myself
+        res = true
+        begin
+          require 'yaml'
+          require 'gruff'
+        rescue Exception => e
+          puts " Can not run Gruff post-processor : #{e.message}"
+          res = false
+        end
+        res
+      end
+
+      def process(res_dir)
+        current_dir = Dir.pwd
+        begin
+          puts "Starting Gruff post-processor..."
+          # 1) Create images dir
+          Dir.chdir res_dir
+          @results_dir = Dir.pwd
+          output_dir = Bench.create_subdir 'images'
+          Dir.chdir output_dir
+          @output_dir = Dir.pwd
+
+          _load_meta_hash
+          _init_graphs
+          _process_res_files
+          _write_graphs
+          Dir.chdir current_dir
+        rescue Exception => e
+          Dir.chdir current_dir
+          raise e
+        end
+      end
+
+      def _load_meta_hash
+        # load meta.yaml
+        @meta_hash = YAML.load_file(File.join(@results_dir,'raw_data','meta.yml')) if File.exists?(File.join(@results_dir,'raw_data','meta.yml'))
+        if @meta_hash.nil?
+          raise "Gruff Result Processor: No valid meta.yml file is found in the result directory - Skipping ..."
+        end
+
+        @metrics = @meta_hash[:metrics]
+        if @metrics.nil?
+          raise "Gruff Result Processor: No valid metrics are found in the result directory - Skipping ..."
+        end
+
+        if @meta_hash[:x_keys].nil?
+          raise "Gruff Result Processor: No valid x_keys are found in the result directory - Skipping ..."
+        end
+        @x_keys = @meta_hash[:x_keys].keys
+        @x_keys = @x_keys.sort_by(&Bench.sort_natural_order)
+      end
+
+      def _init_graphs
+        # initialize graphs for each metric
+        @graphs = {}
+        @title = @meta_hash[:label]
+        @metrics.each do |name,index|
+          g = Gruff::Line.new
+          g.title = "#{@title} (#{name})"
+          g.labels = @meta_hash[:x_keys].invert
+          @graphs[index] = g
+        end
+      end
+
+      def _process_res_files
+        # load all result files
+        res_files = Dir.entries(File.join(@results_dir,'raw_data')).collect { |entry| entry if entry =~ /bench.*result/ }
+        res_files.compact!
+        res_files = res_files.sort_by(&Bench.sort_natural_order)
+
+        # we can only create 7 unique lines
+        # per graph
+        for entry in res_files.last(7) do
+          begin
+            res_hash = YAML.load_file(File.join(@results_dir,'raw_data',entry))
+            next if res_hash.nil? or res_hash.empty?
+
+            marker = entry.split('.').last.to_s
+
+            g_data = Array.new(@metrics.size) { Array.new }
+            @x_keys.each do |x_key|
+              results = res_hash[x_key]
+              results ||= Array.new(@metrics.size, 0.0)
+              results.each_with_index do |res, index|
+                g_data[index] << ("%0.4f" % res).to_f
+              end
+            end
+
+            @graphs.each do |index, graph|
+              graph.data("#{marker}", g_data[index])
+            end
+          rescue Exception => e
+            raise "Gruff processing resulted in Error : " + e.backtrace.join("\n")
+          end
+        end
+      end
+
+      def _write_graphs
+        # write out resulting graphs
+        @metrics.each do |name, index|
+          image_fname = File.join(@output_dir,"#{name}.png")
+          puts "Gruff processor: writing #{image_fname}"
+          @graphs[index].write image_fname
+        end
+      end
     end
-rescue Exception => e
-  puts " Benchmark processing resulted in Error : " + e.backtrace.join("\n")
-  throw e
   end
 end
 
-# write out resulting graphs
-@metrics.each do |name, index|
-  puts "writing #{output_dir}/#{name}.png"
-  graphs[index].write "#{output_dir}/#{name}.png"
-end
```
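For reference, a sketch of how the reworked post-processing is invoked; `Bench::PostProcessing.execute` and the plugin behavior are taken from the diff above, and the results path is a placeholder:

```ruby
require 'bench'  # assumed entry point that pulls in Bench::PostProcessing

# Walks the plugin list (RhoSpreadSheet, then RhoGruff), skipping any
# whose gem ('spreadsheet' or 'gruff') is not installed. Each plugin
# reads raw_data/meta.yml and the bench*result files under the given
# directory, then writes spreadsheet/bench_results.xls and one
# images/<metric>.png per metric next to them.
Bench::PostProcessing.execute('/path/to/bench_results')  # placeholder path
```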
data/bench/lib/bench/cli.rb
CHANGED