rflow 1.0.0a1 → 1.0.0a2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. checksums.yaml +4 -4
  2. data/.gitignore +2 -0
  3. data/.rspec +1 -0
  4. data/Gemfile +0 -1
  5. data/NOTES +0 -13
  6. data/README.md +6 -1
  7. data/bin/rflow +2 -9
  8. data/example/basic_config.rb +1 -33
  9. data/example/basic_extensions.rb +0 -98
  10. data/example/http_config.rb +2 -3
  11. data/example/http_extensions.rb +6 -63
  12. data/lib/rflow.rb +31 -39
  13. data/lib/rflow/child_process.rb +112 -0
  14. data/lib/rflow/component.rb +77 -148
  15. data/lib/rflow/component/port.rb +38 -41
  16. data/lib/rflow/components.rb +4 -8
  17. data/lib/rflow/components/clock.rb +49 -0
  18. data/lib/rflow/components/integer.rb +39 -0
  19. data/lib/rflow/components/raw.rb +10 -6
  20. data/lib/rflow/components/replicate.rb +20 -0
  21. data/lib/rflow/components/ruby_proc_filter.rb +27 -0
  22. data/lib/rflow/configuration.rb +105 -184
  23. data/lib/rflow/configuration/component.rb +1 -4
  24. data/lib/rflow/configuration/connection.rb +11 -16
  25. data/lib/rflow/configuration/port.rb +3 -5
  26. data/lib/rflow/configuration/ruby_dsl.rb +105 -119
  27. data/lib/rflow/configuration/setting.rb +19 -25
  28. data/lib/rflow/configuration/shard.rb +1 -3
  29. data/lib/rflow/connection.rb +47 -10
  30. data/lib/rflow/connections.rb +0 -1
  31. data/lib/rflow/connections/zmq_connection.rb +34 -38
  32. data/lib/rflow/daemon_process.rb +155 -0
  33. data/lib/rflow/logger.rb +41 -25
  34. data/lib/rflow/master.rb +23 -105
  35. data/lib/rflow/message.rb +78 -108
  36. data/lib/rflow/pid_file.rb +37 -37
  37. data/lib/rflow/shard.rb +33 -100
  38. data/lib/rflow/version.rb +2 -2
  39. data/rflow.gemspec +2 -2
  40. data/schema/tick.avsc +10 -0
  41. data/spec/fixtures/config_ints.rb +4 -40
  42. data/spec/fixtures/config_shards.rb +1 -2
  43. data/spec/fixtures/extensions_ints.rb +0 -98
  44. data/spec/rflow/component/port_spec.rb +61 -0
  45. data/spec/rflow/components/clock_spec.rb +72 -0
  46. data/spec/rflow/configuration/ruby_dsl_spec.rb +150 -0
  47. data/spec/rflow/configuration_spec.rb +54 -0
  48. data/spec/rflow/forward_to_input_port_spec.rb +48 -0
  49. data/spec/rflow/forward_to_output_port_spec.rb +40 -0
  50. data/spec/rflow/logger_spec.rb +48 -0
  51. data/spec/rflow/message/data/raw_spec.rb +29 -0
  52. data/spec/rflow/message/data_spec.rb +58 -0
  53. data/spec/rflow/message_spec.rb +154 -0
  54. data/spec/rflow_spec.rb +94 -124
  55. data/spec/spec_helper.rb +8 -12
  56. metadata +46 -22
  57. data/lib/rflow/components/raw/extensions.rb +0 -18
  58. data/lib/rflow/port.rb +0 -4
  59. data/lib/rflow/util.rb +0 -19
  60. data/spec/rflow_component_port_spec.rb +0 -58
  61. data/spec/rflow_configuration_ruby_dsl_spec.rb +0 -148
  62. data/spec/rflow_configuration_spec.rb +0 -73
  63. data/spec/rflow_message_data_raw.rb +0 -26
  64. data/spec/rflow_message_data_spec.rb +0 -60
  65. data/spec/rflow_message_spec.rb +0 -182
  66. data/spec/schema_spec.rb +0 -28
  67. data/temp.rb +0 -295
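
Most of this release is a reorganization: the test suite moves under spec/rflow/, process management splits into child_process.rb and daemon_process.rb, and several stock components are added (clock, integer, replicate, ruby_proc_filter). For orientation, the specs below all drive RFlow through its Ruby configuration DSL; here is a minimal sketch of that style, assembled only from calls and component names that appear in the spec fixtures in this diff (the particular settings and component names are illustrative, not canonical):

    RFlow::Configuration::RubyDSL.configure do |c|
      c.setting 'rflow.application_name', 'example'

      # one component per processing step
      c.component 'generate_ints', 'RFlow::Components::GenerateIntegerSequence', 'start' => 0, 'finish' => 10
      c.component 'output', 'RFlow::Components::FileOutput', 'output_file_path' => 'out'

      # components can be grouped into a shard backed by several worker processes
      c.shard 's1', :process => 2 do |s|
        s.component 'generate_more_ints', 'RFlow::Components::GenerateIntegerSequence', 'start' => 20, 'finish' => 30
      end

      # wire output ports to input ports as 'component#port' => 'component#port'
      c.connect 'generate_ints#out' => 'output#in'
      c.connect 'generate_more_ints#out' => 'output#in'
    end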
data/spec/rflow/message/data_spec.rb ADDED
@@ -0,0 +1,58 @@
+ require 'spec_helper'
+ require 'rflow/message'
+
+ class RFlow
+ class Message
+ describe Data do
+ let(:string) { 'this is a string to be serialized' }
+ let(:invalid_schema) { 'invalid schema' }
+ let(:valid_schema) { '{"type": "string"}' }
+ let(:serialized_string) { encode_avro(valid_schema, string) }
+
+ context "if created without a schema" do
+ it "should throw an exception" do
+ expect { Data.new(nil) }.to raise_error(ArgumentError, /^Invalid schema/)
+ end
+ end
+
+ context "if created with an invalid schema for the serialization" do
+ ['avro', :avro].each do |it|
+ it "should throw an exception for serialization type #{it.inspect}" do
+ expect { Data.new(invalid_schema, it) }.to raise_error(ArgumentError, /^Invalid schema/)
+ end
+ end
+ end
+
+ context "if created with a valid avro schema" do
+ ['avro', :avro].each do |it|
+ it "should instantiate correctly for serialization type #{it.inspect}" do
+ expect { Data.new(valid_schema, it) }.to_not raise_error
+ end
+ end
+
+ context "if created with a non-avro data serialization" do
+ ['unknown', :unknown, 'xml', :xml].each do |it|
+ it "should throw an exception for serialization type #{it.inspect}" do
+ expect { Data.new(valid_schema, it) }.to raise_error(
+ ArgumentError, 'Only Avro serialization_type supported at the moment')
+ end
+ end
+ end
+
+ context "if created with an avro serialization" do
+ ['avro', :avro].each do |it|
+ it "should instantiate correctly for serialization type #{it.inspect}" do
+ expect { Data.new(valid_schema, it) }.to_not raise_error
+ end
+ end
+
+ context "if created with a serialized data object" do
+ it "should instantiate correctly" do
+ expect { Data.new(valid_schema, 'avro', serialized_string )}.to_not raise_error
+ end
+ end
+ end
+ end
+ end
+ end
+ end
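
The Data spec above pins down the constructor contract for RFlow::Message::Data. Restated as a short sketch (the schema, serialization types, and error messages are taken from the expectations above; variable names are placeholders):

    require 'rflow/message'

    schema = '{"type": "string"}'

    # a valid Avro schema plus an avro serialization type instantiates cleanly
    data = RFlow::Message::Data.new(schema, :avro)

    # a nil or invalid schema raises ArgumentError (/^Invalid schema/), and any
    # serialization type other than avro raises
    # 'Only Avro serialization_type supported at the moment'

    # previously serialized bytes may be passed as a third argument:
    # RFlow::Message::Data.new(schema, 'avro', serialized_bytes)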
data/spec/rflow/message_spec.rb ADDED
@@ -0,0 +1,154 @@
+ require 'spec_helper'
+ require 'digest/md5'
+ require 'rflow/message'
+
+ class RFlow
+ describe Message do
+ context "if created with an unknown data type" do
+ it "should throw an exception" do
+ expect { Message.new('non_existent_data_type') }.to raise_error(
+ ArgumentError, "Data type 'non_existent_data_type' with serialization_type 'avro' not found")
+ end
+ end
+
+ context "if created with a known data type" do
+ before(:all) do
+ @schema = '{"type": "string"}'
+ Configuration.add_available_data_type(:string_type, 'avro', @schema)
+ end
+
+ it "should instantiate correctly" do
+ expect { Message.new('string_type') }.to_not raise_error
+ end
+
+ context "if created with empty provenance" do
+ context "if created with an unknown data serialization" do
+ ['unknown', :unknown].each do |it|
+ it "should throw an exception for #{it.inspect}" do
+ expect { Message.new('string_type', [], it) }.to raise_error(
+ ArgumentError, "Data type 'string_type' with serialization_type 'unknown' not found")
+ end
+ end
+ end
+
+ context "if created with a known data serialization" do
+ ['avro', :avro].each do |it|
+ it "should instantiate correctly for #{it.inspect}" do
+ expect { Message.new('string_type', [], it) }.to_not raise_error
+ end
+ end
+
+ context "if created with a mismatched schema" do
+ it
+ end
+ context "if created with a matched schema" do
+ it
+ end
+
+ context "if created with a nil schema" do
+ context "if created with a serialized data object" do
+ let(:serialized_string) { encode_avro(@schema, 'this is a string to be serialized') }
+
+ it "should instantiate correctly" do
+ expect { Message.new('string_type', [], 'avro', nil, serialized_string) }.to_not raise_error
+ end
+ end
+ end
+ end
+ end
+
+ context "if created with invalid provenance" do
+ let(:invalid_processing_event_hash) { {'started_at' => 'bad time string'} }
+ let(:invalid_provenance) { [invalid_processing_event_hash] }
+
+ it "should throw an exception" do
+ expect { Message.new('string_type', invalid_provenance) }.to raise_error(
+ ArgumentError, 'invalid date: "bad time string"')
+ end
+ end
+
+ context "if created with valid provenance" do
+ let(:valid_xmlschema_time) { '2001-01-01T01:01:01.000001Z' }
+ let(:valid_processing_event_hash) { {'component_instance_uuid' => 'uuid', 'started_at' => valid_xmlschema_time } }
+ let(:valid_processing_event) { Message::ProcessingEvent.new('uuid', valid_xmlschema_time, valid_xmlschema_time, 'context') }
+ let(:valid_provenance) do
+ [Message::ProcessingEvent.new('uuid'),
+ valid_processing_event_hash,
+ valid_processing_event]
+ end
+
+ it "should instantiate correctly" do
+ expect { Message.new('string_type', valid_provenance) }.to_not raise_error
+ end
+
+ it "should correctly set the provenance processing events" do
+ Message.new('string_type', valid_provenance).provenance[1].tap do |p|
+ p.component_instance_uuid.should == 'uuid'
+ p.started_at.should == Time.xmlschema(valid_xmlschema_time)
+ p.completed_at.should be_nil
+ p.context.should be_nil
+ end
+ end
+
+ it "should to_hash its provenance correctly" do
+ Message.new('string_type', valid_provenance).provenance.map(&:to_hash).should == [
+ {"component_instance_uuid" => "uuid", "started_at" => nil, "completed_at" => nil, "context" => nil},
+ {"component_instance_uuid" => "uuid", "started_at" => valid_xmlschema_time, "completed_at" => nil, "context" => nil},
+ {"component_instance_uuid" => "uuid", "started_at" => valid_xmlschema_time, "completed_at" => valid_xmlschema_time, "context" => "context"}]
+ end
+ end
+
+ context "if correctly created" do
+ it "should serialize and deserialize correctly to/from avro" do
+ message = Message.new('string_type').tap do |m|
+ m.provenance << Message::ProcessingEvent.new('UUID')
+ m.data.data_object = 'teh awesome'
+ end
+
+ Message.from_avro(message.to_avro).tap do |processed|
+ processed.data.to_avro.should == message.data.to_avro
+ processed.data.data_object.should == message.data.data_object
+ end
+ end
+ end
+
+ context "if data extensions exist" do
+ it "should extend the data element with the extension" do
+ module ExtensionModule; def ext_method; end; end
+
+ message = Message.new('string_type')
+ message.data.methods.should_not include(:ext_method)
+
+ Configuration.add_available_data_extension('string_type', ExtensionModule)
+ message = Message.new('string_type')
+ message.data.methods.should include(:ext_method)
+ end
+ end
+ end
+
+ it "should correctly handle large raw types" do
+ message = Message.new('RFlow::Message::Data::Raw').tap do |m|
+ m.data.raw = Array.new(101) { rand(256) }.pack('c*')
+ end
+
+ message_avro = message.to_avro.force_encoding('BINARY')
+
+ processed_message = Message.from_avro(message_avro)
+ processed_message_avro = processed_message.to_avro.force_encoding('BINARY')
+
+ @raw_schema = Configuration.available_data_types['RFlow::Message::Data::Raw']['avro']
+
+ encode_avro(@raw_schema, message.data.data_object).should == message.data.to_avro
+ decode_avro(@raw_schema, message.data.to_avro).should == message.data.data_object
+
+ message_data_avro = message.data.to_avro.force_encoding('BINARY')
+ processed_message_data_avro = processed_message.data.to_avro.force_encoding('BINARY')
+
+ Digest::MD5.hexdigest(message_avro).should == Digest::MD5.hexdigest(processed_message_avro)
+
+ message_data_avro.should == processed_message_data_avro
+ Digest::MD5.hexdigest(message_data_avro).should == Digest::MD5.hexdigest(processed_message_data_avro)
+ Digest::MD5.hexdigest(message.data.raw).should == Digest::MD5.hexdigest(processed_message.data.raw)
+ end
+ end
+ end
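
The Message spec above covers data-type registration, provenance handling, and Avro round-tripping. Condensed into a usage sketch (every call is taken from the expectations above; the type name and payload are placeholders):

    require 'rflow/message'

    # register a named data type backed by an Avro schema
    RFlow::Configuration.add_available_data_type(:string_type, 'avro', '{"type": "string"}')

    message = RFlow::Message.new('string_type')
    message.provenance << RFlow::Message::ProcessingEvent.new('component-uuid')
    message.data.data_object = 'payload'

    # serialize to Avro and back; the data object survives the round trip
    copy = RFlow::Message.from_avro(message.to_avro)
    copy.data.data_object  # => 'payload'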
data/spec/rflow_spec.rb CHANGED
@@ -1,62 +1,51 @@
- require 'spec_helper.rb'
-
+ require 'spec_helper'
  require 'open3'
  require 'rflow'

  describe RFlow do
-
  before(:all) do
  @extensions_file_name = File.join(File.dirname(__FILE__), 'fixtures', 'extensions_ints.rb')
  end

- context "when executing from the test script" do
+ before(:each) do
+ @original_directory_path = Dir.getwd
+ @run_directory_path = File.join(@temp_directory_path, 'run')
+ @log_directory_path = File.join(@temp_directory_path, 'log')
+ Dir.mkdir @run_directory_path
+ Dir.mkdir @log_directory_path
+ Dir.chdir @temp_directory_path
+ end

- before(:all) do
- load @extensions_file_name
- end
+ after(:each) { Dir.chdir @original_directory_path }

- describe '.run' do
- before(:each) do
- @original_directory_path = Dir.getwd
- @run_directory_path = File.join(@temp_directory_path, 'run')
- @log_directory_path = File.join(@temp_directory_path, 'log')
- Dir.mkdir @run_directory_path
- Dir.mkdir @log_directory_path
- end
-
- after(:each) do
- Dir.chdir @original_directory_path
- end
+ context "when executing from the test script" do
+ before(:all) { load @extensions_file_name }

+ describe '.run!' do
  def run_rflow_with_dsl(&block)
  rflow_thread = Thread.new do
  ActiveRecord::Base.establish_connection adapter: "sqlite3", database: ":memory:"
  RFlow::Configuration.migrate_database
- RFlow::Configuration::RubyDSL.configure do |c|
- block.call(c)
- end
-
+ RFlow::Configuration::RubyDSL.configure {|c| block.call(c) }
  RFlow::Configuration.merge_defaults!
-
- RFlow.run nil, false
+ RFlow.run! nil, false
  end

  # TODO: figure out a way to get rid of this sleep, as there
  # should be a better way to figure out when RFlow is done
- sleep(2)
+ sleep(5)

- # Shut down the reactor and the thread
+ # Shut down the workers, the reactor, and the thread
+ RFlow.master.shutdown! 'SIGQUIT'
  EM.run { EM.stop }
  rflow_thread.join
  end

-
  it "should run a non-sharded workflow" do
-
  run_rflow_with_dsl do |c|
- c.setting('rflow.log_level', 'DEBUG')
- c.setting('rflow.application_directory_path', @temp_directory_path)
- c.setting('rflow.application_name', 'nonsharded_test')
+ c.setting 'rflow.log_level', 'FATAL'
+ c.setting 'rflow.application_directory_path', @temp_directory_path
+ c.setting 'rflow.application_name', 'nonsharded_test'

  c.component 'generate_ints', 'RFlow::Components::GenerateIntegerSequence', 'start' => 20, 'finish' => 30
  c.component 'output', 'RFlow::Components::FileOutput', 'output_file_path' => 'out'
@@ -75,8 +64,8 @@ describe RFlow do
  c.connect 'generate_ints2#even_odd_out' => 'output_even_odd2#in'
  end

- RFlow.master.shards.count.should == 1
- RFlow.master.shards.first.workers.count.should == 1
+ RFlow.master.should have(1).shard
+ RFlow.master.shards.first.should have(1).worker

  output_files = {
  'out' => [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30],
@@ -88,19 +77,17 @@ describe RFlow do
  }

  output_files.each do |file_name, expected_contents|
- File.exist?(File.join(@temp_directory_path, file_name)).should be_true
+ File.exist?(File.join(@temp_directory_path, file_name)).should be true
  File.readlines(file_name).map(&:to_i).should == expected_contents
  end
  end

-
  it "should run a sharded workflow" do
  run_rflow_with_dsl do |c|
- c.setting('rflow.log_level', 'DEBUG')
- c.setting('rflow.application_directory_path', @temp_directory_path)
- c.setting('rflow.application_name', 'sharded_test')
+ c.setting 'rflow.log_level', 'FATAL'
+ c.setting 'rflow.application_directory_path', @temp_directory_path
+ c.setting 'rflow.application_name', 'sharded_test'

- # Instantiate components
  c.shard 's1', :process => 3 do |s|
  s.component 'generate_ints1', 'RFlow::Components::GenerateIntegerSequence', 'start' => 0, 'finish' => 10, 'step' => 3
  end
@@ -119,7 +106,6 @@ describe RFlow do
  c.component 'output3', 'RFlow::Components::FileOutput', 'output_file_path' => 'out3'
  c.component 'output_all', 'RFlow::Components::FileOutput', 'output_file_path' => 'out_all'

- # Hook components together
  c.connect 'generate_ints1#out' => 'output1#in'
  c.connect 'generate_ints2#out' => 'output2#in'
  c.connect 'generate_ints3#out' => 'output3#in'
@@ -128,7 +114,7 @@
  c.connect 'generate_ints3#out' => 'output_all#in'
  end

- RFlow.master.shards.count.should == 4
+ RFlow.master.should have(4).shards
  RFlow.master.shards.map(&:count).should == [1, 3, 2, 2]
  RFlow.master.shards.map(&:workers).map(&:count).should == [1, 3, 2, 2]

@@ -140,7 +126,7 @@
  }

  output_files.each do |file_name, expected_contents|
- File.exist?(File.join(@temp_directory_path, file_name)).should be_true
+ File.exist?(File.join(@temp_directory_path, file_name)).should be true
  File.readlines(file_name).map(&:to_i).sort.should == expected_contents.sort
  end
  end
@@ -148,29 +134,17 @@ describe RFlow do
  end

  context "when executing via the rflow binary" do
- before(:each) do
- @original_directory_path = Dir.getwd
- @run_directory_path = File.join(@temp_directory_path, 'run')
- @log_directory_path = File.join(@temp_directory_path, 'log')
- Dir.mkdir @run_directory_path
- Dir.mkdir @log_directory_path
- Dir.chdir @temp_directory_path
- end
-
- after(:each) do
- Dir.chdir @original_directory_path
- end
-
- def execute_rflow(rflow_args)
- r = {}
- r[:stdout], r[:stderr], r[:status] = Open3.capture3("bundle exec rflow #{rflow_args}")
- r
+ def execute_rflow(args)
+ stdout, stderr, status = Open3.capture3("bundle exec rflow #{args}")
+ {:stdout => stdout, :stderr => stderr, :status => status}
  end

  context "with a simple ruby DSL config file" do
+ let(:config_file_name) { 'input_config' }
+ let(:db_file_name) { 'outdb' }
+
  before(:each) do
- @config_file_name = 'input_config'
- File.open('input_config', 'w+') do |file|
+ File.open(config_file_name, 'w+') do |file|
  file.write <<-EOF
  RFlow::Configuration::RubyDSL.configure do |c|
  c.setting 'mysetting', 'myvalue'
@@ -180,9 +154,7 @@ describe RFlow do
  end

  it "should load a ruby dsl file into a sqlite DB" do
- db_file_name = 'outdb'
-
- r = execute_rflow("load -d #{db_file_name} -c #{@config_file_name}")
+ r = execute_rflow("load -d #{db_file_name} -c #{config_file_name}")

  # Make sure that the process execution worked
  r[:status].exitstatus.should == 0
@@ -195,31 +167,30 @@ describe RFlow do
  end

  it "should not load a database if the database file already exists" do
- db_file_name = 'outdb'
- File.open(db_file_name, 'w') { |file| file.write 'boom' }
+ File.open(db_file_name, 'w') {|file| file.write 'boom' }

- r = execute_rflow("load -d #{db_file_name} -c #{@config_file_name}")
+ r = execute_rflow("load -d #{db_file_name} -c #{config_file_name}")

  # Make sure that the process execution worked
  r[:status].exitstatus.should == 1
  r[:stderr].should == ''
  r[:stdout].should match /Config database.*#{db_file_name}.*exists/
  end
-
  end

  context "with a complex, sharded ruby DSL config file" do
+ let(:config_file_name) { 'input_config' }
+ let(:db_file_name) { 'config_db' }
+ let(:app_name) { 'sharded_bin_test' }
+
  before(:each) do
- @config_file_name = 'input_config'
- @db_file_name = 'config_db'
- @app_name = 'sharded_bin_test'
- File.open(@config_file_name, 'w+') do |file|
+ File.open(config_file_name, 'w+') do |file|
  file.write <<-EOF
  RFlow::Configuration::RubyDSL.configure do |c|
  c.setting('rflow.log_level', 'INFO')
  c.setting('rflow.application_directory_path', '#{@temp_directory_path}')
- c.setting('rflow.application_name', '#{@app_name}')
- # Instantiate components
+ c.setting('rflow.application_name', '#{app_name}')
+
  c.shard 's1', :process => 3 do |s|
  s.component 'generate_ints1', 'RFlow::Components::GenerateIntegerSequence', 'start' => 0, 'finish' => 10, 'step' => 3
  end
@@ -233,7 +204,7 @@ describe RFlow do
  end
  c.component 'output3', 'RFlow::Components::FileOutput', 'output_file_path' => 'out3'
  c.component 'output_all', 'RFlow::Components::FileOutput', 'output_file_path' => 'out_all'
- # Hook components together
+
  c.connect 'generate_ints1#out' => 'output1#in'
  c.connect 'generate_ints2#out' => 'output2#in'
  c.connect 'generate_ints3#out' => 'output3#in'
@@ -243,14 +214,14 @@ describe RFlow do
  end
  EOF
  end
- r = execute_rflow("load -d #{@db_file_name} -c #{@config_file_name}")
+ r = execute_rflow("load -d #{db_file_name} -c #{config_file_name}")
  r[:status].exitstatus.should == 0
  r[:stderr].should == ''
- r[:stdout].should match /Successfully initialized database.*#{@db_file_name}/
+ r[:stdout].should match /Successfully initialized database.*#{db_file_name}/
  end

  it "should not start if the components aren't loaded" do
- r = execute_rflow("start -d #{@db_file_name} -f")
+ r = execute_rflow("start -d #{db_file_name} -f")

  r[:status].exitstatus.should == 1
  r[:stderr].should == ''
@@ -258,64 +229,63 @@ describe RFlow do
  end

  it "should daemonize and run in the background" do
- r = execute_rflow("start -d #{@db_file_name} -e #{@extensions_file_name}")
+ begin
+ r = execute_rflow("start -d #{db_file_name} -e #{@extensions_file_name}")

- r[:status].exitstatus.should == 0
- r[:stderr].should == ''
- r[:stdout].should_not match /error/i
+ r[:status].exitstatus.should == 0
+ r[:stderr].should == ''
+ r[:stdout].should_not match /error/i

- sleep 1 # give the daemon a chance to finish
+ sleep 2 # give the daemon a chance to finish

- log_contents = File.read("log/#{@app_name}.log").chomp
- log_lines = log_contents.split("\n")
+ log_contents = File.read("log/#{app_name}.log").chomp
+ log_lines = log_contents.split("\n")

- puts '++++++++++++++++++++'
- puts log_contents
- puts '++++++++++++++++++++'
+ log_lines.each {|line| line.should_not match /^ERROR/ }
+ log_lines.each {|line| line.should_not match /^DEBUG/ }

- # Log file testing
- log_lines.each { |line| line.should_not match /^ERROR/ }
- log_lines.each { |line| line.should_not match /^DEBUG/ }
+ # Grab all the pids from the log, which seems to be the only
+ # reliable way to get them
+ log_pids = log_lines.map {|line| /\((\d+)\)/.match(line)[1].to_i }.uniq

- # Grab all the pids from the log, which seems to be the only
- # reliable way to get them
- log_pids = log_lines.map { |line| /\((\d+)\)/.match(line)[1].to_i }.uniq
+ initial_pid = r[:status].pid
+ master_pid = File.read("run/#{app_name}.pid").chomp.to_i
+ worker_pids = log_pids - [initial_pid, master_pid]

- initial_pid = r[:status].pid
- master_pid = File.read("run/#{@app_name}.pid").chomp.to_i
- worker_pids = log_pids - [initial_pid, master_pid]
+ log_pids.should include initial_pid
+ log_pids.should include master_pid

- log_pids.should include initial_pid
- log_pids.should include master_pid
+ worker_pids.should have(8).pids
+ worker_pids.should_not include 0

- worker_pids.size.should == 8
- worker_pids.should_not include 0
+ expect { Process.kill(0, initial_pid) }.to raise_error(Errno::ESRCH)
+ ([master_pid] + worker_pids).each do |pid|
+ Process.kill(0, pid).should == 1
+ end

- # Process checks
- expect { Process.kill(0, initial_pid) }.to raise_error(Errno::ESRCH)
- ([master_pid] + worker_pids).each do |pid|
- Process.kill(0, pid).should == 1
- end
+ output_files = {
+ 'out1' => [0, 3, 6, 9] * 3,
+ 'out2' => (20..30).to_a * 2,
+ 'out3' => (100..105).to_a,
+ 'out_all' => [0, 3, 6, 9] * 3 + (20..30).to_a * 2 + (100..105).to_a
+ }

- # Output checks
- output_files = {
- 'out1' => [0, 3, 6, 9] * 3,
- 'out2' => (20..30).to_a * 2,
- 'out3' => (100..105).to_a,
- 'out_all' => [0, 3, 6, 9] * 3 + (20..30).to_a * 2 + (100..105).to_a
- }
-
- output_files.each do |file_name, expected_contents|
- File.exist?(File.join(@temp_directory_path, file_name)).should be_true
- File.readlines(file_name).map(&:to_i).sort.should == expected_contents.sort
- end
+ output_files.each do |file_name, expected_contents|
+ File.exist?(File.join(@temp_directory_path, file_name)).should be true
+ File.readlines(file_name).map(&:to_i).sort.should == expected_contents.sort
+ end

- # Terminate the master
- Process.kill("TERM", master_pid).should == 1
+ # Terminate the master
+ Process.kill("TERM", master_pid).should == 1

- # Make sure everything is dead
- ([master_pid] + worker_pids).each do |pid|
- expect { Process.kill(0, pid) }.to raise_error(Errno::ESRCH)
+ # Make sure everything is dead after a second
+ sleep 1
+ ([master_pid] + worker_pids).each do |pid|
+ expect { Process.kill(0, pid) }.to raise_error(Errno::ESRCH)
+ end
+ rescue Exception => e
+ Process.kill("TERM", master_pid) if master_pid
+ raise
  end
  end
  end
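
The binary-level examples above shell out to the rflow executable through Open3, which doubles as a record of the operator-facing workflow. A sketch of those invocations (file names are placeholders; the flags are exactly as exercised in the spec above):

    require 'open3'

    # compile a Ruby DSL config file into a sqlite configuration database
    Open3.capture3('bundle exec rflow load -d config_db -c input_config')

    # start the daemon (master plus shard workers) against that database,
    # loading component extensions first
    Open3.capture3('bundle exec rflow start -d config_db -e my_extensions.rb')

    # without the components loaded, the spec expects 'start -d config_db -f'
    # to exit with status 1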