toiler 0.3.0 → 0.3.1.beta1

data/lib/toiler/actor/supervisor.rb CHANGED
@@ -1,55 +1,55 @@
- require 'toiler/actor/fetcher'
- require 'toiler/actor/processor'
-
- module Toiler
-   module Actor
-     # Actor that starts and supervises Toiler's actors
-     class Supervisor < Concurrent::Actor::RestartingContext
-       include Utils::ActorLogging
-
-       attr_accessor :client
-
-       def initialize
-         @client = ::Aws::SQS::Client.new
-         spawn_fetchers
-         spawn_processors
-       end
-
-       def on_message(_msg)
-         pass
-       end
-
-       def queues
-         Toiler.worker_class_registry
-       end
-
-       def spawn_fetchers
-         queues.each do |queue, _klass|
-           begin
-             fetcher = Actor::Fetcher.spawn! name: "fetcher_#{queue}".to_sym,
-                                             supervise: true, args: [queue, client]
-             Toiler.set_fetcher queue, fetcher
-           rescue StandardError => e
-             error "Failed to start Fetcher for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
-           end
-         end
-       end
-
-       def spawn_processors
-         queues.each do |queue, klass|
-           name = "processor_pool_#{queue}".to_sym
-           count = klass.concurrency
-           begin
-             pool = Concurrent::Actor::Utils::Pool.spawn! name, count do |index|
-               Actor::Processor.spawn name: "processor_#{queue}_#{index}".to_sym,
-                                      supervise: true, args: [queue]
-             end
-             Toiler.set_processor_pool queue, pool
-           rescue StandardError => e
-             error "Failed to spawn Processor Pool for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
-           end
-         end
-       end
-     end
-   end
- end
+ require 'toiler/actor/fetcher'
+ require 'toiler/actor/processor'
+
+ module Toiler
+   module Actor
+     # Actor that starts and supervises Toiler's actors
+     class Supervisor < Concurrent::Actor::RestartingContext
+       include Utils::ActorLogging
+
+       attr_accessor :client
+
+       def initialize
+         @client = ::Aws::SQS::Client.new
+         spawn_fetchers
+         spawn_processors
+       end
+
+       def on_message(_msg)
+         pass
+       end
+
+       def queues
+         Toiler.worker_class_registry
+       end
+
+       def spawn_fetchers
+         queues.each do |queue, _klass|
+           begin
+             fetcher = Actor::Fetcher.spawn! name: "fetcher_#{queue}".to_sym,
+                                             supervise: true, args: [queue, client]
+             Toiler.set_fetcher queue, fetcher
+           rescue StandardError => e
+             error "Failed to start Fetcher for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
+           end
+         end
+       end
+
+       def spawn_processors
+         queues.each do |queue, klass|
+           name = "processor_pool_#{queue}".to_sym
+           count = klass.concurrency
+           begin
+             pool = Concurrent::Actor::Utils::Pool.spawn! name, count do |index|
+               Actor::Processor.spawn name: "processor_#{queue}_#{index}".to_sym,
+                                      supervise: true, args: [queue]
+             end
+             Toiler.set_processor_pool queue, pool
+           rescue StandardError => e
+             error "Failed to spawn Processor Pool for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
+           end
+         end
+       end
+     end
+   end
+ end
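
For context on what the supervisor supervises, here is a minimal sketch. OrdersWorker and the 'orders' queue are invented, and the toiler_options DSL is assumed from the gem's worker API: including Toiler::Worker registers the class in Toiler.worker_class_registry, which is what Supervisor#queues iterates and where klass.concurrency comes from. The spawn call at the end is the one the CLI performs (see cli.rb below).

  require 'toiler'

  # Hypothetical worker, shown only to illustrate what the supervisor reads.
  class OrdersWorker
    include Toiler::Worker
    toiler_options queue: 'orders', concurrency: 5

    def perform(sqs_msg, body)
      # message handling goes here
    end
  end

  # Normally done by Toiler::CLI#start_supervisor: spawns one Fetcher and a
  # pool of `concurrency` Processor actors per registered queue.
  Toiler::Actor::Supervisor.spawn! :supervisor
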
data/lib/toiler/actor/utils/actor_logging.rb CHANGED
@@ -1,28 +1,28 @@
- module Toiler
-   module Actor
-     module Utils
-       # Provides helper methods for logging
-       module ActorLogging
-         def error(msg)
-           log Logger::Severity::ERROR, msg
-         end
-
-         def info(msg)
-           log Logger::Severity::INFO, msg
-         end
-
-         def debug(msg)
-           log Logger::Severity::DEBUG, msg
-         end
-
-         def warn(msg)
-           log Logger::Severity::WARN, msg
-         end
-
-         def fatal(msg)
-           log Logger::Severity::FATAL, msg
-         end
-       end
-     end
-   end
- end
+ module Toiler
+   module Actor
+     module Utils
+       # Provides helper methods for logging
+       module ActorLogging
+         def error(msg)
+           log Logger::Severity::ERROR, msg
+         end
+
+         def info(msg)
+           log Logger::Severity::INFO, msg
+         end
+
+         def debug(msg)
+           log Logger::Severity::DEBUG, msg
+         end
+
+         def warn(msg)
+           log Logger::Severity::WARN, msg
+         end
+
+         def fatal(msg)
+           log Logger::Severity::FATAL, msg
+         end
+       end
+     end
+   end
+ end
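
A minimal usage sketch of the logging mixin (StatusActor is invented): any Concurrent::Actor context that includes ActorLogging gains these severity helpers, which forward to the actor's own log method.

  require 'toiler'

  # Hypothetical actor; only the ActorLogging calls are the point here.
  class StatusActor < Concurrent::Actor::RestartingContext
    include Toiler::Actor::Utils::ActorLogging

    def on_message(msg)
      debug "received #{msg.inspect}"  # delegates to Concurrent::Actor's logger
      pass
    end
  end
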
data/lib/toiler/aws/message.rb CHANGED
@@ -1,64 +1,64 @@
- module Toiler
-   module Aws
-     # SQS Message abstraction
-     # Provides methods for querying and acting on a SQS message
-     class Message
-       attr_accessor :client, :queue_url, :data
-
-       def initialize(client, queue_url, data)
-         @client = client
-         @queue_url = queue_url
-         @data = data
-       end
-
-       def delete
-         client.delete_message(
-           queue_url: queue_url,
-           receipt_handle: data.receipt_handle
-         )
-       end
-
-       def change_visibility(options)
-         client.change_message_visibility(
-           options.merge(queue_url: queue_url, receipt_handle: receipt_handle)
-         )
-       end
-
-       def visibility_timeout=(timeout)
-         client.change_message_visibility(
-           queue_url: queue_url,
-           receipt_handle: data.receipt_handle,
-           visibility_timeout: timeout
-         )
-       end
-
-       def message_id
-         data.message_id
-       end
-
-       def receipt_handle
-         data.receipt_handle
-       end
-
-       def md5_of_body
-         data.md5_of_body
-       end
-
-       def body
-         data.body
-       end
-
-       def attributes
-         data.attributes
-       end
-
-       def md5_of_message_attributes
-         data.md5_of_message_attributes
-       end
-
-       def message_attributes
-         data.message_attributes
-       end
-     end
-   end
- end
+ module Toiler
+   module Aws
+     # SQS Message abstraction
+     # Provides methods for querying and acting on a SQS message
+     class Message
+       attr_accessor :client, :queue_url, :data
+
+       def initialize(client, queue_url, data)
+         @client = client
+         @queue_url = queue_url
+         @data = data
+       end
+
+       def delete
+         client.delete_message(
+           queue_url: queue_url,
+           receipt_handle: data.receipt_handle
+         )
+       end
+
+       def change_visibility(options)
+         client.change_message_visibility(
+           options.merge(queue_url: queue_url, receipt_handle: receipt_handle)
+         )
+       end
+
+       def visibility_timeout=(timeout)
+         client.change_message_visibility(
+           queue_url: queue_url,
+           receipt_handle: data.receipt_handle,
+           visibility_timeout: timeout
+         )
+       end
+
+       def message_id
+         data.message_id
+       end
+
+       def receipt_handle
+         data.receipt_handle
+       end
+
+       def md5_of_body
+         data.md5_of_body
+       end
+
+       def body
+         data.body
+       end
+
+       def attributes
+         data.attributes
+       end
+
+       def md5_of_message_attributes
+         data.md5_of_message_attributes
+       end
+
+       def message_attributes
+         data.message_attributes
+       end
+     end
+   end
+ end
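
A short usage sketch for the message wrapper (the queue name and handle_order are placeholders): messages returned by Queue#receive_messages come back already wrapped, so a consumer can extend the visibility window while working and delete the message once done.

  require 'toiler/aws/queue'
  require 'json'

  queue = Toiler::Aws::Queue.new('orders', ::Aws::SQS::Client.new)
  queue.receive_messages(max_number_of_messages: 10).each do |message|
    message.visibility_timeout = 300        # ask SQS for 5 more minutes
    handle_order(JSON.parse(message.body))  # handle_order stands in for real work
    message.delete                          # acknowledge by deleting from SQS
  end
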
data/lib/toiler/aws/queue.rb CHANGED
@@ -1,61 +1,61 @@
- require 'toiler/aws/message'
-
- module Toiler
-   module Aws
-     # SQS Queue abstraction
-     # Provides methods for querying and acting on a SQS queue
-     class Queue
-       attr_accessor :name, :client, :url
-
-       def initialize(name, client = nil)
-         @name = name
-         @client = client || ::Aws::SQS::Client.new
-         @url = client.get_queue_url(queue_name: name).queue_url
-       end
-
-       def visibility_timeout
-         client.get_queue_attributes(
-           queue_url: url,
-           attribute_names: ['VisibilityTimeout']
-         ).attributes['VisibilityTimeout'].to_i
-       end
-
-       def delete_messages(options)
-         client.delete_message_batch options.merge queue_url: url
-       end
-
-       def send_message(options)
-         client.send_message sanitize_message_body options.merge queue_url: url
-       end
-
-       def send_messages(options)
-         client.send_message_batch(
-           sanitize_message_body options.merge queue_url: url
-         )
-       end
-
-       def receive_messages(options)
-         client.receive_message(options.merge(queue_url: url))
-               .messages
-               .map { |m| Message.new(client, url, m) }
-       end
-
-       private
-
-       def sanitize_message_body(options)
-         messages = options[:entries] || [options]
-
-         messages.each do |m|
-           body = m[:message_body]
-           if body.is_a?(Hash)
-             m[:message_body] = JSON.dump(body)
-           elsif !body.is_a? String
-             fail ArgumentError, "Body must be a String, found #{body.class}"
-           end
-         end
-
-         options
-       end
-     end
-   end
- end
+ require 'toiler/aws/message'
+
+ module Toiler
+   module Aws
+     # SQS Queue abstraction
+     # Provides methods for querying and acting on a SQS queue
+     class Queue
+       attr_accessor :name, :client, :url
+
+       def initialize(name, client = nil)
+         @name = name
+         @client = client || ::Aws::SQS::Client.new
+         @url = client.get_queue_url(queue_name: name).queue_url
+       end
+
+       def visibility_timeout
+         client.get_queue_attributes(
+           queue_url: url,
+           attribute_names: ['VisibilityTimeout']
+         ).attributes['VisibilityTimeout'].to_i
+       end
+
+       def delete_messages(options)
+         client.delete_message_batch options.merge queue_url: url
+       end
+
+       def send_message(options)
+         client.send_message sanitize_message_body options.merge queue_url: url
+       end
+
+       def send_messages(options)
+         client.send_message_batch(
+           sanitize_message_body options.merge queue_url: url
+         )
+       end
+
+       def receive_messages(options)
+         client.receive_message(options.merge(queue_url: url))
+               .messages
+               .map { |m| Message.new(client, url, m) }
+       end
+
+       private
+
+       def sanitize_message_body(options)
+         messages = options[:entries] || [options]
+
+         messages.each do |m|
+           body = m[:message_body]
+           if body.is_a?(Hash)
+             m[:message_body] = JSON.dump(body)
+           elsif !body.is_a? String
+             fail ArgumentError, "Body must be a String, found #{body.class}"
+           end
+         end
+
+         options
+       end
+     end
+   end
+ end
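
A sketch of the body-sanitization path (queue name and payloads are invented): a Hash message_body is serialized with JSON.dump before reaching SQS, a String passes through unchanged, and anything else raises ArgumentError.

  require 'toiler/aws/queue'

  queue = Toiler::Aws::Queue.new('orders', ::Aws::SQS::Client.new)

  # Single message: the Hash body is dumped to JSON by sanitize_message_body.
  queue.send_message(message_body: { id: 42, action: 'ship' })

  # Batch: each entry is sanitized individually; SQS batch entries also need an :id.
  queue.send_messages(entries: [
    { id: 'a', message_body: 'already a string' },
    { id: 'b', message_body: { nested: true } }
  ])
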
data/lib/toiler/cli.rb CHANGED
@@ -1,135 +1,153 @@
- require 'singleton'
- require 'timeout'
- require 'optparse'
- require 'toiler'
-
- module Toiler
-   # See: https://github.com/mperham/sidekiq/blob/33f5d6b2b6c0dfaab11e5d39688cab7ebadc83ae/lib/sidekiq/cli.rb#L20
-   class Shutdown < Interrupt; end
-
-   # Command line client interface
-   class CLI
-     include Singleton
-
-     attr_accessor :supervisor
-
-     def run(args)
-       @self_read, @self_write = IO.pipe
-
-       trap_signals
-       options = Utils::ArgumentParser.parse(args)
-       Utils::EnvironmentLoader.load(options)
-       daemonize
-       write_pid
-       load_concurrent
-       start_supervisor
-
-       handle_stop
-     end
-
-     private
-
-     def handle_stop
-       while (readable_io = IO.select([@self_read]))
-         handle_signal(readable_io.first[0].gets.strip)
-       end
-     rescue Interrupt
-       puts 'Waiting up to 60 seconds for actors to finish...'
-       supervisor.ask(:terminate!).wait(60)
-     ensure
-       exit 0
-     end
-
-     def shutdown_pools
-       Concurrent.global_fast_executor.shutdown
-       Concurrent.global_io_executor.shutdown
-       return if Concurrent.global_io_executor.wait_for_termination(60)
-       Concurrent.global_io_executor.kill
-     end
-
-     def start_supervisor
-       require 'toiler/actor/supervisor'
-       @supervisor = Actor::Supervisor.spawn! :supervisor
-     end
-
-     def trap_signals
-       %w(INT TERM QUIT USR1 USR2 TTIN).each do |sig|
-         begin
-           trap sig do
-             @self_write.puts(sig)
-           end
-         rescue ArgumentError
-           puts "System does not support signal #{sig}"
-         end
-       end
-     end
-
-     def print_stacktraces
-       return unless Toiler.logger
-       Toiler.logger.info "-------------------\nReceived QUIT, dumping threads:"
-       Thread.list.each do |t|
-         id = t.object_id
-         Toiler.logger.info "[thread:#{id}] #{t.backtrace.join("\n[thread:#{id}] ")}"
-       end
-       Toiler.logger.info '-------------------'
-     end
-
-     def handle_signal(signal)
-       case signal
-       when 'QUIT'
-         print_stacktraces
-       when 'INT', 'TERM'
-         fail Interrupt
-       end
-     end
-
-     def load_concurrent
-       fail 'Concurrent should not be required now' if defined?(::Concurrent)
-       require 'concurrent-edge'
-       Concurrent.global_logger = lambda do |level, progname, msg = nil, &block|
-         Toiler.logger.log(level, msg, progname, &block)
-       end if Toiler.logger
-     end
-
-     def daemonize
-       return unless Toiler.options[:daemon]
-       fail 'Logfile required when daemonizing' unless Toiler.options[:logfile]
-
-       files_to_reopen = []
-       ObjectSpace.each_object(File) do |file|
-         files_to_reopen << file unless file.closed?
-       end
-
-       Process.daemon(true, true)
-
-       reopen_files(files_to_reopen)
-       reopen_std
-     end
-
-     def reopen_files(files_to_reopen)
-       files_to_reopen.each do |file|
-         begin
-           file.reopen file.path, 'a+'
-           file.sync = true
-         rescue StandardError
-           puts "Failed to reopen file #{file}"
-         end
-       end
-     end
-
-     def reopen_std
-       [$stdout, $stderr].each do |io|
-         File.open(Toiler.options[:logfile], 'ab') do |f|
-           io.reopen(f)
-         end
-         io.sync = true
-       end
-       $stdin.reopen('/dev/null')
-     end
-
-     def write_pid
-       file = Toiler.options[:pidfile]
-       File.write file, Process.pid if file
-     end
-   end
- end
+ require 'singleton'
+ require 'timeout'
+ require 'optparse'
+ require 'toiler'
+
+ module Toiler
+   # See: https://github.com/mperham/sidekiq/blob/33f5d6b2b6c0dfaab11e5d39688cab7ebadc83ae/lib/sidekiq/cli.rb#L20
+   class Shutdown < Interrupt; end
+
+   # Command line client interface
+   class CLI
+     include Singleton
+
+     attr_accessor :supervisor
+
+     def run(args)
+       @self_read, @self_write = IO.pipe
+
+       trap_signals
+       options = Utils::ArgumentParser.parse(args)
+       Utils::EnvironmentLoader.load(options)
+       daemonize
+       write_pid
+       load_concurrent
+       start_supervisor
+
+       handle_stop
+     end
+
+     private
+
+     def handle_stop
+       while (readable_io = IO.select([@self_read]))
+         handle_signal(readable_io.first[0].gets.strip)
+       end
+     rescue Interrupt
+       puts 'Waiting up to 60 seconds for actors to finish...'
+       supervisor.ask(:terminate!).wait(60)
+     ensure
+       exit 0
+     end
+
+     def shutdown_pools
+       Concurrent.global_fast_executor.shutdown
+       Concurrent.global_io_executor.shutdown
+       return if Concurrent.global_io_executor.wait_for_termination(60)
+       Concurrent.global_io_executor.kill
+     end
+
+     def start_supervisor
+       require 'toiler/actor/supervisor'
+       @supervisor = Actor::Supervisor.spawn! :supervisor
+     end
+
+     def trap_signals
+       %w(INT TERM QUIT USR1 USR2 TTIN).each do |sig|
+         begin
+           trap sig do
+             @self_write.puts(sig)
+           end
+         rescue ArgumentError
+           puts "System does not support signal #{sig}"
+         end
+       end
+     end
+
+     def print_stacktraces
+       return unless Toiler.logger
+       Toiler.logger.info "-------------------\nReceived QUIT, dumping threads:"
+       Thread.list.each do |t|
+         id = t.object_id
+         Toiler.logger.info "[thread:#{id}] #{t.backtrace.join("\n[thread:#{id}] ")}"
+       end
+       Toiler.logger.info '-------------------'
+     end
+
+     def print_status
+       return unless Toiler.logger
+       Toiler.logger.info "-------------------\nReceived QUIT, dumping status:"
+       Toiler.queues.each do |queue|
+         fetcher = Toiler.fetcher(queue).send(:core).send(:context)
+         processor_pool = Toiler.processor_pool(queue).send(:core).send(:context)
+         processors = processor_pool.instance_variable_get(:@workers).collect{|w| w.send(:core).send(:context)}
+         busy_processors = processors.count{|pr| pr.executing?}
+         Toiler.logger.info "[fetcher:#{fetcher.name}] [executing:#{fetcher.executing?}] [scheduled:#{fetcher.scheduled?}] [free_processors:#{fetcher.get_free_processors}]"
+         Toiler.logger.info "[processor_pool:#{processor_pool.name}] [workers:#{processors.count}] [busy:#{busy_processors}]"
+         processors.each do |processor|
+           Toiler.logger.info "[processor:#{processor.name}] [executing:#{processor.executing?}] [thread:#{processor.thread.object_id}]"
+         end
+       end
+       Toiler.logger.info '-------------------'
+     end
+
+     def handle_signal(signal)
+       case signal
+       when 'QUIT'
+         print_stacktraces
+         print_status
+       when 'INT', 'TERM'
+         fail Interrupt
+       end
+     end
+
+     def load_concurrent
+       fail 'Concurrent should not be required now' if defined?(::Concurrent)
+       require 'concurrent-edge'
+       Concurrent.global_logger = lambda do |level, progname, msg = nil, &block|
+         Toiler.logger.log(level, msg, progname, &block)
+       end if Toiler.logger
+     end
+
+     def daemonize
+       return unless Toiler.options[:daemon]
+       fail 'Logfile required when daemonizing' unless Toiler.options[:logfile]
+
+       files_to_reopen = []
+       ObjectSpace.each_object(File) do |file|
+         files_to_reopen << file unless file.closed?
+       end
+
+       Process.daemon(true, true)
+
+       reopen_files(files_to_reopen)
+       reopen_std
+     end
+
+     def reopen_files(files_to_reopen)
+       files_to_reopen.each do |file|
+         begin
+           file.reopen file.path, 'a+'
+           file.sync = true
+         rescue StandardError
+           puts "Failed to reopen file #{file}"
+         end
+       end
+     end
+
+     def reopen_std
+       [$stdout, $stderr].each do |io|
+         File.open(Toiler.options[:logfile], 'ab') do |f|
+           io.reopen(f)
+         end
+         io.sync = true
+       end
+       $stdin.reopen('/dev/null')
+     end
+
+     def write_pid
+       file = Toiler.options[:pidfile]
+       File.write file, Process.pid if file
+     end
+   end
+ end
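
A small sketch of driving these signal handlers from outside the process (the pidfile path and startup option are assumed for illustration): QUIT now logs both the thread dump and the new per-queue status, while INT/TERM trigger the graceful shutdown with its 60-second wait.

  # Assumes toiler wrote its pid via the pidfile option; the path is illustrative.
  pid = File.read('/var/run/toiler.pid').to_i

  Process.kill('QUIT', pid)  # dump thread backtraces plus fetcher/processor status
  Process.kill('TERM', pid)  # graceful stop: the supervisor gets up to 60 seconds
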