toiler 0.3.6 → 0.4.0.beta1

data/lib/toiler/actor/supervisor.rb CHANGED
@@ -1,55 +1,55 @@
- require 'toiler/actor/fetcher'
- require 'toiler/actor/processor'
-
- module Toiler
-   module Actor
-     # Actor that starts and supervises Toiler's actors
-     class Supervisor < Concurrent::Actor::RestartingContext
-       include Utils::ActorLogging
-
-       attr_accessor :client
-
-       def initialize
-         @client = ::Aws::SQS::Client.new
-         spawn_fetchers
-         spawn_processors
-       end
-
-       def on_message(_msg)
-         pass
-       end
-
-       def queues
-         Toiler.worker_class_registry
-       end
-
-       def spawn_fetchers
-         queues.each do |queue, _klass|
-           begin
-             fetcher = Actor::Fetcher.spawn! name: "fetcher_#{queue}".to_sym,
-                                             supervise: true, args: [queue, client]
-             Toiler.set_fetcher queue, fetcher
-           rescue StandardError => e
-             error "Failed to start Fetcher for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
-           end
-         end
-       end
-
-       def spawn_processors
-         queues.each do |queue, klass|
-           name = "processor_pool_#{queue}".to_sym
-           count = klass.concurrency
-           begin
-             pool = Concurrent::Actor::Utils::Pool.spawn! name, count do |index|
-               Actor::Processor.spawn name: "processor_#{queue}_#{index}".to_sym,
-                                      supervise: true, args: [queue]
-             end
-             Toiler.set_processor_pool queue, pool
-           rescue StandardError => e
-             error "Failed to spawn Processor Pool for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
-           end
-         end
-       end
-     end
-   end
- end
+ require 'toiler/actor/fetcher'
+ require 'toiler/actor/processor'
+
+ module Toiler
+   module Actor
+     # Actor that starts and supervises Toiler's actors
+     class Supervisor < Concurrent::Actor::RestartingContext
+       include Utils::ActorLogging
+
+       attr_accessor :client
+
+       def initialize
+         @client = ::Aws::SQS::Client.new
+         spawn_fetchers
+         spawn_processors
+       end
+
+       def on_message(_msg)
+         pass
+       end
+
+       def queues
+         Toiler.worker_class_registry
+       end
+
+       def spawn_fetchers
+         queues.each do |queue, _klass|
+           begin
+             fetcher = Actor::Fetcher.spawn! name: "fetcher_#{queue}".to_sym,
+                                             supervise: true, args: [queue, client]
+             Toiler.set_fetcher queue, fetcher
+           rescue StandardError => e
+             error "Failed to start Fetcher for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
+           end
+         end
+       end
+
+       def spawn_processors
+         queues.each do |queue, klass|
+           name = "processor_pool_#{queue}".to_sym
+           count = klass.concurrency
+           begin
+             pool = Concurrent::Actor::Utils::Pool.spawn! name, count do |index|
+               Actor::Processor.spawn name: "processor_#{queue}_#{index}".to_sym,
+                                      supervise: true, args: [queue]
+             end
+             Toiler.set_processor_pool queue, pool
+           rescue StandardError => e
+             error "Failed to spawn Processor Pool for queue #{queue}: #{e.message}\n#{e.backtrace.join("\n")}"
+           end
+         end
+       end
+     end
+   end
+ end
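For context: the Supervisor above spawns one Fetcher and one processor pool per entry in Toiler.worker_class_registry, keyed by queue name and sized by the worker's concurrency. A minimal worker sketch (not part of this diff; class name, queue name and option keys are illustrative, based on Toiler's worker DSL) would register itself roughly like this:

require 'toiler'

# Hypothetical worker: including Toiler::Worker registers the class in
# Toiler.worker_class_registry under its queue, and `concurrency` is what
# Supervisor#spawn_processors reads via klass.concurrency.
class HardWorker
  include Toiler::Worker
  toiler_options queue: 'default', concurrency: 5

  def perform(sqs_msg, body)
    puts body
  end
end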
data/lib/toiler/actor/utils/actor_logging.rb CHANGED
@@ -1,28 +1,28 @@
- module Toiler
-   module Actor
-     module Utils
-       # Provides helper methods for logging
-       module ActorLogging
-         def error(msg)
-           log Logger::Severity::ERROR, msg
-         end
-
-         def info(msg)
-           log Logger::Severity::INFO, msg
-         end
-
-         def debug(msg)
-           log Logger::Severity::DEBUG, msg
-         end
-
-         def warn(msg)
-           log Logger::Severity::WARN, msg
-         end
-
-         def fatal(msg)
-           log Logger::Severity::FATAL, msg
-         end
-       end
-     end
-   end
- end
+ module Toiler
+   module Actor
+     module Utils
+       # Provides helper methods for logging
+       module ActorLogging
+         def error(msg)
+           log Logger::Severity::ERROR, msg
+         end
+
+         def info(msg)
+           log Logger::Severity::INFO, msg
+         end
+
+         def debug(msg)
+           log Logger::Severity::DEBUG, msg
+         end
+
+         def warn(msg)
+           log Logger::Severity::WARN, msg
+         end
+
+         def fatal(msg)
+           log Logger::Severity::FATAL, msg
+         end
+       end
+     end
+   end
+ end
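ActorLogging only wraps a log(severity, message) call, so it assumes the including class already provides one, as Concurrent::Actor contexts do. A hypothetical actor using the helpers (class name illustrative; the require path is assumed from the module nesting):

require 'concurrent-edge'
require 'toiler/actor/utils/actor_logging' # path assumed from the module nesting above

# Hypothetical actor: Concurrent::Actor::Context supplies #log, which the
# ActorLogging helpers delegate to with the chosen severity.
class EchoActor < Concurrent::Actor::RestartingContext
  include Toiler::Actor::Utils::ActorLogging

  def on_message(msg)
    debug "echoing #{msg.inspect}"
    msg
  rescue StandardError => e
    error "echo failed: #{e.message}"
    raise
  end
end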
data/lib/toiler/aws/message.rb CHANGED
@@ -1,64 +1,64 @@
- module Toiler
-   module Aws
-     # SQS Message abstraction
-     # Provides methods for querying and acting on a SQS message
-     class Message
-       attr_accessor :client, :queue_url, :data
-
-       def initialize(client, queue_url, data)
-         @client = client
-         @queue_url = queue_url
-         @data = data
-       end
-
-       def delete
-         client.delete_message(
-           queue_url: queue_url,
-           receipt_handle: data.receipt_handle
-         )
-       end
-
-       def change_visibility(options)
-         client.change_message_visibility(
-           options.merge(queue_url: queue_url, receipt_handle: receipt_handle)
-         )
-       end
-
-       def visibility_timeout=(timeout)
-         client.change_message_visibility(
-           queue_url: queue_url,
-           receipt_handle: data.receipt_handle,
-           visibility_timeout: timeout
-         )
-       end
-
-       def message_id
-         data.message_id
-       end
-
-       def receipt_handle
-         data.receipt_handle
-       end
-
-       def md5_of_body
-         data.md5_of_body
-       end
-
-       def body
-         data.body
-       end
-
-       def attributes
-         data.attributes
-       end
-
-       def md5_of_message_attributes
-         data.md5_of_message_attributes
-       end
-
-       def message_attributes
-         data.message_attributes
-       end
-     end
-   end
- end
+ module Toiler
+   module Aws
+     # SQS Message abstraction
+     # Provides methods for querying and acting on a SQS message
+     class Message
+       attr_accessor :client, :queue_url, :data
+
+       def initialize(client, queue_url, data)
+         @client = client
+         @queue_url = queue_url
+         @data = data
+       end
+
+       def delete
+         client.delete_message(
+           queue_url: queue_url,
+           receipt_handle: data.receipt_handle
+         )
+       end
+
+       def change_visibility(options)
+         client.change_message_visibility(
+           options.merge(queue_url: queue_url, receipt_handle: receipt_handle)
+         )
+       end
+
+       def visibility_timeout=(timeout)
+         client.change_message_visibility(
+           queue_url: queue_url,
+           receipt_handle: data.receipt_handle,
+           visibility_timeout: timeout
+         )
+       end
+
+       def message_id
+         data.message_id
+       end
+
+       def receipt_handle
+         data.receipt_handle
+       end
+
+       def md5_of_body
+         data.md5_of_body
+       end
+
+       def body
+         data.body
+       end
+
+       def attributes
+         data.attributes
+       end
+
+       def md5_of_message_attributes
+         data.md5_of_message_attributes
+       end
+
+       def message_attributes
+         data.message_attributes
+       end
+     end
+   end
+ end
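A hypothetical round trip (not part of the gem) showing how the wrapper is built from aws-sdk-sqs receive results; the queue name and timeout are placeholders:

require 'aws-sdk-sqs'
require 'toiler/aws/message' # path assumed from the class's namespace

client    = ::Aws::SQS::Client.new
queue_url = client.get_queue_url(queue_name: 'default').queue_url

client.receive_message(queue_url: queue_url, max_number_of_messages: 1).messages.each do |data|
  msg = Toiler::Aws::Message.new(client, queue_url, data)
  msg.visibility_timeout = 120 # buy more processing time for this receipt handle
  puts msg.body
  msg.delete                   # acknowledge once processing succeeds
end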
data/lib/toiler/aws/queue.rb CHANGED
@@ -1,61 +1,61 @@
- require 'toiler/aws/message'
-
- module Toiler
-   module Aws
-     # SQS Queue abstraction
-     # Provides methods for querying and acting on a SQS queue
-     class Queue
-       attr_accessor :name, :client, :url
-
-       def initialize(name, client = nil)
-         @name = name
-         @client = client || ::Aws::SQS::Client.new
-         @url = client.get_queue_url(queue_name: name).queue_url
-       end
-
-       def visibility_timeout
-         client.get_queue_attributes(
-           queue_url: url,
-           attribute_names: ['VisibilityTimeout']
-         ).attributes['VisibilityTimeout'].to_i
-       end
-
-       def delete_messages(options)
-         client.delete_message_batch options.merge queue_url: url
-       end
-
-       def send_message(options)
-         client.send_message sanitize_message_body options.merge queue_url: url
-       end
-
-       def send_messages(options)
-         client.send_message_batch(
-           sanitize_message_body options.merge queue_url: url
-         )
-       end
-
-       def receive_messages(options)
-         client.receive_message(options.merge(queue_url: url))
-               .messages
-               .map { |m| Message.new(client, url, m) }
-       end
-
-       private
-
-       def sanitize_message_body(options)
-         messages = options[:entries] || [options]
-
-         messages.each do |m|
-           body = m[:message_body]
-           if body.is_a?(Hash)
-             m[:message_body] = JSON.dump(body)
-           elsif !body.is_a? String
-             fail ArgumentError, "Body must be a String, found #{body.class}"
-           end
-         end
-
-         options
-       end
-     end
-   end
- end
+ require 'toiler/aws/message'
+
+ module Toiler
+   module Aws
+     # SQS Queue abstraction
+     # Provides methods for querying and acting on a SQS queue
+     class Queue
+       attr_accessor :name, :client, :url
+
+       def initialize(name, client = nil)
+         @name = name
+         @client = client || ::Aws::SQS::Client.new
+         @url = client.get_queue_url(queue_name: name).queue_url
+       end
+
+       def visibility_timeout
+         client.get_queue_attributes(
+           queue_url: url,
+           attribute_names: ['VisibilityTimeout']
+         ).attributes['VisibilityTimeout'].to_i
+       end
+
+       def delete_messages(options)
+         client.delete_message_batch options.merge queue_url: url
+       end
+
+       def send_message(options)
+         client.send_message sanitize_message_body options.merge queue_url: url
+       end
+
+       def send_messages(options)
+         client.send_message_batch(
+           sanitize_message_body options.merge queue_url: url
+         )
+       end
+
+       def receive_messages(options)
+         client.receive_message(options.merge(queue_url: url))
+               .messages
+               .map { |m| Message.new(client, url, m) }
+       end
+
+       private
+
+       def sanitize_message_body(options)
+         messages = options[:entries] || [options]
+
+         messages.each do |m|
+           body = m[:message_body]
+           if body.is_a?(Hash)
+             m[:message_body] = JSON.dump(body)
+           elsif !body.is_a? String
+             fail ArgumentError, "Body must be a String, found #{body.class}"
+           end
+         end
+
+         options
+       end
+     end
+   end
+ end
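A hypothetical usage sketch (not part of the gem): sanitize_message_body JSON-encodes Hash bodies and rejects anything that is neither Hash nor String, so both forms below are valid; the queue name and batch entry ids are placeholders:

require 'json'
require 'toiler/aws/queue' # path assumed from the class's namespace

queue = Toiler::Aws::Queue.new('default', ::Aws::SQS::Client.new)

# Hash bodies are serialized with JSON.dump before hitting SQS.
queue.send_message(message_body: { 'id' => 1, 'action' => 'resize' })

# Batch sends pass through the same sanitization, entry by entry.
queue.send_messages(entries: [
  { id: 'a', message_body: { 'id' => 2 } },
  { id: 'b', message_body: 'already a string' }
])

puts queue.visibility_timeout # current VisibilityTimeout attribute, in seconds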
data/lib/toiler/cli.rb CHANGED
@@ -1,164 +1,164 @@
- require 'singleton'
- require 'timeout'
- require 'optparse'
- require 'toiler'
-
- module Toiler
-   # See: https://github.com/mperham/sidekiq/blob/33f5d6b2b6c0dfaab11e5d39688cab7ebadc83ae/lib/sidekiq/cli.rb#L20
-   class Shutdown < Interrupt; end
-
-   # Command line client interface
-   class CLI
-     include Singleton
-
-     attr_accessor :supervisor
-
-     def run(args)
-       @self_read, @self_write = IO.pipe
-
-       trap_signals
-       options = Utils::ArgumentParser.parse(args)
-       Utils::EnvironmentLoader.load(options)
-       daemonize
-       write_pid
-       load_concurrent
-       start_supervisor
-
-       handle_stop
-     end
-
-     private
-
-     def handle_stop
-       while (readable_io = IO.select([@self_read]))
-         handle_signal(readable_io.first[0].gets.strip)
-       end
-     rescue Interrupt
-       Toiler.logger.info 'Received Interrupt, Waiting up to 60 seconds for actors to finish...'
-       success = supervisor.ask(:terminate!).wait(60)
-       if success
-         Toiler.logger.info 'Supervisor successfully terminated'
-       else
-         Toiler.logger.info 'Timeout waiting for Supervisor to terminate'
-       end
-     ensure
-       exit 0
-     end
-
-     def shutdown_pools
-       Concurrent.global_fast_executor.shutdown
-       Concurrent.global_io_executor.shutdown
-       return if Concurrent.global_io_executor.wait_for_termination(60)
-       Concurrent.global_io_executor.kill
-     end
-
-     def start_supervisor
-       require 'toiler/actor/supervisor'
-       @supervisor = Actor::Supervisor.spawn! :supervisor
-     end
-
-     def trap_signals
-       %w(INT TERM QUIT USR1 USR2 TTIN).each do |sig|
-         begin
-           trap sig do
-             @self_write.puts(sig)
-           end
-         rescue ArgumentError
-           puts "System does not support signal #{sig}"
-         end
-       end
-     end
-
-     def print_stacktraces
-       return unless Toiler.logger
-       Toiler.logger.info "-------------------"
-       Toiler.logger.info "Received QUIT, dumping threads:"
-       Thread.list.each do |t|
-         id = t.object_id
-         Toiler.logger.info "[thread:#{id}] #{t.backtrace.join("\n[thread:#{id}] ")}"
-       end
-       Toiler.logger.info '-------------------'
-     end
-
-     def print_status
-       return unless Toiler.logger
-       Toiler.logger.info "-------------------"
-       Toiler.logger.info "Received QUIT, dumping status:"
-       Toiler.queues.each do |queue|
-         fetcher = Toiler.fetcher(queue).send(:core).send(:context)
-         processor_pool = Toiler.processor_pool(queue).send(:core).send(:context)
-         processors = processor_pool.instance_variable_get(:@workers).collect { |w| w.send(:core).send(:context) }
-         busy_processors = processors.count { |pr| pr.executing? }
-         message = "Status for [queue:#{queue}]:"
-         message += "\n[fetcher:#{fetcher.name}] [executing:#{fetcher.executing?}] [polling:#{fetcher.polling?}] [scheduled:#{fetcher.scheduled?}] [free_processors:#{fetcher.get_free_processors}]"
-         message += "\n[processor_pool:#{processor_pool.name}] [workers:#{processors.count}] [busy:#{busy_processors}]"
-         processors.each do |processor|
-           thread = processor.thread
-           thread_id = thread.nil? ? "nil" : thread.object_id
-           message += "\n[processor:#{processor.name}] [executing:#{processor.executing?}] [thread:#{thread_id}]"
-           message += " Stack:\n" + thread.backtrace.join("\n\t") unless thread.nil?
-         end
-         Toiler.logger.info message
-       end
-       Toiler.logger.info '-------------------'
-     end
-
-     def handle_signal(signal)
-       case signal
-       when 'QUIT'
-         print_stacktraces
-         print_status
-       when 'INT', 'TERM'
-         fail Interrupt
-       end
-     end
-
-     def load_concurrent
-       require 'concurrent-edge'
-       Concurrent.global_logger = lambda do |level, progname, msg = nil, &block|
-         Toiler.logger.log(level, msg, progname, &block)
-       end if Toiler.logger
-     end
-
-     def daemonize
-       return unless Toiler.options[:daemon]
-       fail 'Logfile required when daemonizing' unless Toiler.options[:logfile]
-
-       files_to_reopen = []
-       ObjectSpace.each_object(File) do |file|
-         files_to_reopen << file unless file.closed?
-       end
-
-       Process.daemon(true, true)
-
-       reopen_files(files_to_reopen)
-       reopen_std
-     end
-
-     def reopen_files(files_to_reopen)
-       files_to_reopen.each do |file|
-         begin
-           file.reopen file.path, 'a+'
-           file.sync = true
-         rescue StandardError
-           puts "Failed to reopen file #{file}"
-         end
-       end
-     end
-
-     def reopen_std
-       [$stdout, $stderr].each do |io|
-         File.open(Toiler.options[:logfile], 'ab') do |f|
-           io.reopen(f)
-         end
-         io.sync = true
-       end
-       $stdin.reopen('/dev/null')
-     end
-
-     def write_pid
-       file = Toiler.options[:pidfile]
-       File.write file, Process.pid if file
-     end
-   end
- end
+ require 'singleton'
+ require 'timeout'
+ require 'optparse'
+ require 'toiler'
+
+ module Toiler
+   # See: https://github.com/mperham/sidekiq/blob/33f5d6b2b6c0dfaab11e5d39688cab7ebadc83ae/lib/sidekiq/cli.rb#L20
+   class Shutdown < Interrupt; end
+
+   # Command line client interface
+   class CLI
+     include Singleton
+
+     attr_accessor :supervisor
+
+     def run(args)
+       @self_read, @self_write = IO.pipe
+
+       trap_signals
+       options = Utils::ArgumentParser.parse(args)
+       Utils::EnvironmentLoader.load(options)
+       daemonize
+       write_pid
+       load_concurrent
+       start_supervisor
+
+       handle_stop
+     end
+
+     private
+
+     def handle_stop
+       while (readable_io = IO.select([@self_read]))
+         handle_signal(readable_io.first[0].gets.strip)
+       end
+     rescue Interrupt
+       Toiler.logger.info 'Received Interrupt, Waiting up to 60 seconds for actors to finish...'
+       success = supervisor.ask(:terminate!).wait(60)
+       if success
+         Toiler.logger.info 'Supervisor successfully terminated'
+       else
+         Toiler.logger.info 'Timeout waiting for Supervisor to terminate'
+       end
+     ensure
+       exit 0
+     end
+
+     def shutdown_pools
+       Concurrent.global_fast_executor.shutdown
+       Concurrent.global_io_executor.shutdown
+       return if Concurrent.global_io_executor.wait_for_termination(60)
+       Concurrent.global_io_executor.kill
+     end
+
+     def start_supervisor
+       require 'toiler/actor/supervisor'
+       @supervisor = Actor::Supervisor.spawn! :supervisor
+     end
+
+     def trap_signals
+       %w(INT TERM QUIT USR1 USR2 TTIN).each do |sig|
+         begin
+           trap sig do
+             @self_write.puts(sig)
+           end
+         rescue ArgumentError
+           puts "System does not support signal #{sig}"
+         end
+       end
+     end
+
+     def print_stacktraces
+       return unless Toiler.logger
+       Toiler.logger.info "-------------------"
+       Toiler.logger.info "Received QUIT, dumping threads:"
+       Thread.list.each do |t|
+         id = t.object_id
+         Toiler.logger.info "[thread:#{id}] #{t.backtrace.join("\n[thread:#{id}] ")}"
+       end
+       Toiler.logger.info '-------------------'
+     end
+
+     def print_status
+       return unless Toiler.logger
+       Toiler.logger.info "-------------------"
+       Toiler.logger.info "Received QUIT, dumping status:"
+       Toiler.queues.each do |queue|
+         fetcher = Toiler.fetcher(queue).send(:core).send(:context)
+         processor_pool = Toiler.processor_pool(queue).send(:core).send(:context)
+         processors = processor_pool.instance_variable_get(:@workers).collect { |w| w.send(:core).send(:context) }
+         busy_processors = processors.count { |pr| pr.executing? }
+         message = "Status for [queue:#{queue}]:"
+         message += "\n[fetcher:#{fetcher.name}] [executing:#{fetcher.executing?}] [polling:#{fetcher.polling?}] [scheduled:#{fetcher.scheduled?}] [free_processors:#{fetcher.get_free_processors}]"
+         message += "\n[processor_pool:#{processor_pool.name}] [workers:#{processors.count}] [busy:#{busy_processors}]"
+         processors.each do |processor|
+           thread = processor.thread
+           thread_id = thread.nil? ? "nil" : thread.object_id
+           message += "\n[processor:#{processor.name}] [executing:#{processor.executing?}] [thread:#{thread_id}]"
+           message += " Stack:\n" + thread.backtrace.join("\n\t") unless thread.nil?
+         end
+         Toiler.logger.info message
+       end
+       Toiler.logger.info '-------------------'
+     end
+
+     def handle_signal(signal)
+       case signal
+       when 'QUIT'
+         print_stacktraces
+         print_status
+       when 'INT', 'TERM'
+         fail Interrupt
+       end
+     end
+
+     def load_concurrent
+       require 'concurrent-edge'
+       Concurrent.global_logger = lambda do |level, progname, msg = nil, &block|
+         Toiler.logger.log(level, msg, progname, &block)
+       end if Toiler.logger
+     end
+
+     def daemonize
+       return unless Toiler.options[:daemon]
+       fail 'Logfile required when daemonizing' unless Toiler.options[:logfile]
+
+       files_to_reopen = []
+       ObjectSpace.each_object(File) do |file|
+         files_to_reopen << file unless file.closed?
+       end
+
+       Process.daemon(true, true)
+
+       reopen_files(files_to_reopen)
+       reopen_std
+     end
+
+     def reopen_files(files_to_reopen)
+       files_to_reopen.each do |file|
+         begin
+           file.reopen file.path, 'a+'
+           file.sync = true
+         rescue StandardError
+           puts "Failed to reopen file #{file}"
+         end
+       end
+     end
+
+     def reopen_std
+       [$stdout, $stderr].each do |io|
+         File.open(Toiler.options[:logfile], 'ab') do |f|
+           io.reopen(f)
+         end
+         io.sync = true
+       end
+       $stdin.reopen('/dev/null')
+     end
+
+     def write_pid
+       file = Toiler.options[:pidfile]
+       File.write file, Process.pid if file
+     end
+   end
+ end
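For reference, a minimal launcher sketch of the kind of binstub that drives this class: CLI is a Singleton, and #run traps signals, parses options, optionally daemonizes, spawns the Supervisor, then blocks in handle_stop until an INT or TERM arrives:

#!/usr/bin/env ruby
# Minimal launcher sketch mirroring the entry point shown above.
require 'toiler/cli'

Toiler::CLI.instance.run(ARGV)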