asynk 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +18 -0
- data/Gemfile +3 -0
- data/Gemfile.lock +172 -0
- data/README.md +168 -0
- data/asynk.gemspec +27 -0
- data/bin/asynk +12 -0
- data/bin/asynkctl +93 -0
- data/consumer.rb +52 -0
- data/lib/asynk/benchmark.rb +21 -0
- data/lib/asynk/broker.rb +33 -0
- data/lib/asynk/cli.rb +146 -0
- data/lib/asynk/config.rb +33 -0
- data/lib/asynk/consumer.rb +98 -0
- data/lib/asynk/logging.rb +97 -0
- data/lib/asynk/message.rb +30 -0
- data/lib/asynk/publisher.rb +28 -0
- data/lib/asynk/response.rb +57 -0
- data/lib/asynk/server.rb +62 -0
- data/lib/asynk/sync_publisher.rb +36 -0
- data/lib/asynk/test_helper.rb +52 -0
- data/lib/asynk/version.rb +3 -0
- data/lib/asynk/worker.rb +56 -0
- data/lib/asynk.rb +65 -0
- data/myapp/.gitignore +17 -0
- data/myapp/Gemfile +16 -0
- data/myapp/Gemfile.lock +175 -0
- data/myapp/README.rdoc +28 -0
- data/myapp/Rakefile +6 -0
- data/myapp/app/assets/images/.keep +0 -0
- data/myapp/app/assets/javascripts/application.js +16 -0
- data/myapp/app/assets/stylesheets/application.css +15 -0
- data/myapp/app/consumers/wallet_events_consumer.rb +22 -0
- data/myapp/app/controllers/application_controller.rb +5 -0
- data/myapp/app/controllers/concerns/.keep +0 -0
- data/myapp/app/helpers/application_helper.rb +2 -0
- data/myapp/app/mailers/.keep +0 -0
- data/myapp/app/models/.keep +0 -0
- data/myapp/app/models/concerns/.keep +0 -0
- data/myapp/app/models/user.rb +2 -0
- data/myapp/app/views/layouts/application.html.erb +14 -0
- data/myapp/bin/bundle +3 -0
- data/myapp/bin/rails +8 -0
- data/myapp/bin/rake +8 -0
- data/myapp/bin/setup +29 -0
- data/myapp/bin/spring +15 -0
- data/myapp/config/application.rb +26 -0
- data/myapp/config/boot.rb +3 -0
- data/myapp/config/database.yml +32 -0
- data/myapp/config/environment.rb +5 -0
- data/myapp/config/environments/development.rb +41 -0
- data/myapp/config/environments/production.rb +79 -0
- data/myapp/config/environments/test.rb +42 -0
- data/myapp/config/initializers/assets.rb +11 -0
- data/myapp/config/initializers/backtrace_silencers.rb +7 -0
- data/myapp/config/initializers/cookies_serializer.rb +3 -0
- data/myapp/config/initializers/filter_parameter_logging.rb +4 -0
- data/myapp/config/initializers/inflections.rb +16 -0
- data/myapp/config/initializers/mime_types.rb +4 -0
- data/myapp/config/initializers/session_store.rb +3 -0
- data/myapp/config/initializers/wrap_parameters.rb +14 -0
- data/myapp/config/locales/en.yml +23 -0
- data/myapp/config/routes.rb +56 -0
- data/myapp/config/secrets.yml +22 -0
- data/myapp/config.ru +4 -0
- data/myapp/db/migrate/20150826104429_create_users.rb +10 -0
- data/myapp/db/schema.rb +26 -0
- data/myapp/db/seeds.rb +7 -0
- data/myapp/lib/assets/.keep +0 -0
- data/myapp/lib/tasks/.keep +0 -0
- data/myapp/log/.keep +0 -0
- data/myapp/public/404.html +67 -0
- data/myapp/public/422.html +67 -0
- data/myapp/public/500.html +66 -0
- data/myapp/public/favicon.ico +0 -0
- data/myapp/public/robots.txt +5 -0
- data/myapp/test/controllers/.keep +0 -0
- data/myapp/test/fixtures/.keep +0 -0
- data/myapp/test/fixtures/users.yml +9 -0
- data/myapp/test/helpers/.keep +0 -0
- data/myapp/test/integration/.keep +0 -0
- data/myapp/test/mailers/.keep +0 -0
- data/myapp/test/models/.keep +0 -0
- data/myapp/test/models/user_test.rb +7 -0
- data/myapp/test/test_helper.rb +10 -0
- data/myapp/vendor/assets/javascripts/.keep +0 -0
- data/myapp/vendor/assets/stylesheets/.keep +0 -0
- data/publisher.rb +15 -0
- data/test/consumer_example.rb +31 -0
- data/test/consumer_testing_example_test.rb +21 -0
- data/test/test_helper.rb +7 -0
- metadata +271 -0
data/lib/asynk/cli.rb
ADDED
@@ -0,0 +1,146 @@
$stdout.sync = true

require 'optparse'
require 'fileutils'
require 'asynk'

module Asynk
  # Command-line entry point for the `asynk` executable.
  #
  # Owns the whole process lifecycle: option parsing, logger setup,
  # optional daemonization, pidfile creation, loading Celluloid, booting
  # the host application and finally starting the server loop.
  class CLI
    include Singleton

    # Environment name the server boots into ('development', 'production', ...).
    attr_accessor :environment

    # Runs the full CLI lifecycle. `args` defaults to ARGV so a caller
    # can inject its own argument vector.
    def run(args = ARGV)
      Asynk.booted_inside = true
      setup_options(args)
      initialize_logger
      daemonize
      write_pid
      load_celluloid
      boot_system
      Asynk.server.run
    end

    private

    # Loads Celluloid only after daemonization. Raises if Celluloid was
    # already required while running in daemon mode, because its threads
    # and locks do not survive a fork.
    def load_celluloid
      # FIX: message previously referred to "Sidekiq's daemonization",
      # a leftover from the code this was adapted from.
      raise "Celluloid cannot be required until here, or it will break Asynk's daemonization" if defined?(::Celluloid) && options[:daemon]

      # Celluloid can't be loaded until after we've daemonized
      # because it spins up threads and creates locks which get
      # into a very bad state if forked.
      require 'celluloid'
      require 'celluloid/io'
      Celluloid.logger = (options[:verbose] ? Asynk.logger : nil)
    end

    # Detaches the process from the terminal when --daemon was given.
    # Reopens every open File in append mode so inherited descriptors
    # keep working after the fork, then points stdout/stderr at the
    # logfile and stdin at /dev/null.
    def daemonize
      return unless options[:daemon]

      raise ArgumentError, "You really should set a logfile if you're going to daemonize" unless options[:logfile]
      files_to_reopen = []
      ObjectSpace.each_object(File) do |file|
        files_to_reopen << file unless file.closed?
      end

      ::Process.daemon(true, true)

      files_to_reopen.each do |file|
        begin
          file.reopen file.path, "a+"
          file.sync = true
        rescue ::Exception
          # Best effort: some descriptors cannot be reopened after the
          # fork; skipping them is preferable to aborting daemonization.
        end
      end

      [$stdout, $stderr].each do |io|
        File.open(options[:logfile], 'ab') do |f|
          io.reopen(f)
        end
        io.sync = true
      end
      $stdin.reopen('/dev/null')

      # The pre-fork logger holds a dead descriptor; build a fresh one.
      initialize_logger
    end

    # Writes the current PID to options[:pidfile], when configured.
    def write_pid
      if path = options[:pidfile]
        pidfile = File.expand_path(path)
        File.open(pidfile, 'w') do |f|
          f.puts ::Process.pid
        end
      end
    end

    # Global option store shared with the rest of Asynk.
    def options
      Asynk.options
    end

    # Points the logger at the configured logfile and raises the level
    # to DEBUG when --verbose was given.
    def initialize_logger
      Asynk::Logging.initialize_logger(options[:logfile]) if options[:logfile]
      Asynk.logger.level = ::Logger::DEBUG if options[:verbose]
    end

    # Parses CLI arguments, resolves the environment and merges the
    # result into the global options.
    def setup_options(args)
      opts = parse_options(args)
      set_environment opts[:environment]
      options.merge!(opts)
    end

    # Requires the target application. A directory is treated as a Rails
    # root (config/application.rb + config/environment.rb, then an eager
    # load); anything else is required as a plain Ruby file.
    def boot_system
      Asynk.logger.info 'Booting Asynk App'
      ENV['RACK_ENV'] = ENV['RAILS_ENV'] = environment

      raise ArgumentError, "#{options[:require]} does not exist" unless File.exist?(options[:require])

      if File.directory?(options[:require])
        require File.expand_path("#{options[:require]}/config/application.rb")
        require File.expand_path("#{options[:require]}/config/environment.rb")
        ::Rails.application.eager_load!
      else
        require options[:require]
      end
    end

    # CLI flag wins, then RAILS_ENV / RACK_ENV, then 'development'.
    def set_environment(cli_env)
      @environment = cli_env || ENV['RAILS_ENV'] || ENV['RACK_ENV'] || 'development'
    end

    # Builds the OptionParser, consumes argv and returns the parsed
    # options as a plain hash.
    def parse_options(argv)
      opts = {}

      @parser = OptionParser.new do |o|
        o.on '-d', '--daemon', "Daemonize process" do |arg|
          opts[:daemon] = arg
        end

        o.on '-e', '--environment ENV', "Application environment" do |arg|
          opts[:environment] = arg
        end

        o.on '-r', '--require [PATH|DIR]', "Location of Rails application with workers or file to require" do |arg|
          opts[:require] = arg
        end

        o.on '-L', '--logfile PATH', "path to writable logfile" do |arg|
          opts[:logfile] = arg
        end

        o.on '-P', '--pidfile PATH', "path to pidfile" do |arg|
          opts[:pidfile] = arg
        end

        o.on '-V', '--version', "Print version and exit" do |arg|
          puts "Asynk #{Asynk::VERSION}"
          exit(0)
        end

        o.on "-v", "--verbose", "Print more verbose output" do |arg|
          opts[:verbose] = arg
        end
      end

      @parser.parse!(argv)
      opts
    end
  end
end
data/lib/asynk/config.rb
ADDED
@@ -0,0 +1,33 @@
module Asynk
  # Central key/value configuration store for Asynk, pre-seeded with
  # sensible defaults for RabbitMQ connectivity and runtime behaviour.
  # Accessed through Hash-style readers/writers.
  class Config
    include Singleton

    def initialize
      @params = {
        mq_exchange: 'asynk_exchange_topic',
        sync_publish_wait_timeout: 10,     # seconds (multiplied to ms by SyncPublisher)
        default_consumer_concurrency: 1,
        default_sync: false,
        daemonize: false,
        logfile: 'log/asynk.log',
        # FIX: key was misspelled as :pidifle, which made this default
        # unreachable through the :pidfile key the CLI reads.
        pidfile: 'tmp/pids/asynk.pid',
        mq_host: 'localhost',
        mq_port: 5672,
        mq_vhost: '/',
        mq_username: 'guest',
        mq_password: 'guest',
        publisher_execution_time: true,
        respond_back_execution_time: true,
        ignored_consumers: []
      }
    end

    # Reads a configuration value.
    def [](key)
      @params[key]
    end

    # Sets/overrides a configuration value.
    def []=(key, value)
      @params[key] = value
    end

  end
end
@@ -0,0 +1,98 @@
module Asynk
  # Mixin that turns an including class into an Asynk message consumer.
  # On inclusion the class is registered with Asynk and gains the DSL in
  # ClassMethods (set_consume, set_concurrency, ...). Instances wrap one
  # AMQP channel/delivery pair and can ack/reject/requeue the message.
  module Consumer

    def self.included(base)
      base.extend(ClassMethods)
      base.include ActiveSupport::Rescuable
      Asynk.register_consumer(base)
    end

    # channel/delivery_info come from Bunny; the optional block receives
    # the consumer's reply (see #respond).
    def initialize(channel, delivery_info, &block)
      @channel = channel
      @delivery_info = delivery_info
      @callback_block = block
    end

    # Acknowledges the current message.
    def ack!
      @channel.ack(@delivery_info.delivery_tag)
    end

    # Rejects the current message without requeueing.
    def reject!
      @channel.reject(@delivery_info.delivery_tag)
    end

    # Rejects the current message and asks the broker to requeue it.
    def requeue!
      @channel.reject(@delivery_info.delivery_tag, true)
    end

    def logger
      Asynk.logger
    end

    # Hands a result back to the callback supplied at construction time
    # (used by the worker to reply to sync publishers).
    def respond(result)
      @callback_block.call(result)
    end

    # Dispatches the message either to #process or, when routing-key
    # actions are enabled, to the method named by the key's last segment.
    def invoke_processing(message)
      method_for_exec = (self.class.route_ending_as_action? && message.routing_key) ?
                        self.class.action_name_from_routing_key(message.routing_key) : :process

      begin
        public_send(method_for_exec, message)
      rescue Exception => ex
        # Deliberately broad: errors are routed through
        # ActiveSupport::Rescuable handlers and re-raised when unhandled.
        raise(ex) unless rescue_with_handler(ex)
      end
    end

    # Class-level DSL for configuring a consumer.
    module ClassMethods
      attr_reader :routing_keys, :subscribe_arguments, :queue_options

      # Declares the routing keys this consumer subscribes to.
      def set_consume(*routing_keys)
        @routing_keys = routing_keys
      end

      # When true, the last segment of the routing key names the method
      # to invoke instead of #process.
      def set_route_ending_as_action(value)
        @route_ending_as_action = value
      end

      def route_ending_as_action?
        @route_ending_as_action || false
      end

      # Overrides the derived queue name.
      # FIX: the argument was previously ignored — @queue_name was always
      # assigned the class name regardless of what was passed in. Falls
      # back to the class name when called without an argument.
      def set_queue_name(queue_name = nil)
        @queue_name = queue_name || name
      end

      def set_queue_options(options = {})
        @queue_options = options
      end

      # Derived queue name: "app_name.name/spaced.consumer_class", where
      # the app prefix is added only inside a Rails application.
      def queue_name
        return @queue_name unless @queue_name.nil?
        app_name = Rails.application.class.parent_name.dup.underscore if defined?(Rails)
        queue_name = ActiveSupport::Inflector.underscore(self.name.gsub(/::/, '.'))
        queue_name = [app_name, queue_name].join('.') if app_name
        queue_name
      end

      def set_subscribe_arguments(arguments = {})
        @subscribe_arguments = arguments
      end

      # Number of parallel workers for this consumer.
      def set_concurrency(size)
        @concurrency = size
      end

      def concurrency
        @concurrency || Asynk.config[:default_consumer_concurrency]
      end

      # Extracts the action (last dot-separated segment) from a routing
      # key; the key must contain at least two segments.
      def action_name_from_routing_key(routing_key)
        splitted = routing_key.split('.')
        # FIX: message was garbled ("There now action in routing_key").
        raise 'There is no action in routing_key' if splitted.empty? || splitted.count < 2
        splitted.last.to_sym
      end

    end
  end
end
@@ -0,0 +1,97 @@
require 'time'
require 'logger'
# FIX: Fcntl is referenced by reopen_logs but was never required.
require 'fcntl'

module Asynk
  # Logger construction and log-rotation support for Asynk.
  class Logging

    # Formatter: "2020-01-02T03:04:05.000Z INFO: message".
    class Pretty < Logger::Formatter
      SPACE = " "

      # Provide a call() method that returns the formatted message.
      def call(severity, time, program_name, message)
        "#{time.utc.iso8601(3)} #{severity}: #{message}\n"
      end

      def context
        c = Thread.current[:sidekiq_context]
        " #{c.join(SPACE)}" if c && c.any?
      end
    end

    # Formatter used on platforms that timestamp log lines themselves
    # (e.g. Heroku, detected via ENV['DYNO']).
    class WithoutTimestamp < Pretty
      def call(severity, time, program_name, message)
        # FIX: the opening quote was missing, which turned this line into
        # a comment and made the method return nil instead of a string.
        "#{severity}: #{message}\n"
      end
    end

    # Builds (and memoizes) the logger, closing any previous one.
    def self.initialize_logger(log_target = STDOUT)
      oldlogger = defined?(@logger) ? @logger : nil
      @logger = Logger.new(log_target)
      @logger.level = Logger::INFO
      @logger.formatter = ENV['DYNO'] ? WithoutTimestamp.new : Pretty.new
      oldlogger.close if oldlogger && !$TESTING # don't want to close testing's STDOUT logging
      @logger
    end

    def self.logger
      defined?(@logger) ? @logger : initialize_logger
    end

    # Assigning nil installs a null logger rather than disabling logging.
    def self.logger=(log)
      @logger = (log ? log : Logger.new('/dev/null'))
    end

    # This reopens ALL logfiles in the process that have been rotated
    # using logrotate(8) (without copytruncate) or similar tools.
    # A +File+ object is considered for reopening if it is:
    # 1) opened with the O_APPEND and O_WRONLY flags
    # 2) the current open file handle does not match its original open path
    # 3) unbuffered (as far as userspace buffering goes, not O_SYNC)
    # Returns the number of files reopened
    def self.reopen_logs
      to_reopen = []
      append_flags = File::WRONLY | File::APPEND

      ObjectSpace.each_object(File) do |fp|
        begin
          if !fp.closed? && fp.stat.file? && fp.sync && (fp.fcntl(Fcntl::F_GETFL) & append_flags) == append_flags
            to_reopen << fp
          end
        rescue IOError, Errno::EBADF
        end
      end

      nr = 0
      to_reopen.each do |fp|
        orig_st = begin
          fp.stat
        rescue IOError, Errno::EBADF
          next
        end

        begin
          b = File.stat(fp.path)
          # Same inode and device: the file was not rotated; skip it.
          next if orig_st.ino == b.ino && orig_st.dev == b.dev
        rescue Errno::ENOENT
        end

        begin
          File.open(fp.path, 'a') { |tmpfp| fp.reopen(tmpfp) }
          fp.sync = true
          nr += 1
        rescue IOError, Errno::EBADF
          # not much we can do...
        end
      end
      nr
    rescue RuntimeError => ex
      # RuntimeError: ObjectSpace is disabled; each_object will only work with Class, pass -X+O to enable
      puts "Unable to reopen logs: #{ex.message}"
    end

    def logger
      # FIX: delegated to Sidekiq::Logging.logger — a leftover from the
      # code this class was adapted from.
      Asynk::Logging.logger
    end

  end
end
@@ -0,0 +1,30 @@
require 'active_support/core_ext/hash/indifferent_access'
require 'forwardable'
# FIX: JSON.parse is used below but 'json' was never required here.
require 'json'

module Asynk
  # Immutable wrapper around one delivered AMQP message: keeps the raw
  # Bunny delivery_info/properties/payload and exposes the JSON-decoded
  # payload as an indifferent-access hash (@body).
  class Message
    extend Forwardable

    attr_reader :delivery_info, :properties, :payload, :body

    # Raises JSON::ParserError when the payload is not valid JSON.
    def initialize(delivery_info, properties, payload)
      @delivery_info = delivery_info
      @properties = properties
      @payload = payload
      @body = JSON.parse(payload).with_indifferent_access
    end

    # Convenience delegation: msg[:key] reads the decoded body; id and
    # timestamp come from AMQP properties, routing data from delivery_info.
    def_delegator :@body, :[]
    def_delegators :@properties, :message_id, :timestamp
    def_delegators :@delivery_info, :routing_key, :exchange


    def to_s
      attrs = { :@body => body.to_s, message_id: message_id,
                timestamp: timestamp, routing_key: routing_key }
      "#<Message #{attrs.map { |k,v| "#{k}=#{v.inspect}" }.join(', ')}>"
    end

    alias_method :inspect, :to_s
  end
end
@@ -0,0 +1,28 @@
# FIX: SecureRandom is used below but was never required in this file.
require 'securerandom'

module Asynk
  # Fire-and-forget publishing of JSON messages to the Asynk exchange.
  class Publisher
    class PublisherError < RuntimeError; end

    class << self
      # Publishes `params` as JSON under `routing_key`. A :message_id in
      # params overrides the generated one. Timing is logged when
      # config[:publisher_execution_time] is enabled.
      def publish(routing_key, params = {})
        global_start_time = Asynk::Benchmark.start if Asynk.config[:publisher_execution_time]
        message_id = params.delete(:message_id) || generate_message_id

        Asynk.broker.pool.with do |channel, exchange, reply_queue|
          exchange.publish(params.to_json, message_id: message_id, routing_key: routing_key)
        end

        if Asynk.config[:publisher_execution_time]
          Asynk.logger.info "Sending async message #{routing_key}:#{message_id} with params: #{params}. Completed In: #{Asynk::Benchmark.end(global_start_time)} ms."
        end
      end

      # Request/response publishing; blocks until a reply arrives or the
      # timeout elapses (see Asynk::SyncPublisher).
      def sync_publish(routing_key, params = {})
        Asynk::SyncPublisher.new(routing_key, params).send
      end

      # Random hex id; `length` is the number of random bytes, so the
      # resulting string has 2 * length characters.
      # FIX: parameter name was misspelled ("legnth").
      def generate_message_id(length = 8)
        SecureRandom.hex(length)
      end
    end
  end
end
@@ -0,0 +1,57 @@
require 'json'
module Asynk
  # Value object for a consumer's reply: a status ('ok' or an error
  # status), an optional body and an optional error message.
  class Response
    # NOTE: :status is intentionally not exposed via attr_reader — the
    # #status method below wraps it in a StringInquirer.
    attr_reader :body, :error_message

    def initialize(status: , body: nil, error_message: nil)
      @status, @body, @error_message = status, body, error_message
    end

    # True when status is exactly 'ok'.
    def success?; @status.to_s == 'ok'; end
    def fail?; !success?; end

    # `options` is accepted (and ignored) so the as_json alias stays
    # compatible with serializers that pass options through.
    def to_h(options = {})
      { status: @status, body: @body, error_message: @error_message }
    end

    # Wraps status so callers can write response.status.ok?.
    def status
      ActiveSupport::StringInquirer.new(@status)
    end

    def errors; @error_message; end

    # Hash-style access into the body.
    def [](key)
      @body[key]
    end

    def to_json
      to_h.to_json
    end

    # FIX: the original also defined `def to_s; to_h; end`, which this
    # alias immediately overrode — the duplicate definition is removed.
    alias_method :to_s, :to_h
    alias_method :as_json, :to_h
    alias_method :inspect, :to_s

    # Builds a Response from a JSON payload when it parses to a hash with
    # the expected keys; otherwise returns the payload unchanged (nil in,
    # nil out).
    def self.try_to_create_from_hash(payload)
      return nil if payload.nil?
      parsed_payload = try_parse_json(payload)
      return payload unless parsed_payload
      return payload unless parsed_payload.kind_of?(Hash)
      hiwa = parsed_payload.with_indifferent_access
      return payload unless (hiwa.has_key?(:status) && hiwa.has_key?(:body) && hiwa.has_key?(:error_message))
      new(status: hiwa[:status], body: hiwa[:body], error_message: hiwa[:error_message])
    end

    # Returns the parsed document, or false when `str` is not valid JSON.
    def self.try_parse_json(str)
      begin
        JSON.parse(str)
      rescue JSON::ParserError => e
        return false
      end
    end
  end
end
data/lib/asynk/server.rb
ADDED
@@ -0,0 +1,62 @@
module Asynk
  # Boots one worker per unit of consumer concurrency, then parks the
  # main thread in a signal-polling loop until asked to shut down.
  # NOTE(review): workers appear to be Celluloid actors (future/value is
  # used in #shutdown) — confirm against asynk/worker.rb.
  class Server
    include Singleton

    def initialize
    end

    # Starts the server: builds workers for every registered consumer,
    # installs signal traps, then blocks in handle_signals until a
    # QUIT/TERM/INT signal arrives.
    def run
      require 'asynk/worker'
      prepare_consumers
      register_signal_handlers
      Asynk.logger.info "All consumers are prepared"
      handle_signals
    end

    # Asks every worker to shut down concurrently, waits for all of them
    # to finish, then closes the shared AMQP connection.
    def shutdown
      futures = workers.map { |w| w.future.shutdown }
      futures.map(&:value)
      Asynk.broker.amqp_connection.close
      Asynk.logger.info "Server shutdown!"
    end

    private
    # Polls the queue filled by the signal traps every 100ms; the first
    # queued signal triggers shutdown and breaks the loop.
    def handle_signals
      loop do
        signal = Thread.main[:signal_queue].shift
        if signal
          Asynk.logger.info "Caught sig#{signal.downcase}, stopping asynk server..."
          shutdown
          break
        end
        sleep(0.1)
      end
    end

    def workers
      @workers ||= []
    end

    # Traps QUIT/TERM/INT (only those the platform supports).
    def register_signal_handlers
      Thread.main[:signal_queue] = []
      %w(QUIT TERM INT).keep_if { |s| Signal.list.keys.include? s }.map(&:to_sym).each do |sig|
        # This needs to be reentrant, so we queue up signals to be handled
        # in the run loop, rather than acting on signals here
        trap(sig) do
          Thread.main[:signal_queue] << sig
        end
      end
    end

    def prepare_consumers
      Asynk.consumers.each{ |consumer| prepare_consumer(consumer) }
    end

    # Spawns `concurrency` workers for one consumer class, all sharing
    # the broker's AMQP connection.
    def prepare_consumer(consumer)
      consumer.concurrency.times do |index|
        workers << Asynk::Worker.new(Asynk.broker.amqp_connection, consumer, index)
      end
    end
  end
end
@@ -0,0 +1,36 @@
# FIX: SecureRandom is used below but was never required in this file.
require 'securerandom'

module Asynk
  # Request/response (RPC-style) publishing over AMQP: publishes a
  # message with a reply_to queue and a correlation id, then waits for
  # the correlated reply up to a timeout.
  class SyncPublisher
    # params may carry two control keys, both removed before publishing:
    #   :message_id — overrides the generated message id
    #   :timeout    — seconds to wait for the reply
    #                 (defaults to config[:sync_publish_wait_timeout])
    def initialize(routing_key, params)
      @routing_key = routing_key
      @params = params
      @message_id = (@params.delete(:message_id) || generate_message_id)
      @wait_timeout = (@params.delete(:timeout) || Asynk.config[:sync_publish_wait_timeout]) * 1000
      @correlation_id = generate_message_id
    end

    # Publishes and blocks until a reply with the matching correlation id
    # arrives; raises RuntimeError when the timeout elapses first.
    # Returns an Asynk::Response when the reply payload parses as one,
    # otherwise the raw payload.
    def send
      global_start_time = Asynk::Benchmark.start if Asynk.config[:publisher_execution_time]
      Asynk.broker.pool.with do |channel, exchange, reply_queue|
        exchange.publish(@params.to_json, message_id: @message_id, routing_key: @routing_key, correlation_id: @correlation_id, reply_to: reply_queue.name)

        start_time = Asynk::Benchmark.start
        # NOTE(review): this is a tight poll of reply_queue.pop with no
        # sleep — it busy-waits a CPU core until the reply or timeout.
        while !@response do
          delivery_info, properties, payload = reply_queue.pop
          @response = payload if payload && properties[:correlation_id] == @correlation_id
          raise(RuntimeError.new('Timeout error reached')) if @wait_timeout <= Asynk::Benchmark.end(start_time)
        end
      end

      message = Asynk::Response.try_to_create_from_hash(@response)
      if Asynk.config[:publisher_execution_time]
        Asynk.logger.info "Sending sync message to #{@routing_key}:#{@message_id}. Completed In: #{Asynk::Benchmark.end(global_start_time)} ms."
      end

      message
    end

    # Random hex id; `length` is the number of random bytes, so the
    # resulting string has 2 * length characters.
    # FIX: parameter name was misspelled ("legnth").
    def generate_message_id(length = 8)
      SecureRandom.hex(length)
    end
  end
end
@@ -0,0 +1,52 @@
module Asynk
  # Test-support mixin: runs a consumer inline against mocked Bunny
  # objects instead of a real broker, capturing any reply the consumer
  # sends back via its callback block.
  module TestHelper
    # Synchronously delivers `options` (JSON-encoded) to the single
    # consumer registered for `routing_key` and returns its reply.
    # Raises when zero or more than one consumer matches the route.
    def publish_sync(routing_key, options)
      consumers = find_consumers_by_route(routing_key)
      # FIX: message typo ("Cant" -> "Can't").
      raise "Can't find consumer by route: #{routing_key}" if consumers.empty?
      raise "No ability to test multiple consumer per route" if consumers.count > 1
      consumer = consumers.first

      bunny_mocked_channel = BunnyMockedChannel.new
      bunny_mocked_delivery_info = BunnyMockedDeliveryInfo.new(routing_key)
      bunny_mock_properties = BunnyMockedProperties.new

      message = Asynk::Message.new(bunny_mocked_delivery_info, bunny_mock_properties, options.to_json)
      consumer_instance = consumer.new(bunny_mocked_channel, bunny_mocked_delivery_info) do |result|
        self.asynk_response = result.kind_of?(String) ? result : result.to_json
      end

      consumer_instance.invoke_processing(message)
      asynk_response
    end

    def asynk_response=(response)
      @asynk_response = response
    end

    def asynk_response
      @asynk_response
    end

    # All registered consumers whose routing keys exactly match `route`.
    def find_consumers_by_route(route)
      Asynk.consumers.select{ |consumer| consumer.routing_keys.any?{ |key| key == route } }
    end

    # No-op stand-in for a Bunny channel.
    class BunnyMockedChannel
      def ack(*args); end
      def reject(*args); end
      def retry(*args); end
    end

    # Stand-in for Bunny delivery info carrying only the routing key.
    # FIX: class names were misspelled ("Buuny...").
    class BunnyMockedDeliveryInfo
      attr_reader :routing_key
      def initialize(routing_key); @routing_key = routing_key; end
      def exchange; end
      def delivery_tag; end
    end

    # Stand-in for Bunny message properties with fixed values.
    class BunnyMockedProperties
      def timestamp; Time.now; end
      def message_id; 1; end
    end

    # Backward-compatible aliases for the old misspelled constants.
    BuunyMockedDeliveryInfo = BunnyMockedDeliveryInfo
    BuunyMockedProperties = BunnyMockedProperties
  end
end