manageiq-messaging 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. checksums.yaml +7 -0
  2. data/.codeclimate.yml +47 -0
  3. data/.gitignore +10 -0
  4. data/.rspec +2 -0
  5. data/.rubocop.yml +4 -0
  6. data/.rubocop_cc.yml +5 -0
  7. data/.rubocop_local.yml +2 -0
  8. data/.travis.yml +10 -0
  9. data/CHANGES +2 -0
  10. data/CODE_OF_CONDUCT.md +74 -0
  11. data/Gemfile +4 -0
  12. data/LICENSE.txt +21 -0
  13. data/README.md +171 -0
  14. data/Rakefile +6 -0
  15. data/bin/console +14 -0
  16. data/bin/setup +8 -0
  17. data/examples/README.md +16 -0
  18. data/examples/background_job.rb +36 -0
  19. data/examples/common.rb +40 -0
  20. data/examples/message.rb +42 -0
  21. data/lib/manageiq-messaging.rb +1 -0
  22. data/lib/manageiq/messaging.rb +24 -0
  23. data/lib/manageiq/messaging/client.rb +205 -0
  24. data/lib/manageiq/messaging/common.rb +36 -0
  25. data/lib/manageiq/messaging/kafka.rb +7 -0
  26. data/lib/manageiq/messaging/kafka/background_job.rb +13 -0
  27. data/lib/manageiq/messaging/kafka/client.rb +91 -0
  28. data/lib/manageiq/messaging/kafka/common.rb +105 -0
  29. data/lib/manageiq/messaging/kafka/queue.rb +41 -0
  30. data/lib/manageiq/messaging/kafka/topic.rb +28 -0
  31. data/lib/manageiq/messaging/null_logger.rb +11 -0
  32. data/lib/manageiq/messaging/received_message.rb +11 -0
  33. data/lib/manageiq/messaging/stomp.rb +7 -0
  34. data/lib/manageiq/messaging/stomp/background_job.rb +61 -0
  35. data/lib/manageiq/messaging/stomp/client.rb +85 -0
  36. data/lib/manageiq/messaging/stomp/common.rb +84 -0
  37. data/lib/manageiq/messaging/stomp/queue.rb +53 -0
  38. data/lib/manageiq/messaging/stomp/topic.rb +37 -0
  39. data/lib/manageiq/messaging/version.rb +5 -0
  40. data/manageiq-messaging.gemspec +33 -0
  41. metadata +210 -0
@@ -0,0 +1,40 @@
1
require 'optparse'
require 'logger' # needed by the --debug handler below; was missing and raised NameError

# Shared command-line/option handling for the example scripts.
#
# Connection settings are taken from the command line first, then from
# QUEUE_* environment variables, and finally fall back to local-broker
# defaults (localhost:61616, admin/smartvm).
class Common
  def initialize
    @options = {}
  end

  # Parse ARGV (with environment fallbacks) into @options.
  # Returns self so callers can chain: Common.new.parse.run
  def parse
    options = {}

    OptionParser.new do |opt|
      opt.on("--hostname HOSTNAME", String, "Hostname") { |v| options[:hostname] = v }
      opt.on("--port PORT", Integer, "Port" ) { |v| options[:port] = v }
      opt.on("--username USERNAME", String, "Username") { |v| options[:user] = v }
      opt.on("--password PASSWORD", String, "Password") { |v| options[:password] = v }
      opt.on("--debug") { ManageIQ::Messaging.logger = Logger.new(STDOUT) }
      opt.parse!
    end

    # Anything not given on the command line falls back to the environment,
    # then to defaults matching a local ActiveMQ Artemis broker.
    options[:hostname] ||= ENV["QUEUE_HOSTNAME"] || "localhost"
    options[:port]     ||= ENV["QUEUE_PORT"] || 61616
    options[:user]     ||= ENV["QUEUE_USER"] || "admin"
    options[:password] ||= ENV["QUEUE_PASSWORD"] || "smartvm"

    # QUEUE_PORT arrives as a String; normalize to Integer.
    options[:port] = options[:port].to_i

    @options = options
    self
  end

  # Connection options hash in the shape ManageIQ::Messaging::Client.open expects.
  def q_options
    {
      :host       => @options[:hostname],
      :port       => @options[:port].to_i,
      :username   => @options[:user],
      :password   => @options[:password],
      :client_ref => "background_example",
    }
  end
end
@@ -0,0 +1,42 @@
1
#!/usr/bin/env ruby
#
# Example: publish five queue messages, then subscribe with the same
# connection and consume them.

require 'manageiq-messaging'
require_relative "common"

Thread.abort_on_exception = true

class ProducerConsumer < Common
  def run
    ManageIQ::Messaging::Client.open(q_options) do |client|
      puts "producer"
      (0...5).each do |index|
        client.publish_message(
          :service  => 'ems_operation',
          :affinity => 'ems_amazon1',
          :message  => 'power_on',
          :payload  => { :ems_ref => 'u987', :id => index.to_s }
        )
      end
      puts "produced 5 messages"

      puts "consumer"
      subscription = { :service => 'ems_operation', :affinity => 'ems_amazon1' }
      client.subscribe_messages(subscription) do |batch|
        batch.each do |message|
          do_stuff(message)
          client.ack(message.ack_ref)
        end
      end
      # Give the background subscriber a moment to drain the queue.
      sleep(5)
      puts "consumed"
    end
  end

  # Print the received message; stand-in for real work.
  def do_stuff(msg)
    puts "GOT MESSAGE: #{msg.message}: #{msg.payload}"
  end
end

ProducerConsumer.new.parse.run
@@ -0,0 +1 @@
1
+ require 'manageiq/messaging'
@@ -0,0 +1,24 @@
1
require 'active_support/core_ext/module/delegation'
require 'active_support/core_ext/hash'
require 'yaml'

require 'manageiq/messaging/null_logger'

module ManageIQ
  # Top-level namespace for the messaging clients.
  module Messaging
    # Protocol implementations are loaded lazily on first reference.
    autoload :Stomp, 'manageiq/messaging/stomp'
    autoload :Kafka, 'manageiq/messaging/kafka'

    class << self
      # Hosts may install their own logger via ManageIQ::Messaging.logger = ...
      attr_writer :logger

      # Module-wide logger; a no-op NullLogger until one is assigned.
      def logger
        @logger ||= NullLogger.new
      end
    end
  end
end

require 'manageiq/messaging/version'
require 'manageiq/messaging/client'
require 'manageiq/messaging/received_message'
@@ -0,0 +1,205 @@
1
module ManageIQ
  module Messaging
    # The abstract client class. It defines methods needed to publish or subscribe messages.
    # It is not recommended to directly create a solid subclass instance. The proper way is
    # to call class method +Client.open+ with desired protocol. For example:
    #
    #   client = ManageIQ::Messaging::Client.open(
    #     :protocol   => 'Stomp',
    #     :host       => 'localhost',
    #     :port       => 61616,
    #     :password   => 'smartvm',
    #     :username   => 'admin',
    #     :client_ref => 'generic_1',
    #     :encoding   => 'json'
    #   )
    #
    # To close the connection one needs to explicitly call +client.close+.
    # Alternatively if a block is given for the +open+ method, the connection will be closed
    # automatically before exiting the block. For example:
    #
    #   ManageIQ::Messaging::Client.open(
    #     :protocol   => 'Stomp',
    #     :host       => 'localhost',
    #     :port       => 61616,
    #     :password   => 'smartvm',
    #     :username   => 'admin',
    #     :client_ref => 'generic_1'
    #   ) do |client|
    #     # do stuff with the client
    #   end
    class Client
      # Open or create a connection to the message broker.
      # Expected +options+ keys are:
      # * :protocol (Implemented: 'Stomp', 'Kafka'. Default 'Stomp')
      # * :encoding ('yaml' or 'json'. Default 'yaml')
      # Other connection options are underlying messaging system specific.
      #
      # Returns a +Client+ instance if no block is given; otherwise yields the
      # client to the block, closes it on the way out, and returns nil.
      def self.open(options)
        protocol = options[:protocol] || :Stomp
        client = Object.const_get("ManageIQ::Messaging::#{protocol}::Client").new(options)
        return client unless block_given?

        begin
          yield client
        ensure
          client.close
        end
        nil
      end

      # Publish a message to a queue. The message will be delivered to only one subscriber.
      # Expected keys in +options+ are:
      # * :service (service and affinity are used to determine the queue name)
      # * :affinity (optional)
      # * :class_name (optional)
      # * :message (e.g. method name or message type)
      # * :payload (message body, a string or an user object that can be serialized)
      # * :sender (optional, identify the sender)
      # Other options are underlying messaging system specific.
      #
      # Optionally a call back block can be provided to wait on the consumer to send
      # an acknowledgment. Not every underlying messaging system supports callback.
      # Example:
      #
      #   client.publish_message(
      #     :service  => 'ems_operation',
      #     :affinity => 'ems_amazon1',
      #     :message  => 'power_on',
      #     :payload  => {
      #       :ems_ref => 'u987',
      #       :id      => '123'
      #     }
      #   ) do |result|
      #     ansible_install_pkg(vm1) if result == 'running'
      #   end
      #
      # Raises ArgumentError unless :message and :service are present.
      def publish_message(options, &block)
        assert_options(options, [:message, :service])

        publish_message_impl(options, &block)
      end

      # Publish multiple messages to a queue.
      # An aggregate version of +#publish_message+ but for better performance.
      # All messages are sent in a batch. Every element in +messages+ array is
      # an +options+ hash.
      #
      def publish_messages(messages)
        publish_messages_impl(messages)
      end

      # Subscribe to receive messages from a queue.
      # Expected keys in +options+ are:
      # * :service (service and affinity are used to determine the queue)
      # * :affinity (optional)
      # Other options are underlying messaging system specific.
      #
      # A callback block is needed to consume the messages:
      #
      #   client.subscribe_messages(options) do |messages|
      #     messages.each do |msg|
      #       # msg is a type of ManageIQ::Messaging::ReceivedMessage
      #       # attributes in msg
      #       msg.sender
      #       msg.message
      #       msg.payload
      #       msg.ack_ref # used to ack the message
      #
      #       client.ack(msg.ack_ref)
      #       # process the message
      #     end
      #   end
      #
      # Some messaging systems require the subscriber to ack each message in the
      # callback block. The code in the block can decide when to ack according
      # to whether a message can be retried. Ack the message in the beginning of
      # processing if the message is not re-triable; otherwise ack it after the
      # message is done. Any un-acked message will be redelivered to next subscriber
      # AFTER the current subscriber disconnects normally or abnormally (e.g. crashed).
      #
      # To ack a message call +ack+(+msg.ack_ref+)
      def subscribe_messages(options, &block)
        raise "A block is required" unless block_given?
        assert_options(options, [:service])

        subscribe_messages_impl(options, &block)
      end

      # Subscribe to receive from a queue and run each message as a background job.
      # Expected keys in +options+ are:
      # * :service (service and affinity are used to determine the queue)
      # * :affinity (optional)
      # Other options are underlying messaging system specific.
      #
      # This subscriber consumes messages sent through +publish_message+ with required
      # +options+ keys, for example:
      #
      #   client.publish_message(
      #     :service    => 'generic',
      #     :class_name => 'MiqTask',
      #     :message    => 'update_attributes', # method name, for instance method :instance_id is required
      #     :payload    => {
      #       :instance_id => 2,                # database id of class instance stored in rails DB
      #       :args        => [{:status => 'Timeout'}] # argument list expected by the method
      #     }
      #   )
      #
      # Background job assumes each job is not re-triable. It will ack as soon as a request
      # is received
      def subscribe_background_job(options)
        assert_options(options, [:service])

        subscribe_background_job_impl(options)
      end

      # Publish a message as a topic. All subscribers will receive a copy of the message.
      # Expected keys in +options+ are:
      # * :service (service is used to determine the topic address)
      # * :event (event name)
      # * :payload (message body, a string or an user object that can be serialized)
      # * :sender (optional, identify the sender)
      # Other options are underlying messaging system specific.
      #
      def publish_topic(options)
        assert_options(options, [:event, :service])

        publish_topic_impl(options)
      end

      # Subscribe to receive topic type messages.
      # Expected keys in +options+ are:
      # * :service (service is used to determine the topic address)
      # Other options are underlying messaging system specific.
      #
      # Some messaging systems allow subscribers to consume events missed during the period when
      # the client is offline when they reconnect. Additional options are needed to turn on
      # this feature.
      #
      # A callback block is needed to consume the topic:
      #
      #   client.subscribe_topic(:service => 'provider_events') do |sender, event, payload|
      #     # sender, event, and payload are from publish_topic
      #   end
      #
      def subscribe_topic(options, &block)
        raise "A block is required" unless block_given?
        assert_options(options, [:service])

        subscribe_topic_impl(options, &block)
      end

      private

      # Module-wide logger shared by all protocol implementations.
      def logger
        ManageIQ::Messaging.logger
      end

      # Raise ArgumentError naming the required keys absent from +options+.
      def assert_options(options, keys)
        missing = keys - options.keys
        raise ArgumentError, "options must contain keys #{missing}" unless missing.empty?
      end
    end
  end
end
@@ -0,0 +1,36 @@
1
module ManageIQ
  module Messaging
    # Serialization helpers shared by the protocol implementations.
    module Common
      private

      # Serialize +body+ according to the client's +encoding+ ('json' or 'yaml'),
      # stamping the encoding into +headers+ so the receiver can decode it.
      # String bodies pass through untouched and no encoding header is set.
      # Raises for any other encoding value.
      def encode_body(headers, body)
        return body if body.kind_of?(String)

        headers[:encoding] = encoding
        case encoding
        when "json" then JSON.generate(body)
        when "yaml" then body.to_yaml
        else
          raise "unknown message encoding: #{encoding}"
        end
      end

      # Deserialize +raw_body+ using the "encoding" header stamped by the sender.
      # Bodies without a Hash of headers, or with an unrecognized encoding, are
      # returned untouched.
      # NOTE(review): lookup uses the String key "encoding" while encode_body
      # writes the Symbol key :encoding — presumably broker headers arrive with
      # string keys; confirm against the transport layer.
      # NOTE(review): YAML.safe_load rejects Symbols, so YAML payloads containing
      # symbol keys will raise here — verify intended payload shapes.
      def decode_body(headers, raw_body)
        return raw_body unless headers.kind_of?(Hash)

        case headers["encoding"]
        when "json" then JSON.parse(raw_body)
        when "yaml" then YAML.safe_load(raw_body)
        else
          raw_body
        end
      end

      # Truncate a payload to its first 101 characters for log lines.
      def payload_log(payload)
        payload.to_s[0..100]
      end
    end
  end
end
@@ -0,0 +1,7 @@
1
module ManageIQ
  module Messaging
    # Namespace for the Kafka protocol implementation.
    module Kafka
      # Lazily load the client so the 'kafka' gem is only required when used.
      autoload :Client, 'manageiq/messaging/kafka/client'
    end
  end
end
@@ -0,0 +1,13 @@
1
module ManageIQ
  module Messaging
    module Kafka
      # Background-job support for the Kafka client.
      module BackgroundJob
        private

        # Background jobs are not implemented for Kafka; the abstract
        # Client#subscribe_background_job delegates here and this raises
        # unconditionally.
        def subscribe_background_job_impl(_options)
          raise NotImplementedError
        end
      end
    end
  end
end
@@ -0,0 +1,91 @@
1
module ManageIQ
  module Messaging
    module Kafka
      # Messaging client implementation with Kafka being the underlying supporting system.
      # Do not directly instantiate an instance from this class. Use
      # +ManageIQ::Messaging::Client.open+ method.
      #
      # Kafka specific connection options accepted by +open+ method:
      # * :client_ref (A reference string to identify the client)
      # * :hosts (Array of Kafka cluster hosts, or)
      # * :host (Single host name)
      # * :port (host port number)
      # * :ssl_ca_cert (security options)
      # * :ssl_client_cert
      # * :ssl_client_cert_key
      # * :sasl_gssapi_principal
      # * :sasl_gssapi_keytab
      # * :sasl_plain_username
      # * :sasl_plain_password
      # * :sasl_scram_username
      # * :sasl_scram_password
      # * :sasl_scram_mechanism
      #
      # Kafka specific +publish_message+ options:
      # * :group_name (Used as Kafka partition_key)
      #
      # Kafka specific +subscribe_topic+ options:
      # * :persist_ref (Used as Kafka group_id)
      #
      # Without +:persist_ref+ every topic subscriber receives a copy of each message
      # only when they are active. If multiple topic subscribers join with the same
      # +:persist_ref+, each message is consumed by only one of the subscribers. This
      # allows a load balancing among the subscribers. Also any messages sent when
      # all members of the +:persist_ref+ group are offline will be persisted and delivered
      # when any member in the group is back online. Each message is still copied and
      # delivered to other subscribers that belongs to other +:persist_ref+ groups or no group.
      #
      # +subscribe_background_job+ is currently not implemented.
      class Client < ManageIQ::Messaging::Client
        require 'kafka'
        require 'manageiq/messaging/kafka/common'
        require 'manageiq/messaging/kafka/queue'
        require 'manageiq/messaging/kafka/background_job'
        require 'manageiq/messaging/kafka/topic'

        include Common
        include Queue
        include BackgroundJob
        include Topic

        # Transport-level calls stay private; callers use the publish_*/subscribe_*
        # API inherited from the abstract Client.
        # (A former `delegate :close` was removed: it was dead code, shadowed by
        # the explicit #close definition below.)
        private *delegate(:subscribe, :unsubscribe, :publish, :to => :kafka_client)

        attr_accessor :encoding

        # Kafka tracks offsets per consumer group; there is nothing to ack per
        # message, so this is deliberately a no-op.
        def ack(*_args)
        end

        # Stop the consumer and producer, then close the broker connection.
        # Now idempotent: a second call no longer raises NoMethodError on the
        # already-nil @kafka_client.
        def close
          @consumer.try(:stop)
          @consumer = nil

          @producer.try(:shutdown)
          @producer = nil

          @kafka_client.try(:close)
          @kafka_client = nil
        end

        private

        attr_reader :kafka_client

        def initialize(options)
          # Accept either a list of brokers (:hosts) or a single :host; every
          # entry is paired with the same :port.
          hosts = Array(options[:hosts] || options[:host])
          hosts.collect! { |host| "#{host}:#{options[:port]}" }

          @encoding = options[:encoding] || 'yaml'
          require "json" if @encoding == "json"

          connection_opts = {}
          connection_opts[:client_id] = options[:client_ref] if options[:client_ref]

          # Pass through only the recognized SSL/SASL security options.
          connection_opts.merge!(options.slice(:ssl_ca_cert, :ssl_client_cert, :ssl_client_cert_key, :sasl_gssapi_principal, :sasl_gssapi_keytab, :sasl_plain_username, :sasl_plain_password, :sasl_scram_username, :sasl_scram_password, :sasl_scram_mechanism))

          @kafka_client = ::Kafka.new(hosts, connection_opts)
        end
      end
    end
  end
end