kafka-rest-rb 0.1.0.alpha5 → 0.1.0.alpha6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 27bd59add9955e9335735cb54c34d4bc66343f75
4
- data.tar.gz: 24f6743e1828e96d689643e88308445fef656d39
3
+ metadata.gz: 2830f3a07c3fb219d42bf57f6339a7ecdc9bd471
4
+ data.tar.gz: cdfe237fd60fb23ec55ab65f237b61c70604c5e0
5
5
  SHA512:
6
- metadata.gz: 725737a430db0a97edbf2649792a489a993ae99ad9a051b5a36e7300c0c32613e8c1f08837ebf4faee9e61127da2823047258b888d8215fd2b893c031dba4102
7
- data.tar.gz: c0fa43a9dcb4591d24e33036a2cf923a8b9ce280e5dfa29af11530a1e44b99d90acd4733fea0f43c0715eb15bf43572d68748ec2a75ac5f100ed7309961693d4
6
+ metadata.gz: 8e345c4cc6b29a8fffe512c90502dc36db0c9f296c7859a11e1e76b63fc5dea3d3853f0ef016b4641f147399e443ad9ad7132e6985ea30c343f3d21a1ac7e962
7
+ data.tar.gz: 703f11ff97e5b580c67bd1a728f304cbc0bc42e65ba651b202b79e28aa5edd54b7c3eb96750df4d6ec83cc2aa8db2bd6ffd767d679e7b29a9609876afc2143da
@@ -28,7 +28,11 @@ module KafkaRest
28
28
 
29
29
  class JsonResponse < FaradayMiddleware::ResponseMiddleware
30
30
  define_parser do |body|
31
- MultiJson.load(body)
31
+ if body == ""
32
+ nil
33
+ else
34
+ MultiJson.load(body)
35
+ end
32
36
  end
33
37
  end
34
38
 
@@ -5,7 +5,6 @@ module KafkaRest
5
5
 
6
6
  def option(name, opts = {})
7
7
  name = name.to_s
8
- required = opts[:required] || false
9
8
  default = opts[:default]
10
9
  validate = opts[:validate] || ->(val) { true }
11
10
  error_msg = opts[:error_message] || "`#{name}`'s value is invalid"
@@ -23,13 +22,7 @@ module KafkaRest
23
22
 
24
23
  class << self
25
24
  def get_#{name}
26
- @#{name}.tap do |v|
27
- if #{required} && v.nil?
28
- raise KafkaRest::Dsl::MissingRequiredOption.new(
29
- "Missing required option `#{name}`"
30
- )
31
- end
32
- end
25
+ @#{name}
33
26
  end
34
27
 
35
28
  def #{name}(val)
@@ -1,3 +1,3 @@
1
1
  module KafkaRest
2
- VERSION = '0.1.0.alpha5'
2
+ VERSION = '0.1.0.alpha6'
3
3
  end
@@ -12,6 +12,8 @@ module KafkaRest
12
12
  def initialize(client)
13
13
  @client = client
14
14
  @started = false
15
+ @util_threads = []
16
+ @out_pipe, @in_pipe = IO.pipe
15
17
  @thread_pool = Concurrent::ThreadPoolExecutor.new(
16
18
  min_threads: KafkaRest.config.worker_min_threads,
17
19
  max_threads: KafkaRest.config.worker_max_threads,
@@ -25,34 +27,85 @@ module KafkaRest
25
27
  end
26
28
 
27
29
  def start
30
+ trap('SIGINT') { @in_pipe.puts 'SIGINT' } # more signals
31
+
28
32
  begin
29
33
  @running = true
30
34
 
31
- trap(:SIGINT) do
32
- stop
33
- end
35
+ util_thread { run_dead_cleaner }
36
+ util_thread { run_work_loop }
34
37
 
35
- init_consumers
36
- run_work_loop
38
+ wait_for_signal
37
39
  rescue => e
38
40
  logger.error "[Kafka REST] Got exception: #{e.class} (#{e.message})"
39
41
  e.backtrace.each { |msg| logger.error "\t #{msg}" }
40
42
  stop
43
+ ensure
44
+ [@in_pipe, @out_pipe].each &:close
41
45
  end
42
46
  end
43
47
 
44
48
  def stop
45
49
  logger.info "[Kafka REST] Stopping worker..."
50
+
46
51
  @running = false
47
- remove_consumers
52
+ @util_threads.map &:join
53
+
54
+ logger.info "[Kafka REST] Bye."
55
+ exit(0)
48
56
  end
49
57
 
50
58
  private
51
59
 
52
- def run_work_loop
60
+ # a dirty hack
61
+ def send_quit
62
+ @in_pipe.puts "SIGINT"
63
+ end
64
+
65
+ def wait_for_signal
66
+ while data = IO.select([@out_pipe])
67
+ signal = data.first[0].gets.strip
68
+ handle_signal(signal)
69
+ end
70
+ end
71
+
72
+ def handle_signal(signal)
73
+ case signal
74
+ when 'SIGINT'
75
+ stop
76
+ else
77
+ raise Interrupt
78
+ end
79
+ end
80
+
81
 + # Runs some job in a utility thread
82
+ def util_thread(&block)
83
+ parent = Thread.current
84
+
85
+ @util_threads << Thread.new(&block).tap do |t|
86
+ t.abort_on_exception = true
87
+ end
88
+ end
89
+
90
+ def run_dead_cleaner
53
91
  while @running
54
- check_dead!
92
+ sleep(3)
93
+
94
+ dead = @consumers.select(&:dead?)
95
+
96
+ if @consumers.count == dead.count
97
+ logger.warn "All consumers are dead. Quitting..."
98
+ send_quit
99
+ else
100
+ dead.each(&:remove!)
101
+ end
102
+ end
103
+ end
55
104
 
105
+ def run_work_loop
106
+ init_consumers
107
+
108
+ while @running
56
109
  jobs = @consumers.select(&:poll?)
57
110
 
58
111
  if jobs.empty?
@@ -70,22 +123,16 @@ module KafkaRest
70
123
  sleep(BUSY_THREAD_POOL_DELAY)
71
124
  end
72
125
  end
73
- end
74
126
 
75
- def check_dead!
76
- # Do we need this?
77
- if @consumers.all?(&:dead?)
78
- logger.info "[Kafka REST] All consumers are dead. Quitting..."
79
- stop
80
- end
127
+ remove_consumers
81
128
  end
82
129
 
83
130
  def init_consumers
84
- @consumers.map &:add!
131
+ @consumers.select(&:initial?).each(&:add!)
85
132
  end
86
133
 
87
134
  def remove_consumers
88
- @consumers.reject(&:initial?).map &:remove!
135
+ @consumers.reject(&:initial?).each(&:remove!)
89
136
  end
90
137
 
91
138
  def max_queue
@@ -0,0 +1,82 @@
1
+ module KafkaRest
2
+ class Worker
3
+ class Consumer
4
+ # Does the actual work of working with kafka-rest api,
5
 + # processing messages and committing offsets.
6
+ # This class is not thread-safe. In threaded environments
7
+ # it must be used within KafkaRest::Worker::ConsumerManager
8
+
9
+ include KafkaRest::Logging
10
+ extend Forwardable
11
+
12
+ def_delegators :@instance,
13
+ :topic, :group_name, :poll_delay, :auto_commit,
14
+ :offset_reset, :format, :max_bytes
15
+
16
+ def initialize(consumer, client)
17
+ @instance = consumer.new
18
+ @client = client
19
+ @id = nil
20
+ @uri = nil
21
+ end
22
+
23
+ def add!
24
+ params = {}
25
+ params[:auto_commit_enable] = auto_commit if auto_commit
26
+ params[:auto_offset_reset] = offset_reset if offset_reset
27
+ params[:format] = format if format
28
+
29
+ resp = @client.consumer_add(group_name, params)
30
+
31
+ @id = resp.body['instance_id']
32
+ @uri = resp.body['base_uri']
33
+
34
+ logger.info "[Kafka REST] Added consumer #{@id}"
35
+ end
36
+
37
+ def remove!
38
+ resp = @client.consumer_remove(group_name, @id)
39
+ @id = nil
40
+ @uri = nil
41
+
42
+ logger.info "[Kafka REST] Removed consumer #{@id}"
43
+ end
44
+
45
+ def poll!
46
+ params = {}
47
+ params[:format] = format if format
48
+ params[:max_bytes] = max_bytes if max_bytes
49
+
50
+ logger.debug "[Kafka REST] Polling consumer #{@id} in group #{group_name}..."
51
+
52
+ resp = @client.consumer_consume_from_topic(group_name, @id, topic, params)
53
+ process_messages(resp.body)
54
+ end
55
+
56
+ def commit!
57
+ @client.consumer_commit_offsets(group_name, @id)
58
+ end
59
+
60
+ def added?
61
+ !@id.nil?
62
+ end
63
+
64
+ private
65
+
66
+ def process_messages(messages)
67
+ if messages.any?
68
+ messages.each do |msg|
69
+ logger.debug "[Kafka REST] Consumer #{@id} got message: #{msg}"
70
+ @instance.receive ConsumerMessage.new(msg, topic)
71
+ end
72
+
73
+ commit! unless auto_commit
74
+
75
+ true
76
+ else
77
+ false
78
+ end
79
+ end
80
+ end
81
+ end
82
+ end
@@ -1,45 +1,57 @@
1
1
  require 'concurrent/utility/monotonic_time'
2
+ require 'kafka_rest/worker/consumer_message'
3
+ require 'kafka_rest/worker/consumer'
2
4
 
3
5
  module KafkaRest
4
6
  class Worker
5
7
  class ConsumerManager
8
+ # Manages state and lifecycle of a Consumer instance
9
+
6
10
  STATES = [:initial, :idle, :working, :dead]
7
11
 
8
12
  include KafkaRest::Logging
9
13
 
10
14
  class << self
11
- @@consumers = []
15
+ @@register_consumer_procs = []
16
+ @@consumers = nil
17
+
18
+ def register!(consumer)
19
 + # Delay validating consumers until they are actually needed
20
+ # and all classes are loaded.
21
+ @@register_consumer_procs << ->(consumers){
22
+ topic, group_name = consumer.get_topic, consumer.get_group_name
23
+
24
+ if topic.nil?
25
+ raise Exception.new("#{consumer.name}: topic must not be empty")
26
+ end
27
+
28
+ if group_name.nil?
29
+ raise Exception.new("#{consumer.name}: group_name must not be empty")
30
+ end
12
31
 
13
- def register!(consumer_class)
14
- # TODO: raise exception if group_id + topic are not unique
15
- # TODO: Thread.current???
16
- @@consumers << consumer_class
32
+ key = [topic, group_name]
33
+
34
+ if consumers.has_key?(key)
35
+ raise Exception.new("#{consumer.name}: group_name and topic are not unique")
36
+ end
37
+
38
+ { key => consumer }
39
+ }
17
40
  end
18
41
 
19
42
  def consumers
20
- @@consumers
43
+ @@consumers ||= @@register_consumer_procs.reduce({}) do |c, p|
44
+ c.merge(p.call c)
45
+ end.values
21
46
  end
22
47
  end
23
48
 
24
- extend Forwardable
25
-
26
- def_delegators :@consumer,
27
- :topic,
28
- :group_name,
29
- :poll_delay,
30
- :auto_commit,
31
- :offset_reset,
32
- :format,
33
- :max_bytes
34
-
35
- def initialize(client, consumer)
36
- @client = client
37
- @consumer = consumer.new
38
- @id = nil
39
- @uri = nil
40
- @state = :initial
41
- @next_poll = Concurrent.monotonic_time
42
- @lock = Mutex.new
49
+ def initialize(client, klass)
50
+ @consumer = Consumer.new(klass, client)
51
+ @state = :initial
52
+ @poll_delay = klass.get_poll_delay
53
+ @next_poll = Concurrent.monotonic_time
54
+ @lock = Mutex.new
43
55
  end
44
56
 
45
57
  STATES.each do |state|
@@ -50,72 +62,52 @@ module KafkaRest
50
62
  }
51
63
  end
52
64
 
53
- def poll?
54
- with_lock {
65
+ def poll?(lock = true)
66
+ with_lock(lock) do
55
67
  idle?(false) && Concurrent.monotonic_time > @next_poll
56
- }
68
+ end
57
69
  end
58
70
 
59
71
  def add!
60
- params = {}.tap do |h|
61
- auto_commit.nil? or h[:auto_commit_enable] = auto_commit
62
- offset_reset and h[:auto_offset_reset] = offset_reset
63
- format and h[:format] = format
72
+ with_lock do
73
+ return nil if @consumer.added?
74
+ @consumer.add!
75
+ @state = :idle
64
76
  end
65
-
66
- resp = @client.consumer_add(group_name, params)
67
- @id = resp.body['instance_id']
68
- @uri = resp.body['base_uri']
69
- @state = :idle
70
-
71
- logger.info "[Kafka REST] Added consumer #{@id}"
72
77
  end
73
78
 
74
79
  def remove!
75
- resp = @client.consumer_remove(group_name, @id)
76
- logger.info "[Kafka REST] Removed consumer #{@id}"
80
+ with_lock do
81
+ return nil unless @consumer.added?
82
+ @consumer.remove!
83
+ @state = :initial
84
+ end
77
85
  end
78
86
 
79
87
  def poll!
88
+ with_lock do
89
+ return nil unless idle?(false)
90
+ @state = :working
91
+ end
92
+
80
93
  begin
81
- with_lock do
82
- return false unless idle?(false)
83
- @state = :working
94
+ unless @consumer.poll!
95
+ @next_poll = Concurrent.monotonic_time + @poll_delay
84
96
  end
85
97
 
86
- logger.debug "Polling #{group_name}..."
87
-
88
- params = {}.tap do |h|
89
- format and h[:format] = format
90
- max_bytes and h[:max_bytes] = max_bytes
98
+ with_lock { @state = :idle }
99
+ rescue Exception => e
100
+ # TODO:
101
+ # - Recover from Faraday errors.
102
+ # - Mark dead only when encountering application errors,
103
+ # because it's obvious that after restart it will be raised again
104
+ logger.error "[Kafka REST] Consumer died due to an error"
105
+ logger.error "#{e.class}: #{e.message}"
106
+
107
+ e.backtrace.each do |s|
108
+ logger.error s
91
109
  end
92
110
 
93
- messages = @client.consumer_consume_from_topic(
94
- group_name,
95
- @id,
96
- topic,
97
- params
98
- ).body
99
-
100
- if messages.any?
101
- messages.each do |msg|
102
- logger.debug "[Kafka REST] Consumer #{@id} got message: #{msg}"
103
- @consumer.receive(msg)
104
- end
105
-
106
- unless auto_commit
107
- @client.consumer_commit_offsets(group_name, @id)
108
- end
109
-
110
- with_lock { @state = :idle }
111
- else
112
- with_lock do
113
- @next_poll = Concurrent.monotonic_time + poll_delay
114
- @state = :idle
115
- end
116
- end
117
- rescue Exception => e # TODO: handle errors
118
- logger.warn "[Kafka REST] Consumer died due to error: #{e.class}, #{e.message}"
119
111
  with_lock { @state = :dead }
120
112
  end
121
113
  end
@@ -0,0 +1,16 @@
1
+ module KafkaRest
2
+ class Worker
3
+ class ConsumerMessage
4
+ attr_reader :key, :value, :offset, :partition, :timestamp
5
+
6
+ def initialize(payload, topic)
7
+ @key = payload['key']
8
+ @value = payload['value']
9
+ @partition = payload['partition']
10
+ @timestamp = payload['timestamp']
11
+ @offset = payload['offset']
12
+ @topic = topic
13
+ end
14
+ end
15
+ end
16
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: kafka-rest-rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0.alpha5
4
+ version: 0.1.0.alpha6
5
5
  platform: ruby
6
6
  authors:
7
7
  - Theodore Konukhov
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2016-10-26 00:00:00.000000000 Z
11
+ date: 2016-11-01 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: faraday
@@ -195,7 +195,9 @@ files:
195
195
  - lib/kafka_rest/sender/test_sender.rb
196
196
  - lib/kafka_rest/version.rb
197
197
  - lib/kafka_rest/worker.rb
198
+ - lib/kafka_rest/worker/consumer.rb
198
199
  - lib/kafka_rest/worker/consumer_manager.rb
200
+ - lib/kafka_rest/worker/consumer_message.rb
199
201
  - perf/perftest
200
202
  homepage: https://github.com/konukhov/kafka-rest-rb
201
203
  licenses: