ruby-kafka 0.7.2 → 0.7.3
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.circleci/config.yml +36 -1
- data/CHANGELOG.md +4 -0
- data/lib/kafka/async_producer.rb +9 -4
- data/lib/kafka/consumer.rb +1 -0
- data/lib/kafka/fetcher.rb +12 -12
- data/lib/kafka/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d8ea1d759bb30b26f2130fd560d2f09bd46b0960add9a1e65460e9643cb50142
+  data.tar.gz: 9816bf4c2c73567aa0a12fbc0cfe79af84861517a1845eaba5881ac05bd99f77
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c53dbab79d1157151328c2d718d1d2d2ba9600208419f5788c231df3cc63542f05bfe514cea199d72dde781eab252cf89fd9889f0e22c148afa210d1f499e28a
+  data.tar.gz: 8a8bc9e3dc7d7edb465ebe990c4343330b03db21d595f93cb82698e7abe68166f132975e3e1ff95805b663c2d8227b87e74342b4efab412fda3c4d43be190a97
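To check a downloaded copy against the SHA256 values above, the gem archive can be opened and its inner files hashed. A minimal sketch, assuming the gem was fetched as ruby-kafka-0.7.3.gem (for example with gem fetch ruby-kafka -v 0.7.3):

require "rubygems/package"
require "digest"

# A .gem file is a tar archive whose members include metadata.gz and
# data.tar.gz, the files the checksums.yaml entries refer to.
File.open("ruby-kafka-0.7.3.gem", "rb") do |io|
  reader = Gem::Package::TarReader.new(io)
  reader.each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end

The printed digests should match the "+" lines above for 0.7.3.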
data/.circleci/config.yml
CHANGED
@@ -26,6 +26,8 @@ jobs:
           KAFKA_DELETE_TOPIC_ENABLE: true
       - image: wurstmeister/kafka:2.11-0.11.0.3
         environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9093
           KAFKA_PORT: 9093
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
           KAFKA_DELETE_TOPIC_ENABLE: true
@@ -105,6 +107,38 @@ jobs:
       - run: bundle install --path vendor/bundle
       - run: bundle exec rspec --profile --tag functional spec/functional

+  kafka-2.0:
+    docker:
+      - image: circleci/ruby:2.5.1-node
+        environment:
+          LOG_LEVEL: DEBUG
+      - image: wurstmeister/zookeeper
+      - image: wurstmeister/kafka:2.11-2.0.0
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9092
+          KAFKA_PORT: 9092
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.11-2.0.0
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9093
+          KAFKA_PORT: 9093
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+      - image: wurstmeister/kafka:2.11-2.0.0
+        environment:
+          KAFKA_ADVERTISED_HOST_NAME: localhost
+          KAFKA_ADVERTISED_PORT: 9094
+          KAFKA_PORT: 9094
+          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
+    steps:
+      - checkout
+      - run: bundle install --path vendor/bundle
+      - run: bundle exec rspec --profile --tag functional spec/functional
+
 workflows:
   version: 2
   test:
@@ -112,4 +146,5 @@ workflows:
       - unit
       - kafka-0.11
       - kafka-1.0.0
-      - kafka-1.1
+      - kafka-1.1
+      - kafka-2.0
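The added KAFKA_ADVERTISED_HOST_NAME and KAFKA_ADVERTISED_PORT variables make each broker advertise an address the test process can actually reach, and the new kafka-2.0 job runs the functional suite against a three-broker Kafka 2.0.0 cluster on ports 9092 to 9094. A minimal sketch of how a functional spec could reach that cluster, assuming the broker addresses from the config above (the client_id and topic name are illustrative):

require "kafka"

# Seed brokers match the advertised listeners configured for the CI job.
kafka = Kafka.new(
  ["localhost:9092", "localhost:9093", "localhost:9094"],
  client_id: "functional-spec",
)

# Round-trip a message to confirm the cluster is reachable.
kafka.deliver_message("hello", topic: "test-topic")
puts kafka.topics.inspect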
data/CHANGELOG.md
CHANGED
@@ -4,6 +4,10 @@ Changes and additions to the library will be listed here.

 ## Unreleased

+## 0.7.3
+
+- Synchronize access to @worker_thread and @timer_thread in AsyncProducer to prevent creating multiple threads (#661).
+
 ## 0.7.2

 - Handle case when paused partition does not belong to group on resume (#656).
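The 0.7.3 entry describes a check-then-act race: two application threads can both observe that the worker (or timer) thread is missing and each start a new one. A minimal sketch of the problem and the mutex-based fix, with names modeled on the async_producer.rb diff below (not the library's actual code):

MUTEX = Mutex.new

def run_worker
  sleep 0.1 # stands in for the producer's real delivery loop
end

def ensure_worker_running!
  # Without the mutex, two callers could both see @worker as nil or dead
  # and both spawn a thread; serializing the check-and-create prevents that.
  MUTEX.synchronize do
    @worker = nil unless @worker && @worker.alive?
    @worker ||= Thread.new { run_worker }
  end
end

# Even when called from many threads at once, only one worker is created.
10.times.map { Thread.new { ensure_worker_running! } }.each(&:join)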
data/lib/kafka/async_producer.rb
CHANGED
@@ -59,6 +59,7 @@ module Kafka
   #   producer.shutdown
   #
   class AsyncProducer
+    THREAD_MUTEX = Mutex.new

     # Initializes a new AsyncProducer.
     #
@@ -146,11 +147,15 @@ module Kafka
     private

     def ensure_threads_running!
-      @worker_thread = nil unless @worker_thread && @worker_thread.alive?
-      @worker_thread ||= Thread.new { @worker.run }
+      THREAD_MUTEX.synchronize do
+        @worker_thread = nil unless @worker_thread && @worker_thread.alive?
+        @worker_thread ||= Thread.new { @worker.run }
+      end

-      @timer_thread = nil unless @timer_thread && @timer_thread.alive?
-      @timer_thread ||= Thread.new { @timer.run }
+      THREAD_MUTEX.synchronize do
+        @timer_thread = nil unless @timer_thread && @timer_thread.alive?
+        @timer_thread ||= Thread.new { @timer.run }
+      end
     end

     def buffer_overflow(topic, message)
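In AsyncProducer the worker and timer threads are created lazily by ensure_threads_running! rather than up front, so an application sharing one async producer across threads could previously end up with several worker threads. A small usage sketch, assuming a local broker and illustrative client, topic, and payload names:

require "kafka"

kafka = Kafka.new(["localhost:9092"], client_id: "example") # placeholder broker
producer = kafka.async_producer(delivery_interval: 10)

# Several application threads may call #produce concurrently; with the
# mutex in place only one worker and one timer thread are ever created.
threads = 4.times.map do
  Thread.new { producer.produce("payload", topic: "events") }
end
threads.each(&:join)

producer.deliver_messages
producer.shutdown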
data/lib/kafka/consumer.rb
CHANGED
data/lib/kafka/fetcher.rb
CHANGED
@@ -28,12 +28,6 @@ module Kafka

       # The maximum number of bytes to fetch per partition, by topic.
       @max_bytes_per_partition = {}
-
-      @thread = Thread.new do
-        loop while true
-      end
-
-      @thread.abort_on_exception = true
     end

     def subscribe(topic, max_bytes_per_partition:)
@@ -49,16 +43,21 @@ module Kafka
     end

     def start
-
-    end
+      return if @running

-
-
+      @thread = Thread.new do
+        while @running
+          loop
+        end
+        @logger.info "Fetcher thread exited."
+      end
+      @thread.abort_on_exception = true

       @running = true
     end

     def stop
+      return unless @running
       @commands << [:stop, []]
     end

@@ -81,14 +80,14 @@ module Kafka
         queue_size: @queue.size,
       })

+      return unless @running
+
       if !@commands.empty?
         cmd, args = @commands.deq

         @logger.debug "Handling fetcher command: #{cmd}"

         send("handle_#{cmd}", *args)
-      elsif !@running
-        sleep 0.1
       elsif @queue.size < @max_queue_size
         step
       else
@@ -110,6 +109,7 @@ module Kafka

     def handle_stop(*)
       @running = false
+      @commands.clear

       # After stopping, we need to reconfigure the topics and partitions to fetch
       # from. Otherwise we'd keep fetching from a bunch of partitions we may no
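Taken together, the Fetcher changes move the background thread from the constructor into #start, let the work loop exit once @running is false (instead of sleeping while stopped), and make #handle_stop clear pending commands so a stopped fetcher does not replay them later. A simplified, standalone sketch of that command-queue pattern (an illustration, not the library's actual class):

# A minimal command-queue worker in the style of the diff above.
class CommandWorker
  def initialize
    @commands = Queue.new
    @running = false
  end

  def start
    return if @running

    @running = true
    @thread = Thread.new do
      # The loop ends as soon as a stop command flips @running to false.
      while @running
        cmd, args = @commands.deq
        send("handle_#{cmd}", *args)
      end
    end
    @thread.abort_on_exception = true
  end

  def stop
    return unless @running
    @commands << [:stop, []]
    @thread.join
  end

  private

  def handle_stop(*)
    @running = false
    @commands.clear # drop anything queued behind the stop command
  end
end

worker = CommandWorker.new
worker.start
worker.stop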
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.7.2
+  version: 0.7.3
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-
+date: 2018-10-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: digest-crc
|