bps-kafka 0.0.1 → 0.1.3
- checksums.yaml +4 -4
- data/lib/bps/kafka.rb +2 -4
- data/lib/bps/publisher/kafka.rb +18 -5
- data/lib/bps/publisher/kafka_async.rb +4 -4
- data/spec/bps/kafka_spec.rb +52 -27
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b5d05abc6c4a93507504408f2896d0b58bfd0e0a31b8cc1158880cd10c0f9f4d
+  data.tar.gz: dedbc0320b57dab92bdc9496dce28e8bbf0f00d49783fb63b6970adf49ca411e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5aefa008139667df54d170f7f7ddbce4084136885dae21565f2734a71ce4016d3d633964076b9c8b4bcf44a24c0bcc0ad260fb5925eb334599018c7d4a3817b5
+  data.tar.gz: 1bc340118cd5b21d996f78f0705f8acd60f8d13a75c1e0576a60bc99f923015d0834a26932efa19a2b05a0f6f631be33395c62f89091ad6515ef0100ad9886e8
data/lib/bps/kafka.rb
CHANGED
@@ -6,13 +6,11 @@ require 'bps/publisher/kafka_async'
 module BPS
   module Publisher
     register('kafka+sync') do |url, **opts|
-
-      Kafka.new(addrs, **Kafka.coercer.coerce(opts))
+      Kafka.new(url, **Kafka.coercer.coerce(opts))
     end
 
     register('kafka') do |url, **opts|
-
-      KafkaAsync.new(addrs, **Kafka.coercer.coerce(opts))
+      KafkaAsync.new(url, **Kafka.coercer.coerce(opts))
     end
   end
 end
data/lib/bps/publisher/kafka.rb
CHANGED
@@ -3,8 +3,10 @@ require 'bps/kafka'
 module BPS
   module Publisher
     class Kafka < Abstract
-      class Topic
+      class Topic < Abstract::Topic
         def initialize(producer, topic)
+          super()
+
           @producer = producer
           @topic = topic
         end
@@ -14,7 +16,7 @@ module BPS
           after_publish
         end
 
-        def flush
+        def flush(**)
           @producer.deliver_messages
         end
 
@@ -74,12 +76,15 @@ module BPS
         @coercer ||= BPS::Coercer.new(CLIENT_OPTS.merge(PRODUCER_OPTS)).freeze
       end
 
-      # @param [Array<String
+      # @param [Array<String>,URI] brokers the seed broker addresses.
       # @param [Hash] opts the options.
       # @see https://www.rubydoc.info/gems/ruby-kafka/Kafka/Client#initialize-instance_method
       def initialize(broker_addrs, **opts)
-
-
+        super()
+
+        broker_addrs = parse_url(broker_addrs) if broker_addrs.is_a?(URI)
+        @topics = {}
+        @client = ::Kafka.new(broker_addrs, **opts.slice(*CLIENT_OPTS.keys))
         @producer = init_producer(**opts.slice(*PRODUCER_OPTS.keys))
       end
 
@@ -94,6 +99,14 @@ module BPS
 
       private
 
+      def parse_url(url)
+        port = url.port&.to_s || '9092'
+        CGI.unescape(url.host).split(',').map do |addr|
+          addr << ':' << port unless addr.match(/:\d+$/)
+          addr
+        end
+      end
+
       def init_producer(**opts)
         @producer = @client.producer(**opts)
       end
data/lib/bps/publisher/kafka_async.rb
CHANGED
@@ -14,18 +14,18 @@ module BPS
       # @param [Hash] opts the options.
       # @option opts [Integer] :max_queue_size (defaults to: 1000)
       #   the maximum number of messages allowed in the queue.
-      # @option opts [Integer] :delivery_threshold (defaults to:
+      # @option opts [Integer] :delivery_threshold (defaults to: 1000)
       #   if greater than zero, the number of buffered messages that will automatically
       #   trigger a delivery.
-      # @option opts [Integer] :delivery_interval (defaults to:
+      # @option opts [Integer] :delivery_interval (defaults to: 30) if greater than zero, the number of
       #   seconds between automatic message deliveries.
-      def initialize(broker_addrs, **opts)
+      def initialize(broker_addrs, **opts) # rubocop:disable Lint/UselessMethodDefinition
         super
       end
 
       private
 
-      def init_producer(max_queue_size: 1000, delivery_threshold:
+      def init_producer(max_queue_size: 1000, delivery_threshold: 1000, delivery_interval: 30)
         @client.async_producer(
           max_queue_size: max_queue_size,
           delivery_threshold: delivery_threshold,
data/spec/bps/kafka_spec.rb
CHANGED
@@ -1,38 +1,63 @@
 require 'spec_helper'
 require 'bps/kafka'
 
-
-
-
+RSpec.describe 'Kafka', kafka: true do
+  context 'resolve addrs' do
+    let(:client) { double('Kafka', producer: nil) }
+    before { allow(::Kafka).to receive(:new).and_return(client) }
 
-
-
-
-
-
-
-
-
+    it 'should resolve simple URLs' do
+      if Gem::Version.new(RUBY_VERSION) < Gem::Version.new('2.7')
+        expect(::Kafka).to receive(:new).with(['test.host:9092'], {}).and_return(client)
+      else
+        expect(::Kafka).to receive(:new).with(['test.host:9092']).and_return(client)
+      end
+      BPS::Publisher.resolve(URI.parse('kafka+sync://test.host:9092'))
+    end
 
-
-
-
-
-
-      y << msg.value
+    it 'should resolve URLs with multiple hosts' do
+      if Gem::Version.new(RUBY_VERSION) < Gem::Version.new('2.7')
+        expect(::Kafka).to receive(:new).with(['foo.host:9092', 'bar.host:9092'], {}).and_return(client)
+      else
+        expect(::Kafka).to receive(:new).with(['foo.host:9092', 'bar.host:9092']).and_return(client)
       end
-
-
-      client&.close
-    end
-  end
+      BPS::Publisher.resolve(URI.parse('kafka+sync://foo.host,bar.host:9092'))
+    end
 
-
-
-
+    it 'should resolve URLs with multiple hosts/ports' do
+      if Gem::Version.new(RUBY_VERSION) < Gem::Version.new('2.7')
+        expect(::Kafka).to receive(:new).with(['foo.host:9093', 'bar.host:9092'], {}).and_return(client)
+      else
+        expect(::Kafka).to receive(:new).with(['foo.host:9093', 'bar.host:9092']).and_return(client)
+      end
+      BPS::Publisher.resolve(URI.parse('kafka+sync://foo.host%3A9093,bar.host'))
+    end
   end
 
-
-
+  describe 'publishers' do
+    let(:kafka_addrs) { ENV.fetch('KAFKA_ADDRS', '127.0.0.1:9092').split(',') }
+
+    def read_messages(topic_name, num_messages)
+      client = ::Kafka.new(kafka_addrs)
+      Enumerator.new do |y|
+        client.each_message(topic: topic_name, start_from_beginning: true) do |msg|
+          y << msg.value
+        end
+      end.take(num_messages)
+    ensure
+      client&.close
+    end
+
+    context BPS::Publisher::Kafka do
+      let(:publisher_url) { "kafka+sync://#{CGI.escape(kafka_addrs.join(','))}/" }
+
+      it_behaves_like 'publisher'
+    end
+
+    context BPS::Publisher::KafkaAsync do
+      let(:publisher_url) { "kafka://#{CGI.escape(kafka_addrs.join(','))}/" }
+
+      it_behaves_like 'publisher'
+    end
   end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: bps-kafka
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.1.3
 platform: ruby
 authors:
 - Black Square Media
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-11-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bps
@@ -16,14 +16,14 @@ dependencies:
   requirements:
   - - '='
     - !ruby/object:Gem::Version
-      version: 0.
+      version: 0.1.3
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '='
     - !ruby/object:Gem::Version
-      version: 0.
+      version: 0.1.3
 - !ruby/object:Gem::Dependency
   name: ruby-kafka
   requirement: !ruby/object:Gem::Requirement
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.1.
+rubygems_version: 3.1.2
 signing_key:
 specification_version: 4
 summary: Kafka adapter for bps