rom-kafka 0.0.1

This page lists the files added in the rom-kafka 0.0.1 gem release, followed by the diffs of selected spec files.
Files changed (51):
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +2 -0
  3. data/.gitignore +9 -0
  4. data/.metrics +9 -0
  5. data/.rspec +2 -0
  6. data/.rubocop.yml +2 -0
  7. data/.travis.yml +34 -0
  8. data/.yardopts +3 -0
  9. data/CHANGELOG.md +3 -0
  10. data/Gemfile +7 -0
  11. data/Guardfile +14 -0
  12. data/LICENSE +21 -0
  13. data/README.md +83 -0
  14. data/Rakefile +34 -0
  15. data/config/metrics/STYLEGUIDE +230 -0
  16. data/config/metrics/cane.yml +5 -0
  17. data/config/metrics/churn.yml +6 -0
  18. data/config/metrics/flay.yml +2 -0
  19. data/config/metrics/metric_fu.yml +14 -0
  20. data/config/metrics/reek.yml +1 -0
  21. data/config/metrics/roodi.yml +24 -0
  22. data/config/metrics/rubocop.yml +71 -0
  23. data/config/metrics/saikuro.yml +3 -0
  24. data/config/metrics/simplecov.yml +6 -0
  25. data/config/metrics/yardstick.yml +37 -0
  26. data/lib/rom-kafka.rb +3 -0
  27. data/lib/rom/kafka.rb +29 -0
  28. data/lib/rom/kafka/brokers.rb +72 -0
  29. data/lib/rom/kafka/brokers/broker.rb +68 -0
  30. data/lib/rom/kafka/connection.rb +22 -0
  31. data/lib/rom/kafka/connection/consumer.rb +105 -0
  32. data/lib/rom/kafka/connection/producer.rb +114 -0
  33. data/lib/rom/kafka/create.rb +75 -0
  34. data/lib/rom/kafka/dataset.rb +132 -0
  35. data/lib/rom/kafka/gateway.rb +165 -0
  36. data/lib/rom/kafka/relation.rb +78 -0
  37. data/lib/rom/kafka/version.rb +13 -0
  38. data/rom-kafka.gemspec +33 -0
  39. data/spec/integration/basic_usage_spec.rb +58 -0
  40. data/spec/integration/keys_usage_spec.rb +34 -0
  41. data/spec/shared/scholars_topic.rb +28 -0
  42. data/spec/spec_helper.rb +20 -0
  43. data/spec/unit/brokers/broker_spec.rb +89 -0
  44. data/spec/unit/brokers_spec.rb +46 -0
  45. data/spec/unit/connection/consumer_spec.rb +90 -0
  46. data/spec/unit/connection/producer_spec.rb +79 -0
  47. data/spec/unit/create_spec.rb +79 -0
  48. data/spec/unit/dataset_spec.rb +165 -0
  49. data/spec/unit/gateway_spec.rb +171 -0
  50. data/spec/unit/relation_spec.rb +96 -0
  51. metadata +219 -0
# spec/unit/connection/producer_spec.rb
# encoding: utf-8

describe ROM::Kafka::Connection::Producer do

  # ============================================================================
  # We test not the poseidon API, but its proper usage by the Producer.
  # That's why we stub poseidon classes.
  # ----------------------------------------------------------------------------
  let(:driver)     { Poseidon::Producer }
  let(:message)    { Poseidon::MessageToSend }
  let(:connection) { double :connection, send_messages: nil }

  before do
    allow(driver).to receive(:new) { connection }

    # Stubbed messages expose #to_tuple so the examples can inspect the
    # (topic, value, key) arguments the Producer passed to the driver.
    allow(message).to receive(:new) do |*args|
      double :message, to_tuple: Hash[[:topic, :value, :key].zip(args)]
    end
  end
  # ============================================================================

  let(:producer)   { described_class.new options }
  let(:client)     { "foo" }
  let(:brokers)    { %w(127.0.0.1:9092 127.0.0.2:9093) }
  let(:options)    { attributes.merge(brokers: brokers, client_id: client) }
  let(:attributes) do
    {
      partitioner: proc { |v, c| v % c },
      type: :sync,
      compression_codec: :gzip,
      metadata_refresh_interval_ms: 100,
      max_send_retries: 3,
      retry_backoff_ms: 300,
      required_acks: 2,
      ack_timeout_ms: 200,
      socket_timeout_ms: 400
    }
  end

  describe "#connection" do
    subject { producer.connection }

    it "instantiates the driver" do
      # Brokers and client id are extracted from the options; the rest of the
      # attributes are forwarded to the driver as-is.
      expect(driver).to receive(:new).with(brokers, client, attributes)
      expect(subject).to eql(connection)
    end
  end # describe #connection

  describe "#publish" do
    subject { producer.publish(*input) }

    # Input mixes a plain tuple with a nested array to check flattening;
    # the integer key is expected to be stringified, a missing key becomes nil.
    let(:input) do
      [
        { value: "foo", topic: "foos", key: 1 },
        [{ value: "bar", topic: "bars" }]
      ]
    end

    let(:output) do
      [
        { value: "foo", topic: "foos", key: "1" },
        { value: "bar", topic: "bars", key: nil }
      ]
    end

    it "builds messages and sends it to the #connection" do
      expect(connection).to receive(:send_messages) do |args|
        expect(args.map(&:to_tuple)).to eql output
      end

      subject
    end

    it "returns the plain array of tuples" do
      expect(subject).to eql output
    end
  end # describe #publish

end # describe ROM::Kafka::Connection::Producer
# spec/unit/create_spec.rb
# encoding: utf-8

describe ROM::Kafka::Commands::Create do

  let(:command)  { described_class.new relation }
  let(:relation) { double :relation, dataset: dataset }
  let(:dataset)  { double :dataset, producer: producer, topic: "qux" }
  let(:producer) { double :producer, publish: output }
  let(:output)   { double :output }

  describe ".adapter" do
    subject { described_class.adapter }

    it { is_expected.to eql(:kafka) }
  end # describe .adapter

  describe ".new" do
    subject { command }

    it { is_expected.to be_kind_of ROM::Commands::Create }
  end # describe .new

  describe "#key" do
    subject { command.key }

    it { is_expected.to be_nil }
  end # describe #key

  describe "#with" do
    subject { command.with(key: "foo") }

    it "returns a command" do
      expect(subject).to be_kind_of described_class
    end

    it "preserves current relation" do
      expect(subject.relation).to eql relation
    end

    it "updates the key" do
      expect(subject.key).to eql("foo")
    end
  end # describe #with

  describe "#call" do
    subject { command.call(:bar, ["baz"]) }

    context "when key isn't set" do
      # Without a key every published tuple carries key: nil.
      let(:bar) { { value: "bar", topic: "qux", key: nil } }
      let(:baz) { { value: "baz", topic: "qux", key: nil } }

      it "publishes tuples to the producer" do
        expect(producer).to receive(:publish).with(bar, baz)
        subject
      end

      it "returns tuples" do
        expect(subject).to eql output
      end
    end

    context "when key is set" do
      let(:command) { described_class.new(relation).with(key: "foo") }

      # The key assigned via #with is stamped onto every tuple.
      let(:bar) { { value: "bar", topic: "qux", key: "foo" } }
      let(:baz) { { value: "baz", topic: "qux", key: "foo" } }

      it "publishes tuples to the producer" do
        expect(producer).to receive(:publish).with(bar, baz)
        subject
      end

      it "returns tuples" do
        expect(subject).to eql output
      end
    end
  end # describe #call

end # describe ROM::Kafka::Commands::Create
# spec/unit/dataset_spec.rb
# encoding: utf-8

describe ROM::Kafka::Dataset do

  let(:gateway_class)  { ROM::Kafka::Gateway }
  let(:consumer_class) { ROM::Kafka::Connection::Consumer }
  let(:consumer)       { double :consumer }
  before { allow(consumer_class).to receive(:new) { consumer } }

  # The topic is given as a symbol to check it is stringified by the dataset.
  let(:dataset) { described_class.new(gateway, topic.to_sym) }
  let(:gateway) { gateway_class.new client_id: "foo" }
  let(:topic)   { "bar" }

  describe "#gateway" do
    subject { dataset.gateway }

    it "is initialized" do
      expect(subject).to eql gateway
    end
  end # describe #gateway

  describe "#topic" do
    subject { dataset.topic }

    it "is initialized" do
      expect(subject).to eql topic
    end
  end # describe #topic

  describe "#attributes" do
    subject { dataset.attributes }

    context "by default" do
      # Byte limits and timeouts default to the gateway's settings.
      let(:attributes) do
        {
          partition: 0,
          offset: 0,
          limit: 0,
          min_bytes: gateway.min_bytes,
          max_bytes: gateway.max_bytes,
          max_wait_ms: gateway.max_wait_ms
        }
      end

      it "is taken from a gateway" do
        expect(subject).to eql attributes
      end
    end

    context "when options are set" do
      let(:dataset) { described_class.new(gateway, topic, attributes) }
      let(:attributes) do
        {
          partition: 1,
          offset: 2,
          limit: 10,
          min_bytes: 1_024,
          max_bytes: 10_240,
          max_wait_ms: 100
        }
      end

      it "is initialized" do
        expect(subject).to eql attributes
      end
    end
  end # describe #attributes

  describe "#producer" do
    subject { dataset.producer }

    it "is taken from #gateway" do
      expect(subject).to eql gateway.producer
    end
  end # describe #producer

  describe "#consumer" do
    subject { dataset.consumer }

    let(:dataset) { described_class.new(gateway, topic, attributes) }

    let(:attributes) do
      {
        partition: 1,
        offset: 2,
        limit: 0,
        min_bytes: 1_024,
        max_bytes: 10_240,
        max_wait_ms: 100
      }
    end

    # The consumer receives the dataset attributes along with the topic
    # and the gateway's connection settings.
    let(:options) do
      attributes.merge(
        topic: topic,
        client_id: gateway.client_id,
        brokers: gateway.brokers
      )
    end

    it "is initialized with proper options" do
      expect(consumer_class).to receive(:new).with(options)
      expect(subject).to eql consumer
    end
  end # describe #consumer

  describe "#using" do
    subject { dataset.using(update) }

    let(:dataset) { described_class.new gateway, topic, min_bytes: 8 }
    let(:update)  { { partition: 1, offset: 2 } }

    it "builds new dataset" do
      expect(subject).to be_kind_of described_class
    end

    it "preserves gateway" do
      expect(subject.gateway).to eql(gateway)
    end

    it "preserves topic" do
      expect(subject.topic).to eql(topic)
    end

    it "updates attributes" do
      expect(subject.attributes).to eql(dataset.attributes.merge(update))
    end
  end # describe #using

  describe "#each" do
    subject { dataset.to_a }

    let(:consumer) { double :consumer, each: data.each }
    let(:data)     { %w(foo bar baz qux) }

    context "when limit isn't set" do
      it "is delegated to the consumer" do
        expect(subject).to eql data
      end

      it "yields limited number of times" do
        expect { |b| dataset.each(&b) }.to yield_control.exactly(4).times
      end
    end

    context "when limit is set" do
      let(:dataset) { described_class.new(gateway, topic, limit: 2) }

      it "is delegated to the consumer" do
        expect(subject).to eql data[0..1]
      end

      it "yields limited number of times" do
        expect { |b| dataset.each(&b) }.to yield_control.twice
      end
    end

    context "without a block" do
      subject { dataset.each }

      it { is_expected.to be_kind_of Enumerator }
    end
  end # describe #each

end # describe ROM::Kafka::Dataset
# spec/unit/gateway_spec.rb
# encoding: utf-8

describe ROM::Kafka::Gateway do

  let(:gateway) { described_class.new(client_id: :foo) }

  describe ".new" do
    # FIX: this context actually provides a client id (see let(:gateway) above);
    # it was previously mislabeled "without client id" like its sibling.
    context "with client id" do
      subject { gateway }

      it { is_expected.to be_kind_of ROM::Gateway }
    end

    context "without client id" do
      subject { described_class.new }

      it "fails" do
        expect { subject }.to raise_error ArgumentError
      end
    end
  end # describe .new

  describe "#brokers" do
    subject { gateway.brokers }

    let(:brokers) { %w(localhost:9092 127.0.0.1:9092) }

    context "from strings" do
      let(:gateway) { described_class.new(*brokers, client_id: :foo) }

      it { is_expected.to eql brokers }
    end

    context "from hosts and port options" do
      let(:gateway) do
        described_class
          .new(client_id: :foo, hosts: %w(localhost 127.0.0.1), port: 9092)
      end

      it { is_expected.to eql brokers }
    end

    context "from mixed options" do
      let(:gateway) do
        described_class
          .new("localhost", client_id: :foo, hosts: %w(127.0.0.1), port: 9092)
      end

      it { is_expected.to eql brokers }
    end
  end # describe #brokers

  describe "#attributes" do
    subject { gateway.attributes }

    context "by default" do
      let(:attributes) { { client_id: :foo } }

      it "is set" do
        expect(subject).to eql(
          ack_timeout_ms: 1_500,
          brokers: ["localhost:9092"],
          client_id: "foo",
          compression_codec: nil,
          max_bytes: 1_048_576,
          max_send_retries: 3,
          max_wait_ms: 100,
          metadata_refresh_interval_ms: 600_000,
          min_bytes: 1,
          partitioner: nil,
          required_acks: 0,
          retry_backoff_ms: 100,
          socket_timeout_ms: 10_000
        )
      end
    end

    context "when assigned" do
      let(:gateway) { described_class.new(attributes) }

      let(:attributes) do
        {
          ack_timeout_ms: 200,
          brokers: ["localhost:9092"],
          client_id: "foo",
          compression_codec: :gzip,
          max_bytes: 2_048,
          max_send_retries: 2,
          max_wait_ms: 300,
          metadata_refresh_interval_ms: 300_000,
          min_bytes: 1_024,
          partitioner: proc { |value, count| value % count },
          required_acks: 1,
          retry_backoff_ms: 200,
          socket_timeout_ms: 20_000
        }
      end

      it { is_expected.to eql attributes }
    end
  end # describe #attributes

  describe "#[]" do
    subject { gateway[:foo] }

    it { is_expected.to eql(nil) }
  end # describe #[]

  describe "#dataset?" do
    before do
      allow(gateway).to receive(:[]) { |name| { foo: :FOO }[name.to_sym] }
    end

    context "when dataset is registered" do
      subject { gateway.dataset? "foo" }

      it { is_expected.to eql true }
    end

    context "when dataset isn't registered" do
      subject { gateway.dataset? "bar" }

      it { is_expected.to eql false }
    end
  end # describe #dataset?

  describe "#dataset" do
    subject { gateway.dataset topic }

    let(:klass)   { ROM::Kafka::Dataset }
    let(:dataset) { double :dataset }
    let(:topic)   { "foobar" }

    before { allow(klass).to receive(:new) { dataset } }

    it "builds a dataset" do
      expect(klass).to receive(:new).with(gateway, topic)
      subject
    end

    it "registers a dataset by symbol" do
      expect { subject }.to change { gateway[:foobar] }.from(nil).to(dataset)
    end

    it "registers a dataset by string" do
      expect { subject }.to change { gateway["foobar"] }.from(nil).to(dataset)
    end

    it "returns a dataset" do
      expect(subject).to eql(dataset)
    end
  end # describe #dataset

  describe "#producer" do
    subject { gateway.producer }

    # NOTE: the unused `let(:producer)` was removed — the example below builds
    # its own local double and the let was never evaluated.
    it "builds a producer" do
      attributes = {}
      producer = double :producer

      # Capture the options handed to the producer so we can compare them
      # against the gateway's attributes after the call.
      expect(ROM::Kafka::Connection::Producer).to receive(:new) do |opts|
        attributes = opts
        producer
      end
      expect(subject).to eql producer
      expect(attributes).to eql gateway.attributes
    end
  end # describe #producer

end # describe ROM::Kafka::Gateway