kafkat 0.0.11 → 0.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/.gitignore +1 -1
- data/lib/kafkat/command/drain.rb +7 -2
- data/lib/kafkat/utility/formatting.rb +1 -1
- data/lib/kafkat/version.rb +1 -1
- data/spec/factories/topic.rb +20 -20
- data/spec/lib/kafkat/command/drain_spec.rb +6 -6
- metadata +3 -3
- data/Gemfile.lock +0 -69
checksums.yaml
CHANGED
@@ -1,15 +1,15 @@
 ---
 !binary "U0hBMQ==":
   metadata.gz: !binary |-
-
+    MzRmNjZhMzY3YjRlNzRkZWFiMjhlNzE2ZTE3YWQyMTAzMDZkMmNiZQ==
   data.tar.gz: !binary |-
-
+    MDg5MDkwMTk1ZjU1MDBlYWIzYThmY2ZhYTg3OWY3ODYxZDY1ZTJlNQ==
 SHA512:
   metadata.gz: !binary |-
-
-
-
+    NzA4ODM1NDRkMGNkNmYxODZlMzNkMDQ0YWEyYTY0MGRiNTQ4Njg5MGEyOWU2
+    YzVhYjI2ZjM4YjZjYjUzYWRhYjA0YTIxMDcyNTM2MTVhZjJjYjVkZTNiYzM0
+    ZjQ4Y2FiZTBiZWJmOGEyNDUxMjQyM2IxZTkyYjFmZTQ0Y2M4NDQ=
   data.tar.gz: !binary |-
-
-
-
+    YWMxYTY2ZDVjMjMzYTRlMWIwN2E2NWQyNTE4NTdkYTM5NTVmNjBkNWUwMzJk
+    ZTE4MmE5ZDJjNjBlZGQyYWI1NzA1NmNmN2U4ZjBkMGNkNjQ5ZDFmMjhjYjAw
+    ZGFlMjc5M2FkYzNkMTVlYzNmNDYzMjdiYzJlNTM5M2JjOGE2OTY=
data/.gitignore
CHANGED
data/lib/kafkat/command/drain.rb
CHANGED
@@ -6,7 +6,7 @@ module Kafkat
     usage 'drain <broker id> [--topic <t>] [--brokers <ids>]',
       'Reassign partitions from a specific broker to destination brokers.'

-    # For each partition (of
+    # For each partition (of specified topic) on the source broker, the command is to
     # assign the partition to one of the destination brokers that does not already have
     # this partition, along with existing brokers to achieve minimal movement of data.
     # To help distribute data evenly, if there are more than one destination brokers
@@ -57,6 +57,7 @@ module Kafkat
       t.partitions.each do |p|
         if p.replicas.include? source_broker
           replicas = p.replicas - [source_broker]
+          source_broker_is_leader = p.replicas.first == source_broker
           potential_broker_ids = destination_brokers - replicas
           if potential_broker_ids.empty?
             print "ERROR: Not enough destination brokers to reassign topic \"#{t.name}\".\n"
@@ -66,7 +67,11 @@ module Kafkat
           num_partitions_on_potential_broker =
             partitions_by_broker.select { |id, _| potential_broker_ids.include? id }
           assigned_broker_id = num_partitions_on_potential_broker.min_by{ |id, num| num }[0]
-
+          if source_broker_is_leader
+            replicas.unshift(assigned_broker_id)
+          else
+            replicas << assigned_broker_id
+          end
           partitions_by_broker[assigned_broker_id] += 1

           assignments << Assignment.new(t.name, p.id, replicas)
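The hunk above is the substantive change in 0.0.12: when the broker being drained is the partition leader (the first entry in the replica list), the replacement broker is now placed at the head of the list instead of being appended, so the generated reassignment keeps a preferred leader on a surviving broker. A minimal standalone sketch of that step, with hypothetical inputs (the surrounding command plumbing in drain.rb is omitted):

# Sketch of the leader-preserving step added in drain.rb; the input values
# below are hypothetical, chosen to show the leader case.
source_broker       = 0
destination_brokers = [2]
p_replicas          = [0, 1]   # replica list for one partition; first entry is the preferred leader

replicas = p_replicas - [source_broker]                        # => [1]
source_broker_is_leader = p_replicas.first == source_broker    # => true
assigned_broker_id = (destination_brokers - replicas).first    # => 2

if source_broker_is_leader
  replicas.unshift(assigned_broker_id)   # replacement takes the leader slot => [2, 1]
else
  replicas << assigned_broker_id         # 0.0.11 always appended here
end

puts replicas.inspect                    # => [2, 1]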
data/lib/kafkat/version.rb
CHANGED
data/spec/factories/topic.rb
CHANGED
@@ -4,35 +4,35 @@ module Kafkat
     name "topic_name"

     factory :topic_with_one_empty_broker do
-      partitions {[Partition.new(name, 0, [0], 0, 0),
-                   Partition.new(name, 1, [1], 1, 1),
-                   Partition.new(name, 2, [0], 0, 0),
-                   Partition.new(name, 3, [0], 0, 0),
-                   Partition.new(name, 4, [1], 1, 1)]}
+      partitions {[Partition.new(name, 0, [0], 0, [0]),
+                   Partition.new(name, 1, [1], 1, [1]),
+                   Partition.new(name, 2, [0], 0, [0]),
+                   Partition.new(name, 3, [0], 0, [0]),
+                   Partition.new(name, 4, [1], 1, [1])]}
     end

     factory :topic_rep_factor_one do
-      partitions {[Partition.new(name, 0, [0], 0, 0),
-                   Partition.new(name, 1, [1], 1, 1),
-                   Partition.new(name, 2, [2], 2, 2),
-                   Partition.new(name, 3, [0], 0, 0),
-                   Partition.new(name, 4, [1], 1, 1)]}
+      partitions {[Partition.new(name, 0, [0], 0, [0]),
+                   Partition.new(name, 1, [1], 1, [1]),
+                   Partition.new(name, 2, [2], 2, [2]),
+                   Partition.new(name, 3, [0], 0, [0]),
+                   Partition.new(name, 4, [1], 1, [1])]}
     end

     factory :topic_rep_factor_two do
-      partitions {[Partition.new(name, 0, [0, 1], 0, 0),
-                   Partition.new(name, 1, [0, 2], 2, 2),
-                   Partition.new(name, 2, [1, 2], 1, 1),
-                   Partition.new(name, 3, [0, 1], 0, 0),
-                   Partition.new(name, 4, [0, 2], 2, 2)]}
+      partitions {[Partition.new(name, 0, [0, 1], 0, [0]),
+                   Partition.new(name, 1, [0, 2], 2, [2]),
+                   Partition.new(name, 2, [1, 2], 1, [1]),
+                   Partition.new(name, 3, [0, 1], 0, [0]),
+                   Partition.new(name, 4, [0, 2], 2, [2])]}
     end

     factory :topic_rep_factor_three do
-      partitions {[Partition.new(name, 0, [0, 1, 2], 0, 0),
-                   Partition.new(name, 1, [0, 1, 2], 1, 1),
-                   Partition.new(name, 2, [0, 1, 2], 2, 2),
-                   Partition.new(name, 3, [0, 1, 2], 0, 0),
-                   Partition.new(name, 4, [0, 1, 2], 1, 1)]}
+      partitions {[Partition.new(name, 0, [0, 1, 2], 0, [0]),
+                   Partition.new(name, 1, [0, 1, 2], 1, [1]),
+                   Partition.new(name, 2, [0, 1, 2], 2, [2]),
+                   Partition.new(name, 3, [0, 1, 2], 0, [0]),
+                   Partition.new(name, 4, [0, 1, 2], 1, [1])]}
     end
   end
 end
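For context on the fifth factory argument changing from a bare broker id to an array: these factories build kafkat's Partition value object, whose last field is consumed as the in-sync replica list, so a one-element array is the expected shape. A hedged sketch of the assumed object (its real definition lives elsewhere in the gem and is not part of this diff; the field names are an assumption):

# Assumed shape of the Partition value object these factories construct
# (not shown in this diff; field names are an assumption based on usage).
Partition = Struct.new(:topic_name, :id, :replicas, :leader, :isr)

p = Partition.new("topic_name", 0, [0, 1], 0, [0, 1])
p.replicas  # => [0, 1]  broker ids holding a copy of the partition
p.leader    # => 0       broker id of the current leader
p.isr       # => [0, 1]  in-sync replicas, now passed as an array rather than an integer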
data/spec/lib/kafkat/command/drain_spec.rb
CHANGED
@@ -13,7 +13,7 @@ module Kafkat
       assignments = drain.generate_assignments(broker_id,
                                                {"topic_name" => topic_rep_factor_one},
                                                destination_broker_ids)
-      expect(assignments).to have_exactly(2).
+      expect(assignments).to have_exactly(2).items
       expect(assignments[0].replicas).to eq([2])
       expect(assignments[1].replicas).to eq([1])
     end
@@ -25,11 +25,11 @@ module Kafkat
       assignments = drain.generate_assignments(broker_id,
                                                {"topic_name" => topic_rep_factor_two},
                                                destination_broker_ids)
-      expect(assignments).to have_exactly(4).
-      expect(assignments[0].replicas).to eq([
-      expect(assignments[1].replicas).to eq([
-      expect(assignments[2].replicas).to eq([
-      expect(assignments[3].replicas).to eq([
+      expect(assignments).to have_exactly(4).items
+      expect(assignments[0].replicas).to eq([2, 1])
+      expect(assignments[1].replicas).to eq([1, 2])
+      expect(assignments[2].replicas).to eq([2, 1])
+      expect(assignments[3].replicas).to eq([1, 2])
     end
   end

metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: kafkat
 version: !ruby/object:Gem::Version
-  version: 0.0.
+  version: 0.0.12
 platform: ruby
 authors:
 - Nelson Gauthier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-05-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: zk
@@ -181,7 +181,6 @@ files:
 - .simplecov
 - .travis.yml
 - Gemfile
-- Gemfile.lock
 - LICENSE.txt
 - README.md
 - Rakefile
@@ -247,3 +246,4 @@ test_files:
 - spec/factories/topic.rb
 - spec/lib/kafkat/command/drain_spec.rb
 - spec/spec_helper.rb
+has_rdoc:
data/Gemfile.lock
DELETED
@@ -1,69 +0,0 @@
|
|
1
|
-
PATH
|
2
|
-
remote: .
|
3
|
-
specs:
|
4
|
-
kafkat (0.0.11)
|
5
|
-
colored (~> 1.2)
|
6
|
-
highline (~> 1.6, >= 1.6.21)
|
7
|
-
retryable (~> 1.3, >= 1.3.5)
|
8
|
-
trollop (~> 2.0)
|
9
|
-
zk (~> 1.9, >= 1.9.4)
|
10
|
-
|
11
|
-
GEM
|
12
|
-
remote: https://rubygems.org/
|
13
|
-
specs:
|
14
|
-
activesupport (4.2.1)
|
15
|
-
i18n (~> 0.7)
|
16
|
-
json (~> 1.7, >= 1.7.7)
|
17
|
-
minitest (~> 5.1)
|
18
|
-
thread_safe (~> 0.3, >= 0.3.4)
|
19
|
-
tzinfo (~> 1.1)
|
20
|
-
colored (1.2)
|
21
|
-
diff-lcs (1.2.5)
|
22
|
-
docile (1.1.5)
|
23
|
-
factory_girl (4.5.0)
|
24
|
-
activesupport (>= 3.0.0)
|
25
|
-
highline (1.7.2)
|
26
|
-
i18n (0.7.0)
|
27
|
-
json (1.8.3)
|
28
|
-
minitest (5.7.0)
|
29
|
-
multi_json (1.11.1)
|
30
|
-
rake (10.4.2)
|
31
|
-
retryable (1.3.6)
|
32
|
-
rspec (3.2.0)
|
33
|
-
rspec-core (~> 3.2.0)
|
34
|
-
rspec-expectations (~> 3.2.0)
|
35
|
-
rspec-mocks (~> 3.2.0)
|
36
|
-
rspec-collection_matchers (1.1.2)
|
37
|
-
rspec-expectations (>= 2.99.0.beta1)
|
38
|
-
rspec-core (3.2.3)
|
39
|
-
rspec-support (~> 3.2.0)
|
40
|
-
rspec-expectations (3.2.1)
|
41
|
-
diff-lcs (>= 1.2.0, < 2.0)
|
42
|
-
rspec-support (~> 3.2.0)
|
43
|
-
rspec-mocks (3.2.1)
|
44
|
-
diff-lcs (>= 1.2.0, < 2.0)
|
45
|
-
rspec-support (~> 3.2.0)
|
46
|
-
rspec-support (3.2.2)
|
47
|
-
simplecov (0.9.0)
|
48
|
-
docile (~> 1.1.0)
|
49
|
-
multi_json
|
50
|
-
simplecov-html (~> 0.8.0)
|
51
|
-
simplecov-html (0.8.0)
|
52
|
-
thread_safe (0.3.5)
|
53
|
-
trollop (2.1.2)
|
54
|
-
tzinfo (1.2.2)
|
55
|
-
thread_safe (~> 0.1)
|
56
|
-
zk (1.9.6)
|
57
|
-
zookeeper (~> 1.4.0)
|
58
|
-
zookeeper (1.4.11)
|
59
|
-
|
60
|
-
PLATFORMS
|
61
|
-
ruby
|
62
|
-
|
63
|
-
DEPENDENCIES
|
64
|
-
factory_girl (~> 4.5.0)
|
65
|
-
kafkat!
|
66
|
-
rake
|
67
|
-
rspec (~> 3.2.0)
|
68
|
-
rspec-collection_matchers (~> 1.1.0)
|
69
|
-
simplecov
|