simple_kafka_consumer 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +14 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +79 -0
- data/Rakefile +2 -0
- data/lib/simple_kafka_consumer.rb +8 -0
- data/lib/simple_kafka_consumer/consumer.rb +48 -0
- data/lib/simple_kafka_consumer/version.rb +3 -0
- data/simple_kafka_consumer.gemspec +27 -0
- metadata +124 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: eaf8648e57f4f0e015d8409855ff5dfa127f09c3
|
4
|
+
data.tar.gz: ea11108291577064530a0a5728d0daa0dee2fdce
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 872f04ac59f0bd87c1c34d293370caa796db5dd10ec9e44cd2a6f30239462cfe77204232ea70733c3227217c80c4d1e715bbbd147cf1e1323458a2db7f5609a6
|
7
|
+
data.tar.gz: 6b6b8d44c1c1f825131ce494e77cba1f8eaf71238935a6efe33fe86a2349dbf000e20c2c1d0267f621d3bffc57c1e1dd52ff104680489be28c20011634f50b35
|
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,22 @@
|
|
1
|
+
Copyright (c) 2015 Jeff Ching
|
2
|
+
|
3
|
+
MIT License
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
6
|
+
a copy of this software and associated documentation files (the
|
7
|
+
"Software"), to deal in the Software without restriction, including
|
8
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
9
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
10
|
+
permit persons to whom the Software is furnished to do so, subject to
|
11
|
+
the following conditions:
|
12
|
+
|
13
|
+
The above copyright notice and this permission notice shall be
|
14
|
+
included in all copies or substantial portions of the Software.
|
15
|
+
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
17
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
18
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
19
|
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
20
|
+
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
21
|
+
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
22
|
+
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,79 @@
|
|
1
|
+
# SimpleKafkaConsumer
|
2
|
+
|
3
|
+
Write Kafka consumers in a model with retry
|
4
|
+
|
5
|
+
## Installation
|
6
|
+
|
7
|
+
Add this line to your application's Gemfile:
|
8
|
+
|
9
|
+
```ruby
|
10
|
+
gem 'simple_kafka_consumer'
|
11
|
+
```
|
12
|
+
|
13
|
+
And then execute:
|
14
|
+
|
15
|
+
$ bundle
|
16
|
+
|
17
|
+
Or install it yourself as:
|
18
|
+
|
19
|
+
$ gem install simple_kafka_consumer
|
20
|
+
|
21
|
+
## Usage
|
22
|
+
|
23
|
+
You will want to write your own consumer class that inherits from `SimpleKafkaConsumer::Consumer`. You will want to specify the `group_name` and `topic_name`. You'll also want to define the `consume` method which is the handler for batch of messages received.
|
24
|
+
|
25
|
+
```ruby
|
26
|
+
class MyConsumer < SimpleKafkaConsumer::Consumer
|
27
|
+
# the name used for coordinating multiple consumers
|
28
|
+
self.group_name = "my-group-name"
|
29
|
+
|
30
|
+
# the kafka topic we're reading from
|
31
|
+
self.topic_name = "my-topic-name"
|
32
|
+
|
33
|
+
# handle the messages
|
34
|
+
def consume(message)
|
35
|
+
puts message
|
36
|
+
end
|
37
|
+
end
|
38
|
+
```
|
39
|
+
|
40
|
+
### Formatting
|
41
|
+
|
42
|
+
You can have the consumer handle deserializing your data that is sent in the message. For example, if you used json as your message format:
|
43
|
+
|
44
|
+
```ruby
|
45
|
+
class MyConsumer
|
46
|
+
def parse(message)
|
47
|
+
JSON.parse(message)
|
48
|
+
end
|
49
|
+
|
50
|
+
# the message you're consuming is now a parsed json object
|
51
|
+
def consume(json_object)
|
52
|
+
puts json_object['name']
|
53
|
+
end
|
54
|
+
end
|
55
|
+
```
|
56
|
+
|
57
|
+
### Creating and Running
|
58
|
+
|
59
|
+
To create a consumer instance, you'll need to provide an array of kafka servers and an array of zookeeper servers. You can optionally provide a logger as well.
|
60
|
+
|
61
|
+
```ruby
|
62
|
+
# create a consumer
|
63
|
+
kafka_servers = ["localhost:9092"]
|
64
|
+
zookeeper_servers = ["localhost:2181"]
|
65
|
+
consumer = MyConsumer.new(kafka_servers, zookeeper_servers, logger: nil)
|
66
|
+
|
67
|
+
# run the consumer (loops and blocks)
|
68
|
+
consumer.run
|
69
|
+
```
|
70
|
+
|
71
|
+
This gem utilizes the `poseidon_cluster` gem and consumers coordinate via zookeeper. Thus, you can run many consumers. The `group_name` is what's used to determine which messages have already been processed.
|
72
|
+
|
73
|
+
## Contributing
|
74
|
+
|
75
|
+
1. Fork it ( https://github.com/chingor13/simple_kafka_consumer/fork )
|
76
|
+
2. Create your feature branch (`git checkout -b my-new-feature`)
|
77
|
+
3. Commit your changes (`git commit -am 'Add some feature'`)
|
78
|
+
4. Push to the branch (`git push origin my-new-feature`)
|
79
|
+
5. Create a new Pull Request
|
data/lib/simple_kafka_consumer/consumer.rb
ADDED
# Base class for writing Kafka consumers on top of poseidon_cluster.
# Subclasses set `group_name`/`topic_name` and override `consume`
# (and optionally `parse`).
module SimpleKafkaConsumer
  class Consumer
    # Class-level settings shared by instances (ActiveSupport macro):
    #   group_name - consumer-group id used for zookeeper coordination
    #   topic_name - the Kafka topic to read from
    class_attribute :group_name, :topic_name

    attr_reader :consumer, :logger

    # kafka_servers     - Array of "host:port" Kafka broker addresses
    # zookeeper_servers - Array of "host:port" zookeeper addresses
    # logger:           - optional Logger; when nil, log/debug are no-ops
    def initialize(kafka_servers, zookeeper_servers, logger: nil)
      @consumer = Poseidon::ConsumerGroup.new(
        group_name,
        kafka_servers,
        zookeeper_servers,
        topic_name
      )
      @logger = logger
    end

    # Loops and blocks: fetches message batches and hands each parsed
    # message to #consume. Zookeeper operation timeouts are logged and the
    # loop restarted — the unbounded retry is intentional so the consumer
    # survives transient zookeeper hiccups.
    def run
      debug "partitions: #{consumer.partitions}"
      debug "claimed: #{consumer.claimed}"
      # partition is unused by this base class; underscore-prefixed to flag it
      consumer.fetch_loop do |_partition, bulk|
        bulk.each do |message|
          consume(parse(message))
        end
      end
    rescue ZK::Exceptions::OperationTimeOut => e
      log e.message
      retry
    end

    protected

    # Hook: deserialize a raw message before it reaches #consume.
    # Default is the identity function; subclasses may override (e.g. JSON).
    def parse(message)
      message
    end

    # Log at info level; returns false when no logger was provided.
    def log(message)
      return false unless logger
      logger.info message
    end

    # Log at debug level; returns false when no logger was provided.
    def debug(message)
      return false unless logger
      logger.debug message
    end

    # Hook: handle one (parsed) message. Subclasses must override.
    def consume(message)
      puts "doing nothing"
    end
  end
end
# coding: utf-8
# Gem packaging definition for simple_kafka_consumer.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'simple_kafka_consumer/version'

Gem::Specification.new do |spec|
  spec.name          = "simple_kafka_consumer"
  spec.version       = SimpleKafkaConsumer::VERSION
  spec.authors       = ["Jeff Ching"]
  spec.email         = ["jching@avvo.com"]
  spec.summary       = "Write Kafka consumers in a model with retry"
  spec.description   = "Write Kafka consumers in a model with retry"
  spec.homepage      = ""
  spec.license       = "MIT"

  # Ship every git-tracked file; expose bin/ entries as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Runtime dependencies: class_attribute comes from activesupport,
  # the consumer itself rides on poseidon / poseidon_cluster.
  spec.add_dependency "activesupport", ">= 3.2.0"
  spec.add_dependency "poseidon", "0.0.4"
  spec.add_dependency "poseidon_cluster", "0.1.1"

  # Development-only dependencies.
  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
end
metadata
ADDED
@@ -0,0 +1,124 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: simple_kafka_consumer
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.1
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Jeff Ching
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2015-05-22 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: activesupport
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - ">="
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: 3.2.0
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - ">="
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: 3.2.0
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: poseidon
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - '='
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: 0.0.4
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - '='
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: 0.0.4
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: poseidon_cluster
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - '='
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: 0.1.1
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - '='
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: 0.1.1
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: bundler
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - "~>"
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '1.7'
|
62
|
+
type: :development
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - "~>"
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '1.7'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: rake
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - "~>"
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '10.0'
|
76
|
+
type: :development
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - "~>"
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '10.0'
|
83
|
+
description: Write Kafka consumers in a model with retry
|
84
|
+
email:
|
85
|
+
- jching@avvo.com
|
86
|
+
executables: []
|
87
|
+
extensions: []
|
88
|
+
extra_rdoc_files: []
|
89
|
+
files:
|
90
|
+
- ".gitignore"
|
91
|
+
- Gemfile
|
92
|
+
- LICENSE.txt
|
93
|
+
- README.md
|
94
|
+
- Rakefile
|
95
|
+
- lib/simple_kafka_consumer.rb
|
96
|
+
- lib/simple_kafka_consumer/consumer.rb
|
97
|
+
- lib/simple_kafka_consumer/version.rb
|
98
|
+
- simple_kafka_consumer.gemspec
|
99
|
+
homepage: ''
|
100
|
+
licenses:
|
101
|
+
- MIT
|
102
|
+
metadata: {}
|
103
|
+
post_install_message:
|
104
|
+
rdoc_options: []
|
105
|
+
require_paths:
|
106
|
+
- lib
|
107
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
108
|
+
requirements:
|
109
|
+
- - ">="
|
110
|
+
- !ruby/object:Gem::Version
|
111
|
+
version: '0'
|
112
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
113
|
+
requirements:
|
114
|
+
- - ">="
|
115
|
+
- !ruby/object:Gem::Version
|
116
|
+
version: '0'
|
117
|
+
requirements: []
|
118
|
+
rubyforge_project:
|
119
|
+
rubygems_version: 2.2.2
|
120
|
+
signing_key:
|
121
|
+
specification_version: 4
|
122
|
+
summary: Write Kafka consumers in a model with retry
|
123
|
+
test_files: []
|
124
|
+
has_rdoc:
|