kafka-rest-rb 0.1.0.alpha2

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: ef34042990b7684ddc6a59d1ac9ba8ee44f87d30
+   data.tar.gz: 95dd2134ff748d790ffdb5b6ab216d99d326b59c
+ SHA512:
+   metadata.gz: 246f29462bbf834ae1952c7b010bea02a7bfbd8b0613b6cbfd506755144d8dcab06291346ce6db3f5b2556ed9a0002e14257152c9403c91b8c5816c190b04d35
+   data.tar.gz: eb2a8e82e9740b6e677605b848a2e2cf70330e76670533392059689acf317dac87380014fc864f0cc666185981a56a0aa6920b69aefd965bc94c4cddff117e25
data/.gitignore ADDED
@@ -0,0 +1,11 @@
+ /.bundle/
+ /.yardoc
+ /Gemfile.lock
+ /_yardoc/
+ /coverage/
+ /doc/
+ /pkg/
+ /spec/reports/
+ /tmp/
+
+ *.gem
data/.rspec ADDED
@@ -0,0 +1,2 @@
+ --format documentation
+ --color
data/.travis.yml ADDED
@@ -0,0 +1,5 @@
+ sudo: false
+ language: ruby
+ rvm:
+   - 2.2.2
+ before_install: gem install bundler -v 1.12.5
data/CODE_OF_CONDUCT.md ADDED
@@ -0,0 +1,49 @@
+ # Contributor Code of Conduct
+
+ As contributors and maintainers of this project, and in the interest of
+ fostering an open and welcoming community, we pledge to respect all people who
+ contribute through reporting issues, posting feature requests, updating
+ documentation, submitting pull requests or patches, and other activities.
+
+ We are committed to making participation in this project a harassment-free
+ experience for everyone, regardless of level of experience, gender, gender
+ identity and expression, sexual orientation, disability, personal appearance,
+ body size, race, ethnicity, age, religion, or nationality.
+
+ Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic
+   addresses, without explicit permission
+ * Other unethical or unprofessional conduct
+
+ Project maintainers have the right and responsibility to remove, edit, or
+ reject comments, commits, code, wiki edits, issues, and other contributions
+ that are not aligned to this Code of Conduct, or to ban temporarily or
+ permanently any contributor for other behaviors that they deem inappropriate,
+ threatening, offensive, or harmful.
+
+ By adopting this Code of Conduct, project maintainers commit themselves to
+ fairly and consistently applying these principles to every aspect of managing
+ this project. Project maintainers who do not follow or enforce the Code of
+ Conduct may be permanently removed from the project team.
+
+ This code of conduct applies both within project spaces and in public spaces
+ when an individual is representing the project or its community.
+
+ Instances of abusive, harassing, or otherwise unacceptable behavior may be
+ reported by contacting a project maintainer at komolov.f@gmail.com. All
+ complaints will be reviewed and investigated and will result in a response that
+ is deemed necessary and appropriate to the circumstances. Maintainers are
+ obligated to maintain confidentiality with regard to the reporter of an
+ incident.
+
+ This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+ version 1.3.0, available at
+ [http://contributor-covenant.org/version/1/3/0/][version]
+
+ [homepage]: http://contributor-covenant.org
+ [version]: http://contributor-covenant.org/version/1/3/0/
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'https://rubygems.org'
+
+ # Specify your gem's dependencies in kafka-rest-rb.gemspec
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2016 Theodore Konukhov
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,3 @@
+ # Kafka-REST client, producer/consumer DSLs and worker daemon for Ruby/Rails apps. [![CircleCI](https://circleci.com/gh/konukhov/kafka-rest-rb.svg?style=shield)](https://circleci.com/gh/konukhov/kafka-rest-rb)
+
+ This gem is in an early development stage. Come back later to see the docs!
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ require "bundler/gem_tasks"
+ require "rspec/core/rake_task"
+
+ RSpec::Core::RakeTask.new(:spec)
+
+ task :default => :spec
data/bin/console ADDED
@@ -0,0 +1,14 @@
+ #!/usr/bin/env ruby
+
+ require "bundler/setup"
+ require "kafka-rest"
+
+ # You can add fixtures and/or initialization code here to make experimenting
+ # with your gem easier. You can also use a different console, if you like.
+
+ # (If you use this, don't forget to add pry to your Gemfile!)
+ # require "pry"
+ # Pry.start
+
+ require "irb"
+ IRB.start
data/bin/kafka-rest ADDED
@@ -0,0 +1,19 @@
+ #!/usr/bin/env ruby
+
+ # TODO, pids, config and stuff
+
+ ENV['RAILS_ENV'] ||= ENV['RACK_ENV'] || 'development'
+
+ app_path = ENV['APP_PATH'] || '.'
+
+ require 'kafka_rest/logging'
+ require 'kafka_rest/worker'
+
+ require File.expand_path('config/environment.rb', app_path)
+
+ Rails.application.eager_load!
+
+ client = KafkaRest::Client.new
+ worker = KafkaRest::Worker.new(client)
+
+ worker.start
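
The daemon above boots the host Rails app and then blocks in the worker loop. Assuming your app lives where `APP_PATH` points (it defaults to the current directory), an invocation like `APP_PATH=/path/to/app RAILS_ENV=production bundle exec kafka-rest` should be all that is needed.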
data/bin/setup ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+ IFS=$'\n\t'
+ set -vx
+
+ bundle install
+
+ # Do any other automated setup that you need to do here
data/kafka-rest-rb.gemspec ADDED
@@ -0,0 +1,30 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'kafka_rest/version'
+
+ Gem::Specification.new do |spec|
+   spec.name          = "kafka-rest-rb"
+   spec.version       = KafkaRest::VERSION
+   spec.authors       = ["Theodore Konukhov"]
+   spec.email         = ["me@thdr.io"]
+
+   spec.summary       = %q{Kafka-REST proxy client for Ruby on Rails.}
+   spec.description   = %q{Kafka-REST client, DSLs and consumer workers for Ruby.}
+   spec.homepage      = "https://github.com/konukhov/kafka-rest-rb"
+   spec.license       = "MIT"
+
+   spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
+   spec.bindir        = 'bin'
+   spec.executables   = ['kafka-rest']
+   spec.require_paths = ['lib']
+
+   spec.add_runtime_dependency 'faraday', '~> 0.9'
+   spec.add_runtime_dependency 'faraday_middleware', '~> 0.10'
+   spec.add_runtime_dependency 'concurrent-ruby', '~> 1.0'
+   spec.add_runtime_dependency 'oj', '~> 2.17'
+
+   spec.add_development_dependency "bundler", "~> 1.12"
+   spec.add_development_dependency "rake", "~> 10.0"
+   spec.add_development_dependency "rspec", "~> 3.0"
+ end
data/lib/kafka-rest.rb ADDED
@@ -0,0 +1 @@
+ require 'kafka_rest'
data/lib/kafka_rest/client/middleware.rb ADDED
@@ -0,0 +1,69 @@
+ require 'faraday'
+ require 'faraday_middleware/response_middleware'
+ require 'oj'
+
+ module KafkaRest
+   class Client
+     class KafkaRestClientException < StandardError
+       attr_reader :body, :status
+
+       def initialize(resp)
+         @body = resp.body
+         @status = resp.status
+
+         super "#{@body['message']}" +
+           " (HTTP Status: #{@status}; " +
+           "error code: #{@body['error_code']})"
+       end
+     end
+
+     class DefaultHeaders < Faraday::Middleware
+       def initialize(app = nil, default_headers = {})
+         @default_headers = default_headers
+         super(app)
+       end
+
+       def call(env)
+         env[:request_headers] = @default_headers.merge env[:request_headers]
+         @app.call(env)
+       end
+     end
+
+     class JsonRequest < Faraday::Middleware
+       def call(env)
+         if env[:body]
+           env[:body] = Oj.dump env[:body], mode: :compat, symbol_keys: false
+         end
+
+         @app.call(env)
+       end
+     end
+
+     class JsonResponse < FaradayMiddleware::ResponseMiddleware
+       define_parser do |body|
+         Oj.load(body)
+       end
+     end
+
+     class RaiseException < FaradayMiddleware::ResponseMiddleware
+       def call(env)
+         response = @app.call(env)
+         response.on_complete do
+           unless response.success?
+             raise KafkaRestClientException.new(response)
+           end
+         end
+       end
+     end
+
+     Faraday::Request.register_middleware(
+       default_headers: DefaultHeaders,
+       encode_json: JsonRequest
+     )
+
+     Faraday::Response.register_middleware(
+       decode_json: JsonResponse,
+       raise_exception: RaiseException
+     )
+   end
+ end
data/lib/kafka_rest/client.rb ADDED
@@ -0,0 +1,137 @@
+ require 'kafka_rest/client/middleware.rb'
+ require 'faraday'
+
+ module KafkaRest
+   class Client
+     def initialize
+       @conn = Faraday.new(url: KafkaRest.config.url) do |c|
+         c.request :encode_json
+         c.request :default_headers, default_headers
+
+         c.response :raise_exception
+         c.response :decode_json
+
+         c.adapter :net_http_persistent
+       end
+     end
+
+     # Get list of topics
+     ### returns: array of topics
+     def topics
+       @conn.get("/topics")
+     end
+
+     # Get topic metadata by name
+     ### returns: name, configs, partitions
+     def topic(topic)
+       @conn.get("/topics/#{topic}")
+     end
+
+     # Get topic's partitions
+     ###
+     def topic_partitions(topic)
+       @conn.get("/topics/#{topic}/partitions")
+     end
+
+     # Get topic's partition metadata
+     def topic_partition(topic, partition)
+       @conn.get("/topics/#{topic}/partitions/#{partition}")
+     end
+
+     # Get messages from topic's partition.
+     def topic_partition_messages(topic, partition, params = {})
+       params[:count] ||= 1
+       format = params.delete(:format) || 'binary'
+
+       @conn.get(
+         "/topics/#{topic}/partitions/#{partition}/messages",
+         params,
+         accept(format)
+       )
+     end
+
+     def produce_message(path, records, format, params)
+       body = params.merge(
+         records: records.is_a?(Array) ? records : [records]
+       )
+
+       @conn.post(path, body, content_type(format))
+     end
+     private :produce_message
+
+     # Produce message into a topic
+     ### params: key_schema, value_schema, key_schema_id, value_schema_id, records { key, value, partition }
+     ### returns: key_schema_id, value_schema_id, offsets { partition, offset, error_code, error }
+     def topic_produce_message(topic, records, format = 'json', params = {})
+       produce_message("topics/#{topic}", records, format, params)
+     end
+
+     # Produce message into a topic and partition
+     ### see topic_produce_message
+     def topic_partition_produce_message(topic, partition, records, format = 'json', params = {})
+       produce_message("topics/#{topic}/partitions/#{partition}", records, format, params)
+     end
+
+     # Add new consumer to a group
+     ### params: name, format, auto.offset.reset, auto.commit.enable
+     ### returns: instance_id, base_uri
+     def consumer_add(group_name, params = {})
+       body = {}
+       body['auto.offset.reset'] = params[:auto_offset_reset] || 'largest'
+       body['auto.commit.enable'] = params[:auto_commit_enable] == true
+       body['format'] = params[:format] || 'json'
+
+       @conn.post("consumers/#{group_name}", body)
+     end
+
+     def consumer_commit_offsets(group_name, consumer_id)
+       @conn.post("consumers/#{group_name}/instances/#{consumer_id}/offsets")
+     end
+
+     def consumer_remove(group_name, consumer_id)
+       @conn.delete("consumers/#{group_name}/instances/#{consumer_id}")
+     end
+
+     def consumer_consume_from_topic(group_name, consumer_id, topic, params = {})
+       format = params.delete(:format) || 'json'
+
+       @conn.get(
+         "consumers/#{group_name}/instances/#{consumer_id}/topics/#{topic}",
+         params,
+         accept(format)
+       )
+     end
+
+     def brokers
+       @conn.get("/brokers")
+     end
+
+     private
+
+     def default_headers
+       {
+         'Accept' => 'application/vnd.kafka.v1+json, application/vnd.kafka+json, application/json',
+         'Content-Type' => 'application/vnd.kafka.v1+json'
+       }
+     end
+
+     def kafka_mime_type(format = :json)
+       case format.to_sym
+       when :avro
+         'application/vnd.kafka.avro.v1+json'
+       when :binary
+         'application/vnd.kafka.binary.v1+json'
+       when :json
+         'application/vnd.kafka.json.v1+json'
+       end
+     end
+
+     def content_type(format = nil, headers = {})
+       headers.merge 'Content-Type' => kafka_mime_type(format)
+     end
+
+     def accept(format, headers = {})
+       headers.merge 'Accept' => kafka_mime_type(format)
+     end
+   end
+ end
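
A usage sketch for the client above, assuming a Kafka REST proxy is listening at the configured URL; note that the `:net_http_persistent` adapter also needs the `net-http-persistent` gem available, which the gemspec does not declare. Each method returns a Faraday response, so the decoded JSON is in `.body`:

    client = KafkaRest::Client.new

    client.topics.body        # => ["logs", ...]
    client.topic('logs').body # topic metadata

    # Wraps the hash into a `records` array and POSTs it as JSON:
    client.topic_produce_message('logs', { value: { event: 'signup' } })

The topic name and payload here are illustrative.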
data/lib/kafka_rest/config.rb ADDED
@@ -0,0 +1,29 @@
+ module KafkaRest
+   class Config
+     attr_accessor :url,
+       :message_format,
+       :serialization_adapter,
+       :worker_min_threads,
+       :worker_max_threads,
+       :worker_max_queue
+
+     def initialize
+       @url = 'http://localhost:8082'
+       @message_format = 'json'
+       @serialization_adapter = nil
+       @worker_min_threads = 4
+       @worker_max_threads = 4
+       @worker_max_queue = nil
+     end
+   end
+
+   @@config = Config.new
+
+   def self.configure(&block)
+     block.call @@config
+   end
+
+   def self.config
+     @@config
+   end
+ end
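
A configuration sketch using the accessors above; the host is illustrative, and the remaining values shown are the defaults:

    KafkaRest.configure do |c|
      c.url                = 'http://kafka-rest.internal:8082'
      c.message_format     = 'json'
      c.worker_min_threads = 4
      c.worker_max_threads = 4
    end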
data/lib/kafka_rest/consumer.rb ADDED
@@ -0,0 +1,37 @@
+ require 'kafka_rest/dsl'
+
+ module KafkaRest
+   module Consumer
+     def self.included(base)
+       base.class_eval do
+         extend Dsl
+
+         option :topic, required: true
+
+         option :group_name, required: true
+
+         option :format, default: KafkaRest.config.message_format, validate: ->(v){
+           %w(json binary avro).include?(v.to_s)
+         }, error_message: 'Format must be either `json`, `avro` or `binary`'
+
+         option :auto_commit, default: false
+
+         option :offset_reset, default: :largest, validate: ->(val){
+           %w(smallest largest).include?(val.to_s)
+         }, error_message: 'Offset reset strategy must be `smallest` or `largest`'
+
+         option :max_bytes
+
+         option :poll_delay, default: 0.5, validate: ->(val){
+           val > 0
+         }, error_message: 'Poll delay should be a number greater than zero'
+       end
+
+       Worker::ConsumerManager.register!(base)
+     end
+
+     def receive(*args)
+       raise NotImplementedError
+     end
+   end
+ end
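
A sketch of a consumer class built on this module (the class name, topic, and group are hypothetical); `receive` is the hook `Worker::ConsumerManager` calls with each message fetched from the proxy:

    class SignupsConsumer
      include KafkaRest::Consumer

      topic      :signups
      group_name :my_app
      poll_delay 1

      def receive(msg)
        puts msg['value'] # one decoded message from the REST proxy
      end
    end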
data/lib/kafka_rest/dsl.rb ADDED
@@ -0,0 +1,46 @@
+ module KafkaRest
+   module Dsl
+     class MissingRequiredOption < StandardError; end
+     class InvalidOptionValue < StandardError; end
+
+     def option(name, opts = {})
+       name = name.to_s
+       required = opts[:required] || false
+       default = opts[:default]
+       validate = opts[:validate] || ->(val) { true }
+       error_msg = opts[:error_message] || "`#{name}`'s value is invalid"
+
+       class_eval do
+         metaclass = class << self; self; end
+         instance_variable_set "@#{name}", default
+         metaclass.send :define_method, "_validate_#{name}", ->(val) { validate.call(val) }
+       end
+
+       class_eval %Q{
+         def #{name}
+           self.class.get_#{name}
+         end
+
+         class << self
+           def get_#{name}
+             @#{name}.tap do |v|
+               if #{required} && v.nil?
+                 raise KafkaRest::Dsl::MissingRequiredOption.new(
+                   "Missing required option `#{name}`"
+                 )
+               end
+             end
+           end
+
+           def #{name}(val)
+             unless _validate_#{name}(val)
+               raise KafkaRest::Dsl::InvalidOptionValue.new("#{error_msg}")
+             end
+
+             @#{name} = val
+           end
+         end
+       }
+     end
+   end
+ end
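
To make the metaprogramming above concrete: for each declared option, `option` defines a class-level writer named after the option, a class-level `get_<name>` reader that enforces `required`, and an instance-level reader. On a hypothetical class:

    class Example
      extend KafkaRest::Dsl
      option :topic, required: true
    end

    Example.topic 'users' # writer; runs the validator first
    Example.get_topic     # => "users" (raises MissingRequiredOption while unset)
    Example.new.topic     # => "users"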
data/lib/kafka_rest/logging.rb ADDED
@@ -0,0 +1,20 @@
+ module KafkaRest
+   module Logging
+     def self.logger
+       @logger ||= (
+         require 'logger'
+         ::Logger.new(STDOUT).tap do |l|
+           l.level = ::Logger::INFO
+         end
+       )
+     end
+
+     def self.logger=(l)
+       @logger = l
+     end
+
+     def logger
+       Logging.logger
+     end
+   end
+ end
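
Because the logger is a module-level attribute, a host app can swap in its own, e.g. in a Rails initializer:

    KafkaRest::Logging.logger = Rails.logger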
data/lib/kafka_rest/producer/serialization/active_model.rb ADDED
@@ -0,0 +1,27 @@
+ module KafkaRest
+   module Producer
+     module Serialization
+       class ActiveModel < Adapter
+         def serialize(obj, opts = {})
+           klass.new(obj, opts).as_json
+         end
+
+         private
+
+         def klass
+           @klass ||= (
+             unless defined?(::ActiveModel::Serializer)
+               raise 'ActiveModel::Serializer cannot be found'
+             end
+
+             if (kl = @args.first) && kl < ::ActiveModel::Serializer
+               kl
+             else
+               raise 'Provide an ActiveModel::Serializer child as an argument to `serializer`'
+             end
+           )
+         end
+       end
+     end
+   end
+ end
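
Assuming `active_model_serializers` is loaded (so this adapter becomes the configured default), a producer points it at a serializer class via the `serializer` option; the names below are hypothetical:

    class UserSerializer < ActiveModel::Serializer
      attributes :id, :name
    end

    class UserProducer
      include KafkaRest::Producer

      topic      :users
      serializer UserSerializer # becomes @args.first inside the adapter
    end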
data/lib/kafka_rest/producer/serialization/adapter.rb ADDED
@@ -0,0 +1,15 @@
+ module KafkaRest
+   module Producer
+     module Serialization
+       class Adapter
+         def initialize(*args)
+           @args = args
+         end
+
+         def serialize(obj, options = {})
+           raise NotImplementedError
+         end
+       end
+     end
+   end
+ end
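
A custom adapter only needs to subclass `Adapter` and implement `serialize`; a minimal sketch (the class name is hypothetical, and `Oj` is already a runtime dependency):

    class OjAdapter < KafkaRest::Producer::Serialization::Adapter
      def serialize(obj, opts = {})
        Oj.dump(obj, mode: :compat) # any object-to-payload strategy works here
      end
    end

    KafkaRest.configure { |c| c.serialization_adapter = OjAdapter }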
data/lib/kafka_rest/producer/serialization/noop.rb ADDED
@@ -0,0 +1,11 @@
+ module KafkaRest
+   module Producer
+     module Serialization
+       class Noop < Adapter
+         def serialize(obj, opts = {})
+           obj.to_s
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/producer.rb ADDED
@@ -0,0 +1,69 @@
+ require 'kafka_rest/dsl'
+
+ module KafkaRest
+   module Producer
+     DEFAULT_KEY_SCHEMA = "{\"type\": \"string\"}"
+
+     def self.included(base)
+       base.class_eval do
+         extend ClassMethods
+         extend Dsl
+
+         option :topic, required: true
+
+         option :format, default: KafkaRest.config.message_format, validate: ->(v){
+           %w(json binary avro).include?(v.to_s)
+         }, error_message: 'Format must be `avro`, `json` or `binary`.'
+
+
+         option :key_schema, validate: ->(v){
+           v.is_a?(Symbol) || v.is_a?(String) || v.is_a?(Proc)
+         }, default: DEFAULT_KEY_SCHEMA
+
+         option :value_schema, validate: ->(v){
+           v.is_a?(Symbol) || v.is_a?(String) || v.is_a?(Proc)
+         }
+
+         option :key, validate: ->(val) {
+           if val
+             val.is_a?(Symbol) || val.is_a?(Proc)
+           else
+             true
+           end
+         }
+
+         option :serialization_adapter, validate: ->(val){
+           if val
+             val.is_a?(Class) && val < Serialization::Adapter
+           else
+             true
+           end
+         }, default: KafkaRest.config.serialization_adapter
+
+         option :serializer
+
+         class << base
+           # right away override default get_serializer and get_value_schema
+           def get_serializer
+             @serializer_inst ||= get_serialization_adapter.new @serializer
+           end
+
+           def get_value_schema
+             if get_format.to_s == 'avro' && @value_schema.nil?
+               raise 'Format `avro` requires providing `value_schema`'
+             end
+
+             @value_schema
+           end
+         end
+       end
+     end
+
+     module ClassMethods
+       def send!(obj, opts = {}, producer = nil)
+         (producer || KafkaRest::Sender.instance)
+           .send!(self, obj, opts)
+       end
+     end
+   end
+ end
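
Putting the producer DSL together, a sketch with hypothetical names; `send!` routes through `KafkaRest::Sender.instance`, which builds the payload and issues the produce request via the client:

    class EventProducer
      include KafkaRest::Producer

      topic :events
      key   ->(event) { event[:id] } # or a Symbol, resolved on the object
    end

    EventProducer.send!(id: 42, type: 'signup')

With the default `Noop` adapter the value is simply `obj.to_s`; configure a serialization adapter for real JSON payloads.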
data/lib/kafka_rest/sender/payload/avro_builder.rb ADDED
@@ -0,0 +1,15 @@
+ module KafkaRest
+   class Sender
+     class Payload
+       class AvroBuilder < Builder
+         # TODO: get rid of this
+         def build
+           {
+             key: @payload.key,
+             value: @payload.value,
+           }
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/sender/payload/binary_builder.rb ADDED
@@ -0,0 +1,16 @@
+ require 'base64'
+
+ module KafkaRest
+   class Sender
+     class Payload
+       class BinaryBuilder < Builder
+         def build
+           {
+             key: @payload.key,
+             value: Base64.strict_encode64(@payload.value)
+           }
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/sender/payload/builder.rb ADDED
@@ -0,0 +1,15 @@
+ module KafkaRest
+   class Sender
+     class Payload
+       class Builder
+         def initialize(payload)
+           @payload = payload
+         end
+
+         def build
+           raise NotImplementedError
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/sender/payload/json_builder.rb ADDED
@@ -0,0 +1,14 @@
+ module KafkaRest
+   class Sender
+     class Payload
+       class JsonBuilder < Builder
+         def build
+           {
+             key: @payload.key,
+             value: @payload.value
+           }
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/sender/payload.rb ADDED
@@ -0,0 +1,72 @@
+ require 'kafka_rest/sender/payload/builder'
+ require 'kafka_rest/sender/payload/avro_builder'
+ require 'kafka_rest/sender/payload/json_builder'
+ require 'kafka_rest/sender/payload/binary_builder'
+
+ module KafkaRest
+   class Sender
+     class Payload
+       attr_reader :klass
+
+       def initialize(klass, obj, opts = {})
+         @klass = klass
+         @obj = obj
+         @opts = opts
+         @builder = get_builder.new(self)
+
+         @key = @opts.delete(:key)
+         @timestamp = @opts.delete(:timestamp)
+         @partition = @opts.delete(:partition)
+       end
+
+       def build
+         @builder.build.tap do |pl|
+           @timestamp and pl[:timestamp] = @timestamp
+           @partition and pl[:partition] = @partition
+         end
+       end
+
+       def value
+         @klass.get_serializer.serialize(@obj, @opts)
+       end
+
+       def key
+         return @key if @key
+
+         k = @klass.get_key
+
+         case k
+         when NilClass
+           k
+         when Symbol
+           if inst.respond_to?(k)
+             inst.send(k, @obj)
+           elsif @obj.respond_to?(k)
+             @obj.send(k)
+           else
+             raise NoMethodError.new("Undefined method \"#{k}\"")
+           end
+         when Proc
+           k.call(@obj)
+         end
+       end
+
+       private
+
+       def get_builder
+         case klass.get_format.to_sym # format may be set as a String or a Symbol
+         when :avro
+           AvroBuilder
+         when :json
+           JsonBuilder
+         when :binary
+           BinaryBuilder
+         end
+       end
+
+       def inst
+         @inst ||= @klass.new
+       end
+     end
+   end
+ end
data/lib/kafka_rest/sender.rb ADDED
@@ -0,0 +1,81 @@
+ require 'thread'
+ require 'kafka_rest/sender/payload'
+
+ module KafkaRest
+   class Sender
+     @@lock = Mutex.new
+
+     class << self
+       def instance
+         @@lock.synchronize do
+           @instance ||= self.new(Client.new, lock: @@lock)
+         end
+       end
+     end
+
+     attr_reader :key_schema_cache, :value_schema_cache
+
+     # TODO: buffering???
+     def initialize(client, opts = {})
+       @lock = opts[:lock] || Mutex.new
+       @client = client
+       @key_schema_cache = {}
+       @value_schema_cache = {}
+     end
+
+     # TODO: back-off retry if offset[i].errors is a retriable error
+     def send!(klass, obj, opts = {})
+       topic, payload, format, params = build_request(klass, obj, opts)
+       send_produce_request!(topic, payload, format, params)
+     end
+
+     private
+
+     def build_request(klass, obj, opts)
+       # TODO: oooh, dirty and weird - this should not be here.
+       # come up with something good!
+       topic = klass.get_topic.to_s
+       key = klass.get_key
+       payload = Payload.new(klass, obj, opts).build
+       format = klass.get_format.to_s
+       params = {}.tap do |_p|
+         if format == 'avro'
+           unless key.nil?
+             if (kid = @key_schema_cache[topic])
+               _p[:key_schema_id] = kid
+             else
+               _p[:key_schema] = klass.get_key_schema
+             end
+           end
+
+           if (vid = @value_schema_cache[topic])
+             _p[:value_schema_id] = vid
+           else
+             _p[:value_schema] = klass.get_value_schema
+           end
+         end
+       end
+
+       [topic, payload, format, params]
+     end
+
+     def send_produce_request!(topic, payload, format, params)
+       @client.topic_produce_message(topic, payload, format, params).body.tap do |re|
+         # this too (line 27)
+         cache_schema_ids!(re, topic) if format == 'avro'
+       end['offsets']
+     end
+
+     def cache_schema_ids!(resp, topic)
+       @lock.synchronize do
+         if @key_schema_cache[topic].nil? && (kid = resp['key_schema_id'])
+           @key_schema_cache[topic] = kid
+         end
+
+         if @value_schema_cache[topic].nil? && (vid = resp['value_schema_id'])
+           @value_schema_cache[topic] = vid
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/version.rb ADDED
@@ -0,0 +1,3 @@
+ module KafkaRest
+   VERSION = '0.1.0.alpha2'
+ end
data/lib/kafka_rest/worker/consumer_manager.rb ADDED
@@ -0,0 +1,135 @@
+ require 'concurrent/utility/monotonic_time'
+ require 'forwardable'
+
+ module KafkaRest
+   class Worker
+     class ConsumerManager
+       STATES = [:initial, :idle, :working, :dead]
+
+       include KafkaRest::Logging
+
+       class << self
+         @@consumers = []
+
+         def register!(consumer_class)
+           # TODO: raise exception if group_id + topic are not unique
+           # TODO: Thread.current???
+           @@consumers << consumer_class
+         end
+
+         def consumers
+           @@consumers
+         end
+       end
+
+       extend Forwardable
+
+       def_delegators :@consumer,
+         :topic,
+         :group_name,
+         :poll_delay,
+         :auto_commit,
+         :offset_reset,
+         :format,
+         :max_bytes
+
+       def initialize(client, consumer)
+         @client = client
+         @consumer = consumer.new
+         @id = nil
+         @uri = nil
+         @state = :initial
+         @next_poll = Concurrent.monotonic_time
+         @lock = Mutex.new
+       end
+
+       STATES.each do |state|
+         class_eval %Q{
+           def #{state}?(lock = true)
+             with_lock(lock) { @state == :#{state} }
+           end
+         }
+       end
+
+       def poll?
+         with_lock {
+           idle?(false) && Concurrent.monotonic_time > @next_poll
+         }
+       end
+
+       def add!
+         params = {}.tap do |h|
+           auto_commit.nil? or h[:auto_commit_enable] = auto_commit
+           offset_reset and h[:auto_offset_reset] = offset_reset
+           format and h[:format] = format
+         end
+
+         resp = @client.consumer_add(group_name, params)
+         @id = resp.body['instance_id']
+         @uri = resp.body['base_uri']
+         @state = :idle
+
+         logger.info "[Kafka REST] Added consumer #{@id}"
+       end
+
+       def remove!
+         @client.consumer_remove(group_name, @id)
+         logger.info "[Kafka REST] Removed consumer #{@id}"
+       end
+
+       def poll!
+         begin
+           with_lock do
+             return false unless idle?(false)
+             @state = :working
+           end
+
+           logger.debug "Polling #{group_name}..."
+
+           params = {}.tap do |h|
+             format and h[:format] = format
+             max_bytes and h[:max_bytes] = max_bytes
+           end
+
+           messages = @client.consumer_consume_from_topic(
+             group_name,
+             @id,
+             topic,
+             params
+           ).body
+
+           if messages.any?
+             messages.each do |msg|
+               logger.debug "[Kafka REST] Consumer #{@id} got message: #{msg}"
+               @consumer.receive(msg)
+             end
+
+             unless auto_commit
+               @client.consumer_commit_offsets(group_name, @id)
+             end
+
+             with_lock { @state = :idle }
+           else
+             with_lock do
+               @next_poll = Concurrent.monotonic_time + poll_delay
+               @state = :idle
+             end
+           end
+         rescue Exception => e # TODO: handle errors
+           logger.warn "[Kafka REST] Consumer died due to error: #{e.class}, #{e.message}"
+           with_lock { @state = :dead }
+         end
+       end
+
+       private
+
+       def with_lock(lock = true, &block)
+         if lock
+           @lock.synchronize &block
+         else
+           block.call
+         end
+       end
+     end
+   end
+ end
data/lib/kafka_rest/worker.rb ADDED
@@ -0,0 +1,96 @@
+ require 'kafka_rest/client'
+ require 'kafka_rest/worker/consumer_manager'
+ require 'concurrent/executor/thread_pool_executor'
+
+ module KafkaRest
+   class Worker
+     BUSY_THREAD_POOL_DELAY = 0.5
+     NO_WORK_DELAY = 0.1
+
+     include KafkaRest::Logging
+
+     def initialize(client)
+       @client = client
+       @running = false
+       @thread_pool = Concurrent::ThreadPoolExecutor.new(
+         min_threads: KafkaRest.config.worker_min_threads,
+         max_threads: KafkaRest.config.worker_max_threads,
+         max_queue: max_queue,
+         fallback_policy: :discard
+       )
+
+       @consumers = ConsumerManager.consumers.map do |kl|
+         ConsumerManager.new(@client, kl)
+       end
+     end
+
+     def start
+       begin
+         @running = true
+
+         trap(:SIGINT) do
+           stop
+         end
+
+         init_consumers
+         run_work_loop
+       rescue => e
+         logger.error "[Kafka REST] Got exception: #{e.class} (#{e.message})"
+         e.backtrace.each { |msg| logger.error "\t #{msg}" }
+         stop
+       end
+     end
+
+     def stop
+       logger.info "[Kafka REST] Stopping worker..."
+       @running = false
+       remove_consumers
+     end
+
+     private
+
+     def run_work_loop
+       while @running
+         check_dead!
+
+         jobs = @consumers.select(&:poll?)
+
+         if jobs.empty?
+           sleep(NO_WORK_DELAY)
+           next
+         end
+
+         pool_available = jobs.each do |c|
+           unless @thread_pool.post { c.poll! }
+             break(false)
+           end
+         end
+
+         unless pool_available
+           sleep(BUSY_THREAD_POOL_DELAY)
+         end
+       end
+     end
+
+     def check_dead!
+       # Do we need this?
+       if @consumers.all?(&:dead?)
+         logger.info "[Kafka REST] All consumers are dead. Quitting..."
+         stop
+       end
+     end
+
+     def init_consumers
+       @consumers.map &:add!
+     end
+
+     def remove_consumers
+       @consumers.reject(&:initial?).map &:remove!
+     end
+
+     def max_queue
+       KafkaRest.config.worker_max_queue ||
+         ConsumerManager.consumers.size * 2
+     end
+   end
+ end
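
This is the same loop `bin/kafka-rest` drives; embedding it in another process is a matter of:

    client = KafkaRest::Client.new
    KafkaRest::Worker.new(client).start # blocks; SIGINT triggers a graceful stop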
data/lib/kafka_rest.rb ADDED
@@ -0,0 +1,24 @@
+ require 'kafka_rest/config'
+ require 'kafka_rest/logging'
+ require 'kafka_rest/client'
+ require 'kafka_rest/worker'
+ require 'kafka_rest/producer'
+ require 'kafka_rest/producer/serialization/adapter'
+ require 'kafka_rest/sender'
+ require 'kafka_rest/consumer'
+
+ KafkaRest.configure do |c|
+   serializers = KafkaRest::Producer::Serialization
+
+   if defined?(ActiveModelSerializers) || defined?(::ActiveModel::Serializer)
+     require 'kafka_rest/producer/serialization/active_model'
+     c.serialization_adapter = serializers::ActiveModel
+   # elsif defined?(JBuilder)
+   #   TODO jbuilder is default
+   # elsif defined?(Rabl)
+   #   TODO rabl is default
+   else
+     require 'kafka_rest/producer/serialization/noop'
+     c.serialization_adapter = serializers::Noop
+   end
+ end
metadata ADDED
@@ -0,0 +1,176 @@
+ --- !ruby/object:Gem::Specification
+ name: kafka-rest-rb
+ version: !ruby/object:Gem::Version
+   version: 0.1.0.alpha2
+ platform: ruby
+ authors:
+ - Theodore Konukhov
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2016-10-12 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: faraday
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.9'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.9'
+ - !ruby/object:Gem::Dependency
+   name: faraday_middleware
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.10'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.10'
+ - !ruby/object:Gem::Dependency
+   name: concurrent-ruby
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: oj
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.17'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.17'
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.12'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.12'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ description: Kafka-REST client, DSLs and consumer workers for Ruby.
+ email:
+ - me@thdr.io
+ executables:
+ - kafka-rest
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - ".gitignore"
+ - ".rspec"
+ - ".travis.yml"
+ - CODE_OF_CONDUCT.md
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - bin/console
+ - bin/kafka-rest
+ - bin/setup
+ - kafka-rest-rb.gemspec
+ - lib/kafka-rest.rb
+ - lib/kafka_rest.rb
+ - lib/kafka_rest/client.rb
+ - lib/kafka_rest/client/middleware.rb
+ - lib/kafka_rest/config.rb
+ - lib/kafka_rest/consumer.rb
+ - lib/kafka_rest/dsl.rb
+ - lib/kafka_rest/logging.rb
+ - lib/kafka_rest/producer.rb
+ - lib/kafka_rest/producer/serialization/active_model.rb
+ - lib/kafka_rest/producer/serialization/adapter.rb
+ - lib/kafka_rest/producer/serialization/noop.rb
+ - lib/kafka_rest/sender.rb
+ - lib/kafka_rest/sender/payload.rb
+ - lib/kafka_rest/sender/payload/avro_builder.rb
+ - lib/kafka_rest/sender/payload/binary_builder.rb
+ - lib/kafka_rest/sender/payload/builder.rb
+ - lib/kafka_rest/sender/payload/json_builder.rb
+ - lib/kafka_rest/version.rb
+ - lib/kafka_rest/worker.rb
+ - lib/kafka_rest/worker/consumer_manager.rb
+ homepage: https://github.com/konukhov/kafka-rest-rb
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">"
+     - !ruby/object:Gem::Version
+       version: 1.3.1
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.5
+ signing_key:
+ specification_version: 4
+ summary: Kafka-REST proxy client for Ruby on Rails.
+ test_files: []