kafka_rest 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: d5e671a973f14b00032c1179221bb5b601f69e74
+   data.tar.gz: f862965c6fe2abcbc74fb6f854842c457046b484
+ SHA512:
+   metadata.gz: 10586878e19b68a9213dd3fb27005213120153d0b8a83f7752ecf6e68a629c922c4bf340d43011fdc98b70de5223847a1b6cbd9e7d4e8c61a0b5b19dd80c786b
+   data.tar.gz: bb163d15b9c8971fde3d29a0f5d7bfe0087a148bf2373aec67c2b8b0b50a23a95be4e655761566fde3b33c1ec9701b96f94c8924019d95489d8ff8f1d6e76ce3
data/.gitignore ADDED
@@ -0,0 +1,9 @@
+ /.bundle/
+ /.yardoc
+ /Gemfile.lock
+ /_yardoc/
+ /coverage/
+ /doc/
+ /pkg/
+ /spec/reports/
+ /tmp/
data/.rspec ADDED
@@ -0,0 +1,2 @@
+ --format documentation
+ --color
data/.travis.yml ADDED
@@ -0,0 +1,4 @@
+ language: ruby
+ rvm:
+   - 2.2.2
+ before_install: gem install bundler -v 1.11.2
data/CODE_OF_CONDUCT.md ADDED
@@ -0,0 +1,49 @@
+ # Contributor Code of Conduct
+
+ As contributors and maintainers of this project, and in the interest of
+ fostering an open and welcoming community, we pledge to respect all people who
+ contribute through reporting issues, posting feature requests, updating
+ documentation, submitting pull requests or patches, and other activities.
+
+ We are committed to making participation in this project a harassment-free
+ experience for everyone, regardless of level of experience, gender, gender
+ identity and expression, sexual orientation, disability, personal appearance,
+ body size, race, ethnicity, age, religion, or nationality.
+
+ Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic
+   addresses, without explicit permission
+ * Other unethical or unprofessional conduct
+
+ Project maintainers have the right and responsibility to remove, edit, or
+ reject comments, commits, code, wiki edits, issues, and other contributions
+ that are not aligned to this Code of Conduct, or to ban temporarily or
+ permanently any contributor for other behaviors that they deem inappropriate,
+ threatening, offensive, or harmful.
+
+ By adopting this Code of Conduct, project maintainers commit themselves to
+ fairly and consistently applying these principles to every aspect of managing
+ this project. Project maintainers who do not follow or enforce the Code of
+ Conduct may be permanently removed from the project team.
+
+ This code of conduct applies both within project spaces and in public spaces
+ when an individual is representing the project or its community.
+
+ Instances of abusive, harassing, or otherwise unacceptable behavior may be
+ reported by contacting a project maintainer at jjlangholtz@gmail.com. All
+ complaints will be reviewed and investigated and will result in a response that
+ is deemed necessary and appropriate to the circumstances. Maintainers are
+ obligated to maintain confidentiality with regard to the reporter of an
+ incident.
+
+ This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+ version 1.3.0, available at
+ [http://contributor-covenant.org/version/1/3/0/][version]
+
+ [homepage]: http://contributor-covenant.org
+ [version]: http://contributor-covenant.org/version/1/3/0/
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'https://rubygems.org'
+
+ # Specify your gem's dependencies in kafka_rest.gemspec
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2016 Josh Langholtz
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,117 @@
+ # KafkaRest
+
+ A Ruby wrapper for the Kafka REST Proxy.
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+ ```ruby
+ gem 'kafka_rest'
+ ```
+
+ And then execute:
+
+     $ bundle
+
+ Or install it yourself as:
+
+     $ gem install kafka_rest
+
+ ## Usage
+
+ #### Metadata
+
+ ```ruby
+ # Create a client
+ kafka = KafkaRest::Client.new(url: 'http://localhost:8080')
+
+ # List and update brokers
+ kafka.list_brokers
+
+ # List and update topics
+ kafka.list_topics
+
+ # Access a single topic
+ topic = kafka.topic(name) # or kafka[name]
+
+ # Get a topic's metadata
+ topic.get
+
+ # List and update partitions for a topic
+ topic.list_partitions
+
+ # Get a single topic partition by id
+ partition = topic.partition(id) # or topic[id]
+ ```
+
+ #### Producing
+
+ ```ruby
+ # Produce a message to a topic
+ topic.produce(message)
+
+ # Messages can be produced in a number of formats
+ topic.produce('msg1')
+ topic.produce('msg1', 'msg2', 'msg3')
+ topic.produce(['msg1', 'msg2', 'msg3'])
+ topic.produce(key: 'key1', value: 'msg1')
+ topic.produce(partition: 0, value: 'msg1')
+ topic.produce({ key: 'key1', value: 'msg1' }, { partition: 0, value: 'msg2' })
+ topic.produce([{ key: 'key1', value: 'msg1' }, { partition: 0, value: 'msg2' }])
+
+ # Messages can also be produced from a partition
+ partition.produce(message)
+
+ # You can even produce messages asynchronously
+ topic.produce_async(message)
+ ```
+
+ #### Consuming
+
+ ```ruby
+ # Create a consumer group
+ consumer = kafka.consumer('group1')
+
+ # Create an instance in the group; after yielding, this blocks and consumes in a loop
+ consumer.join do |instance|
+   # Subscribe to a stream for a topic
+   instance.subscribe('topic1') do |stream|
+     stream.on(:data) do |messages|
+       # Your event-driven code
+     end
+
+     stream.on(:error) do |error|
+       # Error handling
+       if some_unrecoverable_exception?
+         stream.shutdown! do
+           # Optionally run any cleanup code before the stream is killed
+         end
+       end
+     end
+   end
+ end
+
+ # The same consumer instance *CAN* be used to subscribe to multiple topics
+ consumer.join do |instance|
+   instance.subscribe('foo') do |stream|
+     stream.on(:data) { }
+   end
+   instance.subscribe('bar') do |stream|
+     stream.on(:data) { }
+   end
+ end
+ ```
+
+ ## Development
+
+ After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
+
+ To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+
+ ## Contributing
+
+ Bug reports and pull requests are welcome on GitHub at https://github.com/jjlangholtz/kafka_rest. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
+
+ ## License
+
+ The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ require "bundler/gem_tasks"
+ require "rspec/core/rake_task"
+
+ RSpec::Core::RakeTask.new(:spec)
+
+ task :default => :spec
data/bin/console ADDED
@@ -0,0 +1,34 @@
+ #!/usr/bin/env ruby
+
+ require "bundler/setup"
+ require "kafka_rest"
+ require "pry"
+
+ docker = `docker-machine active | xargs docker-machine ip`.chomp
+ fail 'docker-machine is not active, did you bin/setup?' unless $?.success?
+ fail 'rest proxy is not reachable, did you bin/setup?' unless system("nc -z #{docker} 8082")
+
+ $k = KafkaRest::Client.new(url: "http://#{docker}:8082")
+
+ # Echo the README's fenced code samples so they are visible in the console.
+ @is_code = false
+ `cat README.md`.each_line do |line|
+   if line.start_with?('```')
+     @is_code = !@is_code
+     next
+   end
+   puts line if @is_code
+ end
+ puts "You can use the already created 'KafkaRest::Client' with \e[32m$k\e[0m"
+
+ if ARGV[0] == 'consumer'
+   $k.consumer('fb').join do |i|
+     i.subscribe('foo') do |s|
+       s.on(:data) do |msg|
+         puts msg
+       end
+     end
+   end
+ end
+
+ Pry.start
data/bin/setup ADDED
@@ -0,0 +1,14 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+ IFS=$'\n\t'
+ set -vx
+
+ bundle install
+
+ # Do any other automated setup that you need to do here
+
+ for cmd in docker docker-machine docker-compose; do
+   if ! command -v "$cmd" >/dev/null; then echo "install docker at https://docs.docker.com/" && exit 1; fi
+ done
+
+ docker-compose up -d
data/docker-compose.yml ADDED
@@ -0,0 +1,29 @@
+ zookeeper:
+   image: confluent/zookeeper
+   ports:
+     - "2181:2181"
+
+ kafka:
+   image: confluent/kafka
+   ports:
+     - "9092:9092"
+   links:
+     - zookeeper
+
+ schema-registry:
+   image: confluent/schema-registry
+   ports:
+     - "8081:8081"
+   links:
+     - zookeeper
+     - kafka
+
+ rest-proxy:
+   image: confluent/rest-proxy
+   ports:
+     - "8082:8082"
+   links:
+     - zookeeper
+     - kafka
+     - schema-registry
+   command: bash -c "curl -sSL https://s3.amazonaws.com/validic-downloads/wait-for-confluent.sh | sh && /usr/local/bin/rest-proxy-docker.sh"
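The compose file publishes the REST proxy on port 8082. A minimal sketch of pointing the client at a running stack; the `docker-machine` machine name `default` is an assumption (on a native Docker install, `localhost` would be the host):

```ruby
require 'kafka_rest'

# Resolve the VM address the compose stack is bound to (assumed docker-machine setup).
host = `docker-machine ip default`.chomp

kafka = KafkaRest::Client.new(url: "http://#{host}:8082")
kafka.list_topics # => one KafkaRest::Topic per topic known to the broker
```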
data/kafka_rest.gemspec ADDED
@@ -0,0 +1,27 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'kafka_rest/version'
+
+ Gem::Specification.new do |spec|
+   spec.name          = "kafka_rest"
+   spec.version       = KafkaRest::VERSION
+   spec.authors       = ["Josh Langholtz"]
+   spec.email         = ["jjlangholtz@gmail.com"]
+
+   spec.summary       = "Ruby wrapper for the Kafka REST Proxy"
+   spec.homepage      = "https://github.com/jjlangholtz/kafka_rest"
+   spec.license       = "MIT"
+
+   spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
+   spec.bindir        = "exe"
+   spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
+   spec.require_paths = ["lib"]
+
+   spec.add_development_dependency "bundler", "~> 1.11"
+   spec.add_development_dependency "pry", "~> 0.10"
+   spec.add_development_dependency "rake", "~> 10.0"
+   spec.add_development_dependency "rspec", "~> 3.0"
+   spec.add_development_dependency "simplecov", "~> 0.11"
+   spec.add_development_dependency "webmock", "~> 1.22"
+ end
data/lib/kafka_rest.rb ADDED
@@ -0,0 +1,29 @@
+ require 'net/http'
+
+ require 'kafka_rest/event_emitter'
+ require 'kafka_rest/logging'
+ require 'kafka_rest/producable'
+
+ require 'kafka_rest/broker'
+ require 'kafka_rest/client'
+ require 'kafka_rest/consumer'
+ require 'kafka_rest/consumer_instance'
+ require 'kafka_rest/consumer_stream'
+ require 'kafka_rest/partition'
+ require 'kafka_rest/schema'
+ require 'kafka_rest/schema_parser'
+ require 'kafka_rest/topic'
+
+ require 'kafka_rest/version'
+
+ module KafkaRest
+   EMPTY_STRING = ''.freeze
+   TWO_OCTET_JSON = '{}'.freeze
+   RIGHT_BRACE = '}'.freeze
+
+   class << self
+     def logger
+       KafkaRest::Logging.logger
+     end
+   end
+ end
data/lib/kafka_rest/broker.rb ADDED
@@ -0,0 +1,14 @@
+ module KafkaRest
+   class Broker
+     attr_reader :client, :id
+
+     def initialize(client, id)
+       @client = client
+       @id = id
+     end
+
+     def to_s
+       "Broker{id=#{id}}".freeze
+     end
+   end
+ end
data/lib/kafka_rest/client.rb ADDED
@@ -0,0 +1,78 @@
+ require 'json'
+ require 'uri'
+
+ module KafkaRest
+   class Client
+     DEFAULT_URL = 'http://localhost:8080'.freeze
+     BROKERS_PATH = '/brokers'.freeze
+     TOPICS_PATH = '/topics'.freeze
+     CONTENT_JSON = 'application/json'.freeze
+
+     attr_reader :url, :brokers, :topics, :consumers
+
+     def initialize(url: DEFAULT_URL)
+       @url = url
+       @brokers = []
+       @topics = {}
+       @consumers = {}
+     end
+
+     def list_brokers
+       request(BROKERS_PATH).fetch('brokers'.freeze, []).map do |id|
+         KafkaRest::Broker.new(self, id)
+       end.tap { |b| @brokers = b }
+     end
+
+     def list_topics
+       request(TOPICS_PATH).map do |name|
+         @topics[name] = KafkaRest::Topic.new(self, name)
+       end
+     end
+
+     def topic(name, schema = nil)
+       @topics[name] ||= KafkaRest::Topic.new(self, name, EMPTY_STRING, schema)
+     end
+     alias_method :[], :topic
+
+     def consumer(group)
+       @consumers[group] ||= Consumer.new(self, group)
+     end
+
+     def request(path, verb: Net::HTTP::Get, body: nil, schema: nil, &block)
+       uri = URI.parse(path)
+       uri = URI.parse(url + path) unless uri.absolute?
+
+       Net::HTTP.start(uri.host, uri.port) do |http|
+         req = verb.new(uri)
+         req['User-Agent'.freeze] = user_agent
+         req['Accept'.freeze] = CONTENT_JSON
+
+         # `verb` is a request class, not an instance, so compare classes; only POSTs carry a body.
+         if verb == Net::HTTP::Post
+           req['Content-Type'.freeze] = schema ? schema.content_type : CONTENT_JSON
+           req.body = body.to_json
+           KafkaRest.logger.info { "Post body: #{req.body}" }
+         end
+
+         res = http.request(req)
+         yield res if block_given?
+
+         JSON.parse(res.body.to_s)
+       end
+     end
+
+     def post(path, body = nil, schema = nil, raw_response = false)
+       raw = nil
+       res = request(path, verb: Net::HTTP::Post, body: body, schema: schema) do |resp|
+         raw = resp
+       end
+       raw_response ? raw : res
+     end
+
+     private
+
+     def user_agent
+       "kafka-rest-ruby/#{KafkaRest::VERSION}".freeze
+     end
+   end
+ end
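`Client#request` resolves a relative path against the configured base URL but uses absolute URIs verbatim, which is how a consumer instance can later be addressed by the `base_uri` the proxy hands back. A minimal sketch of the two helpers, assuming a proxy on localhost:8082 and a topic named `foo`:

```ruby
client = KafkaRest::Client.new(url: 'http://localhost:8082')

# GET: a relative path is resolved against the client's base URL.
client.request('/topics')

# POST: the body hash is serialized to JSON; pass raw_response = true
# to get the Net::HTTPResponse instead of the parsed body.
client.post('/topics/foo', records: [{ value: 'hello' }])
```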
data/lib/kafka_rest/consumer.rb ADDED
@@ -0,0 +1,25 @@
+ module KafkaRest
+   class Consumer
+     attr_reader :client, :group_name, :instances
+
+     def initialize(client, group_name)
+       @client = client
+       @group_name = group_name
+       @instances = {}
+     end
+
+     def join
+       res = client.post(consumers_path)
+       instance = ConsumerInstance.new(self, res)
+       @instances[res['instance_id']] = instance
+       yield instance if block_given?
+       instance.start!
+     end
+
+     private
+
+     def consumers_path
+       "/consumers/#{group_name}".freeze
+     end
+   end
+ end
data/lib/kafka_rest/consumer_instance.rb ADDED
@@ -0,0 +1,37 @@
+ module KafkaRest
+   class ConsumerInstance
+     attr_reader :client, :consumer, :raw, :id, :uri, :streams
+
+     def initialize(consumer, raw)
+       @client = consumer.client
+       @consumer = consumer
+       @raw = raw
+       @id = raw.fetch('instance_id') { fail 'consumer response did not contain instance_id' }
+       @uri = raw.fetch('base_uri') { fail 'consumer response did not contain base_uri' }
+       @streams = []
+       @active = true
+     end
+
+     def subscribe(topic)
+       stream = ConsumerStream.new(self, topic)
+       @streams << stream
+       yield stream if block_given?
+     end
+
+     def start!
+       threads = []
+       @streams.each { |stream| threads << Thread.new { stream.read } }
+       threads.each(&:join)
+     end
+
+     def shutdown!
+       @streams.each(&:shutdown!)
+       client.request(uri, verb: Net::HTTP::Delete)
+       @active = false
+     end
+
+     def active?
+       !!@active
+     end
+   end
+ end
data/lib/kafka_rest/consumer_stream.rb ADDED
@@ -0,0 +1,59 @@
+ require 'base64'
+
+ module KafkaRest
+   class ConsumerStream
+     include EventEmitter
+
+     attr_reader :client, :instance, :topic
+
+     def initialize(instance, topic)
+       @client = instance.client
+       @instance = instance
+       @topic = topic
+       @active = true
+     end
+
+     def read
+       loop do
+         client.request(consume_path) do |res|
+           messages = JSON.parse(res.body.to_s)
+           break if messages.empty?
+
+           if res.code.to_i >= 400
+             emit(:error, messages)
+           else
+             emit(:data, messages.map(&decode))
+           end
+         end
+
+         unless active?
+           emit(:end)
+           @cleanup.call if @cleanup.is_a? Proc
+           break # out of read loop
+         end
+       end
+     end
+
+     def active?
+       !!@active
+     end
+
+     def shutdown!(&block)
+       @active = false
+       @cleanup = block if block_given?
+     end
+
+     private
+
+     def consume_path
+       "#{instance.uri}/topics/#{topic}".freeze
+     end
+
+     # { 'key' => 'aGVsbG8' } -> { 'key' => 'hello' }
+     # { 'value' => 'd29ybGQ' } -> { 'value' => 'world' }
+     # { 'key' => 'aGVsbG8', 'value' => 'd29ybGQ' } -> { 'key' => 'hello', 'value' => 'world' }
+     def decode
+       ->(h) { %w(key value).each { |k| next unless h[k]; h[k] = Base64.decode64(h[k]) }; h }
+     end
+   end
+ end
data/lib/kafka_rest/event_emitter.rb ADDED
@@ -0,0 +1,18 @@
+ module KafkaRest
+   module EventEmitter
+     def on(type, &block)
+       callbacks[type] << block
+       self
+     end
+
+     def emit(type, *args)
+       callbacks[type].each { |block| block.call(*args) }
+     end
+
+     private
+
+     def callbacks
+       @callbacks ||= Hash.new { |h, k| h[k] = [] }
+     end
+   end
+ end
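`EventEmitter` is the small callback registry behind `ConsumerStream`'s `on(:data)` / `on(:error)` API: `on` appends a handler for an event type and `emit` fires every handler registered for that type, in order. A standalone sketch (the `Ticker` class is hypothetical, purely for illustration):

```ruby
class Ticker
  include KafkaRest::EventEmitter
end

ticker = Ticker.new
ticker.on(:tick) { |n| puts "tick #{n}" } # `on` returns self, so calls can chain
ticker.on(:tick) { |n| puts "tock #{n}" }
ticker.emit(:tick, 1) # prints "tick 1" then "tock 1"
```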
data/lib/kafka_rest/logging.rb ADDED
@@ -0,0 +1,17 @@
+ require 'logger'
+
+ module KafkaRest
+   module Logging
+     class << self
+       def initialize_logger(log_target = STDOUT)
+         @logger = Logger.new(log_target)
+         @logger.level = Logger::INFO
+         @logger
+       end
+
+       def logger
+         @logger || initialize_logger
+       end
+     end
+   end
+ end
data/lib/kafka_rest/partition.rb ADDED
@@ -0,0 +1,35 @@
+ module KafkaRest
+   class Partition
+     include Producable
+
+     attr_reader :client, :topic, :id, :raw
+
+     def initialize(client, topic, id, raw = EMPTY_STRING)
+       @client = client
+       @topic = topic
+       @id = id
+       @raw = raw
+     end
+
+     def get
+       client.request(partition_path).tap { |res| @raw = res }
+     end
+
+     def to_s
+       res = "Partition{topic=\"#{topic.name}\", id=#{id}".freeze
+       res += ", leader=#{raw['leader']}".freeze unless raw.empty?
+       res += ", replicas=#{raw['replicas'].size}".freeze unless raw.empty?
+       res += RIGHT_BRACE
+     end
+
+     private
+
+     def partition_path
+       "/topics/#{topic.name}/partitions/#{id}".freeze
+     end
+
+     def produce_path
+       partition_path
+     end
+   end
+ end
data/lib/kafka_rest/producable.rb ADDED
@@ -0,0 +1,14 @@
+ module KafkaRest
+   module Producable
+     def produce(*messages)
+       client.post(produce_path, records: messages.flatten.map(&wrap))
+     end
+
+     private
+
+     # 'msg' -> { value: 'msg' }
+     def wrap
+       ->(m) { m.is_a?(Hash) ? m : Hash[:value, m] }
+     end
+   end
+ end
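`wrap` is what lets the README's `produce` variants mix bare strings with record hashes: anything that is not already a hash is wrapped as `{ value: ... }` before the flattened list is posted. A sketch of the equivalence, reusing the `topic` from the README examples:

```ruby
partition = topic.partition(0)

# Both calls POST the same body to /topics/<name>/partitions/0:
#   { records: [{ value: 'msg1' }, { key: 'k', value: 'msg2' }] }
partition.produce('msg1', { key: 'k', value: 'msg2' })
partition.produce(['msg1', { key: 'k', value: 'msg2' }])
```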
data/lib/kafka_rest/schema.rb ADDED
@@ -0,0 +1,23 @@
+ module KafkaRest
+   class Schema
+     AVRO_CONTENT = 'application/vnd.kafka.avro.v1+json'.freeze
+
+     attr_accessor :id
+     attr_reader :serialized, :content_type
+
+     def self.parse(file)
+       new(SchemaParser.call(file))
+     end
+
+     def initialize(serialized)
+       @id = nil
+       @serialized = serialized
+       @mutex = Mutex.new
+       @content_type = AVRO_CONTENT
+     end
+
+     def update_id(id)
+       @mutex.synchronize { @id = id }
+     end
+   end
+ end
data/lib/kafka_rest/schema_parser.rb ADDED
@@ -0,0 +1,37 @@
+ module KafkaRest
+   module SchemaParser
+     TYPE_RE = %r{(?<="type":\s")[\w\.]+(?=")}.freeze
+     WHITELIST = %w(array boolean bytes double enum fixed float int long map null record string)
+
+     class << self
+       def call(file)
+         fail ArgumentError, "#{file} is not a file" unless File.file?(file)
+
+         File.open(file) { |f| parse_file(f) }
+       end
+
+       private
+
+       def parse_file(file)
+         file.each_line.inject(EMPTY_STRING) { |a, e| a + parse_line(e) }
+       end
+
+       def parse_line(line)
+         if match = TYPE_RE.match(line)
+           match = match.to_s
+           type = match.split('.').last || match
+
+           unless WHITELIST.include?(type)
+             File.open("#{type}.avsc") do |file|
+               line.sub!("\"#{match}\"", parse_file(file))
+             end
+           end
+         end
+
+         # gsub (not gsub!) so a line with no whitespace still returns the string
+         # rather than nil, which would break the inject accumulator above.
+         line.gsub(/\s/, EMPTY_STRING)
+       end
+     end
+   end
+ end
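`SchemaParser` flattens a schema file to single-line JSON and splices in any custom (non-whitelisted) type by reading `<type>.avsc` from the working directory. A sketch under that assumption, with hypothetical `user.avsc` and `address.avsc` files:

```ruby
# user.avsc:    { "type": "record", "name": "user",
#                 "fields": [{ "name": "home", "type": "address" }] }
# address.avsc: defines the address record; the parser splices its JSON
#               in place of the "address" reference.
schema = KafkaRest::Schema.parse('user.avsc')
schema.serialized   # => compact one-line JSON with address inlined
schema.content_type # => "application/vnd.kafka.avro.v1+json"
```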
data/lib/kafka_rest/topic.rb ADDED
@@ -0,0 +1,120 @@
+ require 'monitor'
+
+ module KafkaRest
+   class Topic
+     attr_reader :client, :name, :raw, :partitions, :schema
+
+     def initialize(client, name, raw = EMPTY_STRING, schema = nil)
+       @client = client
+       @name = name
+       @schema = schema
+       @raw = raw
+       @partitions = []
+
+       @retry_count = 3
+       @running = true
+       @queue = Queue.new
+       @cond = ConditionVariable.new
+       @mutex = Mutex.new
+
+       @thread = thread_start
+     end
+
+     def get
+       client.request(topic_path).tap { |res| @raw = res }
+     end
+
+     def partition(id)
+       partitions[id] ||= Partition.new(client, self, id)
+     end
+     alias_method :[], :partition
+
+     def list_partitions
+       client.request(partitions_path).map do |raw|
+         Partition.new(client, self, raw['partition'], raw)
+       end.tap { |p| @partitions = p }
+     end
+
+     def produce(*messages)
+       payload = { records: format(messages) }
+
+       # Send the full serialized schema only until the proxy has assigned it an id.
+       if schema && schema.id
+         payload[:value_schema_id] = schema.id
+       elsif schema
+         payload[:value_schema] = schema.serialized
+       end
+
+       res = client.post(topic_path, payload, schema, true)
+
+       if schema && schema_id = JSON.parse(res.body.to_s)['value_schema_id']
+         schema.update_id(schema_id)
+       end
+
+       res
+     end
+
+     def produce_async(*messages)
+       @mutex.synchronize do
+         @queue << format(messages)
+         @cond.signal
+       end
+     end
+
+     def to_s
+       "Topic{name=#{name}}".freeze
+     end
+
+     private
+
+     def format(*messages)
+       messages.flatten.map(&wrap)
+     end
+
+     # 'msg' -> { value: 'msg' }
+     def wrap
+       ->(m) { m.is_a?(Hash) ? m : Hash[:value, m] }
+     end
+
+     def topic_path
+       "/topics/#{name}".freeze
+     end
+
+     def partitions_path
+       "/topics/#{name}/partitions".freeze
+     end
+
+     def produce_path
+       topic_path
+     end
+
+     def thread_start
+       Thread.new do
+         begin
+           while @running
+             @mutex.synchronize do
+               if @queue.empty?
+                 @cond.wait(@mutex)
+               else
+                 messages = @queue.pop
+
+                 @retry_count.times do
+                   begin
+                     res = produce(messages)
+                     break unless res.code.to_i >= 400
+                   rescue StandardError => e
+                     KafkaRest.logger.info { e.message }
+                     KafkaRest.logger.info { e.backtrace.join("\n") }
+                   end
+                 end
+               end
+             end
+           end
+         rescue ::Exception => e
+           KafkaRest.logger.info { e.message }
+           KafkaRest.logger.info { e.backtrace.join("\n") }
+         end
+       end
+     end
+   end
+ end
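With a schema attached, the first successful `produce` uploads the serialized schema and caches the `value_schema_id` the proxy returns, so later calls send only the id; `produce_async` hands messages to the topic's background thread instead of blocking. A minimal sketch, assuming a hypothetical `events.avsc` file and the `kafka` client from the README:

```ruby
schema = KafkaRest::Schema.parse('events.avsc')
topic  = kafka.topic('events', schema)

topic.produce(value: { 'type' => 'signup' }) # sends value_schema, caches value_schema_id
topic.produce(value: { 'type' => 'login' })  # sends only the cached value_schema_id

# Fire-and-forget: enqueued and retried up to three times by the background thread.
topic.produce_async(value: { 'type' => 'click' })
```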
data/lib/kafka_rest/version.rb ADDED
@@ -0,0 +1,3 @@
+ module KafkaRest
+   VERSION = '0.1.0'.freeze
+ end
metadata ADDED
@@ -0,0 +1,154 @@
+ --- !ruby/object:Gem::Specification
+ name: kafka_rest
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Josh Langholtz
+ autorequire:
+ bindir: exe
+ cert_chain: []
+ date: 2016-01-18 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.11'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.11'
+ - !ruby/object:Gem::Dependency
+   name: pry
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.10'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.10'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ - !ruby/object:Gem::Dependency
+   name: simplecov
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.11'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.11'
+ - !ruby/object:Gem::Dependency
+   name: webmock
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.22'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.22'
+ description:
+ email:
+ - jjlangholtz@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - ".gitignore"
+ - ".rspec"
+ - ".travis.yml"
+ - CODE_OF_CONDUCT.md
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - bin/console
+ - bin/setup
+ - docker-compose.yml
+ - kafka_rest.gemspec
+ - lib/kafka_rest.rb
+ - lib/kafka_rest/broker.rb
+ - lib/kafka_rest/client.rb
+ - lib/kafka_rest/consumer.rb
+ - lib/kafka_rest/consumer_instance.rb
+ - lib/kafka_rest/consumer_stream.rb
+ - lib/kafka_rest/event_emitter.rb
+ - lib/kafka_rest/logging.rb
+ - lib/kafka_rest/partition.rb
+ - lib/kafka_rest/producable.rb
+ - lib/kafka_rest/schema.rb
+ - lib/kafka_rest/schema_parser.rb
+ - lib/kafka_rest/topic.rb
+ - lib/kafka_rest/version.rb
+ homepage: https://github.com/jjlangholtz/kafka_rest
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.5.1
+ signing_key:
+ specification_version: 4
+ summary: Ruby wrapper for the Kafka REST Proxy
+ test_files: []