kril 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/.travis.yml ADDED
@@ -0,0 +1,22 @@
+ language: ruby
+ cache: bundler
+
+ rvm:
+   - 2.4.0
+
+ before_install: gem install bundler -v 1.16.0
+
+ notifications:
+   email:
+     recipients:
+       - chad.bowman0@gmail.com
+     on_failure: change
+     on_success: never
+
+ deploy:
+   provider: pages
+   skip_cleanup: true
+   github_token: $GITHUB_TOKEN
+   project_name: kril
+   on:
+     branch: master
data/Gemfile ADDED
@@ -0,0 +1,5 @@
+ source 'https://rubygems.org'
+
+ git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }
+
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2018 Chad Bowman
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,89 @@
+ # Kril 🦐
+
+ Kril is an easy-to-use command-line interface (CLI) for interacting with [Apache Kafka](https://kafka.apache.org/). It uses [Apache Avro](https://avro.apache.org/) for serialization/deserialization.
+
+ [![Build Status](https://travis-ci.org/ChadBowman/kril.svg?branch=master)](https://travis-ci.org/ChadBowman/kril)
+
+ ## Installation
+
+ Add this line to your application's gemspec:
+
+ ```ruby
+ spec.add_development_dependency 'kril', '~> 0.1'
+ ```
+
+ And then execute:
+
+     $ bundle
+
+ Or install it yourself as:
+
+     $ gem install kril
+
+ ## Usage
+
+ Assuming your schema is not registered with the schema registry and doesn't exist in the repository, you can define a schema and commit a record like so:
+ ```bash
+ $ kril --bootstrap-servers 'localhost:9092,localhost:9093,localhost:9094' \
+        --schema-registry 'http://localhost:8081' \
+        --with-schema '{"type":"record","name":"human","fields":[{"name":"age","type":"int"}]}' \
+        --record '{"age": 27}' \
+        human
+ ```
+ ```bash
+ 🦐 saved human: {"type"=>"record", "name"=>"human", "fields"=>[{"name"=>"age", "type"=>"int"}]}
+ 🦐 human: {"age"=>27}
+ ```
+
+ Now we can consume a single record:
+ ```bash
+ $ kril --pretty-print human
+ ```
+ ```bash
+ 🦐 human:
+ {
+   "key": null,
+   "value": {
+     "age": 27
+   },
+   "offset": 0,
+   "create_time": "2018-03-04 00:29:47 -0700",
+   "topic": "human",
+   "partition": 4
+ }
+ ```
+ ---
+ Now that the schema exists, we can produce records simply:
+ ```bash
+ $ kril -r '{"age": 33}' human
+ ```
+ ```bash
+ 🦐 human: {"age"=>33}
+ ```
+ ---
+ Consuming all records ever:
+ ```bash
+ $ kril --consume-all human
+ ```
+ ```bash
+ 🦐 human: {:key=>nil, :value=>{"age"=>27}, :offset=>0, :create_time=>2018-03-04 00:12:32 -0700, :topic=>"human", :partition=>2}
+ 🦐 human: {:key=>nil, :value=>{"age"=>27}, :offset=>0, :create_time=>2018-03-04 00:29:47 -0700, :topic=>"human", :partition=>4}
+ 🦐 human: {:key=>nil, :value=>{"age"=>27}, :offset=>0, :create_time=>2018-03-04 00:26:33 -0700, :topic=>"human", :partition=>1}
+ 🦐 human: {:key=>nil, :value=>{"age"=>27}, :offset=>0, :create_time=>2018-03-04 00:25:54 -0700, :topic=>"human", :partition=>3}
+ 🦐 human: {:key=>nil, :value=>{"age"=>33}, :offset=>1, :create_time=>2018-03-04 00:34:07 -0700, :topic=>"human", :partition=>3}
+ 🦐 human: {:key=>nil, :value=>{"age"=>27}, :offset=>0, :create_time=>2018-03-04 00:13:13 -0700, :topic=>"human", :partition=>0}
+ ```
+
+ ## Contributing
+
+ 1. Fork it ( https://github.com/ChadBowman/kril/fork )
+ 2. Create your feature branch (git checkout -b my-new-feature)
+ 3. Commit your changes (git commit -am 'add some feature')
+ 4. Push to the branch (git push origin my-new-feature)
+ 5. Create a new Pull Request
+
+ Please try to obey [Rubocop](https://github.com/bbatsov/rubocop) to the best of your abilities.
+
+ ## License
+
+ The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
data/Rakefile ADDED
@@ -0,0 +1,10 @@
+ require 'rspec/core/rake_task'
+ require 'bundler/gem_tasks'
+
+ # Default directory to look in is `spec/`
+ # Run with `rake spec`
+ RSpec::Core::RakeTask.new(:spec) do |task|
+   task.rspec_opts = ['--color', '--format documentation']
+ end
+
+ task :default => :spec
data/bin/kril ADDED
@@ -0,0 +1,79 @@
+ #!/usr/bin/env ruby
+ $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
+ $VERBOSE = ARGV.map { |arg| true if arg.match?(/-v|--verbose/) }.compact.first
+
+ require 'avro_turf/messaging'
+ require 'kafka'
+ require 'clamp'
+ require 'kril'
+
+ Clamp do
+   option %w[-k --bootstrap-servers], '', 'address(es) of Kafka cluster',
+          default: %w[localhost:9092 localhost:9093 localhost:9094] do |address_string|
+     address_string.split(/,\s*/)
+   end
+   # configuration
+   option %w[-g --schema-registry], '', 'address of schema registry', default: 'http://localhost:8081'
+   option %w[-p --schemas-path], '', 'directory of Avro schemas', default: 'schemas/'
+   option %w[-v --verbose], :flag, 'print logs, warnings'
+   option %w[-e --pretty-print], :flag, 'pretty print records', default: false
+
+   # producing
+   option %w[-r --record], '', 'record to commit to topic'
+   option %w[-o --syncronous], :flag, 'commit records synchronously', default: false
+   option %w[-w --with-schema], '', 'add schema to repository'
+
+   # consuming
+   option %w[-f --follow], :flag, 'consume from topic indefinitely', default: false
+   option %w[-a --consume-all], :flag, 'consume every record on topic', default: false
+
+   parameter 'TOPIC', 'topic to produce to or consume from'
+   parameter '[NAME]', 'schema name'
+
+   def execute
+     if topic
+       log = Logger.new($stdout)
+       log.level = verbose? ? Logger::WARN : Logger::ERROR
+       kafka = Kafka.new(bootstrap_servers, logger: log, client_id: 'kril')
+       avro = AvroTurf::Messaging.new(registry_url: schema_registry,
+                                      schemas_path: schemas_path,
+                                      logger: log)
+
+       if with_schema
+         schema = JSON.parse(with_schema)
+         schema_name = schema['name']
+         path = File.join(schemas_path, "#{schema_name}.avsc")
+         File.open(path, 'w') { |file| file.write(JSON.pretty_generate(schema)) }
+         print_record("saved #{schema_name}", schema)
+       end
+
+       if record
+         producer = Kril::Producer.new(kafka: kafka, avro: avro)
+         rec = JSON.parse(record)
+         producer.send(record: rec,
+                       schema_name: name || topic,
+                       topic: topic,
+                       syncronous: syncronous?)
+         print_record(topic, rec)
+       else
+         consumer = Kril::Consumer.new(kafka: kafka, avro: avro)
+         if consume_all?
+           consumer.consume_all(topic) do |message|
+             print_record(topic, message)
+           end
+         elsif follow?
+           consumer.listen(topic) do |message|
+             print_record(topic, message)
+           end
+         else
+           print_record(topic, consumer.consume_one(topic))
+         end
+       end
+     end
+   end
+
+   def print_record(topic, record)
+     record = "\n#{JSON.pretty_generate(record)}" if pretty_print?
+     puts "🦐 #{topic}: #{record}"
+   end
+ end
data/kril.gemspec ADDED
@@ -0,0 +1,35 @@
+
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'kril/version'
+
+ Gem::Specification.new do |spec|
+   spec.name = 'kril'
+   spec.version = Kril::VERSION
+   spec.authors = ['Chad Bowman']
+   spec.email = ['chad.bowman0@gmail.com']
+
+   spec.summary = 'A simple command line tool for interacting with Kafka'
+   spec.description = 'Makes producing and consuming topics simple. Useful when experimenting.'
+   spec.homepage = 'https://github.com/ChadBowman/kril'
+   spec.license = 'MIT'
+
+   spec.files = `git ls-files -z`.split("\x0").reject do |f|
+     f.match(%r{^(test|spec|features)/})
+   end
+
+   spec.bindir = 'bin'
+   spec.executables = ['kril']
+   spec.require_paths = ['lib']
+
+   spec.add_development_dependency 'avro_turf', '~> 0.8.0'
+   spec.add_development_dependency 'bundler', '~> 1.16'
+   spec.add_development_dependency 'clamp', '~> 1.2', '>= 1.2.1'
+   spec.add_development_dependency 'httparty', '~> 0.16.0'
+   spec.add_development_dependency 'rake', '~> 10.0'
+   spec.add_development_dependency 'rspec', '~> 3.7'
+   spec.add_development_dependency 'rspec-nc', '~> 0.3.0'
+   spec.add_development_dependency 'ruby-kafka', '~> 0.5.3'
+   spec.add_development_dependency 'sinatra', '~> 2.0', '>= 2.0.1'
+   spec.add_development_dependency 'webmock', '~> 3.3'
+ end
data/lib/kril/consumer.rb ADDED
@@ -0,0 +1,66 @@
+ # frozen_string_literal: true
+
+ module Kril
+   # Consumes records from Kafka
+   class Consumer
+     def initialize(avro: nil, kafka: nil, config: {})
+       config[:group_id] ||= '🦐'
+       @avro = avro
+       @kafka = kafka
+       @config = config
+     end
+
+     def consume_one(topic)
+       consumer = build_consumer(topic, true, @config)
+       msg = nil
+       consumer.each_message do |message|
+         msg = decode(message)
+         consumer.mark_message_as_processed(message)
+         consumer.commit_offsets
+         consumer.stop
+       end
+       msg
+     ensure
+       consumer.stop
+     end
+
+     def consume_all(topic)
+       config = @config.clone
+       config[:group_id] = SecureRandom.uuid
+       consumer = build_consumer(topic, true, config)
+       consumer.each_message do |message|
+         yield decode(message), consumer
+       end
+     ensure
+       consumer.stop
+     end
+
+     def listen(topic)
+       consumer = build_consumer(topic, false, @config)
+       consumer.each_message do |message|
+         yield decode(message), consumer
+       end
+     ensure
+       consumer.stop
+     end
+
+     private
+
+     def build_consumer(topic, start_from_beginning, config)
+       consumer = @kafka.consumer(config)
+       consumer.subscribe(topic, start_from_beginning: start_from_beginning)
+       consumer
+     end
+
+     def decode(message)
+       {
+         key: message.key,
+         value: @avro.decode(message.value),
+         offset: message.offset,
+         create_time: message.create_time,
+         topic: message.topic,
+         partition: message.partition
+       }
+     end
+   end
+ end
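For illustration only (not part of the package): a minimal sketch of driving `Kril::Consumer` directly from Ruby, assuming a local broker at `localhost:9092`, a schema registry at `http://localhost:8081`, and an existing `human` topic, mirroring how `bin/kril` wires the pieces together.

```ruby
require 'kafka'
require 'avro_turf/messaging'
require 'kril'

# Assumed local endpoints (the same defaults bin/kril uses).
kafka = Kafka.new(['localhost:9092'], client_id: 'kril')
avro  = AvroTurf::Messaging.new(registry_url: 'http://localhost:8081', schemas_path: 'schemas/')

consumer = Kril::Consumer.new(kafka: kafka, avro: avro)

# Blocks until a single record arrives, then returns the decoded hash
# (:key, :value, :offset, :create_time, :topic, :partition).
puts consumer.consume_one('human')

# consume_all replays the topic from the beginning under a throwaway group id
# and keeps yielding decoded records until the process is stopped;
# listen does the same but only for newly produced records.
consumer.consume_all('human') { |record, _consumer| puts record }
```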
data/lib/kril/producer.rb ADDED
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Kril
+   # Produces records to Kafka
+   class Producer
+     def initialize(avro: nil, kafka: nil, config: {})
+       config[:required_acks] ||= 1
+       config[:delivery_threshold] ||= 1
+       sync_config = config.dup
+       @avro = avro
+       @async = kafka.async_producer(config)
+       sync_config.delete(:delivery_threshold)
+       @sync = kafka.producer(sync_config)
+     end
+
+     def send(record:, schema_name:, topic: nil, syncronous: false)
+       topic ||= schema_name
+       encoded = @avro.encode(record, schema_name: schema_name)
+       if syncronous
+         @sync.produce(encoded, topic: topic)
+         @sync.deliver_messages
+       else
+         @async.produce(encoded, topic: topic)
+       end
+     ensure
+       @async.shutdown
+       @sync.shutdown
+     end
+   end
+ end
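Again purely illustrative: a sketch of `Kril::Producer`, assuming the same local broker and registry and a `human.avsc` schema under `schemas/` (for example, the one written by `kril --with-schema` in the README). Note the keyword is spelled `syncronous:`, as in the gem's own API.

```ruby
require 'kafka'
require 'avro_turf/messaging'
require 'kril'

kafka = Kafka.new(['localhost:9092'], client_id: 'kril')
avro  = AvroTurf::Messaging.new(registry_url: 'http://localhost:8081', schemas_path: 'schemas/')

producer = Kril::Producer.new(kafka: kafka, avro: avro)

# Encodes the record against the 'human' schema and writes it to the 'human'
# topic (the topic defaults to the schema name). syncronous: true uses the
# blocking producer and delivers immediately; the default path hands the
# record to the async producer, which has a delivery threshold of 1.
producer.send(record: { 'age' => 27 }, schema_name: 'human', syncronous: true)
```

Producer#send shuts down both internal producers when it returns, so a new `Kril::Producer` is built per record, as `bin/kril` does per invocation.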
data/lib/kril/record_builder.rb ADDED
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ module Kril
+   # Creates and validates a record based on a defined schema.
+   class RecordBuilder
+     def initialize(schema_file_name: nil,
+                    schemas_dir: 'schemas',
+                    schema_path: nil)
+       path = schema_path || File.join(schemas_dir, "#{schema_file_name}.avsc")
+       file = File.read(path)
+       @schema = JSON.parse(file)
+     end
+
+     def build(data)
+       data = JSON.parse(data)
+       build_from_record(@schema, data)
+     end
+
+     private
+
+     def build_from_record(schema, data)
+       schema['fields'].each_with_object({}) do |field, record|
+         field_name = field['name']
+         record[field_name] =
+           case field['type']
+           when 'array'
+             build_from_array(field, data[field_name])
+           when 'map'
+             build_from_map(field, data[field_name])
+           when 'record'
+             build_from_record(field, data[field_name])
+           else
+             build_field(field, data[field_name])
+           end
+       end
+     end
+
+     def build_from_array(field, data)
+       data.map { |element| build_field(field, element) }
+     end
+
+     def build_from_map(field, data)
+       data.transform_values { |element| build_field(field, element) }
+     end
+
+     def build_field(field, datum)
+       check_nullity(datum, field)
+       type = field['items'] || field['values'] || field['type']
+       convert_type(datum, type)
+     end
+
+     def check_nullity(datum, field)
+       type = field['values'] || field['items'] || field['type']
+       unless datum || type&.include?('null')
+         raise ArgumentError, "Input for #{field['name']} cannot be nil"
+       end
+     end
+
+     def convert_type(datum, type)
+       type = gather_types(type)
+       if datum.nil?
+         nil
+       elsif type.include?('int') || type.include?('long')
+         datum.to_i
+       elsif type.include?('float') || type.include?('double')
+         datum.to_f
+       elsif type.include?('boolean')
+         datum.casecmp('true').zero?
+       else
+         datum
+       end
+     end
+
+     def gather_types(type)
+       case type
+       when String
+         type
+       when Array
+         type.flat_map { |t| gather_types(t) }
+       when Hash
+         gather_types(type['type'])
+       end
+     end
+   end
+ end
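For illustration, a sketch of `RecordBuilder` coercing JSON input to the schema's types, assuming `schemas/human.avsc` contains the `human` record schema from the README example.

```ruby
require 'kril'

# Reads schemas/human.avsc (schemas_dir defaults to 'schemas').
builder = Kril::RecordBuilder.new(schema_file_name: 'human')

# String input for the int field is coerced with to_i.
puts builder.build('{"age": "27"}').inspect # => {"age"=>27}

# A nil value for a field whose type does not include 'null' is rejected.
begin
  builder.build('{"age": null}')
rescue ArgumentError => e
  puts e.message # "Input for age cannot be nil"
end
```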
data/lib/kril/schema_extractor.rb ADDED
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+
+ module Kril
+   # Extracts Avro schemas from Avro-generated Java files.
+   class SchemaExtractor
+     def initialize(source_dir:, output_dir:)
+       @source_dir = File.join(Dir.pwd, source_dir)
+       @output_dir = File.join(Dir.pwd, output_dir)
+     end
+
+     def extract
+       find_java_files(@source_dir) do |file|
+         schema = parse_avro_java_class(file)
+         write_avsc(schema, @output_dir) if schema
+       end
+     end
+
+     private
+
+     def find_java_files(root_dir)
+       old_dir = Dir.pwd
+       Dir.chdir(root_dir)
+       java_files = File.join('**', '*.java')
+       Dir.glob(java_files) do |file|
+         yield File.new(file)
+       end
+       Dir.chdir(old_dir)
+     end
+
+     def write_avsc(contents, directory)
+       path = File.join(directory, "#{contents['name']}.avsc")
+       File.open(path, 'w') do |file|
+         file.write(JSON.pretty_generate(contents))
+       end
+     end
+
+     def dejavafy(java_string)
+       java_string.split('","').join.gsub(/\\?\\"/, '"')
+     end
+
+     def parse_avro_java_class(file)
+       file.each_line do |line|
+         extraction = line[/SCHEMA.*parse\("(.*)"\);/, 1]
+         return JSON.parse(dejavafy(extraction)) if extraction
+       end
+       nil
+     end
+   end
+ end
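Illustrative only: a sketch of `SchemaExtractor` pulling `.avsc` definitions out of Avro-generated Java sources. The directory names here are hypothetical and both are assumed to already exist relative to the current working directory.

```ruby
require 'kril'

# 'src/main/java' and 'schemas' are hypothetical example directories.
extractor = Kril::SchemaExtractor.new(source_dir: 'src/main/java', output_dir: 'schemas')

# Scans **/*.java for the SCHEMA ... parse("..."); line that Avro's code
# generator emits, un-escapes the embedded JSON, and writes one .avsc file
# per schema (named after the schema's "name" field) into the output directory.
extractor.extract
```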
data/lib/kril/version.rb ADDED
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ module Kril
+   VERSION = '0.1.0'
+ end
data/lib/kril.rb ADDED
@@ -0,0 +1,17 @@
+ # frozen_string_literal: true
+
+ require 'kril/version'
+ require 'kril/record_builder'
+ require 'kril/schema_extractor'
+ require 'kril/producer'
+ require 'kril/consumer'
+ require 'json'
+ require 'yaml'
+ require 'logger'
+ require 'avro_turf/messaging'
+ require 'kafka'
+ require 'securerandom'
+
+ # TODO
+ module Kril
+ end