ckafka 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: c270b9b1b5eec4340d4c34f073650e55855eb54e
-   data.tar.gz: 677a7fca922e3e02fff193c483179b6542bdee15
+   metadata.gz: 211654fb7358899c3c277b80460a270f481c5011
+   data.tar.gz: 22c9d00374d5000d33939fe30c0c13145ab7d73f
  SHA512:
-   metadata.gz: fee2faba877e2153ab619e36ffde79bcf90c05c3c6086923cc7473181fe9c66b77534a9bc121f2aa8fa915a88e1fab6ba34d3accd8dff55423c376d1bdf80ab0
-   data.tar.gz: ff6a04d6f4470e4eeed5195713273090095e18369d0e84abb48fc17cf95ffbeaad95db6f0d69cae0a04a0a26803e2b46be3e9524d014b2b30707f75eeddcf3df
+   metadata.gz: 7eb6d2ea1812e081a818e306926d00413cdc6c6c28f25aebd79a968001b6d3433d7daaec62ef4383108d89e85d55cec4c06fcfbd00ace335ef16108b6819c0df
+   data.tar.gz: 5efd6674116bb4f8fd3e6e88ef44607365d3489b66a56fff6a1819e56039c69ebf4b23562c8dd0bd24ad70a918d369aa1e03984033d8efd6d8f0ef7ab8ad8543
data/.gitignore CHANGED
@@ -7,4 +7,5 @@
  /pkg/
  /spec/reports/
  /tmp/
+ /confluent/
  *.bundle
data/README.md CHANGED
@@ -1,4 +1,4 @@
- # Ckafka
+ # Ckafka ![CircleCI](https://circleci.com/gh/Shopify/ckafka.svg?style=shield)

  Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/ckafka`. To experiment with that code, run `bin/console` for an interactive prompt.

data/Rakefile CHANGED
@@ -2,7 +2,7 @@ require "bundler/gem_tasks"
  require "rspec/core/rake_task"
  require 'rake/extensiontask'

- RSpec::Core::RakeTask.new(:spec)
+ RSpec::Core::RakeTask.new(spec: :compile)

  task :default => :spec

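Note on the Rakefile change: passing `spec: :compile` makes the `spec` task depend on rake-compiler's `compile` task, so running the specs builds the C extension first. A minimal sketch of the intended wiring follows; the `Rake::ExtensionTask.new("ckafka")` line is an assumption (it sits outside this hunk), inferred from the `rake/extensiontask` require and the gemspec's `ext/ckafka/extconf.rb` extension.

    require "bundler/gem_tasks"
    require "rspec/core/rake_task"
    require "rake/extensiontask"

    # Assumed: rake-compiler defines the :compile task for ext/ckafka.
    Rake::ExtensionTask.new("ckafka")

    # :spec now depends on :compile, so `bundle exec rake spec` builds
    # the extension before running the RSpec suite.
    RSpec::Core::RakeTask.new(spec: :compile)

    task :default => :spec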
data/circle.yml ADDED
@@ -0,0 +1,27 @@
+ machine:
+   ruby:
+     version: 2.0.0
+   environment:
+     LD_LIBRARY_PATH: /usr/local/lib
+
+ dependencies:
+   pre:
+     - make confluent/rest/start
+     - confluent/bin/kafka-topics --create --topic test.1 --partitions 1 --replication-factor 1 --zookeeper localhost
+     - >
+       git clone git@github.com:edenhill/librdkafka.git ~/librdkafka;
+       cd ~/librdkafka;
+       git reset --hard 0c15023708302c41a36e95f9650d69c453dfabba;
+       ./configure;
+       make;
+       sudo make install
+   post:
+     - confluent/bin/kafka-topics --list --zookeeper localhost
+
+ test:
+   override:
+     - bundle exec rake spec
+
+ database:
+   override:
+     - echo "noop"
data/ckafka.gemspec CHANGED
@@ -19,8 +19,10 @@ Gem::Specification.new do |spec|
    spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
    spec.require_paths = ["lib"]
    spec.extensions = %w(ext/ckafka/extconf.rb)
-   spec.add_development_dependency "bundler", "~> 1.11"
+   spec.add_development_dependency "bundler", "~> 1.9"
    spec.add_development_dependency "rake", "~> 10.0"
    spec.add_development_dependency "rspec", "~> 3.0"
-   spec.add_development_dependency 'rake-compile'
+   spec.add_development_dependency 'rake-compiler'
+   spec.add_development_dependency 'minitest', '~> 5.8'
+   spec.add_development_dependency "poseidon", "= 0.0.5"
  end
data/ext/ckafka/ckafka.c CHANGED
@@ -9,28 +9,35 @@

  static rd_kafka_t *rk = NULL;

+ static void error(const char *fmt, ...)
+ {
+     va_list args;
+
+     va_start(args, fmt);
+     vfprintf(stderr, fmt, args);
+     va_end(args);
+     fputs("\n", stderr);
+ }
+
  static void logger(const rd_kafka_t *rk, int level,
                     const char *fac, const char *buf)
  {
-     fprintf(stderr, "%s: %s\n", fac, buf);
+     fprintf(stderr, "CKafka [%d]: %s: %s\n", level, fac, buf);
  }


  static VALUE kafka_send(VALUE self, VALUE topic_value, VALUE key, VALUE message)
  {
-     rd_kafka_topic_conf_t *topic_conf;
-     rd_kafka_topic_t *topic;
-     char *topic_name;
-     void *message_bytes;
-     size_t message_len;
-     void *key_buf;
-     size_t key_len;
-     int res;
-
-     if (NIL_P(key)) {
-         key_buf = NULL;
-         key_len = 0;
-     } else {
+     rd_kafka_topic_conf_t *topic_conf = NULL;
+     rd_kafka_topic_t *topic = NULL;
+     char *topic_name = NULL;
+     void *message_bytes = NULL;
+     size_t message_len = 0;
+     void *key_buf = NULL;
+     size_t key_len = 0;
+     int res = 0;
+
+     if (!NIL_P(key)) {
          key_buf = RSTRING_PTR(key);
          key_len = RSTRING_LEN(key);
      }
@@ -40,10 +47,7 @@ static VALUE kafka_send(VALUE self, VALUE topic_value, VALUE key, VALUE message)
          rb_raise(rb_eStandardError, "topic is not a string!");
      }

-     if(NIL_P(message)) {
-         message = NULL;
-         message_len = 0;
-     } else {
+     if(!NIL_P(message)) {
          message_bytes = RSTRING_PTR(message);
          if(!message_bytes) {
              rb_raise(rb_eStandardError, "failed to get message ptr");
@@ -65,8 +69,6 @@ static VALUE kafka_send(VALUE self, VALUE topic_value, VALUE key, VALUE message)
      res = rd_kafka_produce(topic, RD_KAFKA_PARTITION_UA, RD_KAFKA_MSG_F_COPY, message_bytes, message_len,
                             key_buf, key_len, NULL);

-     rd_kafka_topic_destroy(topic);
-
      if (res) {
          rb_raise(rb_eStandardError, "rd_kafka_produce failed: %d", res);
      }
@@ -77,17 +79,19 @@ static VALUE kafka_send(VALUE self, VALUE topic_value, VALUE key, VALUE message)
  static VALUE kafka_destroy()
  {
      if(rk) {
-         int i;
-
-         for ( i = 0 ; i < MAX_SHUTDOWN_TRIES ; ++i ) {
-             if (rd_kafka_outq_len(rk) <= 0 ) {
+         int i,res;
+         for(i = 0 ; i < MAX_SHUTDOWN_TRIES; ++i) {
+             if(!rd_kafka_outq_len(rk)) {
                  break;
-             } else {
-                 rd_kafka_poll(rk, 100);
              }
+             rd_kafka_poll(rk, 100);
          }

          rd_kafka_destroy(rk);
+         res = rd_kafka_wait_destroyed(100);
+         if(res) {
+             error("wait_destroyed returned: %d\n", res);
+         }
          rk = NULL;
      }
      return Qnil;
@@ -110,27 +114,37 @@ static VALUE kafka_add_broker(VALUE self, VALUE broker)
      return Qnil;
  }

- static VALUE kafka_init()
+ #define LOG_DEBUG 7
+
+ static void error_cb(rd_kafka_t *rk, int err, const char *reason, void *opaque)
+ {
+     error("[%d] %s\n", err, reason);
+ }
+
+ static VALUE kafka_init(VALUE self)
  {
      rd_kafka_conf_t *conf;
      char errstr[512];

-     if(!rk) {
+     if(rk) {
          kafka_destroy();
      }
      conf = rd_kafka_conf_new();

+     rd_kafka_conf_set_error_cb(conf, error_cb);
      rd_kafka_conf_set_log_cb(conf, logger);

      rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf, errstr, sizeof(errstr));
      if (!rk) {
          rb_raise(rb_eStandardError, "failed to create kafka producer: %s\n", errstr);
      }
+
      return Qnil;
  }

  VALUE Init_ckafka()
  {
+
      VALUE kafka_module = rb_define_module("Ckafka");

      rb_define_singleton_method(kafka_module, "init", kafka_init, 0);
@@ -138,7 +152,7 @@ VALUE Init_ckafka()
      rb_define_singleton_method(kafka_module, "add_broker", kafka_add_broker, 1);
      rb_define_singleton_method(kafka_module, "close", kafka_destroy, 0);

-     kafka_init();
+     kafka_init(Qnil);

      return Qnil;
  }
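For context, the extension exposes a small module-level API on `Ckafka` (`init`, `add_broker`, `close` are registered in the hunks above). A rough Ruby usage sketch, not taken from the gem's documentation: the broker string format and the Ruby-visible name for `kafka_send` are assumptions, since neither appears in this diff.

    require "ckafka"

    # Recreate the producer handle (Init_ckafka already calls kafka_init once).
    Ckafka.init

    # librdkafka-style "host:port" broker string is an assumption here.
    Ckafka.add_broker("localhost:9092")

    # kafka_send(self, topic, key, message): key may be nil, message is a string.
    # The Ruby name bound to kafka_send is outside this diff; "send" is a placeholder.
    Ckafka.send("test.1", "some-key", "hello from ckafka")

    # Drains the outbound queue (bounded polling), then destroys the handle.
    Ckafka.close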
data/lib/ckafka/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Ckafka
-   VERSION = "0.2.0"
+   VERSION = '0.3.0'
  end
data/makefile ADDED
@@ -0,0 +1,54 @@
+ .PHONY: confluent/kafka/* confluent/zookeeper/* confluent/registry/* confluent/start confluent/stop
+
+
+ # Confluent platform tasks
+
+ confluent/start: confluent/rest/start
+
+ confluent/stop: confluent/rest/stop confluent/registry/stop confluent/kafka/stop confluent/zookeeper/stop
+
+ # Download & extract tasks
+
+ confluent/confluent.tgz:
+ 	mkdir -p confluent && wget http://packages.confluent.io/archive/1.0/confluent-1.0-2.10.4.tar.gz -O confluent/confluent.tgz
+
+ confluent/EXTRACTED: confluent/confluent.tgz
+ 	tar xzf confluent/confluent.tgz -C confluent --strip-components 1 && mkdir confluent/logs && touch confluent/EXTRACTED
+ 	echo "delete.topic.enable=true" >> confluent/etc/kafka/server.properties
+ 	echo "auto.create.topics.enable=false" >> confluent/etc/kafka/server.properties
+
+ # Zookeeper tasks
+
+ confluent/zookeeper/start: confluent/EXTRACTED
+ 	nohup confluent/bin/zookeeper-server-start confluent/etc/kafka/zookeeper.properties 2> confluent/logs/zookeeper.err > confluent/logs/zookeeper.out < /dev/null &
+ 	while ! nc localhost 2181 </dev/null; do echo "Waiting for zookeeper..."; sleep 1; done
+
+ confluent/zookeeper/stop: confluent/EXTRACTED
+ 	confluent/bin/zookeeper-server-stop
+
+ # Kafka tasks
+
+ confluent/kafka/start: confluent/zookeeper/start confluent/EXTRACTED
+ 	nohup confluent/bin/kafka-server-start confluent/etc/kafka/server.properties 2> confluent/logs/kafka.err > confluent/logs/kafka.out < /dev/null &
+ 	while ! nc localhost 9092 </dev/null; do echo "Waiting for Kafka..."; sleep 1; done
+
+ confluent/kafka/stop: confluent/EXTRACTED
+ 	confluent/bin/kafka-server-stop
+
+ # schema-registry tasks
+
+ confluent/registry/start: confluent/kafka/start confluent/EXTRACTED
+ 	nohup confluent/bin/schema-registry-start confluent/etc/schema-registry/schema-registry.properties 2> confluent/logs/schema-registry.err > confluent/logs/schema-registry.out < /dev/null &
+ 	while ! nc localhost 8081 </dev/null; do echo "Waiting for schema registry..."; sleep 1; done
+
+ confluent/registry/stop: confluent/EXTRACTED
+ 	confluent/bin/kafka-server-stop
+
+ # REST proxy tasks
+
+ confluent/rest/start: confluent/registry/start confluent/EXTRACTED
+ 	nohup confluent/bin/kafka-rest-start confluent/etc/kafka-rest/kafka-rest.properties 2> confluent/logs/kafka-rest.err > confluent/logs/kafka-rest.out < /dev/null &
+ 	while ! nc localhost 8082 </dev/null; do echo "Waiting for REST proxy..."; sleep 1; done
+
+ confluent/rest/stop: confluent/EXTRACTED
+ 	confluent/bin/kafka-rest-stop
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ckafka
  version: !ruby/object:Gem::Version
-   version: 0.2.0
+   version: 0.3.0
  platform: ruby
  authors:
  - Ian Quick
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2016-05-05 00:00:00.000000000 Z
+ date: 2016-05-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: bundler
@@ -16,14 +16,14 @@ dependencies:
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.11'
+         version: '1.9'
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.11'
+         version: '1.9'
  - !ruby/object:Gem::Dependency
    name: rake
    requirement: !ruby/object:Gem::Requirement
@@ -53,7 +53,7 @@ dependencies:
        - !ruby/object:Gem::Version
          version: '3.0'
  - !ruby/object:Gem::Dependency
-   name: rake-compile
+   name: rake-compiler
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
@@ -66,6 +66,34 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
+ - !ruby/object:Gem::Dependency
+   name: minitest
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.8'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.8'
+ - !ruby/object:Gem::Dependency
+   name: poseidon
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 0.0.5
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 0.0.5
  description: interface to the rdkafka C library
  email:
  - ian.quick@gmail.com
@@ -83,11 +111,13 @@ files:
  - Rakefile
  - bin/console
  - bin/setup
+ - circle.yml
  - ckafka.gemspec
  - ext/ckafka/ckafka.c
  - ext/ckafka/extconf.rb
  - lib/ckafka.rb
  - lib/ckafka/version.rb
+ - makefile
  homepage: https://github.com/ibawt/ckafka
  licenses:
  - MIT